Child trie api changes BREAKING (#4857)

Co-Authored-By: thiolliere <gui.thiolliere@gmail.com>
This commit is contained in:
cheme
2020-04-20 15:21:22 +02:00
committed by GitHub
parent 7d9aa81bfc
commit 4ffcf98d8d
64 changed files with 1514 additions and 1655 deletions
@@ -20,7 +20,7 @@ use log::warn;
use hash_db::Hasher;
use codec::{Decode, Encode};
use sp_core::{traits::RuntimeCode, storage::{ChildInfo, OwnedChildInfo, well_known_keys}};
use sp_core::{traits::RuntimeCode, storage::{ChildInfo, well_known_keys}};
use sp_trie::{TrieMut, MemoryDB, trie_types::TrieDBMut};
use crate::{
@@ -54,19 +54,17 @@ pub trait Backend<H: Hasher>: std::fmt::Debug {
/// Get keyed child storage or None if there is nothing associated.
fn child_storage(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Result<Option<StorageValue>, Self::Error>;
/// Get child keyed storage value hash or None if there is nothing associated.
fn child_storage_hash(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Result<Option<H::Out>, Self::Error> {
self.child_storage(storage_key, child_info, key).map(|v| v.map(|v| H::hash(&v)))
self.child_storage(child_info, key).map(|v| v.map(|v| H::hash(&v)))
}
/// true if a key exists in storage.
@@ -77,11 +75,10 @@ pub trait Backend<H: Hasher>: std::fmt::Debug {
/// true if a key exists in child storage.
fn exists_child_storage(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Result<bool, Self::Error> {
Ok(self.child_storage(storage_key, child_info, key)?.is_some())
Ok(self.child_storage(child_info, key)?.is_some())
}
/// Return the next key in storage in lexicographic order or `None` if there is no value.
@@ -90,16 +87,14 @@ pub trait Backend<H: Hasher>: std::fmt::Debug {
/// Return the next key in child storage in lexicographic order or `None` if there is no value.
fn next_child_storage_key(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8]
) -> Result<Option<StorageKey>, Self::Error>;
/// Retrieve all entries keys of child storage and call `f` for each of those keys.
fn for_keys_in_child_storage<F: FnMut(&[u8])>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
f: F,
);
@@ -118,8 +113,7 @@ pub trait Backend<H: Hasher>: std::fmt::Debug {
/// call `f` for each of those keys.
fn for_child_keys_with_prefix<F: FnMut(&[u8])>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
prefix: &[u8],
f: F,
);
@@ -137,8 +131,7 @@ pub trait Backend<H: Hasher>: std::fmt::Debug {
/// is true if child storage root equals default storage root.
fn child_storage_root<I>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
delta: I,
) -> (H::Out, bool, Self::Transaction)
where
@@ -158,12 +151,11 @@ pub trait Backend<H: Hasher>: std::fmt::Debug {
/// Get all keys of child storage with given prefix
fn child_keys(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
prefix: &[u8],
) -> Vec<StorageKey> {
let mut all = Vec::new();
self.for_child_keys_with_prefix(storage_key, child_info, prefix, |k| all.push(k.to_vec()));
self.for_child_keys_with_prefix(child_info, prefix, |k| all.push(k.to_vec()));
all
}
@@ -183,20 +175,21 @@ pub trait Backend<H: Hasher>: std::fmt::Debug {
where
I1: IntoIterator<Item=(StorageKey, Option<StorageValue>)>,
I2i: IntoIterator<Item=(StorageKey, Option<StorageValue>)>,
I2: IntoIterator<Item=(StorageKey, I2i, OwnedChildInfo)>,
I2: IntoIterator<Item=(ChildInfo, I2i)>,
H::Out: Ord + Encode,
{
let mut txs: Self::Transaction = Default::default();
let mut child_roots: Vec<_> = Default::default();
// child first
for (storage_key, child_delta, child_info) in child_deltas {
for (child_info, child_delta) in child_deltas {
let (child_root, empty, child_txs) =
self.child_storage_root(&storage_key[..], child_info.as_ref(), child_delta);
self.child_storage_root(&child_info, child_delta);
let prefixed_storage_key = child_info.prefixed_storage_key();
txs.consolidate(child_txs);
if empty {
child_roots.push((storage_key, None));
child_roots.push((prefixed_storage_key.into_inner(), None));
} else {
child_roots.push((storage_key, Some(child_root.encode())));
child_roots.push((prefixed_storage_key.into_inner(), Some(child_root.encode())));
}
}
let (root, parent_txs) = self.storage_root(
@@ -239,20 +232,18 @@ impl<'a, T: Backend<H>, H: Hasher> Backend<H> for &'a T {
fn child_storage(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Result<Option<StorageKey>, Self::Error> {
(*self).child_storage(storage_key, child_info, key)
(*self).child_storage(child_info, key)
}
fn for_keys_in_child_storage<F: FnMut(&[u8])>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
f: F,
) {
(*self).for_keys_in_child_storage(storage_key, child_info, f)
(*self).for_keys_in_child_storage(child_info, f)
}
fn next_storage_key(&self, key: &[u8]) -> Result<Option<StorageKey>, Self::Error> {
@@ -261,11 +252,10 @@ impl<'a, T: Backend<H>, H: Hasher> Backend<H> for &'a T {
fn next_child_storage_key(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Result<Option<StorageKey>, Self::Error> {
(*self).next_child_storage_key(storage_key, child_info, key)
(*self).next_child_storage_key(child_info, key)
}
fn for_keys_with_prefix<F: FnMut(&[u8])>(&self, prefix: &[u8], f: F) {
@@ -274,12 +264,11 @@ impl<'a, T: Backend<H>, H: Hasher> Backend<H> for &'a T {
fn for_child_keys_with_prefix<F: FnMut(&[u8])>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
prefix: &[u8],
f: F,
) {
(*self).for_child_keys_with_prefix(storage_key, child_info, prefix, f)
(*self).for_child_keys_with_prefix(child_info, prefix, f)
}
fn storage_root<I>(&self, delta: I) -> (H::Out, Self::Transaction)
@@ -292,15 +281,14 @@ impl<'a, T: Backend<H>, H: Hasher> Backend<H> for &'a T {
fn child_storage_root<I>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
delta: I,
) -> (H::Out, bool, Self::Transaction)
where
I: IntoIterator<Item=(StorageKey, Option<StorageValue>)>,
H::Out: Ord,
{
(*self).child_storage_root(storage_key, child_info, delta)
(*self).child_storage_root(child_info, delta)
}
fn pairs(&self) -> Vec<(StorageKey, StorageValue)> {
@@ -331,7 +319,7 @@ impl Consolidate for () {
}
impl Consolidate for Vec<(
Option<(StorageKey, OwnedChildInfo)>,
Option<ChildInfo>,
StorageCollection,
)> {
fn consolidate(&mut self, mut other: Self) {
+44 -54
View File
@@ -21,11 +21,11 @@ use std::{
};
use crate::{Backend, InMemoryBackend, StorageKey, StorageValue};
use hash_db::Hasher;
use sp_trie::{TrieConfiguration, default_child_trie_root};
use sp_trie::{TrieConfiguration, empty_child_trie_root};
use sp_trie::trie_types::Layout;
use sp_core::{
storage::{
well_known_keys::is_child_storage_key, ChildStorageKey, Storage,
well_known_keys::is_child_storage_key, Storage,
ChildInfo, StorageChild,
},
traits::Externalities, Blake2Hasher,
@@ -83,7 +83,7 @@ impl BasicExternalities {
let mut ext = Self {
inner: Storage {
top: std::mem::replace(&mut storage.top, Default::default()),
children: std::mem::replace(&mut storage.children, Default::default()),
children_default: std::mem::replace(&mut storage.children_default, Default::default()),
},
extensions: Default::default(),
};
@@ -111,7 +111,7 @@ impl BasicExternalities {
impl PartialEq for BasicExternalities {
fn eq(&self, other: &BasicExternalities) -> bool {
self.inner.top.eq(&other.inner.top)
&& self.inner.children.eq(&other.inner.children)
&& self.inner.children_default.eq(&other.inner.children_default)
}
}
@@ -132,7 +132,7 @@ impl From<BTreeMap<StorageKey, StorageValue>> for BasicExternalities {
BasicExternalities {
inner: Storage {
top: hashmap,
children: Default::default(),
children_default: Default::default(),
},
extensions: Default::default(),
}
@@ -150,20 +150,19 @@ impl Externalities for BasicExternalities {
fn child_storage(
&self,
storage_key: ChildStorageKey,
_child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Option<StorageValue> {
self.inner.children.get(storage_key.as_ref()).and_then(|child| child.data.get(key)).cloned()
self.inner.children_default.get(child_info.storage_key())
.and_then(|child| child.data.get(key)).cloned()
}
fn child_storage_hash(
&self,
storage_key: ChildStorageKey,
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Option<Vec<u8>> {
self.child_storage(storage_key, child_info, key).map(|v| Blake2Hasher::hash(&v).encode())
self.child_storage(child_info, key).map(|v| Blake2Hasher::hash(&v).encode())
}
fn next_storage_key(&self, key: &[u8]) -> Option<StorageKey> {
@@ -173,12 +172,11 @@ impl Externalities for BasicExternalities {
fn next_child_storage_key(
&self,
storage_key: ChildStorageKey,
_child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Option<StorageKey> {
let range = (Bound::Excluded(key), Bound::Unbounded);
self.inner.children.get(storage_key.as_ref())
self.inner.children_default.get(child_info.storage_key())
.and_then(|child| child.data.range::<[u8], _>(range).next().map(|(k, _)| k).cloned())
}
@@ -196,12 +194,11 @@ impl Externalities for BasicExternalities {
fn place_child_storage(
&mut self,
storage_key: ChildStorageKey,
child_info: ChildInfo,
child_info: &ChildInfo,
key: StorageKey,
value: Option<StorageValue>,
) {
let child_map = self.inner.children.entry(storage_key.into_owned())
let child_map = self.inner.children_default.entry(child_info.storage_key().to_vec())
.or_insert_with(|| StorageChild {
data: Default::default(),
child_info: child_info.to_owned(),
@@ -215,10 +212,9 @@ impl Externalities for BasicExternalities {
fn kill_child_storage(
&mut self,
storage_key: ChildStorageKey,
_child_info: ChildInfo,
child_info: &ChildInfo,
) {
self.inner.children.remove(storage_key.as_ref());
self.inner.children_default.remove(child_info.storage_key());
}
fn clear_prefix(&mut self, prefix: &[u8]) {
@@ -243,11 +239,10 @@ impl Externalities for BasicExternalities {
fn clear_child_prefix(
&mut self,
storage_key: ChildStorageKey,
_child_info: ChildInfo,
child_info: &ChildInfo,
prefix: &[u8],
) {
if let Some(child) = self.inner.children.get_mut(storage_key.as_ref()) {
if let Some(child) = self.inner.children_default.get_mut(child_info.storage_key()) {
let to_remove = child.data.range::<[u8], _>((Bound::Included(prefix), Bound::Unbounded))
.map(|(k, _)| k)
.take_while(|k| k.starts_with(prefix))
@@ -264,20 +259,19 @@ impl Externalities for BasicExternalities {
fn storage_root(&mut self) -> Vec<u8> {
let mut top = self.inner.top.clone();
let keys: Vec<_> = self.inner.children.keys().map(|k| k.to_vec()).collect();
let prefixed_keys: Vec<_> = self.inner.children_default.iter().map(|(_k, v)| {
(v.child_info.prefixed_storage_key(), v.child_info.clone())
}).collect();
// Single child trie implementation currently allows using the same child
// empty root for all child trie. Using null storage key until multiple
// type of child trie support.
let empty_hash = default_child_trie_root::<Layout<Blake2Hasher>>(&[]);
for storage_key in keys {
let child_root = self.child_storage_root(
ChildStorageKey::from_slice(storage_key.as_slice())
.expect("Map only feed by valid keys; qed"),
);
let empty_hash = empty_child_trie_root::<Layout<Blake2Hasher>>();
for (prefixed_storage_key, child_info) in prefixed_keys {
let child_root = self.child_storage_root(&child_info);
if &empty_hash[..] == &child_root[..] {
top.remove(storage_key.as_slice());
top.remove(prefixed_storage_key.as_slice());
} else {
top.insert(storage_key, child_root);
top.insert(prefixed_storage_key.into_inner(), child_root);
}
}
@@ -286,15 +280,15 @@ impl Externalities for BasicExternalities {
fn child_storage_root(
&mut self,
storage_key: ChildStorageKey,
child_info: &ChildInfo,
) -> Vec<u8> {
if let Some(child) = self.inner.children.get(storage_key.as_ref()) {
if let Some(child) = self.inner.children_default.get(child_info.storage_key()) {
let delta = child.data.clone().into_iter().map(|(k, v)| (k, Some(v)));
InMemoryBackend::<Blake2Hasher>::default()
.child_storage_root(storage_key.as_ref(), child.child_info.as_ref(), delta).0
.child_storage_root(&child.child_info, delta).0
} else {
default_child_trie_root::<Layout<Blake2Hasher>>(storage_key.as_ref())
empty_child_trie_root::<Layout<Blake2Hasher>>()
}.encode()
}
@@ -336,8 +330,6 @@ mod tests {
use sp_core::storage::well_known_keys::CODE;
use hex_literal::hex;
const CHILD_INFO_1: ChildInfo<'static> = ChildInfo::new_default(b"unique_id_1");
#[test]
fn commit_should_work() {
let mut ext = BasicExternalities::default();
@@ -361,30 +353,28 @@ mod tests {
#[test]
fn children_works() {
let child_storage = b":child_storage:default:test".to_vec();
let child_info = ChildInfo::new_default(b"storage_key");
let child_info = &child_info;
let mut ext = BasicExternalities::new(Storage {
top: Default::default(),
children: map![
child_storage.clone() => StorageChild {
data: map![ b"doe".to_vec() => b"reindeer".to_vec() ],
child_info: CHILD_INFO_1.to_owned(),
children_default: map![
child_info.storage_key().to_vec() => StorageChild {
data: map![ b"doe".to_vec() => b"reindeer".to_vec() ],
child_info: child_info.to_owned(),
}
]
});
let child = || ChildStorageKey::from_vec(child_storage.clone()).unwrap();
assert_eq!(ext.child_storage(child_info, b"doe"), Some(b"reindeer".to_vec()));
assert_eq!(ext.child_storage(child(), CHILD_INFO_1, b"doe"), Some(b"reindeer".to_vec()));
ext.set_child_storage(child_info, b"dog".to_vec(), b"puppy".to_vec());
assert_eq!(ext.child_storage(child_info, b"dog"), Some(b"puppy".to_vec()));
ext.set_child_storage(child(), CHILD_INFO_1, b"dog".to_vec(), b"puppy".to_vec());
assert_eq!(ext.child_storage(child(), CHILD_INFO_1, b"dog"), Some(b"puppy".to_vec()));
ext.clear_child_storage(child_info, b"dog");
assert_eq!(ext.child_storage(child_info, b"dog"), None);
ext.clear_child_storage(child(), CHILD_INFO_1, b"dog");
assert_eq!(ext.child_storage(child(), CHILD_INFO_1, b"dog"), None);
ext.kill_child_storage(child(), CHILD_INFO_1);
assert_eq!(ext.child_storage(child(), CHILD_INFO_1, b"doe"), None);
ext.kill_child_storage(child_info);
assert_eq!(ext.child_storage(child_info, b"doe"), None);
}
#[test]
@@ -392,6 +382,6 @@ mod tests {
// Make sure no values are set by default in `BasicExternalities`.
let storage = BasicExternalities::new_empty().into_storages();
assert!(storage.top.is_empty());
assert!(storage.children.is_empty());
assert!(storage.children_default.is_empty());
}
}
@@ -32,6 +32,7 @@ use crate::{
input::{InputKey, InputPair, DigestIndex, ExtrinsicIndex, ChildIndex},
},
};
use sp_core::storage::{ChildInfo, ChildType, PrefixedStorageKey};
/// Prepare input pairs for building a changes trie of given block.
///
@@ -105,19 +106,19 @@ fn prepare_extrinsics_input<'a, B, H, Number>(
Number: BlockNumber,
{
let mut children_keys = BTreeSet::<StorageKey>::new();
let mut children_info = BTreeSet::<ChildInfo>::new();
let mut children_result = BTreeMap::new();
for (storage_key, _) in changes.prospective.children.iter()
.chain(changes.committed.children.iter()) {
children_keys.insert(storage_key.clone());
for (_storage_key, (_map, child_info)) in changes.prospective.children_default.iter()
.chain(changes.committed.children_default.iter()) {
children_info.insert(child_info.clone());
}
for storage_key in children_keys {
for child_info in children_info {
let child_index = ChildIndex::<Number> {
block: block.clone(),
storage_key: storage_key.clone(),
storage_key: child_info.prefixed_storage_key(),
};
let iter = prepare_extrinsics_input_inner(backend, block, changes, Some(storage_key))?;
let iter = prepare_extrinsics_input_inner(backend, block, changes, Some(child_info))?;
children_result.insert(child_index, iter);
}
@@ -130,22 +131,22 @@ fn prepare_extrinsics_input_inner<'a, B, H, Number>(
backend: &'a B,
block: &Number,
changes: &'a OverlayedChanges,
storage_key: Option<StorageKey>,
child_info: Option<ChildInfo>,
) -> Result<impl Iterator<Item=InputPair<Number>> + 'a, String>
where
B: Backend<H>,
H: Hasher,
Number: BlockNumber,
{
let (committed, prospective, child_info) = if let Some(sk) = storage_key.as_ref() {
let child_info = changes.child_info(sk).cloned();
(
changes.committed.children.get(sk).map(|c| &c.0),
changes.prospective.children.get(sk).map(|c| &c.0),
child_info,
)
let (committed, prospective) = if let Some(child_info) = child_info.as_ref() {
match child_info.child_type() {
ChildType::ParentKeyId => (
changes.committed.children_default.get(child_info.storage_key()).map(|c| &c.0),
changes.prospective.children_default.get(child_info.storage_key()).map(|c| &c.0),
),
}
} else {
(Some(&changes.committed.top), Some(&changes.prospective.top), None)
(Some(&changes.committed.top), Some(&changes.prospective.top))
};
committed.iter().flat_map(|c| c.iter())
.chain(prospective.iter().flat_map(|c| c.iter()))
@@ -155,13 +156,11 @@ fn prepare_extrinsics_input_inner<'a, B, H, Number>(
Entry::Vacant(entry) => {
// ignore temporary values (values that have null value at the end of operation
// AND are not in storage at the beginning of operation
if let Some(sk) = storage_key.as_ref() {
if !changes.child_storage(sk, k).map(|v| v.is_some()).unwrap_or_default() {
if let Some(child_info) = child_info.as_ref() {
if !backend.exists_child_storage(sk, child_info.as_ref(), k)
.map_err(|e| format!("{}", e))? {
return Ok(map);
}
if let Some(child_info) = child_info.as_ref() {
if !changes.child_storage(child_info, k).map(|v| v.is_some()).unwrap_or_default() {
if !backend.exists_child_storage(&child_info, k)
.map_err(|e| format!("{}", e))? {
return Ok(map);
}
}
} else {
@@ -281,7 +280,7 @@ fn prepare_digest_input<'a, H, Number>(
return Ok((map, child_map));
}
let mut children_roots = BTreeMap::<StorageKey, _>::new();
let mut children_roots = BTreeMap::<PrefixedStorageKey, _>::new();
{
let trie_storage = TrieBackendEssence::<_, H>::new(
crate::changes_trie::TrieBackendStorageAdapter(storage),
@@ -344,22 +343,20 @@ mod test {
use codec::Encode;
use sp_core::Blake2Hasher;
use sp_core::storage::well_known_keys::EXTRINSIC_INDEX;
use sp_core::storage::ChildInfo;
use crate::InMemoryBackend;
use crate::changes_trie::{RootsStorage, Configuration, storage::InMemoryStorage};
use crate::changes_trie::build_cache::{IncompleteCacheAction, IncompleteCachedBuildData};
use crate::overlayed_changes::{OverlayedValue, OverlayedChangeSet};
use super::*;
const CHILD_INFO_1: ChildInfo<'static> = ChildInfo::new_default(b"unique_id_1");
const CHILD_INFO_2: ChildInfo<'static> = ChildInfo::new_default(b"unique_id_2");
fn prepare_for_build(zero: u64) -> (
InMemoryBackend<Blake2Hasher>,
InMemoryStorage<Blake2Hasher, u64>,
OverlayedChanges,
Configuration,
) {
let child_info_1 = ChildInfo::new_default(b"storage_key1");
let child_info_2 = ChildInfo::new_default(b"storage_key2");
let backend: InMemoryBackend<_> = vec![
(vec![100], vec![255]),
(vec![101], vec![255]),
@@ -368,8 +365,9 @@ mod test {
(vec![104], vec![255]),
(vec![105], vec![255]),
].into_iter().collect::<std::collections::BTreeMap<_, _>>().into();
let child_trie_key1 = b"1".to_vec();
let child_trie_key2 = b"2".to_vec();
let prefixed_child_trie_key1 = child_info_1.prefixed_storage_key();
let child_trie_key1 = child_info_1.storage_key().to_vec();
let child_trie_key2 = child_info_2.storage_key().to_vec();
let storage = InMemoryStorage::with_inputs(vec![
(zero + 1, vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 1, key: vec![100] }, vec![1, 3]),
@@ -403,7 +401,7 @@ mod test {
]),
(zero + 9, Vec::new()), (zero + 10, Vec::new()), (zero + 11, Vec::new()), (zero + 12, Vec::new()),
(zero + 13, Vec::new()), (zero + 14, Vec::new()), (zero + 15, Vec::new()),
], vec![(child_trie_key1.clone(), vec![
], vec![(prefixed_child_trie_key1.clone(), vec![
(zero + 1, vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 1, key: vec![100] }, vec![1, 3]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 1, key: vec![101] }, vec![0, 2]),
@@ -430,19 +428,19 @@ mod test {
extrinsics: Some(vec![0, 1].into_iter().collect())
}),
].into_iter().collect(),
children: vec![
children_default: vec![
(child_trie_key1.clone(), (vec![
(vec![100], OverlayedValue {
value: Some(vec![200]),
extrinsics: Some(vec![0, 2].into_iter().collect())
})
].into_iter().collect(), CHILD_INFO_1.to_owned())),
].into_iter().collect(), child_info_1.to_owned())),
(child_trie_key2, (vec![
(vec![100], OverlayedValue {
value: Some(vec![200]),
extrinsics: Some(vec![0, 2].into_iter().collect())
})
].into_iter().collect(), CHILD_INFO_2.to_owned())),
].into_iter().collect(), child_info_2.to_owned())),
].into_iter().collect()
},
committed: OverlayedChangeSet { top: vec![
@@ -459,13 +457,13 @@ mod test {
extrinsics: Some(vec![1].into_iter().collect())
}),
].into_iter().collect(),
children: vec![
children_default: vec![
(child_trie_key1, (vec![
(vec![100], OverlayedValue {
value: Some(vec![202]),
extrinsics: Some(vec![3].into_iter().collect())
})
].into_iter().collect(), CHILD_INFO_1.to_owned())),
].into_iter().collect(), child_info_1.to_owned())),
].into_iter().collect(),
},
collect_extrinsics: true,
@@ -487,6 +485,8 @@ mod test {
#[test]
fn build_changes_trie_nodes_on_non_digest_block() {
fn test_with_zero(zero: u64) {
let child_trie_key1 = ChildInfo::new_default(b"storage_key1").prefixed_storage_key();
let child_trie_key2 = ChildInfo::new_default(b"storage_key2").prefixed_storage_key();
let (backend, storage, changes, config) = prepare_for_build(zero);
let parent = AnchorBlockId { hash: Default::default(), number: zero + 4 };
let changes_trie_nodes = prepare_input(
@@ -503,11 +503,11 @@ mod test {
]);
assert_eq!(changes_trie_nodes.1.into_iter()
.map(|(k,v)| (k, v.collect::<Vec<_>>())).collect::<Vec<_>>(), vec![
(ChildIndex { block: zero + 5u64, storage_key: b"1".to_vec() },
(ChildIndex { block: zero + 5u64, storage_key: child_trie_key1 },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 5u64, key: vec![100] }, vec![0, 2, 3]),
]),
(ChildIndex { block: zero + 5, storage_key: b"2".to_vec() },
(ChildIndex { block: zero + 5, storage_key: child_trie_key2 },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 5, key: vec![100] }, vec![0, 2]),
]),
@@ -523,6 +523,8 @@ mod test {
#[test]
fn build_changes_trie_nodes_on_digest_block_l1() {
fn test_with_zero(zero: u64) {
let child_trie_key1 = ChildInfo::new_default(b"storage_key1").prefixed_storage_key();
let child_trie_key2 = ChildInfo::new_default(b"storage_key2").prefixed_storage_key();
let (backend, storage, changes, config) = prepare_for_build(zero);
let parent = AnchorBlockId { hash: Default::default(), number: zero + 3 };
let changes_trie_nodes = prepare_input(
@@ -544,7 +546,7 @@ mod test {
]);
assert_eq!(changes_trie_nodes.1.into_iter()
.map(|(k,v)| (k, v.collect::<Vec<_>>())).collect::<Vec<_>>(), vec![
(ChildIndex { block: zero + 4u64, storage_key: b"1".to_vec() },
(ChildIndex { block: zero + 4u64, storage_key: child_trie_key1.clone() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4u64, key: vec![100] }, vec![0, 2, 3]),
@@ -553,7 +555,7 @@ mod test {
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![102] }, vec![zero + 2]),
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![105] }, vec![zero + 1]),
]),
(ChildIndex { block: zero + 4, storage_key: b"2".to_vec() },
(ChildIndex { block: zero + 4, storage_key: child_trie_key2.clone() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![100] }, vec![0, 2]),
]),
@@ -568,6 +570,8 @@ mod test {
#[test]
fn build_changes_trie_nodes_on_digest_block_l2() {
fn test_with_zero(zero: u64) {
let child_trie_key1 = ChildInfo::new_default(b"storage_key1").prefixed_storage_key();
let child_trie_key2 = ChildInfo::new_default(b"storage_key2").prefixed_storage_key();
let (backend, storage, changes, config) = prepare_for_build(zero);
let parent = AnchorBlockId { hash: Default::default(), number: zero + 15 };
let changes_trie_nodes = prepare_input(
@@ -590,13 +594,13 @@ mod test {
]);
assert_eq!(changes_trie_nodes.1.into_iter()
.map(|(k,v)| (k, v.collect::<Vec<_>>())).collect::<Vec<_>>(), vec![
(ChildIndex { block: zero + 16u64, storage_key: b"1".to_vec() },
(ChildIndex { block: zero + 16u64, storage_key: child_trie_key1.clone() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 16u64, key: vec![100] }, vec![0, 2, 3]),
InputPair::DigestIndex(DigestIndex { block: zero + 16, key: vec![102] }, vec![zero + 4]),
]),
(ChildIndex { block: zero + 16, storage_key: b"2".to_vec() },
(ChildIndex { block: zero + 16, storage_key: child_trie_key2.clone() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 16, key: vec![100] }, vec![0, 2]),
]),
@@ -657,6 +661,8 @@ mod test {
#[test]
fn build_changes_trie_nodes_ignores_temporary_storage_values() {
fn test_with_zero(zero: u64) {
let child_trie_key1 = ChildInfo::new_default(b"storage_key1").prefixed_storage_key();
let child_trie_key2 = ChildInfo::new_default(b"storage_key2").prefixed_storage_key();
let (backend, storage, mut changes, config) = prepare_for_build(zero);
// 110: missing from backend, set to None in overlay
@@ -685,7 +691,7 @@ mod test {
]);
assert_eq!(changes_trie_nodes.1.into_iter()
.map(|(k,v)| (k, v.collect::<Vec<_>>())).collect::<Vec<_>>(), vec![
(ChildIndex { block: zero + 4u64, storage_key: b"1".to_vec() },
(ChildIndex { block: zero + 4u64, storage_key: child_trie_key1.clone() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4u64, key: vec![100] }, vec![0, 2, 3]),
@@ -694,7 +700,7 @@ mod test {
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![102] }, vec![zero + 2]),
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![105] }, vec![zero + 1]),
]),
(ChildIndex { block: zero + 4, storage_key: b"2".to_vec() },
(ChildIndex { block: zero + 4, storage_key: child_trie_key2.clone() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![100] }, vec![0, 2]),
]),
@@ -709,6 +715,8 @@ mod test {
#[test]
fn cache_is_used_when_changes_trie_is_built() {
let child_trie_key1 = ChildInfo::new_default(b"storage_key1").prefixed_storage_key();
let child_trie_key2 = ChildInfo::new_default(b"storage_key2").prefixed_storage_key();
let (backend, mut storage, changes, config) = prepare_for_build(0);
let parent = AnchorBlockId { hash: Default::default(), number: 15 };
@@ -728,8 +736,8 @@ mod test {
let cached_data4 = IncompleteCacheAction::CacheBuildData(IncompleteCachedBuildData::new())
.set_digest_input_blocks(vec![1, 2, 3])
.insert(None, vec![vec![100], vec![102]].into_iter().collect())
.insert(Some(b"1".to_vec()), vec![vec![103], vec![104]].into_iter().collect())
.insert(Some(b"2".to_vec()), vec![vec![105], vec![106]].into_iter().collect())
.insert(Some(child_trie_key1.clone()), vec![vec![103], vec![104]].into_iter().collect())
.insert(Some(child_trie_key2.clone()), vec![vec![105], vec![106]].into_iter().collect())
.complete(4, &trie_root4);
storage.cache_mut().perform(cached_data4);
@@ -755,7 +763,10 @@ mod test {
.map(|(k, i)| (k, i.collect::<Vec<_>>()))
.collect::<BTreeMap<_, _>>();
assert_eq!(
child_changes_tries_nodes.get(&ChildIndex { block: 16u64, storage_key: b"1".to_vec() }).unwrap(),
child_changes_tries_nodes.get(&ChildIndex {
block: 16u64,
storage_key: child_trie_key1.clone(),
}).unwrap(),
&vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16u64, key: vec![100] }, vec![0, 2, 3]),
@@ -764,7 +775,7 @@ mod test {
],
);
assert_eq!(
child_changes_tries_nodes.get(&ChildIndex { block: 16u64, storage_key: b"2".to_vec() }).unwrap(),
child_changes_tries_nodes.get(&ChildIndex { block: 16u64, storage_key: child_trie_key2.clone() }).unwrap(),
&vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16u64, key: vec![100] }, vec![0, 2]),
@@ -19,6 +19,7 @@
use std::collections::{HashMap, HashSet};
use crate::StorageKey;
use sp_core::storage::PrefixedStorageKey;
/// Changes trie build cache.
///
@@ -38,7 +39,7 @@ pub struct BuildCache<H, N> {
/// The `Option<Vec<u8>>` in inner `HashMap` stands for the child storage key.
/// If it is `None`, then the `HashSet` contains keys changed in top-level storage.
/// If it is `Some`, then the `HashSet` contains keys changed in child storage, identified by the key.
changed_keys: HashMap<H, HashMap<Option<StorageKey>, HashSet<StorageKey>>>,
changed_keys: HashMap<H, HashMap<Option<PrefixedStorageKey>, HashSet<StorageKey>>>,
}
/// The action to perform when block-with-changes-trie is imported.
@@ -56,7 +57,7 @@ pub struct CachedBuildData<H, N> {
block: N,
trie_root: H,
digest_input_blocks: Vec<N>,
changed_keys: HashMap<Option<StorageKey>, HashSet<StorageKey>>,
changed_keys: HashMap<Option<PrefixedStorageKey>, HashSet<StorageKey>>,
}
/// The action to perform when block-with-changes-trie is imported.
@@ -72,7 +73,7 @@ pub(crate) enum IncompleteCacheAction<N> {
#[derive(Debug, PartialEq)]
pub(crate) struct IncompleteCachedBuildData<N> {
digest_input_blocks: Vec<N>,
changed_keys: HashMap<Option<StorageKey>, HashSet<StorageKey>>,
changed_keys: HashMap<Option<PrefixedStorageKey>, HashSet<StorageKey>>,
}
impl<H, N> BuildCache<H, N>
@@ -89,7 +90,7 @@ impl<H, N> BuildCache<H, N>
}
/// Get cached changed keys for changes trie with given root.
pub fn get(&self, root: &H) -> Option<&HashMap<Option<StorageKey>, HashSet<StorageKey>>> {
pub fn get(&self, root: &H) -> Option<&HashMap<Option<PrefixedStorageKey>, HashSet<StorageKey>>> {
self.changed_keys.get(&root)
}
@@ -98,7 +99,7 @@ impl<H, N> BuildCache<H, N>
pub fn with_changed_keys(
&self,
root: &H,
functor: &mut dyn FnMut(&HashMap<Option<StorageKey>, HashSet<StorageKey>>),
functor: &mut dyn FnMut(&HashMap<Option<PrefixedStorageKey>, HashSet<StorageKey>>),
) -> bool {
match self.changed_keys.get(&root) {
Some(changed_keys) => {
@@ -164,7 +165,7 @@ impl<N> IncompleteCacheAction<N> {
/// Insert changed keys of given storage into cached data.
pub(crate) fn insert(
self,
storage_key: Option<StorageKey>,
storage_key: Option<PrefixedStorageKey>,
changed_keys: HashSet<StorageKey>,
) -> Self {
match self {
@@ -200,7 +201,7 @@ impl<N> IncompleteCachedBuildData<N> {
fn insert(
mut self,
storage_key: Option<StorageKey>,
storage_key: Option<PrefixedStorageKey>,
changed_keys: HashSet<StorageKey>,
) -> Self {
self.changed_keys.insert(storage_key, changed_keys);
@@ -22,6 +22,7 @@ use std::collections::VecDeque;
use codec::{Decode, Encode, Codec};
use hash_db::Hasher;
use num_traits::Zero;
use sp_core::storage::PrefixedStorageKey;
use sp_trie::Recorder;
use crate::changes_trie::{AnchorBlockId, ConfigurationRange, RootsStorage, Storage, BlockNumber};
use crate::changes_trie::input::{DigestIndex, ExtrinsicIndex, DigestIndexValue, ExtrinsicIndexValue};
@@ -40,7 +41,7 @@ pub fn key_changes<'a, H: Hasher, Number: BlockNumber>(
begin: Number,
end: &'a AnchorBlockId<H::Out, Number>,
max: Number,
storage_key: Option<&'a [u8]>,
storage_key: Option<&'a PrefixedStorageKey>,
key: &'a [u8],
) -> Result<DrilldownIterator<'a, H, Number>, String> {
// we can't query any roots before root
@@ -79,7 +80,7 @@ pub fn key_changes_proof<'a, H: Hasher, Number: BlockNumber>(
begin: Number,
end: &AnchorBlockId<H::Out, Number>,
max: Number,
storage_key: Option<&[u8]>,
storage_key: Option<&PrefixedStorageKey>,
key: &[u8],
) -> Result<Vec<Vec<u8>>, String> where H::Out: Codec {
// we can't query any roots before root
@@ -127,7 +128,7 @@ pub fn key_changes_proof_check<'a, H: Hasher, Number: BlockNumber>(
begin: Number,
end: &AnchorBlockId<H::Out, Number>,
max: Number,
storage_key: Option<&[u8]>,
storage_key: Option<&PrefixedStorageKey>,
key: &[u8]
) -> Result<Vec<(Number, u32)>, String> where H::Out: Encode {
key_changes_proof_check_with_db(
@@ -150,7 +151,7 @@ pub fn key_changes_proof_check_with_db<'a, H: Hasher, Number: BlockNumber>(
begin: Number,
end: &AnchorBlockId<H::Out, Number>,
max: Number,
storage_key: Option<&[u8]>,
storage_key: Option<&PrefixedStorageKey>,
key: &[u8]
) -> Result<Vec<(Number, u32)>, String> where H::Out: Encode {
// we can't query any roots before root
@@ -188,7 +189,7 @@ pub struct DrilldownIteratorEssence<'a, H, Number>
Number: BlockNumber,
H::Out: 'a,
{
storage_key: Option<&'a [u8]>,
storage_key: Option<&'a PrefixedStorageKey>,
key: &'a [u8],
roots_storage: &'a dyn RootsStorage<H, Number>,
storage: &'a dyn Storage<H, Number>,
@@ -238,7 +239,7 @@ impl<'a, H, Number> DrilldownIteratorEssence<'a, H, Number>
let trie_root = if let Some(storage_key) = self.storage_key {
let child_key = ChildIndex {
block: block.clone(),
storage_key: storage_key.to_vec(),
storage_key: storage_key.clone(),
}.encode();
if let Some(trie_root) = trie_reader(self.storage, trie_root, &child_key)?
.and_then(|v| <Vec<u8>>::decode(&mut &v[..]).ok())
@@ -382,6 +383,11 @@ mod tests {
use sp_runtime::traits::BlakeTwo256;
use super::*;
fn child_key() -> PrefixedStorageKey {
let child_info = sp_core::storage::ChildInfo::new_default(&b"1"[..]);
child_info.prefixed_storage_key()
}
fn prepare_for_drilldown() -> (Configuration, InMemoryStorage<BlakeTwo256, u64>) {
let config = Configuration { digest_interval: 4, digest_levels: 2 };
let backend = InMemoryStorage::with_inputs(vec![
@@ -418,7 +424,7 @@ mod tests {
(16, vec![
InputPair::DigestIndex(DigestIndex { block: 16, key: vec![42] }, vec![4, 8]),
]),
], vec![(b"1".to_vec(), vec![
], vec![(child_key(), vec![
(1, vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 1, key: vec![42] }, vec![0]),
]),
@@ -535,7 +541,7 @@ mod tests {
1,
&AnchorBlockId { hash: Default::default(), number: 100 },
1000,
Some(&b"1"[..]),
Some(&child_key()),
&[42],
).and_then(|i| i.collect::<Result<Vec<_>, _>>()).is_err());
}
@@ -577,7 +583,7 @@ mod tests {
let (remote_config, remote_storage) = prepare_for_drilldown();
let remote_proof_child = key_changes_proof::<BlakeTwo256, u64>(
configuration_range(&remote_config, 0), &remote_storage, 1,
&AnchorBlockId { hash: Default::default(), number: 16 }, 16, Some(&b"1"[..]), &[42]).unwrap();
&AnchorBlockId { hash: Default::default(), number: 16 }, 16, Some(&child_key()), &[42]).unwrap();
// happens on local light node:
@@ -592,7 +598,7 @@ mod tests {
local_storage.clear_storage();
let local_result_child = key_changes_proof_check::<BlakeTwo256, u64>(
configuration_range(&local_config, 0), &local_storage, remote_proof_child, 1,
&AnchorBlockId { hash: Default::default(), number: 16 }, 16, Some(&b"1"[..]), &[42]);
&AnchorBlockId { hash: Default::default(), number: 16 }, 16, Some(&child_key()), &[42]);
// check that drilldown result is the same as if it was happening at the full node
assert_eq!(local_result, Ok(vec![(8, 2), (8, 1), (6, 3), (3, 0)]));
@@ -21,6 +21,7 @@ use crate::{
StorageKey, StorageValue,
changes_trie::BlockNumber
};
use sp_core::storage::PrefixedStorageKey;
/// Key of { changed key => set of extrinsic indices } mapping.
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -49,7 +50,7 @@ pub struct ChildIndex<Number: BlockNumber> {
/// Block at which this key has been inserted in the trie.
pub block: Number,
/// Storage key this node is responsible for.
pub storage_key: StorageKey,
pub storage_key: PrefixedStorageKey,
}
/// Value of { changed key => block/digest block numbers } mapping.
@@ -178,7 +179,7 @@ impl<Number: BlockNumber> Decode for InputKey<Number> {
})),
3 => Ok(InputKey::ChildIndex(ChildIndex {
block: Decode::decode(input)?,
storage_key: Decode::decode(input)?,
storage_key: PrefixedStorageKey::new(Decode::decode(input)?),
})),
_ => Err("Invalid input key variant".into()),
}
@@ -71,6 +71,7 @@ use hash_db::{Hasher, Prefix};
use num_traits::{One, Zero};
use codec::{Decode, Encode};
use sp_core;
use sp_core::storage::PrefixedStorageKey;
use sp_trie::{MemoryDB, DBValue, TrieMut};
use sp_trie::trie_types::TrieDBMut;
use crate::{
@@ -156,7 +157,7 @@ pub trait Storage<H: Hasher, Number: BlockNumber>: RootsStorage<H, Number> {
fn with_cached_changed_keys(
&self,
root: &H::Out,
functor: &mut dyn FnMut(&HashMap<Option<StorageKey>, HashSet<StorageKey>>),
functor: &mut dyn FnMut(&HashMap<Option<PrefixedStorageKey>, HashSet<StorageKey>>),
) -> bool;
/// Get a trie node.
fn get(&self, key: &H::Out, prefix: Prefix) -> Result<Option<DBValue>, String>;
@@ -137,7 +137,8 @@ mod tests {
#[test]
fn prune_works() {
fn prepare_storage() -> InMemoryStorage<BlakeTwo256, u64> {
let child_key = ChildIndex { block: 67u64, storage_key: b"1".to_vec() }.encode();
let child_info = sp_core::storage::ChildInfo::new_default(&b"1"[..]);
let child_key = ChildIndex { block: 67u64, storage_key: child_info.prefixed_storage_key() }.encode();
let mut mdb1 = MemoryDB::<BlakeTwo256>::default();
let root1 = insert_into_memory_db::<BlakeTwo256, _>(
&mut mdb1, vec![(vec![10], vec![20])]).unwrap();
@@ -18,6 +18,7 @@
use std::collections::{BTreeMap, HashSet, HashMap};
use hash_db::{Hasher, Prefix, EMPTY_PREFIX};
use sp_core::storage::PrefixedStorageKey;
use sp_trie::DBValue;
use sp_trie::MemoryDB;
use parking_lot::RwLock;
@@ -96,7 +97,7 @@ impl<H: Hasher, Number: BlockNumber> InMemoryStorage<H, Number> {
#[cfg(test)]
pub fn with_inputs(
mut top_inputs: Vec<(Number, Vec<InputPair<Number>>)>,
children_inputs: Vec<(StorageKey, Vec<(Number, Vec<InputPair<Number>>)>)>,
children_inputs: Vec<(PrefixedStorageKey, Vec<(Number, Vec<InputPair<Number>>)>)>,
) -> Self {
let mut mdb = MemoryDB::default();
let mut roots = BTreeMap::new();
@@ -182,7 +183,7 @@ impl<H: Hasher, Number: BlockNumber> Storage<H, Number> for InMemoryStorage<H, N
fn with_cached_changed_keys(
&self,
root: &H::Out,
functor: &mut dyn FnMut(&HashMap<Option<StorageKey>, HashSet<StorageKey>>),
functor: &mut dyn FnMut(&HashMap<Option<PrefixedStorageKey>, HashSet<StorageKey>>),
) -> bool {
self.cache.with_changed_keys(root, functor)
}
+71 -86
View File
@@ -24,10 +24,10 @@ use crate::{
use hash_db::Hasher;
use sp_core::{
storage::{ChildStorageKey, well_known_keys::is_child_storage_key, ChildInfo},
storage::{well_known_keys::is_child_storage_key, ChildInfo},
traits::Externalities, hexdisplay::HexDisplay,
};
use sp_trie::{trie_types::Layout, default_child_trie_root};
use sp_trie::{trie_types::Layout, empty_child_trie_root};
use sp_externalities::{Extensions, Extension};
use codec::{Decode, Encode};
@@ -181,22 +181,21 @@ where
fn child_storage(
&self,
storage_key: ChildStorageKey,
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Option<StorageValue> {
let _guard = sp_panic_handler::AbortGuard::force_abort();
let result = self.overlay
.child_storage(storage_key.as_ref(), key)
.child_storage(child_info, key)
.map(|x| x.map(|x| x.to_vec()))
.unwrap_or_else(||
self.backend.child_storage(storage_key.as_ref(), child_info, key)
self.backend.child_storage(child_info, key)
.expect(EXT_NOT_ALLOWED_TO_FAIL)
);
trace!(target: "state-trace", "{:04x}: GetChild({}) {}={:?}",
self.id,
HexDisplay::from(&storage_key.as_ref()),
HexDisplay::from(&child_info.storage_key()),
HexDisplay::from(&key),
result.as_ref().map(HexDisplay::from)
);
@@ -206,22 +205,21 @@ where
fn child_storage_hash(
&self,
storage_key: ChildStorageKey,
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Option<Vec<u8>> {
let _guard = sp_panic_handler::AbortGuard::force_abort();
let result = self.overlay
.child_storage(storage_key.as_ref(), key)
.child_storage(child_info, key)
.map(|x| x.map(|x| H::hash(x)))
.unwrap_or_else(||
self.backend.child_storage_hash(storage_key.as_ref(), child_info, key)
self.backend.child_storage_hash(child_info, key)
.expect(EXT_NOT_ALLOWED_TO_FAIL)
);
trace!(target: "state-trace", "{:04x}: ChildHash({}) {}={:?}",
self.id,
HexDisplay::from(&storage_key.as_ref()),
HexDisplay::from(&child_info.storage_key()),
HexDisplay::from(&key),
result,
);
@@ -247,22 +245,21 @@ where
fn exists_child_storage(
&self,
storage_key: ChildStorageKey,
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> bool {
let _guard = sp_panic_handler::AbortGuard::force_abort();
let result = match self.overlay.child_storage(storage_key.as_ref(), key) {
let result = match self.overlay.child_storage(child_info, key) {
Some(x) => x.is_some(),
_ => self.backend
.exists_child_storage(storage_key.as_ref(), child_info, key)
.exists_child_storage(child_info, key)
.expect(EXT_NOT_ALLOWED_TO_FAIL),
};
trace!(target: "state-trace", "{:04x}: ChildExists({}) {}={:?}",
self.id,
HexDisplay::from(&storage_key.as_ref()),
HexDisplay::from(&child_info.storage_key()),
HexDisplay::from(&key),
result,
);
@@ -286,15 +283,14 @@ where
fn next_child_storage_key(
&self,
storage_key: ChildStorageKey,
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Option<StorageKey> {
let next_backend_key = self.backend
.next_child_storage_key(storage_key.as_ref(), child_info, key)
.next_child_storage_key(child_info, key)
.expect(EXT_NOT_ALLOWED_TO_FAIL);
let next_overlay_key_change = self.overlay.next_child_storage_key_change(
storage_key.as_ref(),
child_info.storage_key(),
key
);
@@ -305,7 +301,6 @@ where
Some(overlay_key.0.to_vec())
} else {
self.next_child_storage_key(
storage_key,
child_info,
&overlay_key.0[..],
)
@@ -331,38 +326,36 @@ where
fn place_child_storage(
&mut self,
storage_key: ChildStorageKey,
child_info: ChildInfo,
child_info: &ChildInfo,
key: StorageKey,
value: Option<StorageValue>,
) {
trace!(target: "state-trace", "{:04x}: PutChild({}) {}={:?}",
self.id,
HexDisplay::from(&storage_key.as_ref()),
HexDisplay::from(&child_info.storage_key()),
HexDisplay::from(&key),
value.as_ref().map(HexDisplay::from)
);
let _guard = sp_panic_handler::AbortGuard::force_abort();
self.mark_dirty();
self.overlay.set_child_storage(storage_key.into_owned(), child_info, key, value);
self.overlay.set_child_storage(child_info, key, value);
}
fn kill_child_storage(
&mut self,
storage_key: ChildStorageKey,
child_info: ChildInfo,
child_info: &ChildInfo,
) {
trace!(target: "state-trace", "{:04x}: KillChild({})",
self.id,
HexDisplay::from(&storage_key.as_ref()),
HexDisplay::from(&child_info.storage_key()),
);
let _guard = sp_panic_handler::AbortGuard::force_abort();
self.mark_dirty();
self.overlay.clear_child_storage(storage_key.as_ref(), child_info);
self.backend.for_keys_in_child_storage(storage_key.as_ref(), child_info, |key| {
self.overlay.set_child_storage(storage_key.as_ref().to_vec(), child_info, key.to_vec(), None);
self.overlay.clear_child_storage(child_info);
self.backend.for_keys_in_child_storage(child_info, |key| {
self.overlay.set_child_storage(child_info, key.to_vec(), None);
});
}
@@ -386,21 +379,20 @@ where
fn clear_child_prefix(
&mut self,
storage_key: ChildStorageKey,
child_info: ChildInfo,
child_info: &ChildInfo,
prefix: &[u8],
) {
trace!(target: "state-trace", "{:04x}: ClearChildPrefix({}) {}",
self.id,
HexDisplay::from(&storage_key.as_ref()),
HexDisplay::from(&child_info.storage_key()),
HexDisplay::from(&prefix),
);
let _guard = sp_panic_handler::AbortGuard::force_abort();
self.mark_dirty();
self.overlay.clear_child_prefix(storage_key.as_ref(), child_info, prefix);
self.backend.for_child_keys_with_prefix(storage_key.as_ref(), child_info, prefix, |key| {
self.overlay.set_child_storage(storage_key.as_ref().to_vec(), child_info, key.to_vec(), None);
self.overlay.clear_child_prefix(child_info, prefix);
self.backend.for_child_keys_with_prefix(child_info, prefix, |key| {
self.overlay.set_child_storage(child_info, key.to_vec(), None);
});
}
@@ -425,37 +417,38 @@ where
fn child_storage_root(
&mut self,
storage_key: ChildStorageKey,
child_info: &ChildInfo,
) -> Vec<u8> {
let _guard = sp_panic_handler::AbortGuard::force_abort();
let storage_key = child_info.storage_key();
let prefixed_storage_key = child_info.prefixed_storage_key();
if self.storage_transaction_cache.transaction_storage_root.is_some() {
let root = self
.storage(storage_key.as_ref())
.storage(prefixed_storage_key.as_slice())
.and_then(|k| Decode::decode(&mut &k[..]).ok())
.unwrap_or(
default_child_trie_root::<Layout<H>>(storage_key.as_ref())
empty_child_trie_root::<Layout<H>>()
);
trace!(target: "state-trace", "{:04x}: ChildRoot({}) (cached) {}",
self.id,
HexDisplay::from(&storage_key.as_ref()),
HexDisplay::from(&storage_key),
HexDisplay::from(&root.as_ref()),
);
root.encode()
} else {
let storage_key = storage_key.as_ref();
if let Some(child_info) = self.overlay.child_info(storage_key).cloned() {
if let Some(child_info) = self.overlay.default_child_info(storage_key).cloned() {
let (root, is_empty, _) = {
let delta = self.overlay.committed.children.get(storage_key)
let delta = self.overlay.committed.children_default.get(storage_key)
.into_iter()
.flat_map(|(map, _)| map.clone().into_iter().map(|(k, v)| (k, v.value)))
.chain(
self.overlay.prospective.children.get(storage_key)
self.overlay.prospective.children_default.get(storage_key)
.into_iter()
.flat_map(|(map, _)| map.clone().into_iter().map(|(k, v)| (k, v.value)))
);
self.backend.child_storage_root(storage_key, child_info.as_ref(), delta)
self.backend.child_storage_root(&child_info, delta)
};
let root = root.encode();
@@ -465,9 +458,9 @@ where
// A better design would be to manage 'child_storage_transaction' in a
// similar way as 'storage_transaction' but for each child trie.
if is_empty {
self.overlay.set_storage(storage_key.into(), None);
self.overlay.set_storage(prefixed_storage_key.into_inner(), None);
} else {
self.overlay.set_storage(storage_key.into(), Some(root.clone()));
self.overlay.set_storage(prefixed_storage_key.into_inner(), Some(root.clone()));
}
trace!(target: "state-trace", "{:04x}: ChildRoot({}) {}",
@@ -479,10 +472,10 @@ where
} else {
// empty overlay
let root = self
.storage(storage_key.as_ref())
.storage(prefixed_storage_key.as_slice())
.and_then(|k| Decode::decode(&mut &k[..]).ok())
.unwrap_or(
default_child_trie_root::<Layout<H>>(storage_key.as_ref())
empty_child_trie_root::<Layout<H>>()
);
trace!(target: "state-trace", "{:04x}: ChildRoot({}) (no change) {}",
self.id,
@@ -591,11 +584,6 @@ mod tests {
type TestBackend = InMemoryBackend<Blake2Hasher>;
type TestExt<'a> = Ext<'a, Blake2Hasher, u64, TestBackend>;
const CHILD_KEY_1: &[u8] = b":child_storage:default:Child1";
const CHILD_UUID_1: &[u8] = b"unique_id_1";
const CHILD_INFO_1: ChildInfo<'static> = ChildInfo::new_default(CHILD_UUID_1);
fn prepare_overlay_with_changes() -> OverlayedChanges {
OverlayedChanges {
prospective: vec![
@@ -680,7 +668,7 @@ mod tests {
vec![20] => vec![20],
vec![40] => vec![40]
],
children: map![]
children_default: map![]
}.into();
let ext = TestExt::new(&mut overlay, &mut cache, &backend, None, None);
@@ -707,26 +695,23 @@ mod tests {
#[test]
fn next_child_storage_key_works() {
const CHILD_KEY_1: &[u8] = b":child_storage:default:Child1";
const CHILD_UUID_1: &[u8] = b"unique_id_1";
const CHILD_INFO_1: ChildInfo<'static> = ChildInfo::new_default(CHILD_UUID_1);
let child_info = ChildInfo::new_default(b"Child1");
let child_info = &child_info;
let mut cache = StorageTransactionCache::default();
let child = || ChildStorageKey::from_slice(CHILD_KEY_1).unwrap();
let mut overlay = OverlayedChanges::default();
overlay.set_child_storage(child().as_ref().to_vec(), CHILD_INFO_1, vec![20], None);
overlay.set_child_storage(child().as_ref().to_vec(), CHILD_INFO_1, vec![30], Some(vec![31]));
overlay.set_child_storage(child_info, vec![20], None);
overlay.set_child_storage(child_info, vec![30], Some(vec![31]));
let backend = Storage {
top: map![],
children: map![
child().as_ref().to_vec() => StorageChild {
children_default: map![
child_info.storage_key().to_vec() => StorageChild {
data: map![
vec![10] => vec![10],
vec![20] => vec![20],
vec![40] => vec![40]
],
child_info: CHILD_INFO_1.to_owned(),
child_info: child_info.to_owned(),
}
],
}.into();
@@ -735,65 +720,65 @@ mod tests {
let ext = TestExt::new(&mut overlay, &mut cache, &backend, None, None);
// next_backend < next_overlay
assert_eq!(ext.next_child_storage_key(child(), CHILD_INFO_1, &[5]), Some(vec![10]));
assert_eq!(ext.next_child_storage_key(child_info, &[5]), Some(vec![10]));
// next_backend == next_overlay but next_overlay is a delete
assert_eq!(ext.next_child_storage_key(child(), CHILD_INFO_1, &[10]), Some(vec![30]));
assert_eq!(ext.next_child_storage_key(child_info, &[10]), Some(vec![30]));
// next_overlay < next_backend
assert_eq!(ext.next_child_storage_key(child(), CHILD_INFO_1, &[20]), Some(vec![30]));
assert_eq!(ext.next_child_storage_key(child_info, &[20]), Some(vec![30]));
// next_backend exist but next_overlay doesn't exist
assert_eq!(ext.next_child_storage_key(child(), CHILD_INFO_1, &[30]), Some(vec![40]));
assert_eq!(ext.next_child_storage_key(child_info, &[30]), Some(vec![40]));
drop(ext);
overlay.set_child_storage(child().as_ref().to_vec(), CHILD_INFO_1, vec![50], Some(vec![50]));
overlay.set_child_storage(child_info, vec![50], Some(vec![50]));
let ext = TestExt::new(&mut overlay, &mut cache, &backend, None, None);
// next_overlay exist but next_backend doesn't exist
assert_eq!(ext.next_child_storage_key(child(), CHILD_INFO_1, &[40]), Some(vec![50]));
assert_eq!(ext.next_child_storage_key(child_info, &[40]), Some(vec![50]));
}
#[test]
fn child_storage_works() {
let child_info = ChildInfo::new_default(b"Child1");
let child_info = &child_info;
let mut cache = StorageTransactionCache::default();
let child = || ChildStorageKey::from_slice(CHILD_KEY_1).unwrap();
let mut overlay = OverlayedChanges::default();
overlay.set_child_storage(child().as_ref().to_vec(), CHILD_INFO_1, vec![20], None);
overlay.set_child_storage(child().as_ref().to_vec(), CHILD_INFO_1, vec![30], Some(vec![31]));
overlay.set_child_storage(child_info, vec![20], None);
overlay.set_child_storage(child_info, vec![30], Some(vec![31]));
let backend = Storage {
top: map![],
children: map![
child().as_ref().to_vec() => StorageChild {
children_default: map![
child_info.storage_key().to_vec() => StorageChild {
data: map![
vec![10] => vec![10],
vec![20] => vec![20],
vec![30] => vec![40]
],
child_info: CHILD_INFO_1.to_owned(),
child_info: child_info.to_owned(),
}
],
}.into();
let ext = TestExt::new(&mut overlay, &mut cache, &backend, None, None);
assert_eq!(ext.child_storage(child(), CHILD_INFO_1, &[10]), Some(vec![10]));
assert_eq!(ext.child_storage(child_info, &[10]), Some(vec![10]));
assert_eq!(
ext.child_storage_hash(child(), CHILD_INFO_1, &[10]),
ext.child_storage_hash(child_info, &[10]),
Some(Blake2Hasher::hash(&[10]).as_ref().to_vec()),
);
assert_eq!(ext.child_storage(child(), CHILD_INFO_1, &[20]), None);
assert_eq!(ext.child_storage(child_info, &[20]), None);
assert_eq!(
ext.child_storage_hash(child(), CHILD_INFO_1, &[20]),
ext.child_storage_hash(child_info, &[20]),
None,
);
assert_eq!(ext.child_storage(child(), CHILD_INFO_1, &[30]), Some(vec![31]));
assert_eq!(ext.child_storage(child_info, &[30]), Some(vec![31]));
assert_eq!(
ext.child_storage_hash(child(), CHILD_INFO_1, &[30]),
ext.child_storage_hash(child_info, &[30]),
Some(Blake2Hasher::hash(&[31]).as_ref().to_vec()),
);
}
}
@@ -25,10 +25,10 @@ use crate::{
use std::{error, fmt, collections::{BTreeMap, HashMap}, marker::PhantomData, ops};
use hash_db::Hasher;
use sp_trie::{
MemoryDB, child_trie_root, default_child_trie_root, TrieConfiguration, trie_types::Layout,
MemoryDB, child_trie_root, empty_child_trie_root, TrieConfiguration, trie_types::Layout,
};
use codec::Codec;
use sp_core::storage::{ChildInfo, OwnedChildInfo, Storage};
use sp_core::storage::{ChildInfo, ChildType, Storage};
/// Error impossible.
// FIXME: use `!` type when stabilized. https://github.com/rust-lang/rust/issues/35121
@@ -48,7 +48,7 @@ impl error::Error for Void {
/// In-memory backend. Fully recomputes tries each time `as_trie_backend` is called but useful for
/// tests and proof checking.
pub struct InMemory<H: Hasher> {
inner: HashMap<Option<(StorageKey, OwnedChildInfo)>, BTreeMap<StorageKey, StorageValue>>,
inner: HashMap<Option<ChildInfo>, BTreeMap<StorageKey, StorageValue>>,
// This field is only needed for returning reference in `as_trie_backend`.
trie: Option<TrieBackend<MemoryDB<H>, H>>,
_hasher: PhantomData<H>,
@@ -89,7 +89,7 @@ impl<H: Hasher> PartialEq for InMemory<H> {
impl<H: Hasher> InMemory<H> {
/// Copy the state, with applied updates
pub fn update<
T: IntoIterator<Item = (Option<(StorageKey, OwnedChildInfo)>, StorageCollection)>
T: IntoIterator<Item = (Option<ChildInfo>, StorageCollection)>
>(
&self,
changes: T,
@@ -108,10 +108,10 @@ impl<H: Hasher> InMemory<H> {
}
}
impl<H: Hasher> From<HashMap<Option<(StorageKey, OwnedChildInfo)>, BTreeMap<StorageKey, StorageValue>>>
impl<H: Hasher> From<HashMap<Option<ChildInfo>, BTreeMap<StorageKey, StorageValue>>>
for InMemory<H>
{
fn from(inner: HashMap<Option<(StorageKey, OwnedChildInfo)>, BTreeMap<StorageKey, StorageValue>>) -> Self {
fn from(inner: HashMap<Option<ChildInfo>, BTreeMap<StorageKey, StorageValue>>) -> Self {
InMemory {
inner,
trie: None,
@@ -122,8 +122,8 @@ impl<H: Hasher> From<HashMap<Option<(StorageKey, OwnedChildInfo)>, BTreeMap<Stor
impl<H: Hasher> From<Storage> for InMemory<H> {
fn from(inners: Storage) -> Self {
let mut inner: HashMap<Option<(StorageKey, OwnedChildInfo)>, BTreeMap<StorageKey, StorageValue>>
= inners.children.into_iter().map(|(k, c)| (Some((k, c.child_info)), c.data)).collect();
let mut inner: HashMap<Option<ChildInfo>, BTreeMap<StorageKey, StorageValue>>
= inners.children_default.into_iter().map(|(_k, c)| (Some(c.child_info), c.data)).collect();
inner.insert(None, inners.top);
InMemory {
inner,
@@ -145,12 +145,12 @@ impl<H: Hasher> From<BTreeMap<StorageKey, StorageValue>> for InMemory<H> {
}
}
impl<H: Hasher> From<Vec<(Option<(StorageKey, OwnedChildInfo)>, StorageCollection)>>
impl<H: Hasher> From<Vec<(Option<ChildInfo>, StorageCollection)>>
for InMemory<H> {
fn from(
inner: Vec<(Option<(StorageKey, OwnedChildInfo)>, StorageCollection)>,
inner: Vec<(Option<ChildInfo>, StorageCollection)>,
) -> Self {
let mut expanded: HashMap<Option<(StorageKey, OwnedChildInfo)>, BTreeMap<StorageKey, StorageValue>>
let mut expanded: HashMap<Option<ChildInfo>, BTreeMap<StorageKey, StorageValue>>
= HashMap::new();
for (child_info, key_values) in inner {
let entry = expanded.entry(child_info).or_default();
@@ -165,18 +165,16 @@ impl<H: Hasher> From<Vec<(Option<(StorageKey, OwnedChildInfo)>, StorageCollectio
}
impl<H: Hasher> InMemory<H> {
/// child storage key iterator
pub fn child_storage_keys(&self) -> impl Iterator<Item=(&[u8], ChildInfo)> {
self.inner.iter().filter_map(|item|
item.0.as_ref().map(|v|(&v.0[..], v.1.as_ref()))
)
/// Child storage infos iterator.
pub fn child_storage_infos(&self) -> impl Iterator<Item = &ChildInfo> {
self.inner.iter().filter_map(|item| item.0.as_ref())
}
}
impl<H: Hasher> Backend<H> for InMemory<H> where H::Out: Codec {
type Error = Void;
type Transaction = Vec<(
Option<(StorageKey, OwnedChildInfo)>,
Option<ChildInfo>,
StorageCollection,
)>;
type TrieBackendStorage = MemoryDB<H>;
@@ -187,11 +185,10 @@ impl<H: Hasher> Backend<H> for InMemory<H> where H::Out: Codec {
fn child_storage(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Result<Option<StorageValue>, Self::Error> {
Ok(self.inner.get(&Some((storage_key.to_vec(), child_info.to_owned())))
Ok(self.inner.get(&Some(child_info.to_owned()))
.and_then(|map| map.get(key).map(Clone::clone)))
}
@@ -211,22 +208,20 @@ impl<H: Hasher> Backend<H> for InMemory<H> where H::Out: Codec {
fn for_keys_in_child_storage<F: FnMut(&[u8])>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
mut f: F,
) {
self.inner.get(&Some((storage_key.to_vec(), child_info.to_owned())))
self.inner.get(&Some(child_info.to_owned()))
.map(|map| map.keys().for_each(|k| f(&k)));
}
fn for_child_keys_with_prefix<F: FnMut(&[u8])>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
prefix: &[u8],
f: F,
) {
self.inner.get(&Some((storage_key.to_vec(), child_info.to_owned())))
self.inner.get(&Some(child_info.to_owned()))
.map(|map| map.keys().filter(|key| key.starts_with(prefix)).map(|k| &**k).for_each(f));
}
@@ -253,16 +248,15 @@ impl<H: Hasher> Backend<H> for InMemory<H> where H::Out: Codec {
fn child_storage_root<I>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
delta: I,
) -> (H::Out, bool, Self::Transaction)
where
I: IntoIterator<Item=(Vec<u8>, Option<Vec<u8>>)>,
H::Out: Ord
{
let storage_key = storage_key.to_vec();
let child_info = Some((storage_key.clone(), child_info.to_owned()));
let child_type = child_info.child_type();
let child_info = Some(child_info.to_owned());
let existing_pairs = self.inner.get(&child_info)
.into_iter()
@@ -270,7 +264,6 @@ impl<H: Hasher> Backend<H> for InMemory<H> where H::Out: Codec {
let transaction: Vec<_> = delta.into_iter().collect();
let root = child_trie_root::<Layout<H>, _, _, _>(
&storage_key,
existing_pairs.chain(transaction.iter().cloned())
.collect::<HashMap<_, _>>()
.into_iter()
@@ -279,7 +272,9 @@ impl<H: Hasher> Backend<H> for InMemory<H> where H::Out: Codec {
let full_transaction = transaction.into_iter().collect();
let is_default = root == default_child_trie_root::<Layout<H>>(&storage_key);
let is_default = match child_type {
ChildType::ParentKeyId => root == empty_child_trie_root::<Layout<H>>(),
};
(root, is_default, vec![(child_info, full_transaction)])
}
@@ -294,12 +289,11 @@ impl<H: Hasher> Backend<H> for InMemory<H> where H::Out: Codec {
fn next_child_storage_key(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Result<Option<StorageKey>, Self::Error> {
let range = (ops::Bound::Excluded(key), ops::Bound::Unbounded);
let next_key = self.inner.get(&Some((storage_key.to_vec(), child_info.to_owned())))
let next_key = self.inner.get(&Some(child_info.to_owned()))
.and_then(|map| map.range::<[u8], _>(range).next().map(|(k, _)| k).cloned());
Ok(next_key)
@@ -321,11 +315,10 @@ impl<H: Hasher> Backend<H> for InMemory<H> where H::Out: Codec {
fn child_keys(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
prefix: &[u8],
) -> Vec<StorageKey> {
self.inner.get(&Some((storage_key.to_vec(), child_info.to_owned())))
self.inner.get(&Some(child_info.to_owned()))
.into_iter()
.flat_map(|map| map.keys().filter(|k| k.starts_with(prefix)).cloned())
.collect()
@@ -336,11 +329,10 @@ impl<H: Hasher> Backend<H> for InMemory<H> where H::Out: Codec {
let mut new_child_roots = Vec::new();
let mut root_map = None;
for (child_info, map) in &self.inner {
if let Some((storage_key, _child_info)) = child_info.as_ref() {
// no need to use child_info at this point because we use a MemoryDB for
// proof (with PrefixedMemoryDB it would be needed).
if let Some(child_info) = child_info.as_ref() {
let prefix_storage_key = child_info.prefixed_storage_key();
let ch = insert_into_memory_db::<H, _>(&mut mdb, map.clone().into_iter())?;
new_child_roots.push((storage_key.clone(), ch.as_ref().into()));
new_child_roots.push((prefix_storage_key.into_inner(), ch.as_ref().into()));
} else {
root_map = Some(map);
}
@@ -379,16 +371,18 @@ mod tests {
#[test]
fn in_memory_with_child_trie_only() {
let storage = InMemory::<BlakeTwo256>::default();
let child_info = OwnedChildInfo::new_default(b"unique_id_1".to_vec());
let child_info = ChildInfo::new_default(b"1");
let child_info = &child_info;
let mut storage = storage.update(
vec![(
Some((b"1".to_vec(), child_info.clone())),
Some(child_info.clone()),
vec![(b"2".to_vec(), Some(b"3".to_vec()))]
)]
);
let trie_backend = storage.as_trie_backend().unwrap();
assert_eq!(trie_backend.child_storage(b"1", child_info.as_ref(), b"2").unwrap(),
assert_eq!(trie_backend.child_storage(child_info, b"2").unwrap(),
Some(b"3".to_vec()));
assert!(trie_backend.storage(b"1").unwrap().is_some());
let storage_key = child_info.prefixed_storage_key();
assert!(trie_backend.storage(storage_key.as_slice()).unwrap().is_some());
}
}
+26 -32
View File
@@ -606,8 +606,7 @@ where
/// Generate child storage read proof.
pub fn prove_child_read<B, H, I>(
mut backend: B,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
keys: I,
) -> Result<StorageProof, Box<dyn Error>>
where
@@ -619,7 +618,7 @@ where
{
let trie_backend = backend.as_trie_backend()
.ok_or_else(|| Box::new(ExecutionError::UnableToGenerateProof) as Box<dyn Error>)?;
prove_child_read_on_trie_backend(trie_backend, storage_key, child_info, keys)
prove_child_read_on_trie_backend(trie_backend, child_info, keys)
}
/// Generate storage read proof on pre-created trie backend.
@@ -646,8 +645,7 @@ where
/// Generate storage read proof on pre-created trie backend.
pub fn prove_child_read_on_trie_backend<S, H, I>(
trie_backend: &TrieBackend<S, H>,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
keys: I,
) -> Result<StorageProof, Box<dyn Error>>
where
@@ -660,7 +658,7 @@ where
let proving_backend = proving_backend::ProvingBackend::<_, H>::new(trie_backend);
for key in keys.into_iter() {
proving_backend
.child_storage(storage_key, child_info.clone(), key.as_ref())
.child_storage(child_info, key.as_ref())
.map_err(|e| Box::new(e) as Box<dyn Error>)?;
}
Ok(proving_backend.extract_proof())
@@ -691,7 +689,7 @@ where
pub fn read_child_proof_check<H, I>(
root: H::Out,
proof: StorageProof,
storage_key: &[u8],
child_info: &ChildInfo,
keys: I,
) -> Result<HashMap<Vec<u8>, Option<Vec<u8>>>, Box<dyn Error>>
where
@@ -705,7 +703,7 @@ where
for key in keys.into_iter() {
let value = read_child_proof_check_on_proving_backend(
&proving_backend,
storage_key,
child_info,
key.as_ref(),
)?;
result.insert(key.as_ref().to_vec(), value);
@@ -728,15 +726,14 @@ where
/// Check child storage read proof on pre-created proving backend.
pub fn read_child_proof_check_on_proving_backend<H>(
proving_backend: &TrieBackend<MemoryDB<H>, H>,
storage_key: &[u8],
child_info: &ChildInfo,
key: &[u8],
) -> Result<Option<Vec<u8>>, Box<dyn Error>>
where
H: Hasher,
H::Out: Ord + Codec,
{
// Not a prefixed memory db, using empty unique id and include root resolution.
proving_backend.child_storage(storage_key, ChildInfo::new_default(&[]), key)
proving_backend.child_storage(child_info, key)
.map_err(|e| Box::new(e) as Box<dyn Error>)
}
@@ -748,7 +745,7 @@ mod tests {
use super::*;
use super::ext::Ext;
use super::changes_trie::Configuration as ChangesTrieConfig;
use sp_core::{map, traits::{Externalities, RuntimeCode}, storage::ChildStorageKey};
use sp_core::{map, traits::{Externalities, RuntimeCode}};
use sp_runtime::traits::BlakeTwo256;
#[derive(Clone)]
@@ -759,8 +756,6 @@ mod tests {
fallback_succeeds: bool,
}
const CHILD_INFO_1: ChildInfo<'static> = ChildInfo::new_default(b"unique_id_1");
impl CodeExecutor for DummyCodeExecutor {
type Error = u8;
@@ -1003,6 +998,8 @@ mod tests {
#[test]
fn set_child_storage_works() {
let child_info = ChildInfo::new_default(b"sub1");
let child_info = &child_info;
let mut state = InMemoryBackend::<BlakeTwo256>::default();
let backend = state.as_trie_backend().unwrap();
let mut overlay = OverlayedChanges::default();
@@ -1016,27 +1013,23 @@ mod tests {
);
ext.set_child_storage(
ChildStorageKey::from_slice(b":child_storage:default:testchild").unwrap(),
CHILD_INFO_1,
child_info,
b"abc".to_vec(),
b"def".to_vec()
);
assert_eq!(
ext.child_storage(
ChildStorageKey::from_slice(b":child_storage:default:testchild").unwrap(),
CHILD_INFO_1,
child_info,
b"abc"
),
Some(b"def".to_vec())
);
ext.kill_child_storage(
ChildStorageKey::from_slice(b":child_storage:default:testchild").unwrap(),
CHILD_INFO_1,
child_info,
);
assert_eq!(
ext.child_storage(
ChildStorageKey::from_slice(b":child_storage:default:testchild").unwrap(),
CHILD_INFO_1,
child_info,
b"abc"
),
None
@@ -1045,6 +1038,8 @@ mod tests {
#[test]
fn prove_read_and_proof_check_works() {
let child_info = ChildInfo::new_default(b"sub1");
let child_info = &child_info;
// fetch read proof from 'remote' full node
let remote_backend = trie_backend::tests::test_trie();
let remote_root = remote_backend.storage_root(::std::iter::empty()).0;
@@ -1071,20 +1066,19 @@ mod tests {
let remote_root = remote_backend.storage_root(::std::iter::empty()).0;
let remote_proof = prove_child_read(
remote_backend,
b":child_storage:default:sub1",
CHILD_INFO_1,
child_info,
&[b"value3"],
).unwrap();
let local_result1 = read_child_proof_check::<BlakeTwo256, _>(
remote_root,
remote_proof.clone(),
b":child_storage:default:sub1",
child_info,
&[b"value3"],
).unwrap();
let local_result2 = read_child_proof_check::<BlakeTwo256, _>(
remote_root,
remote_proof.clone(),
b":child_storage:default:sub1",
child_info,
&[b"value2"],
).unwrap();
assert_eq!(
@@ -1099,13 +1093,13 @@ mod tests {
#[test]
fn child_storage_uuid() {
const CHILD_INFO_1: ChildInfo<'static> = ChildInfo::new_default(b"unique_id_1");
const CHILD_INFO_2: ChildInfo<'static> = ChildInfo::new_default(b"unique_id_2");
let child_info_1 = ChildInfo::new_default(b"sub_test1");
let child_info_2 = ChildInfo::new_default(b"sub_test2");
use crate::trie_backend::tests::test_trie;
let mut overlay = OverlayedChanges::default();
let subtrie1 = ChildStorageKey::from_slice(b":child_storage:default:sub_test1").unwrap();
let subtrie2 = ChildStorageKey::from_slice(b":child_storage:default:sub_test2").unwrap();
let mut transaction = {
let backend = test_trie();
let mut cache = StorageTransactionCache::default();
@@ -1116,8 +1110,8 @@ mod tests {
changes_trie::disabled_state::<_, u64>(),
None,
);
ext.set_child_storage(subtrie1, CHILD_INFO_1, b"abc".to_vec(), b"def".to_vec());
ext.set_child_storage(subtrie2, CHILD_INFO_2, b"abc".to_vec(), b"def".to_vec());
ext.set_child_storage(&child_info_1, b"abc".to_vec(), b"def".to_vec());
ext.set_child_storage(&child_info_2, b"abc".to_vec(), b"def".to_vec());
ext.storage_root();
cache.transaction.unwrap()
};
@@ -29,7 +29,7 @@ use crate::{
use std::iter::FromIterator;
use std::collections::{HashMap, BTreeMap, BTreeSet};
use codec::{Decode, Encode};
use sp_core::storage::{well_known_keys::EXTRINSIC_INDEX, OwnedChildInfo, ChildInfo};
use sp_core::storage::{well_known_keys::EXTRINSIC_INDEX, ChildInfo};
use std::{mem, ops};
use hash_db::Hasher;
@@ -79,8 +79,8 @@ pub struct OverlayedValue {
pub struct OverlayedChangeSet {
/// Top level storage changes.
pub top: BTreeMap<StorageKey, OverlayedValue>,
/// Child storage changes.
pub children: HashMap<StorageKey, (BTreeMap<StorageKey, OverlayedValue>, OwnedChildInfo)>,
/// Child storage changes. The map key is the child storage key without the common prefix.
pub children_default: HashMap<StorageKey, (BTreeMap<StorageKey, OverlayedValue>, ChildInfo)>,
}
/// A storage changes structure that can be generated by the data collected in [`OverlayedChanges`].
@@ -174,7 +174,7 @@ impl FromIterator<(StorageKey, OverlayedValue)> for OverlayedChangeSet {
fn from_iter<T: IntoIterator<Item = (StorageKey, OverlayedValue)>>(iter: T) -> Self {
Self {
top: iter.into_iter().collect(),
children: Default::default(),
children_default: Default::default(),
}
}
}
@@ -182,13 +182,13 @@ impl FromIterator<(StorageKey, OverlayedValue)> for OverlayedChangeSet {
impl OverlayedChangeSet {
/// Whether the change set is empty.
pub fn is_empty(&self) -> bool {
self.top.is_empty() && self.children.is_empty()
self.top.is_empty() && self.children_default.is_empty()
}
/// Clear the change set.
pub fn clear(&mut self) {
self.top.clear();
self.children.clear();
self.children_default.clear();
}
}
@@ -219,8 +219,8 @@ impl OverlayedChanges {
/// Returns a double-Option: None if the key is unknown (i.e. and the query should be referred
/// to the backend); Some(None) if the key has been deleted. Some(Some(...)) for a key whose
/// value has been set.
pub fn child_storage(&self, storage_key: &[u8], key: &[u8]) -> Option<Option<&[u8]>> {
if let Some(map) = self.prospective.children.get(storage_key) {
pub fn child_storage(&self, child_info: &ChildInfo, key: &[u8]) -> Option<Option<&[u8]>> {
if let Some(map) = self.prospective.children_default.get(child_info.storage_key()) {
if let Some(val) = map.0.get(key) {
let size_read = val.value.as_ref().map(|x| x.len() as u64).unwrap_or(0);
self.stats.tally_read_modified(size_read);
@@ -228,7 +228,7 @@ impl OverlayedChanges {
}
}
if let Some(map) = self.committed.children.get(storage_key) {
if let Some(map) = self.committed.children_default.get(child_info.storage_key()) {
if let Some(val) = map.0.get(key) {
let size_read = val.value.as_ref().map(|x| x.len() as u64).unwrap_or(0);
self.stats.tally_read_modified(size_read);
@@ -260,15 +260,15 @@ impl OverlayedChanges {
/// `None` can be used to delete a value specified by the given key.
pub(crate) fn set_child_storage(
&mut self,
storage_key: StorageKey,
child_info: ChildInfo,
child_info: &ChildInfo,
key: StorageKey,
val: Option<StorageValue>,
) {
let size_write = val.as_ref().map(|x| x.len() as u64).unwrap_or(0);
self.stats.tally_write_overlay(size_write);
let extrinsic_index = self.extrinsic_index();
let map_entry = self.prospective.children.entry(storage_key)
let storage_key = child_info.storage_key().to_vec();
let map_entry = self.prospective.children_default.entry(storage_key)
.or_insert_with(|| (Default::default(), child_info.to_owned()));
let updatable = map_entry.1.try_update(child_info);
debug_assert!(updatable);
@@ -290,11 +290,11 @@ impl OverlayedChanges {
/// [`discard_prospective`]: #method.discard_prospective
pub(crate) fn clear_child_storage(
&mut self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
) {
let extrinsic_index = self.extrinsic_index();
let map_entry = self.prospective.children.entry(storage_key.to_vec())
let storage_key = child_info.storage_key();
let map_entry = self.prospective.children_default.entry(storage_key.to_vec())
.or_insert_with(|| (Default::default(), child_info.to_owned()));
let updatable = map_entry.1.try_update(child_info);
debug_assert!(updatable);
@@ -308,7 +308,7 @@ impl OverlayedChanges {
e.value = None;
});
if let Some((committed_map, _child_info)) = self.committed.children.get(storage_key) {
if let Some((committed_map, _child_info)) = self.committed.children_default.get(storage_key) {
for (key, value) in committed_map.iter() {
if !map_entry.0.contains_key(key) {
map_entry.0.insert(key.clone(), OverlayedValue {
@@ -364,12 +364,12 @@ impl OverlayedChanges {
pub(crate) fn clear_child_prefix(
&mut self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
prefix: &[u8],
) {
let extrinsic_index = self.extrinsic_index();
let map_entry = self.prospective.children.entry(storage_key.to_vec())
let storage_key = child_info.storage_key();
let map_entry = self.prospective.children_default.entry(storage_key.to_vec())
.or_insert_with(|| (Default::default(), child_info.to_owned()));
let updatable = map_entry.1.try_update(child_info);
debug_assert!(updatable);
@@ -385,7 +385,7 @@ impl OverlayedChanges {
}
}
if let Some((child_committed, _child_info)) = self.committed.children.get(storage_key) {
if let Some((child_committed, _child_info)) = self.committed.children_default.get(storage_key) {
// Then do the same with keys from committed changes.
// NOTE that we are making changes in the prospective change set.
for key in child_committed.keys() {
@@ -422,8 +422,8 @@ impl OverlayedChanges {
.extend(prospective_extrinsics);
}
}
for (storage_key, (map, child_info)) in self.prospective.children.drain() {
let child_content = self.committed.children.entry(storage_key)
for (storage_key, (map, child_info)) in self.prospective.children_default.drain() {
let child_content = self.committed.children_default.entry(storage_key)
.or_insert_with(|| (Default::default(), child_info));
// No update to child info at this point (will be needed for deletion).
for (key, val) in map.into_iter() {
@@ -445,14 +445,14 @@ impl OverlayedChanges {
/// Will panic if there are any uncommitted prospective changes.
fn drain_committed(&mut self) -> (
impl Iterator<Item=(StorageKey, Option<StorageValue>)>,
impl Iterator<Item=(StorageKey, (impl Iterator<Item=(StorageKey, Option<StorageValue>)>, OwnedChildInfo))>,
impl Iterator<Item=(StorageKey, (impl Iterator<Item=(StorageKey, Option<StorageValue>)>, ChildInfo))>,
) {
assert!(self.prospective.is_empty());
(
std::mem::replace(&mut self.committed.top, Default::default())
.into_iter()
.map(|(k, v)| (k, v.value)),
std::mem::replace(&mut self.committed.children, Default::default())
std::mem::replace(&mut self.committed.children_default, Default::default())
.into_iter()
.map(|(sk, (v, ci))| (sk, (v.into_iter().map(|(k, v)| (k, v.value)), ci))),
)
@@ -549,21 +549,20 @@ impl OverlayedChanges {
) -> H::Out
where H::Out: Ord + Encode,
{
let child_storage_keys = self.prospective.children.keys()
.chain(self.committed.children.keys());
let child_storage_keys = self.prospective.children_default.keys()
.chain(self.committed.children_default.keys());
let child_delta_iter = child_storage_keys.map(|storage_key|
(
storage_key.clone(),
self.committed.children.get(storage_key)
self.default_child_info(storage_key).cloned()
.expect("child info initialized in either committed or prospective"),
self.committed.children_default.get(storage_key)
.into_iter()
.flat_map(|(map, _)| map.iter().map(|(k, v)| (k.clone(), v.value.clone())))
.chain(
self.prospective.children.get(storage_key)
self.prospective.children_default.get(storage_key)
.into_iter()
.flat_map(|(map, _)| map.iter().map(|(k, v)| (k.clone(), v.value.clone())))
),
self.child_info(storage_key).cloned()
.expect("child info initialized in either committed or prospective"),
)
);
@@ -610,11 +609,11 @@ impl OverlayedChanges {
/// Get child info for a storage key.
/// Take the latest value so prospective first.
pub fn child_info(&self, storage_key: &[u8]) -> Option<&OwnedChildInfo> {
if let Some((_, ci)) = self.prospective.children.get(storage_key) {
pub fn default_child_info(&self, storage_key: &[u8]) -> Option<&ChildInfo> {
if let Some((_, ci)) = self.prospective.children_default.get(storage_key) {
return Some(&ci);
}
if let Some((_, ci)) = self.committed.children.get(storage_key) {
if let Some((_, ci)) = self.committed.children_default.get(storage_key) {
return Some(&ci);
}
None
@@ -654,10 +653,10 @@ impl OverlayedChanges {
) -> Option<(&[u8], &OverlayedValue)> {
let range = (ops::Bound::Excluded(key), ops::Bound::Unbounded);
let next_prospective_key = self.prospective.children.get(storage_key)
let next_prospective_key = self.prospective.children_default.get(storage_key)
.and_then(|(map, _)| map.range::<[u8], _>(range).next().map(|(k, v)| (&k[..], v)));
let next_committed_key = self.committed.children.get(storage_key)
let next_committed_key = self.committed.children_default.get(storage_key)
.and_then(|(map, _)| map.range::<[u8], _>(range).next().map(|(k, v)| (&k[..], v)));
match (next_committed_key, next_prospective_key) {
@@ -866,39 +865,40 @@ mod tests {
#[test]
fn next_child_storage_key_change_works() {
let child = b"Child1".to_vec();
let child_info = ChildInfo::new_default(b"uniqueid");
let child_info = ChildInfo::new_default(b"Child1");
let child_info = &child_info;
let child = child_info.storage_key();
let mut overlay = OverlayedChanges::default();
overlay.set_child_storage(child.clone(), child_info, vec![20], Some(vec![20]));
overlay.set_child_storage(child.clone(), child_info, vec![30], Some(vec![30]));
overlay.set_child_storage(child.clone(), child_info, vec![40], Some(vec![40]));
overlay.set_child_storage(child_info, vec![20], Some(vec![20]));
overlay.set_child_storage(child_info, vec![30], Some(vec![30]));
overlay.set_child_storage(child_info, vec![40], Some(vec![40]));
overlay.commit_prospective();
overlay.set_child_storage(child.clone(), child_info, vec![10], Some(vec![10]));
overlay.set_child_storage(child.clone(), child_info, vec![30], None);
overlay.set_child_storage(child_info, vec![10], Some(vec![10]));
overlay.set_child_storage(child_info, vec![30], None);
// next_prospective < next_committed
let next_to_5 = overlay.next_child_storage_key_change(&child, &[5]).unwrap();
let next_to_5 = overlay.next_child_storage_key_change(child, &[5]).unwrap();
assert_eq!(next_to_5.0.to_vec(), vec![10]);
assert_eq!(next_to_5.1.value, Some(vec![10]));
// next_committed < next_prospective
let next_to_10 = overlay.next_child_storage_key_change(&child, &[10]).unwrap();
let next_to_10 = overlay.next_child_storage_key_change(child, &[10]).unwrap();
assert_eq!(next_to_10.0.to_vec(), vec![20]);
assert_eq!(next_to_10.1.value, Some(vec![20]));
// next_committed == next_prospective
let next_to_20 = overlay.next_child_storage_key_change(&child, &[20]).unwrap();
let next_to_20 = overlay.next_child_storage_key_change(child, &[20]).unwrap();
assert_eq!(next_to_20.0.to_vec(), vec![30]);
assert_eq!(next_to_20.1.value, None);
// next_committed, no next_prospective
let next_to_30 = overlay.next_child_storage_key_change(&child, &[30]).unwrap();
let next_to_30 = overlay.next_child_storage_key_change(child, &[30]).unwrap();
assert_eq!(next_to_30.0.to_vec(), vec![40]);
assert_eq!(next_to_30.1.value, Some(vec![40]));
overlay.set_child_storage(child.clone(), child_info, vec![50], Some(vec![50]));
overlay.set_child_storage(child_info, vec![50], Some(vec![50]));
// next_prospective, no next_committed
let next_to_40 = overlay.next_child_storage_key_change(&child, &[40]).unwrap();
let next_to_40 = overlay.next_child_storage_key_change(child, &[40]).unwrap();
assert_eq!(next_to_40.0.to_vec(), vec![50]);
assert_eq!(next_to_40.1.value, Some(vec![50]));
}
@@ -22,7 +22,7 @@ use codec::{Decode, Codec};
use log::debug;
use hash_db::{Hasher, HashDB, EMPTY_PREFIX, Prefix};
use sp_trie::{
MemoryDB, default_child_trie_root, read_trie_value_with, read_child_trie_value_with,
MemoryDB, empty_child_trie_root, read_trie_value_with, read_child_trie_value_with,
record_all_keys, StorageProof,
};
pub use sp_trie::Recorder;
@@ -67,13 +67,13 @@ impl<'a, S, H> ProvingBackendRecorder<'a, S, H>
/// Produce proof for a child key query.
pub fn child_storage(
&mut self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8]
) -> Result<Option<Vec<u8>>, String> {
let storage_key = child_info.storage_key();
let root = self.storage(storage_key)?
.and_then(|r| Decode::decode(&mut &r[..]).ok())
.unwrap_or(default_child_trie_root::<Layout<H>>(storage_key));
.unwrap_or(empty_child_trie_root::<Layout<H>>());
let mut read_overlay = S::Overlay::default();
let eph = Ephemeral::new(
@@ -84,7 +84,6 @@ impl<'a, S, H> ProvingBackendRecorder<'a, S, H>
let map_e = |e| format!("Trie lookup error: {}", e);
read_child_trie_value_with::<Layout<H>, _, _>(
storage_key,
child_info.keyspace(),
&eph,
&root.as_ref(),
@@ -201,20 +200,18 @@ impl<'a, S, H> Backend<H> for ProvingBackend<'a, S, H>
fn child_storage(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Result<Option<Vec<u8>>, Self::Error> {
self.0.child_storage(storage_key, child_info, key)
self.0.child_storage(child_info, key)
}
fn for_keys_in_child_storage<F: FnMut(&[u8])>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
f: F,
) {
self.0.for_keys_in_child_storage(storage_key, child_info, f)
self.0.for_keys_in_child_storage(child_info, f)
}
fn next_storage_key(&self, key: &[u8]) -> Result<Option<Vec<u8>>, Self::Error> {
@@ -223,11 +220,10 @@ impl<'a, S, H> Backend<H> for ProvingBackend<'a, S, H>
fn next_child_storage_key(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Result<Option<Vec<u8>>, Self::Error> {
self.0.next_child_storage_key(storage_key, child_info, key)
self.0.next_child_storage_key(child_info, key)
}
fn for_keys_with_prefix<F: FnMut(&[u8])>(&self, prefix: &[u8], f: F) {
@@ -240,12 +236,11 @@ impl<'a, S, H> Backend<H> for ProvingBackend<'a, S, H>
fn for_child_keys_with_prefix<F: FnMut(&[u8])>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
prefix: &[u8],
f: F,
) {
self.0.for_child_keys_with_prefix(storage_key, child_info, prefix, f)
self.0.for_child_keys_with_prefix(child_info, prefix, f)
}
fn pairs(&self) -> Vec<(Vec<u8>, Vec<u8>)> {
@@ -258,11 +253,10 @@ impl<'a, S, H> Backend<H> for ProvingBackend<'a, S, H>
fn child_keys(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
prefix: &[u8],
) -> Vec<Vec<u8>> {
self.0.child_keys(storage_key, child_info, prefix)
self.0.child_keys(child_info, prefix)
}
fn storage_root<I>(&self, delta: I) -> (H::Out, Self::Transaction)
@@ -273,15 +267,14 @@ impl<'a, S, H> Backend<H> for ProvingBackend<'a, S, H>
fn child_storage_root<I>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
delta: I,
) -> (H::Out, bool, Self::Transaction)
where
I: IntoIterator<Item=(Vec<u8>, Option<Vec<u8>>)>,
H::Out: Ord
{
self.0.child_storage_root(storage_key, child_info, delta)
self.0.child_storage_root(child_info, delta)
}
fn register_overlay_stats(&mut self, _stats: &crate::stats::StateMachineStats) { }
@@ -314,14 +307,10 @@ mod tests {
use crate::InMemoryBackend;
use crate::trie_backend::tests::test_trie;
use super::*;
use sp_core::storage::ChildStorageKey;
use crate::proving_backend::create_proof_check_backend;
use sp_trie::PrefixedMemoryDB;
use sp_runtime::traits::BlakeTwo256;
const CHILD_INFO_1: ChildInfo<'static> = ChildInfo::new_default(b"unique_id_1");
const CHILD_INFO_2: ChildInfo<'static> = ChildInfo::new_default(b"unique_id_2");
fn test_proving<'a>(
trie_backend: &'a TrieBackend<PrefixedMemoryDB<BlakeTwo256>,BlakeTwo256>,
) -> ProvingBackend<'a, PrefixedMemoryDB<BlakeTwo256>, BlakeTwo256> {
@@ -389,33 +378,33 @@ mod tests {
#[test]
fn proof_recorded_and_checked_with_child() {
let subtrie1 = ChildStorageKey::from_slice(b":child_storage:default:sub1").unwrap();
let subtrie2 = ChildStorageKey::from_slice(b":child_storage:default:sub2").unwrap();
let own1 = subtrie1.into_owned();
let own2 = subtrie2.into_owned();
let child_info_1 = ChildInfo::new_default(b"sub1");
let child_info_2 = ChildInfo::new_default(b"sub2");
let child_info_1 = &child_info_1;
let child_info_2 = &child_info_2;
let contents = vec![
(None, (0..64).map(|i| (vec![i], Some(vec![i]))).collect()),
(Some((own1.clone(), CHILD_INFO_1.to_owned())),
(Some(child_info_1.clone()),
(28..65).map(|i| (vec![i], Some(vec![i]))).collect()),
(Some((own2.clone(), CHILD_INFO_2.to_owned())),
(Some(child_info_2.clone()),
(10..15).map(|i| (vec![i], Some(vec![i]))).collect()),
];
let in_memory = InMemoryBackend::<BlakeTwo256>::default();
let mut in_memory = in_memory.update(contents);
let in_memory_root = in_memory.full_storage_root::<_, Vec<_>, _>(
::std::iter::empty(),
in_memory.child_storage_keys().map(|k|(k.0.to_vec(), Vec::new(), k.1.to_owned()))
in_memory.child_storage_infos().map(|k|(k.to_owned(), Vec::new()))
).0;
(0..64).for_each(|i| assert_eq!(
in_memory.storage(&[i]).unwrap().unwrap(),
vec![i]
));
(28..65).for_each(|i| assert_eq!(
in_memory.child_storage(&own1[..], CHILD_INFO_1, &[i]).unwrap().unwrap(),
in_memory.child_storage(child_info_1, &[i]).unwrap().unwrap(),
vec![i]
));
(10..15).for_each(|i| assert_eq!(
in_memory.child_storage(&own2[..], CHILD_INFO_2, &[i]).unwrap().unwrap(),
in_memory.child_storage(child_info_2, &[i]).unwrap().unwrap(),
vec![i]
));
@@ -443,7 +432,7 @@ mod tests {
assert_eq!(proof_check.storage(&[64]).unwrap(), None);
let proving = ProvingBackend::new(trie);
assert_eq!(proving.child_storage(&own1[..], CHILD_INFO_1, &[64]), Ok(Some(vec![64])));
assert_eq!(proving.child_storage(child_info_1, &[64]), Ok(Some(vec![64])));
let proof = proving.extract_proof();
let proof_check = create_proof_check_backend::<BlakeTwo256>(
@@ -451,7 +440,7 @@ mod tests {
proof
).unwrap();
assert_eq!(
proof_check.child_storage(&own1[..], CHILD_INFO_1, &[64]).unwrap().unwrap(),
proof_check.child_storage(child_info_1, &[64]).unwrap().unwrap(),
vec![64]
);
}
@@ -93,7 +93,7 @@ impl<H: Hasher, N: ChangesTrieBlockNumber> TestExternalities<H, N>
overlay.set_collect_extrinsics(changes_trie_config.is_some());
assert!(storage.top.keys().all(|key| !is_child_storage_key(key)));
assert!(storage.children.keys().all(|key| is_child_storage_key(key)));
assert!(storage.children_default.keys().all(|key| is_child_storage_key(key)));
storage.top.insert(HEAP_PAGES.to_vec(), 8u64.encode());
storage.top.insert(CODE.to_vec(), code.to_vec());
@@ -133,11 +133,11 @@ impl<H: Hasher, N: ChangesTrieBlockNumber> TestExternalities<H, N>
.map(|(k, v)| (k, v.value)).collect();
let mut transaction = vec![(None, top)];
self.overlay.committed.children.clone().into_iter()
.chain(self.overlay.prospective.children.clone().into_iter())
.for_each(|(keyspace, (map, child_info))| {
self.overlay.committed.children_default.clone().into_iter()
.chain(self.overlay.prospective.children_default.clone().into_iter())
.for_each(|(_storage_key, (map, child_info))| {
transaction.push((
Some((keyspace, child_info)),
Some(child_info),
map.into_iter()
.map(|(k, v)| (k, v.value))
.collect::<Vec<_>>(),
@@ -18,9 +18,9 @@
use log::{warn, debug};
use hash_db::Hasher;
use sp_trie::{Trie, delta_trie_root, default_child_trie_root, child_delta_trie_root};
use sp_trie::{Trie, delta_trie_root, empty_child_trie_root, child_delta_trie_root};
use sp_trie::trie_types::{TrieDB, TrieError, Layout};
use sp_core::storage::ChildInfo;
use sp_core::storage::{ChildInfo, ChildType};
use codec::{Codec, Decode};
use crate::{
StorageKey, StorageValue, Backend,
@@ -80,11 +80,10 @@ impl<S: TrieBackendStorage<H>, H: Hasher> Backend<H> for TrieBackend<S, H> where
fn child_storage(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Result<Option<StorageValue>, Self::Error> {
self.essence.child_storage(storage_key, child_info, key)
self.essence.child_storage(child_info, key)
}
fn next_storage_key(&self, key: &[u8]) -> Result<Option<StorageKey>, Self::Error> {
@@ -93,11 +92,10 @@ impl<S: TrieBackendStorage<H>, H: Hasher> Backend<H> for TrieBackend<S, H> where
fn next_child_storage_key(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Result<Option<StorageKey>, Self::Error> {
self.essence.next_child_storage_key(storage_key, child_info, key)
self.essence.next_child_storage_key(child_info, key)
}
fn for_keys_with_prefix<F: FnMut(&[u8])>(&self, prefix: &[u8], f: F) {
@@ -110,21 +108,19 @@ impl<S: TrieBackendStorage<H>, H: Hasher> Backend<H> for TrieBackend<S, H> where
fn for_keys_in_child_storage<F: FnMut(&[u8])>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
f: F,
) {
self.essence.for_keys_in_child_storage(storage_key, child_info, f)
self.essence.for_keys_in_child_storage(child_info, f)
}
fn for_child_keys_with_prefix<F: FnMut(&[u8])>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
prefix: &[u8],
f: F,
) {
self.essence.for_child_keys_with_prefix(storage_key, child_info, prefix, f)
self.essence.for_child_keys_with_prefix(child_info, prefix, f)
}
fn pairs(&self) -> Vec<(StorageKey, StorageValue)> {
@@ -194,18 +190,20 @@ impl<S: TrieBackendStorage<H>, H: Hasher> Backend<H> for TrieBackend<S, H> where
fn child_storage_root<I>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
delta: I,
) -> (H::Out, bool, Self::Transaction)
where
I: IntoIterator<Item=(StorageKey, Option<StorageValue>)>,
H::Out: Ord,
{
let default_root = default_child_trie_root::<Layout<H>>(storage_key);
let default_root = match child_info.child_type() {
ChildType::ParentKeyId => empty_child_trie_root::<Layout<H>>()
};
let mut write_overlay = S::Overlay::default();
let mut root = match self.storage(storage_key) {
let prefixed_storage_key = child_info.prefixed_storage_key();
let mut root = match self.storage(prefixed_storage_key.as_slice()) {
Ok(value) =>
value.and_then(|r| Decode::decode(&mut &r[..]).ok()).unwrap_or(default_root.clone()),
Err(e) => {
@@ -221,7 +219,6 @@ impl<S: TrieBackendStorage<H>, H: Hasher> Backend<H> for TrieBackend<S, H> where
);
match child_delta_trie_root::<Layout<H>, _, _, _, _, _>(
storage_key,
child_info.keyspace(),
&mut eph,
root,
@@ -257,16 +254,14 @@ pub mod tests {
use sp_runtime::traits::BlakeTwo256;
use super::*;
const CHILD_KEY_1: &[u8] = b":child_storage:default:sub1";
const CHILD_UUID_1: &[u8] = b"unique_id_1";
const CHILD_INFO_1: ChildInfo<'static> = ChildInfo::new_default(CHILD_UUID_1);
const CHILD_KEY_1: &[u8] = b"sub1";
fn test_db() -> (PrefixedMemoryDB<BlakeTwo256>, H256) {
let child_info = ChildInfo::new_default(CHILD_KEY_1);
let mut root = H256::default();
let mut mdb = PrefixedMemoryDB::<BlakeTwo256>::default();
{
let mut mdb = KeySpacedDBMut::new(&mut mdb, CHILD_UUID_1);
let mut mdb = KeySpacedDBMut::new(&mut mdb, child_info.keyspace());
let mut trie = TrieDBMut::new(&mut mdb, &mut root);
trie.insert(b"value3", &[142]).expect("insert failed");
trie.insert(b"value4", &[124]).expect("insert failed");
@@ -276,7 +271,8 @@ pub mod tests {
let mut sub_root = Vec::new();
root.encode_to(&mut sub_root);
let mut trie = TrieDBMut::new(&mut mdb, &mut root);
trie.insert(CHILD_KEY_1, &sub_root[..]).expect("insert failed");
trie.insert(child_info.prefixed_storage_key().as_slice(), &sub_root[..])
.expect("insert failed");
trie.insert(b"key", b"value").expect("insert failed");
trie.insert(b"value1", &[42]).expect("insert failed");
trie.insert(b"value2", &[24]).expect("insert failed");
@@ -302,7 +298,7 @@ pub mod tests {
fn read_from_child_storage_returns_some() {
let test_trie = test_trie();
assert_eq!(
test_trie.child_storage(CHILD_KEY_1, CHILD_INFO_1, b"value3").unwrap(),
test_trie.child_storage(&ChildInfo::new_default(CHILD_KEY_1), b"value3").unwrap(),
Some(vec![142u8]),
);
}
@@ -22,7 +22,7 @@ use std::sync::Arc;
use log::{debug, warn};
use hash_db::{self, Hasher, EMPTY_PREFIX, Prefix};
use sp_trie::{Trie, MemoryDB, PrefixedMemoryDB, DBValue,
default_child_trie_root, read_trie_value, read_child_trie_value,
empty_child_trie_root, read_trie_value, read_child_trie_value,
for_keys_in_child_trie, KeySpacedDB, TrieDBIterator};
use sp_trie::trie_types::{TrieDB, TrieError, Layout};
use crate::{backend::Consolidate, StorageKey, StorageValue};
@@ -71,15 +71,19 @@ impl<S: TrieBackendStorage<H>, H: Hasher> TrieBackendEssence<S, H> where H::Out:
self.next_storage_key_from_root(&self.root, None, key)
}
/// Access the root of the child storage in its parent trie
fn child_root(&self, child_info: &ChildInfo) -> Result<Option<StorageValue>, String> {
self.storage(child_info.prefixed_storage_key().as_slice())
}
/// Return the next key in the child trie i.e. the minimum key that is strictly superior to
/// `key` in lexicographic order.
pub fn next_child_storage_key(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Result<Option<StorageKey>, String> {
let child_root = match self.storage(storage_key)? {
let child_root = match self.child_root(child_info)? {
Some(child_root) => child_root,
None => return Ok(None),
};
@@ -87,7 +91,7 @@ impl<S: TrieBackendStorage<H>, H: Hasher> TrieBackendEssence<S, H> where H::Out:
let mut hash = H::Out::default();
if child_root.len() != hash.as_ref().len() {
return Err(format!("Invalid child storage hash at {:?}", storage_key));
return Err(format!("Invalid child storage hash at {:?}", child_info.storage_key()));
}
// note: child_root and hash must be same size, panics otherwise.
hash.as_mut().copy_from_slice(&child_root[..]);
@@ -99,7 +103,7 @@ impl<S: TrieBackendStorage<H>, H: Hasher> TrieBackendEssence<S, H> where H::Out:
fn next_storage_key_from_root(
&self,
root: &H::Out,
child_info: Option<ChildInfo>,
child_info: Option<&ChildInfo>,
key: &[u8],
) -> Result<Option<StorageKey>, String> {
let mut read_overlay = S::Overlay::default();
@@ -161,12 +165,11 @@ impl<S: TrieBackendStorage<H>, H: Hasher> TrieBackendEssence<S, H> where H::Out:
/// Get the value of child storage at given key.
pub fn child_storage(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
key: &[u8],
) -> Result<Option<StorageValue>, String> {
let root = self.storage(storage_key)?
.unwrap_or(default_child_trie_root::<Layout<H>>(storage_key).encode());
let root = self.child_root(child_info)?
.unwrap_or(empty_child_trie_root::<Layout<H>>().encode());
let mut read_overlay = S::Overlay::default();
let eph = Ephemeral {
@@ -176,19 +179,18 @@ impl<S: TrieBackendStorage<H>, H: Hasher> TrieBackendEssence<S, H> where H::Out:
let map_e = |e| format!("Trie lookup error: {}", e);
read_child_trie_value::<Layout<H>, _>(storage_key, child_info.keyspace(), &eph, &root, key)
read_child_trie_value::<Layout<H>, _>(child_info.keyspace(), &eph, &root, key)
.map_err(map_e)
}
/// Retrieve all entries keys of child storage and call `f` for each of those keys.
pub fn for_keys_in_child_storage<F: FnMut(&[u8])>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
f: F,
) {
let root = match self.storage(storage_key) {
Ok(v) => v.unwrap_or(default_child_trie_root::<Layout<H>>(storage_key).encode()),
let root = match self.child_root(child_info) {
Ok(v) => v.unwrap_or(empty_child_trie_root::<Layout<H>>().encode()),
Err(e) => {
debug!(target: "trie", "Error while iterating child storage: {}", e);
return;
@@ -202,7 +204,6 @@ impl<S: TrieBackendStorage<H>, H: Hasher> TrieBackendEssence<S, H> where H::Out:
};
if let Err(e) = for_keys_in_child_trie::<Layout<H>, _, Ephemeral<S, H>>(
storage_key,
child_info.keyspace(),
&eph,
&root,
@@ -215,13 +216,12 @@ impl<S: TrieBackendStorage<H>, H: Hasher> TrieBackendEssence<S, H> where H::Out:
/// Execute given closure for all keys starting with prefix.
pub fn for_child_keys_with_prefix<F: FnMut(&[u8])>(
&self,
storage_key: &[u8],
child_info: ChildInfo,
child_info: &ChildInfo,
prefix: &[u8],
mut f: F,
) {
let root_vec = match self.storage(storage_key) {
Ok(v) => v.unwrap_or(default_child_trie_root::<Layout<H>>(storage_key).encode()),
let root_vec = match self.child_root(child_info) {
Ok(v) => v.unwrap_or(empty_child_trie_root::<Layout<H>>().encode()),
Err(e) => {
debug!(target: "trie", "Error while iterating child storage: {}", e);
return;
@@ -242,7 +242,7 @@ impl<S: TrieBackendStorage<H>, H: Hasher> TrieBackendEssence<S, H> where H::Out:
root: &H::Out,
prefix: &[u8],
mut f: F,
child_info: Option<ChildInfo>,
child_info: Option<&ChildInfo>,
) {
let mut read_overlay = S::Overlay::default();
let eph = Ephemeral {
@@ -436,7 +436,8 @@ mod test {
#[test]
fn next_storage_key_and_next_child_storage_key_work() {
let child_info = ChildInfo::new_default(b"uniqueid");
let child_info = ChildInfo::new_default(b"MyChild");
let child_info = &child_info;
// Contains values
let mut root_1 = H256::default();
// Contains child trie
@@ -460,7 +461,8 @@ mod test {
}
{
let mut trie = TrieDBMut::new(&mut mdb, &mut root_2);
trie.insert(b"MyChild", root_1.as_ref()).expect("insert failed");
trie.insert(child_info.prefixed_storage_key().as_slice(), root_1.as_ref())
.expect("insert failed");
};
let essence_1 = TrieBackendEssence::new(mdb, root_1);
@@ -475,19 +477,19 @@ mod test {
let essence_2 = TrieBackendEssence::new(mdb, root_2);
assert_eq!(
essence_2.next_child_storage_key(b"MyChild", child_info, b"2"), Ok(Some(b"3".to_vec()))
essence_2.next_child_storage_key(child_info, b"2"), Ok(Some(b"3".to_vec()))
);
assert_eq!(
essence_2.next_child_storage_key(b"MyChild", child_info, b"3"), Ok(Some(b"4".to_vec()))
essence_2.next_child_storage_key(child_info, b"3"), Ok(Some(b"4".to_vec()))
);
assert_eq!(
essence_2.next_child_storage_key(b"MyChild", child_info, b"4"), Ok(Some(b"6".to_vec()))
essence_2.next_child_storage_key(child_info, b"4"), Ok(Some(b"6".to_vec()))
);
assert_eq!(
essence_2.next_child_storage_key(b"MyChild", child_info, b"5"), Ok(Some(b"6".to_vec()))
essence_2.next_child_storage_key(child_info, b"5"), Ok(Some(b"6".to_vec()))
);
assert_eq!(
essence_2.next_child_storage_key(b"MyChild", child_info, b"6"), Ok(None)
essence_2.next_child_storage_key(child_info, b"6"), Ok(None)
);
}
}