Implement change trie for child trie. (#3122)

* Initial implementation; some redundancy is awkward and there is
some useless computation (but there is a pending PR for that).
Tests are next.

* Minimal tests and fix extend child.

* Implement iterator for the child changes trie.

* Prune child trie.

* Fix pruning test.

* Bump spec version.

* Avoid empty child trie (could also be checked before)

* tabs.

* Fix child digests overriding each other.

* Restore doc deleted on merge.

* Check correct child value on extrinsics build.

* Revert runtime version update.
This commit is contained in:
cheme
2019-09-02 11:10:42 +02:00
committed by Svyatoslav Nikolsky
parent 06658e0c3c
commit e3d0c60a31
23 changed files with 670 additions and 138 deletions
+4
View File
@@ -137,6 +137,10 @@ impl<B: BlockT> StateBackend<Blake2Hasher> for RefTrackingState<B> {
self.state.for_keys_with_prefix(prefix, f)
}
fn for_key_values_with_prefix<F: FnMut(&[u8], &[u8])>(&self, prefix: &[u8], f: F) {
self.state.for_key_values_with_prefix(prefix, f)
}
fn for_keys_in_child_storage<F: FnMut(&[u8])>(&self, storage_key: &[u8], f: F) {
self.state.for_keys_in_child_storage(storage_key, f)
}
@@ -536,6 +536,10 @@ impl<H: Hasher, S: StateBackend<H>, B: BlockT> StateBackend<H> for CachingState<
self.state.for_keys_with_prefix(prefix, f)
}
fn for_key_values_with_prefix<F: FnMut(&[u8], &[u8])>(&self, prefix: &[u8], f: F) {
self.state.for_key_values_with_prefix(prefix, f)
}
fn for_keys_in_child_storage<F: FnMut(&[u8])>(&self, storage_key: &[u8], f: F) {
self.state.for_keys_in_child_storage(storage_key, f)
}
+14 -3
View File
@@ -535,6 +535,7 @@ impl<B, E, Block, RA> Client<B, E, Block, RA> where
&self,
first: NumberFor<Block>,
last: BlockId<Block>,
storage_key: Option<&StorageKey>,
key: &StorageKey
) -> error::Result<Vec<(NumberFor<Block>, u32)>> {
let (config, storage) = self.require_changes_trie()?;
@@ -557,6 +558,7 @@ impl<B, E, Block, RA> Client<B, E, Block, RA> where
number: last_number,
},
self.backend.blockchain().info().best_number,
storage_key.as_ref().map(|sk| sk.0.as_slice()),
&key.0)
.and_then(|r| r.map(|r| r.map(|(block, tx)| (block, tx))).collect::<Result<_, _>>())
.map_err(|err| error::Error::ChangesTrieAccessFailed(err))
@@ -574,13 +576,15 @@ impl<B, E, Block, RA> Client<B, E, Block, RA> where
last: Block::Hash,
min: Block::Hash,
max: Block::Hash,
key: &StorageKey
storage_key: Option<&StorageKey>,
key: &StorageKey,
) -> error::Result<ChangesProof<Block::Header>> {
self.key_changes_proof_with_cht_size(
first,
last,
min,
max,
storage_key,
key,
cht::size(),
)
@@ -593,6 +597,7 @@ impl<B, E, Block, RA> Client<B, E, Block, RA> where
last: Block::Hash,
min: Block::Hash,
max: Block::Hash,
storage_key: Option<&StorageKey>,
key: &StorageKey,
cht_size: NumberFor<Block>,
) -> error::Result<ChangesProof<Block::Header>> {
@@ -670,7 +675,8 @@ impl<B, E, Block, RA> Client<B, E, Block, RA> where
number: last_number,
},
max_number,
&key.0
storage_key.as_ref().map(|sk| sk.0.as_slice()),
&key.0,
)
.map_err(|err| error::Error::from(error::Error::ChangesTrieAccessFailed(err)))?;
@@ -2580,7 +2586,12 @@ pub(crate) mod tests {
for (index, (begin, end, key, expected_result)) in test_cases.into_iter().enumerate() {
let end = client.block_hash(end).unwrap().unwrap();
let actual_result = client.key_changes(begin, BlockId::Hash(end), &StorageKey(key)).unwrap();
let actual_result = client.key_changes(
begin,
BlockId::Hash(end),
None,
&StorageKey(key),
).unwrap();
match actual_result == expected_result {
true => (),
false => panic!(format!("Failed test {}: actual = {:?}, expected = {:?}",
@@ -381,6 +381,10 @@ where
// whole state is not available on light node
}
fn for_key_values_with_prefix<A: FnMut(&[u8], &[u8])>(&self, _prefix: &[u8], _action: A) {
// whole state is not available on light node
}
fn for_keys_in_child_storage<A: FnMut(&[u8])>(&self, _storage_key: &[u8], _action: A) {
// whole state is not available on light node
}
@@ -461,6 +465,15 @@ where
}
}
fn for_key_values_with_prefix<A: FnMut(&[u8], &[u8])>(&self, prefix: &[u8], action: A) {
match *self {
OnDemandOrGenesisState::OnDemand(ref state) =>
StateBackend::<H>::for_key_values_with_prefix(state, prefix, action),
OnDemandOrGenesisState::Genesis(ref state) => state.for_key_values_with_prefix(prefix, action),
}
}
fn for_keys_in_child_storage<A: FnMut(&[u8])>(&self, storage_key: &[u8], action: A) {
match *self {
OnDemandOrGenesisState::OnDemand(ref state) =>
+10 -4
View File
@@ -110,6 +110,8 @@ pub struct RemoteChangesRequest<Header: HeaderT> {
/// Known changes trie roots for the range of blocks [tries_roots.0..max_block].
/// Proofs for roots of ascendants of tries_roots.0 are provided by the remote node.
pub tries_roots: (Header::Number, Header::Hash, Vec<Header::Hash>),
/// Optional Child Storage key to read.
pub storage_key: Option<Vec<u8>>,
/// Storage key to read.
pub key: Vec<u8>,
/// Number of times to retry request. None means that default RETRY_COUNT is used.
@@ -307,6 +309,7 @@ impl<E, H, B: BlockT, S: BlockchainStorage<B>, F> LightDataChecker<E, H, B, S, F
number: request.last_block.0,
},
remote_max_block,
request.storage_key.as_ref().map(Vec::as_slice),
&request.key)
.map_err(|err| ClientError::ChangesTrieAccessFailed(err))
}
@@ -749,7 +752,7 @@ pub mod tests {
// 'fetch' changes proof from remote node
let key = StorageKey(key);
let remote_proof = remote_client.key_changes_proof(
begin_hash, end_hash, begin_hash, max_hash, &key
begin_hash, end_hash, begin_hash, max_hash, None, &key
).unwrap();
// check proof on local client
@@ -761,6 +764,7 @@ pub mod tests {
max_block: (max, max_hash),
tries_roots: (begin, begin_hash, local_roots_range),
key: key.0,
storage_key: None,
retry_count: None,
};
let local_result = local_checker.check_changes_proof(&request, ChangesProof {
@@ -795,7 +799,7 @@ pub mod tests {
let b3 = remote_client.block_hash_from_id(&BlockId::Number(3)).unwrap().unwrap();
let b4 = remote_client.block_hash_from_id(&BlockId::Number(4)).unwrap().unwrap();
let remote_proof = remote_client.key_changes_proof_with_cht_size(
b1, b4, b3, b4, &dave, 4
b1, b4, b3, b4, None, &dave, 4
).unwrap();
// prepare local checker, having a root of changes trie CHT#0
@@ -814,6 +818,7 @@ pub mod tests {
last_block: (4, b4),
max_block: (4, b4),
tries_roots: (3, b3, vec![remote_roots[2].clone(), remote_roots[3].clone()]),
storage_key: None,
key: dave.0,
retry_count: None,
};
@@ -845,7 +850,7 @@ pub mod tests {
// 'fetch' changes proof from remote node
let key = StorageKey(key);
let remote_proof = remote_client.key_changes_proof(
begin_hash, end_hash, begin_hash, max_hash, &key).unwrap();
begin_hash, end_hash, begin_hash, max_hash, None, &key).unwrap();
let local_roots_range = local_roots.clone()[(begin - 1) as usize..].to_vec();
let request = RemoteChangesRequest::<Header> {
@@ -854,6 +859,7 @@ pub mod tests {
last_block: (end, end_hash),
max_block: (max, max_hash),
tries_roots: (begin, begin_hash, local_roots_range.clone()),
storage_key: None,
key: key.0,
retry_count: None,
};
@@ -907,7 +913,7 @@ pub mod tests {
let b3 = remote_client.block_hash_from_id(&BlockId::Number(3)).unwrap().unwrap();
let b4 = remote_client.block_hash_from_id(&BlockId::Number(4)).unwrap().unwrap();
let remote_proof = remote_client.key_changes_proof_with_cht_size(
b1, b4, b3, b4, &dave, 4
b1, b4, b3, b4, None, &dave, 4
).unwrap();
// fails when changes trie CHT is missing from the local db
+4 -2
View File
@@ -64,6 +64,7 @@ pub trait Client<Block: BlockT>: Send + Sync {
last: Block::Hash,
min: Block::Hash,
max: Block::Hash,
storage_key: Option<&StorageKey>,
key: &StorageKey
) -> Result<ChangesProof<Block::Header>, Error>;
@@ -136,9 +137,10 @@ impl<B, E, Block, RA> Client<Block> for SubstrateClient<B, E, Block, RA> where
last: Block::Hash,
min: Block::Hash,
max: Block::Hash,
key: &StorageKey
storage_key: Option<&StorageKey>,
key: &StorageKey,
) -> Result<ChangesProof<Block::Header>, Error> {
(self as &SubstrateClient<B, E, Block, RA>).key_changes_proof(first, last, min, max, key)
(self as &SubstrateClient<B, E, Block, RA>).key_changes_proof(first, last, min, max, storage_key, key)
}
fn is_descendent_of(&self, base: &Block::Hash, block: &Block::Hash) -> Result<bool, Error> {
+16 -4
View File
@@ -226,7 +226,8 @@ impl<'a, B: BlockT> LightDispatchNetwork<B> for LightDispatchIn<'a, B> {
last: <B as BlockT>::Hash,
min: <B as BlockT>::Hash,
max: <B as BlockT>::Hash,
key: Vec<u8>
storage_key: Option<Vec<u8>>,
key: Vec<u8>,
) {
let message = message::generic::Message::RemoteChangesRequest(message::RemoteChangesRequest {
id,
@@ -234,6 +235,7 @@ impl<'a, B: BlockT> LightDispatchNetwork<B> for LightDispatchIn<'a, B> {
last,
min,
max,
storage_key,
key,
});
@@ -1385,24 +1387,34 @@ impl<B: BlockT, S: NetworkSpecialization<B>, H: ExHashT> Protocol<B, S, H> {
trace!(target: "sync", "Remote changes proof request {} from {} for key {} ({}..{})",
request.id,
who,
request.key.to_hex::<String>(),
if let Some(sk) = request.storage_key.as_ref() {
format!("{} : {}", sk.to_hex::<String>(), request.key.to_hex::<String>())
} else {
request.key.to_hex::<String>()
},
request.first,
request.last
);
let storage_key = request.storage_key.map(|sk| StorageKey(sk));
let key = StorageKey(request.key);
let proof = match self.context_data.chain.key_changes_proof(
request.first,
request.last,
request.min,
request.max,
&key
storage_key.as_ref(),
&key,
) {
Ok(proof) => proof,
Err(error) => {
trace!(target: "sync", "Remote changes proof request {} from {} for key {} ({}..{}) failed with: {}",
request.id,
who,
key.0.to_hex::<String>(),
if let Some(sk) = storage_key {
format!("{} : {}", sk.0.to_hex::<String>(), key.0.to_hex::<String>())
} else {
key.0.to_hex::<String>()
},
request.first,
request.last,
error
@@ -84,7 +84,8 @@ pub trait LightDispatchNetwork<B: BlockT> {
last: <B as BlockT>::Hash,
min: <B as BlockT>::Hash,
max: <B as BlockT>::Hash,
key: Vec<u8>
storage_key: Option<Vec<u8>>,
key: Vec<u8>,
);
/// Send to `who` a body request.
@@ -629,6 +630,7 @@ impl<Block: BlockT> Request<Block> {
data.last_block.1.clone(),
data.tries_roots.1.clone(),
data.max_block.1.clone(),
data.storage_key.clone(),
data.key.clone(),
),
RequestData::RemoteBody(ref data, _) =>
@@ -785,7 +787,7 @@ pub mod tests {
_: Vec<u8>) {}
fn send_call_request(&mut self, _: &PeerId, _: RequestId, _: <B as BlockT>::Hash, _: String, _: Vec<u8>) {}
fn send_changes_request(&mut self, _: &PeerId, _: RequestId, _: <B as BlockT>::Hash, _: <B as BlockT>::Hash,
_: <B as BlockT>::Hash, _: <B as BlockT>::Hash, _: Vec<u8>) {}
_: <B as BlockT>::Hash, _: <B as BlockT>::Hash, _: Option<Vec<u8>>, _: Vec<u8>) {}
fn send_body_request(&mut self, _: &PeerId, _: RequestId, _: BlockAttributes, _: FromBlock<<B as BlockT>::Hash,
<<B as BlockT>::Header as HeaderT>::Number>, _: Option<B::Hash>, _: Direction, _: Option<u32>) {}
}
@@ -1063,6 +1065,7 @@ pub mod tests {
max_block: (100, Default::default()),
tries_roots: (1, Default::default(), vec![]),
key: vec![],
storage_key: None,
retry_count: None,
}, tx));
@@ -334,6 +334,8 @@ pub mod generic {
pub min: H,
/// Hash of the last block that we can use when querying changes.
pub max: H,
/// Storage child node key which changes are requested.
pub storage_key: Option<Vec<u8>>,
/// Storage key which changes are requested.
pub key: Vec<u8>,
}
+1 -1
View File
@@ -188,7 +188,7 @@ impl<B, E, Block: BlockT, RA> FullState<B, E, Block, RA>
for key in keys {
let mut last_block = None;
let mut last_value = last_values.get(key).cloned().unwrap_or_default();
let key_changes = self.client.key_changes(begin, end, key).map_err(client_err)?;
let key_changes = self.client.key_changes(begin, end, None, key).map_err(client_err)?;
for (block, _) in key_changes.into_iter().rev() {
if last_block == Some(block) {
continue;
+13 -1
View File
@@ -72,7 +72,14 @@ pub trait Backend<H: Hasher> {
/// Retrieve all entries keys which start with the given prefix and
/// call `f` for each of those keys.
fn for_keys_with_prefix<F: FnMut(&[u8])>(&self, prefix: &[u8], f: F);
fn for_keys_with_prefix<F: FnMut(&[u8])>(&self, prefix: &[u8], mut f: F) {
self.for_key_values_with_prefix(prefix, |k, _v| f(k))
}
/// Retrieve all entries keys and values of which start with the given prefix and
/// call `f` for each of those keys.
fn for_key_values_with_prefix<F: FnMut(&[u8], &[u8])>(&self, prefix: &[u8], f: F);
/// Retrieve all child entries keys which start with the given prefix and
/// call `f` for each of those keys.
@@ -321,6 +328,11 @@ impl<H: Hasher> Backend<H> for InMemory<H> {
self.inner.get(&None).map(|map| map.keys().filter(|key| key.starts_with(prefix)).map(|k| &**k).for_each(f));
}
fn for_key_values_with_prefix<F: FnMut(&[u8], &[u8])>(&self, prefix: &[u8], mut f: F) {
self.inner.get(&None).map(|map| map.iter().filter(|(key, _val)| key.starts_with(prefix))
.for_each(|(k, v)| f(k, v)));
}
fn for_keys_in_child_storage<F: FnMut(&[u8])>(&self, storage_key: &[u8], mut f: F) {
self.inner.get(&Some(storage_key.to_vec())).map(|map| map.keys().for_each(|k| f(&k)));
}
@@ -16,7 +16,7 @@
//! Structures and functions required to build changes trie for given block.
use std::collections::BTreeMap;
use std::collections::{BTreeMap, BTreeSet};
use std::collections::btree_map::Entry;
use codec::Decode;
use hash_db::Hasher;
@@ -27,6 +27,7 @@ use crate::trie_backend_essence::TrieBackendEssence;
use crate::changes_trie::build_iterator::digest_build_iterator;
use crate::changes_trie::input::{InputKey, InputPair, DigestIndex, ExtrinsicIndex};
use crate::changes_trie::{AnchorBlockId, ConfigurationRange, Storage, BlockNumber};
use crate::changes_trie::input::ChildIndex;
/// Prepare input pairs for building a changes trie of given block.
///
@@ -38,49 +39,121 @@ pub fn prepare_input<'a, B, H, Number>(
config: ConfigurationRange<'a, Number>,
changes: &'a OverlayedChanges,
parent: &'a AnchorBlockId<H::Out, Number>,
) -> Result<impl Iterator<Item=InputPair<Number>> + 'a, String>
) -> Result<(
impl Iterator<Item=InputPair<Number>> + 'a,
Vec<(ChildIndex<Number>, impl Iterator<Item=InputPair<Number>> + 'a)>,
), String>
where
B: Backend<H>,
H: Hasher + 'a,
Number: BlockNumber,
{
let number = parent.number.clone() + One::one();
let extrinsics_input = prepare_extrinsics_input(
let (extrinsics_input, children_extrinsics_input) = prepare_extrinsics_input(
backend,
&number,
changes)?;
let digest_input = prepare_digest_input::<H, Number>(
let (digest_input, mut children_digest_input) = prepare_digest_input::<H, Number>(
parent,
config,
number,
&number,
storage)?;
Ok(extrinsics_input.chain(digest_input))
}
let mut children_digest = Vec::with_capacity(children_extrinsics_input.len());
for (child_index, ext_iter) in children_extrinsics_input.into_iter() {
let dig_iter = children_digest_input.remove(&child_index);
children_digest.push((
child_index,
Some(ext_iter).into_iter().flatten()
.chain(dig_iter.into_iter().flatten()),
));
}
for (child_index, dig_iter) in children_digest_input.into_iter() {
children_digest.push((
child_index,
None.into_iter().flatten()
.chain(Some(dig_iter).into_iter().flatten()),
));
}
Ok((
extrinsics_input.chain(digest_input),
children_digest,
))
}
/// Prepare ExtrinsicIndex input pairs.
fn prepare_extrinsics_input<'a, B, H, Number>(
backend: &'a B,
block: &Number,
changes: &'a OverlayedChanges,
) -> Result<(
impl Iterator<Item=InputPair<Number>> + 'a,
BTreeMap<ChildIndex<Number>, impl Iterator<Item=InputPair<Number>> + 'a>,
), String>
where
B: Backend<H>,
H: Hasher + 'a,
Number: BlockNumber,
{
let mut children_keys = BTreeSet::<Vec<u8>>::new();
let mut children_result = BTreeMap::new();
for (storage_key, _) in changes.prospective.children.iter()
.chain(changes.committed.children.iter()) {
children_keys.insert(storage_key.clone());
}
for storage_key in children_keys {
let child_index = ChildIndex::<Number> {
block: block.clone(),
storage_key: storage_key.clone(),
};
let iter = prepare_extrinsics_input_inner(backend, block, changes, Some(storage_key))?;
children_result.insert(child_index, iter);
}
let top = prepare_extrinsics_input_inner(backend, block, changes, None)?;
Ok((top, children_result))
}
fn prepare_extrinsics_input_inner<'a, B, H, Number>(
backend: &'a B,
block: &Number,
changes: &'a OverlayedChanges,
storage_key: Option<Vec<u8>>,
) -> Result<impl Iterator<Item=InputPair<Number>> + 'a, String>
where
B: Backend<H>,
H: Hasher,
Number: BlockNumber,
{
changes.committed.top.iter()
.chain(changes.prospective.top.iter())
let (committed, prospective) = if let Some(sk) = storage_key.as_ref() {
(changes.committed.children.get(sk), changes.prospective.children.get(sk))
} else {
(Some(&changes.committed.top), Some(&changes.prospective.top))
};
committed.iter().flat_map(|c| c.iter())
.chain(prospective.iter().flat_map(|c| c.iter()))
.filter(|( _, v)| v.extrinsics.is_some())
.try_fold(BTreeMap::new(), |mut map: BTreeMap<&[u8], (ExtrinsicIndex<Number>, Vec<u32>)>, (k, v)| {
match map.entry(k) {
Entry::Vacant(entry) => {
// ignore temporary values (values that have null value at the end of operation
// AND are not in storage at the beginning of operation
if !changes.storage(k).map(|v| v.is_some()).unwrap_or_default() {
if !backend.exists_storage(k).map_err(|e| format!("{}", e))? {
return Ok(map);
if let Some(sk) = storage_key.as_ref() {
if !changes.child_storage(sk, k).map(|v| v.is_some()).unwrap_or_default() {
if !backend.exists_child_storage(sk, k).map_err(|e| format!("{}", e))? {
return Ok(map);
}
}
}
} else {
if !changes.storage(k).map(|v| v.is_some()).unwrap_or_default() {
if !backend.exists_storage(k).map_err(|e| format!("{}", e))? {
return Ok(map);
}
}
};
let extrinsics = v.extrinsics.as_ref()
.expect("filtered by filter() call above; qed")
@@ -109,13 +182,17 @@ fn prepare_extrinsics_input<'a, B, H, Number>(
.map(|pairs| pairs.into_iter().map(|(_, (k, v))| InputPair::ExtrinsicIndex(k, v)))
}
/// Prepare DigestIndex input pairs.
fn prepare_digest_input<'a, H, Number>(
parent: &'a AnchorBlockId<H::Out, Number>,
config: ConfigurationRange<'a, Number>,
block: Number,
block: &Number,
storage: &'a dyn Storage<H, Number>,
) -> Result<impl Iterator<Item=InputPair<Number>> + 'a, String>
) -> Result<(
impl Iterator<Item=InputPair<Number>> + 'a,
BTreeMap<ChildIndex<Number>, impl Iterator<Item=InputPair<Number>> + 'a>,
), String>
where
H: Hasher,
H::Out: 'a,
@@ -131,15 +208,16 @@ fn prepare_digest_input<'a, H, Number>(
};
digest_build_iterator(config, block_for_digest)
.try_fold(BTreeMap::new(), move |mut map, digest_build_block| {
.try_fold(
(BTreeMap::new(), BTreeMap::new()),
move |(mut map, mut child_map), digest_build_block| {
let extrinsic_prefix = ExtrinsicIndex::key_neutral_prefix(digest_build_block.clone());
let digest_prefix = DigestIndex::key_neutral_prefix(digest_build_block.clone());
let child_prefix = ChildIndex::key_neutral_prefix(digest_build_block.clone());
let trie_root = storage.root(parent, digest_build_block.clone())?;
let trie_root = trie_root.ok_or_else(|| format!("No changes trie root for block {}", digest_build_block.clone()))?;
let trie_storage = TrieBackendEssence::<_, H>::new(
crate::changes_trie::TrieBackendStorageAdapter(storage),
trie_root,
);
let mut insert_to_map = |key: Vec<u8>| {
let insert_to_map = |map: &mut BTreeMap<_,_>, key: Vec<u8>| {
match map.entry(key.clone()) {
Entry::Vacant(entry) => {
entry.insert((DigestIndex {
@@ -161,32 +239,74 @@ fn prepare_digest_input<'a, H, Number>(
}
};
let extrinsic_prefix = ExtrinsicIndex::key_neutral_prefix(digest_build_block.clone());
trie_storage.for_keys_with_prefix(&extrinsic_prefix, |key|
if let Ok(InputKey::ExtrinsicIndex::<Number>(trie_key)) = Decode::decode(&mut &key[..]) {
insert_to_map(trie_key.key);
});
let mut children_roots = BTreeMap::<Vec<u8>, _>::new();
{
let trie_storage = TrieBackendEssence::<_, H>::new(
crate::changes_trie::TrieBackendStorageAdapter(storage),
trie_root,
);
let digest_prefix = DigestIndex::key_neutral_prefix(digest_build_block.clone());
trie_storage.for_keys_with_prefix(&digest_prefix, |key|
if let Ok(InputKey::DigestIndex::<Number>(trie_key)) = Decode::decode(&mut &key[..]) {
insert_to_map(trie_key.key);
});
trie_storage.for_key_values_with_prefix(&child_prefix, |key, value|
if let Ok(InputKey::ChildIndex::<Number>(trie_key)) = Decode::decode(&mut &key[..]) {
if let Ok(value) = <Vec<u8>>::decode(&mut &value[..]) {
let mut trie_root = <H as Hasher>::Out::default();
trie_root.as_mut().copy_from_slice(&value[..]);
children_roots.insert(trie_key.storage_key, trie_root);
}
});
trie_storage.for_keys_with_prefix(&extrinsic_prefix, |key|
if let Ok(InputKey::ExtrinsicIndex::<Number>(trie_key)) = Decode::decode(&mut &key[..]) {
insert_to_map(&mut map, trie_key.key);
});
trie_storage.for_keys_with_prefix(&digest_prefix, |key|
if let Ok(InputKey::DigestIndex::<Number>(trie_key)) = Decode::decode(&mut &key[..]) {
insert_to_map(&mut map, trie_key.key);
});
}
for (storage_key, trie_root) in children_roots.into_iter() {
let child_index = ChildIndex::<Number> {
block: block.clone(),
storage_key,
};
let mut map = child_map.entry(child_index).or_insert_with(|| BTreeMap::<Vec<u8>, _>::new());
let trie_storage = TrieBackendEssence::<_, H>::new(
crate::changes_trie::TrieBackendStorageAdapter(storage),
trie_root,
);
trie_storage.for_keys_with_prefix(&extrinsic_prefix, |key|
if let Ok(InputKey::ExtrinsicIndex::<Number>(trie_key)) = Decode::decode(&mut &key[..]) {
insert_to_map(&mut map, trie_key.key);
});
trie_storage.for_keys_with_prefix(&digest_prefix, |key|
if let Ok(InputKey::DigestIndex::<Number>(trie_key)) = Decode::decode(&mut &key[..]) {
insert_to_map(&mut map, trie_key.key);
});
}
Ok((map, child_map))
Ok(map)
})
.map(|pairs| pairs.into_iter().map(|(_, (k, v))| InputPair::DigestIndex(k, v)))
.map(|(pairs, child_pairs)| (
pairs.into_iter().map(|(_, (k, v))| InputPair::DigestIndex(k, v)),
child_pairs.into_iter().map(|(sk, pairs)|
(sk, pairs.into_iter().map(|(_, (k, v))| InputPair::DigestIndex(k, v)))).collect(),
))
}
#[cfg(test)]
mod test {
use codec::Encode;
use primitives::Blake2Hasher;
use primitives::storage::well_known_keys::EXTRINSIC_INDEX;
use primitives::storage::well_known_keys::{EXTRINSIC_INDEX};
use crate::backend::InMemory;
use crate::changes_trie::Configuration;
use crate::changes_trie::storage::InMemoryStorage;
use crate::overlayed_changes::OverlayedValue;
use crate::overlayed_changes::{OverlayedValue, OverlayedChangeSet};
use super::*;
fn prepare_for_build(zero: u64) -> (
@@ -204,6 +324,8 @@ mod test {
(vec![104], vec![255]),
(vec![105], vec![255]),
].into_iter().collect::<::std::collections::HashMap<_, _>>().into();
let child_trie_key1 = b"1".to_vec();
let child_trie_key2 = b"2".to_vec();
let storage = InMemoryStorage::with_inputs(vec![
(zero + 1, vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 1, key: vec![100] }, vec![1, 3]),
@@ -237,9 +359,24 @@ mod test {
]),
(zero + 9, Vec::new()), (zero + 10, Vec::new()), (zero + 11, Vec::new()), (zero + 12, Vec::new()),
(zero + 13, Vec::new()), (zero + 14, Vec::new()), (zero + 15, Vec::new()),
], vec![(child_trie_key1.clone(), vec![
(zero + 1, vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 1, key: vec![100] }, vec![1, 3]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 1, key: vec![101] }, vec![0, 2]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 1, key: vec![105] }, vec![0, 2, 4]),
]),
(zero + 2, vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 2, key: vec![102] }, vec![0]),
]),
(zero + 4, vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 2, key: vec![102] }, vec![0, 3]),
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![102] }, vec![zero + 2]),
]),
]),
]);
let changes = OverlayedChanges {
prospective: vec![
prospective: OverlayedChangeSet { top: vec![
(vec![100], OverlayedValue {
value: Some(vec![200]),
extrinsics: Some(vec![0, 2].into_iter().collect())
@@ -249,7 +386,22 @@ mod test {
extrinsics: Some(vec![0, 1].into_iter().collect())
}),
].into_iter().collect(),
committed: vec![
children: vec![
(child_trie_key1.clone(), vec![
(vec![100], OverlayedValue {
value: Some(vec![200]),
extrinsics: Some(vec![0, 2].into_iter().collect())
})
].into_iter().collect()),
(child_trie_key2, vec![
(vec![100], OverlayedValue {
value: Some(vec![200]),
extrinsics: Some(vec![0, 2].into_iter().collect())
})
].into_iter().collect()),
].into_iter().collect()
},
committed: OverlayedChangeSet { top: vec![
(EXTRINSIC_INDEX.to_vec(), OverlayedValue {
value: Some(3u32.encode()),
extrinsics: None,
@@ -263,6 +415,15 @@ mod test {
extrinsics: Some(vec![1].into_iter().collect())
}),
].into_iter().collect(),
children: vec![
(child_trie_key1, vec![
(vec![100], OverlayedValue {
value: Some(vec![202]),
extrinsics: Some(vec![3].into_iter().collect())
})
].into_iter().collect()),
].into_iter().collect(),
},
changes_trie_config: Some(config.clone()),
};
@@ -289,11 +450,23 @@ mod test {
&changes,
&parent,
).unwrap();
assert_eq!(changes_trie_nodes.collect::<Vec<InputPair<u64>>>(), vec![
assert_eq!(changes_trie_nodes.0.collect::<Vec<InputPair<u64>>>(), vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 5, key: vec![100] }, vec![0, 2, 3]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 5, key: vec![101] }, vec![1]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 5, key: vec![103] }, vec![0, 1]),
]);
assert_eq!(changes_trie_nodes.1.into_iter()
.map(|(k,v)| (k, v.collect::<Vec<_>>())).collect::<Vec<_>>(), vec![
(ChildIndex { block: zero + 5u64, storage_key: b"1".to_vec() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 5u64, key: vec![100] }, vec![0, 2, 3]),
]),
(ChildIndex { block: zero + 5, storage_key: b"2".to_vec() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 5, key: vec![100] }, vec![0, 2]),
]),
]);
}
test_with_zero(0);
@@ -313,7 +486,7 @@ mod test {
&changes,
&parent,
).unwrap();
assert_eq!(changes_trie_nodes.collect::<Vec<InputPair<u64>>>(), vec![
assert_eq!(changes_trie_nodes.0.collect::<Vec<InputPair<u64>>>(), vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![100] }, vec![0, 2, 3]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![101] }, vec![1]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![103] }, vec![0, 1]),
@@ -323,6 +496,23 @@ mod test {
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![102] }, vec![zero + 2]),
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![105] }, vec![zero + 1, zero + 3]),
]);
assert_eq!(changes_trie_nodes.1.into_iter()
.map(|(k,v)| (k, v.collect::<Vec<_>>())).collect::<Vec<_>>(), vec![
(ChildIndex { block: zero + 4u64, storage_key: b"1".to_vec() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4u64, key: vec![100] }, vec![0, 2, 3]),
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![100] }, vec![zero + 1]),
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![101] }, vec![zero + 1]),
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![102] }, vec![zero + 2]),
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![105] }, vec![zero + 1]),
]),
(ChildIndex { block: zero + 4, storage_key: b"2".to_vec() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![100] }, vec![0, 2]),
]),
]);
}
test_with_zero(0);
@@ -342,7 +532,7 @@ mod test {
&changes,
&parent,
).unwrap();
assert_eq!(changes_trie_nodes.collect::<Vec<InputPair<u64>>>(), vec![
assert_eq!(changes_trie_nodes.0.collect::<Vec<InputPair<u64>>>(), vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 16, key: vec![100] }, vec![0, 2, 3]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 16, key: vec![101] }, vec![1]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 16, key: vec![103] }, vec![0, 1]),
@@ -353,6 +543,20 @@ mod test {
InputPair::DigestIndex(DigestIndex { block: zero + 16, key: vec![103] }, vec![zero + 4]),
InputPair::DigestIndex(DigestIndex { block: zero + 16, key: vec![105] }, vec![zero + 4, zero + 8]),
]);
assert_eq!(changes_trie_nodes.1.into_iter()
.map(|(k,v)| (k, v.collect::<Vec<_>>())).collect::<Vec<_>>(), vec![
(ChildIndex { block: zero + 16u64, storage_key: b"1".to_vec() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 16u64, key: vec![100] }, vec![0, 2, 3]),
InputPair::DigestIndex(DigestIndex { block: zero + 16, key: vec![102] }, vec![zero + 4]),
]),
(ChildIndex { block: zero + 16, storage_key: b"2".to_vec() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 16, key: vec![100] }, vec![0, 2]),
]),
]);
}
test_with_zero(0);
@@ -374,7 +578,7 @@ mod test {
&changes,
&parent,
).unwrap();
assert_eq!(changes_trie_nodes.collect::<Vec<InputPair<u64>>>(), vec![
assert_eq!(changes_trie_nodes.0.collect::<Vec<InputPair<u64>>>(), vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 11, key: vec![100] }, vec![0, 2, 3]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 11, key: vec![101] }, vec![1]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 11, key: vec![103] }, vec![0, 1]),
@@ -388,7 +592,7 @@ mod test {
&changes,
&parent,
).unwrap();
assert_eq!(changes_trie_nodes.collect::<Vec<InputPair<u64>>>(), vec![
assert_eq!(changes_trie_nodes.0.collect::<Vec<InputPair<u64>>>(), vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 11, key: vec![100] }, vec![0, 2, 3]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 11, key: vec![101] }, vec![1]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 11, key: vec![103] }, vec![0, 1]),
@@ -425,7 +629,7 @@ mod test {
&changes,
&parent,
).unwrap();
assert_eq!(changes_trie_nodes.collect::<Vec<InputPair<u64>>>(), vec![
assert_eq!(changes_trie_nodes.0.collect::<Vec<InputPair<u64>>>(), vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![100] }, vec![0, 2, 3]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![101] }, vec![1]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![103] }, vec![0, 1]),
@@ -435,6 +639,23 @@ mod test {
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![102] }, vec![zero + 2]),
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![105] }, vec![zero + 1, zero + 3]),
]);
assert_eq!(changes_trie_nodes.1.into_iter()
.map(|(k,v)| (k, v.collect::<Vec<_>>())).collect::<Vec<_>>(), vec![
(ChildIndex { block: zero + 4u64, storage_key: b"1".to_vec() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4u64, key: vec![100] }, vec![0, 2, 3]),
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![100] }, vec![zero + 1]),
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![101] }, vec![zero + 1]),
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![102] }, vec![zero + 2]),
InputPair::DigestIndex(DigestIndex { block: zero + 4, key: vec![105] }, vec![zero + 1]),
]),
(ChildIndex { block: zero + 4, storage_key: b"2".to_vec() },
vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: zero + 4, key: vec![100] }, vec![0, 2]),
]),
]);
}
test_with_zero(0);
@@ -26,6 +26,7 @@ use trie::Recorder;
use crate::changes_trie::{AnchorBlockId, ConfigurationRange, RootsStorage, Storage, BlockNumber};
use crate::changes_trie::input::{DigestIndex, ExtrinsicIndex, DigestIndexValue, ExtrinsicIndexValue};
use crate::changes_trie::storage::{TrieBackendAdapter, InMemoryStorage};
use crate::changes_trie::input::ChildIndex;
use crate::changes_trie::surface_iterator::{surface_iterator, SurfaceIterator};
use crate::proving_backend::ProvingBackendEssence;
use crate::trie_backend_essence::{TrieBackendEssence};
@@ -39,6 +40,7 @@ pub fn key_changes<'a, H: Hasher, Number: BlockNumber>(
begin: Number,
end: &'a AnchorBlockId<H::Out, Number>,
max: Number,
storage_key: Option<&'a [u8]>,
key: &'a [u8],
) -> Result<DrilldownIterator<'a, H, Number>, String> {
// we can't query any roots before root
@@ -46,6 +48,7 @@ pub fn key_changes<'a, H: Hasher, Number: BlockNumber>(
Ok(DrilldownIterator {
essence: DrilldownIteratorEssence {
storage_key,
key,
roots_storage: storage.as_roots_storage(),
storage,
@@ -67,6 +70,7 @@ pub fn key_changes<'a, H: Hasher, Number: BlockNumber>(
})
}
/// Returns proof of changes of given key at given blocks range.
/// `max` is the number of best known block.
pub fn key_changes_proof<'a, H: Hasher, Number: BlockNumber>(
@@ -75,6 +79,7 @@ pub fn key_changes_proof<'a, H: Hasher, Number: BlockNumber>(
begin: Number,
end: &AnchorBlockId<H::Out, Number>,
max: Number,
storage_key: Option<&[u8]>,
key: &[u8],
) -> Result<Vec<Vec<u8>>, String> {
// we can't query any roots before root
@@ -82,6 +87,7 @@ pub fn key_changes_proof<'a, H: Hasher, Number: BlockNumber>(
let mut iter = ProvingDrilldownIterator {
essence: DrilldownIteratorEssence {
storage_key,
key,
roots_storage: storage.as_roots_storage(),
storage,
@@ -121,6 +127,7 @@ pub fn key_changes_proof_check<'a, H: Hasher, Number: BlockNumber>(
begin: Number,
end: &AnchorBlockId<H::Out, Number>,
max: Number,
storage_key: Option<&[u8]>,
key: &[u8]
) -> Result<Vec<(Number, u32)>, String> {
key_changes_proof_check_with_db(
@@ -130,6 +137,7 @@ pub fn key_changes_proof_check<'a, H: Hasher, Number: BlockNumber>(
begin,
end,
max,
storage_key,
key,
)
}
@@ -142,6 +150,7 @@ pub fn key_changes_proof_check_with_db<'a, H: Hasher, Number: BlockNumber>(
begin: Number,
end: &AnchorBlockId<H::Out, Number>,
max: Number,
storage_key: Option<&[u8]>,
key: &[u8]
) -> Result<Vec<(Number, u32)>, String> {
// we can't query any roots before root
@@ -149,6 +158,7 @@ pub fn key_changes_proof_check_with_db<'a, H: Hasher, Number: BlockNumber>(
DrilldownIterator {
essence: DrilldownIteratorEssence {
storage_key,
key,
roots_storage,
storage: proof_db,
@@ -178,6 +188,7 @@ pub struct DrilldownIteratorEssence<'a, H, Number>
Number: BlockNumber,
H::Out: 'a,
{
storage_key: Option<&'a [u8]>,
key: &'a [u8],
roots_storage: &'a dyn RootsStorage<H, Number>,
storage: &'a dyn Storage<H, Number>,
@@ -224,6 +235,25 @@ impl<'a, H, Number> DrilldownIteratorEssence<'a, H, Number>
// AND trie roots for old blocks are known (both on full + light node)
let trie_root = self.roots_storage.root(&self.end, block.clone())?
.ok_or_else(|| format!("Changes trie root for block {} is not found", block.clone()))?;
let trie_root = if let Some(storage_key) = self.storage_key {
let child_key = ChildIndex {
block: block.clone(),
storage_key: storage_key.to_vec(),
}.encode();
if let Some(trie_root) = trie_reader(self.storage, trie_root, &child_key)?
.and_then(|v| <Vec<u8>>::decode(&mut &v[..]).ok())
.map(|v| {
let mut hash = H::Out::default();
hash.as_mut().copy_from_slice(&v[..]);
hash
}) {
trie_root
} else {
continue;
}
} else {
trie_root
};
// only return extrinsics for blocks before self.max
// most of blocks will be filtered out before pushing to `self.blocks`
@@ -356,8 +386,10 @@ mod tests {
let config = Configuration { digest_interval: 4, digest_levels: 2 };
let backend = InMemoryStorage::with_inputs(vec![
// digest: 1..4 => [(3, 0)]
(1, vec![]),
(2, vec![]),
(1, vec![
]),
(2, vec![
]),
(3, vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 3, key: vec![42] }, vec![0]),
]),
@@ -386,6 +418,19 @@ mod tests {
(16, vec![
InputPair::DigestIndex(DigestIndex { block: 16, key: vec![42] }, vec![4, 8]),
]),
], vec![(b"1".to_vec(), vec![
(1, vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 1, key: vec![42] }, vec![0]),
]),
(2, vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 2, key: vec![42] }, vec![3]),
]),
(16, vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16, key: vec![42] }, vec![5]),
InputPair::DigestIndex(DigestIndex { block: 16, key: vec![42] }, vec![2]),
]),
]),
]);
(config, backend)
@@ -408,6 +453,7 @@ mod tests {
1,
&AnchorBlockId { hash: Default::default(), number: 16 },
16,
None,
&[42],
).and_then(Result::from_iter);
assert_eq!(drilldown_result, Ok(vec![(8, 2), (8, 1), (6, 3), (3, 0)]));
@@ -418,6 +464,7 @@ mod tests {
1,
&AnchorBlockId { hash: Default::default(), number: 2 },
4,
None,
&[42],
).and_then(Result::from_iter);
assert_eq!(drilldown_result, Ok(vec![]));
@@ -428,6 +475,7 @@ mod tests {
1,
&AnchorBlockId { hash: Default::default(), number: 3 },
4,
None,
&[42],
).and_then(Result::from_iter);
assert_eq!(drilldown_result, Ok(vec![(3, 0)]));
@@ -438,6 +486,7 @@ mod tests {
1,
&AnchorBlockId { hash: Default::default(), number: 7 },
7,
None,
&[42],
).and_then(Result::from_iter);
assert_eq!(drilldown_result, Ok(vec![(6, 3), (3, 0)]));
@@ -448,6 +497,7 @@ mod tests {
7,
&AnchorBlockId { hash: Default::default(), number: 8 },
8,
None,
&[42],
).and_then(Result::from_iter);
assert_eq!(drilldown_result, Ok(vec![(8, 2), (8, 1)]));
@@ -458,6 +508,7 @@ mod tests {
5,
&AnchorBlockId { hash: Default::default(), number: 7 },
8,
None,
&[42],
).and_then(Result::from_iter);
assert_eq!(drilldown_result, Ok(vec![(6, 3)]));
@@ -474,6 +525,17 @@ mod tests {
1,
&AnchorBlockId { hash: Default::default(), number: 100 },
1000,
None,
&[42],
).and_then(|i| i.collect::<Result<Vec<_>, _>>()).is_err());
assert!(key_changes::<Blake2Hasher, u64>(
configuration_range(&config, 0),
&storage,
1,
&AnchorBlockId { hash: Default::default(), number: 100 },
1000,
Some(&b"1"[..]),
&[42],
).and_then(|i| i.collect::<Result<Vec<_>, _>>()).is_err());
}
@@ -487,6 +549,7 @@ mod tests {
1,
&AnchorBlockId { hash: Default::default(), number: 100 },
50,
None,
&[42],
).is_err());
assert!(key_changes::<Blake2Hasher, u64>(
@@ -495,6 +558,7 @@ mod tests {
20,
&AnchorBlockId { hash: Default::default(), number: 10 },
100,
None,
&[42],
).is_err());
}
@@ -507,8 +571,13 @@ mod tests {
// create drilldown iterator that records all trie nodes during drilldown
let (remote_config, remote_storage) = prepare_for_drilldown();
let remote_proof = key_changes_proof::<Blake2Hasher, u64>(
configuration_range(&remote_config, 0), &remote_storage,
1, &AnchorBlockId { hash: Default::default(), number: 16 }, 16, &[42]).unwrap();
configuration_range(&remote_config, 0), &remote_storage, 1,
&AnchorBlockId { hash: Default::default(), number: 16 }, 16, None, &[42]).unwrap();
let (remote_config, remote_storage) = prepare_for_drilldown();
let remote_proof_child = key_changes_proof::<Blake2Hasher, u64>(
configuration_range(&remote_config, 0), &remote_storage, 1,
&AnchorBlockId { hash: Default::default(), number: 16 }, 16, Some(&b"1"[..]), &[42]).unwrap();
// happens on local light node:
@@ -516,11 +585,18 @@ mod tests {
let (local_config, local_storage) = prepare_for_drilldown();
local_storage.clear_storage();
let local_result = key_changes_proof_check::<Blake2Hasher, u64>(
configuration_range(&local_config, 0), &local_storage, remote_proof,
1, &AnchorBlockId { hash: Default::default(), number: 16 }, 16, &[42]);
configuration_range(&local_config, 0), &local_storage, remote_proof, 1,
&AnchorBlockId { hash: Default::default(), number: 16 }, 16, None, &[42]);
let (local_config, local_storage) = prepare_for_drilldown();
local_storage.clear_storage();
let local_result_child = key_changes_proof_check::<Blake2Hasher, u64>(
configuration_range(&local_config, 0), &local_storage, remote_proof_child, 1,
&AnchorBlockId { hash: Default::default(), number: 16 }, 16, Some(&b"1"[..]), &[42]);
// check that drilldown result is the same as if it was happening at the full node
assert_eq!(local_result, Ok(vec![(8, 2), (8, 1), (6, 3), (3, 0)]));
assert_eq!(local_result_child, Ok(vec![(16, 5), (2, 3)]));
}
#[test]
@@ -543,11 +619,17 @@ mod tests {
input[79 - 1].1.push(InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 79, key: vec![42] }, vec![1]));
input[80 - 1].1.push(InputPair::DigestIndex(DigestIndex { block: 80, key: vec![42] }, vec![79]));
input[91 - 1].1.push(InputPair::DigestIndex(DigestIndex { block: 91, key: vec![42] }, vec![80]));
let storage = InMemoryStorage::with_inputs(input);
let storage = InMemoryStorage::with_inputs(input, vec![]);
let drilldown_result = key_changes::<Blake2Hasher, u64>(
config_range, &storage, 1, &AnchorBlockId { hash: Default::default(), number: 91 }, 100_000u64, &[42])
.and_then(Result::from_iter);
config_range,
&storage,
1,
&AnchorBlockId { hash: Default::default(), number: 91 },
100_000u64,
None,
&[42],
).and_then(Result::from_iter);
assert_eq!(drilldown_result, Ok(vec![(79, 1), (63, 0)]));
}
}
@@ -40,9 +40,22 @@ pub struct DigestIndex<Number: BlockNumber> {
pub key: Vec<u8>,
}
/// Key of { childtrie storage key => child changes trie root } mapping.
///
/// One entry exists per (block, child storage key) pair; its value
/// (`ChildIndexValue`) is the root of the child changes trie built for
/// that child trie at that block.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct ChildIndex<Number: BlockNumber> {
/// Block at which this key has been inserted in the trie.
pub block: Number,
/// Storage key of the child trie this entry is responsible for.
pub storage_key: Vec<u8>,
}
/// Value of { changed key => block/digest block numbers } mapping.
pub type DigestIndexValue<Number> = Vec<Number>;
/// Value of { childtrie storage key => child changes trie } mapping.
/// That is the root of the child changes trie, as raw encoded bytes.
pub type ChildIndexValue = Vec<u8>;
/// Single input pair of changes trie.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum InputPair<Number: BlockNumber> {
@@ -50,6 +63,8 @@ pub enum InputPair<Number: BlockNumber> {
ExtrinsicIndex(ExtrinsicIndex<Number>, ExtrinsicIndexValue),
/// Element of { key => set of blocks/digest blocks where key has been changed } element mapping.
DigestIndex(DigestIndex<Number>, DigestIndexValue<Number>),
/// Element of { childtrie key => Childchange trie } where key has been changed } element mapping.
ChildIndex(ChildIndex<Number>, ChildIndexValue),
}
/// Single input key of changes trie.
@@ -59,6 +74,8 @@ pub enum InputKey<Number: BlockNumber> {
ExtrinsicIndex(ExtrinsicIndex<Number>),
/// Key of { key => set of blocks/digest blocks where key has been changed } element mapping.
DigestIndex(DigestIndex<Number>),
/// Key of { childtrie key => Childchange trie } where key has been changed } element mapping.
ChildIndex(ChildIndex<Number>),
}
impl<Number: BlockNumber> Into<(Vec<u8>, Vec<u8>)> for InputPair<Number> {
@@ -66,6 +83,7 @@ impl<Number: BlockNumber> Into<(Vec<u8>, Vec<u8>)> for InputPair<Number> {
match self {
InputPair::ExtrinsicIndex(key, value) => (key.encode(), value.encode()),
InputPair::DigestIndex(key, value) => (key.encode(), value.encode()),
InputPair::ChildIndex(key, value) => (key.encode(), value.encode()),
}
}
}
@@ -75,6 +93,7 @@ impl<Number: BlockNumber> Into<InputKey<Number>> for InputPair<Number> {
match self {
InputPair::ExtrinsicIndex(key, _) => InputKey::ExtrinsicIndex(key),
InputPair::DigestIndex(key, _) => InputKey::DigestIndex(key),
InputPair::ChildIndex(key, _) => InputKey::ChildIndex(key),
}
}
}
@@ -114,6 +133,22 @@ impl<Number: BlockNumber> Encode for DigestIndex<Number> {
}
}
impl<Number: BlockNumber> ChildIndex<Number> {
	/// Returns the serialized prefix shared by every `ChildIndex` entry of the
	/// given block, independent of its `storage_key` (the `3` discriminant byte
	/// followed by the encoded block number). Useful for prefix iteration over
	/// all child-trie roots of one block.
	pub fn key_neutral_prefix(block: Number) -> Vec<u8> {
		let encoded_block = block.encode();
		let mut prefix = Vec::with_capacity(1 + encoded_block.len());
		prefix.push(3);
		prefix.extend(encoded_block);
		prefix
	}
}
impl<Number: BlockNumber> Encode for ChildIndex<Number> {
fn encode_to<W: Output>(&self, dest: &mut W) {
// Variant discriminant `3` distinguishes `ChildIndex` from the other
// input-key variants; it must stay in sync with the `3 =>` arm of
// `InputKey::decode` and with `ChildIndex::key_neutral_prefix`.
dest.push_byte(3);
self.block.encode_to(dest);
self.storage_key.encode_to(dest);
}
}
impl<Number: BlockNumber> codec::EncodeLike for DigestIndex<Number> {}
impl<Number: BlockNumber> Decode for InputKey<Number> {
@@ -127,6 +162,10 @@ impl<Number: BlockNumber> Decode for InputKey<Number> {
block: Decode::decode(input)?,
key: Decode::decode(input)?,
})),
3 => Ok(InputKey::ChildIndex(ChildIndex {
block: Decode::decode(input)?,
storage_key: Decode::decode(input)?,
})),
_ => Err("Invalid input key variant".into()),
}
}
@@ -186,13 +186,34 @@ pub fn build_changes_trie<'a, B: Backend<H>, S: Storage<H, Number>, H: Hasher, N
let parent = storage.build_anchor(parent_hash).map_err(|_| ())?;
// storage errors are considered fatal (similar to situations when runtime fetches values from storage)
let input_pairs = prepare_input::<B, H, Number>(backend, storage, config, changes, &parent)
.expect("changes trie: storage access is not allowed to fail within runtime");
let mut root = Default::default();
let (input_pairs, child_input_pairs) = prepare_input::<B, H, Number>(
backend,
storage,
config,
changes,
&parent,
).expect("changes trie: storage access is not allowed to fail within runtime");
let mut mdb = MemoryDB::default();
let mut child_roots = Vec::with_capacity(child_input_pairs.len());
for (child_index, input_pairs) in child_input_pairs {
let mut not_empty = false;
let mut root = Default::default();
{
let mut trie = TrieDBMut::<H>::new(&mut mdb, &mut root);
for (key, value) in input_pairs.map(Into::into) {
not_empty = true;
trie.insert(&key, &value)
.expect("changes trie: insertion to trie is not allowed to fail within runtime");
}
}
if not_empty {
child_roots.push(input::InputPair::ChildIndex(child_index, root.as_ref().to_vec()));
}
}
let mut root = Default::default();
{
let mut trie = TrieDBMut::<H>::new(&mut mdb, &mut root);
for (key, value) in input_pairs.map(Into::into) {
for (key, value) in input_pairs.chain(child_roots.into_iter()).map(Into::into) {
trie.insert(&key, &value)
.expect("changes trie: insertion to trie is not allowed to fail within runtime");
}
@@ -24,6 +24,8 @@ use crate::proving_backend::ProvingBackendEssence;
use crate::trie_backend_essence::TrieBackendEssence;
use crate::changes_trie::{AnchorBlockId, Configuration, Storage, BlockNumber};
use crate::changes_trie::storage::TrieBackendAdapter;
use crate::changes_trie::input::{ChildIndex, InputKey};
use codec::Decode;
/// Get number of oldest block for which changes trie is not pruned
/// given changes trie configuration, pruning parameter and number of
@@ -54,6 +56,7 @@ pub fn prune<S: Storage<H, Number>, H: Hasher, Number: BlockNumber, F: FnMut(H::
current_block: &AnchorBlockId<H::Out, Number>,
mut remove_trie_node: F,
) {
// select range for pruning
let (first, last) = match pruning_range(config, min_blocks_to_keep, current_block.number.clone()) {
Some((first, last)) => (first, last),
@@ -81,23 +84,55 @@ pub fn prune<S: Storage<H, Number>, H: Hasher, Number: BlockNumber, F: FnMut(H::
continue;
},
};
let children_roots = {
let trie_storage = TrieBackendEssence::<_, H>::new(
crate::changes_trie::TrieBackendStorageAdapter(storage),
root,
);
let child_prefix = ChildIndex::key_neutral_prefix(block.clone());
let mut children_roots = Vec::new();
trie_storage.for_key_values_with_prefix(&child_prefix, |key, value| {
if let Ok(InputKey::ChildIndex::<Number>(_trie_key)) = Decode::decode(&mut &key[..]) {
if let Ok(value) = <Vec<u8>>::decode(&mut &value[..]) {
let mut trie_root = <H as Hasher>::Out::default();
trie_root.as_mut().copy_from_slice(&value[..]);
children_roots.push(trie_root);
}
}
});
// enumerate all changes trie' keys, recording all nodes that have been 'touched'
// (effectively - all changes trie nodes)
let mut proof_recorder: Recorder<H::Out> = Default::default();
{
let mut trie = ProvingBackendEssence::<_, H> {
backend: &TrieBackendEssence::new(TrieBackendAdapter::new(storage), root),
proof_recorder: &mut proof_recorder,
};
trie.record_all_keys();
children_roots
};
for root in children_roots.into_iter() {
prune_trie(storage, root, &mut remove_trie_node);
}
// all nodes of this changes trie should be pruned
remove_trie_node(root);
for node in proof_recorder.drain().into_iter().map(|n| n.hash) {
remove_trie_node(node);
}
prune_trie(storage, root, &mut remove_trie_node);
}
}
// Prune a single changes trie (top-level or child), removing every node
// reachable from `root`, including the root node itself.
fn prune_trie<S: Storage<H, Number>, H: Hasher, Number: BlockNumber, F: FnMut(H::Out)>(
	storage: &S,
	root: H::Out,
	remove_trie_node: &mut F,
) {
	// Walk the whole trie once, recording every node touched along the way.
	// Enumerating all keys visits all nodes, so the recorder ends up holding
	// the complete node set of this trie.
	let mut recorder: Recorder<H::Out> = Default::default();
	{
		let mut proving_backend = ProvingBackendEssence::<_, H> {
			backend: &TrieBackendEssence::new(TrieBackendAdapter::new(storage), root),
			proof_recorder: &mut recorder,
		};
		proving_backend.record_all_keys();
	}

	// Hand the root and every recorded node over for removal.
	remove_trie_node(root);
	for node_hash in recorder.drain().into_iter().map(|n| n.hash) {
		remove_trie_node(node_hash);
	}
}
@@ -169,6 +204,7 @@ mod tests {
use primitives::Blake2Hasher;
use crate::backend::insert_into_memory_db;
use crate::changes_trie::storage::InMemoryStorage;
use codec::Encode;
use super::*;
fn config(interval: u32, levels: u32) -> Configuration {
@@ -193,12 +229,19 @@ mod tests {
#[test]
fn prune_works() {
fn prepare_storage() -> InMemoryStorage<Blake2Hasher, u64> {
let child_key = ChildIndex { block: 67u64, storage_key: b"1".to_vec() }.encode();
let mut mdb1 = MemoryDB::<Blake2Hasher>::default();
let root1 = insert_into_memory_db::<Blake2Hasher, _>(&mut mdb1, vec![(vec![10], vec![20])]).unwrap();
let mut mdb2 = MemoryDB::<Blake2Hasher>::default();
let root2 = insert_into_memory_db::<Blake2Hasher, _>(&mut mdb2, vec![(vec![11], vec![21]), (vec![12], vec![22])]).unwrap();
let mut mdb3 = MemoryDB::<Blake2Hasher>::default();
let root3 = insert_into_memory_db::<Blake2Hasher, _>(&mut mdb3, vec![(vec![13], vec![23]), (vec![14], vec![24])]).unwrap();
let ch_root3 = insert_into_memory_db::<Blake2Hasher, _>(&mut mdb3, vec![(vec![110], vec![120])]).unwrap();
let root3 = insert_into_memory_db::<Blake2Hasher, _>(&mut mdb3, vec![
(vec![13], vec![23]),
(vec![14], vec![24]),
(child_key, ch_root3.as_ref().encode()),
]).unwrap();
let mut mdb4 = MemoryDB::<Blake2Hasher>::default();
let root4 = insert_into_memory_db::<Blake2Hasher, _>(&mut mdb4, vec![(vec![15], vec![25])]).unwrap();
let storage = InMemoryStorage::new();
@@ -29,7 +29,7 @@ use std::collections::HashSet;
#[cfg(test)]
use crate::backend::insert_into_memory_db;
#[cfg(test)]
use crate::changes_trie::input::InputPair;
use crate::changes_trie::input::{InputPair, ChildIndex};
/// In-memory implementation of changes trie storage.
pub struct InMemoryStorage<H: Hasher, Number: BlockNumber> {
@@ -85,10 +85,32 @@ impl<H: Hasher, Number: BlockNumber> InMemoryStorage<H, Number> {
}
#[cfg(test)]
pub fn with_inputs(inputs: Vec<(Number, Vec<InputPair<Number>>)>) -> Self {
pub fn with_inputs(
mut top_inputs: Vec<(Number, Vec<InputPair<Number>>)>,
children_inputs: Vec<(Vec<u8>, Vec<(Number, Vec<InputPair<Number>>)>)>,
) -> Self {
let mut mdb = MemoryDB::default();
let mut roots = BTreeMap::new();
for (block, pairs) in inputs {
for (storage_key, child_input) in children_inputs {
for (block, pairs) in child_input {
let root = insert_into_memory_db::<H, _>(&mut mdb, pairs.into_iter().map(Into::into));
if let Some(root) = root {
let ix = if let Some(ix) = top_inputs.iter().position(|v| v.0 == block) {
ix
} else {
top_inputs.push((block.clone(), Default::default()));
top_inputs.len() - 1
};
top_inputs[ix].1.push(InputPair::ChildIndex(
ChildIndex { block: block.clone(), storage_key: storage_key.clone() },
root.as_ref().to_vec(),
));
}
}
}
for (block, pairs) in top_inputs {
let root = insert_into_memory_db::<H, _>(&mut mdb, pairs.into_iter().map(Into::into));
if let Some(root) = root {
roots.insert(block, root);
+4 -4
View File
@@ -305,10 +305,10 @@ where
let child_delta_iter = child_storage_keys.map(|storage_key|
(storage_key.clone(), self.overlay.committed.children.get(storage_key)
.into_iter()
.flat_map(|map| map.1.iter().map(|(k, v)| (k.clone(), v.clone())))
.flat_map(|map| map.iter().map(|(k, v)| (k.clone(), v.value.clone())))
.chain(self.overlay.prospective.children.get(storage_key)
.into_iter()
.flat_map(|map| map.1.iter().map(|(k, v)| (k.clone(), v.clone()))))));
.flat_map(|map| map.iter().map(|(k, v)| (k.clone(), v.value.clone()))))));
// compute and memoize
@@ -333,10 +333,10 @@ where
let delta = self.overlay.committed.children.get(storage_key)
.into_iter()
.flat_map(|map| map.1.iter().map(|(k, v)| (k.clone(), v.clone())))
.flat_map(|map| map.iter().map(|(k, v)| (k.clone(), v.value.clone())))
.chain(self.overlay.prospective.children.get(storage_key)
.into_iter()
.flat_map(|map| map.1.clone().into_iter()));
.flat_map(|map| map.clone().into_iter().map(|(k, v)| (k.clone(), v.value.clone()))));
let root = self.backend.child_storage_root(storage_key, delta).0;
@@ -55,7 +55,7 @@ pub struct OverlayedChangeSet {
/// Top level storage changes.
pub top: HashMap<Vec<u8>, OverlayedValue>,
/// Child storage changes.
pub children: HashMap<Vec<u8>, (Option<BTreeSet<u32>>, HashMap<Vec<u8>, Option<Vec<u8>>>)>,
pub children: HashMap<Vec<u8>, HashMap<Vec<u8>, OverlayedValue>>,
}
#[cfg(test)]
@@ -117,14 +117,14 @@ impl OverlayedChanges {
/// value has been set.
pub fn child_storage(&self, storage_key: &[u8], key: &[u8]) -> Option<Option<&[u8]>> {
if let Some(map) = self.prospective.children.get(storage_key) {
if let Some(val) = map.1.get(key) {
return Some(val.as_ref().map(AsRef::as_ref));
if let Some(val) = map.get(key) {
return Some(val.value.as_ref().map(AsRef::as_ref));
}
}
if let Some(map) = self.committed.children.get(storage_key) {
if let Some(val) = map.1.get(key) {
return Some(val.as_ref().map(AsRef::as_ref));
if let Some(val) = map.get(key) {
return Some(val.value.as_ref().map(AsRef::as_ref));
}
}
@@ -151,10 +151,11 @@ impl OverlayedChanges {
pub(crate) fn set_child_storage(&mut self, storage_key: Vec<u8>, key: Vec<u8>, val: Option<Vec<u8>>) {
let extrinsic_index = self.extrinsic_index();
let map_entry = self.prospective.children.entry(storage_key).or_default();
map_entry.1.insert(key, val);
let entry = map_entry.entry(key).or_default();
entry.value = val;
if let Some(extrinsic) = extrinsic_index {
map_entry.0.get_or_insert_with(Default::default)
entry.extrinsics.get_or_insert_with(Default::default)
.insert(extrinsic);
}
}
@@ -169,16 +170,28 @@ impl OverlayedChanges {
let extrinsic_index = self.extrinsic_index();
let map_entry = self.prospective.children.entry(storage_key.to_vec()).or_default();
if let Some(extrinsic) = extrinsic_index {
map_entry.0.get_or_insert_with(Default::default)
.insert(extrinsic);
}
map_entry.values_mut().for_each(|e| {
if let Some(extrinsic) = extrinsic_index {
e.extrinsics.get_or_insert_with(Default::default)
.insert(extrinsic);
}
map_entry.1.values_mut().for_each(|e| *e = None);
e.value = None;
});
if let Some((_, committed_map)) = self.committed.children.get(storage_key) {
for (key, _) in committed_map.iter() {
map_entry.1.insert(key.clone(), None);
if let Some(committed_map) = self.committed.children.get(storage_key) {
for (key, value) in committed_map.iter() {
if !map_entry.contains_key(key) {
map_entry.insert(key.clone(), OverlayedValue {
value: None,
extrinsics: extrinsic_index.map(|i| {
let mut e = value.extrinsics.clone()
.unwrap_or_else(|| BTreeSet::default());
e.insert(i);
e
}),
});
}
}
}
}
@@ -224,12 +237,12 @@ impl OverlayedChanges {
let extrinsic_index = self.extrinsic_index();
let map_entry = self.prospective.children.entry(storage_key.to_vec()).or_default();
for (key, entry) in map_entry.1.iter_mut() {
for (key, entry) in map_entry.iter_mut() {
if key.starts_with(prefix) {
*entry = None;
entry.value = None;
if let Some(extrinsic) = extrinsic_index {
map_entry.0.get_or_insert_with(Default::default)
entry.extrinsics.get_or_insert_with(Default::default)
.insert(extrinsic);
}
}
@@ -238,13 +251,13 @@ impl OverlayedChanges {
if let Some(child_committed) = self.committed.children.get(storage_key) {
// Then do the same with keys from commited changes.
// NOTE that we are making changes in the prospective change set.
for key in child_committed.1.keys() {
for key in child_committed.keys() {
if key.starts_with(prefix) {
let entry = map_entry.1.entry(key.clone()).or_default();
*entry = None;
let entry = map_entry.entry(key.clone()).or_default();
entry.value = None;
if let Some(extrinsic) = extrinsic_index {
map_entry.0.get_or_insert_with(Default::default)
entry.extrinsics.get_or_insert_with(Default::default)
.insert(extrinsic);
}
}
@@ -271,13 +284,16 @@ impl OverlayedChanges {
.extend(prospective_extrinsics);
}
}
for (storage_key, map) in self.prospective.children.drain() {
let entry = self.committed.children.entry(storage_key).or_default();
entry.1.extend(map.1.iter().map(|(k, v)| (k.clone(), v.clone())));
for (storage_key, mut map) in self.prospective.children.drain() {
let map_dest = self.committed.children.entry(storage_key).or_default();
for (key, val) in map.drain() {
let entry = map_dest.entry(key).or_default();
entry.value = val.value;
if let Some(prospective_extrinsics) = map.0 {
entry.0.get_or_insert_with(Default::default)
.extend(prospective_extrinsics);
if let Some(prospective_extrinsics) = val.extrinsics {
entry.extrinsics.get_or_insert_with(Default::default)
.extend(prospective_extrinsics);
}
}
}
}
@@ -293,7 +309,8 @@ impl OverlayedChanges {
){
assert!(self.prospective.is_empty());
(self.committed.top.into_iter().map(|(k, v)| (k, v.value)),
self.committed.children.into_iter().map(|(sk, v)| (sk, v.1.into_iter())))
self.committed.children.into_iter()
.map(|(sk, v)| (sk, v.into_iter().map(|(k, v)| (k, v.value)))))
}
/// Inserts storage entry responsible for current extrinsic index.
@@ -174,6 +174,10 @@ impl<'a, S, H> Backend<H> for ProvingBackend<'a, S, H>
self.backend.for_keys_with_prefix(prefix, f)
}
fn for_key_values_with_prefix<F: FnMut(&[u8], &[u8])>(&self, prefix: &[u8], f: F) {
self.backend.for_key_values_with_prefix(prefix, f)
}
fn for_child_keys_with_prefix<F: FnMut(&[u8])>(&self, storage_key: &[u8], prefix: &[u8], f: F) {
self.backend.for_child_keys_with_prefix(storage_key, prefix, f)
}
+6 -6
View File
@@ -108,8 +108,8 @@ impl<H: Hasher, N: ChangesTrieBlockNumber> TestExternalities<H, N> {
let children = self.overlay.committed.children.clone().into_iter()
.chain(self.overlay.prospective.children.clone().into_iter())
.flat_map(|(keyspace, map)| {
map.1.into_iter()
.map(|(k, v)| (Some(keyspace.clone()), k, v))
map.into_iter()
.map(|(k, v)| (Some(keyspace.clone()), k, v.value))
.collect::<Vec<_>>()
});
@@ -236,10 +236,10 @@ impl<H, N> Externalities<H> for TestExternalities<H, N>
let child_delta_iter = child_storage_keys.map(|storage_key|
(storage_key.clone(), self.overlay.committed.children.get(storage_key)
.into_iter()
.flat_map(|map| map.1.iter().map(|(k, v)| (k.clone(), v.clone())))
.flat_map(|map| map.iter().map(|(k, v)| (k.clone(), v.value.clone())))
.chain(self.overlay.prospective.children.get(storage_key)
.into_iter()
.flat_map(|map| map.1.iter().map(|(k, v)| (k.clone(), v.clone()))))));
.flat_map(|map| map.iter().map(|(k, v)| (k.clone(), v.value.clone()))))));
// compute and memoize
@@ -255,10 +255,10 @@ impl<H, N> Externalities<H> for TestExternalities<H, N>
let (root, is_empty, _) = {
let delta = self.overlay.committed.children.get(storage_key)
.into_iter()
.flat_map(|map| map.1.iter().map(|(k, v)| (k.clone(), v.clone())))
.flat_map(|map| map.clone().into_iter().map(|(k, v)| (k, v.value)))
.chain(self.overlay.prospective.children.get(storage_key)
.into_iter()
.flat_map(|map| map.1.clone().into_iter()));
.flat_map(|map| map.clone().into_iter().map(|(k, v)| (k, v.value))));
self.backend.child_storage_root(storage_key, delta)
};
@@ -78,6 +78,10 @@ impl<S: TrieBackendStorage<H>, H: Hasher> Backend<H> for TrieBackend<S, H> where
self.essence.for_keys_with_prefix(prefix, f)
}
fn for_key_values_with_prefix<F: FnMut(&[u8], &[u8])>(&self, prefix: &[u8], f: F) {
self.essence.for_key_values_with_prefix(prefix, f)
}
fn for_keys_in_child_storage<F: FnMut(&[u8])>(&self, storage_key: &[u8], f: F) {
self.essence.for_keys_in_child_storage(storage_key, f)
}
@@ -119,7 +119,7 @@ impl<S: TrieBackendStorage<H>, H: Hasher> TrieBackendEssence<S, H> {
}
/// Execute given closure for all keys starting with prefix.
pub fn for_child_keys_with_prefix<F: FnMut(&[u8])>(&self, storage_key: &[u8], prefix: &[u8], f: F) {
pub fn for_child_keys_with_prefix<F: FnMut(&[u8])>(&self, storage_key: &[u8], prefix: &[u8], mut f: F) {
let root_vec = match self.storage(storage_key) {
Ok(v) => v.unwrap_or(default_child_trie_root::<Layout<H>>(storage_key)),
Err(e) => {
@@ -130,16 +130,21 @@ impl<S: TrieBackendStorage<H>, H: Hasher> TrieBackendEssence<S, H> {
let mut root = H::Out::default();
root.as_mut().copy_from_slice(&root_vec);
self.keys_with_prefix_inner(&root, prefix, f)
self.keys_values_with_prefix_inner(&root, prefix, |k, _v| f(k))
}
/// Execute given closure for all keys starting with prefix.
pub fn for_keys_with_prefix<F: FnMut(&[u8])>(&self, prefix: &[u8], f: F) {
self.keys_with_prefix_inner(&self.root, prefix, f)
pub fn for_keys_with_prefix<F: FnMut(&[u8])>(&self, prefix: &[u8], mut f: F) {
self.keys_values_with_prefix_inner(&self.root, prefix, |k, _v| f(k))
}
fn keys_with_prefix_inner<F: FnMut(&[u8])>(&self, root: &H::Out, prefix: &[u8], mut f: F) {
fn keys_values_with_prefix_inner<F: FnMut(&[u8], &[u8])>(
&self,
root: &H::Out,
prefix: &[u8],
mut f: F,
) {
let mut read_overlay = S::Overlay::default();
let eph = Ephemeral {
storage: &self.storage,
@@ -153,13 +158,13 @@ impl<S: TrieBackendStorage<H>, H: Hasher> TrieBackendEssence<S, H> {
iter.seek(prefix)?;
for x in iter {
let (key, _) = x?;
let (key, value) = x?;
if !key.starts_with(prefix) {
break;
}
f(&key);
f(&key, &value);
}
Ok(())
@@ -170,6 +175,11 @@ impl<S: TrieBackendStorage<H>, H: Hasher> TrieBackendEssence<S, H> {
}
}
/// Execute given closure for all key and values starting with prefix.
pub fn for_key_values_with_prefix<F: FnMut(&[u8], &[u8])>(&self, prefix: &[u8], f: F) {
self.keys_values_with_prefix_inner(&self.root, prefix, f)
}
}
pub(crate) struct Ephemeral<'a, S: 'a + TrieBackendStorage<H>, H: 'a + Hasher> {