mirror of
https://github.com/pezkuwichain/pezkuwi-subxt.git
synced 2026-04-30 06:08:00 +00:00
Light friendly storage tracking: changes trie + extending over ranges (#628)
* changes_trie * changes_trie: continue * changes_trie: adding tests * fixed TODO * removed obsolete ExtrinsicChanges * encodable ChangesTrieConfiguration * removed polkadot file * fixed grumbles * ext_storage_changes_root returns u32 * moved changes trie root to digest * removed commented code * read storage values from native code * fixed grumbles * fixed grumbles * missing comma
This commit is contained in:
committed by
Gav Wood
parent
24479cd7f5
commit
7fa337afbc
@@ -0,0 +1,296 @@
|
||||
// Copyright 2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Substrate.
|
||||
|
||||
// Substrate is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Substrate is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Structures and functions required to build changes trie for given block.
|
||||
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
use codec::Decode;
|
||||
use hashdb::Hasher;
|
||||
use heapsize::HeapSizeOf;
|
||||
use patricia_trie::NodeCodec;
|
||||
use backend::Backend;
|
||||
use overlayed_changes::OverlayedChanges;
|
||||
use trie_backend_essence::{TrieBackendStorage, TrieBackendEssence};
|
||||
use changes_trie::build_iterator::digest_build_iterator;
|
||||
use changes_trie::input::{InputKey, InputPair, DigestIndex, ExtrinsicIndex};
|
||||
use changes_trie::{Configuration, Storage};
|
||||
|
||||
/// Prepare input pairs for building a changes trie of given block.
///
/// Returns Err if storage error has occurred OR if storage hasn't returned
/// required data.
/// Returns Ok(None) if data required to prepare input pairs is not collected
/// or storage is not provided.
pub fn prepare_input<'a, B, S, H, C>(
	backend: &B,
	storage: Option<&'a S>,
	changes: &OverlayedChanges,
	block: u64,
) -> Result<Option<Vec<InputPair>>, String>
	where
		B: Backend<H, C>,
		S: Storage<H>,
		&'a S: TrieBackendStorage<H>,
		H: Hasher,
		H::Out: HeapSizeOf,
		C: NodeCodec<H>,
{
	// changes trie can only be built when both the storage of previous tries
	// and the runtime-provided configuration are available
	let (storage, config) = match (storage, changes.changes_trie_config.as_ref()) {
		(Some(storage), Some(config)) => (storage, config),
		_ => return Ok(None),
	};

	let mut input = Vec::new();
	// pairs mapping each changed key to indices of extrinsics that changed it
	input.extend(prepare_extrinsics_input(
		backend,
		block,
		changes)?);
	// pairs mapping keys changed in covered (lower-level) blocks to those block numbers
	input.extend(prepare_digest_input::<_, H, C>(
		block,
		config,
		storage)?);

	Ok(Some(input))
}
|
||||
|
||||
/// Prepare ExtrinsicIndex input pairs.
///
/// Scans overlayed (prospective + committed) changes and returns, for every
/// key that was actually changed, the set of extrinsic indices that changed it.
fn prepare_extrinsics_input<B, H, C>(
	backend: &B,
	block: u64,
	changes: &OverlayedChanges,
) -> Result<impl Iterator<Item=InputPair>, String>
	where
		B: Backend<H, C>,
		H: Hasher,
		C: NodeCodec<H>,
{
	// key => set of indices of extrinsics that have changed this key
	let mut extrinsic_map = BTreeMap::<Vec<u8>, BTreeSet<u32>>::new();
	for (key, val) in changes.prospective.iter().chain(changes.committed.iter()) {
		// entries without extrinsics info (e.g. well-known internal keys) are skipped
		let extrinsics = match val.extrinsics {
			Some(ref extrinsics) => extrinsics,
			None => continue,
		};

		// ignore values that have null value at the end of operation AND are not in storage
		// at the beginning of operation
		if !changes.storage(key).map(|v| v.is_some()).unwrap_or_default() {
			if !backend.exists_storage(key).map_err(|e| format!("{}", e))? {
				continue;
			}
		}

		// the same key may appear in both prospective and committed maps =>
		// merge extrinsic indices into a single set
		extrinsic_map.entry(key.clone()).or_default()
			.extend(extrinsics.iter().cloned());
	}

	Ok(extrinsic_map.into_iter()
		.map(move |(key, extrinsics)| InputPair::ExtrinsicIndex(ExtrinsicIndex {
			block,
			key,
		}, extrinsics.iter().cloned().collect())))
}
|
||||
|
||||
/// Prepare DigestIndex input pairs.
///
/// For a digest block, walks the changes tries of every covered block (as
/// selected by `digest_build_iterator`) and maps each key changed there to
/// the set of covered block numbers where the change happened.
fn prepare_digest_input<'a, S, H, C>(
	block: u64,
	config: &Configuration,
	storage: &'a S
) -> Result<impl Iterator<Item=InputPair>, String>
	where
		S: Storage<H>,
		&'a S: TrieBackendStorage<H>,
		H: Hasher,
		H::Out: HeapSizeOf,
		C: NodeCodec<H>,
{
	// key => set of blocks (covered by this digest) where the key has changed
	let mut digest_map = BTreeMap::<Vec<u8>, BTreeSet<u64>>::new();
	for digest_build_block in digest_build_iterator(config, block) {
		// a changes trie root must exist for every covered block; a missing
		// root is a hard error here, not a skippable condition
		let trie_root = storage.root(digest_build_block)?;
		let trie_root = trie_root.ok_or_else(|| format!("No changes trie root for block {}", digest_build_block))?;
		let trie_storage = TrieBackendEssence::<_, H, C>::new(storage, trie_root);

		// collect keys changed directly by extrinsics of the covered block
		let extrinsic_prefix = ExtrinsicIndex::key_neutral_prefix(digest_build_block);
		trie_storage.for_keys_with_prefix(&extrinsic_prefix, |key|
			if let Some(InputKey::ExtrinsicIndex(trie_key)) = Decode::decode(&mut &key[..]) {
				digest_map.entry(trie_key.key).or_default()
					.insert(digest_build_block);
			});

		// collect keys referenced by lower-level digests in the covered block
		let digest_prefix = DigestIndex::key_neutral_prefix(digest_build_block);
		trie_storage.for_keys_with_prefix(&digest_prefix, |key|
			if let Some(InputKey::DigestIndex(trie_key)) = Decode::decode(&mut &key[..]) {
				digest_map.entry(trie_key.key).or_default()
					.insert(digest_build_block);
			});
	}

	Ok(digest_map.into_iter()
		.map(move |(key, set)| InputPair::DigestIndex(DigestIndex {
			block,
			key
		}, set.into_iter().collect())))
}
|
||||
|
||||
#[cfg(test)]
mod test {
	use codec::Encode;
	use primitives::{Blake2Hasher, RlpCodec};
	use backend::InMemory;
	use changes_trie::storage::InMemoryStorage;
	use overlayed_changes::OverlayedValue;
	use super::*;

	/// Shared fixture: a backend with six pre-existing keys, changes-trie
	/// storage covering blocks 1..=15 (digest_interval = 4, digest_levels = 2,
	/// so block 4 is a level1 digest and block 16 a level2 digest) and
	/// overlayed changes for the block being built.
	fn prepare_for_build() -> (InMemory<Blake2Hasher, RlpCodec>, InMemoryStorage<Blake2Hasher>, OverlayedChanges) {
		let backend: InMemory<_, _> = vec![
			(vec![100], vec![255]),
			(vec![101], vec![255]),
			(vec![102], vec![255]),
			(vec![103], vec![255]),
			(vec![104], vec![255]),
			(vec![105], vec![255]),
		].into_iter().collect::<::std::collections::HashMap<_, _>>().into();
		let storage = InMemoryStorage::with_inputs::<RlpCodec>(vec![
			(1, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 1, key: vec![100] }, vec![1, 3]),
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 1, key: vec![101] }, vec![0, 2]),
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 1, key: vec![105] }, vec![0, 2, 4]),
			]),
			(2, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 2, key: vec![102] }, vec![0]),
			]),
			(3, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 3, key: vec![100] }, vec![0]),
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 3, key: vec![105] }, vec![1]),
			]),
			(4, vec![
				// block 4 is itself a level1 digest: it carries both its own
				// extrinsic changes and digest pairs for blocks 1..=3
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![100] }, vec![0, 2, 3]),
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![101] }, vec![1]),
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![103] }, vec![0, 1]),

				InputPair::DigestIndex(DigestIndex { block: 4, key: vec![100] }, vec![1, 3]),
				InputPair::DigestIndex(DigestIndex { block: 4, key: vec![101] }, vec![1]),
				InputPair::DigestIndex(DigestIndex { block: 4, key: vec![102] }, vec![2]),
				InputPair::DigestIndex(DigestIndex { block: 4, key: vec![105] }, vec![1, 3]),
			]),
			(5, Vec::new()),
			(6, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 6, key: vec![105] }, vec![2]),
			]),
			(7, Vec::new()),
			(8, vec![
				InputPair::DigestIndex(DigestIndex { block: 8, key: vec![105] }, vec![6]),
			]),
			(9, Vec::new()), (10, Vec::new()), (11, Vec::new()), (12, Vec::new()), (13, Vec::new()),
			(14, Vec::new()), (15, Vec::new()),
		]);
		let changes = OverlayedChanges {
			prospective: vec![
				(vec![100], OverlayedValue {
					value: Some(vec![200]),
					extrinsics: Some(vec![0, 2].into_iter().collect())
				}),
				(vec![103], OverlayedValue {
					// set to None, but the key exists in the backend =>
					// must still be reported as changed
					value: None,
					extrinsics: Some(vec![0, 1].into_iter().collect())
				}),
			].into_iter().collect(),
			committed: vec![
				// internal key without extrinsics info: must be ignored
				(b":extrinsic_index".to_vec(), OverlayedValue {
					value: Some(3u32.encode()),
					extrinsics: None,
				}),
				(vec![100], OverlayedValue {
					value: Some(vec![202]),
					extrinsics: Some(vec![3].into_iter().collect())
				}),
				(vec![101], OverlayedValue {
					value: Some(vec![203]),
					extrinsics: Some(vec![1].into_iter().collect())
				}),
			].into_iter().collect(),
			changes_trie_config: Some(Configuration { digest_interval: 4, digest_levels: 2 }),
		};

		(backend, storage, changes)
	}

	#[test]
	fn build_changes_trie_nodes_on_non_digest_block() {
		// block 5 is not a digest block => only ExtrinsicIndex pairs expected
		let (backend, storage, changes) = prepare_for_build();
		let changes_trie_nodes = prepare_input::<_, _, _, RlpCodec>(&backend, Some(&storage), &changes, 5).unwrap();
		assert_eq!(changes_trie_nodes, Some(vec![
			InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 5, key: vec![100] }, vec![0, 2, 3]),
			InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 5, key: vec![101] }, vec![1]),
			InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 5, key: vec![103] }, vec![0, 1]),
		]));
	}

	#[test]
	fn build_changes_trie_nodes_on_digest_block_l1() {
		// block 4 is a level1 digest block => DigestIndex pairs point at blocks 1..=3
		let (backend, storage, changes) = prepare_for_build();
		let changes_trie_nodes = prepare_input::<_, _, _, RlpCodec>(&backend, Some(&storage), &changes, 4).unwrap();
		assert_eq!(changes_trie_nodes, Some(vec![
			InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![100] }, vec![0, 2, 3]),
			InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![101] }, vec![1]),
			InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![103] }, vec![0, 1]),

			InputPair::DigestIndex(DigestIndex { block: 4, key: vec![100] }, vec![1, 3]),
			InputPair::DigestIndex(DigestIndex { block: 4, key: vec![101] }, vec![1]),
			InputPair::DigestIndex(DigestIndex { block: 4, key: vec![102] }, vec![2]),
			InputPair::DigestIndex(DigestIndex { block: 4, key: vec![105] }, vec![1, 3]),
		]));
	}

	#[test]
	fn build_changes_trie_nodes_on_digest_block_l2() {
		// block 16 is a level2 digest block => DigestIndex pairs point at
		// the level1 digest blocks 4, 8, 12 (here: 4 and 8 carry data)
		let (backend, storage, changes) = prepare_for_build();
		let changes_trie_nodes = prepare_input::<_, _, _, RlpCodec>(&backend, Some(&storage), &changes, 16).unwrap();
		assert_eq!(changes_trie_nodes, Some(vec![
			InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16, key: vec![100] }, vec![0, 2, 3]),
			InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16, key: vec![101] }, vec![1]),
			InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16, key: vec![103] }, vec![0, 1]),

			InputPair::DigestIndex(DigestIndex { block: 16, key: vec![100] }, vec![4]),
			InputPair::DigestIndex(DigestIndex { block: 16, key: vec![101] }, vec![4]),
			InputPair::DigestIndex(DigestIndex { block: 16, key: vec![102] }, vec![4]),
			InputPair::DigestIndex(DigestIndex { block: 16, key: vec![103] }, vec![4]),
			InputPair::DigestIndex(DigestIndex { block: 16, key: vec![105] }, vec![4, 8]),
		]));
	}

	#[test]
	fn build_changes_trie_nodes_ignores_temporary_storage_values() {
		let (backend, storage, mut changes) = prepare_for_build();

		// 110: missing from backend, set to None in overlay
		changes.prospective.insert(vec![110], OverlayedValue {
			value: None,
			extrinsics: Some(vec![1].into_iter().collect())
		});

		// key 110 must not appear in the output
		let changes_trie_nodes = prepare_input::<_, _, _, RlpCodec>(&backend, Some(&storage), &changes, 4).unwrap();
		assert_eq!(changes_trie_nodes, Some(vec![
			InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![100] }, vec![0, 2, 3]),
			InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![101] }, vec![1]),
			InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![103] }, vec![0, 1]),

			InputPair::DigestIndex(DigestIndex { block: 4, key: vec![100] }, vec![1, 3]),
			InputPair::DigestIndex(DigestIndex { block: 4, key: vec![101] }, vec![1]),
			InputPair::DigestIndex(DigestIndex { block: 4, key: vec![102] }, vec![2]),
			InputPair::DigestIndex(DigestIndex { block: 4, key: vec![105] }, vec![1, 3]),
		]));
	}
}
|
||||
@@ -0,0 +1,212 @@
|
||||
// Copyright 2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Substrate.
|
||||
|
||||
// Substrate is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Substrate is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Structures and functions to return blocks whose changes are to be included
|
||||
//! in given block' changes trie.
|
||||
|
||||
use changes_trie::Configuration;
|
||||
|
||||
/// Returns iterator of OTHER blocks that are required for inclusion into
/// changes trie of given block.
pub fn digest_build_iterator(config: &Configuration, block: u64) -> DigestBuildIterator {
	// digest is never built in these cases
	if block == 0 || config.digest_interval <= 1 || config.digest_levels == 0 {
		return DigestBuildIterator::empty();
	}

	// level1 digest is built every digest_interval blocks; other blocks get
	// no digest at all
	let mut digest_interval = config.digest_interval;
	if block % digest_interval != 0 {
		return DigestBuildIterator::empty();
	}

	// we have checked that the block is at least level1-digest
	// => try to find highest digest level for inclusion
	let mut current_level = 1u32;
	let mut digest_step = 1u64;
	while current_level < config.digest_levels {
		// checked_mul guards against digest_interval^level overflowing u64;
		// on overflow (or when block is not aligned to the next level) we
		// stay at the current level
		let new_digest_interval = match digest_interval.checked_mul(config.digest_interval) {
			Some(new_digest_interval) if block % new_digest_interval == 0 => new_digest_interval,
			_ => break,
		};

		digest_step = digest_interval;
		digest_interval = new_digest_interval;
		current_level = current_level + 1;
	}

	// digest_step is digest_interval^(level-1): the stride between the
	// highest-level entries the iterator will emit
	DigestBuildIterator::new(block, config.digest_interval, digest_step)
}
|
||||
|
||||
/// Changes trie build iterator that returns numbers of OTHER blocks that are
/// required for inclusion into changes trie of given block.
#[derive(Debug)]
pub struct DigestBuildIterator {
	/// Block we're building changes trie for.
	block: u64,
	/// Interval at which digest blocks are created.
	digest_interval: u64,
	/// Step of the blocks range currently being traversed.
	current_step: u64,
	/// Blocks range currently being traversed (exhausted ranges are replaced lazily).
	current_range: Option<::std::iter::StepBy<::std::ops::Range<u64>>>,
	/// Largest step this iterator will ever use; iteration stops once the
	/// next step would exceed it.
	max_step: u64,
}

impl DigestBuildIterator {
	/// Create new digest build iterator.
	pub fn new(block: u64, digest_interval: u64, max_step: u64) -> Self {
		DigestBuildIterator {
			block,
			digest_interval,
			current_step: 0,
			current_range: None,
			max_step,
		}
	}

	/// Create empty digest build iterator.
	pub fn empty() -> Self {
		Self::new(0, 0, 0)
	}
}

impl Iterator for DigestBuildIterator {
	type Item = u64;

	fn next(&mut self) -> Option<Self::Item> {
		// keep draining the range we are currently in, if any
		if let Some(block) = self.current_range.as_mut().and_then(Iterator::next) {
			return Some(block);
		}

		// we are safe to use non-checking mul/sub versions here because:
		// DigestBuildIterator is created only by internal function that is checking
		// that all multiplications/subtractions are safe within max_step limit

		// first call starts at step 1; every later call multiplies the step
		// by the digest interval, moving one digest level up
		let step = match self.current_step {
			0 => 1,
			current => current * self.digest_interval,
		};
		if step > self.max_step {
			return None;
		}

		self.current_step = step;
		self.current_range = Some(
			((self.block - step * self.digest_interval + step)..self.block)
				.step_by(step as usize)
		);

		Some(self.current_range.as_mut()
			.expect("assigned one line above; qed")
			.next()
			.expect("X - I^(N+1) + I^N > X when X,I,N are > 1; qed"))
	}
}
|
||||
|
||||
#[cfg(test)]
mod tests {
	use super::*;

	/// Convenience wrapper constructing a Configuration inline.
	fn digest_build_iterator(digest_interval: u64, digest_levels: u32, block: u64) -> DigestBuildIterator {
		super::digest_build_iterator(&Configuration { digest_interval, digest_levels }, block)
	}

	/// Returns (block, digest_interval, max_step) of the created iterator.
	fn digest_build_iterator_basic(digest_interval: u64, digest_levels: u32, block: u64) -> (u64, u64, u64) {
		let iter = digest_build_iterator(digest_interval, digest_levels, block);
		(iter.block, iter.digest_interval, iter.max_step)
	}

	/// Collects every block number the iterator yields.
	fn digest_build_iterator_blocks(digest_interval: u64, digest_levels: u32, block: u64) -> Vec<u64> {
		digest_build_iterator(digest_interval, digest_levels, block).collect()
	}

	#[test]
	fn suggest_digest_inclusion_returns_empty_iterator() {
		let empty = (0, 0, 0);
		assert_eq!(digest_build_iterator_basic(4, 16, 0), empty, "block is 0");
		assert_eq!(digest_build_iterator_basic(0, 16, 64), empty, "digest_interval is 0");
		assert_eq!(digest_build_iterator_basic(1, 16, 64), empty, "digest_interval is 1");
		assert_eq!(digest_build_iterator_basic(4, 0, 64), empty, "digest_levels is 0");
		assert_eq!(digest_build_iterator_basic(4, 16, 1), empty, "digest is not required for this block");
		assert_eq!(digest_build_iterator_basic(4, 16, 2), empty, "digest is not required for this block");
		assert_eq!(digest_build_iterator_basic(4, 16, 15), empty, "digest is not required for this block");
		assert_eq!(digest_build_iterator_basic(4, 16, 17), empty, "digest is not required for this block");
		assert_eq!(digest_build_iterator_basic(::std::u64::MAX / 2 + 1, 16, ::std::u64::MAX), empty, "digest_interval * 2 is greater than u64::MAX");
	}

	#[test]
	fn suggest_digest_inclusion_returns_level1_iterator() {
		assert_eq!(digest_build_iterator_basic(16, 1, 16), (16, 16, 1), "!(block % interval) && first digest level == block");
		assert_eq!(digest_build_iterator_basic(16, 1, 256), (256, 16, 1), "!(block % interval^2), but there's only 1 digest level");
		assert_eq!(digest_build_iterator_basic(16, 2, 32), (32, 16, 1), "second level digest is not required for this block");
		assert_eq!(digest_build_iterator_basic(16, 3, 4080), (4080, 16, 1), "second && third level digest are not required for this block");
	}

	#[test]
	fn suggest_digest_inclusion_returns_level2_iterator() {
		assert_eq!(digest_build_iterator_basic(16, 2, 256), (256, 16, 16), "second level digest");
		assert_eq!(digest_build_iterator_basic(16, 2, 4096), (4096, 16, 16), "!(block % interval^3), but there's only 2 digest levels");
	}

	#[test]
	fn suggest_digest_inclusion_returns_level3_iterator() {
		assert_eq!(digest_build_iterator_basic(16, 3, 4096), (4096, 16, 256), "third level digest: beginning");
		assert_eq!(digest_build_iterator_basic(16, 3, 8192), (8192, 16, 256), "third level digest: next");
	}

	#[test]
	fn digest_iterator_returns_level1_blocks() {
		// a level1 digest covers the digest_interval - 1 immediately preceding blocks
		assert_eq!(digest_build_iterator_blocks(16, 1, 16),
			vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]);
		assert_eq!(digest_build_iterator_blocks(16, 1, 256),
			vec![241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255]);
		assert_eq!(digest_build_iterator_blocks(16, 2, 32),
			vec![17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]);
		assert_eq!(digest_build_iterator_blocks(16, 3, 4080),
			vec![4065, 4066, 4067, 4068, 4069, 4070, 4071, 4072, 4073, 4074, 4075, 4076, 4077, 4078, 4079]);
	}

	#[test]
	fn digest_iterator_returns_level1_and_level2_blocks() {
		assert_eq!(digest_build_iterator_blocks(16, 2, 256),
			vec![
				// level2 is a level1 digest of 16-1 previous blocks:
				241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
				// level2 points to previous 16-1 level1 digests:
				16, 32, 48, 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, 224, 240,
			],
		);
		assert_eq!(digest_build_iterator_blocks(16, 2, 4096),
			vec![
				// level2 is a level1 digest of 16-1 previous blocks:
				4081, 4082, 4083, 4084, 4085, 4086, 4087, 4088, 4089, 4090, 4091, 4092, 4093, 4094, 4095,
				// level2 points to previous 16-1 level1 digests:
				3856, 3872, 3888, 3904, 3920, 3936, 3952, 3968, 3984, 4000, 4016, 4032, 4048, 4064, 4080,
			],
		);
	}

	#[test]
	fn digest_iterator_returns_level1_and_level2_and_level3_blocks() {
		assert_eq!(digest_build_iterator_blocks(16, 3, 4096),
			vec![
				// level3 is a level1 digest of 16-1 previous blocks:
				4081, 4082, 4083, 4084, 4085, 4086, 4087, 4088, 4089, 4090, 4091, 4092, 4093, 4094, 4095,
				// level3 points to previous 16-1 level1 digests:
				3856, 3872, 3888, 3904, 3920, 3936, 3952, 3968, 3984, 4000, 4016, 4032, 4048, 4064, 4080,
				// level3 points to previous 16-1 level2 digests:
				256, 512, 768, 1024, 1280, 1536, 1792, 2048, 2304, 2560, 2816, 3072, 3328, 3584, 3840,
			],
		);
	}
}
|
||||
@@ -0,0 +1,453 @@
|
||||
// Copyright 2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Substrate.
|
||||
|
||||
// Substrate is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Substrate is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Functions + iterator that traverses changes tries and returns all
|
||||
//! (block, extrinsic) pairs where given key has been changed.
|
||||
|
||||
use std::cell::RefCell;
|
||||
use std::collections::VecDeque;
|
||||
use codec::{Decode, Encode};
|
||||
use hashdb::{HashDB, Hasher};
|
||||
use heapsize::HeapSizeOf;
|
||||
use memorydb::MemoryDB;
|
||||
use patricia_trie::{NodeCodec, Recorder};
|
||||
use changes_trie::{Configuration, Storage};
|
||||
use changes_trie::input::{DigestIndex, ExtrinsicIndex, DigestIndexValue, ExtrinsicIndexValue};
|
||||
use changes_trie::storage::{TrieBackendAdapter, InMemoryStorage};
|
||||
use proving_backend::ProvingBackendEssence;
|
||||
use trie_backend_essence::{TrieBackendEssence};
|
||||
|
||||
/// Return changes of given key at given blocks range.
/// `max` is the number of best known block.
///
/// Returns Err on storage errors; on success yields (block, extrinsic index)
/// pairs for every change of `key` within [begin; end].
pub fn key_changes<S: Storage<H>, H: Hasher, C: NodeCodec<H>>(
	config: &Configuration,
	storage: &S,
	begin: u64,
	end: u64,
	max: u64,
	key: &[u8],
) -> Result<Vec<(u64, u32)>, String> where H::Out: HeapSizeOf {
	// non-proving drilldown: the same storage serves both roots and trie nodes
	DrilldownIterator {
		essence: DrilldownIteratorEssence {
			key,
			roots_storage: storage,
			storage,
			surface: surface_iterator(config, max, begin, end)?,

			extrinsics: Default::default(),
			blocks: Default::default(),

			_hasher: ::std::marker::PhantomData::<H>::default(),
		},
		_codec: ::std::marker::PhantomData::<C>::default(),
	}.collect()
}
|
||||
|
||||
/// Returns proof of changes of given key at given blocks range.
|
||||
/// `max` is the number of best known block.
|
||||
pub fn key_changes_proof<S: Storage<H>, H: Hasher, C: NodeCodec<H>>(
|
||||
config: &Configuration,
|
||||
storage: &S,
|
||||
begin: u64,
|
||||
end: u64,
|
||||
max: u64,
|
||||
key: &[u8],
|
||||
) -> Result<Vec<Vec<u8>>, String> where H::Out: HeapSizeOf {
|
||||
let mut iter = ProvingDrilldownIterator {
|
||||
essence: DrilldownIteratorEssence {
|
||||
key,
|
||||
roots_storage: storage.clone(),
|
||||
storage,
|
||||
surface: surface_iterator(config, max, begin, end)?,
|
||||
|
||||
extrinsics: Default::default(),
|
||||
blocks: Default::default(),
|
||||
|
||||
_hasher: ::std::marker::PhantomData::<H>::default(),
|
||||
},
|
||||
proof_recorder: Default::default(),
|
||||
_codec: ::std::marker::PhantomData::<C>::default(),
|
||||
};
|
||||
|
||||
// iterate to collect proof
|
||||
while let Some(item) = iter.next() {
|
||||
item?;
|
||||
}
|
||||
|
||||
Ok(iter.extract_proof())
|
||||
}
|
||||
|
||||
/// Check key changes proof and return changes of the key at given blocks range.
/// `max` is the number of best known block.
///
/// Trie nodes are read exclusively from the supplied `proof`; only the
/// changes trie roots are read from `roots_storage`.
pub fn key_changes_proof_check<S: Storage<H>, H: Hasher, C: NodeCodec<H>>(
	config: &Configuration,
	roots_storage: &S, // TODO: use RootsStorage is only used to gather root
	proof: Vec<Vec<u8>>,
	begin: u64,
	end: u64,
	max: u64,
	key: &[u8]
) -> Result<Vec<(u64, u32)>, String> where H::Out: HeapSizeOf {
	// reconstruct an in-memory trie database from the proof nodes
	let mut proof_db = MemoryDB::<H>::new();
	for item in proof {
		proof_db.insert(&item);
	}

	// drilldown exactly as in `key_changes`, but resolving trie nodes
	// against the proof database; a proof missing required nodes will
	// surface as a read error during iteration
	let proof_db = InMemoryStorage::with_db(proof_db);
	DrilldownIterator {
		essence: DrilldownIteratorEssence {
			key,
			roots_storage,
			storage: &proof_db,
			surface: surface_iterator(config, max, begin, end)?,

			extrinsics: Default::default(),
			blocks: Default::default(),

			_hasher: ::std::marker::PhantomData::<H>::default(),
		},
		_codec: ::std::marker::PhantomData::<C>::default(),
	}.collect()
}
|
||||
|
||||
/// Surface iterator - only traverses top-level digests from given range and tries to find
/// all digest changes for the key.
pub struct SurfaceIterator<'a> {
	/// Changes trie configuration.
	config: &'a Configuration,
	/// First block of the searched range.
	begin: u64,
	/// Number of the best known block.
	max: u64,
	/// Next block to return; None once iteration has finished.
	current: Option<u64>,
	// NOTE(review): fields below appear to track the lowest block covered by
	// the digest that `current` belongs to, the stride between successive
	// blocks at the current digest level, and that level — initialized by
	// `lower_bound_max_digest` (defined below, outside this view); confirm.
	current_begin: u64,
	digest_step: u64,
	digest_level: u32,
}

impl<'a> Iterator for SurfaceIterator<'a> {
	type Item = Result<(u64, u32), String>;

	fn next(&mut self) -> Option<Self::Item> {
		// the value returned by THIS call was computed on the previous call
		// (or at construction); this call only prepares the next one
		let current = self.current?;
		let digest_level = self.digest_level;

		if current < self.digest_step {
			// stepping back would underflow => iteration is complete
			self.current = None;
		}
		else {
			let next = current - self.digest_step;
			if next == 0 || next < self.begin {
				// walked past the beginning of the searched range
				self.current = None;
			}
			else if next > self.current_begin {
				// still inside the current digest's coverage: keep stepping
				self.current = Some(next);
			} else {
				// left the current digest's coverage: recompute the digest
				// level/step for the remaining sub-range
				let (current, current_begin, digest_step, digest_level) = match
					lower_bound_max_digest(self.config, self.max, self.begin, next) {
					Err(err) => return Some(Err(err)),
					Ok(range) => range,
				};

				self.current = Some(current);
				self.current_begin = current_begin;
				self.digest_step = digest_step;
				self.digest_level = digest_level;
			}
		}

		Some(Ok((current, digest_level)))
	}
}
|
||||
|
||||
/// Drilldown iterator - receives 'digest points' from surface iterator and explores
/// every point until extrinsic is found.
pub struct DrilldownIteratorEssence<'a, RS: 'a + Storage<H>, S: 'a + Storage<H>, H: Hasher> {
	/// Key whose changes are being searched for.
	key: &'a [u8],
	/// Storage used to fetch changes trie roots by block number.
	roots_storage: &'a RS,
	/// Storage passed to the caller-provided trie reader for node lookups.
	storage: &'a S,
	/// Iterator over top-level digest blocks of the searched range.
	surface: SurfaceIterator<'a>,

	/// Queued (block, extrinsic index) results, drained before any further exploration.
	extrinsics: VecDeque<(u64, u32)>,
	/// Queued (block, digest level) pairs still to be drilled into.
	blocks: VecDeque<(u64, u32)>,

	// H is only used through the trie_reader callback signature
	_hasher: ::std::marker::PhantomData<H>,
}

impl<'a, RS: 'a + Storage<H>, S: Storage<H>, H: Hasher> DrilldownIteratorEssence<'a, RS, S, H> {
	/// Advance the iterator, reading trie values through `trie_reader`.
	///
	/// `trie_reader` abstracts how values are fetched so the same essence
	/// serves both plain and proof-recording iteration.
	pub fn next<F>(&mut self, trie_reader: F) -> Option<Result<(u64, u32), String>>
		where
			F: FnMut(&S, H::Out, &[u8]) -> Result<Option<Vec<u8>>, String>,
	{
		// adapt Result<Option<_>> of do_next to the Option<Result<_>> shape
		// expected by Iterator::next implementations
		match self.do_next(trie_reader) {
			Ok(Some(res)) => Some(Ok(res)),
			Ok(None) => None,
			Err(err) => Some(Err(err)),
		}
	}

	fn do_next<F>(&mut self, mut trie_reader: F) -> Result<Option<(u64, u32)>, String>
		where
			F: FnMut(&S, H::Out, &[u8]) -> Result<Option<Vec<u8>>, String>,
	{
		loop {
			// 1) flush any already-found extrinsic-level results first
			if let Some((block, extrinsic)) = self.extrinsics.pop_front() {
				return Ok(Some((block, extrinsic)));
			}

			// 2) drill into the next queued block, if any
			if let Some((block, level)) = self.blocks.pop_front() {
				// a missing root is not an error here: the block simply has
				// no changes trie and contributes nothing
				if let Some(trie_root) = self.roots_storage.root(block)? {
					// extrinsic indices where the key changed in this block
					let extrinsics_key = ExtrinsicIndex { block, key: self.key.to_vec() }.encode();
					let extrinsics = trie_reader(&self.storage, trie_root, &extrinsics_key);
					if let Some(extrinsics) = extrinsics? {
						let extrinsics: Option<ExtrinsicIndexValue> = Decode::decode(&mut &extrinsics[..]);
						if let Some(extrinsics) = extrinsics {
							// NOTE(review): values are reversed before being
							// appended — presumably to produce the expected
							// result ordering; confirm against callers/tests
							self.extrinsics.extend(extrinsics.into_iter().rev().map(|e| (block, e)));
						}
					}

					// lower-level digest blocks referencing the key
					let blocks_key = DigestIndex { block, key: self.key.to_vec() }.encode();
					let blocks = trie_reader(&self.storage, trie_root, &blocks_key);
					if let Some(blocks) = blocks? {
						let blocks: Option<DigestIndexValue> = Decode::decode(&mut &blocks[..]);
						if let Some(blocks) = blocks {
							// referenced blocks are one digest level below
							self.blocks.extend(blocks.into_iter().rev().map(|b| (b, level - 1)));
						}
					}
				}

				continue;
			}

			// 3) no queued work left - pull the next digest point from the surface
			match self.surface.next() {
				Some(Ok(block)) => self.blocks.push_back(block),
				Some(Err(err)) => return Err(err),
				None => return Ok(None),
			}
		}
	}
}
|
||||
|
||||
/// Exploring drilldown operator.
struct DrilldownIterator<'a, RS: 'a + Storage<H>, S: 'a + Storage<H>, H: Hasher, C: NodeCodec<H>> {
	/// Shared drilldown state machine.
	essence: DrilldownIteratorEssence<'a, RS, S, H>,
	// codec is only needed to parameterize the trie backend in next()
	_codec: ::std::marker::PhantomData<C>,
}

impl<'a, RS: 'a + Storage<H>, S: Storage<H>, H: Hasher, C: NodeCodec<H>> Iterator for DrilldownIterator<'a, RS, S, H, C> where H::Out: HeapSizeOf {
	type Item = Result<(u64, u32), String>;

	fn next(&mut self) -> Option<Self::Item> {
		// plain (non-proving) reads: fetch values directly from a trie
		// backend constructed over the adapter-wrapped storage
		self.essence.next(|storage, root, key|
			TrieBackendEssence::<_, H, C>::new(TrieBackendAdapter::new(storage), root).storage(key))
	}
}
|
||||
|
||||
/// Proving drilldown iterator.
///
/// Works like `DrilldownIterator`, but records every trie node read during
/// the drilldown so that a proof can be extracted via `extract_proof`.
struct ProvingDrilldownIterator<'a, RS: 'a + Storage<H>, S: 'a + Storage<H>, H: Hasher, C: NodeCodec<H>> {
	// Shared drilldown state machine (queues, surface iterator, key).
	essence: DrilldownIteratorEssence<'a, RS, S, H>,
	// Recorder of all trie nodes touched; borrowed mutably once per `next` call.
	proof_recorder: RefCell<Recorder<H::Out>>,
	// Ties the iterator to the node codec used when reading the tries.
	_codec: ::std::marker::PhantomData<C>,
}
|
||||
|
||||
impl<'a, RS: 'a + Storage<H>, S: Storage<H>, H: Hasher, C: NodeCodec<H>> ProvingDrilldownIterator<'a, RS, S, H, C> {
	/// Consume the iterator, extracting the gathered proof in lexicographical order
	/// by value.
	///
	/// Only nodes that were actually read while iterating are included, so the
	/// proof is minimal for the performed drilldown.
	pub fn extract_proof(self) -> Vec<Vec<u8>> {
		self.proof_recorder.into_inner().drain()
			.into_iter()
			.map(|n| n.data.to_vec())
			.collect()
	}
}
|
||||
|
||||
impl<'a, RS: 'a + Storage<H>, S: Storage<H>, H: Hasher, C: NodeCodec<H>> Iterator for ProvingDrilldownIterator<'a, RS, S, H, C> where H::Out: HeapSizeOf {
	/// (block number, extrinsic index) pair, or a storage/decoding error.
	type Item = Result<(u64, u32), String>;

	fn next(&mut self) -> Option<Self::Item> {
		// Borrow the recorder once per `next` call; the expect is safe because
		// the closure below (the only other user) never re-enters `next`.
		let proof_recorder = &mut *self.proof_recorder.try_borrow_mut()
			.expect("only fails when already borrowed; storage() is non-reentrant; qed");
		// Same read path as the plain iterator, but through a proving backend
		// that records every node it touches into `proof_recorder`.
		self.essence.next(|storage, root, key|
			ProvingBackendEssence::<_, H, C> {
				backend: &TrieBackendEssence::new(TrieBackendAdapter::new(storage), root),
				proof_recorder,
			}.storage(key))
	}
}
|
||||
|
||||
/// Returns surface iterator for given range of blocks.
///
/// `[begin; end]` is the range of blocks the caller is interested in; `max` is
/// the upper bound of blocks that may be touched (ranges with `end > max` are
/// rejected by `lower_bound_max_digest`). The surface iterator yields the
/// top-level digest blocks that cover the requested range.
fn surface_iterator<'a>(config: &'a Configuration, max: u64, begin: u64, end: u64) -> Result<SurfaceIterator<'a>, String> {
	// Find the highest-level digest block covering `end` (and, ideally, the
	// whole range); it becomes the iterator's starting point.
	let (current, current_begin, digest_step, digest_level) = lower_bound_max_digest(config, max, begin, end)?;
	Ok(SurfaceIterator {
		config,
		begin,
		max,
		current: Some(current),
		current_begin,
		digest_step,
		digest_level,
	})
}
|
||||
|
||||
/// Returns parameters of highest level digest block that includes the end of given range
|
||||
/// and tends to include the whole range.
|
||||
fn lower_bound_max_digest(
|
||||
config: &Configuration,
|
||||
max: u64,
|
||||
begin: u64,
|
||||
end: u64,
|
||||
) -> Result<(u64, u64, u64, u32), String> {
|
||||
if end > max || begin > end {
|
||||
return Err("invalid changes range".into());
|
||||
}
|
||||
|
||||
let mut digest_level = 0u32;
|
||||
let mut digest_step = 1u64;
|
||||
let mut digest_interval = 0u64;
|
||||
let mut current = end;
|
||||
let mut current_begin = begin;
|
||||
if begin != end {
|
||||
while digest_level != config.digest_levels {
|
||||
let new_digest_level = digest_level + 1;
|
||||
let new_digest_step = digest_step * config.digest_interval;
|
||||
let new_digest_interval = config.digest_interval * {
|
||||
if digest_interval == 0 { 1 } else { digest_interval }
|
||||
};
|
||||
let new_digest_begin = ((current - 1) / new_digest_interval) * new_digest_interval;
|
||||
let new_digest_end = new_digest_begin + new_digest_interval;
|
||||
let new_current = new_digest_begin + new_digest_interval;
|
||||
|
||||
if new_digest_end > max {
|
||||
if begin < new_digest_begin {
|
||||
current_begin = new_digest_begin;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
digest_level = new_digest_level;
|
||||
digest_step = new_digest_step;
|
||||
digest_interval = new_digest_interval;
|
||||
current = new_current;
|
||||
current_begin = new_digest_begin;
|
||||
|
||||
if new_digest_begin <= begin && new_digest_end >= end {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok((
|
||||
current,
|
||||
current_begin,
|
||||
digest_step,
|
||||
digest_level,
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
	use primitives::{Blake2Hasher, RlpCodec};
	use changes_trie::input::InputPair;
	use changes_trie::storage::InMemoryStorage;
	use super::*;

	/// Builds a storage with 16 blocks, digest interval 4 and two digest
	/// levels; key `[42]` changes at blocks 3 (extrinsic 0), 6 (extrinsic 3)
	/// and 8 (extrinsics 1 and 2).
	fn prepare_for_drilldown() -> (Configuration, InMemoryStorage<Blake2Hasher>) {
		let config = Configuration { digest_interval: 4, digest_levels: 2 };
		let backend = InMemoryStorage::with_inputs::<RlpCodec>(vec![
			// digest: 1..4 => [(3, 0)]
			(1, vec![]),
			(2, vec![]),
			(3, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 3, key: vec![42] }, vec![0]),
			]),
			(4, vec![
				InputPair::DigestIndex(DigestIndex { block: 4, key: vec![42] }, vec![3]),
			]),
			// digest: 5..8 => [(6, 3), (8, 1+2)]
			(5, vec![]),
			(6, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 6, key: vec![42] }, vec![3]),
			]),
			(7, vec![]),
			(8, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 8, key: vec![42] }, vec![1, 2]),
				InputPair::DigestIndex(DigestIndex { block: 8, key: vec![42] }, vec![6]),
			]),
			// digest: 9..12 => []
			(9, vec![]),
			(10, vec![]),
			(11, vec![]),
			(12, vec![]),
			// digest: 0..16 => [4, 8]
			(13, vec![]),
			(14, vec![]),
			(15, vec![]),
			(16, vec![
				InputPair::DigestIndex(DigestIndex { block: 16, key: vec![42] }, vec![4, 8]),
			]),
		]);

		(config, backend)
	}

	#[test]
	fn drilldown_iterator_works() {
		let (config, storage) = prepare_for_drilldown();
		let drilldown_result = key_changes::<InMemoryStorage<Blake2Hasher>, Blake2Hasher, RlpCodec>(
			&config, &storage, 0, 100, 1000, &[42]);

		// Changes are reported in descending (block, extrinsic) order.
		assert_eq!(drilldown_result, Ok(vec![(8, 2), (8, 1), (6, 3), (3, 0)]));
	}

	#[test]
	fn drilldown_iterator_fails_when_storage_fails() {
		let (config, storage) = prepare_for_drilldown();
		// Wipe trie nodes (roots are kept) so that every trie read fails.
		storage.clear_storage();

		assert!(key_changes::<InMemoryStorage<Blake2Hasher>, Blake2Hasher, RlpCodec>(
			&config, &storage, 0, 100, 1000, &[42]).is_err());
	}

	#[test]
	fn drilldown_iterator_fails_when_range_is_invalid() {
		let (config, storage) = prepare_for_drilldown();
		// end (100) > max (50)
		assert!(key_changes::<InMemoryStorage<Blake2Hasher>, Blake2Hasher, RlpCodec>(
			&config, &storage, 0, 100, 50, &[42]).is_err());
		// begin (20) > end (10)
		assert!(key_changes::<InMemoryStorage<Blake2Hasher>, Blake2Hasher, RlpCodec>(
			&config, &storage, 20, 10, 100, &[42]).is_err());
	}


	#[test]
	fn proving_drilldown_iterator_works() {
		// happens on remote full node:

		// create drilldown iterator that records all trie nodes during drilldown
		let (remote_config, remote_storage) = prepare_for_drilldown();
		let remote_proof = key_changes_proof::<InMemoryStorage<Blake2Hasher>, Blake2Hasher, RlpCodec>(
			&remote_config, &remote_storage,
			0, 100, 1000, &[42]).unwrap();

		// happens on local light node:

		// create drilldown iterator that works the same, but only depends on trie
		// nodes from the proof (local storage is wiped first)
		let (local_config, local_storage) = prepare_for_drilldown();
		local_storage.clear_storage();
		let local_result = key_changes_proof_check::<InMemoryStorage<Blake2Hasher>, Blake2Hasher, RlpCodec>(
			&local_config, &local_storage, remote_proof,
			0, 100, 1000, &[42]);

		// check that drilldown result is the same as if it was happening at the full node
		assert_eq!(local_result, Ok(vec![(8, 2), (8, 1), (6, 3), (3, 0)]));
	}
}
|
||||
@@ -0,0 +1,149 @@
|
||||
// Copyright 2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Substrate.
|
||||
|
||||
// Substrate is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Substrate is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Different types of changes trie input pairs.
|
||||
|
||||
use codec::{Decode, Encode, Input, Output};
|
||||
|
||||
/// Key of { changed key => set of extrinsic indices } mapping.
///
/// Serialized with a leading tag byte `1` (see the `Encode` impl), which is
/// how `InputKey::decode` distinguishes it from `DigestIndex`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExtrinsicIndex {
	/// Block at which this key has been inserted in the trie.
	pub block: u64,
	/// Storage key this node is responsible for.
	pub key: Vec<u8>,
}

/// Value of { changed key => set of extrinsic indices } mapping.
pub type ExtrinsicIndexValue = Vec<u32>;
|
||||
|
||||
/// Key of { changed key => block/digest block numbers } mapping.
///
/// Serialized with a leading tag byte `2` (see the `Encode` impl), which is
/// how `InputKey::decode` distinguishes it from `ExtrinsicIndex`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct DigestIndex {
	/// Block at which this key has been inserted in the trie.
	pub block: u64,
	/// Storage key this node is responsible for.
	pub key: Vec<u8>,
}

/// Value of { changed key => block/digest block numbers } mapping.
pub type DigestIndexValue = Vec<u64>;
|
||||
|
||||
/// Single input pair of changes trie.
///
/// A key together with its value; convertible into the encoded
/// (trie key, trie value) form for insertion into the changes trie.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum InputPair {
	/// Element of { key => set of extrinsics where key has been changed } element mapping.
	ExtrinsicIndex(ExtrinsicIndex, ExtrinsicIndexValue),
	/// Element of { key => set of blocks/digest blocks where key has been changed } element mapping.
	DigestIndex(DigestIndex, DigestIndexValue),
}
|
||||
|
||||
/// Single input key of changes trie.
///
/// The key part of an `InputPair`, without its value.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum InputKey {
	/// Key of { key => set of extrinsics where key has been changed } element mapping.
	ExtrinsicIndex(ExtrinsicIndex),
	/// Key of { key => set of blocks/digest blocks where key has been changed } element mapping.
	DigestIndex(DigestIndex),
}
|
||||
|
||||
impl Into<(Vec<u8>, Vec<u8>)> for InputPair {
|
||||
fn into(self) -> (Vec<u8>, Vec<u8>) {
|
||||
match self {
|
||||
InputPair::ExtrinsicIndex(key, value) => (key.encode(), value.encode()),
|
||||
InputPair::DigestIndex(key, value) => (key.encode(), value.encode()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<InputKey> for InputPair {
|
||||
fn into(self) -> InputKey {
|
||||
match self {
|
||||
InputPair::ExtrinsicIndex(key, _) => InputKey::ExtrinsicIndex(key),
|
||||
InputPair::DigestIndex(key, _) => InputKey::DigestIndex(key),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ExtrinsicIndex {
	/// Returns the serialized prefix (tag byte + encoded block number) shared
	/// by all `ExtrinsicIndex` keys of the given block, regardless of the
	/// storage key. Useful for prefix-scanning a block's entries.
	pub fn key_neutral_prefix(block: u64) -> Vec<u8> {
		// Must stay in sync with `ExtrinsicIndex`'s `Encode` impl: tag byte 1,
		// then the encoded block number.
		let mut prefix = vec![1];
		prefix.extend(block.encode());
		prefix
	}
}
|
||||
|
||||
impl Encode for ExtrinsicIndex {
	/// Serializes as: tag byte `1`, then block number, then storage key.
	fn encode_to<W: Output>(&self, dest: &mut W) {
		// Tag byte 1 distinguishes extrinsic index keys from digest index
		// keys (tag 2) when decoding an `InputKey`.
		dest.push_byte(1);
		self.block.encode_to(dest);
		self.key.encode_to(dest);
	}
}
|
||||
|
||||
impl DigestIndex {
	/// Returns the serialized prefix (tag byte + encoded block number) shared
	/// by all `DigestIndex` keys of the given block, regardless of the
	/// storage key. Useful for prefix-scanning a block's entries.
	pub fn key_neutral_prefix(block: u64) -> Vec<u8> {
		// Must stay in sync with `DigestIndex`'s `Encode` impl: tag byte 2,
		// then the encoded block number.
		let mut prefix = vec![2];
		prefix.extend(block.encode());
		prefix
	}
}
|
||||
|
||||
|
||||
impl Encode for DigestIndex {
	/// Serializes as: tag byte `2`, then block number, then storage key.
	fn encode_to<W: Output>(&self, dest: &mut W) {
		// Tag byte 2 distinguishes digest index keys from extrinsic index
		// keys (tag 1) when decoding an `InputKey`.
		dest.push_byte(2);
		self.block.encode_to(dest);
		self.key.encode_to(dest);
	}
}
|
||||
|
||||
impl Decode for InputKey {
	/// Decodes an input key by dispatching on the leading tag byte written by
	/// the corresponding `Encode` impls: 1 = extrinsic index, 2 = digest index.
	fn decode<I: Input>(input: &mut I) -> Option<Self> {
		match input.read_byte()? {
			1 => Some(InputKey::ExtrinsicIndex(ExtrinsicIndex {
				block: Decode::decode(input)?,
				key: Decode::decode(input)?,
			})),
			2 => Some(InputKey::DigestIndex(DigestIndex {
				block: Decode::decode(input)?,
				key: Decode::decode(input)?,
			})),
			// Unknown tag byte => not a valid input key.
			_ => None,
		}
	}
}
|
||||
|
||||
#[cfg(test)]
mod tests {
	use super::*;

	// Round-trip: an encoded ExtrinsicIndex must decode as the matching
	// InputKey variant (exercises the tag-byte dispatch).
	#[test]
	fn extrinsic_index_serialized_and_deserialized() {
		let original = ExtrinsicIndex { block: 777, key: vec![42] };
		let serialized = original.encode();
		let deserialized: InputKey = Decode::decode(&mut &serialized[..]).unwrap();
		assert_eq!(InputKey::ExtrinsicIndex(original), deserialized);
	}

	// Round-trip: an encoded DigestIndex must decode as the matching
	// InputKey variant (exercises the tag-byte dispatch).
	#[test]
	fn digest_index_serialized_and_deserialized() {
		let original = DigestIndex { block: 777, key: vec![42] };
		let serialized = original.encode();
		let deserialized: InputKey = Decode::decode(&mut &serialized[..]).unwrap();
		assert_eq!(InputKey::DigestIndex(original), deserialized);
	}
}
|
||||
@@ -0,0 +1,89 @@
|
||||
// Copyright 2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Substrate.
|
||||
|
||||
// Substrate is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Substrate is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Changes trie related structures and functions.
|
||||
//!
|
||||
//! Changes trie is a trie built of { storage key => extrinsics } pairs
|
||||
//! at the end of each block. For every changed storage key it contains
|
||||
//! a pair, mapping key to the set of extrinsics where it has been changed.
|
||||
//!
|
||||
//! Optionally, every N blocks, additional level1-digest nodes are appended
|
||||
//! to the changes trie, containing pairs { storage key => blocks }. For every
|
||||
//! storage key that has been changed in PREVIOUS N-1 blocks (except for genesis
|
||||
//! block) it contains a pair, mapping this key to the set of blocks where it
|
||||
//! has been changed.
|
||||
//!
|
||||
//! Optionally, every N^digest_level (where digest_level > 1) blocks, additional
|
||||
//! digest_level digest is created. It is built out of pairs { storage key => digest
|
||||
//! block }, containing entries for every storage key that has been changed in
|
||||
//! the last N*digest_level-1 blocks (except for genesis block), mapping these keys
|
||||
//! to the set of lower-level digest blocks.
|
||||
|
||||
mod build;
|
||||
mod build_iterator;
|
||||
mod changes_iterator;
|
||||
mod input;
|
||||
mod storage;
|
||||
|
||||
pub use self::storage::InMemoryStorage;
|
||||
pub use self::changes_iterator::{key_changes, key_changes_proof, key_changes_proof_check};
|
||||
|
||||
use hashdb::{DBValue, Hasher};
|
||||
use heapsize::HeapSizeOf;
|
||||
use patricia_trie::NodeCodec;
|
||||
use rlp::Encodable;
|
||||
use backend::Backend;
|
||||
use primitives;
|
||||
use changes_trie::build::prepare_input;
|
||||
use overlayed_changes::OverlayedChanges;
|
||||
use trie_backend_essence::TrieBackendStorage;
|
||||
|
||||
/// Changes that are made outside of extrinsics are marked with this index.
pub const NO_EXTRINSIC_INDEX: u32 = 0xffffffff;
|
||||
|
||||
/// Changes trie storage. Provides access to trie roots and trie nodes.
pub trait Storage<H: Hasher>: Send + Sync {
	/// Get changes trie root for given block.
	///
	/// Returns `Ok(None)` if no changes trie exists for the block;
	/// `Err` carries a storage error description.
	fn root(&self, block: u64) -> Result<Option<H::Out>, String>;

	/// Get a trie node by its hash.
	///
	/// Returns `Ok(None)` if the node is not present in the storage.
	fn get(&self, key: &H::Out) -> Result<Option<DBValue>, String>;
}

/// Changes trie configuration.
pub type Configuration = primitives::ChangesTrieConfiguration;
|
||||
|
||||
/// Compute the changes trie root and transaction for given block.
/// Returns None if there's no data to perform computation.
///
/// The returned transaction is the list of (encoded key, encoded value)
/// pairs to be inserted into the changes trie; the root is computed over
/// exactly those pairs.
pub fn compute_changes_trie_root<'a, B: Backend<H, C>, S: Storage<H>, H: Hasher, C: NodeCodec<H>>(
	backend: &B,
	storage: Option<&'a S>,
	changes: &OverlayedChanges,
	block: u64,
) -> Option<(H::Out, Vec<(Vec<u8>, Vec<u8>)>)>
	where
		&'a S: TrieBackendStorage<H>,
		H::Out: Ord + Encodable + HeapSizeOf,
{
	// Storage failures inside the runtime are unrecoverable: panic via expect
	// rather than propagating an error.
	let input_pairs = prepare_input::<B, S, H, C>(backend, storage, changes, block)
		.expect("storage is not allowed to fail within runtime")?;
	// Encode each input pair into its (trie key, trie value) form.
	let transaction = input_pairs.into_iter()
		.map(Into::into)
		.collect::<Vec<_>>();
	// Root of the trie built from the complete transaction set.
	let root = ::triehash::trie_root::<H, _, _, _>(transaction.iter().map(|(k, v)| (&*k, &*v)));

	Some((root, transaction))
}
|
||||
@@ -0,0 +1,118 @@
|
||||
// Copyright 2017 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Substrate.
|
||||
|
||||
// Substrate is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Substrate is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Changes trie storage utilities.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use hashdb::{Hasher, HashDB, DBValue};
|
||||
use heapsize::HeapSizeOf;
|
||||
use memorydb::MemoryDB;
|
||||
use parking_lot::RwLock;
|
||||
use changes_trie::Storage;
|
||||
use trie_backend_essence::TrieBackendStorage;
|
||||
|
||||
#[cfg(test)]
|
||||
use backend::insert_into_memory_db;
|
||||
#[cfg(test)]
|
||||
use patricia_trie::NodeCodec;
|
||||
#[cfg(test)]
|
||||
use changes_trie::input::InputPair;
|
||||
|
||||
/// In-memory implementation of changes trie storage.
///
/// Keeps both the per-block trie roots and the trie nodes in memory,
/// behind a lock so it can be shared across threads.
pub struct InMemoryStorage<H: Hasher> where H::Out: HeapSizeOf {
	data: RwLock<InMemoryStorageData<H>>,
}
|
||||
|
||||
/// Adapter for using changes trie storage as a TrieBackendEssence's storage.
pub struct TrieBackendAdapter<'a, H: Hasher, S: 'a + Storage<H>> {
	// Underlying changes trie storage all reads are delegated to.
	storage: &'a S,
	// Pins the hasher type without storing a hasher value.
	_hasher: ::std::marker::PhantomData<H>,
}
|
||||
|
||||
/// Lock-protected interior of `InMemoryStorage`.
struct InMemoryStorageData<H: Hasher> where H::Out: HeapSizeOf {
	// Changes trie root for each block that has one.
	roots: HashMap<u64, H::Out>,
	// Trie nodes of all stored changes tries.
	mdb: MemoryDB<H>,
}
|
||||
|
||||
impl<H: Hasher> InMemoryStorage<H> where H::Out: HeapSizeOf {
	/// Create the storage from given in-memory database.
	///
	/// The roots map starts empty; roots are added via `insert`.
	pub fn with_db(mdb: MemoryDB<H>) -> Self {
		Self {
			data: RwLock::new(InMemoryStorageData {
				roots: HashMap::new(),
				mdb,
			}),
		}
	}

	/// Create the storage with empty database.
	pub fn new() -> Self {
		Self::with_db(Default::default())
	}

	/// Create the storage pre-filled with the given changes trie input pairs,
	/// one changes trie per block. Blocks whose pairs yield no trie root get
	/// no entry in the roots map.
	#[cfg(test)]
	pub fn with_inputs<C: NodeCodec<H>>(inputs: Vec<(u64, Vec<InputPair>)>) -> Self {
		let mut mdb = MemoryDB::default();
		let mut roots = HashMap::new();
		for (block, pairs) in inputs {
			let root = insert_into_memory_db::<H, C, _>(&mut mdb, pairs.into_iter().map(Into::into));
			if let Some(root) = root {
				roots.insert(block, root);
			}
		}

		InMemoryStorage {
			data: RwLock::new(InMemoryStorageData {
				roots,
				mdb,
			}),
		}
	}

	/// Wipe all trie nodes while keeping the roots map intact — test helper
	/// for simulating missing/corrupted node storage.
	#[cfg(test)]
	pub fn clear_storage(&self) {
		self.data.write().mdb = MemoryDB::new();
	}

	/// Insert changes trie for given block.
	///
	/// Records the root and merges the trie's nodes into the shared database.
	pub fn insert(&self, block: u64, changes_trie_root: H::Out, trie: MemoryDB<H>) {
		let mut data = self.data.write();
		data.roots.insert(block, changes_trie_root);
		data.mdb.consolidate(trie);
	}
}
|
||||
|
||||
impl<H: Hasher> Storage<H> for InMemoryStorage<H> where H::Out: HeapSizeOf {
|
||||
fn root(&self, block: u64) -> Result<Option<H::Out>, String> {
|
||||
Ok(self.data.read().roots.get(&block).cloned())
|
||||
}
|
||||
|
||||
fn get(&self, key: &H::Out) -> Result<Option<DBValue>, String> {
|
||||
Ok(HashDB::<H>::get(&self.data.read().mdb, key))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, H: Hasher, S: 'a + Storage<H>> TrieBackendAdapter<'a, H, S> {
|
||||
pub fn new(storage: &'a S) -> Self {
|
||||
Self { storage, _hasher: Default::default() }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, H: Hasher, S: 'a + Storage<H>> TrieBackendStorage<H> for TrieBackendAdapter<'a, H, S> {
	/// Delegates node lookup directly to the wrapped changes trie storage.
	fn get(&self, key: &H::Out) -> Result<Option<DBValue>, String> {
		self.storage.get(key)
	}
}
|
||||
Reference in New Issue
Block a user