Light friendly storage tracking: changes trie + extending over ranges (#628)

* changes_trie

* changes_trie: continue

* changes_trie: adding tests

* fixed TODO

* removed obsolete ExtrinsicChanges

* encodable ChangesTrieConfiguration

* removed polkadot file

* fixed grumbles

* ext_storage_changes_root returns u32

* moved changes trie root to digest

* removed commented code

* read storage values from native code

* fixed grumbles

* fixed grumbles

* missing comma
This commit is contained in:
Svyatoslav Nikolsky
2018-09-18 10:14:41 +03:00
committed by Gav Wood
parent 24479cd7f5
commit 7fa337afbc
64 changed files with 3130 additions and 788 deletions
+1
View File
@@ -2731,6 +2731,7 @@ dependencies = [
"heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"hex-literal 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"memorydb 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-codec 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"patricia-trie 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+1
View File
@@ -26,6 +26,7 @@ substrate-telemetry = { path = "../telemetry" }
hashdb = "0.2.1"
patricia-trie = "0.2.1"
rlp = "0.2.4"
memorydb = "0.2.1"
[dev-dependencies]
substrate-test-client = { path = "../test-client" }
+158 -6
View File
@@ -57,7 +57,8 @@ use parking_lot::RwLock;
use primitives::{H256, AuthorityId, Blake2Hasher, RlpCodec};
use runtime_primitives::generic::BlockId;
use runtime_primitives::bft::Justification;
use runtime_primitives::traits::{Block as BlockT, Header as HeaderT, As, Hash, HashFor, NumberFor, Zero};
use runtime_primitives::traits::{Block as BlockT, Header as HeaderT, As, Hash, HashFor,
NumberFor, Zero, Digest, DigestItem};
use runtime_primitives::BuildStorage;
use state_machine::backend::Backend as StateBackend;
use executor::RuntimeInfo;
@@ -70,7 +71,7 @@ pub use state_db::PruningMode;
const FINALIZATION_WINDOW: u64 = 32;
/// DB-backed patricia trie state, transaction type is an overlay of changes to commit.
pub type DbState = state_machine::TrieBackend<Blake2Hasher, RlpCodec>;
pub type DbState = state_machine::TrieBackend<Arc<state_machine::Storage<Blake2Hasher>>, Blake2Hasher, RlpCodec>;
/// Database settings.
pub struct DatabaseSettings {
@@ -107,6 +108,7 @@ mod columns {
pub const HEADER: Option<u32> = Some(4);
pub const BODY: Option<u32> = Some(5);
pub const JUSTIFICATION: Option<u32> = Some(6);
pub const CHANGES_TRIE: Option<u32> = Some(7);
}
struct PendingBlock<Block: BlockT> {
@@ -230,6 +232,7 @@ impl<Block: BlockT> client::blockchain::Backend<Block> for BlockchainDb<Block> {
pub struct BlockImportOperation<Block: BlockT, H: Hasher> {
old_state: DbState,
updates: MemoryDB<H>,
changes_trie_updates: MemoryDB<H>,
pending_block: Option<PendingBlock<Block>>,
}
@@ -269,6 +272,11 @@ where Block: BlockT,
self.updates = update;
Ok(())
}
fn update_changes_trie(&mut self, update: MemoryDB<Blake2Hasher>) -> Result<(), client::error::Error> {
self.changes_trie_updates = update;
Ok(())
}
}
struct StorageDb<Block: BlockT> {
@@ -292,11 +300,55 @@ impl<Block: BlockT> state_db::HashDb for StorageDb<Block> {
}
}
/// Pseudo-storage for the genesis state: holds only the root hash of an
/// empty trie, so a `DbState` can be built before any state exists on disk.
struct DbGenesisStorage(pub H256);
impl DbGenesisStorage {
/// Computes the empty-trie root by instantiating a `TrieDBMut` over a
/// throwaway `MemoryDB`; the db itself is discarded, only the root is kept.
pub fn new() -> Self {
let mut root = H256::default();
let mut mdb = MemoryDB::<Blake2Hasher>::new();
state_machine::TrieDBMut::<Blake2Hasher, RlpCodec>::new(&mut mdb, &mut root);
DbGenesisStorage(root)
}
}
/// The genesis state is empty, so every node lookup legitimately yields
/// `Ok(None)` — there is nothing to read.
impl state_machine::Storage<Blake2Hasher> for DbGenesisStorage {
fn get(&self, _key: &H256) -> Result<Option<DBValue>, String> {
Ok(None)
}
}
/// Changes-trie storage backed by the key-value database.
/// Trie nodes live in `columns::CHANGES_TRIE`; per-block roots are read from
/// the `ChangesTrieRoot` digest item of the block header (see `root` below).
pub struct DbChangesTrieStorage<Block: BlockT> {
db: Arc<KeyValueDB>,
// Ties the storage to a concrete block type without storing one.
_phantom: ::std::marker::PhantomData<Block>,
}
impl<Block: BlockT> state_machine::ChangesTrieStorage<Blake2Hasher> for DbChangesTrieStorage<Block> {
/// Changes-trie root for block number `block`: read the header from the db,
/// decode it, and take the first `ChangesTrieRoot` digest item, if any.
/// Returns `Ok(None)` when the block or the digest item is absent.
fn root(&self, block: u64) -> Result<Option<H256>, String> {
Ok(read_db::<Block>(&*self.db, columns::BLOCK_INDEX, columns::HEADER, BlockId::Number(As::sa(block)))
.map_err(|err| format!("{}", err))
.and_then(|header| match header {
Some(header) => Block::Header::decode(&mut &header[..])
.ok_or_else(|| format!("Failed to parse header of block {}", block))
.map(Some),
None => Ok(None)
})?
.and_then(|header| header.digest().logs().iter()
.find(|log| log.as_changes_trie_root().is_some())
.and_then(DigestItem::as_changes_trie_root)
.map(|root| H256::from_slice(root.as_ref()))))
}
/// Fetch a changes-trie node by its hash from the CHANGES_TRIE column.
fn get(&self, key: &H256) -> Result<Option<DBValue>, String> {
self.db.get(columns::CHANGES_TRIE, &key[..])
.map_err(|err| format!("{}", err))
}
}
/// Disk backend. Keeps data in a key-value store. In archive mode, trie nodes are kept from all blocks.
/// Otherwise, trie nodes are kept only from the most recent block.
pub struct Backend<Block: BlockT> {
storage: Arc<StorageDb<Block>>,
tries_change_storage: DbChangesTrieStorage<Block>,
blockchain: BlockchainDb<Block>,
finalization_window: u64,
}
@@ -323,12 +375,17 @@ impl<Block: BlockT> Backend<Block> {
let map_e = |e: state_db::Error<io::Error>| ::client::error::Error::from(format!("State database error: {:?}", e));
let state_db: StateDb<Block::Hash, H256> = StateDb::new(pruning, &StateMetaDb(&*db)).map_err(map_e)?;
let storage_db = StorageDb {
db,
db: db.clone(),
state_db,
};
let tries_change_storage = DbChangesTrieStorage {
db,
_phantom: Default::default(),
};
Ok(Backend {
storage: Arc::new(storage_db),
tries_change_storage: tries_change_storage,
blockchain,
finalization_window,
})
@@ -350,10 +407,17 @@ fn apply_state_commit(transaction: &mut DBTransaction, commit: state_db::CommitS
}
}
/// Queue every node of the new changes trie for insertion into the
/// CHANGES_TRIE column of the given database transaction.
/// NOTE(review): the MemoryDB reference count is discarded, so entries are
/// written unconditionally — presumably the overlay only ever contains
/// inserts for a fresh trie; confirm no pruning/refcounting is expected here.
fn apply_changes_trie_commit(transaction: &mut DBTransaction, mut commit: MemoryDB<Blake2Hasher>) {
for (key, (val, _)) in commit.drain() {
transaction.put(columns::CHANGES_TRIE, &key[..], &val);
}
}
impl<Block> client::backend::Backend<Block, Blake2Hasher, RlpCodec> for Backend<Block> where Block: BlockT {
type BlockImportOperation = BlockImportOperation<Block, Blake2Hasher>;
type Blockchain = BlockchainDb<Block>;
type State = DbState;
type ChangesTrieStorage = DbChangesTrieStorage<Block>;
fn begin_operation(&self, block: BlockId<Block>) -> Result<Self::BlockImportOperation, client::error::Error> {
let state = self.state_at(block)?;
@@ -361,6 +425,7 @@ impl<Block> client::backend::Backend<Block, Blake2Hasher, RlpCodec> for Backend<
pending_block: None,
old_state: state,
updates: MemoryDB::default(),
changes_trie_updates: MemoryDB::default(),
})
}
@@ -393,6 +458,7 @@ impl<Block> client::backend::Backend<Block, Blake2Hasher, RlpCodec> for Backend<
let number_u64 = number.as_().into();
let commit = self.storage.state_db.insert_block(&hash, number_u64, &pending_block.header.parent_hash(), changeset);
apply_state_commit(&mut transaction, commit);
apply_changes_trie_commit(&mut transaction, operation.changes_trie_updates);
//finalize an older block
if number_u64 > self.finalization_window {
@@ -420,6 +486,10 @@ impl<Block> client::backend::Backend<Block, Blake2Hasher, RlpCodec> for Backend<
Ok(())
}
fn changes_trie_storage(&self) -> Option<&Self::ChangesTrieStorage> {
Some(&self.tries_change_storage)
}
fn revert(&self, n: NumberFor<Block>) -> Result<NumberFor<Block>, client::error::Error> {
use client::blockchain::HeaderBackend;
let mut best = self.blockchain.info()?.best_number;
@@ -459,15 +529,18 @@ impl<Block> client::backend::Backend<Block, Blake2Hasher, RlpCodec> for Backend<
// special case for genesis initialization
match block {
BlockId::Hash(h) if h == Default::default() =>
return Ok(DbState::with_storage_for_genesis(self.storage.clone())),
BlockId::Hash(h) if h == Default::default() => {
let genesis_storage = DbGenesisStorage::new();
let root = genesis_storage.0.clone();
return Ok(DbState::new(Arc::new(genesis_storage), root));
},
_ => {}
}
match self.blockchain.header(block) {
Ok(Some(ref hdr)) if !self.storage.state_db.is_pruned(hdr.number().as_()) => {
let root = H256::from_slice(hdr.state_root().as_ref());
Ok(DbState::with_storage(self.storage.clone(), root))
Ok(DbState::new(self.storage.clone(), root))
},
Err(e) => Err(e),
_ => Err(client::error::ErrorKind::UnknownBlock(format!("{:?}", block)).into()),
@@ -486,6 +559,7 @@ mod tests {
use client::backend::BlockImportOperation as Op;
use client::blockchain::HeaderBackend as BlockchainHeaderBackend;
use runtime_primitives::testing::{Header, Block as RawBlock};
use state_machine::{TrieMut, TrieDBMut, ChangesTrieStorage};
type Block = RawBlock<u64>;
@@ -711,4 +785,82 @@ mod tests {
assert!(backend.storage.db.get(::columns::STATE, &key.0[..]).unwrap().is_none());
}
}
#[test]
fn changes_trie_storage_works() {
let backend = Backend::<Block>::new_test(1000);
// Build a changes trie from the given (key, value) pairs; returns the trie
// root together with the MemoryDB holding the freshly created trie nodes.
let prepare_changes = |changes: Vec<(Vec<u8>, Vec<u8>)>| {
let mut changes_root = H256::default();
let mut changes_trie_update = MemoryDB::<Blake2Hasher>::new();
{
let mut trie = TrieDBMut::<Blake2Hasher, RlpCodec>::new(
&mut changes_trie_update,
&mut changes_root
);
for (key, value) in changes {
trie.insert(&key, &value).unwrap();
}
}
(changes_root, changes_trie_update)
};
// Import a block whose header carries the changes-trie root as a digest
// item, and commit the trie nodes through the import operation.
let insert_header = |number: u64, parent_hash: H256, changes: Vec<(Vec<u8>, Vec<u8>)>| {
use runtime_primitives::generic::DigestItem;
use runtime_primitives::testing::Digest;
let (changes_root, changes_trie_update) = prepare_changes(changes);
let digest = Digest {
logs: vec![
DigestItem::ChangesTrieRoot(changes_root),
],
};
let header = Header {
number,
parent_hash,
state_root: Default::default(),
digest,
extrinsics_root: Default::default(),
};
let header_hash = header.hash();
// Genesis (block 0) is keyed by the all-zero hash; later blocks build on
// their parent's number.
let block_id = if number == 0 {
BlockId::Hash(Default::default())
} else {
BlockId::Number(number - 1)
};
let mut op = backend.begin_operation(block_id).unwrap();
op.set_block_data(header, None, None, true).unwrap();
op.update_changes_trie(changes_trie_update).unwrap();
backend.commit_operation(op).unwrap();
header_hash
};
// Verify that both the root (via the header digest) and every trie node
// (via the CHANGES_TRIE column) can be read back for the given block.
let check_changes = |backend: &Backend<Block>, block: u64, changes: Vec<(Vec<u8>, Vec<u8>)>| {
let (changes_root, mut changes_trie_update) = prepare_changes(changes);
assert_eq!(backend.tries_change_storage.root(block), Ok(Some(changes_root)));
for (key, (val, _)) in changes_trie_update.drain() {
assert_eq!(backend.changes_trie_storage().unwrap().get(&key), Ok(Some(val)));
}
};
let changes0 = vec![(b"key_at_0".to_vec(), b"val_at_0".to_vec())];
let changes1 = vec![
(b"key_at_1".to_vec(), b"val_at_1".to_vec()),
(b"another_key_at_1".to_vec(), b"another_val_at_1".to_vec()),
];
let changes2 = vec![(b"key_at_2".to_vec(), b"val_at_2".to_vec())];
let block0 = insert_header(0, Default::default(), changes0.clone());
let block1 = insert_header(1, block0, changes1.clone());
let _ = insert_header(2, block1, changes2.clone());
// check that the storage contains tries for all blocks
check_changes(&backend, 0, changes0);
check_changes(&backend, 1, changes1);
check_changes(&backend, 2, changes2);
}
}
+1 -1
View File
@@ -32,7 +32,7 @@ use DatabaseSettings;
/// Number of columns in the db. Must be the same for both full && light dbs.
/// Otherwise RocksDb will fail to open database && check its type.
pub const NUM_COLUMNS: u32 = 7;
pub const NUM_COLUMNS: u32 = 8;
/// Meta column. The set of keys in the column is shared by full && light storages.
pub const COLUMN_META: Option<u32> = Some(0);
+8
View File
@@ -22,8 +22,10 @@ use runtime_primitives::bft::Justification;
use runtime_primitives::generic::BlockId;
use runtime_primitives::traits::{Block as BlockT, NumberFor};
use state_machine::backend::Backend as StateBackend;
use state_machine::ChangesTrieStorage as StateChangesTrieStorage;
use patricia_trie::NodeCodec;
use hashdb::Hasher;
use memorydb::MemoryDB;
/// Block insertion operation. Keeps hold of the inserted block state and data.
pub trait BlockImportOperation<Block, H, C>
@@ -53,6 +55,8 @@ where
fn update_storage(&mut self, update: <Self::State as StateBackend<H, C>>::Transaction) -> error::Result<()>;
/// Inject storage data into the database replacing any existing data.
fn reset_storage<I: Iterator<Item=(Vec<u8>, Vec<u8>)>>(&mut self, iter: I) -> error::Result<()>;
/// Inject changes trie data into the database.
fn update_changes_trie(&mut self, update: MemoryDB<H>) -> error::Result<()>;
}
/// Client backend. Manages the data layer.
@@ -75,6 +79,8 @@ where
type Blockchain: ::blockchain::Backend<Block>;
/// Associated state backend type.
type State: StateBackend<H, C>;
/// Changes trie storage.
type ChangesTrieStorage: StateChangesTrieStorage<H>;
/// Begin a new block insertion transaction with given parent block id.
/// When constructing the genesis, this is called with all-zero hash.
@@ -83,6 +89,8 @@ where
fn commit_operation(&self, transaction: Self::BlockImportOperation) -> error::Result<()>;
/// Returns reference to blockchain backend.
fn blockchain(&self) -> &Self::Blockchain;
/// Returns reference to changes trie storage.
fn changes_trie_storage(&self) -> Option<&Self::ChangesTrieStorage>;
/// Returns state backend with post-state of given block.
fn state_at(&self, block: BlockId<Block>) -> error::Result<Self::State>;
/// Attempts to revert the chain by `n` blocks. Returns the number of blocks that were
+2 -2
View File
@@ -94,7 +94,7 @@ where
/// the error. Otherwise, it will return a mutable reference to self (in order to chain).
pub fn push(&mut self, xt: <Block as BlockT>::Extrinsic) -> error::Result<()> {
match self.executor.call_at_state(&self.state, &mut self.changes, "apply_extrinsic", &xt.encode(), native_when_possible()) {
Ok((result, _)) => {
Ok((result, _, _)) => {
match ApplyResult::decode(&mut result.as_slice()) {
Some(Ok(ApplyOutcome::Success)) | Some(Ok(ApplyOutcome::Fail)) => {
self.extrinsics.push(xt);
@@ -120,7 +120,7 @@ where
/// Consume the builder to return a valid `Block` containing all pushed extrinsics.
pub fn bake(mut self) -> error::Result<Block> {
let (output, _) = self.executor.call_at_state(
let (output, _, _) = self.executor.call_at_state(
&self.state,
&mut self.changes,
"finalise_block",
+7 -5
View File
@@ -24,6 +24,7 @@ use executor::{RuntimeVersion, RuntimeInfo};
use patricia_trie::NodeCodec;
use hashdb::Hasher;
use rlp::Encodable;
use memorydb::MemoryDB;
use codec::Decode;
use primitives::{Blake2Hasher, RlpCodec};
@@ -76,7 +77,7 @@ where
method: &str,
call_data: &[u8],
manager: ExecutionManager<F>
) -> Result<(Vec<u8>, S::Transaction), error::Error>;
) -> Result<(Vec<u8>, S::Transaction, Option<MemoryDB<H>>), error::Error>;
/// Execute a call to a contract on top of given state, gathering execution proof.
///
@@ -129,7 +130,7 @@ where
call_data: &[u8],
) -> error::Result<CallResult> {
let mut changes = OverlayedChanges::default();
let (return_data, _) = self.call_at_state(
let (return_data, _, _) = self.call_at_state(
&self.backend.state_at(*id)?,
&mut changes,
method,
@@ -152,7 +153,8 @@ where
.and_then(|v| u64::decode(&mut &v[..]))
.unwrap_or(8) as usize;
self.executor.runtime_version(&mut Ext::new(&mut overlay, &state), heap_pages, &code)
let mut ext = Ext::new(&mut overlay, &state, self.backend.changes_trie_storage());
self.executor.runtime_version(&mut ext, heap_pages, &code)
.ok_or(error::ErrorKind::VersionInvalid.into())
}
@@ -165,9 +167,10 @@ where
method: &str,
call_data: &[u8],
manager: ExecutionManager<F>,
) -> error::Result<(Vec<u8>, S::Transaction)> {
) -> error::Result<(Vec<u8>, S::Transaction, Option<MemoryDB<Blake2Hasher>>)> {
state_machine::execute_using_consensus_failure_handler(
state,
self.backend.changes_trie_storage(),
changes,
&self.executor,
method,
@@ -189,7 +192,6 @@ where
method,
call_data,
)
.map(|(result, proof, _)| (result, proof))
.map_err(Into::into)
}
+10 -6
View File
@@ -24,7 +24,7 @@ use runtime_primitives::{bft::Justification, generic::{BlockId, SignedBlock, Blo
use runtime_primitives::traits::{Block as BlockT, Header as HeaderT, Zero, One, As, NumberFor};
use runtime_primitives::BuildStorage;
use substrate_metadata::JsonMetadataDecodable;
use primitives::{Blake2Hasher, RlpCodec};
use primitives::{Blake2Hasher, RlpCodec, H256};
use primitives::storage::{StorageKey, StorageData};
use codec::{Encode, Decode};
use state_machine::{
@@ -170,6 +170,7 @@ pub fn new_in_mem<E, Block, S>(
E: CodeExecutor<Blake2Hasher> + RuntimeInfo,
S: BuildStorage,
Block: BlockT,
H256: From<Block::Hash>,
{
let backend = Arc::new(in_mem::Backend::new());
let executor = LocalCallExecutor::new(backend.clone(), executor);
@@ -359,7 +360,7 @@ impl<B, E, Block> Client<B, E, Block> where
&header.encode(),
execution_manager()
)?;
let (r, _) = args.using_encoded(|input|
let (r, _, _) = args.using_encoded(|input|
self.executor().call_at_state(
&state,
&mut overlay,
@@ -436,7 +437,7 @@ impl<B, E, Block> Client<B, E, Block> where
}
let mut transaction = self.backend.begin_operation(BlockId::Hash(parent_hash))?;
let (storage_update, storage_changes) = match transaction.state()? {
let (storage_update, changes_update, storage_changes) = match transaction.state()? {
Some(transaction_state) => {
let mut overlay = Default::default();
let mut r = self.executor.call_at_state(
@@ -462,11 +463,11 @@ impl<B, E, Block> Client<B, E, Block> where
}),
},
);
let (_, storage_update) = r?;
let (_, storage_update, changes_update) = r?;
overlay.commit_prospective();
(Some(storage_update), Some(overlay.into_committed()))
(Some(storage_update), Some(changes_update), Some(overlay.into_committed()))
},
None => (None, None)
None => (None, None, None)
};
let is_new_best = header.number() == &(self.backend.blockchain().info()?.best_number + One::one());
@@ -477,6 +478,9 @@ impl<B, E, Block> Client<B, E, Block> where
if let Some(storage_update) = storage_update {
transaction.update_storage(storage_update)?;
}
if let Some(Some(changes_update)) = changes_update {
transaction.update_changes_trie(changes_update)?;
}
self.backend.commit_operation(transaction)?;
if origin == BlockOrigin::NetworkBroadcast || origin == BlockOrigin::Own || origin == BlockOrigin::ConsensusBroadcast {
+15 -3
View File
@@ -45,7 +45,7 @@ mod tests {
use codec::{Encode, Decode, Joiner};
use keyring::Keyring;
use executor::NativeExecutionDispatch;
use state_machine::{execute, OverlayedChanges, ExecutionStrategy};
use state_machine::{execute, OverlayedChanges, ExecutionStrategy, InMemoryChangesTrieStorage};
use state_machine::backend::InMemory;
use test_client;
use test_client::runtime::genesismap::{GenesisConfig, additional_storage_with_genesis};
@@ -58,7 +58,13 @@ mod tests {
NativeExecutionDispatch::new()
}
fn construct_block(backend: &InMemory<Blake2Hasher, RlpCodec>, number: BlockNumber, parent_hash: Hash, state_root: Hash, txs: Vec<Transfer>) -> (Vec<u8>, Hash) {
fn construct_block(
backend: &InMemory<Blake2Hasher, RlpCodec>,
number: BlockNumber,
parent_hash: Hash,
state_root: Hash,
txs: Vec<Transfer>
) -> (Vec<u8>, Hash) {
use triehash::ordered_trie_root;
let transactions = txs.into_iter().map(|tx| {
@@ -83,6 +89,7 @@ mod tests {
execute(
backend,
Some(&InMemoryChangesTrieStorage::new()),
&mut overlay,
&executor(),
"initialise_block",
@@ -93,6 +100,7 @@ mod tests {
for tx in transactions.iter() {
execute(
backend,
Some(&InMemoryChangesTrieStorage::new()),
&mut overlay,
&executor(),
"apply_extrinsic",
@@ -101,8 +109,9 @@ mod tests {
).unwrap();
}
let (ret_data, _) = execute(
let (ret_data, _, _) = execute(
backend,
Some(&InMemoryChangesTrieStorage::new()),
&mut overlay,
&executor(),
"finalise_block",
@@ -145,6 +154,7 @@ mod tests {
let mut overlay = OverlayedChanges::default();
let _ = execute(
&backend,
Some(&InMemoryChangesTrieStorage::new()),
&mut overlay,
&executor(),
"execute_block",
@@ -168,6 +178,7 @@ mod tests {
let mut overlay = OverlayedChanges::default();
let _ = execute(
&backend,
Some(&InMemoryChangesTrieStorage::new()),
&mut overlay,
&executor(),
"execute_block",
@@ -192,6 +203,7 @@ mod tests {
let mut overlay = OverlayedChanges::default();
let _ = execute(
&backend,
Some(&InMemoryChangesTrieStorage::new()),
&mut overlay,
&Executor::new(),
"execute_block",
+37 -7
View File
@@ -24,13 +24,16 @@ use backend;
use light;
use primitives::AuthorityId;
use runtime_primitives::generic::BlockId;
use runtime_primitives::traits::{Block as BlockT, Header as HeaderT, Zero, NumberFor, As};
use runtime_primitives::traits::{Block as BlockT, Header as HeaderT, Zero,
NumberFor, As, Digest, DigestItem};
use runtime_primitives::bft::Justification;
use blockchain::{self, BlockStatus};
use state_machine::backend::{Backend as StateBackend, InMemory};
use state_machine::InMemoryChangesTrieStorage;
use patricia_trie::NodeCodec;
use hashdb::Hasher;
use heapsize::HeapSizeOf;
use memorydb::MemoryDB;
struct PendingBlock<B: BlockT> {
block: StoredBlock<B>,
@@ -85,9 +88,9 @@ impl<B: BlockT> StoredBlock<B> {
#[derive(Clone)]
struct BlockchainStorage<Block: BlockT> {
blocks: HashMap<Block::Hash, StoredBlock<Block>>,
hashes: HashMap<<<Block as BlockT>::Header as HeaderT>::Number, Block::Hash>,
hashes: HashMap<NumberFor<Block>, Block::Hash>,
best_hash: Block::Hash,
best_number: <<Block as BlockT>::Header as HeaderT>::Number,
best_number: NumberFor<Block>,
genesis_hash: Block::Hash,
cht_roots: HashMap<NumberFor<Block>, Block::Hash>,
}
@@ -275,6 +278,7 @@ pub struct BlockImportOperation<Block: BlockT, H: Hasher, C: NodeCodec<H>> {
pending_authorities: Option<Vec<AuthorityId>>,
old_state: InMemory<H, C>,
new_state: Option<InMemory<H, C>>,
changes_trie_update: Option<MemoryDB<H>>,
}
impl<Block, H, C> backend::BlockImportOperation<Block, H, C> for BlockImportOperation<Block, H, C>
@@ -314,6 +318,11 @@ where
Ok(())
}
fn update_changes_trie(&mut self, update: MemoryDB<H>) -> error::Result<()> {
self.changes_trie_update = Some(update);
Ok(())
}
fn reset_storage<I: Iterator<Item=(Vec<u8>, Vec<u8>)>>(&mut self, iter: I) -> error::Result<()> {
self.new_state = Some(InMemory::from(iter.collect::<HashMap<_, _>>()));
Ok(())
@@ -325,9 +334,11 @@ pub struct Backend<Block, H, C>
where
Block: BlockT,
H: Hasher,
C: NodeCodec<H>
C: NodeCodec<H>,
H::Out: HeapSizeOf + From<Block::Hash>,
{
states: RwLock<HashMap<Block::Hash, InMemory<H, C>>>,
changes_trie_storage: InMemoryChangesTrieStorage<H>,
blockchain: Blockchain<Block>,
}
@@ -335,12 +346,14 @@ impl<Block, H, C> Backend<Block, H, C>
where
Block: BlockT,
H: Hasher,
C: NodeCodec<H>
C: NodeCodec<H>,
H::Out: HeapSizeOf + From<Block::Hash>,
{
/// Create a new instance of in-mem backend.
pub fn new() -> Backend<Block, H, C> {
Backend {
states: RwLock::new(HashMap::new()),
changes_trie_storage: InMemoryChangesTrieStorage::new(),
blockchain: Blockchain::new(),
}
}
@@ -350,12 +363,13 @@ impl<Block, H, C> backend::Backend<Block, H, C> for Backend<Block, H, C>
where
Block: BlockT,
H: Hasher,
H::Out: HeapSizeOf,
H::Out: HeapSizeOf + From<Block::Hash>,
C: NodeCodec<H> + Send + Sync,
{
type BlockImportOperation = BlockImportOperation<Block, H, C>;
type Blockchain = Blockchain<Block>;
type State = InMemory<H, C>;
type ChangesTrieStorage = InMemoryChangesTrieStorage<H>;
fn begin_operation(&self, block: BlockId<Block>) -> error::Result<Self::BlockImportOperation> {
let state = match block {
@@ -368,6 +382,7 @@ where
pending_authorities: None,
old_state: state,
new_state: None,
changes_trie_update: None,
})
}
@@ -375,10 +390,21 @@ where
if let Some(pending_block) = operation.pending_block {
let old_state = &operation.old_state;
let (header, body, justification) = pending_block.block.into_inner();
let hash = header.hash();
let parent_hash = *header.parent_hash();
self.states.write().insert(hash, operation.new_state.unwrap_or_else(|| old_state.clone()));
let changes_trie_root = header.digest().logs().iter()
.find(|log| log.as_changes_trie_root().is_some())
.and_then(DigestItem::as_changes_trie_root)
.cloned();
if let Some(changes_trie_root) = changes_trie_root {
if let Some(changes_trie_update) = operation.changes_trie_update {
let changes_trie_root: H::Out = changes_trie_root.into();
self.changes_trie_storage.insert(header.number().as_(), changes_trie_root, changes_trie_update);
}
}
self.blockchain.insert(hash, header, justification, body, pending_block.is_best);
// dumb implementation - store value for each block
if pending_block.is_best {
@@ -392,6 +418,10 @@ where
&self.blockchain
}
fn changes_trie_storage(&self) -> Option<&Self::ChangesTrieStorage> {
Some(&self.changes_trie_storage)
}
fn state_at(&self, block: BlockId<Block>) -> error::Result<Self::State> {
match self.blockchain.id(block).and_then(|id| self.states.read().get(&id).cloned()) {
Some(state) => Ok(state),
@@ -408,7 +438,7 @@ impl<Block, H, C> backend::LocalBackend<Block, H, C> for Backend<Block, H, C>
where
Block: BlockT,
H: Hasher,
H::Out: HeapSizeOf,
H::Out: HeapSizeOf + From<Block::Hash>,
C: NodeCodec<H> + Send + Sync,
{}
+1
View File
@@ -39,6 +39,7 @@ extern crate patricia_trie;
extern crate hashdb;
extern crate rlp;
extern crate heapsize;
extern crate memorydb;
#[macro_use] extern crate error_chain;
#[macro_use] extern crate log;
+17 -14
View File
@@ -24,11 +24,7 @@ use parking_lot::RwLock;
use primitives::AuthorityId;
use runtime_primitives::{bft::Justification, generic::BlockId};
use runtime_primitives::traits::{Block as BlockT, NumberFor};
use state_machine::{
Backend as StateBackend,
TrieBackend as StateTrieBackend,
TryIntoTrieBackend as TryIntoStateTrieBackend
};
use state_machine::{Backend as StateBackend, InMemoryChangesTrieStorage, TrieBackend};
use backend::{Backend as ClientBackend, BlockImportOperation, RemoteBackend};
use blockchain::HeaderBackend as BlockchainHeaderBackend;
@@ -37,6 +33,8 @@ use light::blockchain::{Blockchain, Storage as BlockchainStorage};
use light::fetcher::{Fetcher, RemoteReadRequest};
use patricia_trie::NodeCodec;
use hashdb::Hasher;
use memorydb::MemoryDB;
use heapsize::HeapSizeOf;
/// Light client backend.
pub struct Backend<S, F> {
@@ -77,10 +75,12 @@ impl<S, F, Block, H, C> ClientBackend<Block, H, C> for Backend<S, F> where
F: Fetcher<Block>,
H: Hasher,
C: NodeCodec<H>,
H::Out: HeapSizeOf,
{
type BlockImportOperation = ImportOperation<Block, S, F>;
type Blockchain = Blockchain<S, F>;
type State = OnDemandState<Block, S, F>;
type ChangesTrieStorage = InMemoryChangesTrieStorage<H>;
fn begin_operation(&self, _block: BlockId<Block>) -> ClientResult<Self::BlockImportOperation> {
Ok(ImportOperation {
@@ -100,6 +100,10 @@ impl<S, F, Block, H, C> ClientBackend<Block, H, C> for Backend<S, F> where
&self.blockchain
}
fn changes_trie_storage(&self) -> Option<&Self::ChangesTrieStorage> {
None
}
fn state_at(&self, block: BlockId<Block>) -> ClientResult<Self::State> {
let block_hash = match block {
BlockId::Hash(h) => Some(h),
@@ -125,6 +129,7 @@ where
S: BlockchainStorage<Block>,
F: Fetcher<Block>,
H: Hasher,
H::Out: HeapSizeOf,
C: NodeCodec<H>,
{}
@@ -164,6 +169,11 @@ where
Ok(())
}
fn update_changes_trie(&mut self, _update: MemoryDB<H>) -> ClientResult<()> {
// we're not storing anything locally => ignore changes
Ok(())
}
fn reset_storage<I: Iterator<Item=(Vec<u8>, Vec<u8>)>>(&mut self, _iter: I) -> ClientResult<()> {
// we're not storing anything locally => ignore changes
Ok(())
@@ -180,6 +190,7 @@ impl<Block, S, F, H, C> StateBackend<H, C> for OnDemandState<Block, S, F>
{
type Error = ClientError;
type Transaction = ();
type TrieBackendStorage = MemoryDB<H>;
fn storage(&self, key: &[u8]) -> ClientResult<Option<Vec<u8>>> {
let mut header = self.cached_header.read().clone();
@@ -214,16 +225,8 @@ impl<Block, S, F, H, C> StateBackend<H, C> for OnDemandState<Block, S, F>
// whole state is not available on light node
Vec::new()
}
}
impl<Block, S, F, H, C> TryIntoStateTrieBackend<H, C> for OnDemandState<Block, S, F>
where
Block: BlockT,
F: Fetcher<Block>,
H: Hasher,
C: NodeCodec<H>,
{
fn try_into_trie_backend(self) -> Option<StateTrieBackend<H, C>> {
fn try_into_trie_backend(self) -> Option<TrieBackend<Self::TrieBackendStorage, H, C>> {
None
}
}
@@ -17,6 +17,7 @@
//! Light client call executor. Executes methods on remote full nodes, fetching
//! execution proof and checking it locally.
use std::marker::PhantomData;
use std::sync::Arc;
use futures::{IntoFuture, Future};
@@ -36,7 +37,7 @@ use light::fetcher::{Fetcher, RemoteCallRequest};
use executor::RuntimeVersion;
use codec::Decode;
use heapsize::HeapSizeOf;
use std::marker::PhantomData;
use memorydb::MemoryDB;
/// Call executor that executes methods on remote node, querying execution proof
/// and checking proof by re-executing locally.
@@ -108,7 +109,7 @@ where
_method: &str,
_call_data: &[u8],
_m: ExecutionManager<FF>
) -> ClientResult<(Vec<u8>, S::Transaction)> {
) -> ClientResult<(Vec<u8>, S::Transaction, Option<MemoryDB<H>>)> {
Err(ClientErrorKind::NotAvailableOnLightClient.into())
}
@@ -143,7 +144,7 @@ pub fn check_execution_proof<Header, E, H, C>(
let local_state_root = request.header.state_root();
let mut changes = OverlayedChanges::default();
let (local_result, _) = execution_proof_check::<H, C, _>(
let local_result = execution_proof_check::<H, C, _>(
H256::from_slice(local_state_root.as_ref()).into(),
remote_proof,
&mut changes,
+6 -5
View File
@@ -523,13 +523,14 @@ impl Store {
#[cfg(test)]
mod tests {
use primitives::{Blake2Hasher, RlpCodec};
use wasm_executor::WasmExecutor;
use state_machine::TestExternalities;
use wabt;
#[test]
fn sandbox_should_work() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<Blake2Hasher, RlpCodec>::default();
let test_code = include_bytes!("../wasm/target/wasm32-unknown-unknown/release/runtime_test.compact.wasm");
let code = wabt::wat2wasm(r#"
@@ -561,7 +562,7 @@ mod tests {
#[test]
fn sandbox_trap() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<Blake2Hasher, RlpCodec>::default();
let test_code = include_bytes!("../wasm/target/wasm32-unknown-unknown/release/runtime_test.compact.wasm");
let code = wabt::wat2wasm(r#"
@@ -582,7 +583,7 @@ mod tests {
#[test]
fn start_called() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<Blake2Hasher, RlpCodec>::default();
let test_code = include_bytes!("../wasm/target/wasm32-unknown-unknown/release/runtime_test.compact.wasm");
let code = wabt::wat2wasm(r#"
@@ -620,7 +621,7 @@ mod tests {
#[test]
fn invoke_args() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<Blake2Hasher, RlpCodec>::default();
let test_code = include_bytes!("../wasm/target/wasm32-unknown-unknown/release/runtime_test.compact.wasm");
let code = wabt::wat2wasm(r#"
@@ -654,7 +655,7 @@ mod tests {
#[test]
fn return_val() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<Blake2Hasher, RlpCodec>::default();
let test_code = include_bytes!("../wasm/target/wasm32-unknown-unknown/release/runtime_test.compact.wasm");
let code = wabt::wat2wasm(r#"
+21 -15
View File
@@ -264,6 +264,13 @@ impl_function_executor!(this: FunctionExecutor<'e, E>,
this.memory.set(result, r.as_ref()).map_err(|_| UserError("Invalid attempt to set memory in ext_storage_root"))?;
Ok(())
},
ext_storage_changes_root(block: u64, result: *mut u8) -> u32 => {
let r = this.ext.storage_changes_root(block);
if let Some(ref r) = r {
this.memory.set(result, &r[..]).map_err(|_| UserError("Invalid attempt to set memory in ext_storage_changes_root"))?;
}
Ok(if r.is_some() { 1u32 } else { 0u32 })
},
ext_blake2_256_enumerated_trie_root(values_data: *const u8, lens_data: *const u32, lens_len: u32, result: *mut u8) => {
let values = (0..lens_len)
.map(|i| this.memory.read_primitive(lens_data + i * 4))
@@ -501,7 +508,6 @@ impl WasmExecutor {
// finish instantiation by running 'start' function (if any).
let instance = intermediate_instance.run_start(&mut fec)?;
let size = data.len() as u32;
let offset = fec.heap.allocate(size);
memory.set(offset, &data)?;
@@ -514,7 +520,6 @@ impl WasmExecutor {
],
&mut fec
);
let returned = match result {
Ok(x) => x,
Err(e) => {
@@ -537,6 +542,7 @@ impl WasmExecutor {
#[cfg(test)]
mod tests {
use primitives::RlpCodec;
use super::*;
use codec::Encode;
use state_machine::TestExternalities;
@@ -550,7 +556,7 @@ mod tests {
#[test]
fn returning_should_work() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<_, RlpCodec>::default();
let test_code = include_bytes!("../wasm/target/wasm32-unknown-unknown/release/runtime_test.compact.wasm");
let output = WasmExecutor::new().call(&mut ext, 8, &test_code[..], "test_empty_return", &[]).unwrap();
@@ -559,7 +565,7 @@ mod tests {
#[test]
fn panicking_should_work() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<_, RlpCodec>::default();
let test_code = include_bytes!("../wasm/target/wasm32-unknown-unknown/release/runtime_test.compact.wasm");
let output = WasmExecutor::new().call(&mut ext, 8, &test_code[..], "test_panic", &[]);
@@ -571,7 +577,7 @@ mod tests {
#[test]
fn storage_should_work() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<_, RlpCodec>::default();
ext.set_storage(b"foo".to_vec(), b"bar".to_vec());
let test_code = include_bytes!("../wasm/target/wasm32-unknown-unknown/release/runtime_test.compact.wasm");
@@ -579,17 +585,17 @@ mod tests {
assert_eq!(output, b"all ok!".to_vec());
let expected : TestExternalities<_> = map![
let expected = TestExternalities::<_, _>::new(map![
b"input".to_vec() => b"Hello world".to_vec(),
b"foo".to_vec() => b"bar".to_vec(),
b"baz".to_vec() => b"bar".to_vec()
];
assert_eq!(expected, ext);
]);
assert_eq!(ext, expected);
}
#[test]
fn clear_prefix_should_work() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<_, RlpCodec>::default();
ext.set_storage(b"aaa".to_vec(), b"1".to_vec());
ext.set_storage(b"aab".to_vec(), b"2".to_vec());
ext.set_storage(b"aba".to_vec(), b"3".to_vec());
@@ -602,7 +608,7 @@ mod tests {
assert_eq!(output, b"all ok!".to_vec());
let expected: TestExternalities<_> = map![
let expected: TestExternalities<_, RlpCodec> = map![
b"aaa".to_vec() => b"1".to_vec(),
b"aab".to_vec() => b"2".to_vec(),
b"bbb".to_vec() => b"5".to_vec()
@@ -612,7 +618,7 @@ mod tests {
#[test]
fn blake2_256_should_work() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<_, RlpCodec>::default();
let test_code = include_bytes!("../wasm/target/wasm32-unknown-unknown/release/runtime_test.compact.wasm");
assert_eq!(
WasmExecutor::new().call(&mut ext, 8, &test_code[..], "test_blake2_256", &[]).unwrap(),
@@ -626,7 +632,7 @@ mod tests {
#[test]
fn twox_256_should_work() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<_, RlpCodec>::default();
let test_code = include_bytes!("../wasm/target/wasm32-unknown-unknown/release/runtime_test.compact.wasm");
assert_eq!(
WasmExecutor::new().call(&mut ext, 8, &test_code[..], "test_twox_256", &[]).unwrap(),
@@ -640,7 +646,7 @@ mod tests {
#[test]
fn twox_128_should_work() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<_, RlpCodec>::default();
let test_code = include_bytes!("../wasm/target/wasm32-unknown-unknown/release/runtime_test.compact.wasm");
assert_eq!(
WasmExecutor::new().call(&mut ext, 8, &test_code[..], "test_twox_128", &[]).unwrap(),
@@ -654,7 +660,7 @@ mod tests {
#[test]
fn ed25519_verify_should_work() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<Blake2Hasher, RlpCodec>::default();
let test_code = include_bytes!("../wasm/target/wasm32-unknown-unknown/release/runtime_test.compact.wasm");
let key = ed25519::Pair::from_seed(&blake2_256(b"test"));
let sig = key.sign(b"all ok!");
@@ -680,7 +686,7 @@ mod tests {
#[test]
fn enumerated_trie_root_should_work() {
let mut ext = TestExternalities::default();
let mut ext = TestExternalities::<Blake2Hasher, RlpCodec>::default();
let test_code = include_bytes!("../wasm/target/wasm32-unknown-unknown/release/runtime_test.compact.wasm");
assert_eq!(
WasmExecutor::new().call(&mut ext, 8, &test_code[..], "test_enumerated_trie_root", &[]).unwrap(),
@@ -0,0 +1,30 @@
// Copyright 2018 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! Substrate changes trie configuration.
/// Substrate changes trie configuration.
///
/// Controls when (if ever) digest changes-tries are built on top of the
/// per-block changes tries.
#[cfg_attr(any(feature = "std", test), derive(Serialize, Deserialize))]
#[derive(Debug, Clone, PartialEq, Eq, Default, Encode, Decode)]
pub struct ChangesTrieConfiguration {
	/// Interval (in blocks) at which level1-digests are created. Digests are not
	/// created when this is less or equal to 1.
	pub digest_interval: u64,
	/// Maximal number of digest levels in hierarchy. 0 means that digests are not
	/// created at all (even level1 digests). 1 means only level1-digests are created.
	/// 2 means that every digest_interval^2 there will be a level2-digest, and so on.
	pub digest_levels: u32,
}
+2
View File
@@ -112,6 +112,7 @@ pub mod uint;
mod authority_id;
#[cfg(feature = "std")]
mod rlp_codec;
mod changes_trie;
#[cfg(test)]
mod tests;
@@ -119,6 +120,7 @@ mod tests;
pub use self::hash::{H160, H256, H512};
pub use self::uint::U256;
pub use authority_id::AuthorityId;
pub use changes_trie::ChangesTrieConfiguration;
// Switch back to Blake after PoC-3 is out
// pub use self::hasher::blake::BlakeHasher;
+14 -7
View File
@@ -30,7 +30,7 @@ pub extern crate parity_codec as codec;
// re-export hashing functions.
pub use primitives::{blake2_256, twox_128, twox_256, ed25519};
pub use primitives::Blake2Hasher;
pub use primitives::{Blake2Hasher, RlpCodec};
// Switch to this after PoC-3
// pub use primitives::BlakeHasher;
pub use substrate_state_machine::{Externalities, TestExternalities};
@@ -104,6 +104,13 @@ pub fn storage_root() -> H256 {
).unwrap_or(H256::new())
}
/// "Commit" all existing operations and get the resultant storage change root.
pub fn storage_changes_root(block: u64) -> Option<H256> {
ext::with(|ext|
ext.storage_changes_root(block)
).unwrap_or(None)
}
/// A trie root formed from the enumerated items.
pub fn enumerated_trie_root<H>(serialised_values: &[&[u8]]) -> H::Out
where
@@ -210,7 +217,7 @@ mod std_tests {
#[test]
fn storage_works() {
let mut t = TestExternalities::<Blake2Hasher>::new();
let mut t = TestExternalities::<Blake2Hasher, RlpCodec>::default();
assert!(with_externalities(&mut t, || {
assert_eq!(storage(b"hello"), None);
set_storage(b"hello", b"world");
@@ -220,7 +227,7 @@ mod std_tests {
true
}));
t = map![b"foo".to_vec() => b"bar".to_vec()];
t = TestExternalities::new(map![b"foo".to_vec() => b"bar".to_vec()]);
assert!(!with_externalities(&mut t, || {
assert_eq!(storage(b"hello"), None);
@@ -231,9 +238,9 @@ mod std_tests {
#[test]
fn read_storage_works() {
let mut t: TestExternalities<Blake2Hasher> = map![
let mut t = TestExternalities::<Blake2Hasher, RlpCodec>::new(map![
b":test".to_vec() => b"\x0b\0\0\0Hello world".to_vec()
];
]);
with_externalities(&mut t, || {
let mut v = [0u8; 4];
@@ -247,12 +254,12 @@ mod std_tests {
#[test]
fn clear_prefix_works() {
let mut t: TestExternalities<Blake2Hasher> = map![
let mut t = TestExternalities::<Blake2Hasher, RlpCodec>::new(map![
b":a".to_vec() => b"\x0b\0\0\0Hello world".to_vec(),
b":abcd".to_vec() => b"\x0b\0\0\0Hello world".to_vec(),
b":abc".to_vec() => b"\x0b\0\0\0Hello world".to_vec(),
b":abdd".to_vec() => b"\x0b\0\0\0Hello world".to_vec()
];
]);
with_externalities(&mut t, || {
clear_prefix(b":abc");
+15
View File
@@ -64,6 +64,7 @@ extern "C" {
fn ext_get_allocated_storage(key_data: *const u8, key_len: u32, written_out: *mut u32) -> *mut u8;
fn ext_get_storage_into(key_data: *const u8, key_len: u32, value_data: *mut u8, value_len: u32, value_offset: u32) -> u32;
fn ext_storage_root(result: *mut u8);
fn ext_storage_changes_root(block: u64, result: *mut u8) -> u32;
fn ext_blake2_256_enumerated_trie_root(values_data: *const u8, lens_data: *const u32, lens_len: u32, result: *mut u8);
fn ext_chain_id() -> u64;
fn ext_blake2_256(data: *const u8, len: u32, out: *mut u8);
@@ -170,6 +171,20 @@ pub fn storage_root() -> [u8; 32] {
result
}
/// The current storage's changes root, if any.
///
/// Calls the host function `ext_storage_changes_root`, which writes the 32-byte
/// root into `result` and returns non-zero iff a root was produced for `block`.
pub fn storage_changes_root(block: u64) -> Option<[u8; 32]> {
	let mut result: [u8; 32] = Default::default();
	// SAFETY: `result` is a 32-byte buffer living for the whole call; the host
	// side is expected to write at most 32 bytes through `result.as_mut_ptr()`
	// and only when it returns non-zero — TODO confirm against the host impl.
	let is_set = unsafe {
		ext_storage_changes_root(block, result.as_mut_ptr())
	};

	if is_set != 0 {
		Some(result)
	} else {
		None
	}
}
/// A trie root calculated from enumerated values.
pub fn enumerated_trie_root<H: Hasher + ExternTrieCrypto>(values: &[&[u8]]) -> [u8; 32] {
H::enumerated_trie_root(values)
@@ -36,6 +36,7 @@ impl<Item> Default for Digest<Item> {
impl<Item> traits::Digest for Digest<Item> where
Item: DigestItemT + Codec
{
type Hash = Item::Hash;
type Item = Item;
fn logs(&self) -> &[Self::Item] {
@@ -51,10 +52,14 @@ impl<Item> traits::Digest for Digest<Item> where
/// provide opaque access to other items.
#[derive(PartialEq, Eq, Clone)]
#[cfg_attr(feature = "std", derive(Debug, Serialize, Deserialize))]
pub enum DigestItem<AuthorityId> {
pub enum DigestItem<Hash, AuthorityId> {
/// System digest item announcing that authorities set has been changed
/// in the block. Contains the new set of authorities.
AuthoritiesChange(Vec<AuthorityId>),
/// System digest item that contains the root of changes trie at given
/// block. It is created for every block iff runtime supports changes
/// trie creation.
ChangesTrieRoot(Hash),
/// Any 'non-system' digest item, opaque to the native code.
Other(Vec<u8>),
}
@@ -63,9 +68,11 @@ pub enum DigestItem<AuthorityId> {
/// final runtime implementations for encoding/decoding its log items.
#[derive(PartialEq, Eq, Clone)]
#[cfg_attr(feature = "std", derive(Debug))]
pub enum DigestItemRef<'a, AuthorityId: 'a> {
pub enum DigestItemRef<'a, Hash: 'a, AuthorityId: 'a> {
/// Reference to `DigestItem::AuthoritiesChange`.
AuthoritiesChange(&'a [AuthorityId]),
/// Reference to `DigestItem::ChangesTrieRoot`.
ChangesTrieRoot(&'a Hash),
/// Reference to `DigestItem::Other`.
Other(&'a Vec<u8>),
}
@@ -79,9 +86,10 @@ pub enum DigestItemRef<'a, AuthorityId: 'a> {
enum DigestItemType {
Other = 0,
AuthoritiesChange,
ChangesTrieRoot,
}
impl<AuthorityId> DigestItem<AuthorityId> {
impl<Hash, AuthorityId> DigestItem<Hash, AuthorityId> {
/// Returns Some if `self` is a `DigestItem::Other`.
pub fn as_other(&self) -> Option<&Vec<u8>> {
match *self {
@@ -91,15 +99,17 @@ impl<AuthorityId> DigestItem<AuthorityId> {
}
/// Returns a 'referencing view' for this digest item.
fn dref<'a>(&'a self) -> DigestItemRef<'a, AuthorityId> {
fn dref<'a>(&'a self) -> DigestItemRef<'a, Hash, AuthorityId> {
match *self {
DigestItem::AuthoritiesChange(ref v) => DigestItemRef::AuthoritiesChange(v),
DigestItem::ChangesTrieRoot(ref v) => DigestItemRef::ChangesTrieRoot(v),
DigestItem::Other(ref v) => DigestItemRef::Other(v),
}
}
}
impl<AuthorityId: Member> traits::DigestItem for DigestItem<AuthorityId> {
impl<Hash: Member, AuthorityId: Member> traits::DigestItem for DigestItem<Hash, AuthorityId> {
type Hash = Hash;
type AuthorityId = AuthorityId;
fn as_authorities_change(&self) -> Option<&[Self::AuthorityId]> {
@@ -108,21 +118,31 @@ impl<AuthorityId: Member> traits::DigestItem for DigestItem<AuthorityId> {
_ => None,
}
}
fn as_changes_trie_root(&self) -> Option<&Hash> {
match *self {
DigestItem::ChangesTrieRoot(ref changes_trie_root) => Some(changes_trie_root),
_ => None,
}
}
}
impl<AuthorityId: Encode> Encode for DigestItem<AuthorityId> {
impl<Hash: Encode, AuthorityId: Encode> Encode for DigestItem<Hash, AuthorityId> {
fn encode(&self) -> Vec<u8> {
self.dref().encode()
}
}
impl<AuthorityId: Decode> Decode for DigestItem<AuthorityId> {
impl<Hash: Decode, AuthorityId: Decode> Decode for DigestItem<Hash, AuthorityId> {
fn decode<I: Input>(input: &mut I) -> Option<Self> {
let item_type: DigestItemType = Decode::decode(input)?;
match item_type {
DigestItemType::AuthoritiesChange => Some(DigestItem::AuthoritiesChange(
Decode::decode(input)?,
)),
DigestItemType::ChangesTrieRoot => Some(DigestItem::ChangesTrieRoot(
Decode::decode(input)?,
)),
DigestItemType::Other => Some(DigestItem::Other(
Decode::decode(input)?,
)),
@@ -130,7 +150,7 @@ impl<AuthorityId: Decode> Decode for DigestItem<AuthorityId> {
}
}
impl<'a, AuthorityId: Encode> Encode for DigestItemRef<'a, AuthorityId> {
impl<'a, Hash: Encode, AuthorityId: Encode> Encode for DigestItemRef<'a, Hash, AuthorityId> {
fn encode(&self) -> Vec<u8> {
let mut v = Vec::new();
@@ -139,6 +159,10 @@ impl<'a, AuthorityId: Encode> Encode for DigestItemRef<'a, AuthorityId> {
DigestItemType::AuthoritiesChange.encode_to(&mut v);
authorities.encode_to(&mut v);
},
DigestItemRef::ChangesTrieRoot(changes_trie_root) => {
DigestItemType::ChangesTrieRoot.encode_to(&mut v);
changes_trie_root.encode_to(&mut v);
},
DigestItemRef::Other(val) => {
DigestItemType::Other.encode_to(&mut v);
val.encode_to(&mut v);
@@ -117,7 +117,7 @@ impl<Number, Hash, DigestItem> Encode for Header<Number, Hash, DigestItem> where
impl<Number, Hash, DigestItem> traits::Header for Header<Number, Hash, DigestItem> where
Number: Member + ::rstd::hash::Hash + Copy + Codec + MaybeDisplay + SimpleArithmetic + Codec,
Hash: HashT,
DigestItem: DigestItemT + Codec,
DigestItem: DigestItemT<Hash = Hash::Output> + Codec,
Hash::Output: Default + ::rstd::hash::Hash + Copy + Member + MaybeDisplay + SimpleBitOps + Codec,
{
type Number = Number;
@@ -148,7 +148,11 @@ impl<Number, Hash, DigestItem> traits::Header for Header<Number, Hash, DigestIte
digest: Self::Digest
) -> Self {
Header {
number, extrinsics_root: extrinsics_root, state_root, parent_hash, digest
number,
extrinsics_root,
state_root,
parent_hash,
digest
}
}
}
@@ -21,7 +21,7 @@ use substrate_primitives::{H256, H512};
use super::{Digest, Header, DigestItem, UncheckedExtrinsic};
type Block = super::Block<
Header<u64, ::traits::BlakeTwo256, DigestItem<u32>>,
Header<u64, ::traits::BlakeTwo256, DigestItem<H256, u32>>,
UncheckedExtrinsic<H256, u64, u64, ::Ed25519Signature>,
>;
@@ -34,8 +34,8 @@ fn block_roundtrip_serialization() {
state_root: [1u8; 32].into(),
extrinsics_root: [2u8; 32].into(),
digest: Digest { logs: vec![
DigestItem::Other::<u32>(vec![1, 2, 3]),
DigestItem::Other::<u32>(vec![4, 5, 6]),
DigestItem::Other::<H256, u32>(vec![1, 2, 3]),
DigestItem::Other::<H256, u32>(vec![4, 5, 6]),
] },
},
extrinsics: vec![
@@ -70,7 +70,7 @@ fn block_roundtrip_serialization() {
#[test]
fn system_digest_item_encoding() {
let item = DigestItem::AuthoritiesChange::<u32>(vec![10, 20, 30]);
let item = DigestItem::AuthoritiesChange::<H256, u32>(vec![10, 20, 30]);
let encoded = item.encode();
assert_eq!(encoded, vec![
// type = DigestItemType::AuthoritiesChange
@@ -83,13 +83,13 @@ fn system_digest_item_encoding() {
30, 0, 0, 0,
]);
let decoded: DigestItem<u32> = Decode::decode(&mut &encoded[..]).unwrap();
let decoded: DigestItem<H256, u32> = Decode::decode(&mut &encoded[..]).unwrap();
assert_eq!(item, decoded);
}
#[test]
fn non_system_digest_item_encoding() {
let item = DigestItem::Other::<u32>(vec![10, 20, 30]);
let item = DigestItem::Other::<H256, u32>(vec![10, 20, 30]);
let encoded = item.encode();
assert_eq!(encoded, vec![
// type = DigestItemType::Other
@@ -100,6 +100,6 @@ fn non_system_digest_item_encoding() {
10, 20, 30,
]);
let decoded: DigestItem<u32> = Decode::decode(&mut &encoded[..]).unwrap();
let decoded: DigestItem<H256, u32> = Decode::decode(&mut &encoded[..]).unwrap();
assert_eq!(item, decoded);
}
+5 -4
View File
@@ -366,6 +366,7 @@ macro_rules! impl_outer_log {
#[cfg(test)]
mod tests {
use substrate_primitives::hash::H256;
use codec::{Encode, Decode, Input};
pub trait RuntimeT {
@@ -400,7 +401,7 @@ mod tests {
// TODO try to avoid redundant brackets: a(AuthoritiesChange), b
impl_outer_log! {
pub enum Log(InternalLog: DigestItem<u64>) for Runtime {
pub enum Log(InternalLog: DigestItem<H256, u64>) for Runtime {
a(AuthoritiesChange), b()
}
}
@@ -418,16 +419,16 @@ mod tests {
assert_eq!(auth_change, decoded_auth_change);
// interpret regular item using `generic::DigestItem`
let generic_b1: generic::DigestItem<u64> = Decode::decode(&mut &encoded_b1[..]).unwrap();
let generic_b1: generic::DigestItem<H256, u64> = Decode::decode(&mut &encoded_b1[..]).unwrap();
match generic_b1 {
generic::DigestItem::Other(_) => (),
_ => panic!("unexpected generic_b1: {:?}", generic_b1),
}
// interpret system item using `generic::DigestItem`
let generic_auth_change: generic::DigestItem<u64> = Decode::decode(&mut &encoded_auth_change[..]).unwrap();
let generic_auth_change: generic::DigestItem<H256, u64> = Decode::decode(&mut &encoded_auth_change[..]).unwrap();
match generic_auth_change {
generic::DigestItem::AuthoritiesChange(authorities) => assert_eq!(authorities, vec![100, 200, 300]),
generic::DigestItem::AuthoritiesChange::<H256, u64>(authorities) => assert_eq!(authorities, vec![100, 200, 300]),
_ => panic!("unexpected generic_auth_change: {:?}", generic_auth_change),
}
}
+11 -11
View File
@@ -20,16 +20,20 @@ use serde::{Serialize, de::DeserializeOwned};
use std::fmt::Debug;
use codec::Codec;
use traits::{self, Checkable, Applyable, BlakeTwo256};
use generic::DigestItem as GenDigestItem;
pub use substrate_primitives::H256;
pub type DigestItem = GenDigestItem<H256, u64>;
#[derive(Default, PartialEq, Eq, Clone, Serialize, Deserialize, Debug, Encode, Decode)]
pub struct Digest {
pub logs: Vec<u64>,
pub logs: Vec<DigestItem>,
}
impl traits::Digest for Digest {
type Item = u64;
type Hash = H256;
type Item = DigestItem;
fn logs(&self) -> &[Self::Item] {
&self.logs
@@ -40,14 +44,6 @@ impl traits::Digest for Digest {
}
}
impl traits::DigestItem for () {
type AuthorityId = ();
}
impl traits::DigestItem for u64 {
type AuthorityId = ();
}
#[derive(PartialEq, Eq, Clone, Serialize, Deserialize, Debug, Encode, Decode)]
#[serde(rename_all = "camelCase")]
#[serde(deny_unknown_fields)]
@@ -88,7 +84,11 @@ impl traits::Header for Header {
digest: Self::Digest
) -> Self {
Header {
number, extrinsics_root: extrinsics_root, state_root, parent_hash, digest
number,
extrinsics_root: extrinsics_root,
state_root,
parent_hash,
digest
}
}
}
+20 -4
View File
@@ -244,6 +244,9 @@ pub trait Hash: 'static + MaybeSerializeDebug + Clone + Eq + PartialEq { // Stup
/// Acquire the global storage root.
fn storage_root() -> Self::Output;
/// Acquire the global storage changes root.
fn storage_changes_root(block: u64) -> Option<Self::Output>;
}
/// Blake2-256 Hash implementation.
@@ -275,6 +278,9 @@ impl Hash for BlakeTwo256 {
fn storage_root() -> Self::Output {
runtime_io::storage_root().into()
}
fn storage_changes_root(block: u64) -> Option<Self::Output> {
runtime_io::storage_changes_root(block).map(Into::into)
}
}
/// Something that can be checked for equality and printed out to a debug channel if bad.
@@ -343,7 +349,7 @@ pub trait Header: Clone + Send + Sync + Codec + Eq + MaybeSerializeDebug + 'stat
type Number: Member + ::rstd::hash::Hash + Copy + MaybeDisplay + SimpleArithmetic + Codec;
type Hash: Member + ::rstd::hash::Hash + Copy + MaybeDisplay + Default + SimpleBitOps + Codec + AsRef<[u8]>;
type Hashing: Hash<Output = Self::Hash>;
type Digest: Digest;
type Digest: Digest<Hash = Self::Hash>;
fn new(
number: Self::Number,
@@ -444,8 +450,12 @@ pub trait Applyable: Sized + Send + Sync {
/// Something that acts like a `Digest` - it can have `Log`s `push`ed onto it and these `Log`s are
/// each `Codec`.
pub trait Digest: Member + Default {
type Item: DigestItem;
type Hash: Member;
type Item: DigestItem<Hash = Self::Hash>;
/// Get reference to all digest items.
fn logs(&self) -> &[Self::Item];
/// Push new digest item.
fn push(&mut self, item: Self::Item);
}
@@ -454,10 +464,16 @@ pub trait Digest: Member + Default {
///
/// If the runtime does not supports some 'system' items, use `()` as a stub.
pub trait DigestItem: Member {
type AuthorityId;
type Hash: Member;
type AuthorityId: Member;
/// Returns Some if the entry is the `AuthoritiesChange` entry.
/// Returns Some if the entry is the `AuthoritiesChange` entry.
fn as_authorities_change(&self) -> Option<&[Self::AuthorityId]> {
None
}
/// Returns Some if the entry is the `ChangesTrieRoot` entry.
fn as_changes_trie_root(&self) -> Option<&Self::Hash> {
None
}
}
+49 -25
View File
@@ -20,11 +20,11 @@ use std::{error, fmt};
use std::cmp::Ord;
use std::collections::HashMap;
use std::marker::PhantomData;
use std::sync::Arc;
use hashdb::Hasher;
use memorydb::MemoryDB;
use rlp::Encodable;
use trie_backend::{TryIntoTrieBackend, TrieBackend};
use trie_backend::TrieBackend;
use trie_backend_essence::TrieBackendStorage;
use patricia_trie::{TrieDBMut, TrieMut, NodeCodec};
use heapsize::HeapSizeOf;
@@ -32,13 +32,16 @@ use heapsize::HeapSizeOf;
/// to it.
///
/// The clone operation (if implemented) should be cheap.
pub trait Backend<H: Hasher, C: NodeCodec<H>>: TryIntoTrieBackend<H, C> {
pub trait Backend<H: Hasher, C: NodeCodec<H>> {
/// An error type when fetching data is not possible.
type Error: super::Error;
/// Changes to be applied if committing
/// Storage changes to be applied if committing
type Transaction;
/// Type of trie backend storage.
type TrieBackendStorage: TrieBackendStorage<H>;
/// Get keyed storage associated with specific address, or None if there is nothing associated.
fn storage(&self, key: &[u8]) -> Result<Option<Vec<u8>>, Self::Error>;
@@ -60,6 +63,9 @@ pub trait Backend<H: Hasher, C: NodeCodec<H>>: TryIntoTrieBackend<H, C> {
/// Get all key/value pairs into a Vec.
fn pairs(&self) -> Vec<(Vec<u8>, Vec<u8>)>;
/// Try convert into trie backend.
fn try_into_trie_backend(self) -> Option<TrieBackend<Self::TrieBackendStorage, H, C>>;
}
/// Error impossible.
@@ -81,7 +87,7 @@ impl error::Error for Void {
/// tests.
#[derive(Eq)]
pub struct InMemory<H, C> {
inner: Arc<HashMap<Vec<u8>, Vec<u8>>>,
inner: HashMap<Vec<u8>, Vec<u8>>,
_hasher: PhantomData<H>,
_codec: PhantomData<C>,
}
@@ -89,7 +95,7 @@ pub struct InMemory<H, C> {
impl<H, C> Default for InMemory<H, C> {
fn default() -> Self {
InMemory {
inner: Arc::new(Default::default()),
inner: Default::default(),
_hasher: PhantomData,
_codec: PhantomData,
}
@@ -111,9 +117,17 @@ impl<H, C> PartialEq for InMemory<H, C> {
}
impl<H: Hasher, C: NodeCodec<H>> InMemory<H, C> where H::Out: HeapSizeOf {
/// Try convert into trie backend.
pub fn try_into_trie_backend(self) -> Option<TrieBackend<MemoryDB<H>, H, C>> {
let mut mdb = MemoryDB::default();
let root = insert_into_memory_db::<H, C, _>(&mut mdb, self.inner.into_iter())?;
Some(TrieBackend::new(mdb, root))
}
/// Copy the state, with applied updates
pub fn update(&self, changes: <Self as Backend<H, C>>::Transaction) -> Self {
let mut inner: HashMap<_, _> = (&*self.inner).clone();
let mut inner: HashMap<_, _> = self.inner.clone();
for (key, val) in changes {
match val {
Some(v) => { inner.insert(key, v); },
@@ -128,7 +142,7 @@ impl<H: Hasher, C: NodeCodec<H>> InMemory<H, C> where H::Out: HeapSizeOf {
impl<H, C> From<HashMap<Vec<u8>, Vec<u8>>> for InMemory<H, C> {
fn from(inner: HashMap<Vec<u8>, Vec<u8>>) -> Self {
InMemory {
inner: Arc::new(inner), _hasher: PhantomData, _codec: PhantomData
inner: inner, _hasher: PhantomData, _codec: PhantomData
}
}
}
@@ -138,6 +152,7 @@ impl super::Error for Void {}
impl<H: Hasher, C: NodeCodec<H>> Backend<H, C> for InMemory<H, C> where H::Out: HeapSizeOf {
type Error = Void;
type Transaction = Vec<(Vec<u8>, Option<Vec<u8>>)>;
type TrieBackendStorage = MemoryDB<H>;
fn storage(&self, key: &[u8]) -> Result<Option<Vec<u8>>, Self::Error> {
Ok(self.inner.get(key).map(Clone::clone))
@@ -171,23 +186,32 @@ impl<H: Hasher, C: NodeCodec<H>> Backend<H, C> for InMemory<H, C> where H::Out:
fn pairs(&self) -> Vec<(Vec<u8>, Vec<u8>)> {
self.inner.iter().map(|(k, v)| (k.clone(), v.clone())).collect()
}
}
impl<H: Hasher, C: NodeCodec<H>> TryIntoTrieBackend<H, C> for InMemory<H, C> where H::Out: HeapSizeOf {
fn try_into_trie_backend(self) -> Option<TrieBackend<H, C>> {
use memorydb::MemoryDB;
let mut root = <H as Hasher>::Out::default();
fn try_into_trie_backend(self) -> Option<TrieBackend<Self::TrieBackendStorage, H, C>> {
let mut mdb = MemoryDB::new();
{
let mut trie = TrieDBMut::<H, C>::new(&mut mdb, &mut root);
for (key, value) in self.inner.iter() {
if let Err(e) = trie.insert(&key, &value) {
warn!(target: "trie", "Failed to write to trie: {}", e);
return None;
}
}
}
Some(TrieBackend::with_memorydb(mdb, root))
let root = insert_into_memory_db::<H, C, _>(&mut mdb, self.inner.clone().into_iter())?;
Some(TrieBackend::new(mdb, root))
}
}
/// Insert input pairs into memory db.
///
/// Builds a trie over `input` inside `mdb` and returns its root, or `None`
/// (after logging a warning) if any insertion into the trie fails.
pub(crate) fn insert_into_memory_db<H, C, I>(mdb: &mut MemoryDB<H>, input: I) -> Option<H::Out>
	where
		H: Hasher,
		H::Out: HeapSizeOf,
		C: NodeCodec<H>,
		I: Iterator<Item=(Vec<u8>, Vec<u8>)>,
{
	let mut root = <H as Hasher>::Out::default();
	{
		// Inner scope: `trie` mutably borrows both `mdb` and `root`, and its Drop
		// finalizes the root; the borrow must end before `root` is returned.
		let mut trie = TrieDBMut::<H, C>::new(mdb, &mut root);
		for (key, value) in input {
			if let Err(e) = trie.insert(&key, &value) {
				warn!(target: "trie", "Failed to write to trie: {}", e);
				return None;
			}
		}
	}

	Some(root)
}
@@ -0,0 +1,296 @@
// Copyright 2017 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! Structures and functions required to build changes trie for given block.
use std::collections::{BTreeMap, BTreeSet};
use codec::Decode;
use hashdb::Hasher;
use heapsize::HeapSizeOf;
use patricia_trie::NodeCodec;
use backend::Backend;
use overlayed_changes::OverlayedChanges;
use trie_backend_essence::{TrieBackendStorage, TrieBackendEssence};
use changes_trie::build_iterator::digest_build_iterator;
use changes_trie::input::{InputKey, InputPair, DigestIndex, ExtrinsicIndex};
use changes_trie::{Configuration, Storage};
/// Prepare input pairs for building a changes trie of given block.
///
/// Returns Err if a storage error has occurred OR if the storage hasn't returned
/// required data.
/// Returns Ok(None) if the data required to prepare input pairs is not collected
/// or storage is not provided.
pub fn prepare_input<'a, B, S, H, C>(
	backend: &B,
	storage: Option<&'a S>,
	changes: &OverlayedChanges,
	block: u64,
) -> Result<Option<Vec<InputPair>>, String>
	where
		B: Backend<H, C>,
		S: Storage<H>,
		&'a S: TrieBackendStorage<H>,
		H: Hasher,
		H::Out: HeapSizeOf,
		C: NodeCodec<H>,
{
	// A changes trie can only be built when both a storage for previously built
	// tries AND a runtime-provided configuration are available; otherwise Ok(None).
	let (storage, config) = match (storage, changes.changes_trie_config.as_ref()) {
		(Some(storage), Some(config)) => (storage, config),
		_ => return Ok(None),
	};

	// The trie input is the union of per-extrinsic change pairs for this block
	// and (on digest blocks) digest pairs referencing earlier blocks' tries.
	let mut input = Vec::new();
	input.extend(prepare_extrinsics_input(
		backend,
		block,
		changes)?);
	input.extend(prepare_digest_input::<_, H, C>(
		block,
		config,
		storage)?);

	Ok(Some(input))
}
/// Prepare ExtrinsicIndex input pairs.
///
/// Collects, for every storage key touched in `changes` (prospective and
/// committed), the set of extrinsic indices that changed it, and yields one
/// `InputPair::ExtrinsicIndex` per key for `block`.
fn prepare_extrinsics_input<B, H, C>(
	backend: &B,
	block: u64,
	changes: &OverlayedChanges,
) -> Result<impl Iterator<Item=InputPair>, String>
	where
		B: Backend<H, C>,
		H: Hasher,
		C: NodeCodec<H>,
{
	// BTreeMap/BTreeSet keep keys and extrinsic indices deterministically ordered.
	let mut extrinsic_map = BTreeMap::<Vec<u8>, BTreeSet<u32>>::new();
	for (key, val) in changes.prospective.iter().chain(changes.committed.iter()) {
		// Entries without an extrinsics set were not changed by any extrinsic.
		let extrinsics = match val.extrinsics {
			Some(ref extrinsics) => extrinsics,
			None => continue,
		};

		// ignore values that have null value at the end of operation AND are not in storage
		// at the beginning of operation
		if !changes.storage(key).map(|v| v.is_some()).unwrap_or_default() {
			if !backend.exists_storage(key).map_err(|e| format!("{}", e))? {
				continue;
			}
		}

		extrinsic_map.entry(key.clone()).or_default()
			.extend(extrinsics.iter().cloned());
	}

	Ok(extrinsic_map.into_iter()
		.map(move |(key, extrinsics)| InputPair::ExtrinsicIndex(ExtrinsicIndex {
			block,
			key,
		}, extrinsics.iter().cloned().collect())))
}
/// Prepare DigestIndex input pairs.
///
/// For a digest block, walks the changes tries of every block covered by this
/// digest (as produced by `digest_build_iterator`) and records, per storage key,
/// the set of covered blocks in which that key changed. Yields one
/// `InputPair::DigestIndex` per key for `block`.
fn prepare_digest_input<'a, S, H, C>(
	block: u64,
	config: &Configuration,
	storage: &'a S
) -> Result<impl Iterator<Item=InputPair>, String>
	where
		S: Storage<H>,
		&'a S: TrieBackendStorage<H>,
		H: Hasher,
		H::Out: HeapSizeOf,
		C: NodeCodec<H>,
{
	// BTreeMap/BTreeSet keep the resulting pairs deterministically ordered.
	let mut digest_map = BTreeMap::<Vec<u8>, BTreeSet<u64>>::new();
	for digest_build_block in digest_build_iterator(config, block) {
		// A missing root for a covered block is an error: the digest cannot be
		// built without it.
		let trie_root = storage.root(digest_build_block)?;
		let trie_root = trie_root.ok_or_else(|| format!("No changes trie root for block {}", digest_build_block))?;
		let trie_storage = TrieBackendEssence::<_, H, C>::new(storage, trie_root);

		// Collect keys changed directly by extrinsics of the covered block…
		let extrinsic_prefix = ExtrinsicIndex::key_neutral_prefix(digest_build_block);
		trie_storage.for_keys_with_prefix(&extrinsic_prefix, |key|
			if let Some(InputKey::ExtrinsicIndex(trie_key)) = Decode::decode(&mut &key[..]) {
				digest_map.entry(trie_key.key).or_default()
					.insert(digest_build_block);
			});

		// …and keys referenced by lower-level digests stored in that block's trie.
		let digest_prefix = DigestIndex::key_neutral_prefix(digest_build_block);
		trie_storage.for_keys_with_prefix(&digest_prefix, |key|
			if let Some(InputKey::DigestIndex(trie_key)) = Decode::decode(&mut &key[..]) {
				digest_map.entry(trie_key.key).or_default()
					.insert(digest_build_block);
			});
	}

	Ok(digest_map.into_iter()
		.map(move |(key, set)| InputPair::DigestIndex(DigestIndex {
			block,
			key
		}, set.into_iter().collect())))
}
#[cfg(test)]
mod test {
use codec::Encode;
use primitives::{Blake2Hasher, RlpCodec};
use backend::InMemory;
use changes_trie::storage::InMemoryStorage;
use overlayed_changes::OverlayedValue;
use super::*;
	/// Builds the shared fixture for the `build` tests: a state backend holding
	/// keys 100..=105, a changes-trie storage pre-filled for blocks 1..=15, and
	/// an overlay with both prospective and committed changes.
	/// Config is digest_interval=4, digest_levels=2, so blocks 4, 8, 12 carry
	/// level1 digests and block 16 carries a level2 digest.
	fn prepare_for_build() -> (InMemory<Blake2Hasher, RlpCodec>, InMemoryStorage<Blake2Hasher>, OverlayedChanges) {
		// Backing state: every key currently maps to [255].
		let backend: InMemory<_, _> = vec![
			(vec![100], vec![255]),
			(vec![101], vec![255]),
			(vec![102], vec![255]),
			(vec![103], vec![255]),
			(vec![104], vec![255]),
			(vec![105], vec![255]),
		].into_iter().collect::<::std::collections::HashMap<_, _>>().into();
		// Per-block changes-trie contents for blocks 1..=15.
		let storage = InMemoryStorage::with_inputs::<RlpCodec>(vec![
			(1, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 1, key: vec![100] }, vec![1, 3]),
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 1, key: vec![101] }, vec![0, 2]),
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 1, key: vec![105] }, vec![0, 2, 4]),
			]),
			(2, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 2, key: vec![102] }, vec![0]),
			]),
			(3, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 3, key: vec![100] }, vec![0]),
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 3, key: vec![105] }, vec![1]),
			]),
			// Block 4 is a level1 digest block: it also aggregates blocks 1..=3.
			(4, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![100] }, vec![0, 2, 3]),
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![101] }, vec![1]),
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![103] }, vec![0, 1]),
				InputPair::DigestIndex(DigestIndex { block: 4, key: vec![100] }, vec![1, 3]),
				InputPair::DigestIndex(DigestIndex { block: 4, key: vec![101] }, vec![1]),
				InputPair::DigestIndex(DigestIndex { block: 4, key: vec![102] }, vec![2]),
				InputPair::DigestIndex(DigestIndex { block: 4, key: vec![105] }, vec![1, 3]),
			]),
			(5, Vec::new()),
			(6, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 6, key: vec![105] }, vec![2]),
			]),
			(7, Vec::new()),
			// Block 8 is a level1 digest block referencing block 6.
			(8, vec![
				InputPair::DigestIndex(DigestIndex { block: 8, key: vec![105] }, vec![6]),
			]),
			(9, Vec::new()), (10, Vec::new()), (11, Vec::new()), (12, Vec::new()), (13, Vec::new()),
			(14, Vec::new()), (15, Vec::new()),
		]);
		// Overlay: prospective changes to keys 100/103, committed changes to
		// 100/101 plus the well-known :extrinsic_index marker (current index 3).
		let changes = OverlayedChanges {
			prospective: vec![
				(vec![100], OverlayedValue {
					value: Some(vec![200]),
					extrinsics: Some(vec![0, 2].into_iter().collect())
				}),
				(vec![103], OverlayedValue {
					value: None,
					extrinsics: Some(vec![0, 1].into_iter().collect())
				}),
			].into_iter().collect(),
			committed: vec![
				(b":extrinsic_index".to_vec(), OverlayedValue {
					value: Some(3u32.encode()),
					extrinsics: None,
				}),
				(vec![100], OverlayedValue {
					value: Some(vec![202]),
					extrinsics: Some(vec![3].into_iter().collect())
				}),
				(vec![101], OverlayedValue {
					value: Some(vec![203]),
					extrinsics: Some(vec![1].into_iter().collect())
				}),
			].into_iter().collect(),
			changes_trie_config: Some(Configuration { digest_interval: 4, digest_levels: 2 }),
		};

		(backend, storage, changes)
	}
#[test]
fn build_changes_trie_nodes_on_non_digest_block() {
let (backend, storage, changes) = prepare_for_build();
let changes_trie_nodes = prepare_input::<_, _, _, RlpCodec>(&backend, Some(&storage), &changes, 5).unwrap();
assert_eq!(changes_trie_nodes, Some(vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 5, key: vec![100] }, vec![0, 2, 3]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 5, key: vec![101] }, vec![1]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 5, key: vec![103] }, vec![0, 1]),
]));
}
#[test]
fn build_changes_trie_nodes_on_digest_block_l1() {
let (backend, storage, changes) = prepare_for_build();
let changes_trie_nodes = prepare_input::<_, _, _, RlpCodec>(&backend, Some(&storage), &changes, 4).unwrap();
assert_eq!(changes_trie_nodes, Some(vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![100] }, vec![0, 2, 3]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![101] }, vec![1]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![103] }, vec![0, 1]),
InputPair::DigestIndex(DigestIndex { block: 4, key: vec![100] }, vec![1, 3]),
InputPair::DigestIndex(DigestIndex { block: 4, key: vec![101] }, vec![1]),
InputPair::DigestIndex(DigestIndex { block: 4, key: vec![102] }, vec![2]),
InputPair::DigestIndex(DigestIndex { block: 4, key: vec![105] }, vec![1, 3]),
]));
}
#[test]
fn build_changes_trie_nodes_on_digest_block_l2() {
let (backend, storage, changes) = prepare_for_build();
let changes_trie_nodes = prepare_input::<_, _, _, RlpCodec>(&backend, Some(&storage), &changes, 16).unwrap();
assert_eq!(changes_trie_nodes, Some(vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16, key: vec![100] }, vec![0, 2, 3]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16, key: vec![101] }, vec![1]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 16, key: vec![103] }, vec![0, 1]),
InputPair::DigestIndex(DigestIndex { block: 16, key: vec![100] }, vec![4]),
InputPair::DigestIndex(DigestIndex { block: 16, key: vec![101] }, vec![4]),
InputPair::DigestIndex(DigestIndex { block: 16, key: vec![102] }, vec![4]),
InputPair::DigestIndex(DigestIndex { block: 16, key: vec![103] }, vec![4]),
InputPair::DigestIndex(DigestIndex { block: 16, key: vec![105] }, vec![4, 8]),
]));
}
#[test]
fn build_changes_trie_nodes_ignores_temporary_storage_values() {
let (backend, storage, mut changes) = prepare_for_build();
// 110: missing from backend, set to None in overlay
changes.prospective.insert(vec![110], OverlayedValue {
value: None,
extrinsics: Some(vec![1].into_iter().collect())
});
let changes_trie_nodes = prepare_input::<_, _, _, RlpCodec>(&backend, Some(&storage), &changes, 4).unwrap();
assert_eq!(changes_trie_nodes, Some(vec![
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![100] }, vec![0, 2, 3]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![101] }, vec![1]),
InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 4, key: vec![103] }, vec![0, 1]),
InputPair::DigestIndex(DigestIndex { block: 4, key: vec![100] }, vec![1, 3]),
InputPair::DigestIndex(DigestIndex { block: 4, key: vec![101] }, vec![1]),
InputPair::DigestIndex(DigestIndex { block: 4, key: vec![102] }, vec![2]),
InputPair::DigestIndex(DigestIndex { block: 4, key: vec![105] }, vec![1, 3]),
]));
}
}
@@ -0,0 +1,212 @@
// Copyright 2017 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! Structures and functions to return blocks whose changes are to be included
//! in a given block's changes trie.
use changes_trie::Configuration;
/// Returns iterator of OTHER blocks that are required for inclusion into
/// changes trie of given block.
pub fn digest_build_iterator(config: &Configuration, block: u64) -> DigestBuildIterator {
	// No digest is ever built for the genesis block, when digests are
	// disabled (zero levels), or when the interval is trivial.
	if block == 0 || config.digest_interval <= 1 || config.digest_levels == 0 {
		return DigestBuildIterator::empty();
	}

	// A (level1) digest is only built at blocks aligned to digest_interval.
	let mut interval = config.digest_interval;
	if block % interval != 0 {
		return DigestBuildIterator::empty();
	}

	// The block hosts at least a level1-digest => climb to the highest digest
	// level this block is aligned to, stopping on overflow or misalignment.
	let mut level = 1u32;
	let mut step = 1u64;
	while level < config.digest_levels {
		match interval.checked_mul(config.digest_interval) {
			Some(wider) if block % wider == 0 => {
				step = interval;
				interval = wider;
				level += 1;
			},
			_ => break,
		}
	}

	DigestBuildIterator::new(block, config.digest_interval, step)
}
/// Changes trie build iterator that returns numbers of OTHER blocks that are
/// required for inclusion into changes trie of given block.
#[derive(Debug)]
pub struct DigestBuildIterator {
	/// Block we're building changes trie for.
	block: u64,
	/// Interval for creation digest blocks.
	digest_interval: u64,
	/// Step of current blocks range.
	current_step: u64,
	/// Current blocks range.
	current_range: Option<::std::iter::StepBy<::std::ops::Range<u64>>>,
	/// Max step of blocks range.
	max_step: u64,
}

impl DigestBuildIterator {
	/// Create new digest build iterator.
	pub fn new(block: u64, digest_interval: u64, max_step: u64) -> Self {
		DigestBuildIterator {
			block,
			digest_interval,
			max_step,
			current_step: 0,
			current_range: None,
		}
	}

	/// Create empty digest build iterator.
	pub fn empty() -> Self {
		// max_step == 0 means the very first next() already exceeds the limit.
		Self::new(0, 0, 0)
	}
}

impl Iterator for DigestBuildIterator {
	type Item = u64;

	fn next(&mut self) -> Option<Self::Item> {
		// Drain the currently active blocks range first.
		if let Some(block) = self.current_range.as_mut().and_then(|range| range.next()) {
			return Some(block);
		}

		// Current range exhausted => widen the step (each level multiplies it
		// by digest_interval), or stop once we'd exceed max_step.
		//
		// We are safe to use non-checking mul/sub versions here because
		// DigestBuildIterator is created only by an internal function that is
		// checking that all multiplications/subtractions are safe within the
		// max_step limit.
		let step = match self.current_step {
			0 => 1,
			current => current * self.digest_interval,
		};
		if step > self.max_step {
			return None;
		}
		self.current_step = step;

		// New range covers the previous digest_interval points at this step,
		// ending just before `block` itself.
		let first = self.block - step * self.digest_interval + step;
		let mut range = (first..self.block).step_by(step as usize);
		let head = range.next()
			.expect("X - I^(N+1) + I^N > X when X,I,N are > 1; qed");
		self.current_range = Some(range);
		Some(head)
	}
}
#[cfg(test)]
mod tests {
	use super::*;

	// Thin wrapper so tests can pass the config fields directly.
	fn digest_build_iterator(digest_interval: u64, digest_levels: u32, block: u64) -> DigestBuildIterator {
		super::digest_build_iterator(&Configuration { digest_interval, digest_levels }, block)
	}

	// Exposes the iterator's internal parameters for assertion.
	fn digest_build_iterator_basic(digest_interval: u64, digest_levels: u32, block: u64) -> (u64, u64, u64) {
		let iter = digest_build_iterator(digest_interval, digest_levels, block);
		(iter.block, iter.digest_interval, iter.max_step)
	}

	// Collects all block numbers the iterator yields.
	fn digest_build_iterator_blocks(digest_interval: u64, digest_levels: u32, block: u64) -> Vec<u64> {
		digest_build_iterator(digest_interval, digest_levels, block).collect()
	}

	#[test]
	fn suggest_digest_inclusion_returns_empty_iterator() {
		let empty = (0, 0, 0);
		assert_eq!(digest_build_iterator_basic(4, 16, 0), empty, "block is 0");
		assert_eq!(digest_build_iterator_basic(0, 16, 64), empty, "digest_interval is 0");
		assert_eq!(digest_build_iterator_basic(1, 16, 64), empty, "digest_interval is 1");
		assert_eq!(digest_build_iterator_basic(4, 0, 64), empty, "digest_levels is 0");
		assert_eq!(digest_build_iterator_basic(4, 16, 1), empty, "digest is not required for this block");
		assert_eq!(digest_build_iterator_basic(4, 16, 2), empty, "digest is not required for this block");
		assert_eq!(digest_build_iterator_basic(4, 16, 15), empty, "digest is not required for this block");
		assert_eq!(digest_build_iterator_basic(4, 16, 17), empty, "digest is not required for this block");
		assert_eq!(digest_build_iterator_basic(::std::u64::MAX / 2 + 1, 16, ::std::u64::MAX), empty, "digest_interval * 2 is greater than u64::MAX");
	}

	#[test]
	fn suggest_digest_inclusion_returns_level1_iterator() {
		// max_step == 1 means only raw (non-digest) blocks are referenced.
		assert_eq!(digest_build_iterator_basic(16, 1, 16), (16, 16, 1), "!(block % interval) && first digest level == block");
		assert_eq!(digest_build_iterator_basic(16, 1, 256), (256, 16, 1), "!(block % interval^2), but there's only 1 digest level");
		assert_eq!(digest_build_iterator_basic(16, 2, 32), (32, 16, 1), "second level digest is not required for this block");
		assert_eq!(digest_build_iterator_basic(16, 3, 4080), (4080, 16, 1), "second && third level digest are not required for this block");
	}

	#[test]
	fn suggest_digest_inclusion_returns_level2_iterator() {
		// max_step == interval means level1 digest blocks are also referenced.
		assert_eq!(digest_build_iterator_basic(16, 2, 256), (256, 16, 16), "second level digest");
		assert_eq!(digest_build_iterator_basic(16, 2, 4096), (4096, 16, 16), "!(block % interval^3), but there's only 2 digest levels");
	}

	#[test]
	fn suggest_digest_inclusion_returns_level3_iterator() {
		// max_step == interval^2 means level2 digest blocks are also referenced.
		assert_eq!(digest_build_iterator_basic(16, 3, 4096), (4096, 16, 256), "third level digest: beginning");
		assert_eq!(digest_build_iterator_basic(16, 3, 8192), (8192, 16, 256), "third level digest: next");
	}

	#[test]
	fn digest_iterator_returns_level1_blocks() {
		assert_eq!(digest_build_iterator_blocks(16, 1, 16),
			vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]);
		assert_eq!(digest_build_iterator_blocks(16, 1, 256),
			vec![241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255]);
		assert_eq!(digest_build_iterator_blocks(16, 2, 32),
			vec![17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]);
		assert_eq!(digest_build_iterator_blocks(16, 3, 4080),
			vec![4065, 4066, 4067, 4068, 4069, 4070, 4071, 4072, 4073, 4074, 4075, 4076, 4077, 4078, 4079]);
	}

	#[test]
	fn digest_iterator_returns_level1_and_level2_blocks() {
		assert_eq!(digest_build_iterator_blocks(16, 2, 256),
			vec![
				// level2 is a level1 digest of 16-1 previous blocks:
				241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255,
				// level2 points to previous 16-1 level1 digests:
				16, 32, 48, 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, 224, 240,
			],
		);
		assert_eq!(digest_build_iterator_blocks(16, 2, 4096),
			vec![
				// level2 is a level1 digest of 16-1 previous blocks:
				4081, 4082, 4083, 4084, 4085, 4086, 4087, 4088, 4089, 4090, 4091, 4092, 4093, 4094, 4095,
				// level2 points to previous 16-1 level1 digests:
				3856, 3872, 3888, 3904, 3920, 3936, 3952, 3968, 3984, 4000, 4016, 4032, 4048, 4064, 4080,
			],
		);
	}

	#[test]
	fn digest_iterator_returns_level1_and_level2_and_level3_blocks() {
		assert_eq!(digest_build_iterator_blocks(16, 3, 4096),
			vec![
				// level3 is a level1 digest of 16-1 previous blocks:
				4081, 4082, 4083, 4084, 4085, 4086, 4087, 4088, 4089, 4090, 4091, 4092, 4093, 4094, 4095,
				// level3 points to previous 16-1 level1 digests:
				3856, 3872, 3888, 3904, 3920, 3936, 3952, 3968, 3984, 4000, 4016, 4032, 4048, 4064, 4080,
				// level3 points to previous 16-1 level2 digests:
				256, 512, 768, 1024, 1280, 1536, 1792, 2048, 2304, 2560, 2816, 3072, 3328, 3584, 3840,
			],
		);
	}
}
@@ -0,0 +1,453 @@
// Copyright 2017 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! Functions + iterator that traverses changes tries and returns all
//! (block, extrinsic) pairs where given key has been changed.
use std::cell::RefCell;
use std::collections::VecDeque;
use codec::{Decode, Encode};
use hashdb::{HashDB, Hasher};
use heapsize::HeapSizeOf;
use memorydb::MemoryDB;
use patricia_trie::{NodeCodec, Recorder};
use changes_trie::{Configuration, Storage};
use changes_trie::input::{DigestIndex, ExtrinsicIndex, DigestIndexValue, ExtrinsicIndexValue};
use changes_trie::storage::{TrieBackendAdapter, InMemoryStorage};
use proving_backend::ProvingBackendEssence;
use trie_backend_essence::{TrieBackendEssence};
/// Return changes of given key at given blocks range.
/// `max` is the number of best known block.
pub fn key_changes<S: Storage<H>, H: Hasher, C: NodeCodec<H>>(
	config: &Configuration,
	storage: &S,
	begin: u64,
	end: u64,
	max: u64,
	key: &[u8],
) -> Result<Vec<(u64, u32)>, String> where H::Out: HeapSizeOf {
	// Validate the range and compute the top-level digest points first.
	let surface = surface_iterator(config, max, begin, end)?;
	// The same storage serves both trie roots and trie nodes here.
	let essence = DrilldownIteratorEssence {
		key,
		roots_storage: storage,
		storage,
		surface,
		extrinsics: Default::default(),
		blocks: Default::default(),
		_hasher: ::std::marker::PhantomData::<H>::default(),
	};
	DrilldownIterator {
		essence,
		_codec: ::std::marker::PhantomData::<C>::default(),
	}.collect()
}
/// Returns proof of changes of given key at given blocks range.
/// `max` is the number of best known block.
pub fn key_changes_proof<S: Storage<H>, H: Hasher, C: NodeCodec<H>>(
	config: &Configuration,
	storage: &S,
	begin: u64,
	end: u64,
	max: u64,
	key: &[u8],
) -> Result<Vec<Vec<u8>>, String> where H::Out: HeapSizeOf {
	let mut iter = ProvingDrilldownIterator {
		essence: DrilldownIteratorEssence {
			key,
			// shared references are Copy => no clone() needed here
			roots_storage: storage,
			storage,
			surface: surface_iterator(config, max, begin, end)?,
			extrinsics: Default::default(),
			blocks: Default::default(),
			_hasher: ::std::marker::PhantomData::<H>::default(),
		},
		proof_recorder: Default::default(),
		_codec: ::std::marker::PhantomData::<C>::default(),
	};

	// Drive the iterator to completion so the recorder observes every trie
	// node touched during drilldown; abort on the first storage error.
	while let Some(item) = iter.next() {
		item?;
	}

	Ok(iter.extract_proof())
}
/// Check key changes proof and return changes of the key at given blocks range.
/// `max` is the number of best known block.
pub fn key_changes_proof_check<S: Storage<H>, H: Hasher, C: NodeCodec<H>>(
	config: &Configuration,
	roots_storage: &S, // TODO: roots_storage is only used to read trie roots; introduce a narrower roots-only trait
	proof: Vec<Vec<u8>>,
	begin: u64,
	end: u64,
	max: u64,
	key: &[u8]
) -> Result<Vec<(u64, u32)>, String> where H::Out: HeapSizeOf {
	// Load all proof nodes into an in-memory DB: trie nodes are then read from
	// the proof only, while roots still come from `roots_storage`.
	let mut proof_db = MemoryDB::<H>::new();
	for item in proof {
		proof_db.insert(&item);
	}

	let proof_db = InMemoryStorage::with_db(proof_db);
	DrilldownIterator {
		essence: DrilldownIteratorEssence {
			key,
			roots_storage,
			storage: &proof_db,
			surface: surface_iterator(config, max, begin, end)?,
			extrinsics: Default::default(),
			blocks: Default::default(),
			_hasher: ::std::marker::PhantomData::<H>::default(),
		},
		_codec: ::std::marker::PhantomData::<C>::default(),
	}.collect()
}
/// Surface iterator - only traverses top-level digests from given range and tries to find
/// all digest changes for the key.
pub struct SurfaceIterator<'a> {
	config: &'a Configuration,
	// First block of the queried range.
	begin: u64,
	// Number of best known block.
	max: u64,
	// Next block (digest point) to yield; None once exhausted.
	current: Option<u64>,
	// First block covered by the digest range currently being walked.
	current_begin: u64,
	// Distance between consecutive digest points at the current level.
	digest_step: u64,
	// Digest level of the current range (0 = plain blocks).
	digest_level: u32,
}
impl<'a> Iterator for SurfaceIterator<'a> {
	type Item = Result<(u64, u32), String>;

	fn next(&mut self) -> Option<Self::Item> {
		// `current` is yielded this turn; the rest of the body pre-computes
		// the NEXT point before returning it.
		let current = self.current?;
		let digest_level = self.digest_level;

		if current < self.digest_step {
			// Stepping back would underflow => nothing after this item.
			self.current = None;
		}
		else {
			let next = current - self.digest_step;
			if next == 0 || next < self.begin {
				// Genesis (0) has no changes trie; below `begin` is out of range.
				self.current = None;
			}
			else if next > self.current_begin {
				// Still inside the current digest range => same step/level.
				self.current = Some(next);
			} else {
				// Left the current range => recompute the highest-level digest
				// parameters covering `next`.
				let (current, current_begin, digest_step, digest_level) = match
					lower_bound_max_digest(self.config, self.max, self.begin, next) {
					Err(err) => return Some(Err(err)),
					Ok(range) => range,
				};

				self.current = Some(current);
				self.current_begin = current_begin;
				self.digest_step = digest_step;
				self.digest_level = digest_level;
			}
		}

		Some(Ok((current, digest_level)))
	}
}
/// Drilldown iterator - receives 'digest points' from surface iterator and explores
/// every point until extrinsic is found.
pub struct DrilldownIteratorEssence<'a, RS: 'a + Storage<H>, S: 'a + Storage<H>, H: Hasher> {
	// Storage key whose changes are being searched for.
	key: &'a [u8],
	// Source of changes-trie roots (may differ from `storage` when checking proofs).
	roots_storage: &'a RS,
	// Source of changes-trie nodes.
	storage: &'a S,
	// Produces the top-level (block, digest_level) points to explore.
	surface: SurfaceIterator<'a>,
	// Pending (block, extrinsic index) results, ready to be yielded.
	extrinsics: VecDeque<(u64, u32)>,
	// Pending (block, digest_level) points still to be explored.
	blocks: VecDeque<(u64, u32)>,
	_hasher: ::std::marker::PhantomData<H>,
}
impl<'a, RS: 'a + Storage<H>, S: Storage<H>, H: Hasher> DrilldownIteratorEssence<'a, RS, S, H> {
	/// Returns the next (block, extrinsic index) pair where `self.key` changed,
	/// reading trie values through the caller-supplied `trie_reader`.
	pub fn next<F>(&mut self, trie_reader: F) -> Option<Result<(u64, u32), String>>
		where
			F: FnMut(&S, H::Out, &[u8]) -> Result<Option<Vec<u8>>, String>,
	{
		// Adapt Result<Option<..>> (internal) to Option<Result<..>> (Iterator).
		match self.do_next(trie_reader) {
			Ok(Some(res)) => Some(Ok(res)),
			Ok(None) => None,
			Err(err) => Some(Err(err)),
		}
	}

	fn do_next<F>(&mut self, mut trie_reader: F) -> Result<Option<(u64, u32)>, String>
		where
			F: FnMut(&S, H::Out, &[u8]) -> Result<Option<Vec<u8>>, String>,
	{
		loop {
			// 1) Yield already-discovered extrinsics first.
			if let Some((block, extrinsic)) = self.extrinsics.pop_front() {
				return Ok(Some((block, extrinsic)));
			}

			// 2) Explore the next queued block: read its changes trie and
			// enqueue both direct extrinsic hits and lower-level digest blocks.
			if let Some((block, level)) = self.blocks.pop_front() {
				// A missing root means there's no changes trie for this block
				// => it is silently skipped.
				if let Some(trie_root) = self.roots_storage.root(block)? {
					let extrinsics_key = ExtrinsicIndex { block, key: self.key.to_vec() }.encode();
					let extrinsics = trie_reader(&self.storage, trie_root, &extrinsics_key);
					if let Some(extrinsics) = extrinsics? {
						let extrinsics: Option<ExtrinsicIndexValue> = Decode::decode(&mut &extrinsics[..]);
						if let Some(extrinsics) = extrinsics {
							// rev() so pop_front preserves descending order within the block.
							self.extrinsics.extend(extrinsics.into_iter().rev().map(|e| (block, e)));
						}
					}

					let blocks_key = DigestIndex { block, key: self.key.to_vec() }.encode();
					let blocks = trie_reader(&self.storage, trie_root, &blocks_key);
					if let Some(blocks) = blocks? {
						let blocks: Option<DigestIndexValue> = Decode::decode(&mut &blocks[..]);
						if let Some(blocks) = blocks {
							// Referenced blocks are one digest level lower.
							self.blocks.extend(blocks.into_iter().rev().map(|b| (b, level - 1)));
						}
					}
				}

				continue;
			}

			// 3) Both queues empty: pull the next surface point, or finish.
			match self.surface.next() {
				Some(Ok(block)) => self.blocks.push_back(block),
				Some(Err(err)) => return Err(err),
				None => return Ok(None),
			}
		}
	}
}
/// Exploring drilldown operator.
// Non-proving wrapper: reads tries directly, records nothing.
struct DrilldownIterator<'a, RS: 'a + Storage<H>, S: 'a + Storage<H>, H: Hasher, C: NodeCodec<H>> {
	essence: DrilldownIteratorEssence<'a, RS, S, H>,
	_codec: ::std::marker::PhantomData<C>,
}
impl<'a, RS: 'a + Storage<H>, S: Storage<H>, H: Hasher, C: NodeCodec<H>> Iterator for DrilldownIterator<'a, RS, S, H, C> where H::Out: HeapSizeOf {
	type Item = Result<(u64, u32), String>;

	fn next(&mut self) -> Option<Self::Item> {
		// Plain (non-proving) reads: build a trie backend over the storage
		// adapter and look the key up directly.
		self.essence.next(|storage, root, key| {
			let backend = TrieBackendEssence::<_, H, C>::new(TrieBackendAdapter::new(storage), root);
			backend.storage(key)
		})
	}
}
/// Proving drilldown iterator.
// Same traversal as DrilldownIterator, but every touched trie node is captured
// by `proof_recorder` so a proof can be extracted afterwards.
struct ProvingDrilldownIterator<'a, RS: 'a + Storage<H>, S: 'a + Storage<H>, H: Hasher, C: NodeCodec<H>> {
	essence: DrilldownIteratorEssence<'a, RS, S, H>,
	// RefCell because the recorder is mutated from within the read closure.
	proof_recorder: RefCell<Recorder<H::Out>>,
	_codec: ::std::marker::PhantomData<C>,
}
impl<'a, RS: 'a + Storage<H>, S: Storage<H>, H: Hasher, C: NodeCodec<H>> ProvingDrilldownIterator<'a, RS, S, H, C> {
	/// Consume the iterator, extracting the gathered proof in lexicographical order
	/// by value.
	pub fn extract_proof(self) -> Vec<Vec<u8>> {
		let recorder = self.proof_recorder.into_inner();
		recorder.drain()
			.into_iter()
			.map(|record| record.data.to_vec())
			.collect()
	}
}
impl<'a, RS: 'a + Storage<H>, S: Storage<H>, H: Hasher, C: NodeCodec<H>> Iterator for ProvingDrilldownIterator<'a, RS, S, H, C> where H::Out: HeapSizeOf {
	type Item = Result<(u64, u32), String>;

	fn next(&mut self) -> Option<Self::Item> {
		// Borrow the recorder for the whole call; the closure below never
		// re-enters this iterator, so the RefCell borrow cannot conflict.
		let proof_recorder = &mut *self.proof_recorder.try_borrow_mut()
			.expect("only fails when already borrowed; storage() is non-reentrant; qed");
		// Same lookup as the plain iterator, but via the proving backend so
		// every visited trie node lands in `proof_recorder`.
		self.essence.next(|storage, root, key|
			ProvingBackendEssence::<_, H, C> {
				backend: &TrieBackendEssence::new(TrieBackendAdapter::new(storage), root),
				proof_recorder,
			}.storage(key))
	}
}
/// Returns surface iterator for given range of blocks.
// Seeds the iterator with the highest-level digest range covering `end`;
// subsequent ranges are recomputed lazily in SurfaceIterator::next().
fn surface_iterator<'a>(config: &'a Configuration, max: u64, begin: u64, end: u64) -> Result<SurfaceIterator<'a>, String> {
	let (current, current_begin, digest_step, digest_level) = lower_bound_max_digest(config, max, begin, end)?;
	Ok(SurfaceIterator {
		config,
		begin,
		max,
		current: Some(current),
		current_begin,
		digest_step,
		digest_level,
	})
}
/// Returns parameters of highest level digest block that includes the end of given range
/// and tends to include the whole range.
///
/// Returned tuple is (current, current_begin, digest_step, digest_level) — the
/// digest point to start from, the first block its range covers, the distance
/// between points at that level, and the level itself.
fn lower_bound_max_digest(
	config: &Configuration,
	max: u64,
	begin: u64,
	end: u64,
) -> Result<(u64, u64, u64, u32), String> {
	if end > max || begin > end {
		return Err("invalid changes range".into());
	}

	let mut level = 0u32;
	let mut step = 1u64;
	let mut interval = 0u64;
	let mut current = end;
	let mut current_begin = begin;
	if begin != end {
		// Try to widen the digest one level at a time, as long as the wider
		// digest block has actually been created (i.e. does not exceed `max`).
		while level != config.digest_levels {
			let wider_level = level + 1;
			let wider_step = step * config.digest_interval;
			// At the first iteration `interval` is still 0 and must act as 1.
			let wider_interval = config.digest_interval * if interval == 0 { 1 } else { interval };

			// Range of the wider-level digest that contains `current`.
			let digest_begin = ((current - 1) / wider_interval) * wider_interval;
			let digest_end = digest_begin + wider_interval;
			if digest_end > max {
				// The wider digest block does not exist yet; still narrow
				// `current_begin` if the existing range starts after `begin`.
				if begin < digest_begin {
					current_begin = digest_begin;
				}
				break;
			}

			level = wider_level;
			step = wider_step;
			interval = wider_interval;
			// The digest point itself is the last block of its range.
			current = digest_end;
			current_begin = digest_begin;

			// Stop once the whole requested range is covered.
			if digest_begin <= begin && digest_end >= end {
				break;
			}
		}
	}

	Ok((
		current,
		current_begin,
		step,
		level,
	))
}
#[cfg(test)]
mod tests {
	use primitives::{Blake2Hasher, RlpCodec};
	use changes_trie::input::InputPair;
	use changes_trie::storage::InMemoryStorage;
	use super::*;

	/// Config (interval=4, levels=2) plus storage where key [42] changed at
	/// blocks 3, 6 and 8, with digests at 4, 8 and 16.
	fn prepare_for_drilldown() -> (Configuration, InMemoryStorage<Blake2Hasher>) {
		let config = Configuration { digest_interval: 4, digest_levels: 2 };
		let backend = InMemoryStorage::with_inputs::<RlpCodec>(vec![
			// digest: 1..4 => [(3, 0)]
			(1, vec![]),
			(2, vec![]),
			(3, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 3, key: vec![42] }, vec![0]),
			]),
			(4, vec![
				InputPair::DigestIndex(DigestIndex { block: 4, key: vec![42] }, vec![3]),
			]),
			// digest: 5..8 => [(6, 3), (8, 1+2)]
			(5, vec![]),
			(6, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 6, key: vec![42] }, vec![3]),
			]),
			(7, vec![]),
			(8, vec![
				InputPair::ExtrinsicIndex(ExtrinsicIndex { block: 8, key: vec![42] }, vec![1, 2]),
				InputPair::DigestIndex(DigestIndex { block: 8, key: vec![42] }, vec![6]),
			]),
			// digest: 9..12 => []
			(9, vec![]),
			(10, vec![]),
			(11, vec![]),
			(12, vec![]),
			// digest: 0..16 => [4, 8]
			(13, vec![]),
			(14, vec![]),
			(15, vec![]),
			(16, vec![
				InputPair::DigestIndex(DigestIndex { block: 16, key: vec![42] }, vec![4, 8]),
			]),
		]);

		(config, backend)
	}

	#[test]
	fn drilldown_iterator_works() {
		// Results come back newest-first: (block, extrinsic index).
		let (config, storage) = prepare_for_drilldown();
		let drilldown_result = key_changes::<InMemoryStorage<Blake2Hasher>, Blake2Hasher, RlpCodec>(
			&config, &storage, 0, 100, 1000, &[42]);
		assert_eq!(drilldown_result, Ok(vec![(8, 2), (8, 1), (6, 3), (3, 0)]));
	}

	#[test]
	fn drilldown_iterator_fails_when_storage_fails() {
		// Roots survive clear_storage(), but trie node reads must now error.
		let (config, storage) = prepare_for_drilldown();
		storage.clear_storage();

		assert!(key_changes::<InMemoryStorage<Blake2Hasher>, Blake2Hasher, RlpCodec>(
			&config, &storage, 0, 100, 1000, &[42]).is_err());
	}

	#[test]
	fn drilldown_iterator_fails_when_range_is_invalid() {
		let (config, storage) = prepare_for_drilldown();
		// end > max
		assert!(key_changes::<InMemoryStorage<Blake2Hasher>, Blake2Hasher, RlpCodec>(
			&config, &storage, 0, 100, 50, &[42]).is_err());
		// begin > end
		assert!(key_changes::<InMemoryStorage<Blake2Hasher>, Blake2Hasher, RlpCodec>(
			&config, &storage, 20, 10, 100, &[42]).is_err());
	}

	#[test]
	fn proving_drilldown_iterator_works() {
		// happens on remote full node:

		// create drilldown iterator that records all trie nodes during drilldown
		let (remote_config, remote_storage) = prepare_for_drilldown();
		let remote_proof = key_changes_proof::<InMemoryStorage<Blake2Hasher>, Blake2Hasher, RlpCodec>(
			&remote_config, &remote_storage,
			0, 100, 1000, &[42]).unwrap();

		// happens on local light node:

		// create drilldown iterator that works the same, but only depends on trie
		let (local_config, local_storage) = prepare_for_drilldown();
		local_storage.clear_storage();
		let local_result = key_changes_proof_check::<InMemoryStorage<Blake2Hasher>, Blake2Hasher, RlpCodec>(
			&local_config, &local_storage, remote_proof,
			0, 100, 1000, &[42]);

		// check that drilldown result is the same as if it was happening at the full node
		assert_eq!(local_result, Ok(vec![(8, 2), (8, 1), (6, 3), (3, 0)]));
	}
}
@@ -0,0 +1,149 @@
// Copyright 2017 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! Different types of changes trie input pairs.
use codec::{Decode, Encode, Input, Output};
/// Key of { changed key => set of extrinsic indices } mapping.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExtrinsicIndex {
	/// Block at which this key has been inserted in the trie.
	pub block: u64,
	/// Storage key this node is responsible for.
	pub key: Vec<u8>,
}

/// Value of { changed key => set of extrinsic indices } mapping.
pub type ExtrinsicIndexValue = Vec<u32>;
/// Key of { changed key => block/digest block numbers } mapping.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct DigestIndex {
	/// Block at which this key has been inserted in the trie.
	pub block: u64,
	/// Storage key this node is responsible for.
	pub key: Vec<u8>,
}

/// Value of { changed key => block/digest block numbers } mapping.
pub type DigestIndexValue = Vec<u64>;
/// Single input pair of changes trie.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum InputPair {
	/// Element of { key => set of extrinsics where key has been changed } element mapping.
	ExtrinsicIndex(ExtrinsicIndex, ExtrinsicIndexValue),
	/// Element of { key => set of blocks/digest blocks where key has been changed } element mapping.
	DigestIndex(DigestIndex, DigestIndexValue),
}
/// Single input key of changes trie.
// Key-only counterpart of InputPair; this is what gets encoded/decoded.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum InputKey {
	/// Key of { key => set of extrinsics where key has been changed } element mapping.
	ExtrinsicIndex(ExtrinsicIndex),
	/// Key of { key => set of blocks/digest blocks where key has been changed } element mapping.
	DigestIndex(DigestIndex),
}
// Converts a pair into its encoded (trie key, trie value) representation.
// NOTE(review): written as `Into` rather than the usually-preferred `From`
// because `impl From<InputPair> for (Vec<u8>, Vec<u8>)` targets a foreign
// type, which the coherence rules of this era of rustc reject.
impl Into<(Vec<u8>, Vec<u8>)> for InputPair {
	fn into(self) -> (Vec<u8>, Vec<u8>) {
		match self {
			InputPair::ExtrinsicIndex(key, value) => (key.encode(), value.encode()),
			InputPair::DigestIndex(key, value) => (key.encode(), value.encode()),
		}
	}
}
/// Drops the value part, keeping only the key of the pair.
// `From` is the idiomatic direction here (both types are local); the blanket
// `impl<T, U: From<T>> Into<U> for T` keeps every existing `.into()` call working.
impl From<InputPair> for InputKey {
	fn from(pair: InputPair) -> InputKey {
		match pair {
			InputPair::ExtrinsicIndex(key, _) => InputKey::ExtrinsicIndex(key),
			InputPair::DigestIndex(key, _) => InputKey::DigestIndex(key),
		}
	}
}
impl ExtrinsicIndex {
	/// Returns the serialized prefix shared by all `ExtrinsicIndex` keys of the
	/// given block, i.e. the encoding without the storage-key suffix.
	pub fn key_neutral_prefix(block: u64) -> Vec<u8> {
		// Tag byte 1 must stay in sync with `Encode for ExtrinsicIndex`.
		::std::iter::once(1u8).chain(block.encode()).collect()
	}
}
impl Encode for ExtrinsicIndex {
	fn encode_to<W: Output>(&self, dest: &mut W) {
		// Tag byte 1 identifies ExtrinsicIndex keys; must match
		// `Decode for InputKey` and `key_neutral_prefix`.
		dest.push_byte(1);
		self.block.encode_to(dest);
		self.key.encode_to(dest);
	}
}
impl DigestIndex {
	/// Returns the serialized prefix shared by all `DigestIndex` keys of the
	/// given block, i.e. the encoding without the storage-key suffix.
	pub fn key_neutral_prefix(block: u64) -> Vec<u8> {
		// Tag byte 2 must stay in sync with `Encode for DigestIndex`.
		::std::iter::once(2u8).chain(block.encode()).collect()
	}
}
impl Encode for DigestIndex {
	fn encode_to<W: Output>(&self, dest: &mut W) {
		// Tag byte 2 identifies DigestIndex keys; must match
		// `Decode for InputKey` and `key_neutral_prefix`.
		dest.push_byte(2);
		self.block.encode_to(dest);
		self.key.encode_to(dest);
	}
}
impl Decode for InputKey {
	fn decode<I: Input>(input: &mut I) -> Option<Self> {
		// Dispatch on the tag byte written by the Encode impls above.
		match input.read_byte()? {
			1 => Some(InputKey::ExtrinsicIndex(ExtrinsicIndex {
				block: Decode::decode(input)?,
				key: Decode::decode(input)?,
			})),
			2 => Some(InputKey::DigestIndex(DigestIndex {
				block: Decode::decode(input)?,
				key: Decode::decode(input)?,
			})),
			// Unknown tag => decoding failure.
			_ => None,
		}
	}
}
#[cfg(test)]
mod tests {
	use super::*;

	// Round-trip: encoding an ExtrinsicIndex and decoding as InputKey must
	// yield the ExtrinsicIndex variant.
	#[test]
	fn extrinsic_index_serialized_and_deserialized() {
		let original = ExtrinsicIndex { block: 777, key: vec![42] };
		let serialized = original.encode();
		let deserialized: InputKey = Decode::decode(&mut &serialized[..]).unwrap();
		assert_eq!(InputKey::ExtrinsicIndex(original), deserialized);
	}

	// Round-trip: encoding a DigestIndex and decoding as InputKey must yield
	// the DigestIndex variant.
	#[test]
	fn digest_index_serialized_and_deserialized() {
		let original = DigestIndex { block: 777, key: vec![42] };
		let serialized = original.encode();
		let deserialized: InputKey = Decode::decode(&mut &serialized[..]).unwrap();
		assert_eq!(InputKey::DigestIndex(original), deserialized);
	}
}
@@ -0,0 +1,89 @@
// Copyright 2017 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! Changes trie related structures and functions.
//!
//! Changes trie is a trie built of { storage key => extrinsics } pairs
//! at the end of each block. For every changed storage key it contains
//! a pair, mapping key to the set of extrinsics where it has been changed.
//!
//! Optionally, every N blocks, additional level1-digest nodes are appended
//! to the changes trie, containing pairs { storage key => blocks }. For every
//! storage key that has been changed in PREVIOUS N-1 blocks (except for genesis
//! block) it contains a pair, mapping this key to the set of blocks where it
//! has been changed.
//!
//! Optionally, every N^digest_level (where digest_level > 1) blocks, an additional
//! digest_level digest is created. It is built out of pairs { storage key => digest
//! block }, containing entries for every storage key that has been changed in
//! the last N^digest_level - 1 blocks (except for genesis block), mapping these keys
//! to the set of lower-level digest blocks.
mod build;
mod build_iterator;
mod changes_iterator;
mod input;
mod storage;
pub use self::storage::InMemoryStorage;
pub use self::changes_iterator::{key_changes, key_changes_proof, key_changes_proof_check};
use hashdb::{DBValue, Hasher};
use heapsize::HeapSizeOf;
use patricia_trie::NodeCodec;
use rlp::Encodable;
use backend::Backend;
use primitives;
use changes_trie::build::prepare_input;
use overlayed_changes::OverlayedChanges;
use trie_backend_essence::TrieBackendStorage;
/// Changes that are made outside of extrinsics (e.g. during block initialization
/// or finalization) are marked with this reserved extrinsic index.
pub const NO_EXTRINSIC_INDEX: u32 = 0xffffffff;
/// Changes trie storage. Provides access to trie roots and trie nodes.
pub trait Storage<H: Hasher>: Send + Sync {
/// Get changes trie root for given block. `Ok(None)` is returned when no root
/// is stored for this block.
fn root(&self, block: u64) -> Result<Option<H::Out>, String>;
/// Get a trie node value by its key (the node hash). `Ok(None)` when the node
/// is unknown to this storage.
fn get(&self, key: &H::Out) -> Result<Option<DBValue>, String>;
}
/// Changes trie configuration. Alias for the primitives-level type so that both
/// crates share a single definition.
pub type Configuration = primitives::ChangesTrieConfiguration;
/// Compute the changes trie root and transaction for given block.
/// Returns None if there's no data to perform computation
/// (e.g. when changes trie storage is not provided, or no changes trie
/// input could be prepared for this block).
pub fn compute_changes_trie_root<'a, B: Backend<H, C>, S: Storage<H>, H: Hasher, C: NodeCodec<H>>(
backend: &B,
storage: Option<&'a S>,
changes: &OverlayedChanges,
block: u64,
) -> Option<(H::Out, Vec<(Vec<u8>, Vec<u8>)>)>
where
&'a S: TrieBackendStorage<H>,
H::Out: Ord + Encodable + HeapSizeOf,
{
// Gather the changes trie input pairs for this block; storage failures here are
// fatal for the runtime, hence the expect. `?` propagates the "nothing to build" case.
let input_pairs = prepare_input::<B, S, H, C>(backend, storage, changes, block)
.expect("storage is not allowed to fail within runtime")?;
// Convert input pairs into raw (key, value) database entries — this is the
// transaction that the caller may commit to the changes trie storage.
let transaction = input_pairs.into_iter()
.map(Into::into)
.collect::<Vec<_>>();
// Compute the trie root over the same (key, value) set without building a full trie.
let root = ::triehash::trie_root::<H, _, _, _>(transaction.iter().map(|(k, v)| (&*k, &*v)));
Some((root, transaction))
}
@@ -0,0 +1,118 @@
// Copyright 2017 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! Changes trie storage utilities.
use std::collections::HashMap;
use hashdb::{Hasher, HashDB, DBValue};
use heapsize::HeapSizeOf;
use memorydb::MemoryDB;
use parking_lot::RwLock;
use changes_trie::Storage;
use trie_backend_essence::TrieBackendStorage;
#[cfg(test)]
use backend::insert_into_memory_db;
#[cfg(test)]
use patricia_trie::NodeCodec;
#[cfg(test)]
use changes_trie::input::InputPair;
/// In-memory implementation of changes trie storage.
pub struct InMemoryStorage<H: Hasher> where H::Out: HeapSizeOf {
// Roots and node database behind a lock, so `&self` methods (e.g. `insert`)
// can mutate the storage.
data: RwLock<InMemoryStorageData<H>>,
}
/// Adapter for using changes trie storage as a TrieBackendEssence' storage.
pub struct TrieBackendAdapter<'a, H: Hasher, S: 'a + Storage<H>> {
// The wrapped changes trie storage that reads are forwarded to.
storage: &'a S,
// Ties the adapter to the hasher type; holds no data.
_hasher: ::std::marker::PhantomData<H>,
}
/// Inner state of `InMemoryStorage`, protected by its lock.
struct InMemoryStorageData<H: Hasher> where H::Out: HeapSizeOf {
// Block number -> changes trie root built at that block.
roots: HashMap<u64, H::Out>,
// Backing node database holding the changes trie nodes.
mdb: MemoryDB<H>,
}
impl<H: Hasher> InMemoryStorage<H> where H::Out: HeapSizeOf {
/// Create the storage from given in-memory database.
pub fn with_db(mdb: MemoryDB<H>) -> Self {
Self {
data: RwLock::new(InMemoryStorageData {
roots: HashMap::new(),
mdb,
}),
}
}
/// Create the storage with empty database.
pub fn new() -> Self {
Self::with_db(Default::default())
}
#[cfg(test)]
pub fn with_inputs<C: NodeCodec<H>>(inputs: Vec<(u64, Vec<InputPair>)>) -> Self {
let mut mdb = MemoryDB::default();
let mut roots = HashMap::new();
for (block, pairs) in inputs {
let root = insert_into_memory_db::<H, C, _>(&mut mdb, pairs.into_iter().map(Into::into));
if let Some(root) = root {
roots.insert(block, root);
}
}
InMemoryStorage {
data: RwLock::new(InMemoryStorageData {
roots,
mdb,
}),
}
}
#[cfg(test)]
pub fn clear_storage(&self) {
self.data.write().mdb = MemoryDB::new();
}
/// Insert changes trie for given block.
pub fn insert(&self, block: u64, changes_trie_root: H::Out, trie: MemoryDB<H>) {
let mut data = self.data.write();
data.roots.insert(block, changes_trie_root);
data.mdb.consolidate(trie);
}
}
impl<H: Hasher> Storage<H> for InMemoryStorage<H> where H::Out: HeapSizeOf {
	/// Look up the remembered changes trie root for `block`.
	fn root(&self, block: u64) -> Result<Option<H::Out>, String> {
		let data = self.data.read();
		Ok(data.roots.get(&block).map(|root| root.clone()))
	}

	/// Read a trie node from the backing in-memory database.
	fn get(&self, key: &H::Out) -> Result<Option<DBValue>, String> {
		let data = self.data.read();
		Ok(HashDB::<H>::get(&data.mdb, key))
	}
}
impl<'a, H: Hasher, S: 'a + Storage<H>> TrieBackendAdapter<'a, H, S> {
pub fn new(storage: &'a S) -> Self {
Self { storage, _hasher: Default::default() }
}
}
impl<'a, H: Hasher, S: 'a + Storage<H>> TrieBackendStorage<H> for TrieBackendAdapter<'a, H, S> {
// Node reads are forwarded unchanged to the underlying changes trie storage.
fn get(&self, key: &H::Out) -> Result<Option<DBValue>, String> {
self.storage.get(key)
}
}
+154 -25
View File
@@ -18,10 +18,15 @@
use std::{error, fmt, cmp::Ord};
use backend::Backend;
use changes_trie::{Storage as ChangesTrieStorage, compute_changes_trie_root};
use {Externalities, OverlayedChanges};
use hashdb::Hasher;
use memorydb::MemoryDB;
use rlp::Encodable;
use patricia_trie::NodeCodec;
use patricia_trie::{NodeCodec, TrieDBMut, TrieMut};
use heapsize::HeapSizeOf;
const EXT_NOT_ALLOWED_TO_FAIL: &'static str = "Externalities not allowed to fail within runtime";
/// Errors that can occur when interacting with the externalities.
#[derive(Debug, Copy, Clone)]
@@ -53,64 +58,90 @@ impl<B: error::Error, E: error::Error> error::Error for Error<B, E> {
}
/// Wraps a read-only backend, call executor, and current overlayed changes.
pub struct Ext<'a, H, C, B>
pub struct Ext<'a, H, C, B, T>
where
H: Hasher,
C: NodeCodec<H>,
B: 'a + Backend<H, C>,
T: 'a + ChangesTrieStorage<H>,
{
// The overlayed changes to write to.
/// The overlayed changes to write to.
overlay: &'a mut OverlayedChanges,
// The storage backend to read from.
/// The storage backend to read from.
backend: &'a B,
// The transaction necessary to commit to the backend.
transaction: Option<(B::Transaction, H::Out)>,
/// The storage transaction necessary to commit to the backend. Is cached when
/// `storage_root` is called and the cache is cleared on every subsequent change.
storage_transaction: Option<(B::Transaction, H::Out)>,
/// Changes trie storage to read from.
changes_trie_storage: Option<&'a T>,
/// The changes trie transaction necessary to commit to the changes trie backend.
/// Set to Some when `storage_changes_root` is called. Could be replaced later
/// by calling `storage_changes_root` again => never used as cache.
/// This differs from `storage_transaction` behavior, because the moment when
/// `storage_changes_root` is called matters + we need to remember additional
/// data at this moment (block number).
changes_trie_transaction: Option<(u64, MemoryDB<H>, H::Out)>,
}
impl<'a, H, C, B> Ext<'a, H, C, B>
impl<'a, H, C, B, T> Ext<'a, H, C, B, T>
where
H: Hasher,
C: NodeCodec<H>,
B: 'a + Backend<H, C>,
H::Out: Ord + Encodable
T: 'a + ChangesTrieStorage<H>,
H::Out: Ord + Encodable + HeapSizeOf,
{
/// Create a new `Ext` from overlayed changes and read-only backend
pub fn new(overlay: &'a mut OverlayedChanges, backend: &'a B) -> Self {
pub fn new(overlay: &'a mut OverlayedChanges, backend: &'a B, changes_trie_storage: Option<&'a T>) -> Self {
Ext {
overlay,
backend,
transaction: None,
storage_transaction: None,
changes_trie_storage,
changes_trie_transaction: None,
}
}
/// Get the transaction necessary to update the backend.
pub fn transaction(mut self) -> B::Transaction {
pub fn transaction(mut self) -> (B::Transaction, Option<MemoryDB<H>>) {
let _ = self.storage_root();
self.transaction.expect("transaction always set after calling storage root; qed").0
let (storage_transaction, changes_trie_transaction) = (
self.storage_transaction
.expect("storage_transaction always set after calling storage root; qed"),
self.changes_trie_transaction
.map(|(_, tx, _)| tx),
);
(
storage_transaction.0,
changes_trie_transaction,
)
}
/// Invalidates the currently cached storage root and the db transaction.
///
/// Called when there are changes that likely will invalidate the storage root.
fn mark_dirty(&mut self) {
self.transaction = None;
self.storage_transaction = None;
}
}
#[cfg(test)]
impl<'a, H, C, B> Ext<'a, H, C, B>
impl<'a, H, C, B, T> Ext<'a, H, C, B, T>
where
H: Hasher,
C: NodeCodec<H>,
B: 'a + Backend<H,C>,
T: 'a + ChangesTrieStorage<H>,
{
pub fn storage_pairs(&self) -> Vec<(Vec<u8>, Vec<u8>)> {
use std::collections::HashMap;
self.backend.pairs().iter()
.map(|&(ref k, ref v)| (k.to_vec(), Some(v.to_vec())))
.chain(self.overlay.committed.clone().into_iter())
.chain(self.overlay.prospective.clone().into_iter())
.chain(self.overlay.committed.clone().into_iter().map(|(k, v)| (k, v.value)))
.chain(self.overlay.prospective.clone().into_iter().map(|(k, v)| (k, v.value)))
.collect::<HashMap<_, _>>()
.into_iter()
.filter_map(|(k, maybe_val)| maybe_val.map(|val| (k, val)))
@@ -118,22 +149,23 @@ where
}
}
impl<'a, B: 'a, H, C> Externalities<H> for Ext<'a, H, C, B>
impl<'a, B: 'a, T: 'a, H, C> Externalities<H> for Ext<'a, H, C, B, T>
where
H: Hasher,
C: NodeCodec<H>,
B: 'a + Backend<H, C>,
H::Out: Ord + Encodable
T: 'a + ChangesTrieStorage<H>,
H::Out: Ord + Encodable + HeapSizeOf,
{
fn storage(&self, key: &[u8]) -> Option<Vec<u8>> {
self.overlay.storage(key).map(|x| x.map(|x| x.to_vec())).unwrap_or_else(||
self.backend.storage(key).expect("Externalities not allowed to fail within runtime"))
self.backend.storage(key).expect(EXT_NOT_ALLOWED_TO_FAIL))
}
fn exists_storage(&self, key: &[u8]) -> bool {
match self.overlay.storage(key) {
Some(x) => x.is_some(),
_ => self.backend.exists_storage(key).expect("Externalities not allowed to fail within runtime"),
_ => self.backend.exists_storage(key).expect(EXT_NOT_ALLOWED_TO_FAIL),
}
}
@@ -155,17 +187,114 @@ where
}
fn storage_root(&mut self) -> H::Out {
if let Some((_, ref root)) = self.transaction {
if let Some((_, ref root)) = self.storage_transaction {
return root.clone();
}
// compute and memoize
let delta = self.overlay.committed.iter()
.chain(self.overlay.prospective.iter())
.map(|(k, v)| (k.clone(), v.clone()));
let delta = self.overlay.committed.iter().map(|(k, v)| (k.clone(), v.value.clone()))
.chain(self.overlay.prospective.iter().map(|(k, v)| (k.clone(), v.value.clone())));
let (root, transaction) = self.backend.storage_root(delta);
self.transaction = Some((transaction, root));
self.storage_transaction = Some((transaction, root));
root
}
fn storage_changes_root(&mut self, block: u64) -> Option<H::Out> {
let root_and_tx = compute_changes_trie_root::<_, T, H, C>(
self.backend,
self.changes_trie_storage.clone(),
self.overlay,
block,
);
let root_and_tx = root_and_tx.map(|(root, changes)| {
let mut calculated_root = Default::default();
let mut mdb = MemoryDB::new();
{
let mut trie = TrieDBMut::<H, C>::new(&mut mdb, &mut calculated_root);
for (key, value) in changes {
trie.insert(&key, &value).expect(EXT_NOT_ALLOWED_TO_FAIL);
}
}
(block, mdb, root)
});
let root = root_and_tx.as_ref().map(|(_, _, root)| root.clone());
self.changes_trie_transaction = root_and_tx;
root
}
}
#[cfg(test)]
mod tests {
use codec::Encode;
use primitives::{Blake2Hasher, RlpCodec};
use backend::InMemory;
use changes_trie::{Configuration as ChangesTrieConfiguration,
InMemoryStorage as InMemoryChangesTrieStorage};
use overlayed_changes::OverlayedValue;
use super::*;
type TestBackend = InMemory<Blake2Hasher, RlpCodec>;
type TestChangesTrieStorage = InMemoryChangesTrieStorage<Blake2Hasher>;
type TestExt<'a> = Ext<'a, Blake2Hasher, RlpCodec, TestBackend, TestChangesTrieStorage>;
fn prepare_overlay_with_changes() -> OverlayedChanges {
OverlayedChanges {
prospective: vec![
(b":extrinsic_index".to_vec(), OverlayedValue {
value: Some(3u32.encode()),
extrinsics: Some(vec![1].into_iter().collect())
}),
(vec![1], OverlayedValue {
value: Some(vec![100].into_iter().collect()),
extrinsics: Some(vec![1].into_iter().collect())
}),
].into_iter().collect(),
committed: Default::default(),
changes_trie_config: Some(ChangesTrieConfiguration {
digest_interval: 0,
digest_levels: 0,
}),
}
}
#[test]
fn storage_changes_root_is_none_when_storage_is_not_provided() {
let mut overlay = prepare_overlay_with_changes();
let backend = TestBackend::default();
let mut ext = TestExt::new(&mut overlay, &backend, None);
assert_eq!(ext.storage_changes_root(100), None);
}
#[test]
fn storage_changes_root_is_none_when_extrinsic_changes_are_none() {
let mut overlay = prepare_overlay_with_changes();
overlay.changes_trie_config = None;
let storage = TestChangesTrieStorage::new();
let backend = TestBackend::default();
let mut ext = TestExt::new(&mut overlay, &backend, Some(&storage));
assert_eq!(ext.storage_changes_root(100), None);
}
#[test]
fn storage_changes_root_is_some_when_extrinsic_changes_are_non_empty() {
let mut overlay = prepare_overlay_with_changes();
let storage = TestChangesTrieStorage::new();
let backend = TestBackend::default();
let mut ext = TestExt::new(&mut overlay, &backend, Some(&storage));
assert_eq!(ext.storage_changes_root(100),
Some(hex!("b2ecc5ca20de9f8a2d82482fcaa0fdfcca2fb76bf3d89860edf422bd15d075ec").into()));
}
#[test]
fn storage_changes_root_is_some_when_extrinsic_changes_are_empty() {
let mut overlay = prepare_overlay_with_changes();
overlay.prospective.get_mut(&vec![1]).unwrap().value = None;
let storage = TestChangesTrieStorage::new();
let backend = TestBackend::default();
let mut ext = TestExt::new(&mut overlay, &backend, Some(&storage));
assert_eq!(ext.storage_changes_root(100),
Some(hex!("8c12eccf80c166aefc23af540649979581cb404d95af25b0ed38dc6949ba2453").into()));
}
}
+124 -188
View File
@@ -30,15 +30,14 @@ extern crate hashdb;
extern crate memorydb;
extern crate triehash;
extern crate patricia_trie;
extern crate byteorder;
extern crate parking_lot;
extern crate rlp;
extern crate heapsize;
#[cfg(test)]
extern crate substrate_primitives as primitives;
extern crate parity_codec as codec;
use std::collections::HashMap;
use std::fmt;
use hashdb::Hasher;
use patricia_trie::NodeCodec;
@@ -47,99 +46,24 @@ use heapsize::HeapSizeOf;
use codec::Decode;
pub mod backend;
mod changes_trie;
mod ext;
mod testing;
mod overlayed_changes;
mod proving_backend;
mod trie_backend;
mod trie_backend_essence;
pub use patricia_trie::{TrieMut, TrieDBMut};
pub use testing::TestExternalities;
pub use ext::Ext;
pub use backend::Backend;
pub use trie_backend::{TryIntoTrieBackend, TrieBackend, Storage, DBValue};
/// The overlayed changes to state to be queried on top of the backend.
///
/// A transaction shares all prospective changes within an inner overlay
/// that can be cleared.
#[derive(Debug, Default, Clone)]
pub struct OverlayedChanges {
prospective: HashMap<Vec<u8>, Option<Vec<u8>>>,
committed: HashMap<Vec<u8>, Option<Vec<u8>>>,
}
impl OverlayedChanges {
/// Returns a double-Option: None if the key is unknown (i.e. and the query should be refered
/// to the backend); Some(None) if the key has been deleted. Some(Some(...)) for a key whose
/// value has been set.
pub fn storage(&self, key: &[u8]) -> Option<Option<&[u8]>> {
self.prospective.get(key)
.or_else(|| self.committed.get(key))
.map(|x| x.as_ref().map(AsRef::as_ref))
}
/// Inserts the given key-value pair into the prospective change set.
///
/// `None` can be used to delete a value specified by the given key.
fn set_storage(&mut self, key: Vec<u8>, val: Option<Vec<u8>>) {
self.prospective.insert(key, val);
}
/// Removes all key-value pairs which keys share the given prefix.
///
/// NOTE that this doesn't take place immediately but written into the prospective
/// change set, and still can be reverted by [`discard_prospective`].
///
/// [`discard_prospective`]: #method.discard_prospective
fn clear_prefix(&mut self, prefix: &[u8]) {
// Iterate over all prospective and mark all keys that share
// the given prefix as removed (None).
for (key, value) in self.prospective.iter_mut() {
if key.starts_with(prefix) {
*value = None;
}
}
// Then do the same with keys from commited changes.
// NOTE that we are making changes in the prospective change set.
for key in self.committed.keys() {
if key.starts_with(prefix) {
self.prospective.insert(key.to_owned(), None);
}
}
}
/// Discard prospective changes to state.
pub fn discard_prospective(&mut self) {
self.prospective.clear();
}
/// Commit prospective changes to state.
pub fn commit_prospective(&mut self) {
if self.committed.is_empty() {
::std::mem::swap(&mut self.prospective, &mut self.committed);
} else {
self.committed.extend(self.prospective.drain());
}
}
/// Drain committed changes to an iterator.
///
/// Panics:
/// Will panic if there are any uncommitted prospective changes.
pub fn drain<'a>(&'a mut self) -> impl Iterator<Item=(Vec<u8>, Option<Vec<u8>>)> + 'a {
assert!(self.prospective.is_empty());
self.committed.drain()
}
/// Consume `OverlayedChanges` and take committed set.
///
/// Panics:
/// Will panic if there are any uncommitted prospective changes.
pub fn into_committed(self) -> impl Iterator<Item=(Vec<u8>, Option<Vec<u8>>)> {
assert!(self.prospective.is_empty());
self.committed.into_iter()
}
}
pub use changes_trie::{Storage as ChangesTrieStorage,
InMemoryStorage as InMemoryChangesTrieStorage,
key_changes, key_changes_proof, key_changes_proof_check};
pub use overlayed_changes::OverlayedChanges;
pub use trie_backend_essence::Storage;
pub use trie_backend::{TrieBackend, DBValue};
/// State Machine Error bound.
///
@@ -155,6 +79,8 @@ impl Error for ExecutionError {}
/// and as a transition away from the pre-existing framework.
#[derive(Debug, Eq, PartialEq)]
pub enum ExecutionError {
/// Backend error.
Backend(String),
/// The entry `:code` doesn't exist in storage so there's no way we can execute anything.
CodeEntryDoesNotExist,
/// Backend is incompatible with execution proof generation process.
@@ -198,6 +124,9 @@ pub trait Externalities<H: Hasher> {
/// Get the trie root of the current storage map.
fn storage_root(&mut self) -> H::Out where H::Out: Ord + Encodable;
/// Get the change trie root of the current storage overlay at given block.
fn storage_changes_root(&mut self, block: u64) -> Option<H::Out> where H::Out: Ord + Encodable;
}
/// Code execution engine.
@@ -267,23 +196,26 @@ pub fn always_wasm<E>() -> ExecutionManager<fn(Result<Vec<u8>, E>, Result<Vec<u8
///
/// Note: changes to code will be in place if this call is made again. For running partial
/// blocks (e.g. a transaction at a time), ensure a different method is used.
pub fn execute<H, C, B, Exec>(
pub fn execute<H, C, B, T, Exec>(
backend: &B,
changes_trie_storage: Option<&T>,
overlay: &mut OverlayedChanges,
exec: &Exec,
method: &str,
call_data: &[u8],
strategy: ExecutionStrategy,
) -> Result<(Vec<u8>, B::Transaction), Box<Error>>
) -> Result<(Vec<u8>, B::Transaction, Option<memorydb::MemoryDB<H>>), Box<Error>>
where
H: Hasher,
C: NodeCodec<H>,
Exec: CodeExecutor<H>,
B: Backend<H, C>,
H::Out: Ord + Encodable
T: ChangesTrieStorage<H>,
H::Out: Ord + Encodable + HeapSizeOf,
{
execute_using_consensus_failure_handler(
backend,
changes_trie_storage,
overlay,
exec,
method,
@@ -307,38 +239,48 @@ where
///
/// Note: changes to code will be in place if this call is made again. For running partial
/// blocks (e.g. a transaction at a time), ensure a different method is used.
pub fn execute_using_consensus_failure_handler<H, C, B, Exec, Handler>(
pub fn execute_using_consensus_failure_handler<H, C, B, T, Exec, Handler>(
backend: &B,
changes_trie_storage: Option<&T>,
overlay: &mut OverlayedChanges,
exec: &Exec,
method: &str,
call_data: &[u8],
manager: ExecutionManager<Handler>,
) -> Result<(Vec<u8>, B::Transaction), Box<Error>>
) -> Result<(Vec<u8>, B::Transaction, Option<memorydb::MemoryDB<H>>), Box<Error>>
where
H: Hasher,
C: NodeCodec<H>,
Exec: CodeExecutor<H>,
B: Backend<H, C>,
H::Out: Ord + Encodable,
T: ChangesTrieStorage<H>,
H::Out: Ord + Encodable + HeapSizeOf,
Handler: FnOnce(Result<Vec<u8>, Exec::Error>, Result<Vec<u8>, Exec::Error>) -> Result<Vec<u8>, Exec::Error>
{
let strategy: ExecutionStrategy = (&manager).into();
// make a copy.
let code = ext::Ext::new(overlay, backend).storage(b":code")
let code = try_read_overlay_value(overlay, backend, b":code")?
.ok_or_else(|| Box::new(ExecutionError::CodeEntryDoesNotExist) as Box<Error>)?
.to_vec();
let heap_pages = ext::Ext::new(overlay, backend).storage(b":heappages")
let heap_pages = try_read_overlay_value(overlay, backend, b":heappages")?
.and_then(|v| u64::decode(&mut &v[..])).unwrap_or(8) as usize;
// read changes trie configuration. The reason why we're doing it here instead of the
// `OverlayedChanges` constructor is that we need proofs for this read as a part of
// proof-of-execution on light clients. And the proof is recorded by the backend which
// is created after OverlayedChanges
let changes_trie_config = try_read_overlay_value(overlay, backend, b":changes_trie")?;
set_changes_trie_config(overlay, changes_trie_config)?;
let result = {
let mut orig_prospective = overlay.prospective.clone();
let (result, was_native, delta) = {
let ((result, was_native), delta) = {
let mut externalities = ext::Ext::new(overlay, backend);
let (result, was_native, storage_delta, changes_delta) = {
let ((result, was_native), (storage_delta, changes_delta)) = {
let mut externalities = ext::Ext::new(overlay, backend, changes_trie_storage);
(
exec.call(
&mut externalities,
@@ -352,18 +294,18 @@ where
externalities.transaction()
)
};
(result, was_native, delta)
(result, was_native, storage_delta, changes_delta)
};
// run wasm separately if we did run native the first time and we're meant to run both
let (result, delta) = if let (true, ExecutionManager::Both(on_consensus_failure)) =
let (result, storage_delta, changes_delta) = if let (true, ExecutionManager::Both(on_consensus_failure)) =
(was_native, manager)
{
overlay.prospective = orig_prospective.clone();
let (wasm_result, wasm_delta) = {
let ((result, _), delta) = {
let mut externalities = ext::Ext::new(overlay, backend);
let (wasm_result, wasm_storage_delta, wasm_changes_delta) = {
let ((result, _), (storage_delta, changes_delta)) = {
let mut externalities = ext::Ext::new(overlay, backend, changes_trie_storage);
(
exec.call(
&mut externalities,
@@ -376,21 +318,21 @@ where
externalities.transaction()
)
};
(result, delta)
(result, storage_delta, changes_delta)
};
if (result.is_ok() && wasm_result.is_ok() && result.as_ref().unwrap() == wasm_result.as_ref().unwrap()/* && delta == wasm_delta*/)
|| (result.is_err() && wasm_result.is_err())
{
(result, delta)
(result, storage_delta, changes_delta)
} else {
// Consensus error.
(on_consensus_failure(wasm_result, result), wasm_delta)
(on_consensus_failure(wasm_result, result), wasm_storage_delta, wasm_changes_delta)
}
} else {
(result, delta)
(result, storage_delta, changes_delta)
};
result.map(move |out| (out, delta))
result.map(move |out| (out, storage_delta, changes_delta))
};
result.map_err(|e| Box::new(e) as _)
@@ -405,26 +347,34 @@ where
///
/// Note: changes to code will be in place if this call is made again. For running partial
/// blocks (e.g. a transaction at a time), ensure a different method is used.
pub fn prove_execution<H, C, B, Exec>(
pub fn prove_execution<B, H, C, Exec>(
backend: B,
overlay: &mut OverlayedChanges,
exec: &Exec,
method: &str,
call_data: &[u8],
) -> Result<(Vec<u8>, Vec<Vec<u8>>, <TrieBackend<H, C> as Backend<H, C>>::Transaction), Box<Error>>
) -> Result<(Vec<u8>, Vec<Vec<u8>>), Box<Error>>
where
B: Backend<H, C>,
H: Hasher,
Exec: CodeExecutor<H>,
C: NodeCodec<H>,
B: TryIntoTrieBackend<H, C>,
H::Out: Ord + Encodable + HeapSizeOf,
{
let trie_backend = backend.try_into_trie_backend()
.ok_or_else(|| Box::new(ExecutionError::UnableToGenerateProof) as Box<Error>)?;
let proving_backend = proving_backend::ProvingBackend::new(trie_backend);
let (result, transaction) = execute::<H, C, _, _>(&proving_backend, overlay, exec, method, call_data, ExecutionStrategy::NativeWhenPossible)?;
let (result, _, _) = execute::<H, C, _, changes_trie::InMemoryStorage<H>, _>(
&proving_backend,
None,
overlay,
exec,
method,
call_data,
ExecutionStrategy::NativeWhenPossible
)?;
let proof = proving_backend.extract_proof();
Ok((result, proof, transaction))
Ok((result, proof))
}
/// Check execution proof, generated by `prove_execution` call.
@@ -435,15 +385,16 @@ pub fn execution_proof_check<H, C, Exec>(
exec: &Exec,
method: &str,
call_data: &[u8],
) -> Result<(Vec<u8>, memorydb::MemoryDB<H>), Box<Error>>
) -> Result<Vec<u8>, Box<Error>>
where
H: Hasher,
C: NodeCodec<H>,
Exec: CodeExecutor<H>,
H::Out: Ord + Encodable + HeapSizeOf,
H: Hasher,
C: NodeCodec<H>,
Exec: CodeExecutor<H>,
H::Out: Ord + Encodable + HeapSizeOf,
{
let backend = proving_backend::create_proof_check_backend::<H, C>(root.into(), proof)?;
execute::<H, C, _, _>(&backend, overlay, exec, method, call_data, ExecutionStrategy::NativeWhenPossible)
execute::<H, C, _, changes_trie::InMemoryStorage<H>, _>(&backend, None, overlay, exec, method, call_data, ExecutionStrategy::NativeWhenPossible)
.map(|(result, _, _)| result)
}
/// Generate storage read proof.
@@ -452,14 +403,14 @@ pub fn prove_read<B, H, C>(
key: &[u8]
) -> Result<(Option<Vec<u8>>, Vec<Vec<u8>>), Box<Error>>
where
B: TryIntoTrieBackend<H, C>,
B: Backend<H, C>,
H: Hasher,
C: NodeCodec<H>,
H::Out: Ord + Encodable + HeapSizeOf
{
let trie_backend = backend.try_into_trie_backend()
.ok_or_else(|| Box::new(ExecutionError::UnableToGenerateProof) as Box<Error>)?;
let proving_backend = proving_backend::ProvingBackend::<H, C>::new(trie_backend);
let proving_backend = proving_backend::ProvingBackend::<_, H, C>::new(trie_backend);
let result = proving_backend.storage(key).map_err(|e| Box::new(e) as Box<Error>)?;
Ok((result, proving_backend.extract_proof()))
}
@@ -479,12 +430,46 @@ where
backend.storage(key).map_err(|e| Box::new(e) as Box<Error>)
}
/// Sets overlayed changes' changes trie configuration. Returns error if configuration
/// differs from previous OR config decode has failed.
pub(crate) fn set_changes_trie_config(overlay: &mut OverlayedChanges, config: Option<Vec<u8>>) -> Result<(), Box<Error>> {
let config = match config {
Some(v) => Some(changes_trie::Configuration::decode(&mut &v[..])
.ok_or_else(|| Box::new("Failed to decode changes trie configuration".to_owned()) as Box<Error>)?),
None => None,
};
if let Some(config) = config {
if !overlay.set_changes_trie_config(config) {
return Err(Box::new("Changes trie configuration change is not supported".to_owned()));
}
}
Ok(())
}
/// Reads storage value from overlay or from the backend.
fn try_read_overlay_value<H, C, B>(overlay: &OverlayedChanges, backend: &B, key: &[u8])
-> Result<Option<Vec<u8>>, Box<Error>>
where
H: Hasher,
C: NodeCodec<H>,
B: Backend<H, C>,
{
match overlay.storage(key).map(|x| x.map(|x| x.to_vec())) {
Some(value) => Ok(value),
None => backend.storage(key)
.map_err(|err| Box::new(ExecutionError::Backend(format!("{}", err))) as Box<Error>),
}
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use super::*;
use super::backend::InMemory;
use super::ext::Ext;
use primitives::{Blake2Hasher, RlpCodec, H256};
use super::changes_trie::InMemoryStorage as InMemoryChangesTrieStorage;
use primitives::{Blake2Hasher, RlpCodec};
struct DummyCodeExecutor {
native_available: bool,
@@ -515,69 +500,17 @@ mod tests {
impl Error for u8 {}
#[test]
fn overlayed_storage_works() {
let mut overlayed = OverlayedChanges::default();
let key = vec![42, 69, 169, 142];
assert!(overlayed.storage(&key).is_none());
overlayed.set_storage(key.clone(), Some(vec![1, 2, 3]));
assert_eq!(overlayed.storage(&key).unwrap(), Some(&[1, 2, 3][..]));
overlayed.commit_prospective();
assert_eq!(overlayed.storage(&key).unwrap(), Some(&[1, 2, 3][..]));
overlayed.set_storage(key.clone(), Some(vec![]));
assert_eq!(overlayed.storage(&key).unwrap(), Some(&[][..]));
overlayed.set_storage(key.clone(), None);
assert!(overlayed.storage(&key).unwrap().is_none());
overlayed.discard_prospective();
assert_eq!(overlayed.storage(&key).unwrap(), Some(&[1, 2, 3][..]));
overlayed.set_storage(key.clone(), None);
overlayed.commit_prospective();
assert!(overlayed.storage(&key).unwrap().is_none());
}
macro_rules! map {
($( $name:expr => $value:expr ),*) => (
vec![ $( ( $name, $value ) ),* ].into_iter().collect()
)
}
#[test]
fn overlayed_storage_root_works() {
let initial: HashMap<_, _> = map![
b"doe".to_vec() => b"reindeer".to_vec(),
b"dog".to_vec() => b"puppyXXX".to_vec(),
b"dogglesworth".to_vec() => b"catXXX".to_vec(),
b"doug".to_vec() => b"notadog".to_vec()
];
let backend = InMemory::<Blake2Hasher, RlpCodec>::from(initial);
let mut overlay = OverlayedChanges {
committed: map![
b"dog".to_vec() => Some(b"puppy".to_vec()),
b"dogglesworth".to_vec() => Some(b"catYYY".to_vec()),
b"doug".to_vec() => Some(vec![])
],
prospective: map![
b"dogglesworth".to_vec() => Some(b"cat".to_vec()),
b"doug".to_vec() => None
],
};
let mut ext = Ext::new(&mut overlay, &backend);
const ROOT: [u8; 32] = hex!("6ca394ff9b13d6690a51dea30b1b5c43108e52944d30b9095227c49bae03ff8b");
assert_eq!(ext.storage_root(), H256(ROOT));
}
#[test]
fn execute_works() {
assert_eq!(execute(
&trie_backend::tests::test_trie(),
Some(&InMemoryChangesTrieStorage::new()),
&mut Default::default(),
&DummyCodeExecutor {
native_available: true,
@@ -595,6 +528,7 @@ mod tests {
let mut consensus_failed = false;
assert!(execute_using_consensus_failure_handler(
&trie_backend::tests::test_trie(),
Some(&InMemoryChangesTrieStorage::new()),
&mut Default::default(),
&DummyCodeExecutor {
native_available: true,
@@ -623,11 +557,11 @@ mod tests {
// fetch execution proof from 'remote' full node
let remote_backend = trie_backend::tests::test_trie();
let remote_root = remote_backend.storage_root(::std::iter::empty()).0;
let (remote_result, remote_proof, _) = prove_execution(remote_backend,
let (remote_result, remote_proof) = prove_execution(remote_backend,
&mut Default::default(), &executor, "test", &[]).unwrap();
// check proof locally
let (local_result, _) = execution_proof_check::<Blake2Hasher, RlpCodec,_,>(remote_root, remote_proof,
let local_result = execution_proof_check::<Blake2Hasher, RlpCodec, _>(remote_root, remote_proof,
&mut Default::default(), &executor, "test", &[]).unwrap();
// check that both results are correct
@@ -646,17 +580,19 @@ mod tests {
let backend = InMemory::<Blake2Hasher, RlpCodec>::from(initial).try_into_trie_backend().unwrap();
let mut overlay = OverlayedChanges {
committed: map![
b"aba".to_vec() => Some(b"1312".to_vec()),
b"bab".to_vec() => Some(b"228".to_vec())
b"aba".to_vec() => Some(b"1312".to_vec()).into(),
b"bab".to_vec() => Some(b"228".to_vec()).into()
],
prospective: map![
b"abd".to_vec() => Some(b"69".to_vec()),
b"bbd".to_vec() => Some(b"42".to_vec())
b"abd".to_vec() => Some(b"69".to_vec()).into(),
b"bbd".to_vec() => Some(b"42".to_vec()).into()
],
..Default::default()
};
{
let mut ext = Ext::new(&mut overlay, &backend);
let changes_trie_storage = InMemoryChangesTrieStorage::new();
let mut ext = Ext::new(&mut overlay, &backend, Some(&changes_trie_storage));
ext.clear_prefix(b"ab");
}
overlay.commit_prospective();
@@ -664,13 +600,13 @@ mod tests {
assert_eq!(
overlay.committed,
map![
b"abb".to_vec() => None,
b"abc".to_vec() => None,
b"aba".to_vec() => None,
b"abd".to_vec() => None,
b"abc".to_vec() => None.into(),
b"abb".to_vec() => None.into(),
b"aba".to_vec() => None.into(),
b"abd".to_vec() => None.into(),
b"bab".to_vec() => Some(b"228".to_vec()),
b"bbd".to_vec() => Some(b"42".to_vec())
b"bab".to_vec() => Some(b"228".to_vec()).into(),
b"bbd".to_vec() => Some(b"42".to_vec()).into()
],
);
}
@@ -0,0 +1,371 @@
// Copyright 2017 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! The overlayed changes to state.
use std::collections::{HashMap, HashSet};
use codec::Decode;
use changes_trie::{NO_EXTRINSIC_INDEX, Configuration as ChangesTrieConfig};
/// The overlayed changes to state to be queried on top of the backend.
///
/// A transaction shares all prospective changes within an inner overlay
/// that can be cleared.
#[derive(Debug, Default, Clone)]
pub struct OverlayedChanges {
	/// Changes that are not yet committed (i.e. the current transaction).
	pub(crate) prospective: HashMap<Vec<u8>, OverlayedValue>,
	/// Committed changes.
	pub(crate) committed: HashMap<Vec<u8>, OverlayedValue>,
	/// Changes trie configuration. None by default, but could be installed by the
	/// runtime if it supports changes tries.
	pub(crate) changes_trie_config: Option<ChangesTrieConfig>,
}
/// The storage value, used inside OverlayedChanges.
#[derive(Debug, Default, Clone)]
#[cfg_attr(test, derive(PartialEq))]
pub struct OverlayedValue {
	/// Current value. None if value has been deleted.
	pub value: Option<Vec<u8>>,
	/// The set of extrinsic indices where the value has been changed.
	/// Is filled only if runtime has announced changes trie support.
	pub extrinsics: Option<HashSet<u32>>,
}
impl OverlayedChanges {
	/// Sets the changes trie configuration.
	///
	/// Returns false if configuration has been set already and we are now trying
	/// to install a different configuration. This isn't supported for now.
	#[must_use = "Result must be checked"]
	pub(crate) fn set_changes_trie_config(&mut self, config: ChangesTrieConfig) -> bool {
		if let Some(ref old_config) = self.changes_trie_config {
			// we do not support changing the changes trie configuration for now
			if *old_config != config {
				return false;
			}
		}

		self.changes_trie_config = Some(config);
		true
	}

	/// Returns a double-Option: None if the key is unknown (i.e. the query should be referred
	/// to the backend); Some(None) if the key has been deleted. Some(Some(...)) for a key whose
	/// value has been set.
	pub fn storage(&self, key: &[u8]) -> Option<Option<&[u8]>> {
		// prospective changes take precedence over committed ones
		self.prospective.get(key)
			.or_else(|| self.committed.get(key))
			.map(|x| x.value.as_ref().map(AsRef::as_ref))
	}

	/// Inserts the given key-value pair into the prospective change set.
	///
	/// `None` can be used to delete a value specified by the given key.
	pub(crate) fn set_storage(&mut self, key: Vec<u8>, val: Option<Vec<u8>>) {
		let extrinsic_index = self.extrinsic_index();
		let entry = self.prospective.entry(key).or_default();
		entry.value = val;

		// remember which extrinsic has touched this key (used by the changes trie)
		if let Some(extrinsic) = extrinsic_index {
			entry.extrinsics.get_or_insert_with(Default::default)
				.insert(extrinsic);
		}
	}

	/// Removes all key-value pairs which keys share the given prefix.
	///
	/// NOTE that this doesn't take place immediately but written into the prospective
	/// change set, and still can be reverted by [`discard_prospective`].
	///
	/// [`discard_prospective`]: #method.discard_prospective
	pub(crate) fn clear_prefix(&mut self, prefix: &[u8]) {
		let extrinsic_index = self.extrinsic_index();

		// Iterate over all prospective and mark all keys that share
		// the given prefix as removed (None).
		for (key, entry) in self.prospective.iter_mut() {
			if key.starts_with(prefix) {
				entry.value = None;

				if let Some(extrinsic) = extrinsic_index {
					entry.extrinsics.get_or_insert_with(Default::default)
						.insert(extrinsic);
				}
			}
		}

		// Then do the same with keys from committed changes.
		// NOTE that we are making changes in the prospective change set.
		for key in self.committed.keys() {
			if key.starts_with(prefix) {
				let entry = self.prospective.entry(key.clone()).or_default();
				entry.value = None;

				if let Some(extrinsic) = extrinsic_index {
					entry.extrinsics.get_or_insert_with(Default::default)
						.insert(extrinsic);
				}
			}
		}
	}

	/// Discard prospective changes to state.
	pub fn discard_prospective(&mut self) {
		self.prospective.clear();
	}

	/// Commit prospective changes to state.
	pub fn commit_prospective(&mut self) {
		if self.committed.is_empty() {
			// fast path: nothing committed yet, the prospective set becomes the committed one
			::std::mem::swap(&mut self.prospective, &mut self.committed);
		} else {
			for (key, val) in self.prospective.drain() {
				let entry = self.committed.entry(key).or_default();
				entry.value = val.value;

				// merge extrinsic indices from both change sets
				if let Some(prospective_extrinsics) = val.extrinsics {
					entry.extrinsics.get_or_insert_with(Default::default)
						.extend(prospective_extrinsics);
				}
			}
		}
	}

	/// Drain committed changes to an iterator.
	///
	/// Panics:
	/// Will panic if there are any uncommitted prospective changes.
	pub fn drain<'a>(&'a mut self) -> impl Iterator<Item=(Vec<u8>, OverlayedValue)> + 'a {
		assert!(self.prospective.is_empty());
		self.committed.drain()
	}

	/// Consume `OverlayedChanges` and take committed set.
	///
	/// Panics:
	/// Will panic if there are any uncommitted prospective changes.
	pub fn into_committed(self) -> impl Iterator<Item=(Vec<u8>, Option<Vec<u8>>)> {
		assert!(self.prospective.is_empty());
		self.committed.into_iter().map(|(k, v)| (k, v.value))
	}

	/// Inserts storage entry responsible for current extrinsic index.
	#[cfg(test)]
	pub(crate) fn set_extrinsic_index(&mut self, extrinsic_index: u32) {
		use codec::Encode;
		self.prospective.insert(b":extrinsic_index".to_vec(), OverlayedValue {
			value: Some(extrinsic_index.encode()),
			extrinsics: None,
		});
	}

	/// Returns current extrinsic index to use in changes trie construction.
	/// None is returned if it is not set or changes trie config is not set.
	/// Persistent value (from the backend) can be ignored because runtime must
	/// set this index before first and unset after last extrinsic is executed.
	/// Changes that are made outside of extrinsics, are marked with
	/// `NO_EXTRINSIC_INDEX` index.
	fn extrinsic_index(&self) -> Option<u32> {
		if self.changes_trie_config.is_some() {
			Some(
				self.storage(b":extrinsic_index")
					.and_then(|idx| idx.and_then(|idx| Decode::decode(&mut &*idx)))
					.unwrap_or(NO_EXTRINSIC_INDEX))
		} else {
			None
		}
	}
}
#[cfg(test)]
impl From<Option<Vec<u8>>> for OverlayedValue {
	/// Test helper: wrap a plain value change into an `OverlayedValue`
	/// with no extrinsic indices recorded.
	fn from(value: Option<Vec<u8>>) -> OverlayedValue {
		OverlayedValue { value, extrinsics: None }
	}
}
#[cfg(test)]
mod tests {
	use primitives::{Blake2Hasher, RlpCodec, H256};
	use backend::InMemory;
	use changes_trie::InMemoryStorage as InMemoryChangesTrieStorage;
	use ext::Ext;
	use {Externalities};
	use super::*;

	// Removes the special `:extrinsic_index` entry (inserted by
	// `set_extrinsic_index`) so tests can compare only "real" changes.
	fn strip_extrinsic_index(map: &HashMap<Vec<u8>, OverlayedValue>) -> HashMap<Vec<u8>, OverlayedValue> {
		let mut clone = map.clone();
		clone.remove(&b":extrinsic_index".to_vec());
		clone
	}

	// Exercises the basic set/commit/discard lifecycle of a single key.
	#[test]
	fn overlayed_storage_works() {
		let mut overlayed = OverlayedChanges::default();

		let key = vec![42, 69, 169, 142];

		// unknown key: the query should be referred to the backend
		assert!(overlayed.storage(&key).is_none());

		overlayed.set_storage(key.clone(), Some(vec![1, 2, 3]));
		assert_eq!(overlayed.storage(&key).unwrap(), Some(&[1, 2, 3][..]));

		overlayed.commit_prospective();
		assert_eq!(overlayed.storage(&key).unwrap(), Some(&[1, 2, 3][..]));

		overlayed.set_storage(key.clone(), Some(vec![]));
		assert_eq!(overlayed.storage(&key).unwrap(), Some(&[][..]));

		// deletion is represented as Some(None)
		overlayed.set_storage(key.clone(), None);
		assert!(overlayed.storage(&key).unwrap().is_none());

		// discarding prospective changes restores the committed value
		overlayed.discard_prospective();
		assert_eq!(overlayed.storage(&key).unwrap(), Some(&[1, 2, 3][..]));

		overlayed.set_storage(key.clone(), None);
		overlayed.commit_prospective();
		assert!(overlayed.storage(&key).unwrap().is_none());
	}

	// Checks that the storage root computed through `Ext` reflects both the
	// backend values and the committed + prospective overlay on top of them.
	#[test]
	fn overlayed_storage_root_works() {
		let initial: HashMap<_, _> = vec![
			(b"doe".to_vec(), b"reindeer".to_vec()),
			(b"dog".to_vec(), b"puppyXXX".to_vec()),
			(b"dogglesworth".to_vec(), b"catXXX".to_vec()),
			(b"doug".to_vec(), b"notadog".to_vec()),
		].into_iter().collect();
		let backend = InMemory::<Blake2Hasher, RlpCodec>::from(initial);
		let mut overlay = OverlayedChanges {
			committed: vec![
				(b"dog".to_vec(), Some(b"puppy".to_vec()).into()),
				(b"dogglesworth".to_vec(), Some(b"catYYY".to_vec()).into()),
				(b"doug".to_vec(), Some(vec![]).into()),
			].into_iter().collect(),
			prospective: vec![
				(b"dogglesworth".to_vec(), Some(b"cat".to_vec()).into()),
				(b"doug".to_vec(), None.into()),
			].into_iter().collect(),
			..Default::default()
		};

		let changes_trie_storage = InMemoryChangesTrieStorage::new();
		let mut ext = Ext::new(&mut overlay, &backend, Some(&changes_trie_storage));
		// expected root of the merged backend + overlay state
		const ROOT: [u8; 32] = hex!("6ca394ff9b13d6690a51dea30b1b5c43108e52944d30b9095227c49bae03ff8b");

		assert_eq!(ext.storage_root(), H256(ROOT));
	}

	// First installation of a changes trie configuration must succeed.
	#[test]
	fn changes_trie_configuration_is_saved() {
		let mut overlay = OverlayedChanges::default();
		assert!(overlay.changes_trie_config.is_none());
		assert_eq!(overlay.set_changes_trie_config(ChangesTrieConfig {
			digest_interval: 4, digest_levels: 1,
		}), true);
		assert!(overlay.changes_trie_config.is_some());
	}

	// Re-installing the SAME configuration is allowed and must not clobber
	// extrinsic indices already collected for changed keys.
	#[test]
	fn changes_trie_configuration_is_saved_twice() {
		let mut overlay = OverlayedChanges::default();
		assert!(overlay.changes_trie_config.is_none());
		assert_eq!(overlay.set_changes_trie_config(ChangesTrieConfig {
			digest_interval: 4, digest_levels: 1,
		}), true);
		overlay.set_extrinsic_index(0);
		overlay.set_storage(vec![1], Some(vec![2]));
		assert_eq!(overlay.set_changes_trie_config(ChangesTrieConfig {
			digest_interval: 4, digest_levels: 1,
		}), true);
		assert_eq!(
			strip_extrinsic_index(&overlay.prospective),
			vec![
				(vec![1], OverlayedValue { value: Some(vec![2]), extrinsics: Some(vec![0].into_iter().collect()) }),
			].into_iter().collect(),
		);
	}

	// Installing a DIFFERENT configuration is rejected (returns false).
	#[test]
	fn panics_when_trying_to_save_different_changes_trie_configuration() {
		let mut overlay = OverlayedChanges::default();
		assert_eq!(overlay.set_changes_trie_config(ChangesTrieConfig {
			digest_interval: 4, digest_levels: 1,
		}), true);
		assert_eq!(overlay.set_changes_trie_config(ChangesTrieConfig {
			digest_interval: 2, digest_levels: 1,
		}), false);
	}

	// Verifies that per-key extrinsic indices are collected in prospective
	// changes, survive commits, and are merged across commits.
	#[test]
	fn extrinsic_changes_are_collected() {
		let mut overlay = OverlayedChanges::default();
		let _ = overlay.set_changes_trie_config(ChangesTrieConfig {
			digest_interval: 4, digest_levels: 1,
		});

		// change made outside of any extrinsic => NO_EXTRINSIC_INDEX
		overlay.set_storage(vec![100], Some(vec![101]));

		overlay.set_extrinsic_index(0);
		overlay.set_storage(vec![1], Some(vec![2]));

		overlay.set_extrinsic_index(1);
		overlay.set_storage(vec![3], Some(vec![4]));

		overlay.set_extrinsic_index(2);
		overlay.set_storage(vec![1], Some(vec![6]));

		assert_eq!(strip_extrinsic_index(&overlay.prospective),
			vec![
				(vec![1], OverlayedValue { value: Some(vec![6]), extrinsics: Some(vec![0, 2].into_iter().collect()) }),
				(vec![3], OverlayedValue { value: Some(vec![4]), extrinsics: Some(vec![1].into_iter().collect()) }),
				(vec![100], OverlayedValue { value: Some(vec![101]), extrinsics: Some(vec![NO_EXTRINSIC_INDEX].into_iter().collect()) }),
			].into_iter().collect());

		overlay.commit_prospective();

		overlay.set_extrinsic_index(3);
		overlay.set_storage(vec![3], Some(vec![7]));

		overlay.set_extrinsic_index(4);
		overlay.set_storage(vec![1], Some(vec![8]));

		// committed set is unchanged by the new prospective writes
		assert_eq!(strip_extrinsic_index(&overlay.committed),
			vec![
				(vec![1], OverlayedValue { value: Some(vec![6]), extrinsics: Some(vec![0, 2].into_iter().collect()) }),
				(vec![3], OverlayedValue { value: Some(vec![4]), extrinsics: Some(vec![1].into_iter().collect()) }),
				(vec![100], OverlayedValue { value: Some(vec![101]), extrinsics: Some(vec![NO_EXTRINSIC_INDEX].into_iter().collect()) }),
			].into_iter().collect());

		assert_eq!(strip_extrinsic_index(&overlay.prospective),
			vec![
				(vec![1], OverlayedValue { value: Some(vec![8]), extrinsics: Some(vec![4].into_iter().collect()) }),
				(vec![3], OverlayedValue { value: Some(vec![7]), extrinsics: Some(vec![3].into_iter().collect()) }),
			].into_iter().collect());

		overlay.commit_prospective();

		// after the second commit the extrinsic indices of both sets are merged
		assert_eq!(strip_extrinsic_index(&overlay.committed),
			vec![
				(vec![1], OverlayedValue { value: Some(vec![8]), extrinsics: Some(vec![0, 2, 4].into_iter().collect()) }),
				(vec![3], OverlayedValue { value: Some(vec![7]), extrinsics: Some(vec![1, 3].into_iter().collect()) }),
				(vec![100], OverlayedValue { value: Some(vec![101]), extrinsics: Some(vec![NO_EXTRINSIC_INDEX].into_iter().collect()) }),
			].into_iter().collect());

		assert_eq!(overlay.prospective,
			Default::default());
	}
}
@@ -18,23 +18,54 @@
use std::cell::RefCell;
use hashdb::{Hasher, HashDB};
use heapsize::HeapSizeOf;
use memorydb::MemoryDB;
use patricia_trie::{TrieDB, Trie, Recorder, NodeCodec};
use trie_backend::{TrieBackend, Ephemeral};
use {Error, ExecutionError, Backend, TryIntoTrieBackend};
use rlp::Encodable;
use heapsize::HeapSizeOf;
use trie_backend::TrieBackend;
use trie_backend_essence::{Ephemeral, TrieBackendEssence, TrieBackendStorage};
use {Error, ExecutionError, Backend};
/// Patricia trie-based backend essence which also tracks all touched storage trie values.
/// These can be sent to remote node and used as a proof of execution.
pub struct ProvingBackendEssence<'a, S: 'a + TrieBackendStorage<H>, H: 'a + Hasher, C: 'a + NodeCodec<H>> {
pub(crate) backend: &'a TrieBackendEssence<S, H, C>,
pub(crate) proof_recorder: &'a mut Recorder<H::Out>,
}
impl<'a, S, H, C> ProvingBackendEssence<'a, S, H, C>
where
S: TrieBackendStorage<H>,
H: Hasher,
H::Out: HeapSizeOf,
C: NodeCodec<H>,
{
pub fn storage(&mut self, key: &[u8]) -> Result<Option<Vec<u8>>, String> {
let mut read_overlay = MemoryDB::default();
let eph = Ephemeral::new(
self.backend.backend_storage(),
&mut read_overlay,
);
let map_e = |e| format!("Trie lookup error: {}", e);
TrieDB::<H, C>::new(&eph, self.backend.root()).map_err(map_e)?
.get_with(key, &mut *self.proof_recorder)
.map(|x| x.map(|val| val.to_vec()))
.map_err(map_e)
}
}
/// Patricia trie-based backend which also tracks all touched storage trie values.
/// These can be sent to remote node and used as a proof of execution.
pub struct ProvingBackend<H: Hasher, C: NodeCodec<H>> {
backend: TrieBackend<H, C>,
pub struct ProvingBackend<S: TrieBackendStorage<H>, H: Hasher, C: NodeCodec<H>> {
backend: TrieBackend<S, H, C>,
proof_recorder: RefCell<Recorder<H::Out>>,
}
impl<H: Hasher, C: NodeCodec<H>> ProvingBackend<H, C> {
impl<S: TrieBackendStorage<H>, H: Hasher, C: NodeCodec<H>> ProvingBackend<S, H, C> {
/// Create new proving backend.
pub fn new(backend: TrieBackend<H, C>) -> Self {
pub fn new(backend: TrieBackend<S, H, C>) -> Self {
ProvingBackend {
backend,
proof_recorder: RefCell::new(Recorder::new()),
@@ -51,27 +82,23 @@ impl<H: Hasher, C: NodeCodec<H>> ProvingBackend<H, C> {
}
}
impl<H, C> Backend<H, C> for ProvingBackend<H, C>
where
H: Hasher,
C: NodeCodec<H>,
H::Out: Ord + Encodable + HeapSizeOf
impl<S, H, C> Backend<H, C> for ProvingBackend<S, H, C>
where
S: TrieBackendStorage<H>,
H: Hasher,
C: NodeCodec<H>,
H::Out: Ord + Encodable + HeapSizeOf,
{
type Error = String;
type Transaction = MemoryDB<H>;
type TrieBackendStorage = MemoryDB<H>;
fn storage(&self, key: &[u8]) -> Result<Option<Vec<u8>>, Self::Error> {
let mut read_overlay = MemoryDB::new();
let eph = Ephemeral::new(
self.backend.backend_storage(),
&mut read_overlay,
);
let map_e = |e| format!("Trie lookup error: {}", e);
let mut proof_recorder = self.proof_recorder.try_borrow_mut()
.expect("only fails when already borrowed; storage() is non-reentrant; qed");
TrieDB::<H, C>::new(&eph, &self.backend.root()).map_err(map_e)?
.get_with(key, &mut *proof_recorder).map(|x| x.map(|val| val.to_vec())).map_err(map_e)
ProvingBackendEssence {
backend: self.backend.essence(),
proof_recorder: &mut *self.proof_recorder.try_borrow_mut()
.expect("only fails when already borrowed; storage() is non-reentrant; qed"),
}.storage(key)
}
fn for_keys_with_prefix<F: FnMut(&[u8])>(&self, prefix: &[u8], f: F) {
@@ -87,10 +114,8 @@ where
{
self.backend.storage_root(delta)
}
}
impl<H: Hasher, C: NodeCodec<H>> TryIntoTrieBackend<H, C> for ProvingBackend<H, C> {
fn try_into_trie_backend(self) -> Option<TrieBackend<H, C>> {
fn try_into_trie_backend(self) -> Option<TrieBackend<Self::TrieBackendStorage, H, C>> {
None
}
}
@@ -99,7 +124,7 @@ impl<H: Hasher, C: NodeCodec<H>> TryIntoTrieBackend<H, C> for ProvingBackend<H,
pub fn create_proof_check_backend<H, C>(
root: H::Out,
proof: Vec<Vec<u8>>
) -> Result<TrieBackend<H, C>, Box<Error>>
) -> Result<TrieBackend<MemoryDB<H>, H, C>, Box<Error>>
where
H: Hasher,
C: NodeCodec<H>,
@@ -114,8 +139,7 @@ where
return Err(Box::new(ExecutionError::InvalidProof) as Box<Error>);
}
Ok(TrieBackend::with_memorydb(db, root))
Ok(TrieBackend::new(db, root))
}
#[cfg(test)]
@@ -125,7 +149,7 @@ mod tests {
use super::*;
use primitives::{Blake2Hasher, RlpCodec};
fn test_proving() -> ProvingBackend<Blake2Hasher, RlpCodec> {
fn test_proving() -> ProvingBackend<MemoryDB<Blake2Hasher>, Blake2Hasher, RlpCodec> {
ProvingBackend::new(test_trie())
}
+63 -29
View File
@@ -17,41 +17,62 @@
//! Test implementation for Externalities.
use std::collections::HashMap;
use std::cmp::Ord;
use super::Externalities;
use triehash::trie_root;
use hashdb::Hasher;
use rlp::Encodable;
use std::marker::PhantomData;
use std::iter::FromIterator;
use hashdb::Hasher;
use heapsize::HeapSizeOf;
use patricia_trie::NodeCodec;
use rlp::Encodable;
use triehash::trie_root;
use backend::InMemory;
use changes_trie::{compute_changes_trie_root, InMemoryStorage as ChangesTrieInMemoryStorage};
use super::{Externalities, OverlayedChanges};
/// Simple HashMap-based Externalities impl.
#[derive(Debug)]
pub struct TestExternalities<H> {
pub struct TestExternalities<H: Hasher, C: NodeCodec<H>> where H::Out: HeapSizeOf {
inner: HashMap<Vec<u8>, Vec<u8>>,
_hasher: PhantomData<H>,
changes_trie_storage: ChangesTrieInMemoryStorage<H>,
changes: OverlayedChanges,
_codec: ::std::marker::PhantomData<C>,
}
impl<H: Hasher> TestExternalities<H> {
impl<H: Hasher, C: NodeCodec<H>> TestExternalities<H, C> where H::Out: HeapSizeOf {
/// Create a new instance of `TestExternalities`
pub fn new() -> Self {
TestExternalities {inner: HashMap::new(), _hasher: PhantomData}
pub fn new(inner: HashMap<Vec<u8>, Vec<u8>>) -> Self {
let mut overlay = OverlayedChanges::default();
super::set_changes_trie_config(
&mut overlay,
inner.get(&b":changes_trie".to_vec()).cloned())
.expect("changes trie configuration is correct in test env; qed");
TestExternalities {
inner,
changes_trie_storage: ChangesTrieInMemoryStorage::new(),
changes: overlay,
_codec: Default::default(),
}
}
/// Insert key/value
pub fn insert(&mut self, k: Vec<u8>, v: Vec<u8>) -> Option<Vec<u8>> {
self.inner.insert(k, v)
}
}
impl<H: Hasher> PartialEq for TestExternalities<H> {
fn eq(&self, other: &TestExternalities<H>) -> bool {
impl<H: Hasher, C: NodeCodec<H>> ::std::fmt::Debug for TestExternalities<H, C> where H::Out: HeapSizeOf {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "{:?}", self.inner)
}
}
impl<H: Hasher, C: NodeCodec<H>> PartialEq for TestExternalities<H, C> where H::Out: HeapSizeOf {
fn eq(&self, other: &TestExternalities<H, C>) -> bool {
self.inner.eq(&other.inner)
}
}
impl<H: Hasher> FromIterator<(Vec<u8>, Vec<u8>)> for TestExternalities<H> {
impl<H: Hasher, C: NodeCodec<H>> FromIterator<(Vec<u8>, Vec<u8>)> for TestExternalities<H, C> where H::Out: HeapSizeOf {
fn from_iter<I: IntoIterator<Item=(Vec<u8>, Vec<u8>)>>(iter: I) -> Self {
let mut t = Self::new();
let mut t = Self::new(Default::default());
for i in iter {
t.inner.insert(i.0, i.1);
}
@@ -59,29 +80,34 @@ impl<H: Hasher> FromIterator<(Vec<u8>, Vec<u8>)> for TestExternalities<H> {
}
}
impl<H: Hasher> Default for TestExternalities<H> {
fn default() -> Self { Self::new() }
impl<H: Hasher, C: NodeCodec<H>> Default for TestExternalities<H, C> where H::Out: HeapSizeOf {
fn default() -> Self { Self::new(Default::default()) }
}
impl<H: Hasher> From<TestExternalities<H>> for HashMap<Vec<u8>, Vec<u8>> {
fn from(tex: TestExternalities<H>) -> Self {
impl<H: Hasher, C: NodeCodec<H>> From<TestExternalities<H, C>> for HashMap<Vec<u8>, Vec<u8>> where H::Out: HeapSizeOf {
fn from(tex: TestExternalities<H, C>) -> Self {
tex.inner.into()
}
}
impl<H: Hasher> From< HashMap<Vec<u8>, Vec<u8>> > for TestExternalities<H> {
impl<H: Hasher, C: NodeCodec<H>> From< HashMap<Vec<u8>, Vec<u8>> > for TestExternalities<H, C> where H::Out: HeapSizeOf {
fn from(hashmap: HashMap<Vec<u8>, Vec<u8>>) -> Self {
TestExternalities { inner: hashmap, _hasher: PhantomData }
TestExternalities {
inner: hashmap,
changes_trie_storage: ChangesTrieInMemoryStorage::new(),
changes: Default::default(),
_codec: ::std::marker::PhantomData::<C>::default(),
}
}
}
impl<H: Hasher> Externalities<H> for TestExternalities<H> where H::Out: Ord + Encodable {
impl<H: Hasher, C: NodeCodec<H>> Externalities<H> for TestExternalities<H, C> where H::Out: Ord + Encodable + HeapSizeOf {
fn storage(&self, key: &[u8]) -> Option<Vec<u8>> {
self.inner.get(key).map(|x| x.to_vec())
}
fn place_storage(&mut self, key: Vec<u8>, maybe_value: Option<Vec<u8>>) {
self.changes.set_storage(key.clone(), maybe_value.clone());
match maybe_value {
Some(value) => { self.inner.insert(key, value); }
None => { self.inner.remove(&key); }
@@ -89,9 +115,8 @@ impl<H: Hasher> Externalities<H> for TestExternalities<H> where H::Out: Ord + En
}
fn clear_prefix(&mut self, prefix: &[u8]) {
self.inner.retain(|key, _|
!key.starts_with(prefix)
)
self.changes.clear_prefix(prefix);
self.inner.retain(|key, _| !key.starts_with(prefix));
}
fn chain_id(&self) -> u64 { 42 }
@@ -99,16 +124,25 @@ impl<H: Hasher> Externalities<H> for TestExternalities<H> where H::Out: Ord + En
fn storage_root(&mut self) -> H::Out {
trie_root::<H, _, _, _>(self.inner.clone())
}
fn storage_changes_root(&mut self, block: u64) -> Option<H::Out> {
compute_changes_trie_root::<_, _, H, C>(
&InMemory::default(),
Some(&self.changes_trie_storage),
&self.changes,
block,
).map(|(root, _)| root.clone())
}
}
#[cfg(test)]
mod tests {
use super::*;
use primitives::{Blake2Hasher, H256};
use primitives::{Blake2Hasher, RlpCodec, H256};
#[test]
fn commit_should_work() {
let mut ext = TestExternalities::<Blake2Hasher>::new();
let mut ext = TestExternalities::<Blake2Hasher, RlpCodec>::default();
ext.set_storage(b"doe".to_vec(), b"reindeer".to_vec());
ext.set_storage(b"dog".to_vec(), b"puppy".to_vec());
ext.set_storage(b"dogglesworth".to_vec(), b"cat".to_vec());
+41 -189
View File
@@ -15,135 +15,70 @@
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! Trie-based state machine backend.
use Backend;
use hashdb::{Hasher, HashDB, AsHashDB};
use memorydb::MemoryDB;
use patricia_trie::{TrieDB, TrieDBMut, TrieError, Trie, TrieMut, NodeCodec};
use std::collections::HashMap;
use std::sync::Arc;
use std::marker::PhantomData;
use hashdb::Hasher;
use heapsize::HeapSizeOf;
use memorydb::MemoryDB;
use rlp::Encodable;
use patricia_trie::{TrieDB, TrieDBMut, TrieError, Trie, TrieMut, NodeCodec};
use trie_backend_essence::{TrieBackendEssence, TrieBackendStorage, Ephemeral};
use {Backend};
pub use hashdb::DBValue;
/// Backend trie storage trait.
pub trait Storage<H: Hasher>: Send + Sync {
/// Get a trie node.
fn get(&self, key: &H::Out) -> Result<Option<DBValue>, String>;
}
/// Try convert into trie-based backend.
pub trait TryIntoTrieBackend<H: Hasher, C: NodeCodec<H>> {
/// Try to convert self into trie backend.
fn try_into_trie_backend(self) -> Option<TrieBackend<H, C>>;
}
/// Patricia trie-based backend. Transaction type is an overlay of changes to commit.
#[derive(Clone)]
pub struct TrieBackend<H: Hasher, C: NodeCodec<H>> {
storage: TrieBackendStorage<H>,
root: H::Out,
_codec: PhantomData<C>
pub struct TrieBackend<S: TrieBackendStorage<H>, H: Hasher, C: NodeCodec<H>> {
essence: TrieBackendEssence<S, H, C>,
}
impl<H: Hasher, C: NodeCodec<H>> TrieBackend<H, C> where H::Out: HeapSizeOf {
impl<S: TrieBackendStorage<H>, H: Hasher, C: NodeCodec<H>> TrieBackend<S, H, C> where H::Out: HeapSizeOf {
/// Create new trie-based backend.
pub fn with_storage(db: Arc<Storage<H>>, root: H::Out) -> Self {
pub fn new(storage: S, root: H::Out) -> Self {
TrieBackend {
storage: TrieBackendStorage::Storage(db),
root,
_codec: PhantomData,
essence: TrieBackendEssence::new(storage, root),
}
}
/// Create new trie-based backend for genesis block.
pub fn with_storage_for_genesis(db: Arc<Storage<H>>) -> Self {
let mut root = <H as Hasher>::Out::default();
let mut mdb = MemoryDB::<H>::new();
TrieDBMut::<H, C>::new(&mut mdb, &mut root);
Self::with_storage(db, root)
}
/// Create new trie-based backend backed by MemoryDb storage.
pub fn with_memorydb(db: MemoryDB<H>, root: H::Out) -> Self {
// TODO: check that root is a part of db???
TrieBackend {
storage: TrieBackendStorage::MemoryDb(db),
root,
_codec: PhantomData,
}
/// Get backend essence reference.
pub fn essence(&self) -> &TrieBackendEssence<S, H, C> {
&self.essence
}
/// Get backend storage reference.
pub fn backend_storage(&self) -> &TrieBackendStorage<H> {
&self.storage
pub fn backend_storage(&self) -> &S {
self.essence.backend_storage()
}
/// Get trie root.
pub fn root(&self) -> &H::Out {
&self.root
self.essence.root()
}
}
impl super::Error for String {}
impl<H: Hasher, C: NodeCodec<H>> Backend<H, C> for TrieBackend<H, C> where H::Out: HeapSizeOf {
impl<S: TrieBackendStorage<H>, H: Hasher, C: NodeCodec<H>> Backend<H, C> for TrieBackend<S, H, C>
where
H::Out: Ord + Encodable + HeapSizeOf,
{
type Error = String;
type Transaction = MemoryDB<H>;
type TrieBackendStorage = S;
fn storage(&self, key: &[u8]) -> Result<Option<Vec<u8>>, Self::Error> {
let mut read_overlay = MemoryDB::new();
let eph = Ephemeral {
storage: &self.storage,
overlay: &mut read_overlay,
};
let map_e = |e| format!("Trie lookup error: {}", e);
TrieDB::<H, C>::new(&eph, &self.root).map_err(map_e)?
.get(key).map(|x| x.map(|val| val.to_vec())).map_err(map_e)
self.essence.storage(key)
}
fn for_keys_with_prefix<F: FnMut(&[u8])>(&self, prefix: &[u8], mut f: F) {
let mut read_overlay = MemoryDB::new();
let eph = Ephemeral {
storage: &self.storage,
overlay: &mut read_overlay,
};
let mut iter = move || -> Result<(), Box<TrieError<H::Out, C::Error>>> {
let trie = TrieDB::<H, C>::new(&eph, &self.root)?;
let mut iter = trie.iter()?;
iter.seek(prefix)?;
for x in iter {
let (key, _) = x?;
if !key.starts_with(prefix) {
break;
}
f(&key);
}
Ok(())
};
if let Err(e) = iter() {
debug!(target: "trie", "Error while iterating by prefix: {}", e);
}
fn for_keys_with_prefix<F: FnMut(&[u8])>(&self, prefix: &[u8], f: F) {
self.essence.for_keys_with_prefix(prefix, f)
}
fn pairs(&self) -> Vec<(Vec<u8>, Vec<u8>)> {
let mut read_overlay = MemoryDB::new();
let eph = Ephemeral {
storage: &self.storage,
overlay: &mut read_overlay,
};
let eph = Ephemeral::new(self.essence.backend_storage(), &mut read_overlay);
let collect_all = || -> Result<_, Box<TrieError<H::Out, C::Error>>> {
let trie = TrieDB::<H, C>::new(&eph, &self.root)?;
let trie = TrieDB::<H, C>::new(&eph, self.essence.root())?;
let mut v = Vec::new();
for x in trie.iter()? {
let (key, value) = x?;
@@ -165,13 +100,13 @@ impl<H: Hasher, C: NodeCodec<H>> Backend<H, C> for TrieBackend<H, C> where H::Ou
fn storage_root<I>(&self, delta: I) -> (H::Out, MemoryDB<H>)
where I: IntoIterator<Item=(Vec<u8>, Option<Vec<u8>>)>
{
let mut write_overlay = MemoryDB::new();
let mut root = self.root;
let mut write_overlay = MemoryDB::default();
let mut root = *self.essence.root();
{
let mut eph = Ephemeral {
storage: &self.storage,
overlay: &mut write_overlay,
};
let mut eph = Ephemeral::new(
self.essence.backend_storage(),
&mut write_overlay,
);
let mut trie = TrieDBMut::<H, C>::from_existing(&mut eph, &mut root).expect("prior state root to exist"); // TODO: handle gracefully
for (key, change) in delta {
@@ -188,99 +123,17 @@ impl<H: Hasher, C: NodeCodec<H>> Backend<H, C> for TrieBackend<H, C> where H::Ou
(root, write_overlay)
}
}
impl<H: Hasher, C: NodeCodec<H>> TryIntoTrieBackend<H, C> for TrieBackend<H, C> {
fn try_into_trie_backend(self) -> Option<TrieBackend<H, C>> {
fn try_into_trie_backend(self) -> Option<TrieBackend<Self::TrieBackendStorage, H, C>> {
Some(self)
}
}
pub struct Ephemeral<'a, H: 'a + Hasher> {
storage: &'a TrieBackendStorage<H>,
overlay: &'a mut MemoryDB<H>,
}
impl<'a, H: Hasher> AsHashDB<H> for Ephemeral<'a, H> where H::Out: HeapSizeOf {
fn as_hashdb(&self) -> &HashDB<H> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<H> { self }
}
impl<'a, H: Hasher> Ephemeral<'a, H> {
pub fn new(storage: &'a TrieBackendStorage<H>, overlay: &'a mut MemoryDB<H>) -> Self {
Ephemeral {
storage,
overlay,
}
}
}
impl<'a, H: Hasher> HashDB<H> for Ephemeral<'a, H> where H::Out: HeapSizeOf {
fn keys(&self) -> HashMap<H::Out, i32> {
self.overlay.keys() // TODO: iterate backing
}
fn get(&self, key: &H::Out) -> Option<DBValue> {
match self.overlay.raw(key) {
Some((val, i)) => {
if i <= 0 {
None
} else {
Some(val)
}
}
None => match self.storage.get(key) {
Ok(x) => x,
Err(e) => {
warn!(target: "trie", "Failed to read from DB: {}", e);
None
},
},
}
}
fn contains(&self, key: &H::Out) -> bool {
self.get(key).is_some()
}
fn insert(&mut self, value: &[u8]) -> H::Out {
self.overlay.insert(value)
}
fn emplace(&mut self, key: H::Out, value: DBValue) {
self.overlay.emplace(key, value)
}
fn remove(&mut self, key: &H::Out) {
self.overlay.remove(key)
}
}
#[derive(Clone)]
pub enum TrieBackendStorage<H: Hasher> {
/// Key value db + storage column.
Storage(Arc<Storage<H>>),
/// Hash db.
MemoryDb(MemoryDB<H>),
}
impl<H: Hasher> TrieBackendStorage<H> {
pub fn get(&self, key: &H::Out) -> Result<Option<DBValue>, String> {
match *self {
TrieBackendStorage::Storage(ref db) =>
db.get(key)
.map_err(|e| format!("Trie lookup error: {}", e)),
TrieBackendStorage::MemoryDb(ref db) =>
Ok(db.get(key)),
}
}
}
#[cfg(test)]
pub mod tests {
use super::*;
use std::collections::HashSet;
use primitives::{Blake2Hasher, RlpCodec, H256};
use super::*;
fn test_db() -> (MemoryDB<Blake2Hasher>, H256) {
let mut root = H256::default();
@@ -298,9 +151,9 @@ pub mod tests {
(mdb, root)
}
pub(crate) fn test_trie() -> TrieBackend<Blake2Hasher, RlpCodec> {
pub(crate) fn test_trie() -> TrieBackend<MemoryDB<Blake2Hasher>, Blake2Hasher, RlpCodec> {
let (mdb, root) = test_db();
TrieBackend::with_memorydb(mdb, root)
TrieBackend::new(mdb, root)
}
#[test]
@@ -320,11 +173,10 @@ pub mod tests {
#[test]
fn pairs_are_empty_on_empty_storage() {
let db = TrieBackend::<Blake2Hasher, RlpCodec>::with_memorydb(
assert!(TrieBackend::<MemoryDB<Blake2Hasher>, Blake2Hasher, RlpCodec>::new(
MemoryDB::new(),
Default::default()
);
assert!(db.pairs().is_empty());
Default::default(),
).pairs().is_empty());
}
#[test]
@@ -0,0 +1,194 @@
// Copyright 2017 Parity Technologies (UK) Ltd.
// This file is part of Substrate.
// Substrate is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Substrate is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Substrate. If not, see <http://www.gnu.org/licenses/>.
//! Trie-based state machine backend essence used to read values
//! from storage.
use std::collections::HashMap;
use std::marker::PhantomData;
use std::ops::Deref;
use std::sync::Arc;
use hashdb::{Hasher, DBValue, AsHashDB, HashDB};
use heapsize::HeapSizeOf;
use memorydb::MemoryDB;
use patricia_trie::{TrieDB, TrieError, Trie, NodeCodec};
use changes_trie::Storage as ChangesTrieStorage;
/// Patricia trie-based storage trait.
pub trait Storage<H: Hasher>: Send + Sync {
	/// Get a trie node by its hash. `Ok(None)` means the node is absent;
	/// `Err` carries a storage-level error message.
	fn get(&self, key: &H::Out) -> Result<Option<DBValue>, String>;
}
/// Patricia trie-based pairs storage essence.
pub struct TrieBackendEssence<S: TrieBackendStorage<H>, H: Hasher, C: NodeCodec<H>> {
	// Backing trie-node storage all reads are resolved against.
	storage: S,
	// Root hash of the trie.
	root: H::Out,
	// The node codec is only used at the type level.
	_codec: PhantomData<C>,
}
impl<S: TrieBackendStorage<H>, H: Hasher, C: NodeCodec<H>> TrieBackendEssence<S, H, C> where H::Out: HeapSizeOf {
/// Create new trie-based backend.
pub fn new(storage: S, root: H::Out) -> Self {
TrieBackendEssence {
storage,
root,
_codec: Default::default(),
}
}
/// Get backend storage reference.
pub fn backend_storage(&self) -> &S {
&self.storage
}
/// Get trie root.
pub fn root(&self) -> &H::Out {
&self.root
}
/// Get the value of storage at given key.
pub fn storage(&self, key: &[u8]) -> Result<Option<Vec<u8>>, String> {
let mut read_overlay = MemoryDB::default();
let eph = Ephemeral {
storage: &self.storage,
overlay: &mut read_overlay,
};
let map_e = |e| format!("Trie lookup error: {}", e);
TrieDB::<H, C>::new(&eph, &self.root).map_err(map_e)?
.get(key).map(|x| x.map(|val| val.to_vec())).map_err(map_e)
}
/// Execute given closure for all keys starting with prefix.
pub fn for_keys_with_prefix<F: FnMut(&[u8])>(&self, prefix: &[u8], mut f: F) {
let mut read_overlay = MemoryDB::default();
let eph = Ephemeral {
storage: &self.storage,
overlay: &mut read_overlay,
};
let mut iter = move || -> Result<(), Box<TrieError<H::Out, C::Error>>> {
let trie = TrieDB::<H, C>::new(&eph, &self.root)?;
let mut iter = trie.iter()?;
iter.seek(prefix)?;
for x in iter {
let (key, _) = x?;
if !key.starts_with(prefix) {
break;
}
f(&key);
}
Ok(())
};
if let Err(e) = iter() {
debug!(target: "trie", "Error while iterating by prefix: {}", e);
}
}
}
/// `HashDB` adapter layering a mutable in-memory overlay on top of a
/// read-only backing storage; lives only for the duration of a single
/// trie operation.
pub(crate) struct Ephemeral<'a, S: 'a + TrieBackendStorage<H>, H: 'a + Hasher> {
	// Read-only backing storage, consulted when a key is not in the overlay.
	storage: &'a S,
	// Scratch database receiving all inserts/removals made during the operation.
	overlay: &'a mut MemoryDB<H>,
}
impl<'a, S: TrieBackendStorage<H>, H: Hasher> AsHashDB<H> for Ephemeral<'a, S, H> where H::Out: HeapSizeOf {
	// `Ephemeral` implements `HashDB` itself, so both conversions are
	// identity coercions to the trait object.
	fn as_hashdb(&self) -> &HashDB<H> { self }
	fn as_hashdb_mut(&mut self) -> &mut HashDB<H> { self }
}
impl<'a, S: 'a + TrieBackendStorage<H>, H: Hasher> Ephemeral<'a, S, H> {
	/// Construct an adapter over the given backing storage and scratch overlay.
	pub fn new(storage: &'a S, overlay: &'a mut MemoryDB<H>) -> Self {
		Ephemeral { storage, overlay }
	}
}
impl<'a, S: TrieBackendStorage<H>, H: Hasher> HashDB<H> for Ephemeral<'a, S, H> where H::Out: HeapSizeOf {
	fn keys(&self) -> HashMap<H::Out, i32> {
		// TODO: also enumerate keys of the backing storage, not just the overlay.
		self.overlay.keys()
	}

	fn get(&self, key: &H::Out) -> Option<DBValue> {
		// The overlay takes precedence; a non-positive reference count there
		// means the value has been logically removed during this operation.
		match self.overlay.raw(key) {
			Some((value, rc)) if rc > 0 => Some(value),
			Some(_) => None,
			None => match self.storage.get(key) {
				Ok(value) => value,
				Err(e) => {
					// Backend failures degrade to a miss: log and return None.
					warn!(target: "trie", "Failed to read from DB: {}", e);
					None
				},
			},
		}
	}

	fn contains(&self, key: &H::Out) -> bool {
		self.get(key).is_some()
	}

	fn insert(&mut self, value: &[u8]) -> H::Out {
		// All mutations are confined to the overlay; the backing storage is
		// never written through this adapter.
		self.overlay.insert(value)
	}

	fn emplace(&mut self, key: H::Out, value: DBValue) {
		self.overlay.emplace(key, value)
	}

	fn remove(&mut self, key: &H::Out) {
		self.overlay.remove(key)
	}
}
/// Key-value pairs storage that is used by trie backend essence.
pub trait TrieBackendStorage<H: Hasher>: Send + Sync {
	/// Get the trie node value stored under `key`. Returns `Ok(None)` when
	/// absent, or `Err` with a human-readable message on backend failure.
	fn get(&self, key: &H::Out) -> Result<Option<DBValue>, String>;
}
// This implementation is used by normal storage trie clients.
impl<H: Hasher> TrieBackendStorage<H> for Arc<Storage<H>> {
	fn get(&self, key: &H::Out) -> Result<Option<DBValue>, String> {
		// Explicitly dereference through the `Arc` to the inner trait object:
		// a plain `self.get(key)` would re-enter this impl and recurse forever.
		Storage::get(&**self, key)
	}
}
// This implementation is used by test storage trie clients.
impl<H: Hasher> TrieBackendStorage<H> for MemoryDB<H> {
fn get(&self, key: &H::Out) -> Result<Option<DBValue>, String> {
Ok(HashDB::<H>::get(self, key))
}
}
// This implementation is used by changes trie clients.
impl<'a, S, H: Hasher> TrieBackendStorage<H> for &'a S where S: ChangesTrieStorage<H> {
	fn get(&self, key: &H::Out) -> Result<Option<DBValue>, String> {
		// Fully-qualified call with an explicit `H`: `*self` is `&S`, and the
		// turbofish disambiguates from this impl's own `get`.
		ChangesTrieStorage::<H>::get(*self, key)
	}
}
+1 -1
View File
@@ -113,7 +113,7 @@ pub type BlockNumber = u64;
/// Index of a transaction.
pub type Index = u64;
/// The item of a block digest.
pub type DigestItem = runtime_primitives::generic::DigestItem<u64>;
pub type DigestItem = runtime_primitives::generic::DigestItem<H256, u64>;
/// The digest of a block.
pub type Digest = runtime_primitives::generic::Digest<DigestItem>;
/// A test block.
+14 -7
View File
@@ -20,10 +20,11 @@
use rstd::prelude::*;
use runtime_io::{storage_root, enumerated_trie_root};
use runtime_support::storage::{self, StorageValue, StorageMap};
use runtime_primitives::traits::{Hash as HashT, BlakeTwo256};
use runtime_primitives::traits::{Hash as HashT, BlakeTwo256, Digest as DigestT};
use runtime_primitives::generic;
use runtime_primitives::{ApplyError, ApplyOutcome, ApplyResult};
use codec::{KeyedVec, Encode};
use super::{AccountId, BlockNumber, Extrinsic, H256 as Hash, Block, Header};
use super::{AccountId, BlockNumber, Extrinsic, H256 as Hash, Block, Header, Digest};
use primitives::Blake2Hasher;
const NONCE_OF: &[u8] = b"nonce:";
@@ -101,13 +102,19 @@ pub fn finalise_block() -> Header {
let number = <Number>::take();
let parent_hash = <ParentHash>::take();
let storage_root = BlakeTwo256::storage_root();
let storage_changes_root = BlakeTwo256::storage_changes_root(number);
let mut digest = Digest::default();
if let Some(storage_changes_root) = storage_changes_root {
digest.push(generic::DigestItem::ChangesTrieRoot::<Hash, u64>(storage_changes_root));
}
Header {
number,
extrinsics_root,
state_root: storage_root,
parent_hash,
digest: Default::default(),
digest: digest,
}
}
@@ -172,17 +179,17 @@ mod tests {
use codec::{Joiner, KeyedVec};
use keyring::Keyring;
use ::{Header, Digest, Extrinsic, Transfer};
use primitives::Blake2Hasher;
use primitives::{Blake2Hasher, RlpCodec};
fn new_test_ext() -> TestExternalities<Blake2Hasher> {
map![
fn new_test_ext() -> TestExternalities<Blake2Hasher, RlpCodec> {
TestExternalities::new(map![
twox_128(b"latest").to_vec() => vec![69u8; 32],
twox_128(b":auth:len").to_vec() => vec![].and(&3u32),
twox_128(&0u32.to_keyed_vec(b":auth:")).to_vec() => Keyring::Alice.to_raw_public().to_vec(),
twox_128(&1u32.to_keyed_vec(b":auth:")).to_vec() => Keyring::Bob.to_raw_public().to_vec(),
twox_128(&2u32.to_keyed_vec(b":auth:")).to_vec() => Keyring::Charlie.to_raw_public().to_vec(),
twox_128(&Keyring::Alice.to_raw_public().to_keyed_vec(b"balance:")).to_vec() => vec![111u8, 0, 0, 0, 0, 0, 0, 0]
]
])
}
fn construct_signed_tx(tx: Transfer) -> Extrinsic {
+1 -1
View File
@@ -568,8 +568,8 @@ name = "srml-support"
version = "0.1.0"
dependencies = [
"hex-literal 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-codec 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"mashup 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-codec 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.64 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.64 (registry+https://github.com/rust-lang/crates.io-index)",
"sr-io 0.1.0",
+79 -33
View File
@@ -49,15 +49,17 @@ mod tests {
use codec::{Encode, Decode, Joiner};
use keyring::Keyring;
use runtime_support::{Hashable, StorageValue, StorageMap};
use state_machine::{CodeExecutor, TestExternalities};
use primitives::{twox_128, Blake2Hasher, ed25519::{Public, Pair}};
use state_machine::{CodeExecutor, Externalities, TestExternalities};
use primitives::{twox_128, Blake2Hasher, RlpCodec, ChangesTrieConfiguration,
ed25519::{Public, Pair}};
use node_primitives::{Hash, BlockNumber, AccountId};
use runtime_primitives::traits::Header as HeaderT;
use runtime_primitives::{ApplyOutcome, ApplyError, ApplyResult};
use runtime_primitives::traits::{Header as HeaderT, Digest as DigestT};
use runtime_primitives::{generic, ApplyOutcome, ApplyError, ApplyResult};
use {balances, staking, session, system, consensus, timestamp, treasury};
use system::{EventRecord, Phase};
use node_runtime::{Header, Block, UncheckedExtrinsic, CheckedExtrinsic, Call, Runtime, Balances,
BuildStorage, GenesisConfig, BalancesConfig, SessionConfig, StakingConfig, System, Event};
BuildStorage, GenesisConfig, BalancesConfig, SessionConfig, StakingConfig, System,
SystemConfig, Event, Log};
const BLOATY_CODE: &[u8] = include_bytes!("../../runtime/wasm/target/wasm32-unknown-unknown/release/node_runtime.wasm");
const COMPACT_CODE: &[u8] = include_bytes!("../../runtime/wasm/target/wasm32-unknown-unknown/release/node_runtime.compact.wasm");
@@ -115,7 +117,7 @@ mod tests {
#[test]
fn panic_execution_with_foreign_code_gives_error() {
let mut t: TestExternalities<Blake2Hasher> = map![
let mut t = TestExternalities::<Blake2Hasher, RlpCodec>::new(map![
twox_128(&<balances::FreeBalance<Runtime>>::key_for(alice())).to_vec() => vec![69u8, 0, 0, 0, 0, 0, 0, 0],
twox_128(<balances::TotalIssuance<Runtime>>::key()).to_vec() => vec![69u8, 0, 0, 0, 0, 0, 0, 0],
twox_128(<balances::TransactionBaseFee<Runtime>>::key()).to_vec() => vec![70u8; 8],
@@ -125,7 +127,7 @@ mod tests {
twox_128(<balances::TransferFee<Runtime>>::key()).to_vec() => vec![0u8; 8],
twox_128(<balances::NextEnumSet<Runtime>>::key()).to_vec() => vec![0u8; 8],
twox_128(&<system::BlockHash<Runtime>>::key_for(0)).to_vec() => vec![0u8; 32]
];
]);
let r = executor().call(&mut t, 8, BLOATY_CODE, "initialise_block", &vec![].and(&from_block_number(1u64)), true).0;
assert!(r.is_ok());
@@ -136,7 +138,7 @@ mod tests {
#[test]
fn bad_extrinsic_with_native_equivalent_code_gives_error() {
let mut t: TestExternalities<Blake2Hasher> = map![
let mut t = TestExternalities::<Blake2Hasher, RlpCodec>::new(map![
twox_128(&<balances::FreeBalance<Runtime>>::key_for(alice())).to_vec() => vec![69u8, 0, 0, 0, 0, 0, 0, 0],
twox_128(<balances::TotalIssuance<Runtime>>::key()).to_vec() => vec![69u8, 0, 0, 0, 0, 0, 0, 0],
twox_128(<balances::TransactionBaseFee<Runtime>>::key()).to_vec() => vec![70u8; 8],
@@ -146,7 +148,7 @@ mod tests {
twox_128(<balances::TransferFee<Runtime>>::key()).to_vec() => vec![0u8; 8],
twox_128(<balances::NextEnumSet<Runtime>>::key()).to_vec() => vec![0u8; 8],
twox_128(&<system::BlockHash<Runtime>>::key_for(0)).to_vec() => vec![0u8; 32]
];
]);
let r = executor().call(&mut t, 8, COMPACT_CODE, "initialise_block", &vec![].and(&from_block_number(1u64)), true).0;
assert!(r.is_ok());
@@ -157,7 +159,7 @@ mod tests {
#[test]
fn successful_execution_with_native_equivalent_code_gives_ok() {
let mut t: TestExternalities<Blake2Hasher> = map![
let mut t = TestExternalities::<Blake2Hasher, RlpCodec>::new(map![
twox_128(&<balances::FreeBalance<Runtime>>::key_for(alice())).to_vec() => vec![111u8, 0, 0, 0, 0, 0, 0, 0],
twox_128(<balances::TotalIssuance<Runtime>>::key()).to_vec() => vec![111u8, 0, 0, 0, 0, 0, 0, 0],
twox_128(<balances::TransactionBaseFee<Runtime>>::key()).to_vec() => vec![0u8; 8],
@@ -167,7 +169,7 @@ mod tests {
twox_128(<balances::TransferFee<Runtime>>::key()).to_vec() => vec![0u8; 8],
twox_128(<balances::NextEnumSet<Runtime>>::key()).to_vec() => vec![0u8; 8],
twox_128(&<system::BlockHash<Runtime>>::key_for(0)).to_vec() => vec![0u8; 32]
];
]);
let r = executor().call(&mut t, 8, COMPACT_CODE, "initialise_block", &vec![].and(&from_block_number(1u64)), true).0;
assert!(r.is_ok());
@@ -182,7 +184,7 @@ mod tests {
#[test]
fn successful_execution_with_foreign_code_gives_ok() {
let mut t: TestExternalities<Blake2Hasher> = map![
let mut t = TestExternalities::<Blake2Hasher, RlpCodec>::new(map![
twox_128(&<balances::FreeBalance<Runtime>>::key_for(alice())).to_vec() => vec![111u8, 0, 0, 0, 0, 0, 0, 0],
twox_128(<balances::TotalIssuance<Runtime>>::key()).to_vec() => vec![111u8, 0, 0, 0, 0, 0, 0, 0],
twox_128(<balances::TransactionBaseFee<Runtime>>::key()).to_vec() => vec![0u8; 8],
@@ -192,7 +194,7 @@ mod tests {
twox_128(<balances::TransferFee<Runtime>>::key()).to_vec() => vec![0u8; 8],
twox_128(<balances::NextEnumSet<Runtime>>::key()).to_vec() => vec![0u8; 8],
twox_128(&<system::BlockHash<Runtime>>::key_for(0)).to_vec() => vec![0u8; 32]
];
]);
let r = executor().call(&mut t, 8, BLOATY_CODE, "initialise_block", &vec![].and(&from_block_number(1u64)), true).0;
assert!(r.is_ok());
@@ -205,12 +207,18 @@ mod tests {
});
}
fn new_test_ext() -> TestExternalities<Blake2Hasher> {
fn new_test_ext(support_changes_trie: bool) -> TestExternalities<Blake2Hasher, RlpCodec> {
use keyring::Keyring::*;
let three = [3u8; 32].into();
GenesisConfig {
TestExternalities::new(GenesisConfig {
consensus: Some(Default::default()),
system: Some(Default::default()),
system: Some(SystemConfig {
changes_trie_config: if support_changes_trie { Some(ChangesTrieConfiguration {
digest_interval: 2,
digest_levels: 2,
}) } else { None },
..Default::default()
}),
balances: Some(BalancesConfig {
balances: vec![(alice(), 111)],
transaction_base_fee: 1,
@@ -240,32 +248,52 @@ mod tests {
timestamp: Some(Default::default()),
treasury: Some(Default::default()),
contract: Some(Default::default()),
}.build_storage().unwrap().into()
}.build_storage().unwrap())
}
fn construct_block(number: BlockNumber, parent_hash: Hash, state_root: Hash, extrinsics: Vec<CheckedExtrinsic>) -> (Vec<u8>, Hash) {
fn construct_block(
number: BlockNumber,
parent_hash: Hash,
state_root: Hash,
changes_root: Option<Hash>,
extrinsics: Vec<CheckedExtrinsic>
) -> (Vec<u8>, Hash) {
use triehash::ordered_trie_root;
let extrinsics = extrinsics.into_iter().map(sign).collect::<Vec<_>>();
let extrinsics_root = ordered_trie_root::<Blake2Hasher, _, _>(extrinsics.iter().map(Encode::encode)).0.into();
let mut digest = generic::Digest::<Log>::default();
if let Some(changes_root) = changes_root {
digest.push(Log::from(system::RawLog::ChangesTrieRoot::<Hash>(changes_root)));
}
let header = Header {
parent_hash,
number,
state_root,
extrinsics_root,
digest: Default::default(),
digest,
};
let hash = header.blake2_256();
(Block { header, extrinsics }.encode(), hash.into())
}
fn block1() -> (Vec<u8>, Hash) {
fn block1(support_changes_trie: bool) -> (Vec<u8>, Hash) {
construct_block(
1,
[69u8; 32].into(),
hex!("1f058f699ad3187bcf7e9ed8e44464d7a5added0cd912d2679b9dab2e7a04053").into(),
if support_changes_trie {
hex!("1755be7303767b4d3855694b4f0ebd9d64b7011124d0ec1ad3e17c2a0d65e245").into()
} else {
hex!("1f058f699ad3187bcf7e9ed8e44464d7a5added0cd912d2679b9dab2e7a04053").into()
},
if support_changes_trie {
Some(hex!("d7ff76d7fbb9b613e8d140da6f1d561b4928785d4e4818ed959bd1bd35abc7e8").into())
} else {
None
},
vec![
CheckedExtrinsic {
signed: None,
@@ -284,8 +312,9 @@ mod tests {
fn block2() -> (Vec<u8>, Hash) {
construct_block(
2,
block1().1,
block1(false).1,
hex!("29fa1d0aa83662c571315af54b106c73823a31f759793803bf8929960b67b138").into(),
None,
vec![
CheckedExtrinsic {
signed: None,
@@ -311,6 +340,7 @@ mod tests {
1,
[69u8; 32].into(),
hex!("fe0e07c7b054fe186387461d455d536860e9c71d6979fd9dbf755e96ce070d04").into(),
None,
vec![
CheckedExtrinsic {
signed: None,
@@ -328,9 +358,9 @@ mod tests {
#[test]
fn full_native_block_import_works() {
let mut t = new_test_ext();
let mut t = new_test_ext(false);
executor().call(&mut t, 8, COMPACT_CODE, "execute_block", &block1().0, true).0.unwrap();
executor().call(&mut t, 8, COMPACT_CODE, "execute_block", &block1(false).0, true).0.unwrap();
runtime_io::with_externalities(&mut t, || {
assert_eq!(Balances::total_balance(&alice()), 41);
@@ -438,9 +468,9 @@ mod tests {
#[test]
fn full_wasm_block_import_works() {
let mut t = new_test_ext();
let mut t = new_test_ext(false);
WasmExecutor::new().call(&mut t, 8, COMPACT_CODE, "execute_block", &block1().0).unwrap();
WasmExecutor::new().call(&mut t, 8, COMPACT_CODE, "execute_block", &block1(false).0).unwrap();
runtime_io::with_externalities(&mut t, || {
assert_eq!(Balances::total_balance(&alice()), 41);
@@ -457,7 +487,7 @@ mod tests {
#[test]
fn wasm_big_block_import_fails() {
let mut t = new_test_ext();
let mut t = new_test_ext(false);
let r = WasmExecutor::new().call(&mut t, 8, COMPACT_CODE, "execute_block", &block1big().0);
assert!(!r.is_ok());
@@ -465,7 +495,7 @@ mod tests {
#[test]
fn native_big_block_import_succeeds() {
let mut t = new_test_ext();
let mut t = new_test_ext(false);
let r = Executor::new().call(&mut t, 8, COMPACT_CODE, "execute_block", &block1big().0, true).0;
assert!(r.is_ok());
@@ -473,7 +503,7 @@ mod tests {
#[test]
fn native_big_block_import_fails_on_fallback() {
let mut t = new_test_ext();
let mut t = new_test_ext(false);
let r = Executor::new().call(&mut t, 8, COMPACT_CODE, "execute_block", &block1big().0, false).0;
assert!(!r.is_ok());
@@ -481,7 +511,7 @@ mod tests {
#[test]
fn panic_execution_gives_error() {
let mut t: TestExternalities<Blake2Hasher> = map![
let mut t = TestExternalities::<Blake2Hasher, RlpCodec>::new(map![
twox_128(&<balances::FreeBalance<Runtime>>::key_for(alice())).to_vec() => vec![69u8, 0, 0, 0, 0, 0, 0, 0],
twox_128(<balances::TotalIssuance<Runtime>>::key()).to_vec() => vec![69u8, 0, 0, 0, 0, 0, 0, 0],
twox_128(<balances::TransactionBaseFee<Runtime>>::key()).to_vec() => vec![70u8; 8],
@@ -491,7 +521,7 @@ mod tests {
twox_128(<balances::TransferFee<Runtime>>::key()).to_vec() => vec![0u8; 8],
twox_128(<balances::NextEnumSet<Runtime>>::key()).to_vec() => vec![0u8; 8],
twox_128(&<system::BlockHash<Runtime>>::key_for(0)).to_vec() => vec![0u8; 32]
];
]);
let foreign_code = include_bytes!("../../runtime/wasm/target/wasm32-unknown-unknown/release/node_runtime.wasm");
let r = WasmExecutor::new().call(&mut t, 8, &foreign_code[..], "initialise_block", &vec![].and(&from_block_number(1u64)));
@@ -503,7 +533,7 @@ mod tests {
#[test]
fn successful_execution_gives_ok() {
let mut t: TestExternalities<Blake2Hasher> = map![
let mut t = TestExternalities::<Blake2Hasher, RlpCodec>::new(map![
twox_128(&<balances::FreeBalance<Runtime>>::key_for(alice())).to_vec() => vec![111u8, 0, 0, 0, 0, 0, 0, 0],
twox_128(<balances::TotalIssuance<Runtime>>::key()).to_vec() => vec![111u8, 0, 0, 0, 0, 0, 0, 0],
twox_128(<balances::TransactionBaseFee<Runtime>>::key()).to_vec() => vec![0u8; 8],
@@ -513,7 +543,7 @@ mod tests {
twox_128(<balances::TransferFee<Runtime>>::key()).to_vec() => vec![0u8; 8],
twox_128(<balances::NextEnumSet<Runtime>>::key()).to_vec() => vec![0u8; 8],
twox_128(&<system::BlockHash<Runtime>>::key_for(0)).to_vec() => vec![0u8; 32]
];
]);
let foreign_code = include_bytes!("../../runtime/wasm/target/wasm32-unknown-unknown/release/node_runtime.compact.wasm");
let r = WasmExecutor::new().call(&mut t, 8, &foreign_code[..], "initialise_block", &vec![].and(&from_block_number(1u64)));
@@ -527,4 +557,20 @@ mod tests {
assert_eq!(Balances::total_balance(&bob()), 69);
});
}
#[test]
fn full_native_block_import_works_with_changes_trie() {
	// Genesis built with a changes-trie configuration (digest_interval/levels).
	let mut t = new_test_ext(true);
	Executor::new().call(&mut t, 8, COMPACT_CODE, "execute_block", &block1(true).0, true).0.unwrap();
	// Native import of block #1 must have produced a changes-trie root.
	assert!(t.storage_changes_root(1).is_some());
}
#[test]
fn full_wasm_block_import_works_with_changes_trie() {
	// Same as the native variant, but executing the block through wasm.
	let mut t = new_test_ext(true);
	WasmExecutor::new().call(&mut t, 8, COMPACT_CODE, "execute_block", &block1(true).0).unwrap();
	// Wasm import of block #1 must have produced a changes-trie root.
	assert!(t.storage_changes_root(1).is_some());
}
}
+1 -1
View File
@@ -73,7 +73,7 @@ pub type Signature = runtime_primitives::Ed25519Signature;
pub type Timestamp = u64;
/// Header type.
pub type Header = generic::Header<BlockNumber, BlakeTwo256, generic::DigestItem<()>>;
pub type Header = generic::Header<BlockNumber, BlakeTwo256, generic::DigestItem<Hash, SessionKey>>;
/// Block type.
pub type Block = generic::Block<Header, UncheckedExtrinsic>;
/// Block ID.
+12 -2
View File
@@ -105,6 +105,7 @@ impl system::Trait for Runtime {
type AccountId = AccountId;
type Header = generic::Header<BlockNumber, BlakeTwo256, Log>;
type Event = Event;
type Log = Log;
}
impl balances::Trait for Runtime {
@@ -177,18 +178,27 @@ impl contract::Trait for Runtime {
}
impl DigestItem for Log {
type Hash = Hash;
type AuthorityId = SessionKey;
fn as_authorities_change(&self) -> Option<&[Self::AuthorityId]> {
match self.0 {
InternalLog::consensus(ref item) => item.as_authorities_change(),
_ => None,
}
}
fn as_changes_trie_root(&self) -> Option<&Self::Hash> {
match self.0 {
InternalLog::system(ref item) => item.as_changes_trie_root(),
_ => None,
}
}
}
construct_runtime!(
pub enum Runtime with Log(InternalLog: DigestItem<SessionKey>) {
System: system,
pub enum Runtime with Log(InternalLog: DigestItem<Hash, SessionKey>) {
System: system::{default, Log(ChangesTrieRoot)},
Consensus: consensus::{Module, Call, Storage, Config, Log(AuthoritiesChange)},
Balances: balances,
Timestamp: timestamp::{Module, Call, Storage, Config},
@@ -18,13 +18,11 @@
#![cfg(feature = "std")]
use std::collections::HashMap;
use rstd::prelude::*;
use codec::Encode;
use runtime_support::{StorageValue, StorageMap};
use primitives::traits::{Zero, As};
use substrate_primitives::Blake2Hasher;
use {runtime_io, primitives};
use primitives;
use super::{Trait, ENUM_SET_SIZE, EnumSet, NextEnumSet, CreationFee, TransferFee,
ReclaimRebate, ExistentialDeposit, TransactionByteFee, TransactionBaseFee, TotalIssuance,
FreeBalance};
@@ -57,10 +55,10 @@ impl<T: Trait> Default for GenesisConfig<T> {
}
impl<T: Trait> primitives::BuildStorage for GenesisConfig<T> {
fn build_storage(self) -> ::std::result::Result<HashMap<Vec<u8>, Vec<u8>>, String> {
fn build_storage(self) -> ::std::result::Result<primitives::StorageMap, String> {
let total_issuance: T::Balance = self.balances.iter().fold(Zero::zero(), |acc, &(_, n)| acc + n);
let mut r: runtime_io::TestExternalities<Blake2Hasher> = map![
let mut r: primitives::StorageMap = map![
Self::hash(<NextEnumSet<T>>::key()).to_vec() => T::AccountIndex::sa(self.balances.len() / ENUM_SET_SIZE).encode(),
Self::hash(<TransactionBaseFee<T>>::key()).to_vec() => self.transaction_base_fee.encode(),
Self::hash(<TransactionByteFee<T>>::key()).to_vec() => self.transaction_byte_fee.encode(),
@@ -79,6 +77,6 @@ impl<T: Trait> primitives::BuildStorage for GenesisConfig<T> {
for (who, value) in self.balances.into_iter() {
r.insert(Self::hash(&<FreeBalance<T>>::key_for(who)).to_vec(), value.encode());
}
Ok(r.into())
Ok(r)
}
}
+5 -4
View File
@@ -19,8 +19,8 @@
#![cfg(test)]
use primitives::BuildStorage;
use primitives::testing::{Digest, Header};
use substrate_primitives::{H256, Blake2Hasher};
use primitives::testing::{Digest, DigestItem, Header};
use substrate_primitives::{H256, Blake2Hasher, RlpCodec};
use runtime_io;
use {GenesisConfig, Module, Trait, system};
@@ -41,6 +41,7 @@ impl system::Trait for Runtime {
type AccountId = u64;
type Header = Header;
type Event = ();
type Log = DigestItem;
}
impl Trait for Runtime {
type Balance = u64;
@@ -50,7 +51,7 @@ impl Trait for Runtime {
type Event = ();
}
pub fn new_test_ext(ext_deposit: u64, monied: bool) -> runtime_io::TestExternalities<Blake2Hasher> {
pub fn new_test_ext(ext_deposit: u64, monied: bool) -> runtime_io::TestExternalities<Blake2Hasher, RlpCodec> {
let mut t = system::GenesisConfig::<Runtime>::default().build_storage().unwrap();
let balance_factor = if ext_deposit > 0 {
256
@@ -73,7 +74,7 @@ pub fn new_test_ext(ext_deposit: u64, monied: bool) -> runtime_io::TestExternali
t.into()
}
pub fn new_test_ext2(ext_deposit: u64, monied: bool) -> runtime_io::TestExternalities<Blake2Hasher> {
pub fn new_test_ext2(ext_deposit: u64, monied: bool) -> runtime_io::TestExternalities<Blake2Hasher, RlpCodec> {
let mut t = system::GenesisConfig::<Runtime>::default().build_storage().unwrap();
let balance_factor = if ext_deposit > 0 {
256
+13 -16
View File
@@ -46,15 +46,10 @@ use runtime_support::{storage, Parameter};
use runtime_support::dispatch::Result;
use runtime_support::storage::StorageValue;
use runtime_support::storage::unhashed::StorageVec;
use primitives::traits::{MaybeSerializeDebug, OnFinalise, Member, DigestItem};
use primitives::traits::{MaybeSerializeDebug, OnFinalise, Member};
use primitives::bft::MisbehaviorReport;
use system::{ensure_signed, ensure_inherent};
#[cfg(any(feature = "std", test))]
use substrate_primitives::Blake2Hasher;
#[cfg(any(feature = "std", test))]
use std::collections::HashMap;
pub const AUTHORITY_AT: &'static [u8] = b":auth:";
pub const AUTHORITY_COUNT: &'static [u8] = b":auth:len";
@@ -88,22 +83,24 @@ pub enum RawLog<SessionKey> {
AuthoritiesChange(Vec<SessionKey>),
}
impl<SessionKey: Member> DigestItem for RawLog<SessionKey> {
type AuthorityId = SessionKey;
impl<SessionKey: Member> RawLog<SessionKey> {
/// Try to cast the log entry as AuthoritiesChange log entry.
fn as_authorities_change(&self) -> Option<&[SessionKey]> {
pub fn as_authorities_change(&self) -> Option<&[SessionKey]> {
match *self {
RawLog::AuthoritiesChange(ref item) => Some(&item),
RawLog::AuthoritiesChange(ref item) => Some(item),
}
}
}
// Implementation for tests outside of this crate.
impl<N> From<RawLog<N>> for u64 {
fn from(log: RawLog<N>) -> u64 {
#[cfg(any(feature = "std", test))]
impl<N> From<RawLog<N>> for primitives::testing::DigestItem {
fn from(log: RawLog<N>) -> primitives::testing::DigestItem {
match log {
RawLog::AuthoritiesChange(_) => 1,
RawLog::AuthoritiesChange(authorities) =>
primitives::generic::DigestItem::AuthoritiesChange
::<substrate_primitives::H256, u64>(authorities.into_iter()
.enumerate().map(|(i, _)| i as u64).collect()),
}
}
}
@@ -252,10 +249,10 @@ impl<T: Trait> Default for GenesisConfig<T> {
#[cfg(any(feature = "std", test))]
impl<T: Trait> primitives::BuildStorage for GenesisConfig<T>
{
fn build_storage(self) -> ::std::result::Result<HashMap<Vec<u8>, Vec<u8>>, String> {
fn build_storage(self) -> ::std::result::Result<primitives::StorageMap, String> {
use codec::{Encode, KeyedVec};
let auth_count = self.authorities.len() as u32;
let mut r: runtime_io::TestExternalities<Blake2Hasher> = self.authorities.into_iter().enumerate().map(|(i, v)|
let mut r: primitives::StorageMap = self.authorities.into_iter().enumerate().map(|(i, v)|
((i as u32).to_keyed_vec(AUTHORITY_AT), v.encode())
).collect();
r.insert(AUTHORITY_COUNT.to_vec(), auth_count.encode());
@@ -22,11 +22,9 @@ use {Trait, ContractFee, CallBaseFee, CreateBaseFee, GasPrice, MaxDepth, BlockGa
use runtime_primitives;
use runtime_primitives::traits::As;
use runtime_io::{self, twox_128};
use runtime_io::twox_128;
use runtime_support::StorageValue;
use codec::Encode;
use std::collections::HashMap;
use substrate_primitives::Blake2Hasher;
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -54,15 +52,14 @@ impl<T: Trait> Default for GenesisConfig<T> {
}
impl<T: Trait> runtime_primitives::BuildStorage for GenesisConfig<T> {
fn build_storage(self) -> ::std::result::Result<HashMap<Vec<u8>, Vec<u8>>, String> {
let r: runtime_io::TestExternalities<Blake2Hasher> = map![
fn build_storage(self) -> ::std::result::Result<runtime_primitives::StorageMap, String> {
Ok(map![
twox_128(<ContractFee<T>>::key()).to_vec() => self.contract_fee.encode(),
twox_128(<CallBaseFee<T>>::key()).to_vec() => self.call_base_fee.encode(),
twox_128(<CreateBaseFee<T>>::key()).to_vec() => self.create_base_fee.encode(),
twox_128(<GasPrice<T>>::key()).to_vec() => self.gas_price.encode(),
twox_128(<MaxDepth<T>>::key()).to_vec() => self.max_depth.encode(),
twox_128(<BlockGasLimit<T>>::key()).to_vec() => self.block_gas_limit.encode()
];
Ok(r.into())
])
}
}
+5 -4
View File
@@ -16,11 +16,11 @@
use double_map::StorageDoubleMap;
use runtime_io::with_externalities;
use runtime_primitives::testing::{Digest, H256, Header};
use runtime_primitives::testing::{Digest, DigestItem, H256, Header};
use runtime_primitives::traits::{BlakeTwo256};
use runtime_primitives::BuildStorage;
use runtime_support::StorageMap;
use substrate_primitives::Blake2Hasher;
use substrate_primitives::{Blake2Hasher, RlpCodec};
use wabt;
use {
runtime_io, balances, system, CodeOf, ContractAddressFor,
@@ -43,6 +43,7 @@ impl system::Trait for Test {
type AccountId = u64;
type Header = Header;
type Event = ();
type Log = DigestItem;
}
impl balances::Trait for Test {
type Balance = u64;
@@ -105,7 +106,7 @@ impl ExtBuilder {
self.creation_fee = creation_fee;
self
}
fn build(self) -> runtime_io::TestExternalities<Blake2Hasher> {
fn build(self) -> runtime_io::TestExternalities<Blake2Hasher, RlpCodec> {
let mut t = system::GenesisConfig::<Test>::default()
.build_storage()
.unwrap();
@@ -132,7 +133,7 @@ impl ExtBuilder {
}.build_storage()
.unwrap(),
);
t.into()
runtime_io::TestExternalities::new(t)
}
}
+6 -7
View File
@@ -45,8 +45,6 @@ extern crate srml_system as system;
#[cfg(feature = "std")]
use rstd::prelude::*;
#[cfg(feature = "std")]
use std::collections::HashMap;
#[cfg(feature = "std")]
use primitives::traits::As;
#[cfg(feature = "std")]
use srml_support::StorageValue;
@@ -102,7 +100,7 @@ impl<T: seats::Trait + voting::Trait + motions::Trait> Default for GenesisConfig
#[cfg(feature = "std")]
impl<T: seats::Trait + voting::Trait + motions::Trait> primitives::BuildStorage for GenesisConfig<T>
{
fn build_storage(self) -> ::std::result::Result<HashMap<Vec<u8>, Vec<u8>>, String> {
fn build_storage(self) -> ::std::result::Result<primitives::StorageMap, String> {
use codec::Encode;
Ok(map![
@@ -132,8 +130,8 @@ mod tests {
pub use substrate_primitives::H256;
pub use primitives::BuildStorage;
pub use primitives::traits::{BlakeTwo256};
pub use primitives::testing::{Digest, Header};
pub use substrate_primitives::Blake2Hasher;
pub use primitives::testing::{Digest, DigestItem, Header};
pub use substrate_primitives::{Blake2Hasher, RlpCodec};
pub use {seats, motions, voting};
impl_outer_origin! {
@@ -168,6 +166,7 @@ mod tests {
type AccountId = u64;
type Header = Header;
type Event = Event;
type Log = DigestItem;
}
impl balances::Trait for Test {
type Balance = u64;
@@ -192,7 +191,7 @@ mod tests {
type Event = Event;
}
pub fn new_test_ext(with_council: bool) -> runtime_io::TestExternalities<Blake2Hasher> {
pub fn new_test_ext(with_council: bool) -> runtime_io::TestExternalities<Blake2Hasher, RlpCodec> {
let mut t = system::GenesisConfig::<Test>::default().build_storage().unwrap();
t.extend(balances::GenesisConfig::<Test>{
balances: vec![(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)],
@@ -226,7 +225,7 @@ mod tests {
cooloff_period: 2,
voting_period: 1,
}.build_storage().unwrap());
t.into()
runtime_io::TestExternalities::new(t)
}
pub type System = system::Module<Test>;
+1 -1
View File
@@ -622,7 +622,7 @@ mod tests {
});
}
fn new_test_ext_with_candidate_holes() -> runtime_io::TestExternalities<Blake2Hasher> {
fn new_test_ext_with_candidate_holes() -> runtime_io::TestExternalities<Blake2Hasher, RlpCodec> {
let mut t = new_test_ext(false);
with_externalities(&mut t, || {
<Candidates<Test>>::put(vec![0, 0, 1]);
+6 -8
View File
@@ -48,9 +48,6 @@ use srml_support::{StorageValue, StorageMap, Parameter, Dispatchable, IsSubType}
use srml_support::dispatch::Result;
use system::ensure_signed;
#[cfg(any(feature = "std", test))]
use std::collections::HashMap;
mod vote_threshold;
pub use vote_threshold::{Approved, VoteThreshold};
@@ -352,7 +349,7 @@ impl<T: Trait> Default for GenesisConfig<T> {
#[cfg(any(feature = "std", test))]
impl<T: Trait> primitives::BuildStorage for GenesisConfig<T>
{
fn build_storage(self) -> ::std::result::Result<HashMap<Vec<u8>, Vec<u8>>, String> {
fn build_storage(self) -> ::std::result::Result<primitives::StorageMap, String> {
use codec::Encode;
Ok(map![
@@ -370,10 +367,10 @@ impl<T: Trait> primitives::BuildStorage for GenesisConfig<T>
mod tests {
use super::*;
use runtime_io::with_externalities;
use substrate_primitives::{H256, Blake2Hasher};
use substrate_primitives::{H256, Blake2Hasher, RlpCodec};
use primitives::BuildStorage;
use primitives::traits::{BlakeTwo256};
use primitives::testing::{Digest, Header};
use primitives::testing::{Digest, DigestItem, Header};
impl_outer_origin! {
pub enum Origin for Test {}
@@ -399,6 +396,7 @@ mod tests {
type AccountId = u64;
type Header = Header;
type Event = ();
type Log = DigestItem;
}
impl balances::Trait for Test {
type Balance = u64;
@@ -412,7 +410,7 @@ mod tests {
type Event = ();
}
fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher> {
fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher, RlpCodec> {
let mut t = system::GenesisConfig::<Test>::default().build_storage().unwrap();
t.extend(balances::GenesisConfig::<Test>{
balances: vec![(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)],
@@ -428,7 +426,7 @@ mod tests {
voting_period: 1,
minimum_deposit: 1,
}.build_storage().unwrap());
t.into()
runtime_io::TestExternalities::new(t)
}
type System = system::Module<Test>;
+4 -2
View File
@@ -326,9 +326,10 @@ mod tests {
use super::*;
use runtime_io::with_externalities;
use substrate_primitives::{H256, Blake2Hasher};
use substrate_primitives::{H256, Blake2Hasher, RlpCodec};
use runtime_primitives::BuildStorage;
use runtime_primitives::traits::{BlakeTwo256};
use runtime_primitives::testing::DigestItem;
// The testing primitives are very useful for avoiding having to work with signatures
// or public keys. `u64` is used as the `AccountId` and no `Signature`s are requried.
@@ -353,6 +354,7 @@ mod tests {
type AccountId = u64;
type Header = Header;
type Event = ();
type Log = DigestItem;
}
impl balances::Trait for Test {
type Balance = u64;
@@ -368,7 +370,7 @@ mod tests {
// This function basically just builds a genesis storage key/value store according to
// our desired mockup.
fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher> {
fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher, RlpCodec> {
let mut t = system::GenesisConfig::<Test>::default().build_storage().unwrap();
// We use default for brevity, but you can configure as desired if needed.
t.extend(balances::GenesisConfig::<Test>::default().build_storage().unwrap());
+11 -9
View File
@@ -204,11 +204,11 @@ impl<
}
fn final_checks(header: &System::Header) {
// check digest
assert!(header.digest() == &<system::Module<System>>::digest());
// remove temporaries.
<system::Module<System>>::finalise();
let new_header = <system::Module<System>>::finalise();
// check digest
assert!(header.digest() == new_header.digest());
// check storage root.
let storage_root = System::Hashing::storage_root();
@@ -266,10 +266,10 @@ mod tests {
use super::*;
use balances::Call;
use runtime_io::with_externalities;
use substrate_primitives::{H256, Blake2Hasher};
use substrate_primitives::{H256, Blake2Hasher, RlpCodec};
use primitives::BuildStorage;
use primitives::traits::{Header as HeaderT, BlakeTwo256, Lookup};
use primitives::testing::{Digest, Header, Block};
use primitives::testing::{Digest, DigestItem, Header, Block};
use system;
struct NullLookup;
@@ -305,6 +305,7 @@ mod tests {
type AccountId = u64;
type Header = Header;
type Event = MetaEvent;
type Log = DigestItem;
}
impl balances::Trait for Runtime {
type Balance = u64;
@@ -330,16 +331,17 @@ mod tests {
reclaim_rebate: 0,
}.build_storage().unwrap());
let xt = primitives::testing::TestXt(Some(1), 0, Call::transfer(2.into(), 69));
let mut t = runtime_io::TestExternalities::from(t);
let mut t = runtime_io::TestExternalities::<Blake2Hasher, RlpCodec>::new(t);
with_externalities(&mut t, || {
Executive::initialise_block(&Header::new(1, H256::default(), H256::default(), [69u8; 32].into(), Digest::default()));
Executive::initialise_block(&Header::new(1, H256::default(), H256::default(),
[69u8; 32].into(), Digest::default()));
Executive::apply_extrinsic(xt).unwrap();
assert_eq!(<balances::Module<Runtime>>::total_balance(&1), 32);
assert_eq!(<balances::Module<Runtime>>::total_balance(&2), 69);
});
}
fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher> {
fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher, RlpCodec> {
let mut t = system::GenesisConfig::<Runtime>::default().build_storage().unwrap();
t.extend(balances::GenesisConfig::<Runtime>::default().build_storage().unwrap());
t.into()
+7 -10
View File
@@ -54,9 +54,6 @@ use runtime_support::{StorageValue, StorageMap};
use runtime_support::dispatch::Result;
use system::ensure_signed;
#[cfg(any(feature = "std", test))]
use std::collections::HashMap;
/// A session has changed.
pub trait OnSessionChange<T> {
/// Session has changed.
@@ -265,8 +262,7 @@ impl<T: Trait> Default for GenesisConfig<T> {
#[cfg(any(feature = "std", test))]
impl<T: Trait> primitives::BuildStorage for GenesisConfig<T>
{
fn build_storage(self) -> ::std::result::Result<HashMap<Vec<u8>, Vec<u8>>, String> {
fn build_storage(self) -> ::std::result::Result<primitives::StorageMap, String> {
use codec::Encode;
use primitives::traits::As;
Ok(map![
@@ -282,10 +278,10 @@ impl<T: Trait> primitives::BuildStorage for GenesisConfig<T>
mod tests {
use super::*;
use runtime_io::with_externalities;
use substrate_primitives::{H256, Blake2Hasher};
use substrate_primitives::{H256, Blake2Hasher, RlpCodec};
use primitives::BuildStorage;
use primitives::traits::{Identity, BlakeTwo256};
use primitives::testing::{Digest, Header};
use primitives::testing::{Digest, DigestItem, Header};
impl_outer_origin!{
pub enum Origin for Test {}
@@ -295,7 +291,7 @@ mod tests {
pub struct Test;
impl consensus::Trait for Test {
const NOTE_OFFLINE_POSITION: u32 = 1;
type Log = u64;
type Log = DigestItem;
type SessionKey = u64;
type OnOfflineValidator = ();
}
@@ -309,6 +305,7 @@ mod tests {
type AccountId = u64;
type Header = Header;
type Event = ();
type Log = DigestItem;
}
impl timestamp::Trait for Test {
const TIMESTAMP_SET_POSITION: u32 = 0;
@@ -324,7 +321,7 @@ mod tests {
type Consensus = consensus::Module<Test>;
type Session = Module<Test>;
fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher> {
fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher, RlpCodec> {
let mut t = system::GenesisConfig::<Test>::default().build_storage().unwrap();
t.extend(consensus::GenesisConfig::<Test>{
code: vec![],
@@ -337,7 +334,7 @@ mod tests {
session_length: 2,
validators: vec![1, 2, 3],
}.build_storage().unwrap());
t.into()
runtime_io::TestExternalities::new(t)
}
#[test]
+4 -7
View File
@@ -18,13 +18,11 @@
#![cfg(feature = "std")]
use std::collections::HashMap;
use rstd::prelude::*;
use codec::Encode;
use runtime_support::StorageValue;
use primitives::traits::As;
use substrate_primitives::Blake2Hasher;
use {runtime_io, primitives};
use primitives;
use super::{Trait, Intentions, CurrentEra, OfflineSlashGrace, MinimumValidatorCount,
BondingDuration, SessionsPerEra, ValidatorCount, SessionReward, OfflineSlash};
@@ -60,8 +58,8 @@ impl<T: Trait> Default for GenesisConfig<T> {
}
impl<T: Trait> primitives::BuildStorage for GenesisConfig<T> {
fn build_storage(self) -> ::std::result::Result<HashMap<Vec<u8>, Vec<u8>>, String> {
let r: runtime_io::TestExternalities<Blake2Hasher> = map![
fn build_storage(self) -> ::std::result::Result<primitives::StorageMap, String> {
Ok(map![
Self::hash(<Intentions<T>>::key()).to_vec() => self.intentions.encode(),
Self::hash(<SessionsPerEra<T>>::key()).to_vec() => self.sessions_per_era.encode(),
Self::hash(<ValidatorCount<T>>::key()).to_vec() => self.validator_count.encode(),
@@ -71,7 +69,6 @@ impl<T: Trait> primitives::BuildStorage for GenesisConfig<T> {
Self::hash(<SessionReward<T>>::key()).to_vec() => self.session_reward.encode(),
Self::hash(<OfflineSlash<T>>::key()).to_vec() => self.offline_slash.encode(),
Self::hash(<OfflineSlashGrace<T>>::key()).to_vec() => self.offline_slash_grace.encode()
];
Ok(r.into())
])
}
}
+13 -5
View File
@@ -20,8 +20,8 @@
use primitives::BuildStorage;
use primitives::traits::{Identity};
use primitives::testing::{Digest, Header};
use substrate_primitives::{H256, Blake2Hasher};
use primitives::testing::{Digest, DigestItem, Header};
use substrate_primitives::{H256, Blake2Hasher, RlpCodec};
use runtime_io;
use {GenesisConfig, Module, Trait, consensus, session, system, timestamp, balances};
@@ -34,7 +34,7 @@ impl_outer_origin!{
pub struct Test;
impl consensus::Trait for Test {
const NOTE_OFFLINE_POSITION: u32 = 1;
type Log = u64;
type Log = DigestItem;
type SessionKey = u64;
type OnOfflineValidator = ();
}
@@ -48,6 +48,7 @@ impl system::Trait for Test {
type AccountId = u64;
type Header = Header;
type Event = ();
type Log = DigestItem;
}
impl balances::Trait for Test {
type Balance = u64;
@@ -70,7 +71,14 @@ impl Trait for Test {
type Event = ();
}
pub fn new_test_ext(ext_deposit: u64, session_length: u64, sessions_per_era: u64, current_era: u64, monied: bool, reward: u64) -> runtime_io::TestExternalities<Blake2Hasher> {
pub fn new_test_ext(
ext_deposit: u64,
session_length: u64,
sessions_per_era: u64,
current_era: u64,
monied: bool,
reward: u64
) -> runtime_io::TestExternalities<Blake2Hasher, RlpCodec> {
let mut t = system::GenesisConfig::<Test>::default().build_storage().unwrap();
let balance_factor = if ext_deposit > 0 {
256
@@ -116,7 +124,7 @@ pub fn new_test_ext(ext_deposit: u64, session_length: u64, sessions_per_era: u64
t.extend(timestamp::GenesisConfig::<Test>{
period: 5
}.build_storage().unwrap());
t.into()
runtime_io::TestExternalities::new(t)
}
pub type System = system::Module<Test>;
+14 -14
View File
@@ -29,7 +29,7 @@ fn note_null_offline_should_work() {
assert_eq!(Staking::offline_slash_grace(), 0);
assert_eq!(Staking::slash_count(&10), 0);
assert_eq!(Balances::free_balance(&10), 1);
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
assert_eq!(Staking::slash_count(&10), 0);
assert_eq!(Balances::free_balance(&10), 1);
assert!(Staking::forcing_new_era().is_none());
@@ -43,7 +43,7 @@ fn note_offline_should_work() {
assert_eq!(Staking::offline_slash_grace(), 0);
assert_eq!(Staking::slash_count(&10), 0);
assert_eq!(Balances::free_balance(&10), 70);
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
Staking::on_offline_validator(0);
assert_eq!(Staking::slash_count(&10), 1);
assert_eq!(Balances::free_balance(&10), 50);
@@ -58,11 +58,11 @@ fn note_offline_exponent_should_work() {
assert_eq!(Staking::offline_slash_grace(), 0);
assert_eq!(Staking::slash_count(&10), 0);
assert_eq!(Balances::free_balance(&10), 150);
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
Staking::on_offline_validator(0);
assert_eq!(Staking::slash_count(&10), 1);
assert_eq!(Balances::free_balance(&10), 130);
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
Staking::on_offline_validator(0);
assert_eq!(Staking::slash_count(&10), 2);
assert_eq!(Balances::free_balance(&10), 90);
@@ -81,14 +81,14 @@ fn note_offline_grace_should_work() {
assert_eq!(Staking::slash_count(&10), 0);
assert_eq!(Balances::free_balance(&10), 70);
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
Staking::on_offline_validator(0);
assert_eq!(Staking::slash_count(&10), 1);
assert_eq!(Balances::free_balance(&10), 70);
assert_eq!(Staking::slash_count(&20), 0);
assert_eq!(Balances::free_balance(&20), 70);
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
Staking::on_offline_validator(0);
Staking::on_offline_validator(1);
assert_eq!(Staking::slash_count(&10), 2);
@@ -111,13 +111,13 @@ fn note_offline_force_unstake_session_change_should_work() {
assert_eq!(Staking::intentions(), vec![10, 20, 1]);
assert_eq!(Session::validators(), vec![10, 20]);
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
Staking::on_offline_validator(0);
assert_eq!(Balances::free_balance(&10), 50);
assert_eq!(Staking::slash_count(&10), 1);
assert_eq!(Staking::intentions(), vec![10, 20, 1]);
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
Staking::on_offline_validator(0);
assert_eq!(Staking::intentions(), vec![1, 20]);
assert_eq!(Balances::free_balance(&10), 10);
@@ -134,7 +134,7 @@ fn note_offline_auto_unstake_session_change_should_work() {
assert_eq!(Staking::intentions(), vec![10, 20]);
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
Staking::on_offline_validator(0);
Staking::on_offline_validator(1);
assert_eq!(Balances::free_balance(&10), 6980);
@@ -142,7 +142,7 @@ fn note_offline_auto_unstake_session_change_should_work() {
assert_eq!(Staking::intentions(), vec![10, 20]);
assert!(Staking::forcing_new_era().is_none());
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
Staking::on_offline_validator(0);
Staking::on_offline_validator(1);
assert_eq!(Balances::free_balance(&10), 6940);
@@ -150,13 +150,13 @@ fn note_offline_auto_unstake_session_change_should_work() {
assert_eq!(Staking::intentions(), vec![20]);
assert!(Staking::forcing_new_era().is_some());
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
Staking::on_offline_validator(1);
assert_eq!(Balances::free_balance(&10), 6940);
assert_eq!(Balances::free_balance(&20), 6860);
assert_eq!(Staking::intentions(), vec![20]);
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
Staking::on_offline_validator(1);
assert_eq!(Balances::free_balance(&10), 6940);
assert_eq!(Balances::free_balance(&20), 6700);
@@ -219,7 +219,7 @@ fn slashing_should_work() {
assert_eq!(Balances::total_balance(&10), 21);
System::set_block_number(7);
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
Staking::on_offline_validator(0);
Staking::on_offline_validator(1);
assert_eq!(Balances::total_balance(&10), 1);
@@ -390,7 +390,7 @@ fn nominating_slashes_should_work() {
assert_eq!(Balances::total_balance(&4), 40);
System::set_block_number(5);
::system::ExtrinsicIndex::<Test>::put(1);
System::set_extrinsic_index(1);
Staking::on_offline_validator(0);
Staking::on_offline_validator(1);
assert_eq!(Balances::total_balance(&1), 0);
+1 -1
View File
@@ -906,7 +906,7 @@ macro_rules! __decl_outer_log {
;
) => {
impl_outer_log!(
pub enum Log($log_internal: DigestItem<$( $log_genarg)* >) for $runtime {
pub enum Log($log_internal: DigestItem<$( $log_genarg ),*>) for $runtime {
$( $parsed_modules ( $( $parsed_args ),* ) ),*
}
);
+5 -5
View File
@@ -543,11 +543,11 @@ pub mod unhashed {
#[cfg(test)]
mod tests {
use super::*;
use runtime_io::{twox_128, TestExternalities, with_externalities};
use runtime_io::{twox_128, TestExternalities, RlpCodec, with_externalities};
#[test]
fn integers_can_be_stored() {
let mut t = TestExternalities::new();
let mut t = TestExternalities::<_, RlpCodec>::default();
with_externalities(&mut t, || {
let x = 69u32;
put(b":test", &x);
@@ -564,7 +564,7 @@ mod tests {
#[test]
fn bools_can_be_stored() {
let mut t = TestExternalities::new();
let mut t = TestExternalities::<_, RlpCodec>::default();
with_externalities(&mut t, || {
let x = true;
put(b":test", &x);
@@ -582,7 +582,7 @@ mod tests {
#[test]
fn vecs_can_be_retrieved() {
let mut t = TestExternalities::new();
let mut t = TestExternalities::<_, RlpCodec>::default();
with_externalities(&mut t, || {
runtime_io::set_storage(&twox_128(b":test"), b"\x0b\0\0\0Hello world");
let x = b"Hello world".to_vec();
@@ -594,7 +594,7 @@ mod tests {
#[test]
fn vecs_can_be_stored() {
let mut t = TestExternalities::new();
let mut t = TestExternalities::<_, RlpCodec>::default();
let x = b"Hello world".to_vec();
with_externalities(&mut t, || {
+107 -29
View File
@@ -19,7 +19,6 @@
#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(any(feature = "std", test))]
extern crate substrate_primitives;
#[cfg_attr(any(feature = "std", test), macro_use)]
@@ -45,17 +44,23 @@ extern crate safe_mix;
use rstd::prelude::*;
use primitives::traits::{self, CheckEqual, SimpleArithmetic, SimpleBitOps, Zero, One, Bounded,
Hash, Member, MaybeDisplay, EnsureOrigin};
use runtime_support::{StorageValue, StorageMap, Parameter};
Hash, Member, MaybeDisplay, EnsureOrigin, Digest as DigestT, As};
use runtime_support::{storage, StorageValue, StorageMap, Parameter};
use safe_mix::TripletMix;
#[cfg(any(feature = "std", test))]
use rstd::marker::PhantomData;
#[cfg(any(feature = "std", test))]
use codec::Encode;
#[cfg(any(feature = "std", test))]
use runtime_io::{twox_128, TestExternalities, Blake2Hasher};
use runtime_io::{twox_128, TestExternalities, Blake2Hasher, RlpCodec};
#[cfg(any(feature = "std", test))]
use substrate_primitives::ChangesTrieConfiguration;
/// Current extrinsic index (u32) is stored under this key.
pub const EXTRINSIC_INDEX: &'static [u8] = b":extrinsic_index";
/// Changes trie configuration is stored under this key.
pub const CHANGES_TRIE_CONFIG: &'static [u8] = b":changes_trie";
/// Compute the extrinsics root of a list of extrinsics.
pub fn extrinsics_root<H: Hash, E: codec::Encode>(extrinsics: &[E]) -> H::Output {
@@ -74,7 +79,7 @@ pub trait Trait: Eq + Clone {
type BlockNumber: Parameter + Member + MaybeDisplay + SimpleArithmetic + Default + Bounded + Copy + rstd::hash::Hash;
type Hash: Parameter + Member + MaybeDisplay + SimpleBitOps + Default + Copy + CheckEqual + rstd::hash::Hash + AsRef<[u8]>;
type Hashing: Hash<Output = Self::Hash>;
type Digest: Parameter + Member + Default + traits::Digest;
type Digest: Parameter + Member + Default + traits::Digest<Hash = Self::Hash>;
type AccountId: Parameter + Member + MaybeDisplay + Ord + Default;
type Header: Parameter + traits::Header<
Number = Self::BlockNumber,
@@ -82,6 +87,7 @@ pub trait Trait: Eq + Clone {
Digest = Self::Digest
>;
type Event: Parameter + Member + From<Event>;
type Log: From<Log<Self>> + Into<DigestItemOf<Self>>;
}
pub type DigestItemOf<T> = <<T as Trait>::Digest as traits::Digest>::Item;
@@ -144,6 +150,39 @@ impl<AccountId> From<Option<AccountId>> for RawOrigin<AccountId> {
/// Exposed trait-generic origin type.
pub type Origin<T> = RawOrigin<<T as Trait>::AccountId>;
pub type Log<T> = RawLog<
<T as Trait>::Hash,
>;
/// A logs in this module.
#[cfg_attr(feature = "std", derive(Serialize, Deserialize, Debug))]
#[derive(Encode, Decode, PartialEq, Eq, Clone)]
pub enum RawLog<Hash> {
/// Changes trie has been computed for this block. Contains the root of
/// changes trie.
ChangesTrieRoot(Hash),
}
impl<Hash: Member> RawLog<Hash> {
/// Try to cast the log entry as ChangesTrieRoot log entry.
pub fn as_changes_trie_root(&self) -> Option<&Hash> {
match *self {
RawLog::ChangesTrieRoot(ref item) => Some(item),
}
}
}
// Implementation for tests outside of this crate.
#[cfg(any(feature = "std", test))]
impl From<RawLog<substrate_primitives::H256>> for primitives::testing::DigestItem {
fn from(log: RawLog<substrate_primitives::H256>) -> primitives::testing::DigestItem {
match log {
RawLog::ChangesTrieRoot(root) => primitives::generic::DigestItem::ChangesTrieRoot
::<substrate_primitives::H256, u64>(root),
}
}
}
decl_storage! {
trait Store for Module<T: Trait> as System {
@@ -151,7 +190,6 @@ decl_storage! {
ExtrinsicCount: u32;
pub BlockHash get(block_hash): required map [ T::BlockNumber => T::Hash ];
pub ExtrinsicIndex get(extrinsic_index): u32;
ExtrinsicData get(extrinsic_data): required map [ u32 => Vec<u8> ];
RandomSeed get(random_seed): required T::Hash;
/// The current block number being processed. Set by `execute_block`.
@@ -204,15 +242,20 @@ pub fn ensure_inherent<OuterOrigin, AccountId>(o: OuterOrigin) -> Result<(), &'s
}
impl<T: Trait> Module<T> {
/// Gets the index of extrinsic that is currenty executing.
pub fn extrinsic_index() -> Option<u32> {
storage::unhashed::get(EXTRINSIC_INDEX)
}
/// Start the execution of a particular block.
pub fn initialise(number: &T::BlockNumber, parent_hash: &T::Hash, txs_root: &T::Hash) {
// populate environment.
storage::unhashed::put(EXTRINSIC_INDEX, &0u32);
<Number<T>>::put(number);
<ParentHash<T>>::put(parent_hash);
<BlockHash<T>>::insert(*number - One::one(), parent_hash);
<ExtrinsicsRoot<T>>::put(txs_root);
<RandomSeed<T>>::put(Self::calculate_random());
<ExtrinsicIndex<T>>::put(0u32);
<Events<T>>::kill();
}
@@ -223,13 +266,23 @@ impl<T: Trait> Module<T> {
let number = <Number<T>>::take();
let parent_hash = <ParentHash<T>>::take();
let digest = <Digest<T>>::take();
let mut digest = <Digest<T>>::take();
let extrinsics_root = <ExtrinsicsRoot<T>>::take();
let storage_root = T::Hashing::storage_root();
let storage_changes_root = T::Hashing::storage_changes_root(number.as_());
// we can't compute changes trie root earlier && put it to the Digest
// because it will include all currently existing temporaries
if let Some(storage_changes_root) = storage_changes_root {
let item = RawLog::ChangesTrieRoot(storage_changes_root);
let item = <T as Trait>::Log::from(item).into();
digest.push(item);
}
// <Events<T>> stays to be inspected by the client.
<T::Header as traits::Header>::new(number, extrinsics_root, storage_root, parent_hash, digest)
<T::Header as traits::Header>::new(number, extrinsics_root, storage_root,
parent_hash, digest)
}
/// Deposits a log and ensures it matches the blocks log data.
@@ -241,7 +294,8 @@ impl<T: Trait> Module<T> {
/// Deposits an event onto this block's event record.
pub fn deposit_event(event: T::Event) {
let phase = <ExtrinsicIndex<T>>::get().map_or(Phase::Finalization, |c| Phase::ApplyExtrinsic(c));
let extrinsic_index = Self::extrinsic_index();
let phase = extrinsic_index.map_or(Phase::Finalization, |c| Phase::ApplyExtrinsic(c));
let mut events = Self::events();
events.push(EventRecord { phase, event });
<Events<T>>::put(events);
@@ -261,13 +315,13 @@ impl<T: Trait> Module<T> {
/// Get the basic externalities for this module, useful for tests.
#[cfg(any(feature = "std", test))]
pub fn externalities() -> TestExternalities<Blake2Hasher> {
map![
pub fn externalities() -> TestExternalities<Blake2Hasher, RlpCodec> {
TestExternalities::new(map![
twox_128(&<BlockHash<T>>::key_for(T::BlockNumber::zero())).to_vec() => [69u8; 32].encode(), // TODO: replace with Hash::default().encode
twox_128(<Number<T>>::key()).to_vec() => T::BlockNumber::one().encode(),
twox_128(<ParentHash<T>>::key()).to_vec() => [69u8; 32].encode(), // TODO: replace with Hash::default().encode
twox_128(<RandomSeed<T>>::key()).to_vec() => T::Hash::default().encode()
]
])
}
/// Set the block number to something in particular. Can be used as an alternative to
@@ -277,6 +331,12 @@ impl<T: Trait> Module<T> {
<Number<T>>::put(n);
}
/// Sets the index of extrinsic that is currenty executing.
#[cfg(any(feature = "std", test))]
pub fn set_extrinsic_index(extrinsic_index: u32) {
storage::unhashed::put(EXTRINSIC_INDEX, &extrinsic_index)
}
/// Set the parent hash number to something in particular. Can be used as an alternative to
/// `initialise` for tests that don't need to bother with the other environment entries.
#[cfg(any(feature = "std", test))]
@@ -299,7 +359,7 @@ impl<T: Trait> Module<T> {
/// Note what the extrinsic data of the current extrinsic index is. If this is called, then
/// ensure `derive_extrinsics` is also called before block-building is completed.
pub fn note_extrinsic(encoded_xt: Vec<u8>) {
<ExtrinsicData<T>>::insert(<ExtrinsicIndex<T>>::get().unwrap_or_default(), encoded_xt);
<ExtrinsicData<T>>::insert(Self::extrinsic_index().unwrap_or_default(), encoded_xt);
}
/// To be called immediately after an extrinsic has been applied.
@@ -308,14 +368,16 @@ impl<T: Trait> Module<T> {
Ok(_) => Event::ExtrinsicSuccess,
Err(_) => Event::ExtrinsicFailed,
}.into());
<ExtrinsicIndex<T>>::put(<ExtrinsicIndex<T>>::get().unwrap_or_default() + 1u32);
let next_extrinsic_index = Self::extrinsic_index().unwrap_or_default() + 1u32;
storage::unhashed::put(EXTRINSIC_INDEX, &next_extrinsic_index);
}
/// To be called immediately after `note_applied_extrinsic` of the last extrinsic of the block
/// has been called.
pub fn note_finished_extrinsics() {
<ExtrinsicCount<T>>::put(<ExtrinsicIndex<T>>::get().unwrap_or_default());
<ExtrinsicIndex<T>>::kill();
let extrinsic_index: u32 = storage::unhashed::take(EXTRINSIC_INDEX).unwrap_or_default();
<ExtrinsicCount<T>>::put(extrinsic_index);
}
/// Remove all extrinsics data and save the extrinsics trie root.
@@ -330,12 +392,20 @@ impl<T: Trait> Module<T> {
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(deny_unknown_fields)]
pub struct GenesisConfig<T: Trait>(PhantomData<T>);
pub struct GenesisConfig<T: Trait> {
/// Changes trie configuration.
pub changes_trie_config: Option<ChangesTrieConfiguration>,
/// Marker for 'storing' T.
pub _phantom: ::std::marker::PhantomData<T>,
}
#[cfg(any(feature = "std", test))]
impl<T: Trait> Default for GenesisConfig<T> {
fn default() -> Self {
GenesisConfig(PhantomData)
GenesisConfig {
changes_trie_config: Default::default(),
_phantom: Default::default(),
}
}
}
@@ -345,13 +415,22 @@ impl<T: Trait> primitives::BuildStorage for GenesisConfig<T>
fn build_storage(self) -> Result<primitives::StorageMap, String> {
use codec::Encode;
Ok(map![
let mut storage: primitives::StorageMap = map![
Self::hash(&<BlockHash<T>>::key_for(T::BlockNumber::zero())).to_vec() => [69u8; 32].encode(),
Self::hash(<Number<T>>::key()).to_vec() => 1u64.encode(),
Self::hash(<ParentHash<T>>::key()).to_vec() => [69u8; 32].encode(),
Self::hash(<RandomSeed<T>>::key()).to_vec() => [0u8; 32].encode(),
Self::hash(<ExtrinsicIndex<T>>::key()).to_vec() => [0u8; 4].encode()
])
Self::hash(<RandomSeed<T>>::key()).to_vec() => [0u8; 32].encode()
];
storage.insert(EXTRINSIC_INDEX.to_vec(), 0u32.encode());
if let Some(changes_trie_config) = self.changes_trie_config {
storage.insert(
CHANGES_TRIE_CONFIG.to_vec(),
changes_trie_config.encode());
}
Ok(storage)
}
}
@@ -362,7 +441,7 @@ mod tests {
use substrate_primitives::H256;
use primitives::BuildStorage;
use primitives::traits::BlakeTwo256;
use primitives::testing::{Digest, Header};
use primitives::testing::{Digest, DigestItem, Header};
impl_outer_origin!{
pub enum Origin for Test where system = super {}
@@ -380,6 +459,7 @@ mod tests {
type AccountId = u64;
type Header = Header;
type Event = u16;
type Log = DigestItem;
}
impl From<Event> for u16 {
@@ -393,9 +473,7 @@ mod tests {
type System = Module<Test>;
fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher> {
fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher, RlpCodec> {
GenesisConfig::<Test>::default().build_storage().unwrap().into()
}
+10 -9
View File
@@ -167,11 +167,11 @@ impl<T: Trait> runtime_primitives::BuildStorage for GenesisConfig<T>
mod tests {
use super::*;
use runtime_io::with_externalities;
use runtime_io::{with_externalities, TestExternalities, RlpCodec};
use substrate_primitives::H256;
use runtime_primitives::BuildStorage;
use runtime_primitives::traits::{BlakeTwo256};
use runtime_primitives::testing::{Digest, Header};
use runtime_primitives::testing::{Digest, DigestItem, Header};
impl_outer_origin! {
pub enum Origin for Test {}
@@ -189,10 +189,11 @@ mod tests {
type AccountId = u64;
type Header = Header;
type Event = ();
type Log = DigestItem;
}
impl consensus::Trait for Test {
const NOTE_OFFLINE_POSITION: u32 = 1;
type Log = u64;
type Log = DigestItem;
type SessionKey = u64;
type OnOfflineValidator = ();
}
@@ -206,8 +207,8 @@ mod tests {
fn timestamp_works() {
let mut t = system::GenesisConfig::<Test>::default().build_storage().unwrap();
t.extend(GenesisConfig::<Test> { period: 0 }.build_storage().unwrap());
let mut t = runtime_io::TestExternalities::from(t);
with_externalities(&mut t, || {
with_externalities(&mut TestExternalities::<_, RlpCodec>::new(t), || {
Timestamp::set_timestamp(42);
assert_ok!(Timestamp::dispatch(Call::set(69), Origin::INHERENT));
assert_eq!(Timestamp::now(), 69);
@@ -219,8 +220,8 @@ mod tests {
fn double_timestamp_should_fail() {
let mut t = system::GenesisConfig::<Test>::default().build_storage().unwrap();
t.extend(GenesisConfig::<Test> { period: 5 }.build_storage().unwrap());
let mut t = runtime_io::TestExternalities::from(t);
with_externalities(&mut t, || {
with_externalities(&mut TestExternalities::<_, RlpCodec>::new(t), || {
Timestamp::set_timestamp(42);
assert_ok!(Timestamp::dispatch(Call::set(69), Origin::INHERENT));
let _ = Timestamp::dispatch(Call::set(70), Origin::INHERENT);
@@ -232,8 +233,8 @@ mod tests {
fn block_period_is_enforced() {
let mut t = system::GenesisConfig::<Test>::default().build_storage().unwrap();
t.extend(GenesisConfig::<Test> { period: 5 }.build_storage().unwrap());
let mut t = runtime_io::TestExternalities::from(t);
with_externalities(&mut t, || {
with_externalities(&mut TestExternalities::<_, RlpCodec>::new(t), || {
Timestamp::set_timestamp(42);
let _ = Timestamp::dispatch(Call::set(46), Origin::INHERENT);
});
+4 -3
View File
@@ -331,10 +331,10 @@ mod tests {
use super::*;
use runtime_io::with_externalities;
use substrate_primitives::{H256, Blake2Hasher};
use substrate_primitives::{H256, Blake2Hasher, RlpCodec};
use runtime_primitives::BuildStorage;
use runtime_primitives::traits::{BlakeTwo256};
use runtime_primitives::testing::{Digest, Header};
use runtime_primitives::testing::{Digest, DigestItem, Header};
impl_outer_origin! {
pub enum Origin for Test {}
@@ -352,6 +352,7 @@ mod tests {
type AccountId = u64;
type Header = Header;
type Event = ();
type Log = DigestItem;
}
impl balances::Trait for Test {
type Balance = u64;
@@ -368,7 +369,7 @@ mod tests {
type Balances = balances::Module<Test>;
type Treasury = Module<Test>;
fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher> {
fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher, RlpCodec> {
let mut t = system::GenesisConfig::<Test>::default().build_storage().unwrap();
t.extend(balances::GenesisConfig::<Test>{
balances: vec![(0, 100), (1, 99), (2, 1)],