Aura improvements (#8881)

* Aura: Expose function to build the verifier

* Use best block to initialize the authorities cache

* Use best block when determining the slot duration

* Remove `AuraBlockImport`

* Some cleanups

* Fix build error
This commit is contained in:
Bastian Köcher
2021-05-22 23:01:11 +02:00
committed by GitHub
parent 00328dca24
commit 4dc8f3a7e5
6 changed files with 99 additions and 152 deletions
@@ -30,12 +30,7 @@ pub fn new_partial(config: &Configuration) -> Result<sc_service::PartialComponen
sp_consensus::DefaultImportQueue<Block, FullClient>,
sc_transaction_pool::FullPool<Block, FullClient>,
(
sc_consensus_aura::AuraBlockImport<
Block,
FullClient,
sc_finality_grandpa::GrandpaBlockImport<FullBackend, Block, FullClient, FullSelectChain>,
AuraPair
>,
sc_finality_grandpa::GrandpaBlockImport<FullBackend, Block, FullClient, FullSelectChain>,
sc_finality_grandpa::LinkHalf<Block, FullClient, FullSelectChain>,
Option<Telemetry>,
)
@@ -84,15 +79,11 @@ pub fn new_partial(config: &Configuration) -> Result<sc_service::PartialComponen
telemetry.as_ref().map(|x| x.handle()),
)?;
let aura_block_import = sc_consensus_aura::AuraBlockImport::<_, _, _, AuraPair>::new(
grandpa_block_import.clone(), client.clone(),
);
let slot_duration = sc_consensus_aura::slot_duration(&*client)?.slot_duration();
let import_queue = sc_consensus_aura::import_queue::<AuraPair, _, _, _, _, _, _>(
ImportQueueParams {
block_import: aura_block_import.clone(),
block_import: grandpa_block_import.clone(),
justification_import: Some(Box::new(grandpa_block_import.clone())),
client: client.clone(),
create_inherent_data_providers: move |_, ()| async move {
@@ -122,7 +113,7 @@ pub fn new_partial(config: &Configuration) -> Result<sc_service::PartialComponen
keystore_container,
select_chain,
transaction_pool,
other: (aura_block_import, grandpa_link, telemetry),
other: (grandpa_block_import, grandpa_link, telemetry),
})
}
@@ -353,16 +344,11 @@ pub fn new_light(mut config: Configuration) -> Result<TaskManager, ServiceError>
telemetry.as_ref().map(|x| x.handle()),
)?;
let aura_block_import = sc_consensus_aura::AuraBlockImport::<_, _, _, AuraPair>::new(
grandpa_block_import.clone(),
client.clone(),
);
let slot_duration = sc_consensus_aura::slot_duration(&*client)?.slot_duration();
let import_queue = sc_consensus_aura::import_queue::<AuraPair, _, _, _, _, _, _>(
ImportQueueParams {
block_import: aura_block_import.clone(),
block_import: grandpa_block_import.clone(),
justification_import: Some(Box::new(grandpa_block_import.clone())),
client: client.clone(),
create_inherent_data_providers: move |_, ()| async move {
@@ -20,23 +20,23 @@
use crate::{AuthorityId, find_pre_digest, slot_author, aura_err, Error, authorities};
use std::{
sync::Arc, marker::PhantomData, hash::Hash, fmt::Debug, collections::HashMap,
sync::Arc, marker::PhantomData, hash::Hash, fmt::Debug,
};
use log::{debug, info, trace};
use prometheus_endpoint::Registry;
use codec::{Encode, Decode, Codec};
use sp_consensus::{
BlockImport, CanAuthorWith, ForkChoiceStrategy, BlockImportParams,
BlockOrigin, Error as ConsensusError, BlockCheckParams, ImportResult,
BlockOrigin, Error as ConsensusError,
import_queue::{
Verifier, BasicQueue, DefaultImportQueue, BoxJustificationImport,
},
};
use sc_client_api::{backend::AuxStore, BlockOf};
use sc_client_api::{BlockOf, UsageProvider, backend::AuxStore};
use sp_blockchain::{well_known_cache_keys::{self, Id as CacheKeyId}, ProvideCache, HeaderBackend};
use sp_block_builder::BlockBuilder as BlockBuilderApi;
use sp_runtime::{generic::{BlockId, OpaqueDigestItemId}, Justifications};
use sp_runtime::traits::{Block as BlockT, Header, DigestItemFor, Zero};
use sp_runtime::traits::{Block as BlockT, Header, DigestItemFor};
use sp_api::ProvideRuntimeApi;
use sp_core::crypto::Pair;
use sp_inherents::{CreateInherentDataProviders, InherentDataProvider as _};
@@ -113,19 +113,19 @@ fn check_header<C, B: BlockT, P: Pair>(
}
/// A verifier for Aura blocks.
pub struct AuraVerifier<C, P, CAW, IDP> {
pub struct AuraVerifier<C, P, CAW, CIDP> {
client: Arc<C>,
phantom: PhantomData<P>,
create_inherent_data_providers: IDP,
create_inherent_data_providers: CIDP,
can_author_with: CAW,
check_for_equivocation: CheckForEquivocation,
telemetry: Option<TelemetryHandle>,
}
impl<C, P, CAW, IDP> AuraVerifier<C, P, CAW, IDP> {
impl<C, P, CAW, CIDP> AuraVerifier<C, P, CAW, CIDP> {
pub(crate) fn new(
client: Arc<C>,
create_inherent_data_providers: IDP,
create_inherent_data_providers: CIDP,
can_author_with: CAW,
check_for_equivocation: CheckForEquivocation,
telemetry: Option<TelemetryHandle>,
@@ -141,21 +141,21 @@ impl<C, P, CAW, IDP> AuraVerifier<C, P, CAW, IDP> {
}
}
impl<C, P, CAW, IDP> AuraVerifier<C, P, CAW, IDP> where
impl<C, P, CAW, CIDP> AuraVerifier<C, P, CAW, CIDP> where
P: Send + Sync + 'static,
CAW: Send + Sync + 'static,
IDP: Send,
CIDP: Send,
{
async fn check_inherents<B: BlockT>(
&self,
block: B,
block_id: BlockId<B>,
inherent_data: sp_inherents::InherentData,
create_inherent_data_providers: IDP::InherentDataProviders,
create_inherent_data_providers: CIDP::InherentDataProviders,
) -> Result<(), Error<B>> where
C: ProvideRuntimeApi<B>, C::Api: BlockBuilderApi<B>,
CAW: CanAuthorWith<B>,
IDP: CreateInherentDataProviders<B, ()>,
CIDP: CreateInherentDataProviders<B, ()>,
{
if let Err(e) = self.can_author_with.can_author_with(&block_id) {
debug!(
@@ -187,7 +187,7 @@ impl<C, P, CAW, IDP> AuraVerifier<C, P, CAW, IDP> where
}
#[async_trait::async_trait]
impl<B: BlockT, C, P, CAW, IDP> Verifier<B> for AuraVerifier<C, P, CAW, IDP> where
impl<B: BlockT, C, P, CAW, CIDP> Verifier<B> for AuraVerifier<C, P, CAW, CIDP> where
C: ProvideRuntimeApi<B> +
Send +
Sync +
@@ -200,8 +200,8 @@ impl<B: BlockT, C, P, CAW, IDP> Verifier<B> for AuraVerifier<C, P, CAW, IDP> whe
P::Public: Send + Sync + Hash + Eq + Clone + Decode + Encode + Debug + 'static,
P::Signature: Encode + Decode,
CAW: CanAuthorWith<B> + Send + Sync + 'static,
IDP: CreateInherentDataProviders<B, ()> + Send + Sync,
IDP::InherentDataProviders: InherentDataProviderExt + Send + Sync,
CIDP: CreateInherentDataProviders<B, ()> + Send + Sync,
CIDP::InherentDataProviders: InherentDataProviderExt + Send + Sync,
{
async fn verify(
&mut self,
@@ -320,7 +320,7 @@ impl<B: BlockT, C, P, CAW, IDP> Verifier<B> for AuraVerifier<C, P, CAW, IDP> whe
fn initialize_authorities_cache<A, B, C>(client: &C) -> Result<(), ConsensusError> where
A: Codec + Debug,
B: BlockT,
C: ProvideRuntimeApi<B> + BlockOf + ProvideCache<B>,
C: ProvideRuntimeApi<B> + BlockOf + ProvideCache<B> + UsageProvider<B>,
C::Api: AuraApi<B, A>,
{
// no cache => no initialization
@@ -329,6 +329,8 @@ fn initialize_authorities_cache<A, B, C>(client: &C) -> Result<(), ConsensusErro
None => return Ok(()),
};
let best_hash = client.usage_info().chain.best_hash;
// check if we already have initialized the cache
let map_err = |error| sp_consensus::Error::from(sp_consensus::Error::ClientImport(
format!(
@@ -336,107 +338,22 @@ fn initialize_authorities_cache<A, B, C>(client: &C) -> Result<(), ConsensusErro
error,
)));
let genesis_id = BlockId::Number(Zero::zero());
let genesis_authorities: Option<Vec<A>> = cache
.get_at(&well_known_cache_keys::AUTHORITIES, &genesis_id)
let block_id = BlockId::hash(best_hash);
let authorities: Option<Vec<A>> = cache
.get_at(&well_known_cache_keys::AUTHORITIES, &block_id)
.unwrap_or(None)
.and_then(|(_, _, v)| Decode::decode(&mut &v[..]).ok());
if genesis_authorities.is_some() {
if authorities.is_some() {
return Ok(());
}
let genesis_authorities = authorities(client, &genesis_id)?;
cache.initialize(&well_known_cache_keys::AUTHORITIES, genesis_authorities.encode())
let authorities = crate::authorities(client, &block_id)?;
cache.initialize(&well_known_cache_keys::AUTHORITIES, authorities.encode())
.map_err(map_err)?;
Ok(())
}
/// A block-import handler for Aura.
///
/// Wraps an inner [`BlockImport`] (`I`) together with a client handle (`C`)
/// so Aura-specific checks can run before the block is handed to the inner
/// import (see the `BlockImport` impl below).
pub struct AuraBlockImport<Block: BlockT, C, I: BlockImport<Block>, P> {
// The wrapped import that performs the actual block import.
inner: I,
// Client handle used to look up headers (e.g. the parent header).
client: Arc<C>,
// Marks the otherwise-unused `Block` and crypto-pair type `P` as
// logically part of this type.
_phantom: PhantomData<(Block, P)>,
}
// Manual `Clone` impl: a derive would require `C: Clone`, but only the
// `Arc<C>` handle needs cloning, so we bound `I: Clone` and clone by hand.
impl<Block: BlockT, C, I: Clone + BlockImport<Block>, P> Clone for AuraBlockImport<Block, C, I, P> {
fn clone(&self) -> Self {
AuraBlockImport {
// Inner import must itself be `Clone`; the client is an `Arc`,
// so this is a cheap reference-count bump.
inner: self.inner.clone(),
client: self.client.clone(),
_phantom: PhantomData,
}
}
}
impl<Block: BlockT, C, I: BlockImport<Block>, P> AuraBlockImport<Block, C, I, P> {
/// New aura block import.
///
/// `inner` is the block import being wrapped and `client` provides
/// access to the chain's headers during import.
pub fn new(
inner: I,
client: Arc<C>,
) -> Self {
Self {
inner,
client,
_phantom: PhantomData,
}
}
}
#[async_trait::async_trait]
impl<Block: BlockT, C, I, P> BlockImport<Block> for AuraBlockImport<Block, C, I, P> where
I: BlockImport<Block, Transaction = sp_api::TransactionFor<C, Block>> + Send + Sync,
I::Error: Into<ConsensusError>,
C: HeaderBackend<Block> + ProvideRuntimeApi<Block>,
P: Pair + Send + Sync + 'static,
P::Public: Clone + Eq + Send + Sync + Hash + Debug + Encode + Decode,
P::Signature: Encode + Decode,
sp_api::TransactionFor<C, Block>: Send + 'static,
{
type Error = ConsensusError;
type Transaction = sp_api::TransactionFor<C, Block>;
// No Aura-specific checks are needed here; delegate straight to the
// inner import, converting its error type.
async fn check_block(
&mut self,
block: BlockCheckParams<Block>,
) -> Result<ImportResult, Self::Error> {
self.inner.check_block(block).await.map_err(Into::into)
}
// Enforces that the block's slot is strictly greater than its parent's
// slot, then delegates the import to the inner implementation.
async fn import_block(
&mut self,
block: BlockImportParams<Block, Self::Transaction>,
new_cache: HashMap<CacheKeyId, Vec<u8>>,
) -> Result<ImportResult, Self::Error> {
let hash = block.post_hash();
// The header was already verified, so the slot pre-digest must exist.
let slot = find_pre_digest::<Block, P::Signature>(&block.header)
.expect("valid Aura headers must contain a predigest; \
header has been already verified; qed");
let parent_hash = *block.header.parent_hash();
// Look up the parent header so its slot can be compared against ours.
let parent_header = self.client.header(BlockId::Hash(parent_hash))
.map_err(|e| ConsensusError::ChainLookup(e.to_string()))?
.ok_or_else(|| ConsensusError::ChainLookup(aura_err(
Error::<Block>::ParentUnavailable(parent_hash, hash)
).into()))?;
let parent_slot = find_pre_digest::<Block, P::Signature>(&parent_header)
.expect("valid Aura headers contain a pre-digest; \
parent header has already been verified; qed");
// make sure that slot number is strictly increasing
if slot <= parent_slot {
return Err(
ConsensusError::ClientImport(aura_err(
Error::<Block>::SlotMustIncrease(parent_slot, slot)
).into())
);
}
self.inner.import_block(block, new_cache).await.map_err(Into::into)
}
}
/// Should we check for equivocation of a block author?
#[derive(Debug, Clone, Copy)]
pub enum CheckForEquivocation {
@@ -506,6 +423,7 @@ pub fn import_queue<'a, P, Block, I, C, S, CAW, CIDP>(
+ Send
+ Sync
+ AuxStore
+ UsageProvider<Block>
+ HeaderBackend<Block>,
I: BlockImport<Block, Error=ConsensusError, Transaction = sp_api::TransactionFor<C, Block>>
+ Send
@@ -522,12 +440,14 @@ pub fn import_queue<'a, P, Block, I, C, S, CAW, CIDP>(
{
initialize_authorities_cache(&*client)?;
let verifier = AuraVerifier::<_, P, _, _>::new(
client,
create_inherent_data_providers,
can_author_with,
check_for_equivocation,
telemetry,
let verifier = build_verifier::<P, _, _, _>(
BuildVerifierParams {
client,
create_inherent_data_providers,
can_author_with,
check_for_equivocation,
telemetry,
},
);
Ok(BasicQueue::new(
@@ -538,3 +458,36 @@ pub fn import_queue<'a, P, Block, I, C, S, CAW, CIDP>(
registry,
))
}
/// Parameters of [`build_verifier`].
///
/// Bundling the arguments in a struct keeps the call sites readable and
/// lets fields be added later without breaking existing callers.
pub struct BuildVerifierParams<C, CIDP, CAW> {
/// The client to interact with the chain.
pub client: Arc<C>,
/// Something that can create the inherent data providers.
pub create_inherent_data_providers: CIDP,
/// Can we author with the current node?
pub can_author_with: CAW,
/// Should we check for equivocation?
pub check_for_equivocation: CheckForEquivocation,
/// Telemetry instance used to report telemetry metrics.
pub telemetry: Option<TelemetryHandle>,
}
/// Build the [`AuraVerifier`]
///
/// Destructures the given [`BuildVerifierParams`] and forwards every field
/// unchanged to [`AuraVerifier::new`]. The authority pair type `P` is not
/// inferable from the arguments and must be supplied by the caller
/// (e.g. via turbofish).
pub fn build_verifier<P, C, CIDP, CAW>(
BuildVerifierParams {
client,
create_inherent_data_providers,
can_author_with,
check_for_equivocation,
telemetry,
}: BuildVerifierParams<C, CIDP, CAW>
) -> AuraVerifier<C, P, CAW, CIDP> {
AuraVerifier::<_, P, _, _>::new(
client,
create_inherent_data_providers,
can_author_with,
check_for_equivocation,
telemetry,
)
}
+6 -7
View File
@@ -44,7 +44,7 @@ use sp_consensus::{
BlockImport, Environment, Proposer, CanAuthorWith, ForkChoiceStrategy, BlockImportParams,
BlockOrigin, Error as ConsensusError, SelectChain,
};
use sc_client_api::{backend::AuxStore, BlockOf};
use sc_client_api::{backend::AuxStore, BlockOf, UsageProvider};
use sp_blockchain::{Result as CResult, well_known_cache_keys, ProvideCache, HeaderBackend};
use sp_core::crypto::Public;
use sp_application_crypto::{AppKey, AppPublic};
@@ -70,7 +70,10 @@ pub use sp_consensus_aura::{
},
};
pub use sp_consensus::SyncOracle;
pub use import_queue::{ImportQueueParams, import_queue, AuraBlockImport, CheckForEquivocation};
pub use import_queue::{
ImportQueueParams, import_queue, CheckForEquivocation,
build_verifier, BuildVerifierParams, AuraVerifier,
};
pub use sc_consensus_slots::SlotProportion;
type AuthorityId<P> = <P as Pair>::Public;
@@ -82,7 +85,7 @@ pub type SlotDuration = sc_consensus_slots::SlotDuration<sp_consensus_aura::Slot
pub fn slot_duration<A, B, C>(client: &C) -> CResult<SlotDuration> where
A: Codec,
B: BlockT,
C: AuxStore + ProvideRuntimeApi<B>,
C: AuxStore + ProvideRuntimeApi<B> + UsageProvider<B>,
C::Api: AuraApi<B, A>,
{
SlotDuration::get_or_compute(client, |a, b| a.slot_duration(b).map_err(Into::into))
@@ -491,10 +494,6 @@ enum Error<B: BlockT> {
#[display(fmt = "Bad signature on {:?}", _0)]
BadSignature(B::Hash),
Client(sp_blockchain::Error),
#[display(fmt = "Slot number must increase: parent slot: {}, this slot: {}", _0, _1)]
SlotMustIncrease(Slot, Slot),
#[display(fmt = "Parent ({}) of {} unavailable. Cannot import", _0, _1)]
ParentUnavailable(B::Hash, B::Hash),
#[display(fmt = "Unknown inherent error for identifier: {}", "String::from_utf8_lossy(_0)")]
UnknownInherentError(sp_inherents::InherentIdentifier),
#[display(fmt = "Inherent error: {}", _0)]
+2 -2
View File
@@ -98,7 +98,7 @@ use sp_consensus::{
};
use sp_consensus_babe::inherents::BabeInherentData;
use sc_client_api::{
backend::AuxStore, BlockchainEvents, ProvideUncles,
backend::AuxStore, BlockchainEvents, ProvideUncles, UsageProvider
};
use sp_block_builder::BlockBuilder as BlockBuilderApi;
use futures::channel::mpsc::{channel, Sender, Receiver};
@@ -317,7 +317,7 @@ impl Config {
/// Either fetch the slot duration from disk or compute it from the genesis
/// state.
pub fn get_or_compute<B: BlockT, C>(client: &C) -> ClientResult<Self> where
C: AuxStore + ProvideRuntimeApi<B>, C::Api: BabeApi<B>,
C: AuxStore + ProvideRuntimeApi<B> + UsageProvider<B>, C::Api: BabeApi<B>,
{
trace!(target: "babe", "Getting slot duration");
match sc_consensus_slots::SlotDuration::get_or_compute(client, |a, b| {
@@ -22,7 +22,7 @@ use super::ConsensusDataProvider;
use crate::Error;
use codec::Encode;
use std::{borrow::Cow, sync::{Arc, atomic}, time::SystemTime};
use sc_client_api::AuxStore;
use sc_client_api::{AuxStore, UsageProvider};
use sc_consensus_babe::{
Config, Epoch, authorship, CompatibleDigestItem, BabeIntermediate, INTERMEDIATE_KEY,
find_pre_digest,
@@ -67,7 +67,11 @@ pub struct BabeConsensusDataProvider<B: BlockT, C> {
impl<B, C> BabeConsensusDataProvider<B, C>
where
B: BlockT,
C: AuxStore + HeaderBackend<B> + ProvideRuntimeApi<B> + HeaderMetadata<B, Error = sp_blockchain::Error>,
C: AuxStore
+ HeaderBackend<B>
+ ProvideRuntimeApi<B>
+ HeaderMetadata<B, Error = sp_blockchain::Error>
+ UsageProvider<B>,
C::Api: BabeApi<B>,
{
pub fn new(
@@ -120,7 +124,11 @@ impl<B, C> BabeConsensusDataProvider<B, C>
impl<B, C> ConsensusDataProvider<B> for BabeConsensusDataProvider<B, C>
where
B: BlockT,
C: AuxStore + HeaderBackend<B> + HeaderMetadata<B, Error = sp_blockchain::Error> + ProvideRuntimeApi<B>,
C: AuxStore
+ HeaderBackend<B>
+ HeaderMetadata<B, Error = sp_blockchain::Error>
+ UsageProvider<B>
+ ProvideRuntimeApi<B>,
C::Api: BabeApi<B>,
{
type Transaction = TransactionFor<C, B>;
@@ -252,7 +260,7 @@ impl SlotTimestampProvider {
pub fn new<B, C>(client: Arc<C>) -> Result<Self, Error>
where
B: BlockT,
C: AuxStore + HeaderBackend<B> + ProvideRuntimeApi<B>,
C: AuxStore + HeaderBackend<B> + ProvideRuntimeApi<B> + UsageProvider<B>,
C::Api: BabeApi<B>,
{
let slot_duration = Config::get_or_compute(&*client)?.slot_duration;
+9 -8
View File
@@ -584,7 +584,7 @@ impl<T: Clone + Send + Sync + 'static> SlotDuration<T> {
/// `slot_key` is marked as `'static`, as it should really be a
/// compile-time constant.
pub fn get_or_compute<B: BlockT, C, CB>(client: &C, cb: CB) -> sp_blockchain::Result<Self> where
C: sc_client_api::backend::AuxStore,
C: sc_client_api::backend::AuxStore + sc_client_api::UsageProvider<B>,
C: ProvideRuntimeApi<B>,
CB: FnOnce(ApiRef<C::Api>, &BlockId<B>) -> sp_blockchain::Result<T>,
T: SlotData + Encode + Decode + Debug,
@@ -599,19 +599,20 @@ impl<T: Clone + Send + Sync + 'static> SlotDuration<T> {
})
}),
None => {
use sp_runtime::traits::Zero;
let genesis_slot_duration =
cb(client.runtime_api(), &BlockId::number(Zero::zero()))?;
let best_hash = client.usage_info().chain.best_hash;
let slot_duration =
cb(client.runtime_api(), &BlockId::hash(best_hash))?;
info!(
"⏱ Loaded block-time = {:?} from genesis on first-launch",
genesis_slot_duration.slot_duration()
"⏱ Loaded block-time = {:?} from block {:?}",
slot_duration.slot_duration(),
best_hash,
);
genesis_slot_duration
slot_duration
.using_encoded(|s| client.insert_aux(&[(T::SLOT_KEY, &s[..])], &[]))?;
Ok(SlotDuration(genesis_slot_duration))
Ok(SlotDuration(slot_duration))
}
}?;