Mirror of https://github.com/pezkuwichain/pezkuwi-subxt.git, synced 2026-05-01 12:27:56 +00:00
Move non runtime related stuff into node/primitives (#2743)
* Remove stuff out of the runtime that does not belong there. There might be more, but it is a start.
* White space fixes.
* Fix tests.
* Leave whitespace in ui tests alone.
* Add back zstd for no reason.
* Fix browser wasm (hopefully)
@@ -22,20 +22,21 @@
#![deny(missing_docs)]

use std::pin::Pin;

use serde::{Serialize, Deserialize};
use futures::Future;
use parity_scale_codec::{Decode, Encode};
use polkadot_primitives::v1::{
    CandidateCommitments, CandidateHash, CollatorPair, CommittedCandidateReceipt, CompactStatement,
    EncodeAs, Hash, HeadData, Id as ParaId, OutboundHrmpMessage, PersistedValidationData, PoV,
    Signed, UpwardMessage, ValidationCode,
};
use std::pin::Pin;

pub use sp_core::traits::SpawnNamed;
pub use sp_consensus_babe::{
    Epoch as BabeEpoch, BabeEpochConfiguration, AllowedSlots as BabeAllowedSlots,
};

use polkadot_primitives::v1::{CandidateCommitments, CandidateHash, CollatorPair, CommittedCandidateReceipt, CompactStatement, EncodeAs, Hash, HeadData, Id as ParaId, OutboundHrmpMessage, PersistedValidationData, Signed, UpwardMessage, ValidationCode, BlakeTwo256, HashT, ValidatorIndex};
pub use polkadot_parachain::primitives::BlockData;


pub mod approval;

/// A statement, where the candidate receipt is included in the `Seconded` variant.
@@ -140,6 +141,102 @@ pub enum ValidationResult {
    Invalid(InvalidCandidate),
}

/// Maximum PoV size we support right now.
pub const MAX_POV_SIZE: u32 = 50 * 1024 * 1024;

/// Very conservative (compression ratio of 1).
///
/// Experiments showed that we have a typical compression ratio of 3.4.
/// https://github.com/ordian/bench-compression-algorithms/
///
/// So this could be reduced if deemed necessary.
pub const MAX_COMPRESSED_POV_SIZE: u32 = MAX_POV_SIZE;

/// A Proof-of-Validity
#[derive(PartialEq, Eq, Clone, Encode, Decode, Debug)]
pub struct PoV {
    /// The block witness data.
    pub block_data: BlockData,
}

impl PoV {
    /// Get the blake2-256 hash of the PoV.
    pub fn hash(&self) -> Hash {
        BlakeTwo256::hash_of(self)
    }
}

/// SCALE and Zstd encoded [`PoV`].
#[derive(Clone, Encode, Decode, PartialEq, Eq)]
pub struct CompressedPoV(Vec<u8>);

#[derive(Debug, Clone, Copy, PartialEq, Eq, thiserror::Error)]
#[allow(missing_docs)]
pub enum CompressedPoVError {
    #[error("Failed to compress a PoV")]
    Compress,
    #[error("Failed to decompress a PoV")]
    Decompress,
    #[error("Failed to decode the uncompressed PoV")]
    Decode,
    #[error("Architecture is not supported")]
    NotSupported,
}

impl CompressedPoV {
    /// Compress the given [`PoV`] and returns a [`CompressedPoV`].
    #[cfg(not(target_os = "unknown"))]
    pub fn compress(pov: &PoV) -> Result<Self, CompressedPoVError> {
        zstd::encode_all(pov.encode().as_slice(), 3).map_err(|_| CompressedPoVError::Compress).map(Self)
    }

    /// Compress the given [`PoV`] and returns a [`CompressedPoV`].
    #[cfg(target_os = "unknown")]
    pub fn compress(_: &PoV) -> Result<Self, CompressedPoVError> {
        Err(CompressedPoVError::NotSupported)
    }

    /// Decompress `self` and returns the [`PoV`] on success.
    #[cfg(not(target_os = "unknown"))]
    pub fn decompress(&self) -> Result<PoV, CompressedPoVError> {
        use std::io::Read;

        struct InputDecoder<'a, T: std::io::BufRead>(&'a mut zstd::Decoder<T>, usize);
        impl<'a, T: std::io::BufRead> parity_scale_codec::Input for InputDecoder<'a, T> {
            fn read(&mut self, into: &mut [u8]) -> Result<(), parity_scale_codec::Error> {
                self.1 = self.1.saturating_add(into.len());
                if self.1 > MAX_POV_SIZE as usize {
                    return Err("pov block too big".into())
                }
                self.0.read_exact(into).map_err(Into::into)
            }
            fn remaining_len(&mut self) -> Result<Option<usize>, parity_scale_codec::Error> {
                Ok(None)
            }
        }

        let mut decoder = zstd::Decoder::new(self.0.as_slice()).map_err(|_| CompressedPoVError::Decompress)?;
        PoV::decode(&mut InputDecoder(&mut decoder, 0)).map_err(|_| CompressedPoVError::Decode)
    }

    /// Decompress `self` and returns the [`PoV`] on success.
    #[cfg(target_os = "unknown")]
    pub fn decompress(&self) -> Result<PoV, CompressedPoVError> {
        Err(CompressedPoVError::NotSupported)
    }

    /// Get compressed data size.
    pub fn len(&self) -> usize {
        self.0.len()
    }
}

impl std::fmt::Debug for CompressedPoV {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "CompressedPoV({} bytes)", self.0.len())
    }
}

/// The output of a collator.
///
/// This differs from `CandidateCommitments` in two ways:
@@ -210,3 +307,36 @@ impl std::fmt::Debug for CollationGenerationConfig {
        write!(f, "CollationGenerationConfig {{ ... }}")
    }
}

/// This is the data we keep available for each candidate included in the relay chain.
#[derive(Clone, Encode, Decode, PartialEq, Eq, Debug)]
pub struct AvailableData {
    /// The Proof-of-Validation of the candidate.
    pub pov: std::sync::Arc<PoV>,
    /// The persisted validation data needed for secondary checks.
    pub validation_data: PersistedValidationData,
}

/// A chunk of erasure-encoded block data.
#[derive(PartialEq, Eq, Clone, Encode, Decode, Serialize, Deserialize, Debug, Hash)]
pub struct ErasureChunk {
    /// The erasure-encoded chunk of data belonging to the candidate block.
    pub chunk: Vec<u8>,
    /// The index of this erasure-encoded chunk of data.
    pub index: ValidatorIndex,
    /// Proof for this chunk's branch in the Merkle tree.
    pub proof: Vec<Vec<u8>>,
}

#[cfg(test)]
mod test {
    use super::{CompressedPoV, CompressedPoVError, PoV};

    #[test]
    fn decompress_huge_pov_block_fails() {
        let pov = PoV { block_data: vec![0; 63 * 1024 * 1024].into() };

        let compressed = CompressedPoV::compress(&pov).unwrap();
        assert_eq!(CompressedPoVError::Decode, compressed.decompress().unwrap_err());
    }
}
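For orientation, a minimal usage sketch (not part of the commit) of the CompressedPoV API introduced above, round-tripping a PoV through compress and decompress. The import path polkadot_node_primitives and the function name roundtrip_example are assumptions; the diff does not show the crate name.

// Illustrative sketch only; `polkadot_node_primitives` is an assumed import path.
use polkadot_node_primitives::{CompressedPoV, PoV};

fn roundtrip_example() -> Result<(), Box<dyn std::error::Error>> {
    // Build a small PoV; `BlockData` converts from `Vec<u8>`, as in the test above.
    let pov = PoV { block_data: vec![1, 2, 3].into() };

    // Compress with zstd before shipping the PoV over the network.
    let compressed = CompressedPoV::compress(&pov)?;
    println!("compressed size: {} bytes", compressed.len());

    // Decompression enforces MAX_POV_SIZE on the decoded data; an oversized
    // PoV fails with CompressedPoVError::Decode, as the test above shows.
    let decompressed = compressed.decompress()?;
    assert_eq!(pov, decompressed);
    Ok(())
}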
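On the constants above: MAX_COMPRESSED_POV_SIZE is set equal to MAX_POV_SIZE, i.e. it assumes the worst case of a PoV that does not compress at all, while the linked benchmarks suggest a typical ratio of about 3.4. A rough back-of-the-envelope sketch (hypothetical standalone program mirroring the constants in the diff):

// Rough numbers behind the "compression ratio" doc comment; constants mirror
// the diff, the typical ratio comes from the linked benchmarks.
const MAX_POV_SIZE: u32 = 50 * 1024 * 1024; // 50 MiB decompressed cap
const TYPICAL_COMPRESSION_RATIO: f64 = 3.4; // typical zstd ratio from the benchmarks

fn main() {
    // At the typical ratio a full-size PoV compresses to roughly 14.7 MiB,
    // which is why the comment notes the limit "could be reduced".
    let typical = MAX_POV_SIZE as f64 / TYPICAL_COMPRESSION_RATIO;
    println!("typical compressed size: {:.1} MiB", typical / (1024.0 * 1024.0));
}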