diff --git a/polkadot/erasure-coding/erasure_coding_fuzzer/src/reconstruct.rs b/polkadot/erasure-coding/erasure_coding_fuzzer/src/reconstruct.rs
deleted file mode 100644
index 6c8d41306b..0000000000
--- a/polkadot/erasure-coding/erasure_coding_fuzzer/src/reconstruct.rs
+++ /dev/null
@@ -1,20 +0,0 @@
-use polkadot_erasure_coding::*;
-use primitives::v1::AvailableData;
-use std::sync::Arc;
-use honggfuzz::fuzz;
-
-fn main(){
-	loop {
-		fuzz!(|data: (usize, Vec<(Vec<u8>, usize)>)| {
-			let (num_validators, chunk_input) = data;
-			if num_validators <= 1 || num_validators > 10_000 {
-				return;
-			}
-			let reconstructed: Result<AvailableData, Error> = reconstruct_v1(
-				num_validators,
-				chunk_input.iter().map(|t| (&*t.0, t.1)).collect::<Vec<(&[u8], usize)>>()
-			);
-			println!("reconstructed {:?}", reconstructed);
-		});
-	}
-}
diff --git a/polkadot/erasure-coding/erasure_coding_fuzzer/src/round_trip.rs b/polkadot/erasure-coding/erasure_coding_fuzzer/src/round_trip.rs
deleted file mode 100644
index 39766c7fbc..0000000000
--- a/polkadot/erasure-coding/erasure_coding_fuzzer/src/round_trip.rs
+++ /dev/null
@@ -1,40 +0,0 @@
-use polkadot_erasure_coding::*;
-use primitives::v1::{AvailableData, BlockData, PoV};
-use std::sync::Arc;
-use honggfuzz::fuzz;
-
-
-fn main(){
-	loop {
-		fuzz!(|data: &[u8]| {
-			let pov_block = PoV {
-				block_data: BlockData(data.iter().cloned().collect()),
-			};
-
-			let available_data = AvailableData {
-				pov: Arc::new(pov_block),
-				validation_data: Default::default(),
-			};
-			let chunks = obtain_chunks_v1(
-				10,
-				&available_data,
-			).unwrap();
-
-			assert_eq!(chunks.len(), 10);
-
-			// any 4 chunks should work.
-			let reconstructed: AvailableData = reconstruct_v1(
-				10,
-				[
-					(&*chunks[1], 1),
-					(&*chunks[4], 4),
-					(&*chunks[6], 6),
-					(&*chunks[9], 9),
-				].iter().cloned(),
-			).unwrap();
-
-			assert_eq!(reconstructed, available_data);
-			println!("{:?}", reconstructed);
-		});
-	}
-}
diff --git a/polkadot/erasure-coding/erasure_coding_fuzzer/Cargo.lock b/polkadot/erasure-coding/fuzzer/Cargo.lock
similarity index 100%
rename from polkadot/erasure-coding/erasure_coding_fuzzer/Cargo.lock
rename to polkadot/erasure-coding/fuzzer/Cargo.lock
diff --git a/polkadot/erasure-coding/erasure_coding_fuzzer/Cargo.toml b/polkadot/erasure-coding/fuzzer/Cargo.toml
similarity index 87%
rename from polkadot/erasure-coding/erasure_coding_fuzzer/Cargo.toml
rename to polkadot/erasure-coding/fuzzer/Cargo.toml
index 5117be38c1..e844e9ad94 100644
--- a/polkadot/erasure-coding/erasure_coding_fuzzer/Cargo.toml
+++ b/polkadot/erasure-coding/fuzzer/Cargo.toml
@@ -1,7 +1,7 @@
 [package]
 name = "erasure_coding_fuzzer"
 version = "0.1.0"
-authors = ["Vincent Ulitzsch <vincent@srlabs.de>"]
+authors = ["Parity Technologies <admin@parity.io>"]
 edition = "2018"
 
 [dependencies]
diff --git a/polkadot/erasure-coding/fuzzer/src/reconstruct.rs b/polkadot/erasure-coding/fuzzer/src/reconstruct.rs
new file mode 100644
index 0000000000..694953e58d
--- /dev/null
+++ b/polkadot/erasure-coding/fuzzer/src/reconstruct.rs
@@ -0,0 +1,16 @@
+use polkadot_erasure_coding::*;
+use primitives::v1::AvailableData;
+use honggfuzz::fuzz;
+
+fn main() {
+	loop {
+		fuzz!(|data: (usize, Vec<(Vec<u8>, usize)>)| {
+			let (num_validators, chunk_input) = data;
+			let reconstructed: Result<AvailableData, Error> = reconstruct_v1(
+				num_validators,
+				chunk_input.iter().map(|t| (&*t.0, t.1)).collect::<Vec<(&[u8], usize)>>()
+			);
+			println!("reconstructed {:?}", reconstructed);
+		});
+	}
+}
diff --git a/polkadot/erasure-coding/fuzzer/src/round_trip.rs b/polkadot/erasure-coding/fuzzer/src/round_trip.rs
new file mode 100644
index 0000000000..141e86073b
--- /dev/null
+++ b/polkadot/erasure-coding/fuzzer/src/round_trip.rs
@@ -0,0 +1,40 @@
+use polkadot_erasure_coding::*;
+use primitives::v1::{AvailableData, BlockData, PoV};
+use std::sync::Arc;
+use honggfuzz::fuzz;
+
+
+fn main() {
+	loop {
+		fuzz!(|data: &[u8]| {
+			let pov_block = PoV {
+				block_data: BlockData(data.iter().cloned().collect()),
+			};
+
+			let available_data = AvailableData {
+				pov: Arc::new(pov_block),
+				validation_data: Default::default(),
+			};
+			let chunks = obtain_chunks_v1(
+				10,
+				&available_data,
+			).unwrap();
+
+			assert_eq!(chunks.len(), 10);
+
+			// any 4 chunks should work.
+			let reconstructed: AvailableData = reconstruct_v1(
+				10,
+				[
+					(&*chunks[1], 1),
+					(&*chunks[4], 4),
+					(&*chunks[6], 6),
+					(&*chunks[9], 9),
+				].iter().cloned(),
+			).unwrap();
+
+			assert_eq!(reconstructed, available_data);
+			println!("{:?}", reconstructed);
+		});
+	}
+}
diff --git a/polkadot/erasure-coding/src/lib.rs b/polkadot/erasure-coding/src/lib.rs
index 199a7436a5..370c228e34 100644
--- a/polkadot/erasure-coding/src/lib.rs
+++ b/polkadot/erasure-coding/src/lib.rs
@@ -45,9 +45,9 @@ pub enum Error {
 	/// Returned when there are too many validators.
 	#[error("There are too many validators")]
 	TooManyValidators,
-	/// Cannot encode something for no validators
-	#[error("Validator set is empty")]
-	EmptyValidators,
+	/// Cannot encode something for zero or one validator
+	#[error("Expected at least 2 validators")]
+	NotEnoughValidators,
 	/// Cannot reconstruct: wrong number of validators.
 	#[error("Validator count mismatches between encoding and decoding")]
 	WrongValidatorCount,
@@ -122,7 +122,7 @@ impl CodeParams {
 
 fn code_params(n_validators: usize) -> Result<CodeParams, Error> {
 	if n_validators > MAX_VALIDATORS { return Err(Error::TooManyValidators) }
-	if n_validators == 0 { return Err(Error::EmptyValidators) }
+	if n_validators <= 1 { return Err(Error::NotEnoughValidators) }
 
 	let n_faulty = n_validators.saturating_sub(1) / 3;
 	let n_good = n_validators - n_faulty;
@@ -406,12 +406,9 @@ mod tests {
 
 	#[test]
 	fn test_code_params() {
-		assert_eq!(code_params(0), Err(Error::EmptyValidators));
+		assert_eq!(code_params(0), Err(Error::NotEnoughValidators));
 
-		assert_eq!(code_params(1), Ok(CodeParams {
-			data_shards: 1,
-			parity_shards: 0,
-		}));
+		assert_eq!(code_params(1), Err(Error::NotEnoughValidators));
 
 		assert_eq!(code_params(2), Ok(CodeParams {
 			data_shards: 1,
@@ -487,6 +484,15 @@ mod tests {
 		assert_eq!(reconstructed, available_data);
 	}
 
+	#[test]
+	fn reconstruct_does_not_panic_on_low_validator_count() {
+		let reconstructed = reconstruct_v1(
+			1,
+			[].iter().cloned(),
+		);
+		assert_eq!(reconstructed, Err(Error::NotEnoughValidators));
+	}
+
 	#[test]
 	fn construct_valid_branches() {
 		let pov_block = PoVBlock {