Fix codegen validation when Runtime APIs are stripped (#1000)

* modify code gen

* cargo fmt

* fix return

* move the runtime_apis static

* small layout change

* tidy up metadata UI testing and test validation function too

* fix validation tests to point to metadata

* update codegen

* fix clippy and tests

* tweak a comment

* rename validate_codegen => is_codegen_valid_for

---------

Co-authored-by: James Wilson <james@jsdw.me>
This commit is contained in:
Tadeo Hepperle
2023-06-09 11:10:48 +02:00
committed by GitHub
parent e711dc1582
commit 992ec9ab9b
9 changed files with 323 additions and 212 deletions
+18 -12
View File
@@ -322,11 +322,14 @@ impl RuntimeGenerator {
.collect();
let pallet_names_len = pallet_names.len();
let metadata_hash = self
let runtime_api_names: Vec<_> = self
.metadata
.hasher()
.only_these_pallets(&pallet_names)
.hash();
.runtime_api_traits()
.map(|api| api.name().to_string())
.collect();
let runtime_api_names_len = runtime_api_names.len();
let metadata_hash = self.metadata.hasher().hash();
let modules = pallets_with_mod_names
.iter()
@@ -542,6 +545,9 @@ impl RuntimeGenerator {
// Identify the pallets composing the static metadata by name.
pub static PALLETS: [&str; #pallet_names_len] = [ #(#pallet_names,)* ];
// Runtime APIs in the metadata by name.
pub static RUNTIME_APIS: [&str; #runtime_api_names_len] = [ #(#runtime_api_names,)* ];
/// The error type returned when there is a runtime issue.
pub type DispatchError = #types_mod_ident::sp_runtime::DispatchError;
@@ -621,14 +627,14 @@ impl RuntimeGenerator {
)*
}
/// check whether the Client you are using is aligned with the statically generated codegen.
pub fn validate_codegen<T: #crate_path::Config, C: #crate_path::client::OfflineClientT<T>>(client: &C) -> Result<(), #crate_path::error::MetadataError> {
let runtime_metadata_hash = client.metadata().hasher().only_these_pallets(&PALLETS).hash();
if runtime_metadata_hash != [ #(#metadata_hash,)* ] {
Err(#crate_path::error::MetadataError::IncompatibleCodegen)
} else {
Ok(())
}
/// check whether the metadata provided is aligned with this statically generated code.
pub fn is_codegen_valid_for(metadata: &#crate_path::Metadata) -> bool {
let runtime_metadata_hash = metadata
.hasher()
.only_these_pallets(&PALLETS)
.only_these_runtime_apis(&RUNTIME_APIS)
.hash();
runtime_metadata_hash == [ #(#metadata_hash,)* ]
}
#( #modules )*
+36 -10
View File
@@ -407,6 +407,7 @@ pub fn get_pallet_hash(pallet: PalletMetadata) -> [u8; HASH_LEN] {
pub struct MetadataHasher<'a> {
metadata: &'a Metadata,
specific_pallets: Option<Vec<&'a str>>,
specific_runtime_apis: Option<Vec<&'a str>>,
}
impl<'a> MetadataHasher<'a> {
@@ -415,6 +416,7 @@ impl<'a> MetadataHasher<'a> {
Self {
metadata,
specific_pallets: None,
specific_runtime_apis: None,
}
}
@@ -424,6 +426,16 @@ impl<'a> MetadataHasher<'a> {
self
}
/// Restrict hashing to the given runtime APIs rather than hashing
/// every runtime API present in the metadata.
pub fn only_these_runtime_apis<S: AsRef<str>>(
    &mut self,
    runtime_apis: &'a [S],
) -> &mut Self {
    let names: Vec<&'a str> = runtime_apis.iter().map(AsRef::as_ref).collect();
    self.specific_runtime_apis = Some(names);
    self
}
/// Hash the given metadata.
pub fn hash(&self) -> [u8; HASH_LEN] {
let mut visited_ids = HashSet::<u32>::new();
@@ -431,23 +443,37 @@ impl<'a> MetadataHasher<'a> {
let metadata = self.metadata;
// Combine every (selected) pallet's hash into one accumulator.
let pallet_hash = metadata.pallets().fold([0u8; HASH_LEN], |bytes, pallet| {
    // If specific pallets were requested, skip any pallet not in that list.
    let should_hash = self
        .specific_pallets
        .as_ref()
        .map(|specific_pallets| specific_pallets.contains(&pallet.name()))
        .unwrap_or(true);

    // We don't care what order the pallets are seen in, so XOR their
    // hashes together to be order independent.
    if should_hash {
        xor(bytes, get_pallet_hash(pallet))
    } else {
        bytes
    }
});
// Combine every (selected) runtime API trait's hash into one accumulator.
let apis_hash = metadata
    .runtime_api_traits()
    .fold([0u8; HASH_LEN], |bytes, api| {
        // If specific runtime APIs were requested, skip any runtime API
        // not in that list.
        let should_hash = self
            .specific_runtime_apis
            .as_ref()
            .map(|specific_runtime_apis| specific_runtime_apis.contains(&api.name()))
            .unwrap_or(true);

        // We don't care what order the runtime APIs are seen in, so XOR their
        // hashes together to be order independent.
        //
        // NOTE: the accumulator must be XORed with the trait hash exactly once;
        // `xor(bytes, xor(bytes, hash))` would cancel the accumulator out
        // (A ^ (A ^ B) == B) and leave only the last API's hash.
        if should_hash {
            xor(bytes, get_runtime_trait_hash(api))
        } else {
            bytes
        }
    });
let extrinsic_hash = get_extrinsic_hash(&metadata.types, &metadata.extrinsic);
+7
View File
@@ -59,6 +59,13 @@ impl From<subxt_metadata::Metadata> for Metadata {
}
}
/// Convert raw `frame_metadata` into our `Metadata` wrapper, propagating
/// any decode/conversion failure from the underlying crate.
impl TryFrom<frame_metadata::RuntimeMetadataPrefixed> for Metadata {
    type Error = subxt_metadata::TryFromError;

    fn try_from(value: frame_metadata::RuntimeMetadataPrefixed) -> Result<Self, Self::Error> {
        let inner = subxt_metadata::Metadata::try_from(value)?;
        Ok(inner.into())
    }
}
impl codec::Decode for Metadata {
fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
subxt_metadata::Metadata::decode(input).map(Metadata::new)
@@ -1,5 +1,6 @@
#[allow(dead_code, unused_imports, non_camel_case_types)]
#[allow(clippy::all)]
#[allow(rustdoc::broken_intra_doc_links)]
pub mod api {
#[allow(unused_imports)]
mod root_mod {
@@ -64,6 +65,25 @@ pub mod api {
"Crowdloan",
"XcmPallet",
];
pub static RUNTIME_APIS: [&str; 17usize] = [
"Core",
"Metadata",
"BlockBuilder",
"NominationPoolsApi",
"StakingApi",
"TaggedTransactionQueue",
"OffchainWorkerApi",
"ParachainHost",
"BeefyApi",
"MmrApi",
"GrandpaApi",
"BabeApi",
"AuthorityDiscoveryApi",
"SessionKeys",
"AccountNonceApi",
"TransactionPaymentApi",
"TransactionPaymentCallApi",
];
#[doc = r" The error type returned when there is a runtime issue."]
pub type DispatchError = runtime_types::sp_runtime::DispatchError;
#[derive(
@@ -4420,26 +4440,19 @@ pub mod api {
xcm_pallet::calls::TransactionApi
}
}
#[doc = r" check whether the Client you are using is aligned with the statically generated codegen."]
pub fn validate_codegen<T: ::subxt::Config, C: ::subxt::client::OfflineClientT<T>>(
client: &C,
) -> Result<(), ::subxt::error::MetadataError> {
let runtime_metadata_hash = client
.metadata()
#[doc = r" check whether the metadata provided is aligned with this statically generated code."]
pub fn is_codegen_valid_for(metadata: &::subxt::Metadata) -> bool {
let runtime_metadata_hash = metadata
.hasher()
.only_these_pallets(&PALLETS)
.only_these_runtime_apis(&RUNTIME_APIS)
.hash();
if runtime_metadata_hash
!= [
151u8, 83u8, 251u8, 44u8, 149u8, 59u8, 20u8, 183u8, 19u8, 173u8, 234u8, 48u8,
114u8, 104u8, 69u8, 102u8, 189u8, 208u8, 10u8, 87u8, 154u8, 252u8, 54u8, 185u8,
248u8, 199u8, 45u8, 173u8, 199u8, 95u8, 189u8, 253u8,
runtime_metadata_hash
== [
48u8, 175u8, 255u8, 171u8, 180u8, 123u8, 181u8, 54u8, 125u8, 74u8, 109u8, 140u8,
192u8, 208u8, 131u8, 194u8, 195u8, 232u8, 33u8, 229u8, 178u8, 181u8, 236u8, 230u8,
37u8, 97u8, 134u8, 144u8, 187u8, 127u8, 47u8, 237u8,
]
{
Err(::subxt::error::MetadataError::IncompatibleCodegen)
} else {
Ok(())
}
}
pub mod system {
use super::root_mod;
@@ -91,19 +91,15 @@ async fn full_metadata_check() {
let api = ctx.client();
// Runtime metadata is identical to the metadata used during API generation.
assert!(node_runtime::validate_codegen(&api).is_ok());
assert!(node_runtime::is_codegen_valid_for(&api.metadata()));
// Modify the metadata.
let metadata = modified_metadata(api.metadata(), |md| {
md.pallets[0].name = "NewPallet".to_string();
});
let api = metadata_to_api(metadata, &ctx).await;
assert_eq!(
node_runtime::validate_codegen(&api)
.expect_err("Validation should fail for incompatible metadata"),
::subxt::error::MetadataError::IncompatibleCodegen
);
// It should now be invalid:
assert!(!node_runtime::is_codegen_valid_for(&metadata));
}
#[tokio::test]
@@ -134,7 +130,7 @@ async fn constant_values_are_not_validated() {
let api = metadata_to_api(metadata, &ctx).await;
assert!(node_runtime::validate_codegen(&api).is_ok());
assert!(node_runtime::is_codegen_valid_for(&api.metadata()));
assert!(api.constants().at(&deposit_addr).is_ok());
}
+80 -22
View File
@@ -15,7 +15,7 @@ mod dispatch_errors;
mod storage;
mod utils;
use crate::utils::{MetadataTestRunner, PalletMetadataTestRunner};
use crate::utils::MetadataTestRunner;
// Each of these tests leads to some rust code being compiled and
// executed to test that compilation is successful (or errors in the
@@ -23,35 +23,93 @@ use crate::utils::{MetadataTestRunner, PalletMetadataTestRunner};
#[test]
fn ui_tests() {
let mut m = MetadataTestRunner::default();
// specify pallets we want to test the metadata for (None => all pallets, but specifying only Some(..) speeds up test)
let mut p = PalletMetadataTestRunner::new(Some(&["Babe", "Claims", "Grandpa", "Balances"]));
let t = trybuild::TestCases::new();
t.pass("src/correct/*.rs");
// Check that storage maps with no keys are handled properly.
t.pass(m.path_to_ui_test_for_metadata(
"storage_map_no_keys",
storage::metadata_storage_map_no_keys(),
));
t.pass(
m.new_test_case()
.name("storage_map_no_keys")
.build(storage::metadata_storage_map_no_keys()),
);
// Test that the codegen can handle the different types of DispatchError.
t.pass(m.path_to_ui_test_for_metadata(
"named_field_dispatch_error",
dispatch_errors::metadata_named_field_dispatch_error(),
));
t.pass(m.path_to_ui_test_for_metadata(
"legacy_dispatch_error",
dispatch_errors::metadata_legacy_dispatch_error(),
));
t.pass(m.path_to_ui_test_for_metadata(
"array_dispatch_error",
dispatch_errors::metadata_array_dispatch_error(),
));
t.pass(
m.new_test_case()
.name("named_field_dispatch_error")
.build(dispatch_errors::metadata_named_field_dispatch_error()),
);
t.pass(
m.new_test_case()
.name("legacy_dispatch_error")
.build(dispatch_errors::metadata_legacy_dispatch_error()),
);
t.pass(
m.new_test_case()
.name("array_dispatch_error")
.build(dispatch_errors::metadata_array_dispatch_error()),
);
// Ensure the generate per pallet metadata compiles.
while let Some(path) = p.path_to_next_ui_test() {
t.pass(path);
// Test retaining only specific pallets and ensure that works.
for pallet in ["Babe", "Claims", "Grandpa", "Balances"] {
let mut metadata = MetadataTestRunner::load_metadata();
metadata.retain(|p| p == pallet, |_| true);
t.pass(
m.new_test_case()
.name(format!("retain_pallet_{pallet}"))
.build(metadata),
);
}
// Test retaining only specific runtime APIs to ensure that works.
for runtime_api in ["Core", "Metadata"] {
let mut metadata = MetadataTestRunner::load_metadata();
metadata.retain(|_| true, |r| r == runtime_api);
t.pass(
m.new_test_case()
.name(format!("retain_runtime_api_{runtime_api}"))
.build(metadata),
);
}
// Validation should succeed when metadata we codegen from is stripped and
// client metadata is full:
{
let mut metadata = MetadataTestRunner::load_metadata();
metadata.retain(
|p| ["Babe", "Claims"].contains(&p),
|r| ["Core", "Metadata"].contains(&r),
);
t.pass(
m.new_test_case()
.name("stripped_metadata_validates_against_full")
.validation_metadata(MetadataTestRunner::load_metadata())
.build(metadata),
);
}
// Finally as a sanity check, codegen against stripped metadata should
// _not_ compare valid against client with differently stripped metadata.
{
let mut codegen_metadata = MetadataTestRunner::load_metadata();
codegen_metadata.retain(
|p| ["Babe", "Claims"].contains(&p),
|r| ["Core", "Metadata"].contains(&r),
);
let mut validation_metadata = MetadataTestRunner::load_metadata();
validation_metadata.retain(|p| p != "Claims", |r| r != "Metadata");
t.pass(
m.new_test_case()
.name("stripped_metadata_doesnt_validate_against_different")
.validation_metadata(validation_metadata)
.expects_invalid()
.build(codegen_metadata),
);
}
}
@@ -2,10 +2,12 @@
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use codec::Encode;
use frame_metadata::RuntimeMetadataPrefixed;
use codec::{Decode, Encode};
use std::io::Read;
use subxt_metadata::Metadata;
static TEST_DIR_PREFIX: &str = "subxt_generated_ui_tests_";
static METADATA_FILE: &str = "../../artifacts/polkadot_metadata_full.scale";
#[derive(Default)]
pub struct MetadataTestRunner {
@@ -13,50 +15,26 @@ pub struct MetadataTestRunner {
}
impl MetadataTestRunner {
pub fn path_to_ui_test_for_metadata(
&mut self,
name: impl AsRef<str>,
metadata: RuntimeMetadataPrefixed,
) -> String {
let test_name = name.as_ref();
/// Loads metadata that we can use in our tests. Panics if
/// there is some issue decoding the metadata.
pub fn load_metadata() -> Metadata {
    let mut file =
        std::fs::File::open(METADATA_FILE).expect("Cannot open metadata.scale artifact");
    // Read the whole artifact into memory, then SCALE-decode it.
    let mut bytes = Vec::new();
    file.read_to_end(&mut bytes)
        .expect("Failed to read metadata.scale file");
    Metadata::decode(&mut &*bytes).expect("Cannot decode metadata bytes")
}
/// Create a new test case.
///
/// Hands back a builder; call `.build(metadata)` on it to obtain a path
/// that `trybuild` can be pointed at.
pub fn new_test_case(&mut self) -> MetadataTestRunnerCaseBuilder {
    let index = self.index;
    // increment index so that each test case gets its own folder path.
    self.index += 1;
    MetadataTestRunnerCaseBuilder::new(index)
}
}
@@ -71,3 +49,132 @@ impl Drop for MetadataTestRunner {
}
}
}
/// Build a single test case.
pub struct MetadataTestRunnerCaseBuilder {
    /// Index of this case; used to give each case its own tmp directory.
    index: usize,
    /// Test name; also used as the generated `.rs` file name.
    name: String,
    /// Metadata that the generated code is validated against; when `None`,
    /// the metadata handed to `build` is used instead.
    validation_metadata: Option<Metadata>,
    /// Whether validation is expected to succeed in the generated test.
    should_be_valid: bool,
}
impl MetadataTestRunnerCaseBuilder {
fn new(index: usize) -> Self {
MetadataTestRunnerCaseBuilder {
index,
name: format!("Test {index}"),
validation_metadata: None,
should_be_valid: true,
}
}
/// Set the test name (this also becomes the generated `.rs` file name).
pub fn name(mut self, name: impl AsRef<str>) -> Self {
    let owned: String = name.as_ref().into();
    self.name = owned;
    self
}
/// Set metadata to be validated against the generated code.
/// If this is never called, the same metadata used to generate the code
/// is validated instead.
pub fn validation_metadata(mut self, md: impl Into<Metadata>) -> Self {
    let metadata: Metadata = md.into();
    self.validation_metadata = Some(metadata);
    self
}
/// Expect the validation metadata provided to _not_ be valid.
pub fn expects_invalid(mut self) -> Self {
self.should_be_valid = false;
self
}
/// At the minimum, takes some metadata and a test name, generates the code
/// and hands back a path to some generated code that `trybuild` can be pointed at.
/// Validation metadata and expected validity can also be provided.
///
/// The generated code:
/// - checks that the subxt macro can perform codegen given the
///   provided macro_metadata without running into any issues.
/// - checks that the `runtime::is_codegen_valid_for` function returns
///   true or false when compared to the `validation_metadata`, according
///   to whether `expects_invalid()` is set or not.
///
/// The generated code will be tidied up when the `MetadataTestRunner` that
/// this was handed out from is dropped.
pub fn build<M>(self, macro_metadata: M) -> String
where
    M: TryInto<Metadata>,
    M::Error: std::fmt::Debug,
{
    let macro_metadata = macro_metadata.try_into().expect("can into Metadata");
    // If no validation metadata was given, validate against the same
    // metadata the macro codegen uses.
    let validation_metadata = self
        .validation_metadata
        .unwrap_or_else(|| macro_metadata.clone());
    // Each case writes into its own temp dir, keyed by index, so cases
    // don't collide with one another.
    let index = self.index;
    let mut tmp_dir = std::env::temp_dir();
    tmp_dir.push(format!("{TEST_DIR_PREFIX}{index}"));
    // These paths are interpolated into the generated program below, so
    // compute them as owned strings up front.
    let tmp_macro_metadata_path = {
        let mut t = tmp_dir.clone();
        t.push("macro_metadata.scale");
        t.to_string_lossy().into_owned()
    };
    let tmp_validation_metadata_path = {
        let mut t = tmp_dir.clone();
        t.push("validation_metadata.scale");
        t.to_string_lossy().into_owned()
    };
    let tmp_rust_path = {
        let mut t = tmp_dir.clone();
        let test_name = &self.name;
        t.push(format!("{test_name}.rs"));
        t.to_string_lossy().into_owned()
    };
    let encoded_macro_metadata = macro_metadata.encode();
    let encoded_validation_metadata = validation_metadata.encode();
    // Spliced into the generated `assert_eq!` below as a boolean literal.
    let should_be_valid_str = if self.should_be_valid {
        "true"
    } else {
        "false"
    };
    let rust_file = format!(
        r#"
use subxt;
use subxt::ext::codec::Decode;
use std::io::Read;
#[subxt::subxt(runtime_metadata_path = "{tmp_macro_metadata_path}")]
pub mod polkadot {{}}
fn main() {{
// load validation metadata:
let mut file = std::fs::File::open("{tmp_validation_metadata_path}")
.expect("validation_metadata exists");
let mut bytes = Vec::new();
file.read_to_end(&mut bytes)
.expect("Failed to read metadata.scale file");
let metadata = subxt::Metadata::decode(&mut &*bytes)
.expect("Cannot decode metadata bytes");
// validate it:
let is_valid = polkadot::is_codegen_valid_for(&metadata);
assert_eq!(is_valid, {should_be_valid_str}, "expected validity to line up");
}}
"#
    );
    std::fs::create_dir_all(&tmp_dir).expect("could not create tmp ui test dir");
    // Write metadatas to tmp folder:
    std::fs::write(&tmp_macro_metadata_path, encoded_macro_metadata).unwrap();
    std::fs::write(&tmp_validation_metadata_path, encoded_validation_metadata).unwrap();
    // Write test file to tmp folder (it'll be moved by trybuild):
    std::fs::write(&tmp_rust_path, rust_file).unwrap();
    tmp_rust_path
}
}
-2
View File
@@ -4,7 +4,6 @@
pub mod dispatch_error;
mod metadata_test_runner;
mod pallet_metadata_test_runner;
use frame_metadata::{
v15::{
@@ -16,7 +15,6 @@ use frame_metadata::{
use scale_info::{meta_type, IntoPortable, TypeInfo};
pub use metadata_test_runner::MetadataTestRunner;
pub use pallet_metadata_test_runner::PalletMetadataTestRunner;
/// Given some pallet metadata, generate a [`RuntimeMetadataPrefixed`] struct.
/// We default to a useless extrinsic type, and register a fake `DispatchError`
@@ -1,100 +0,0 @@
// Copyright 2019-2023 Parity Technologies (UK) Ltd.
// This file is dual-licensed as Apache-2.0 or GPL-3.0.
// see LICENSE for license details.
use codec::{Decode, Encode};
use std::io::Read;
use subxt_metadata::Metadata;
static TEST_DIR_PREFIX: &str = "subxt_generated_pallets_ui_tests_";
static METADATA_FILE: &str = "../../artifacts/polkadot_metadata_full.scale";
pub struct PalletMetadataTestRunner {
metadata: Metadata,
index: usize,
pallet_names: Option<Vec<String>>,
}
impl PalletMetadataTestRunner {
/// if pallet_names is Some(..) only the provided pallets will be tested.
pub fn new(pallet_names: Option<&[&str]>) -> PalletMetadataTestRunner {
let mut file =
std::fs::File::open(METADATA_FILE).expect("Cannot open metadata.scale artifact");
let mut bytes = Vec::new();
file.read_to_end(&mut bytes)
.expect("Failed to read metadata.scale file");
let metadata = Metadata::decode(&mut &*bytes).expect("Cannot decode metadata bytes");
let pallet_names = pallet_names.map(|v| v.iter().map(|e| e.to_string()).collect());
PalletMetadataTestRunner {
metadata,
index: 0,
pallet_names,
}
}
pub fn path_to_next_ui_test(&mut self) -> Option<String> {
let pallet = match self.pallet_names.as_ref() {
Some(names) => self.metadata.pallet_by_name(names.get(self.index)?)?,
None => self.metadata.pallets().nth(self.index)?,
};
let test_name = pallet.name();
// Increment test index to point at the next pallet.
let index = self.index;
self.index += 1;
// Build custom metadata containing only this pallet.
let mut metadata = self.metadata.clone();
metadata.retain(|pallet_filter| pallet_filter == pallet.name(), |_| true);
let mut tmp_dir = std::env::temp_dir();
tmp_dir.push(format!("{TEST_DIR_PREFIX}{index}"));
let tmp_metadata_path = {
let mut t = tmp_dir.clone();
t.push("metadata.scale");
t.to_string_lossy().into_owned()
};
let tmp_rust_path = {
let mut t = tmp_dir.clone();
t.push(format!("{test_name}.rs"));
t.to_string_lossy().into_owned()
};
let encoded_metadata = metadata.encode();
let rust_file = format!(
r#"
use subxt;
#[subxt::subxt(runtime_metadata_path = "{tmp_metadata_path}")]
pub mod polkadot {{}}
fn main() {{}}
"#
);
std::fs::create_dir_all(&tmp_dir).expect("could not create tmp ui test dir");
// Write metadata to tmp folder:
std::fs::write(&tmp_metadata_path, encoded_metadata).unwrap();
// Write test file to tmp folder (it'll be moved by trybuild):
std::fs::write(&tmp_rust_path, rust_file).unwrap();
Some(tmp_rust_path)
}
}
// `trybuild` runs all tests once it's dropped. So, we defer all cleanup until we
// are dropped too, to make sure that cleanup happens after tests are ran.
impl Drop for PalletMetadataTestRunner {
fn drop(&mut self) {
for idx in 0..self.index {
let mut tmp_dir = std::env::temp_dir();
tmp_dir.push(format!("{TEST_DIR_PREFIX}{idx}"));
let _ = std::fs::remove_dir_all(tmp_dir);
}
}
}