mirror of
https://github.com/pezkuwichain/pezkuwi-subxt.git
synced 2026-04-26 00:37:57 +00:00
contracts: Fix printing the Schedule (#3021)
Printing the `Schedule` is a useful debugging tool and general sanity
check. It is much easier to interpret than the raw weights.
The printing relied on using `println` and hence was only available from
the native runtime. This is no longer available. This is why in this PR
we switch to using `log` which works from Wasm.
I made sure that `WeightDebug` is only derived when
`runtime-benchmarks` is set so that we don't increase the size of the
binary.
Some other changes were necessary to make this actually work inside the
runtime. For example, I needed to remove `format!` and usage of floats.
Please note that this removed the decimal from the number because
truncating the fraction without using floats would not be easy and would
require custom code. I think the precision here is sufficient.
This is what the output looks like now:
```
Schedule {
limits: Limits {
event_topics: 4,
globals: 256,
locals: 1024,
parameters: 128,
memory_pages: 16,
table_size: 4096,
br_table_size: 256,
subject_len: 32,
payload_len: 16384,
runtime_memory: 134217728,
},
instruction_weights: InstructionWeights {
base: 2565,
_phantom: PhantomData<kitchensink_runtime::Runtime>,
},
host_fn_weights: HostFnWeights {
caller: 322 ns, 6 bytes,
is_contract: 28 µs, 2684 bytes,
code_hash: 29 µs, 2688 bytes,
own_code_hash: 400 ns, 6 bytes,
caller_is_origin: 176 ns, 3 bytes,
caller_is_root: 158 ns, 3 bytes,
address: 315 ns, 6 bytes,
gas_left: 355 ns, 6 bytes,
balance: 1 µs, 6 bytes,
value_transferred: 314 ns, 6 bytes,
minimum_balance: 318 ns, 6 bytes,
block_number: 313 ns, 6 bytes,
now: 325 ns, 6 bytes,
weight_to_fee: 1 µs, 14 bytes,
input: 263 ns, 6 bytes,
input_per_byte: 989 ps, 0 bytes,
r#return: 0 ps, 45 bytes,
return_per_byte: 320 ps, 0 bytes,
terminate: 1 ms, 5266 bytes,
random: 1 µs, 10 bytes,
deposit_event: 1 µs, 10 bytes,
deposit_event_per_topic: 127 µs, 2508 bytes,
deposit_event_per_byte: 501 ps, 0 bytes,
debug_message: 226 ns, 7 bytes,
debug_message_per_byte: 1 ns, 0 bytes,
set_storage: 131 µs, 293 bytes,
set_storage_per_new_byte: 576 ps, 0 bytes,
set_storage_per_old_byte: 184 ps, 1 bytes,
set_code_hash: 297 µs, 3090 bytes,
clear_storage: 131 µs, 289 bytes,
clear_storage_per_byte: 92 ps, 1 bytes,
contains_storage: 29 µs, 289 bytes,
contains_storage_per_byte: 213 ps, 1 bytes,
get_storage: 29 µs, 297 bytes,
get_storage_per_byte: 980 ps, 1 bytes,
take_storage: 131 µs, 297 bytes,
take_storage_per_byte: 921 ps, 1 bytes,
transfer: 156 µs, 2520 bytes,
call: 484 µs, 2721 bytes,
delegate_call: 406 µs, 2637 bytes,
call_transfer_surcharge: 607 µs, 5227 bytes,
call_per_cloned_byte: 970 ps, 0 bytes,
instantiate: 1 ms, 2731 bytes,
instantiate_transfer_surcharge: 131 µs, 2549 bytes,
instantiate_per_input_byte: 1 ns, 0 bytes,
instantiate_per_salt_byte: 1 ns, 0 bytes,
hash_sha2_256: 377 ns, 8 bytes,
hash_sha2_256_per_byte: 1 ns, 0 bytes,
hash_keccak_256: 767 ns, 8 bytes,
hash_keccak_256_per_byte: 3 ns, 0 bytes,
hash_blake2_256: 443 ns, 8 bytes,
hash_blake2_256_per_byte: 1 ns, 0 bytes,
hash_blake2_128: 440 ns, 8 bytes,
hash_blake2_128_per_byte: 1 ns, 0 bytes,
ecdsa_recover: 45 µs, 77 bytes,
ecdsa_to_eth_address: 11 µs, 42 bytes,
sr25519_verify: 41 µs, 112 bytes,
sr25519_verify_per_byte: 5 ns, 1 bytes,
reentrance_count: 174 ns, 3 bytes,
account_reentrance_count: 248 ns, 40 bytes,
instantiation_nonce: 154 ns, 3 bytes,
add_delegate_dependency: 131 µs, 2606 bytes,
remove_delegate_dependency: 130 µs, 2568 bytes,
},
}
###############################################
Lazy deletion weight per key: Weight(ref_time: 126109302, proof_size: 70)
Lazy deletion keys per block: 15859
```
This commit is contained in:
committed by
GitHub
parent
d72fb58070
commit
5e5341da9b
@@ -20,23 +20,13 @@
|
||||
//! Most likely you should use the [`#[define_env]`][`macro@define_env`] attribute macro which hides
|
||||
//! boilerplate of defining external environment for a wasm module.
|
||||
|
||||
#![no_std]
|
||||
|
||||
extern crate alloc;
|
||||
|
||||
use alloc::{
|
||||
collections::BTreeMap,
|
||||
format,
|
||||
string::{String, ToString},
|
||||
vec::Vec,
|
||||
};
|
||||
use core::cmp::Reverse;
|
||||
use proc_macro::TokenStream;
|
||||
use proc_macro2::{Span, TokenStream as TokenStream2};
|
||||
use quote::{quote, quote_spanned, ToTokens};
|
||||
use syn::{
|
||||
parse_macro_input, punctuated::Punctuated, spanned::Spanned, token::Comma, Data, DeriveInput,
|
||||
FnArg, Ident,
|
||||
Fields, FnArg, Ident,
|
||||
};
|
||||
|
||||
/// This derives `Debug` for a struct where each field must be of some numeric type.
|
||||
@@ -44,17 +34,6 @@ use syn::{
|
||||
/// it is readable by humans.
|
||||
#[proc_macro_derive(WeightDebug)]
|
||||
pub fn derive_weight_debug(input: TokenStream) -> TokenStream {
|
||||
derive_debug(input, format_weight)
|
||||
}
|
||||
|
||||
/// This is basically identical to the std libs Debug derive but without adding any
|
||||
/// bounds to existing generics.
|
||||
#[proc_macro_derive(ScheduleDebug)]
|
||||
pub fn derive_schedule_debug(input: TokenStream) -> TokenStream {
|
||||
derive_debug(input, format_default)
|
||||
}
|
||||
|
||||
fn derive_debug(input: TokenStream, fmt: impl Fn(&Ident) -> TokenStream2) -> TokenStream {
|
||||
let input = parse_macro_input!(input as DeriveInput);
|
||||
let name = &input.ident;
|
||||
let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
|
||||
@@ -68,45 +47,15 @@ fn derive_debug(input: TokenStream, fmt: impl Fn(&Ident) -> TokenStream2) -> Tok
|
||||
.into()
|
||||
};
|
||||
|
||||
#[cfg(feature = "full")]
|
||||
let fields = iterate_fields(data, fmt);
|
||||
|
||||
#[cfg(not(feature = "full"))]
|
||||
let fields = {
|
||||
drop(fmt);
|
||||
let _ = data;
|
||||
TokenStream2::new()
|
||||
};
|
||||
|
||||
let tokens = quote! {
|
||||
impl #impl_generics core::fmt::Debug for #name #ty_generics #where_clause {
|
||||
fn fmt(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
use ::sp_runtime::{FixedPointNumber, FixedU128 as Fixed};
|
||||
let mut formatter = formatter.debug_struct(stringify!(#name));
|
||||
#fields
|
||||
formatter.finish()
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
tokens.into()
|
||||
}
|
||||
|
||||
/// This is only used then the `full` feature is activated.
|
||||
#[cfg(feature = "full")]
|
||||
fn iterate_fields(data: &syn::DataStruct, fmt: impl Fn(&Ident) -> TokenStream2) -> TokenStream2 {
|
||||
use syn::Fields;
|
||||
|
||||
match &data.fields {
|
||||
let fields = match &data.fields {
|
||||
Fields::Named(fields) => {
|
||||
let recurse = fields.named.iter().filter_map(|f| {
|
||||
let name = f.ident.as_ref()?;
|
||||
if name.to_string().starts_with('_') {
|
||||
return None
|
||||
}
|
||||
let value = fmt(name);
|
||||
let ret = quote_spanned! { f.span() =>
|
||||
formatter.field(stringify!(#name), #value);
|
||||
formatter.field(stringify!(#name), &HumanWeight(self.#name));
|
||||
};
|
||||
Some(ret)
|
||||
});
|
||||
@@ -119,39 +68,53 @@ fn iterate_fields(data: &syn::DataStruct, fmt: impl Fn(&Ident) -> TokenStream2)
|
||||
compile_error!("Unnamed fields are not supported")
|
||||
},
|
||||
Fields::Unit => quote!(),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
fn format_weight(field: &Ident) -> TokenStream2 {
|
||||
quote_spanned! { field.span() =>
|
||||
&if self.#field.ref_time() > 1_000_000_000 {
|
||||
format!(
|
||||
"{:.1?} ms, {} bytes",
|
||||
Fixed::saturating_from_rational(self.#field.ref_time(), 1_000_000_000).to_float(),
|
||||
self.#field.proof_size()
|
||||
)
|
||||
} else if self.#field.ref_time() > 1_000_000 {
|
||||
format!(
|
||||
"{:.1?} µs, {} bytes",
|
||||
Fixed::saturating_from_rational(self.#field.ref_time(), 1_000_000).to_float(),
|
||||
self.#field.proof_size()
|
||||
)
|
||||
} else if self.#field.ref_time() > 1_000 {
|
||||
format!(
|
||||
"{:.1?} ns, {} bytes",
|
||||
Fixed::saturating_from_rational(self.#field.ref_time(), 1_000).to_float(),
|
||||
self.#field.proof_size()
|
||||
)
|
||||
} else {
|
||||
format!("{} ps, {} bytes", self.#field.ref_time(), self.#field.proof_size())
|
||||
let tokens = quote! {
|
||||
impl #impl_generics ::core::fmt::Debug for #name #ty_generics #where_clause {
|
||||
fn fmt(&self, formatter: &mut ::core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
use ::sp_runtime::{FixedPointNumber, FixedU128 as Fixed};
|
||||
use ::core::{fmt, write};
|
||||
|
||||
struct HumanWeight(Weight);
|
||||
|
||||
impl fmt::Debug for HumanWeight {
|
||||
fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if self.0.ref_time() > 1_000_000_000 {
|
||||
write!(
|
||||
formatter,
|
||||
"{} ms, {} bytes",
|
||||
Fixed::saturating_from_rational(self.0.ref_time(), 1_000_000_000).into_inner() / Fixed::accuracy(),
|
||||
self.0.proof_size()
|
||||
)
|
||||
} else if self.0.ref_time() > 1_000_000 {
|
||||
write!(
|
||||
formatter,
|
||||
"{} µs, {} bytes",
|
||||
Fixed::saturating_from_rational(self.0.ref_time(), 1_000_000).into_inner() / Fixed::accuracy(),
|
||||
self.0.proof_size()
|
||||
)
|
||||
} else if self.0.ref_time() > 1_000 {
|
||||
write!(
|
||||
formatter,
|
||||
"{} ns, {} bytes",
|
||||
Fixed::saturating_from_rational(self.0.ref_time(), 1_000).into_inner() / Fixed::accuracy(),
|
||||
self.0.proof_size()
|
||||
)
|
||||
} else {
|
||||
write!(formatter, "{} ps, {} bytes", self.0.ref_time(), self.0.proof_size())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut formatter = formatter.debug_struct(stringify!(#name));
|
||||
#fields
|
||||
formatter.finish()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
fn format_default(field: &Ident) -> TokenStream2 {
|
||||
quote_spanned! { field.span() =>
|
||||
&self.#field
|
||||
}
|
||||
tokens.into()
|
||||
}
|
||||
|
||||
/// Parsed environment definition.
|
||||
@@ -480,7 +443,7 @@ fn expand_func_doc(func: &HostFn) -> TokenStream2 {
|
||||
fn expand_docs(def: &EnvDef) -> TokenStream2 {
|
||||
// Create the `Current` trait with only the newest versions
|
||||
// we sort so that only the newest versions make it into `docs`
|
||||
let mut current_docs = BTreeMap::new();
|
||||
let mut current_docs = std::collections::HashMap::new();
|
||||
let mut funcs: Vec<_> = def.host_funcs.iter().filter(|f| f.alias_to.is_none()).collect();
|
||||
funcs.sort_unstable_by_key(|func| Reverse(func.version));
|
||||
for func in funcs {
|
||||
@@ -493,7 +456,7 @@ fn expand_docs(def: &EnvDef) -> TokenStream2 {
|
||||
|
||||
// Create the `legacy` module with all functions
|
||||
// Maps from version to list of functions that have this version
|
||||
let mut legacy_doc = BTreeMap::<u8, Vec<TokenStream2>>::new();
|
||||
let mut legacy_doc = std::collections::BTreeMap::<u8, Vec<TokenStream2>>::new();
|
||||
for func in def.host_funcs.iter() {
|
||||
legacy_doc.entry(func.version).or_default().push(expand_func_doc(&func));
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user