Compare commits

...

9 Commits

Author SHA1 Message Date
Omar Abdulla b8a71b40e2 Merge remote-tracking branch 'origin/main' into chore/update-version-of-tests2 2025-11-03 02:47:53 +03:00
Omar Abdulla 6df00f567c Update the version of tests 2025-11-03 02:45:47 +03:00
Omar 75159229df Update the commit hash of resolc compiler tests (#201) 2025-11-02 21:27:26 +00:00
Omar Abdulla 9b75a4f236 Update the commit hash of resolc compiler tests 2025-11-03 00:25:56 +03:00
Omar 2af1a62319 Supply the revert reason in the logs (#200) 2025-11-02 17:54:11 +00:00
Omar e09be4f3fa Remove references to kitchensink (#199)
* Remove references to kitchensink

* Update the ci for the revive-dev-node

* Update references to the substrate node

* Add the step path to the failure logs

* Update the CI

* fix machete

* Update tests

* Update the commit hash of the polkadot sdk

* Ignore the tx mine test
2025-11-01 05:30:43 +00:00
Omar 33b5faca45 Add revert reason to the assertion logs (#198) 2025-11-01 01:50:58 +00:00
Omar 172fb4700f Cleanup benchmarks (#197)
* Require test argument

* Increase tx timeout and channel limits

* Add default arguments for tests

* Fix tests

* Fix tests

* Cleanup benchmarks
2025-10-30 01:32:23 +00:00
Omar fefea17c8e Require test argument (#196)
* Require test argument

* Increase tx timeout and channel limits

* Add default arguments for tests

* Fix tests

* Fix tests
2025-10-27 00:17:47 +00:00
24 changed files with 607 additions and 306 deletions
+16 -9
@@ -51,15 +51,15 @@ jobs:
         uses: actions/cache@v3
         with:
           path: |
-            ~/.cargo/bin/substrate-node
+            ~/.cargo/bin/revive-dev-node
             ~/.cargo/bin/eth-rpc
-          key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}
+          key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}-with-dev-node
-      - name: Build substrate-node
+      - name: Build revive-dev-node
         if: steps.cache.outputs.cache-hit != 'true'
         run: |
           cd polkadot-sdk
-          cargo install --locked --force --profile=production --path substrate/bin/node/cli --bin substrate-node --features cli
+          cargo install --locked --force --profile=production --path substrate/frame/revive/dev-node/node --bin revive-dev-node
       - name: Build eth-rpc
         if: steps.cache.outputs.cache-hit != 'true'

@@ -109,14 +109,16 @@ jobs:
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
+        with:
+          submodules: recursive
       - name: Restore binaries from cache
         uses: actions/cache@v3
         with:
           path: |
-            ~/.cargo/bin/substrate-node
+            ~/.cargo/bin/revive-dev-node
             ~/.cargo/bin/eth-rpc
-          key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}
+          key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}-with-dev-node
      - name: Restore downloaded Polkadot binaries from cache
        uses: actions/cache@v3

@@ -202,8 +204,13 @@ jobs:
          sudo apt update
          sudo apt install kurtosis-cli
+      - name: Install cargo-machete
+        uses: clechasseur/rs-cargo@v2
+        with:
+          command: install
+          args: cargo-machete@0.7.0
       - name: Machete
-        uses: bnjbvr/cargo-machete@v0.7.1
+        run: cargo machete crates
       - name: Format
         run: make format

@@ -211,8 +218,8 @@ jobs:
       - name: Clippy
         run: make clippy
-      - name: Check substrate-node version
-        run: substrate-node --version
+      - name: Check revive-dev-node version
+        run: revive-dev-node --version
       - name: Check eth-rpc version
         run: eth-rpc --version
Generated
+11
@@ -1920,6 +1920,7 @@ dependencies = [
  "anstyle",
  "clap_lex",
  "strsim",
+ "terminal_size",
 ]

 [[package]]

@@ -7838,6 +7839,16 @@ dependencies = [
  "winapi-util",
 ]

+[[package]]
+name = "terminal_size"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed"
+dependencies = [
+ "rustix",
+ "windows-sys 0.59.0",
+]
+
 [[package]]
 name = "thiserror"
 version = "1.0.69"
+1 -1
@@ -26,7 +26,7 @@ ansi_term = "0.12.1"
anyhow = "1.0" anyhow = "1.0"
bson = { version = "2.15.0" } bson = { version = "2.15.0" }
cacache = { version = "13.1.0" } cacache = { version = "13.1.0" }
clap = { version = "4", features = ["derive"] } clap = { version = "4", features = ["derive", "wrap_help"] }
dashmap = { version = "6.1.0" } dashmap = { version = "6.1.0" }
foundry-compilers-artifacts = { version = "0.18.0" } foundry-compilers-artifacts = { version = "0.18.0" }
futures = { version = "0.3.31" } futures = { version = "0.3.31" }
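The `wrap_help` feature is what pulls the new `terminal_size` dependency into `Cargo.lock` above, and it is what the `term_width = 100` attribute added to the `#[command]` definition further down relies on. A minimal sketch of the behavior, using a hypothetical `demo` CLI rather than the real `retester` definition:

```rust
use clap::Parser;

/// A demo CLI whose long help text clap wraps once the `wrap_help` feature is enabled.
#[derive(Parser)]
#[command(name = "demo", term_width = 100)]
struct Cli {
    /// A deliberately long help description that, with the `wrap_help` feature
    /// enabled, clap wraps to at most 100 columns (or the detected terminal
    /// width, whichever is smaller) instead of printing as one long line.
    #[arg(long)]
    verbose: bool,
}

fn main() {
    let cli = Cli::parse();
    println!("verbose = {}", cli.verbose);
}
```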
+4 -4
@@ -9,7 +9,7 @@
 This project compiles and executes declarative smart-contract tests against multiple platforms, then compares behavior (status, return data, events, and state diffs). Today it supports:

 - Geth (EVM reference implementation)
-- Revive Kitchensink (Substrate-based PolkaVM + `eth-rpc` proxy)
+- Revive Dev Node (Substrate-based PolkaVM + `eth-rpc` proxy)

 Use it to:

@@ -39,9 +39,9 @@ This repository contains none of the tests and only contains the testing framewo
 This section describes the required dependencies that this framework requires to run. Compiling this framework is pretty straightforward and no additional dependencies beyond what's specified in the `Cargo.toml` file should be required.

 - Stable Rust
-- Geth - When doing differential testing against the PVM we submit transactions to a Geth node and to Kitchensink to compare them.
+- Geth - When doing differential testing against the PVM we submit transactions to a Geth node and to Revive Dev Node to compare them.
-- Kitchensink - When doing differential testing against the PVM we submit transactions to a Geth node and to Kitchensink to compare them.
+- Revive Dev Node - When doing differential testing against the PVM we submit transactions to a Geth node and to Revive Dev Node to compare them.
-- ETH-RPC - All communication with Kitchensink is done through the ETH RPC.
+- ETH-RPC - All communication with Revive Dev Node is done through the ETH RPC.
 - Solc - This is actually a transitive dependency, while this tool doesn't require solc as it downloads the versions that it requires, resolc requires that Solc is installed and available in the path.
 - Resolc - This is required to compile the contracts to PolkaVM bytecode.
 - Kurtosis - The Kurtosis CLI tool is required for the production Ethereum mainnet-like node configuration with Geth as the execution layer and lighthouse as the consensus layer. Kurtosis also requires docker to be installed since it runs everything inside of docker containers.
-6
@@ -31,10 +31,6 @@ pub enum PlatformIdentifier {
     GethEvmSolc,
     /// The Lighthouse Go-ethereum reference full node EVM implementation with the solc compiler.
     LighthouseGethEvmSolc,
-    /// The kitchensink node with the PolkaVM backend with the resolc compiler.
-    KitchensinkPolkavmResolc,
-    /// The kitchensink node with the REVM backend with the solc compiler.
-    KitchensinkRevmSolc,
     /// The revive dev node with the PolkaVM backend with the resolc compiler.
     ReviveDevNodePolkavmResolc,
     /// The revive dev node with the REVM backend with the solc compiler.

@@ -95,8 +91,6 @@ pub enum NodeIdentifier {
     Geth,
     /// The go-ethereum node implementation.
     LighthouseGeth,
-    /// The Kitchensink node implementation.
-    Kitchensink,
     /// The revive dev node implementation.
     ReviveDevNode,
     /// A zombienet spawned nodes
+4 -98
@@ -24,7 +24,7 @@ use strum::{AsRefStr, Display, EnumString, IntoStaticStr};
 use temp_dir::TempDir;

 #[derive(Clone, Debug, Parser, Serialize, Deserialize)]
-#[command(name = "retester")]
+#[command(name = "retester", term_width = 100)]
 pub enum Context {
     /// Executes tests in the MatterLabs format differentially on multiple targets concurrently.
     Test(Box<TestExecutionContext>),

@@ -131,17 +131,6 @@ impl AsRef<PolkadotParachainConfiguration> for Context {
     }
 }

-impl AsRef<KitchensinkConfiguration> for Context {
-    fn as_ref(&self) -> &KitchensinkConfiguration {
-        match self {
-            Self::Test(context) => context.as_ref().as_ref(),
-            Self::Benchmark(context) => context.as_ref().as_ref(),
-            Self::ExportGenesis(context) => context.as_ref().as_ref(),
-            Self::ExportJsonSchema => unreachable!(),
-        }
-    }
-}
-
 impl AsRef<ReviveDevNodeConfiguration> for Context {
     fn as_ref(&self) -> &ReviveDevNodeConfiguration {
         match self {

@@ -283,10 +272,6 @@ pub struct TestExecutionContext {
     #[clap(flatten, next_help_heading = "Lighthouse Configuration")]
     pub lighthouse_configuration: KurtosisConfiguration,

-    /// Configuration parameters for the Kitchensink.
-    #[clap(flatten, next_help_heading = "Kitchensink Configuration")]
-    pub kitchensink_configuration: KitchensinkConfiguration,
-
     /// Configuration parameters for the Revive Dev Node.
     #[clap(flatten, next_help_heading = "Revive Dev Node Configuration")]
     pub revive_dev_node_configuration: ReviveDevNodeConfiguration,

@@ -409,10 +394,6 @@ pub struct BenchmarkingContext {
     #[clap(flatten, next_help_heading = "Lighthouse Configuration")]
     pub lighthouse_configuration: KurtosisConfiguration,

-    /// Configuration parameters for the Kitchensink.
-    #[clap(flatten, next_help_heading = "Kitchensink Configuration")]
-    pub kitchensink_configuration: KitchensinkConfiguration,
-
     /// Configuration parameters for the Polkadot Parachain.
     #[clap(flatten, next_help_heading = "Polkadot Parachain Configuration")]
     pub polkadot_parachain_configuration: PolkadotParachainConfiguration,

@@ -491,10 +472,6 @@ pub struct ExportGenesisContext {
     #[clap(flatten, next_help_heading = "Lighthouse Configuration")]
     pub lighthouse_configuration: KurtosisConfiguration,

-    /// Configuration parameters for the Kitchensink.
-    #[clap(flatten, next_help_heading = "Kitchensink Configuration")]
-    pub kitchensink_configuration: KitchensinkConfiguration,
-
     /// Configuration parameters for the Polkadot Parachain.
     #[clap(flatten, next_help_heading = "Polkadot Parachain Configuration")]
     pub polkadot_parachain_configuration: PolkadotParachainConfiguration,

@@ -510,7 +487,7 @@ pub struct ExportGenesisContext {
 impl Default for TestExecutionContext {
     fn default() -> Self {
-        Self::parse_from(["execution-context"])
+        Self::parse_from(["execution-context", "--test", "."])
     }
 }

@@ -556,12 +533,6 @@ impl AsRef<KurtosisConfiguration> for TestExecutionContext {
     }
 }

-impl AsRef<KitchensinkConfiguration> for TestExecutionContext {
-    fn as_ref(&self) -> &KitchensinkConfiguration {
-        &self.kitchensink_configuration
-    }
-}
-
 impl AsRef<ReviveDevNodeConfiguration> for TestExecutionContext {
     fn as_ref(&self) -> &ReviveDevNodeConfiguration {
         &self.revive_dev_node_configuration

@@ -612,7 +583,7 @@ impl AsRef<IgnoreSuccessConfiguration> for TestExecutionContext {
 impl Default for BenchmarkingContext {
     fn default() -> Self {
-        Self::parse_from(["benchmarking-context"])
+        Self::parse_from(["benchmarking-context", "--test", "."])
     }
 }

@@ -658,12 +629,6 @@ impl AsRef<PolkadotParachainConfiguration> for BenchmarkingContext {
     }
 }

-impl AsRef<KitchensinkConfiguration> for BenchmarkingContext {
-    fn as_ref(&self) -> &KitchensinkConfiguration {
-        &self.kitchensink_configuration
-    }
-}
-
 impl AsRef<ReviveDevNodeConfiguration> for BenchmarkingContext {
     fn as_ref(&self) -> &ReviveDevNodeConfiguration {
         &self.revive_dev_node_configuration

@@ -718,12 +683,6 @@ impl AsRef<KurtosisConfiguration> for ExportGenesisContext {
     }
 }

-impl AsRef<KitchensinkConfiguration> for ExportGenesisContext {
-    fn as_ref(&self) -> &KitchensinkConfiguration {
-        &self.kitchensink_configuration
-    }
-}
-
 impl AsRef<PolkadotParachainConfiguration> for ExportGenesisContext {
     fn as_ref(&self) -> &PolkadotParachainConfiguration {
         &self.polkadot_parachain_configuration

@@ -759,7 +718,7 @@ pub struct CorpusConfiguration {
     /// - `{metadata_file_path}::{case_idx}::{mode}`: This is very similar to the above specifier
     /// with the exception that in this case the mode is specified and will be used in the test.
     #[serde_as(as = "Vec<serde_with::DisplayFromStr>")]
-    #[arg(short = 't', long = "test")]
+    #[arg(short = 't', long = "test", required = true)]
     pub test_specifiers: Vec<ParsedTestSpecifier>,
 }

@@ -842,30 +801,6 @@ pub struct KurtosisConfiguration {
     pub path: PathBuf,
 }

-/// A set of configuration parameters for Kitchensink.
-#[derive(Clone, Debug, Parser, Serialize, Deserialize)]
-pub struct KitchensinkConfiguration {
-    /// Specifies the path of the kitchensink node to be used by the tool.
-    ///
-    /// If this is not specified, then the tool assumes that it should use the kitchensink binary
-    /// that's provided in the user's $PATH.
-    #[clap(
-        id = "kitchensink.path",
-        long = "kitchensink.path",
-        default_value = "substrate-node"
-    )]
-    pub path: PathBuf,
-
-    /// The amount of time to wait upon startup before considering that the node timed out.
-    #[clap(
-        id = "kitchensink.start-timeout-ms",
-        long = "kitchensink.start-timeout-ms",
-        default_value = "30000",
-        value_parser = parse_duration
-    )]
-    pub start_timeout_ms: Duration,
-}
-
 /// A set of configuration parameters for the revive dev node.
 #[derive(Clone, Debug, Parser, Serialize, Deserialize)]
 pub struct ReviveDevNodeConfiguration {

@@ -1161,35 +1096,6 @@ fn parse_duration(s: &str) -> anyhow::Result<Duration> {
         .map_err(Into::into)
 }

-/// The Solidity compatible node implementation.
-///
-/// This describes the solutions to be tested against on a high level.
-#[derive(
-    Clone,
-    Copy,
-    Debug,
-    PartialEq,
-    Eq,
-    PartialOrd,
-    Ord,
-    Hash,
-    Serialize,
-    ValueEnum,
-    EnumString,
-    Display,
-    AsRefStr,
-    IntoStaticStr,
-)]
-#[strum(serialize_all = "kebab-case")]
-pub enum TestingPlatform {
-    /// The go-ethereum reference full node EVM implementation.
-    Geth,
-    /// The kitchensink runtime provides the PolkaVM (PVM) based node implementation.
-    Kitchensink,
-    /// A polkadot/Substrate based network
-    Zombienet,
-}
-
 /// The output format to use for the test execution output.
 #[derive(
     Clone,
@@ -112,12 +112,23 @@ impl Watcher {
         let all_transactions_submitted = all_transactions_submitted.clone();
         let mut blocks_information_stream = self.blocks_stream;
         async move {
-            while let Some(block) = blocks_information_stream.next().await {
+            while let Some(mut block) = blocks_information_stream.next().await {
                 // If the block number is equal to or less than the last block before the
                 // repetition then we ignore it and continue on to the next block.
                 if block.ethereum_block_information.block_number <= ignore_block_before {
                     continue;
                 }
+
+                {
+                    let watch_for_transaction_hashes =
+                        watch_for_transaction_hashes.read().await;
+                    for tx_hash in block.ethereum_block_information.transaction_hashes.iter() {
+                        let Some((step_path, _)) = watch_for_transaction_hashes.get(tx_hash)
+                        else {
+                            continue;
+                        };
+                        *block.tx_counts.entry(step_path.clone()).or_default() += 1
+                    }
+                }

                 reporter
                     .report_block_mined_event(block.clone())
                     .expect("Can't fail");

@@ -189,7 +200,6 @@ pub enum WatcherEvent {
     /// streaming the blocks.
     ignore_block_before: BlockNumber,
     },
     /// Informs the watcher that a transaction was submitted and that the watcher should watch for a
     /// transaction with this hash in the blocks that it watches.
     SubmittedTransaction {

@@ -198,7 +208,6 @@ pub enum WatcherEvent {
     /// The step path of the step that the transaction belongs to.
     step_path: StepPath,
     },
     /// Informs the watcher that all of the transactions of this benchmark have been submitted and
     /// that it can expect to receive no further transaction hashes and not even watch the channel
     /// any longer.
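The new block in the watcher loop tallies how many watched transactions landed in each mined block, keyed by step path. A self-contained sketch of the same counting pattern, with `StepPath` simplified to a `String` and the `RwLock`-guarded map replaced by a plain `HashMap`:

```rust
use std::collections::{BTreeMap, HashMap};

fn count_transactions_per_step(
    watched: &HashMap<&str, &str>, // tx hash -> step path
    block_tx_hashes: &[&str],      // hashes mined in the block
) -> BTreeMap<String, usize> {
    let mut tx_counts = BTreeMap::new();
    for tx_hash in block_tx_hashes {
        // Transactions we are not watching for are skipped, mirroring the
        // `let Some(..) else { continue }` in the diff above.
        let Some(step_path) = watched.get(tx_hash) else {
            continue;
        };
        *tx_counts.entry(step_path.to_string()).or_default() += 1;
    }
    tx_counts
}

fn main() {
    let watched = HashMap::from([("0xaa", "deploy"), ("0xbb", "call")]);
    let counts = count_transactions_per_step(&watched, &["0xaa", "0xbb", "0xcc"]);
    assert_eq!(counts["deploy"], 1);
    assert_eq!(counts["call"], 1);
}
```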
+8 -2
@@ -353,7 +353,8 @@ where
         .execute_account_allocation(step_path, step.as_ref())
         .await
         .context("Account Allocation Step Failed"),
-    }?;
+    }
+    .context(format!("Failure on step {step_path}"))?;

     self.steps_executed += steps_executed;
     Ok(())
 }

@@ -597,15 +598,20 @@ where
     let expected = !assertion.exception;
     let actual = receipt.status();
     if actual != expected {
+        let revert_reason = tracing_result
+            .revert_reason
+            .as_ref()
+            .or(tracing_result.error.as_ref());
         tracing::error!(
             expected,
             actual,
             ?receipt,
             ?tracing_result,
+            ?revert_reason,
             "Transaction status assertion failed"
         );
         anyhow::bail!(
-            "Transaction status assertion failed - Expected {expected} but got {actual}",
+            "Transaction status assertion failed - Expected {expected} but got {actual}. Revert reason: {revert_reason:?}",
         );
     }
@@ -330,15 +330,18 @@ async fn start_cli_reporting_task(output_format: OutputFormat, reporter: Reporte
                 .unwrap();
                 writeln!(buf).unwrap();

-                buf = tokio::task::spawn_blocking(move || {
-                    buf.flush().unwrap();
-                    buf
-                })
-                .await
-                .unwrap();
+                if aggregator_events_rx.is_empty() {
+                    buf = tokio::task::spawn_blocking(move || {
+                        buf.flush().unwrap();
+                        buf
+                    })
+                    .await
+                    .unwrap();
+                }
             }
         }
     }
+    info!("Aggregator Broadcast Channel Closed");

     // Summary at the end.
     match output_format {
-140
@@ -172,134 +172,6 @@ impl Platform for LighthouseGethEvmSolcPlatform {
     }
 }

-#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash)]
-pub struct KitchensinkPolkavmResolcPlatform;
-
-impl Platform for KitchensinkPolkavmResolcPlatform {
-    fn platform_identifier(&self) -> PlatformIdentifier {
-        PlatformIdentifier::KitchensinkPolkavmResolc
-    }
-
-    fn node_identifier(&self) -> NodeIdentifier {
-        NodeIdentifier::Kitchensink
-    }
-
-    fn vm_identifier(&self) -> VmIdentifier {
-        VmIdentifier::PolkaVM
-    }
-
-    fn compiler_identifier(&self) -> CompilerIdentifier {
-        CompilerIdentifier::Resolc
-    }
-
-    fn new_node(
-        &self,
-        context: Context,
-    ) -> anyhow::Result<JoinHandle<anyhow::Result<Box<dyn EthereumNode + Send + Sync>>>> {
-        let genesis_configuration = AsRef::<GenesisConfiguration>::as_ref(&context);
-        let kitchensink_path = AsRef::<KitchensinkConfiguration>::as_ref(&context)
-            .path
-            .clone();
-        let genesis = genesis_configuration.genesis()?.clone();
-        Ok(thread::spawn(move || {
-            let node = SubstrateNode::new(
-                kitchensink_path,
-                SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND,
-                None,
-                context,
-                &[],
-            );
-            let node = spawn_node(node, genesis)?;
-            Ok(Box::new(node) as Box<_>)
-        }))
-    }
-
-    fn new_compiler(
-        &self,
-        context: Context,
-        version: Option<VersionOrRequirement>,
-    ) -> Pin<Box<dyn Future<Output = anyhow::Result<Box<dyn SolidityCompiler>>>>> {
-        Box::pin(async move {
-            let compiler = Resolc::new(context, version).await;
-            compiler.map(|compiler| Box::new(compiler) as Box<dyn SolidityCompiler>)
-        })
-    }
-
-    fn export_genesis(&self, context: Context) -> anyhow::Result<serde_json::Value> {
-        let kitchensink_path = AsRef::<KitchensinkConfiguration>::as_ref(&context)
-            .path
-            .as_path();
-        let wallet = AsRef::<WalletConfiguration>::as_ref(&context).wallet();
-        let export_chainspec_command = SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND;
-        SubstrateNode::node_genesis(kitchensink_path, export_chainspec_command, &wallet)
-    }
-}
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash)]
-pub struct KitchensinkRevmSolcPlatform;
-
-impl Platform for KitchensinkRevmSolcPlatform {
-    fn platform_identifier(&self) -> PlatformIdentifier {
-        PlatformIdentifier::KitchensinkRevmSolc
-    }
-
-    fn node_identifier(&self) -> NodeIdentifier {
-        NodeIdentifier::Kitchensink
-    }
-
-    fn vm_identifier(&self) -> VmIdentifier {
-        VmIdentifier::Evm
-    }
-
-    fn compiler_identifier(&self) -> CompilerIdentifier {
-        CompilerIdentifier::Solc
-    }
-
-    fn new_node(
-        &self,
-        context: Context,
-    ) -> anyhow::Result<JoinHandle<anyhow::Result<Box<dyn EthereumNode + Send + Sync>>>> {
-        let genesis_configuration = AsRef::<GenesisConfiguration>::as_ref(&context);
-        let kitchensink_path = AsRef::<KitchensinkConfiguration>::as_ref(&context)
-            .path
-            .clone();
-        let genesis = genesis_configuration.genesis()?.clone();
-        Ok(thread::spawn(move || {
-            let node = SubstrateNode::new(
-                kitchensink_path,
-                SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND,
-                None,
-                context,
-                &[],
-            );
-            let node = spawn_node(node, genesis)?;
-            Ok(Box::new(node) as Box<_>)
-        }))
-    }
-
-    fn new_compiler(
-        &self,
-        context: Context,
-        version: Option<VersionOrRequirement>,
-    ) -> Pin<Box<dyn Future<Output = anyhow::Result<Box<dyn SolidityCompiler>>>>> {
-        Box::pin(async move {
-            let compiler = Solc::new(context, version).await;
-            compiler.map(|compiler| Box::new(compiler) as Box<dyn SolidityCompiler>)
-        })
-    }
-
-    fn export_genesis(&self, context: Context) -> anyhow::Result<serde_json::Value> {
-        let kitchensink_path = AsRef::<KitchensinkConfiguration>::as_ref(&context)
-            .path
-            .as_path();
-        let wallet = AsRef::<WalletConfiguration>::as_ref(&context).wallet();
-        let export_chainspec_command = SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND;
-        SubstrateNode::node_genesis(kitchensink_path, export_chainspec_command, &wallet)
-    }
-}
-
 #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash)]
 pub struct ReviveDevNodePolkavmResolcPlatform;

@@ -557,12 +429,6 @@ impl From<PlatformIdentifier> for Box<dyn Platform> {
             PlatformIdentifier::LighthouseGethEvmSolc => {
                 Box::new(LighthouseGethEvmSolcPlatform) as Box<_>
             }
-            PlatformIdentifier::KitchensinkPolkavmResolc => {
-                Box::new(KitchensinkPolkavmResolcPlatform) as Box<_>
-            }
-            PlatformIdentifier::KitchensinkRevmSolc => {
-                Box::new(KitchensinkRevmSolcPlatform) as Box<_>
-            }
             PlatformIdentifier::ReviveDevNodePolkavmResolc => {
                 Box::new(ReviveDevNodePolkavmResolcPlatform) as Box<_>
             }

@@ -584,12 +450,6 @@ impl From<PlatformIdentifier> for &dyn Platform {
             PlatformIdentifier::LighthouseGethEvmSolc => {
                 &LighthouseGethEvmSolcPlatform as &dyn Platform
             }
-            PlatformIdentifier::KitchensinkPolkavmResolc => {
-                &KitchensinkPolkavmResolcPlatform as &dyn Platform
-            }
-            PlatformIdentifier::KitchensinkRevmSolc => {
-                &KitchensinkRevmSolcPlatform as &dyn Platform
-            }
             PlatformIdentifier::ReviveDevNodePolkavmResolc => {
                 &ReviveDevNodePolkavmResolcPlatform as &dyn Platform
             }
+11 -3
@@ -1,5 +1,6 @@
 use std::{collections::HashMap, fmt::Display, str::FromStr};

+use alloy::hex::ToHexExt;
 use alloy::primitives::{FixedBytes, utils::parse_units};
 use alloy::{
     eips::BlockNumberOrTag,

@@ -686,8 +687,8 @@ impl Calldata {
             Calldata::Compound(items) => {
                 stream::iter(items.iter().zip(other.chunks(32)))
                     .map(|(this, other)| async move {
-                        // The matterlabs format supports wildcards and therefore we
-                        // also need to support them.
+                        // The MatterLabs format supports wildcards and therefore we also need to
+                        // support them.
                         if this.as_ref() == "*" {
                             return Ok::<_, anyhow::Error>(true);
                         }

@@ -768,7 +769,14 @@ impl CalldataItem {
         match stack.as_slice() {
             // Empty stack means that we got an empty compound calldata which we resolve to zero.
             [] => Ok(U256::ZERO),
-            [CalldataToken::Item(item)] => Ok(*item),
+            [CalldataToken::Item(item)] => {
+                tracing::debug!(
+                    original_item = ?self,
+                    resolved_item = item.to_be_bytes::<32>().encode_hex(),
+                    "Resolution Done"
+                );
+                Ok(*item)
+            }
             _ => Err(anyhow::anyhow!(
                 "Invalid calldata arithmetic operation - Invalid stack"
             )),
@@ -540,6 +540,7 @@ impl EthereumNode for GethNode {
                     .to_vec(),
             },
             substrate_block_information: None,
+            tx_counts: Default::default(),
         })
     });

@@ -771,6 +771,7 @@ impl EthereumNode for LighthouseGethNode {
                     .to_vec(),
             },
             substrate_block_information: None,
+            tx_counts: Default::default(),
         })
     });
@@ -92,7 +92,6 @@ impl SubstrateNode {
     const SUBSTRATE_LOG_ENV: &str = "error,evm=debug,sc_rpc_server=info,runtime::revive=debug";
     const PROXY_LOG_ENV: &str = "info,eth-rpc=debug";

-    pub const KITCHENSINK_EXPORT_CHAINSPEC_COMMAND: &str = "export-chain-spec";
     pub const REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND: &str = "build-spec";

     pub fn new(

@@ -333,7 +332,7 @@ impl SubstrateNode {
         trace!("Waiting for chainspec export");
         if !output.status.success() {
             anyhow::bail!(
-                "Substrate-node export-chain-spec failed: {}",
+                "substrate-node export-chain-spec failed: {}",
                 String::from_utf8_lossy(&output.stderr)
             );
         }

@@ -578,6 +577,7 @@ impl EthereumNode for SubstrateNode {
                 proof_size: block_proof_size,
                 max_proof_size,
             }),
+            tx_counts: Default::default(),
         })
     }
 });

@@ -799,8 +799,8 @@ mod tests {
         let context = test_config();
         let mut node = SubstrateNode::new(
-            context.kitchensink_configuration.path.clone(),
-            SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND,
+            context.revive_dev_node_configuration.path.clone(),
+            SubstrateNode::REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND,
             None,
             &context,
             &[],

@@ -822,6 +822,7 @@ mod tests {
     }

     #[tokio::test]
+    #[ignore = "Ignored since it takes a long time to run"]
     async fn node_mines_simple_transfer_transaction_and_returns_receipt() {
         // Arrange
         let (context, node) = shared_state();

@@ -838,11 +839,14 @@ mod tests {
             .value(U256::from(100_000_000_000_000u128));

         // Act
-        let receipt = provider.send_transaction(transaction).await;
+        let mut pending_transaction = provider
+            .send_transaction(transaction)
+            .await
+            .expect("Submission failed");
+        pending_transaction.set_timeout(Some(Duration::from_secs(60)));

         // Assert
-        let _ = receipt
-            .expect("Failed to send the transfer transaction")
+        let _ = pending_transaction
             .get_receipt()
             .await
             .expect("Failed to get the receipt for the transfer");

@@ -866,8 +870,8 @@ mod tests {
         let context = test_config();
         let mut dummy_node = SubstrateNode::new(
-            context.kitchensink_configuration.path.clone(),
-            SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND,
+            context.revive_dev_node_configuration.path.clone(),
+            SubstrateNode::REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND,
             None,
             &context,
             &[],

@@ -960,7 +964,7 @@ mod tests {
         assert!(
             version.starts_with("substrate-node"),
-            "Expected Substrate-node version string, got: {version}"
+            "Expected substrate-node version string, got: {version}"
         );
     }
@@ -210,6 +210,7 @@ impl ZombienetNode {
                 .with_args(vec![
                     ("--pool-limit", u32::MAX.to_string().as_str()).into(),
                     ("--pool-kbytes", u32::MAX.to_string().as_str()).into(),
+                    ("--dev-block-time", 12000u16.to_string().as_str()).into(),
                 ])
             })
         })

@@ -355,7 +356,7 @@ impl ZombienetNode {
         if !output.status.success() {
             anyhow::bail!(
-                "Substrate-node export-chain-spec failed: {}",
+                "substrate-node export-chain-spec failed: {}",
                 String::from_utf8_lossy(&output.stderr)
             );
         }

@@ -599,6 +600,7 @@ impl EthereumNode for ZombienetNode {
                 proof_size: block_proof_size,
                 max_proof_size,
             }),
+            tx_counts: Default::default(),
         })
     }
 });

@@ -856,6 +858,7 @@ mod tests {
     #[tokio::test]
     #[ignore = "Ignored for the time being"]
     async fn test_transfer_transaction_should_return_receipt() {
+        // Arrange
         let (ctx, node) = new_node().await;
         let provider = node.provider().await.expect("Failed to create provider");

@@ -864,9 +867,15 @@ mod tests {
             .to(account_address)
             .value(U256::from(100_000_000_000_000u128));

-        let receipt = provider.send_transaction(transaction).await;
-        let _ = receipt
-            .expect("Failed to send the transfer transaction")
+        // Act
+        let mut pending_transaction = provider
+            .send_transaction(transaction)
+            .await
+            .expect("Submission failed");
+        pending_transaction.set_timeout(Some(Duration::from_secs(60)));
+
+        // Assert
+        let _ = pending_transaction
             .get_receipt()
             .await
             .expect("Failed to get the receipt for the transfer");
@@ -62,7 +62,10 @@ where
     ) -> TransportResult<Self::Fillable> {
         match self.inner.prepare(provider, tx).await {
             Ok(fill) => Ok(Some(fill)),
-            Err(_) => Ok(None),
+            Err(err) => {
+                tracing::debug!(error = ?err, "Gas Provider Estimation Failed, using fallback");
+                Ok(None)
+            }
         }
     }
+1 -1
@@ -104,7 +104,7 @@ where
     };
     debug!(%tx_hash, "Submitted Transaction");

-    pending_transaction.set_timeout(Some(Duration::from_secs(120)));
+    pending_transaction.set_timeout(Some(Duration::from_secs(240)));
     let tx_hash = pending_transaction.watch().await.context(format!(
         "Transaction inclusion watching timeout for {tx_hash}"
     ))?;
+8 -8
@@ -41,7 +41,7 @@ pub struct ReportAggregator {
 impl ReportAggregator {
     pub fn new(context: Context) -> Self {
         let (runner_tx, runner_rx) = unbounded_channel::<RunnerEvent>();
-        let (listener_tx, _) = channel::<ReporterEvent>(1024);
+        let (listener_tx, _) = channel::<ReporterEvent>(0xFFFF);
         Self {
             report: Report::new(context),
             remaining_cases: Default::default(),

@@ -64,7 +64,7 @@ impl ReportAggregator {
         debug!("Starting to aggregate report");
         while let Some(event) = self.runner_rx.recv().await {
-            debug!(?event, "Received Event");
+            debug!(event = event.variant_name(), "Received Event");
             match event {
                 RunnerEvent::SubscribeToEvents(event) => {
                     self.handle_subscribe_to_events_event(*event);

@@ -412,8 +412,8 @@ impl ReportAggregator {
         {
             block_information.sort_by(|a, b| {
                 a.ethereum_block_information
-                    .block_timestamp
-                    .cmp(&b.ethereum_block_information.block_timestamp)
+                    .block_number
+                    .cmp(&b.ethereum_block_information.block_number)
             });

             // Computing the TPS.

@@ -466,7 +466,6 @@ impl ReportAggregator {
             .filter_map(|block| block.ref_time_block_fullness_percentage())
             .map(|v| v as u64)
             .collect::<Vec<_>>();
-        dbg!(&reftime_block_fullness);
         if !reftime_block_fullness.is_empty() {
             report
                 .metrics

@@ -482,7 +481,6 @@ impl ReportAggregator {
             .filter_map(|block| block.proof_size_block_fullness_percentage())
             .map(|v| v as u64)
             .collect::<Vec<_>>();
-        dbg!(&proof_size_block_fullness);
         if !proof_size_block_fullness.is_empty() {
             report
                 .metrics

@@ -803,8 +801,9 @@ where
     pub fn with_list(
         &mut self,
         platform_identifier: PlatformIdentifier,
-        mut list: Vec<T>,
+        original_list: Vec<T>,
     ) -> &mut Self {
+        let mut list = original_list.clone();
         list.sort();
         let Some(min) = list.first().copied() else {
             return self;

@@ -842,7 +841,7 @@ where
             .insert(platform_identifier, median);
         self.raw
             .get_or_insert_default()
-            .insert(platform_identifier, list);
+            .insert(platform_identifier, original_list);
         self
     }

@@ -883,6 +882,7 @@ pub struct ContractInformation {
 pub struct MinedBlockInformation {
     pub ethereum_block_information: EthereumMinedBlockInformation,
     pub substrate_block_information: Option<SubstrateMinedBlockInformation>,
+    pub tx_counts: BTreeMap<StepPath, usize>,
 }

 impl MinedBlockInformation {
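The `with_list` change above sorts a clone for the min/max/mean/median computations so that the raw, unsorted series can still be stored in its original block-by-block order. A reduced sketch of that idea (the real method is generic over `T` and keyed by `PlatformIdentifier`):

```rust
// Statistics are computed on a sorted clone; the untouched `original_list`
// keeps the time-series ordering intact for the raw metrics.
fn summarize(original_list: Vec<u64>) -> Option<(u64, u64, Vec<u64>)> {
    let mut list = original_list.clone();
    list.sort();
    let min = *list.first()?;
    let median = list[list.len() / 2];
    Some((min, median, original_list))
}

fn main() {
    let (min, median, raw) = summarize(vec![30, 10, 20]).unwrap();
    assert_eq!((min, median), (10, 20));
    assert_eq!(raw, vec![30, 10, 20]); // original order preserved
}
```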
+10
@@ -347,6 +347,16 @@ macro_rules! define_event {
             ),*
         }

+        impl $ident {
+            pub fn variant_name(&self) -> &'static str {
+                match self {
+                    $(
+                        Self::$variant_ident { .. } => stringify!($variant_ident)
+                    ),*
+                }
+            }
+        }
+
         $(
             #[derive(Debug)]
             $(#[$variant_meta])*
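For readers unfamiliar with the macro, the added `variant_name` method expands to a plain match over the generated variants. Sketched by hand for a hypothetical two-variant enum (the real events are generated by `define_event!`):

```rust
#[allow(dead_code)]
enum RunnerEvent {
    SubscribeToEvents { subscriber: String },
    CaseCompleted { passed: bool },
}

impl RunnerEvent {
    /// Returns only the variant name, so log lines stay short instead of
    /// dumping the full `Debug` representation of a large payload.
    pub fn variant_name(&self) -> &'static str {
        match self {
            Self::SubscribeToEvents { .. } => "SubscribeToEvents",
            Self::CaseCompleted { .. } => "CaseCompleted",
        }
    }
}

fn main() {
    let event = RunnerEvent::CaseCompleted { passed: true };
    assert_eq!(event.variant_name(), "CaseCompleted");
}
```

This is what the `debug!(event = event.variant_name(), "Received Event")` change in the aggregator above relies on.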
+2 -5
@@ -22,7 +22,6 @@ POLKADOT_SDK_DIR="${1:-}"
 # Binary paths (default to names in $PATH)
 REVIVE_DEV_NODE_BIN="revive-dev-node"
 ETH_RPC_BIN="eth-rpc"
-SUBSTRATE_NODE_BIN="substrate-node"

 echo -e "${GREEN}=== Revive Differential Tests Quick Start ===${NC}"
 echo ""

@@ -50,14 +49,13 @@ if [ -n "$POLKADOT_SDK_DIR" ]; then
     REVIVE_DEV_NODE_BIN="$POLKADOT_SDK_DIR/target/release/revive-dev-node"
     ETH_RPC_BIN="$POLKADOT_SDK_DIR/target/release/eth-rpc"
-    SUBSTRATE_NODE_BIN="$POLKADOT_SDK_DIR/target/release/substrate-node"

-    if [ ! -x "$REVIVE_DEV_NODE_BIN" ] || [ ! -x "$ETH_RPC_BIN" ] || [ ! -x "$SUBSTRATE_NODE_BIN" ]; then
+    if [ ! -x "$REVIVE_DEV_NODE_BIN" ] || [ ! -x "$ETH_RPC_BIN" ]; then
         echo -e "${YELLOW}Required binaries not found in release target. Building...${NC}"
         (cd "$POLKADOT_SDK_DIR" && cargo build --release --package staging-node-cli --package pallet-revive-eth-rpc --package revive-dev-node)
     fi

-    for bin in "$REVIVE_DEV_NODE_BIN" "$ETH_RPC_BIN" "$SUBSTRATE_NODE_BIN"; do
+    for bin in "$REVIVE_DEV_NODE_BIN" "$ETH_RPC_BIN"; do
         if [ ! -x "$bin" ]; then
             echo -e "${RED}Expected binary not found after build: $bin${NC}"
             exit 1

@@ -84,7 +82,6 @@ RUST_LOG="info,alloy_pubsub::service=error" ./target/release/retester test \
     --concurrency.number-of-threads 5 \
     --concurrency.number-of-concurrent-tasks 500 \
     --wallet.additional-keys 100000 \
-    --kitchensink.path "$SUBSTRATE_NODE_BIN" \
     --revive-dev-node.path "$REVIVE_DEV_NODE_BIN" \
     --eth-rpc.path "$ETH_RPC_BIN" \
     > logs.log \
+246
@@ -0,0 +1,246 @@
"""
Utilities to print benchmark metrics from a report JSON into CSV.
Usage:
python scripts/print_benchmark_metrics_csv.py /absolute/path/to/report.json
The script prints, for each metadata path, case index, and mode combination,
CSV rows aligned to mined blocks with the following columns:
- block_number
- number_of_txs
- tps (transaction_per_second)
- gps (gas_per_second)
- gas_block_fullness
- ref_time (if available)
- max_ref_time (if available)
- proof_size (if available)
- max_proof_size (if available)
- ref_time_block_fullness (if available)
- proof_size_block_fullness (if available)
Important nuance: TPS and GPS arrays have (number_of_blocks - 1) items. The
first block row has no TPS/GPS; the CSV leaves those cells empty for the first
row and aligns subsequent values to their corresponding next block.
"""
from __future__ import annotations
import json
import sys
import csv
from typing import List, Mapping, TypedDict
class EthereumMinedBlockInformation(TypedDict):
"""EVM block information extracted from the report.
Attributes:
block_number: The block height.
block_timestamp: The UNIX timestamp of the block.
mined_gas: Total gas used (mined) in the block.
block_gas_limit: The gas limit of the block.
transaction_hashes: List of transaction hashes included in the block.
"""
block_number: int
block_timestamp: int
mined_gas: int
block_gas_limit: int
transaction_hashes: List[str]
class SubstrateMinedBlockInformation(TypedDict):
"""Substrate-specific block resource usage fields.
Attributes:
ref_time: The consumed ref time in the block.
max_ref_time: The maximum ref time allowed for the block.
proof_size: The consumed proof size in the block.
max_proof_size: The maximum proof size allowed for the block.
"""
ref_time: int
max_ref_time: int
proof_size: int
max_proof_size: int
class MinedBlockInformation(TypedDict):
"""Block-level information for a mined block with both EVM and optional Substrate fields."""
ethereum_block_information: EthereumMinedBlockInformation
substrate_block_information: SubstrateMinedBlockInformation
class Metric(TypedDict):
"""Metric data of integer values keyed by platform identifier.
Attributes:
minimum: Single scalar minimum per platform.
maximum: Single scalar maximum per platform.
mean: Single scalar mean per platform.
median: Single scalar median per platform.
raw: Time-series (or list) of values per platform.
"""
minimum: Mapping[str, int]
maximum: Mapping[str, int]
mean: Mapping[str, int]
median: Mapping[str, int]
raw: Mapping[str, List[int]]
class Metrics(TypedDict):
"""All metrics that may be present for a given execution report.
Note that some metrics are optional and present only for specific platforms
or execution modes.
"""
transaction_per_second: Metric
gas_per_second: Metric
gas_block_fullness: Metric
ref_time_block_fullness: Metric
proof_size_block_fullness: Metric
class ExecutionReport(TypedDict):
"""Execution report for a mode containing mined blocks and metrics.
Attributes:
mined_block_information: Mapping from platform identifier to the list of
mined blocks observed for that platform.
metrics: The computed metrics for the execution.
"""
mined_block_information: Mapping[str, List[MinedBlockInformation]]
metrics: Metrics
class CaseReport(TypedDict):
"""Report for a single case, keyed by mode string."""
mode_execution_reports: Mapping[str, ExecutionReport]
class MetadataFileReport(TypedDict):
"""Report subtree keyed by case indices for a metadata file path."""
case_reports: Mapping[str, CaseReport]
class ReportRoot(TypedDict):
"""Top-level report schema with execution information keyed by metadata path."""
execution_information: Mapping[str, MetadataFileReport]
BlockInformation = TypedDict(
"BlockInformation",
{
"Block Number": int,
"Timestamp": int,
"Datetime": None,
"Transaction Count": int,
"TPS": int | None,
"GPS": int | None,
"Ref Time": int,
"Max Ref Time": int,
"Block Fullness Ref Time": int,
"Proof Size": int,
"Max Proof Size": int,
"Block Fullness Proof Size": int,
},
)
"""A typed dictionary used to hold all of the block information"""
def load_report(path: str) -> ReportRoot:
"""Load the report JSON from disk.
Args:
path: Absolute or relative filesystem path to the JSON report file.
Returns:
The parsed report as a typed dictionary structure.
"""
with open(path, "r", encoding="utf-8") as f:
data: ReportRoot = json.load(f)
return data
def main() -> None:
report_path: str = sys.argv[1]
report: ReportRoot = load_report(report_path)
# TODO: Remove this in the future, but for now, the target is fixed.
target: str = "revive-dev-node-revm-solc"
csv_writer = csv.writer(sys.stdout)
for _, metadata_file_report in report["execution_information"].items():
for _, case_report in metadata_file_report["case_reports"].items():
for _, execution_report in case_report["mode_execution_reports"].items():
blocks_information: list[MinedBlockInformation] = execution_report[
"mined_block_information"
][target]
resolved_blocks: list[BlockInformation] = []
for i, block_information in enumerate(blocks_information):
resolved_blocks.append(
{
"Block Number": block_information[
"ethereum_block_information"
]["block_number"],
"Timestamp": block_information[
"ethereum_block_information"
]["block_timestamp"],
"Datetime": None,
"Transaction Count": len(
block_information["ethereum_block_information"][
"transaction_hashes"
]
),
"TPS": (
None
if i == 0
else execution_report["metrics"][
"transaction_per_second"
]["raw"][target][i - 1]
),
"GPS": (
None
if i == 0
else execution_report["metrics"]["gas_per_second"][
"raw"
][target][i - 1]
),
"Ref Time": block_information[
"substrate_block_information"
]["ref_time"],
"Max Ref Time": block_information[
"substrate_block_information"
]["max_ref_time"],
"Block Fullness Ref Time": execution_report["metrics"][
"ref_time_block_fullness"
]["raw"][target][i],
"Proof Size": block_information[
"substrate_block_information"
]["proof_size"],
"Max Proof Size": block_information[
"substrate_block_information"
]["max_proof_size"],
"Block Fullness Proof Size": execution_report["metrics"][
"proof_size_block_fullness"
]["raw"][target][i],
}
)
csv_writer = csv.DictWriter(sys.stdout, resolved_blocks[0].keys())
csv_writer.writeheader()
csv_writer.writerows(resolved_blocks)
if __name__ == "__main__":
main()
@@ -0,0 +1,226 @@
"""
This script is used to turn the JSON report produced by the revive differential tests tool into an
easy to consume markdown document for the purpose of reporting this information in the Polkadot SDK
CI. The full models used in the JSON report can be found in the revive differential tests repo and
the models used in this script are just a partial reproduction of the full report models.
"""
from typing import TypedDict, Literal, Union
import json, io
class Report(TypedDict):
context: "Context"
execution_information: dict[
"MetadataFilePathString",
dict["ModeString", dict["CaseIdxString", "CaseReport"]],
]
class Context(TypedDict):
Test: "TestContext"
class TestContext(TypedDict):
corpus_configuration: "CorpusConfiguration"
class CorpusConfiguration(TypedDict):
test_specifiers: list["TestSpecifier"]
class CaseReport(TypedDict):
status: "CaseStatus"
class CaseStatusSuccess(TypedDict):
status: Literal["Succeeded"]
steps_executed: int
class CaseStatusFailure(TypedDict):
status: Literal["Failed"]
reason: str
class CaseStatusIgnored(TypedDict):
status: Literal["Ignored"]
reason: str
CaseStatus = Union[CaseStatusSuccess, CaseStatusFailure, CaseStatusIgnored]
"""A union type of all of the possible statuses that could be reported for a case."""
TestSpecifier = str
"""A test specifier string. For example resolc-compiler-tests/fixtures/solidity/test.json::0::Y+"""
ModeString = str
"""The mode string. For example Y+ >=0.8.13"""
MetadataFilePathString = str
"""The path to a metadata file. For example resolc-compiler-tests/fixtures/solidity/test.json"""
CaseIdxString = str
"""The index of a case as a string. For example '0'"""
def path_relative_to_resolc_compiler_test_directory(path: str) -> str:
"""
Given a path, this function returns the path relative to the resolc-compiler-test directory. The
following is an example of an input and an output:
Input: ~/polkadot-sdk/revive-differential-tests/resolc-compiler-tests/fixtures/solidity/test.json
Output: test.json
"""
return f"{path.split('resolc-compiler-tests/fixtures/solidity')[-1].strip('/')}"
def main() -> None:
with open("report.json", "r") as file:
report: Report = json.load(file)
# Starting the markdown document and adding information to it as we go.
markdown_document: io.TextIOWrapper = open("report.md", "w")
print("# Differential Tests Results", file=markdown_document)
# Getting all of the test specifiers from the report and making them relative to the tests dir.
test_specifiers: list[str] = list(
map(
path_relative_to_resolc_compiler_test_directory,
report["context"]["Test"]["corpus_configuration"]["test_specifiers"],
)
)
print("## Specified Tests", file=markdown_document)
for test_specifier in test_specifiers:
print(f"* `{test_specifier}`", file=markdown_document)
# Counting the total number of test cases, successes, failures, and ignored tests
total_number_of_cases: int = 0
total_number_of_successes: int = 0
total_number_of_failures: int = 0
total_number_of_ignores: int = 0
for _, mode_to_case_mapping in report["execution_information"].items():
for _, case_idx_to_report_mapping in mode_to_case_mapping.items():
for _, case_report in case_idx_to_report_mapping.items():
status: CaseStatus = case_report["status"]
total_number_of_cases += 1
if status["status"] == "Succeeded":
total_number_of_successes += 1
elif status["status"] == "Failed":
total_number_of_failures += 1
elif status["status"] == "Ignored":
total_number_of_ignores += 1
else:
raise Exception(
f"Encountered a status that's unknown to the script: {status}"
)
print("## Counts", file=markdown_document)
print(
f"* **Total Number of Test Cases:** {total_number_of_cases}",
file=markdown_document,
)
print(
f"* **Total Number of Successes:** {total_number_of_successes}",
file=markdown_document,
)
print(
f"* **Total Number of Failures:** {total_number_of_failures}",
file=markdown_document,
)
print(
f"* **Total Number of Ignores:** {total_number_of_ignores}",
file=markdown_document,
)
# Grouping the various test cases into dictionaries and groups depending on their status to make
# them easier to include in the markdown document later on.
successful_cases: dict[
MetadataFilePathString, dict[CaseIdxString, set[ModeString]]
] = {}
for metadata_file_path, mode_to_case_mapping in report[
"execution_information"
].items():
for mode_string, case_idx_to_report_mapping in mode_to_case_mapping.items():
for case_idx_string, case_report in case_idx_to_report_mapping.items():
status: CaseStatus = case_report["status"]
metadata_file_path: str = (
path_relative_to_resolc_compiler_test_directory(metadata_file_path)
)
mode_string: str = mode_string.replace(" M3", "+").replace(" M0", "-")
if status["status"] == "Succeeded":
successful_cases.setdefault(
metadata_file_path,
{},
).setdefault(
case_idx_string, set()
).add(mode_string)
print("## Failures", file=markdown_document)
print(
"The test specifiers seen in this section have the format 'path::case_idx::compilation_mode'\
and they're compatible with the revive differential tests framework and can be specified\
to it directly in the same way that they're provided through the `--test` argument of the\
framework.\n",
file=markdown_document,
)
print(
"The failures are provided in an expandable section to ensure that the PR does not get \
polluted with information. Please click on the section below for more information",
file=markdown_document,
)
print(
"<details><summary>Detailed Differential Tests Failure Information</summary>\n\n",
file=markdown_document,
)
print("| Test Specifier | Failure Reason | Note |", file=markdown_document)
print("| -- | -- | -- |", file=markdown_document)
for metadata_file_path, mode_to_case_mapping in report[
"execution_information"
].items():
for mode_string, case_idx_to_report_mapping in mode_to_case_mapping.items():
for case_idx_string, case_report in case_idx_to_report_mapping.items():
status: CaseStatus = case_report["status"]
metadata_file_path: str = (
path_relative_to_resolc_compiler_test_directory(metadata_file_path)
)
mode_string: str = mode_string.replace(" M3", "+").replace(" M0", "-")
if status["status"] != "Failed":
continue
failure_reason: str = status["reason"].replace("\n", " ")
note: str = ""
modes_where_this_case_succeeded: set[ModeString] = (
successful_cases.setdefault(
metadata_file_path,
{},
).setdefault(case_idx_string, set())
)
if len(modes_where_this_case_succeeded) != 0:
note: str = (
f"This test case succeeded with other compilation modes: {modes_where_this_case_succeeded}"
)
test_specifier: str = (
f"{metadata_file_path}::{case_idx_string}::{mode_string}"
)
print(
f"| `{test_specifier}` | `{failure_reason}` | {note} |",
file=markdown_document,
)
print("\n\n</details>", file=markdown_document)
# The primary downside of not using `with`, but I guess it's better since I don't want to over
# indent the code.
markdown_document.close()
if __name__ == "__main__":
main()