mirror of
https://github.com/pezkuwichain/revive-differential-tests.git
synced 2026-04-22 07:57:58 +00:00
Compare commits
6 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 9b75a4f236 | |||
| 2af1a62319 | |||
| e09be4f3fa | |||
| 33b5faca45 | |||
| 172fb4700f | |||
| fefea17c8e |
+19
-12
@@ -51,22 +51,22 @@ jobs:
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/substrate-node
|
||||
~/.cargo/bin/revive-dev-node
|
||||
~/.cargo/bin/eth-rpc
|
||||
key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}
|
||||
key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}-with-dev-node
|
||||
|
||||
- name: Build substrate-node
|
||||
- name: Build revive-dev-node
|
||||
if: steps.cache.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
cd polkadot-sdk
|
||||
cargo install --locked --force --profile=production --path substrate/bin/node/cli --bin substrate-node --features cli
|
||||
cargo install --locked --force --profile=production --path substrate/frame/revive/dev-node/node --bin revive-dev-node
|
||||
|
||||
- name: Build eth-rpc
|
||||
if: steps.cache.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
cd polkadot-sdk
|
||||
cargo install --path substrate/frame/revive/rpc --bin eth-rpc
|
||||
|
||||
|
||||
- name: Cache downloaded Polkadot binaries
|
||||
id: cache-polkadot
|
||||
uses: actions/cache@v3
|
||||
@@ -109,14 +109,16 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
|
||||
- name: Restore binaries from cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: |
|
||||
~/.cargo/bin/substrate-node
|
||||
~/.cargo/bin/revive-dev-node
|
||||
~/.cargo/bin/eth-rpc
|
||||
key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}
|
||||
key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}-with-dev-node
|
||||
|
||||
- name: Restore downloaded Polkadot binaries from cache
|
||||
uses: actions/cache@v3
|
||||
@@ -202,8 +204,13 @@ jobs:
|
||||
sudo apt update
|
||||
sudo apt install kurtosis-cli
|
||||
|
||||
- name: Install cargo-machete
|
||||
uses: clechasseur/rs-cargo@v2
|
||||
with:
|
||||
command: install
|
||||
args: cargo-machete@0.7.0
|
||||
- name: Machete
|
||||
uses: bnjbvr/cargo-machete@v0.7.1
|
||||
run: cargo machete crates
|
||||
|
||||
- name: Format
|
||||
run: make format
|
||||
@@ -211,8 +218,8 @@ jobs:
|
||||
- name: Clippy
|
||||
run: make clippy
|
||||
|
||||
- name: Check substrate-node version
|
||||
run: substrate-node --version
|
||||
- name: Check revive-dev-node version
|
||||
run: revive-dev-node --version
|
||||
|
||||
- name: Check eth-rpc version
|
||||
run: eth-rpc --version
|
||||
@@ -222,13 +229,13 @@ jobs:
|
||||
|
||||
- name: Check polkadot version
|
||||
run: polkadot --version
|
||||
|
||||
|
||||
- name: Check polkadot-parachain version
|
||||
run: polkadot-parachain --version
|
||||
|
||||
- name: Check polkadot-execute-worker version
|
||||
run: polkadot-execute-worker --version
|
||||
|
||||
|
||||
- name: Check polkadot-prepare-worker version
|
||||
run: polkadot-prepare-worker --version
|
||||
|
||||
|
||||
Generated
+11
@@ -1920,6 +1920,7 @@ dependencies = [
|
||||
"anstyle",
|
||||
"clap_lex",
|
||||
"strsim",
|
||||
"terminal_size",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -7838,6 +7839,16 @@ dependencies = [
|
||||
"winapi-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "terminal_size"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "45c6481c4829e4cc63825e62c49186a34538b7b2750b73b266581ffb612fb5ed"
|
||||
dependencies = [
|
||||
"rustix",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.69"
|
||||
|
||||
+1
-1
@@ -26,7 +26,7 @@ ansi_term = "0.12.1"
|
||||
anyhow = "1.0"
|
||||
bson = { version = "2.15.0" }
|
||||
cacache = { version = "13.1.0" }
|
||||
clap = { version = "4", features = ["derive"] }
|
||||
clap = { version = "4", features = ["derive", "wrap_help"] }
|
||||
dashmap = { version = "6.1.0" }
|
||||
foundry-compilers-artifacts = { version = "0.18.0" }
|
||||
futures = { version = "0.3.31" }
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
This project compiles and executes declarative smart-contract tests against multiple platforms, then compares behavior (status, return data, events, and state diffs). Today it supports:
|
||||
|
||||
- Geth (EVM reference implementation)
|
||||
- Revive Kitchensink (Substrate-based PolkaVM + `eth-rpc` proxy)
|
||||
- Revive Dev Node (Substrate-based PolkaVM + `eth-rpc` proxy)
|
||||
|
||||
Use it to:
|
||||
|
||||
@@ -39,9 +39,9 @@ This repository contains none of the tests and only contains the testing framewo
|
||||
This section describes the required dependencies that this framework requires to run. Compiling this framework is pretty straightforward and no additional dependencies beyond what's specified in the `Cargo.toml` file should be required.
|
||||
|
||||
- Stable Rust
|
||||
- Geth - When doing differential testing against the PVM we submit transactions to a Geth node and to Kitchensink to compare them.
|
||||
- Kitchensink - When doing differential testing against the PVM we submit transactions to a Geth node and to Kitchensink to compare them.
|
||||
- ETH-RPC - All communication with Kitchensink is done through the ETH RPC.
|
||||
- Geth - When doing differential testing against the PVM we submit transactions to a Geth node and to Revive Dev Node to compare them.
|
||||
- Revive Dev Node - When doing differential testing against the PVM we submit transactions to a Geth node and to Revive Dev Node to compare them.
|
||||
- ETH-RPC - All communication with Revive Dev Node is done through the ETH RPC.
|
||||
- Solc - This is actually a transitive dependency, while this tool doesn't require solc as it downloads the versions that it requires, resolc requires that Solc is installed and available in the path.
|
||||
- Resolc - This is required to compile the contracts to PolkaVM bytecode.
|
||||
- Kurtosis - The Kurtosis CLI tool is required for the production Ethereum mainnet-like node configuration with Geth as the execution layer and lighthouse as the consensus layer. Kurtosis also requires docker to be installed since it runs everything inside of docker containers.
|
||||
|
||||
@@ -31,10 +31,6 @@ pub enum PlatformIdentifier {
|
||||
GethEvmSolc,
|
||||
/// The Lighthouse Go-ethereum reference full node EVM implementation with the solc compiler.
|
||||
LighthouseGethEvmSolc,
|
||||
/// The kitchensink node with the PolkaVM backend with the resolc compiler.
|
||||
KitchensinkPolkavmResolc,
|
||||
/// The kitchensink node with the REVM backend with the solc compiler.
|
||||
KitchensinkRevmSolc,
|
||||
/// The revive dev node with the PolkaVM backend with the resolc compiler.
|
||||
ReviveDevNodePolkavmResolc,
|
||||
/// The revive dev node with the REVM backend with the solc compiler.
|
||||
@@ -95,8 +91,6 @@ pub enum NodeIdentifier {
|
||||
Geth,
|
||||
/// The go-ethereum node implementation.
|
||||
LighthouseGeth,
|
||||
/// The Kitchensink node implementation.
|
||||
Kitchensink,
|
||||
/// The revive dev node implementation.
|
||||
ReviveDevNode,
|
||||
/// A zombienet spawned nodes
|
||||
|
||||
@@ -24,7 +24,7 @@ use strum::{AsRefStr, Display, EnumString, IntoStaticStr};
|
||||
use temp_dir::TempDir;
|
||||
|
||||
#[derive(Clone, Debug, Parser, Serialize, Deserialize)]
|
||||
#[command(name = "retester")]
|
||||
#[command(name = "retester", term_width = 100)]
|
||||
pub enum Context {
|
||||
/// Executes tests in the MatterLabs format differentially on multiple targets concurrently.
|
||||
Test(Box<TestExecutionContext>),
|
||||
@@ -131,17 +131,6 @@ impl AsRef<PolkadotParachainConfiguration> for Context {
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<KitchensinkConfiguration> for Context {
|
||||
fn as_ref(&self) -> &KitchensinkConfiguration {
|
||||
match self {
|
||||
Self::Test(context) => context.as_ref().as_ref(),
|
||||
Self::Benchmark(context) => context.as_ref().as_ref(),
|
||||
Self::ExportGenesis(context) => context.as_ref().as_ref(),
|
||||
Self::ExportJsonSchema => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<ReviveDevNodeConfiguration> for Context {
|
||||
fn as_ref(&self) -> &ReviveDevNodeConfiguration {
|
||||
match self {
|
||||
@@ -283,10 +272,6 @@ pub struct TestExecutionContext {
|
||||
#[clap(flatten, next_help_heading = "Lighthouse Configuration")]
|
||||
pub lighthouse_configuration: KurtosisConfiguration,
|
||||
|
||||
/// Configuration parameters for the Kitchensink.
|
||||
#[clap(flatten, next_help_heading = "Kitchensink Configuration")]
|
||||
pub kitchensink_configuration: KitchensinkConfiguration,
|
||||
|
||||
/// Configuration parameters for the Revive Dev Node.
|
||||
#[clap(flatten, next_help_heading = "Revive Dev Node Configuration")]
|
||||
pub revive_dev_node_configuration: ReviveDevNodeConfiguration,
|
||||
@@ -409,10 +394,6 @@ pub struct BenchmarkingContext {
|
||||
#[clap(flatten, next_help_heading = "Lighthouse Configuration")]
|
||||
pub lighthouse_configuration: KurtosisConfiguration,
|
||||
|
||||
/// Configuration parameters for the Kitchensink.
|
||||
#[clap(flatten, next_help_heading = "Kitchensink Configuration")]
|
||||
pub kitchensink_configuration: KitchensinkConfiguration,
|
||||
|
||||
/// Configuration parameters for the Polkadot Parachain.
|
||||
#[clap(flatten, next_help_heading = "Polkadot Parachain Configuration")]
|
||||
pub polkadot_parachain_configuration: PolkadotParachainConfiguration,
|
||||
@@ -491,10 +472,6 @@ pub struct ExportGenesisContext {
|
||||
#[clap(flatten, next_help_heading = "Lighthouse Configuration")]
|
||||
pub lighthouse_configuration: KurtosisConfiguration,
|
||||
|
||||
/// Configuration parameters for the Kitchensink.
|
||||
#[clap(flatten, next_help_heading = "Kitchensink Configuration")]
|
||||
pub kitchensink_configuration: KitchensinkConfiguration,
|
||||
|
||||
/// Configuration parameters for the Polkadot Parachain.
|
||||
#[clap(flatten, next_help_heading = "Polkadot Parachain Configuration")]
|
||||
pub polkadot_parachain_configuration: PolkadotParachainConfiguration,
|
||||
@@ -510,7 +487,7 @@ pub struct ExportGenesisContext {
|
||||
|
||||
impl Default for TestExecutionContext {
|
||||
fn default() -> Self {
|
||||
Self::parse_from(["execution-context"])
|
||||
Self::parse_from(["execution-context", "--test", "."])
|
||||
}
|
||||
}
|
||||
|
||||
@@ -556,12 +533,6 @@ impl AsRef<KurtosisConfiguration> for TestExecutionContext {
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<KitchensinkConfiguration> for TestExecutionContext {
|
||||
fn as_ref(&self) -> &KitchensinkConfiguration {
|
||||
&self.kitchensink_configuration
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<ReviveDevNodeConfiguration> for TestExecutionContext {
|
||||
fn as_ref(&self) -> &ReviveDevNodeConfiguration {
|
||||
&self.revive_dev_node_configuration
|
||||
@@ -612,7 +583,7 @@ impl AsRef<IgnoreSuccessConfiguration> for TestExecutionContext {
|
||||
|
||||
impl Default for BenchmarkingContext {
|
||||
fn default() -> Self {
|
||||
Self::parse_from(["benchmarking-context"])
|
||||
Self::parse_from(["benchmarking-context", "--test", "."])
|
||||
}
|
||||
}
|
||||
|
||||
@@ -658,12 +629,6 @@ impl AsRef<PolkadotParachainConfiguration> for BenchmarkingContext {
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<KitchensinkConfiguration> for BenchmarkingContext {
|
||||
fn as_ref(&self) -> &KitchensinkConfiguration {
|
||||
&self.kitchensink_configuration
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<ReviveDevNodeConfiguration> for BenchmarkingContext {
|
||||
fn as_ref(&self) -> &ReviveDevNodeConfiguration {
|
||||
&self.revive_dev_node_configuration
|
||||
@@ -718,12 +683,6 @@ impl AsRef<KurtosisConfiguration> for ExportGenesisContext {
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<KitchensinkConfiguration> for ExportGenesisContext {
|
||||
fn as_ref(&self) -> &KitchensinkConfiguration {
|
||||
&self.kitchensink_configuration
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<PolkadotParachainConfiguration> for ExportGenesisContext {
|
||||
fn as_ref(&self) -> &PolkadotParachainConfiguration {
|
||||
&self.polkadot_parachain_configuration
|
||||
@@ -759,7 +718,7 @@ pub struct CorpusConfiguration {
|
||||
/// - `{metadata_file_path}::{case_idx}::{mode}`: This is very similar to the above specifier
|
||||
/// with the exception that in this case the mode is specified and will be used in the test.
|
||||
#[serde_as(as = "Vec<serde_with::DisplayFromStr>")]
|
||||
#[arg(short = 't', long = "test")]
|
||||
#[arg(short = 't', long = "test", required = true)]
|
||||
pub test_specifiers: Vec<ParsedTestSpecifier>,
|
||||
}
|
||||
|
||||
@@ -842,30 +801,6 @@ pub struct KurtosisConfiguration {
|
||||
pub path: PathBuf,
|
||||
}
|
||||
|
||||
/// A set of configuration parameters for Kitchensink.
|
||||
#[derive(Clone, Debug, Parser, Serialize, Deserialize)]
|
||||
pub struct KitchensinkConfiguration {
|
||||
/// Specifies the path of the kitchensink node to be used by the tool.
|
||||
///
|
||||
/// If this is not specified, then the tool assumes that it should use the kitchensink binary
|
||||
/// that's provided in the user's $PATH.
|
||||
#[clap(
|
||||
id = "kitchensink.path",
|
||||
long = "kitchensink.path",
|
||||
default_value = "substrate-node"
|
||||
)]
|
||||
pub path: PathBuf,
|
||||
|
||||
/// The amount of time to wait upon startup before considering that the node timed out.
|
||||
#[clap(
|
||||
id = "kitchensink.start-timeout-ms",
|
||||
long = "kitchensink.start-timeout-ms",
|
||||
default_value = "30000",
|
||||
value_parser = parse_duration
|
||||
)]
|
||||
pub start_timeout_ms: Duration,
|
||||
}
|
||||
|
||||
/// A set of configuration parameters for the revive dev node.
|
||||
#[derive(Clone, Debug, Parser, Serialize, Deserialize)]
|
||||
pub struct ReviveDevNodeConfiguration {
|
||||
@@ -1161,35 +1096,6 @@ fn parse_duration(s: &str) -> anyhow::Result<Duration> {
|
||||
.map_err(Into::into)
|
||||
}
|
||||
|
||||
/// The Solidity compatible node implementation.
|
||||
///
|
||||
/// This describes the solutions to be tested against on a high level.
|
||||
#[derive(
|
||||
Clone,
|
||||
Copy,
|
||||
Debug,
|
||||
PartialEq,
|
||||
Eq,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
Hash,
|
||||
Serialize,
|
||||
ValueEnum,
|
||||
EnumString,
|
||||
Display,
|
||||
AsRefStr,
|
||||
IntoStaticStr,
|
||||
)]
|
||||
#[strum(serialize_all = "kebab-case")]
|
||||
pub enum TestingPlatform {
|
||||
/// The go-ethereum reference full node EVM implementation.
|
||||
Geth,
|
||||
/// The kitchensink runtime provides the PolkaVM (PVM) based node implementation.
|
||||
Kitchensink,
|
||||
/// A polkadot/Substrate based network
|
||||
Zombienet,
|
||||
}
|
||||
|
||||
/// The output format to use for the test execution output.
|
||||
#[derive(
|
||||
Clone,
|
||||
|
||||
@@ -112,12 +112,23 @@ impl Watcher {
|
||||
let all_transactions_submitted = all_transactions_submitted.clone();
|
||||
let mut blocks_information_stream = self.blocks_stream;
|
||||
async move {
|
||||
while let Some(block) = blocks_information_stream.next().await {
|
||||
while let Some(mut block) = blocks_information_stream.next().await {
|
||||
// If the block number is equal to or less than the last block before the
|
||||
// repetition then we ignore it and continue on to the next block.
|
||||
if block.ethereum_block_information.block_number <= ignore_block_before {
|
||||
continue;
|
||||
}
|
||||
{
|
||||
let watch_for_transaction_hashes =
|
||||
watch_for_transaction_hashes.read().await;
|
||||
for tx_hash in block.ethereum_block_information.transaction_hashes.iter() {
|
||||
let Some((step_path, _)) = watch_for_transaction_hashes.get(tx_hash)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
*block.tx_counts.entry(step_path.clone()).or_default() += 1
|
||||
}
|
||||
}
|
||||
reporter
|
||||
.report_block_mined_event(block.clone())
|
||||
.expect("Can't fail");
|
||||
@@ -189,7 +200,6 @@ pub enum WatcherEvent {
|
||||
/// streaming the blocks.
|
||||
ignore_block_before: BlockNumber,
|
||||
},
|
||||
|
||||
/// Informs the watcher that a transaction was submitted and that the watcher should watch for a
|
||||
/// transaction with this hash in the blocks that it watches.
|
||||
SubmittedTransaction {
|
||||
@@ -198,7 +208,6 @@ pub enum WatcherEvent {
|
||||
/// The step path of the step that the transaction belongs to.
|
||||
step_path: StepPath,
|
||||
},
|
||||
|
||||
/// Informs the watcher that all of the transactions of this benchmark have been submitted and
|
||||
/// that it can expect to receive no further transaction hashes and not even watch the channel
|
||||
/// any longer.
|
||||
|
||||
@@ -353,7 +353,8 @@ where
|
||||
.execute_account_allocation(step_path, step.as_ref())
|
||||
.await
|
||||
.context("Account Allocation Step Failed"),
|
||||
}?;
|
||||
}
|
||||
.context(format!("Failure on step {step_path}"))?;
|
||||
self.steps_executed += steps_executed;
|
||||
Ok(())
|
||||
}
|
||||
@@ -597,15 +598,20 @@ where
|
||||
let expected = !assertion.exception;
|
||||
let actual = receipt.status();
|
||||
if actual != expected {
|
||||
let revert_reason = tracing_result
|
||||
.revert_reason
|
||||
.as_ref()
|
||||
.or(tracing_result.error.as_ref());
|
||||
tracing::error!(
|
||||
expected,
|
||||
actual,
|
||||
?receipt,
|
||||
?tracing_result,
|
||||
?revert_reason,
|
||||
"Transaction status assertion failed"
|
||||
);
|
||||
anyhow::bail!(
|
||||
"Transaction status assertion failed - Expected {expected} but got {actual}",
|
||||
"Transaction status assertion failed - Expected {expected} but got {actual}. Revert reason: {revert_reason:?}",
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -330,15 +330,18 @@ async fn start_cli_reporting_task(output_format: OutputFormat, reporter: Reporte
|
||||
.unwrap();
|
||||
writeln!(buf).unwrap();
|
||||
|
||||
buf = tokio::task::spawn_blocking(move || {
|
||||
buf.flush().unwrap();
|
||||
buf
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
if aggregator_events_rx.is_empty() {
|
||||
buf = tokio::task::spawn_blocking(move || {
|
||||
buf.flush().unwrap();
|
||||
buf
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
info!("Aggregator Broadcast Channel Closed");
|
||||
|
||||
// Summary at the end.
|
||||
match output_format {
|
||||
|
||||
@@ -172,134 +172,6 @@ impl Platform for LighthouseGethEvmSolcPlatform {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash)]
|
||||
pub struct KitchensinkPolkavmResolcPlatform;
|
||||
|
||||
impl Platform for KitchensinkPolkavmResolcPlatform {
|
||||
fn platform_identifier(&self) -> PlatformIdentifier {
|
||||
PlatformIdentifier::KitchensinkPolkavmResolc
|
||||
}
|
||||
|
||||
fn node_identifier(&self) -> NodeIdentifier {
|
||||
NodeIdentifier::Kitchensink
|
||||
}
|
||||
|
||||
fn vm_identifier(&self) -> VmIdentifier {
|
||||
VmIdentifier::PolkaVM
|
||||
}
|
||||
|
||||
fn compiler_identifier(&self) -> CompilerIdentifier {
|
||||
CompilerIdentifier::Resolc
|
||||
}
|
||||
|
||||
fn new_node(
|
||||
&self,
|
||||
context: Context,
|
||||
) -> anyhow::Result<JoinHandle<anyhow::Result<Box<dyn EthereumNode + Send + Sync>>>> {
|
||||
let genesis_configuration = AsRef::<GenesisConfiguration>::as_ref(&context);
|
||||
let kitchensink_path = AsRef::<KitchensinkConfiguration>::as_ref(&context)
|
||||
.path
|
||||
.clone();
|
||||
let genesis = genesis_configuration.genesis()?.clone();
|
||||
Ok(thread::spawn(move || {
|
||||
let node = SubstrateNode::new(
|
||||
kitchensink_path,
|
||||
SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND,
|
||||
None,
|
||||
context,
|
||||
&[],
|
||||
);
|
||||
let node = spawn_node(node, genesis)?;
|
||||
Ok(Box::new(node) as Box<_>)
|
||||
}))
|
||||
}
|
||||
|
||||
fn new_compiler(
|
||||
&self,
|
||||
context: Context,
|
||||
version: Option<VersionOrRequirement>,
|
||||
) -> Pin<Box<dyn Future<Output = anyhow::Result<Box<dyn SolidityCompiler>>>>> {
|
||||
Box::pin(async move {
|
||||
let compiler = Resolc::new(context, version).await;
|
||||
compiler.map(|compiler| Box::new(compiler) as Box<dyn SolidityCompiler>)
|
||||
})
|
||||
}
|
||||
|
||||
fn export_genesis(&self, context: Context) -> anyhow::Result<serde_json::Value> {
|
||||
let kitchensink_path = AsRef::<KitchensinkConfiguration>::as_ref(&context)
|
||||
.path
|
||||
.as_path();
|
||||
let wallet = AsRef::<WalletConfiguration>::as_ref(&context).wallet();
|
||||
let export_chainspec_command = SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND;
|
||||
|
||||
SubstrateNode::node_genesis(kitchensink_path, export_chainspec_command, &wallet)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash)]
|
||||
pub struct KitchensinkRevmSolcPlatform;
|
||||
|
||||
impl Platform for KitchensinkRevmSolcPlatform {
|
||||
fn platform_identifier(&self) -> PlatformIdentifier {
|
||||
PlatformIdentifier::KitchensinkRevmSolc
|
||||
}
|
||||
|
||||
fn node_identifier(&self) -> NodeIdentifier {
|
||||
NodeIdentifier::Kitchensink
|
||||
}
|
||||
|
||||
fn vm_identifier(&self) -> VmIdentifier {
|
||||
VmIdentifier::Evm
|
||||
}
|
||||
|
||||
fn compiler_identifier(&self) -> CompilerIdentifier {
|
||||
CompilerIdentifier::Solc
|
||||
}
|
||||
|
||||
fn new_node(
|
||||
&self,
|
||||
context: Context,
|
||||
) -> anyhow::Result<JoinHandle<anyhow::Result<Box<dyn EthereumNode + Send + Sync>>>> {
|
||||
let genesis_configuration = AsRef::<GenesisConfiguration>::as_ref(&context);
|
||||
let kitchensink_path = AsRef::<KitchensinkConfiguration>::as_ref(&context)
|
||||
.path
|
||||
.clone();
|
||||
let genesis = genesis_configuration.genesis()?.clone();
|
||||
Ok(thread::spawn(move || {
|
||||
let node = SubstrateNode::new(
|
||||
kitchensink_path,
|
||||
SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND,
|
||||
None,
|
||||
context,
|
||||
&[],
|
||||
);
|
||||
let node = spawn_node(node, genesis)?;
|
||||
Ok(Box::new(node) as Box<_>)
|
||||
}))
|
||||
}
|
||||
|
||||
fn new_compiler(
|
||||
&self,
|
||||
context: Context,
|
||||
version: Option<VersionOrRequirement>,
|
||||
) -> Pin<Box<dyn Future<Output = anyhow::Result<Box<dyn SolidityCompiler>>>>> {
|
||||
Box::pin(async move {
|
||||
let compiler = Solc::new(context, version).await;
|
||||
compiler.map(|compiler| Box::new(compiler) as Box<dyn SolidityCompiler>)
|
||||
})
|
||||
}
|
||||
|
||||
fn export_genesis(&self, context: Context) -> anyhow::Result<serde_json::Value> {
|
||||
let kitchensink_path = AsRef::<KitchensinkConfiguration>::as_ref(&context)
|
||||
.path
|
||||
.as_path();
|
||||
let wallet = AsRef::<WalletConfiguration>::as_ref(&context).wallet();
|
||||
let export_chainspec_command = SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND;
|
||||
|
||||
SubstrateNode::node_genesis(kitchensink_path, export_chainspec_command, &wallet)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash)]
|
||||
pub struct ReviveDevNodePolkavmResolcPlatform;
|
||||
|
||||
@@ -557,12 +429,6 @@ impl From<PlatformIdentifier> for Box<dyn Platform> {
|
||||
PlatformIdentifier::LighthouseGethEvmSolc => {
|
||||
Box::new(LighthouseGethEvmSolcPlatform) as Box<_>
|
||||
}
|
||||
PlatformIdentifier::KitchensinkPolkavmResolc => {
|
||||
Box::new(KitchensinkPolkavmResolcPlatform) as Box<_>
|
||||
}
|
||||
PlatformIdentifier::KitchensinkRevmSolc => {
|
||||
Box::new(KitchensinkRevmSolcPlatform) as Box<_>
|
||||
}
|
||||
PlatformIdentifier::ReviveDevNodePolkavmResolc => {
|
||||
Box::new(ReviveDevNodePolkavmResolcPlatform) as Box<_>
|
||||
}
|
||||
@@ -584,12 +450,6 @@ impl From<PlatformIdentifier> for &dyn Platform {
|
||||
PlatformIdentifier::LighthouseGethEvmSolc => {
|
||||
&LighthouseGethEvmSolcPlatform as &dyn Platform
|
||||
}
|
||||
PlatformIdentifier::KitchensinkPolkavmResolc => {
|
||||
&KitchensinkPolkavmResolcPlatform as &dyn Platform
|
||||
}
|
||||
PlatformIdentifier::KitchensinkRevmSolc => {
|
||||
&KitchensinkRevmSolcPlatform as &dyn Platform
|
||||
}
|
||||
PlatformIdentifier::ReviveDevNodePolkavmResolc => {
|
||||
&ReviveDevNodePolkavmResolcPlatform as &dyn Platform
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use std::{collections::HashMap, fmt::Display, str::FromStr};
|
||||
|
||||
use alloy::hex::ToHexExt;
|
||||
use alloy::primitives::{FixedBytes, utils::parse_units};
|
||||
use alloy::{
|
||||
eips::BlockNumberOrTag,
|
||||
@@ -686,8 +687,8 @@ impl Calldata {
|
||||
Calldata::Compound(items) => {
|
||||
stream::iter(items.iter().zip(other.chunks(32)))
|
||||
.map(|(this, other)| async move {
|
||||
// The matterlabs format supports wildcards and therefore we
|
||||
// also need to support them.
|
||||
// The MatterLabs format supports wildcards and therefore we also need to
|
||||
// support them.
|
||||
if this.as_ref() == "*" {
|
||||
return Ok::<_, anyhow::Error>(true);
|
||||
}
|
||||
@@ -768,7 +769,14 @@ impl CalldataItem {
|
||||
match stack.as_slice() {
|
||||
// Empty stack means that we got an empty compound calldata which we resolve to zero.
|
||||
[] => Ok(U256::ZERO),
|
||||
[CalldataToken::Item(item)] => Ok(*item),
|
||||
[CalldataToken::Item(item)] => {
|
||||
tracing::debug!(
|
||||
original_item = ?self,
|
||||
resolved_item = item.to_be_bytes::<32>().encode_hex(),
|
||||
"Resolution Done"
|
||||
);
|
||||
Ok(*item)
|
||||
}
|
||||
_ => Err(anyhow::anyhow!(
|
||||
"Invalid calldata arithmetic operation - Invalid stack"
|
||||
)),
|
||||
|
||||
@@ -540,6 +540,7 @@ impl EthereumNode for GethNode {
|
||||
.to_vec(),
|
||||
},
|
||||
substrate_block_information: None,
|
||||
tx_counts: Default::default(),
|
||||
})
|
||||
});
|
||||
|
||||
|
||||
@@ -771,6 +771,7 @@ impl EthereumNode for LighthouseGethNode {
|
||||
.to_vec(),
|
||||
},
|
||||
substrate_block_information: None,
|
||||
tx_counts: Default::default(),
|
||||
})
|
||||
});
|
||||
|
||||
|
||||
@@ -92,7 +92,6 @@ impl SubstrateNode {
|
||||
const SUBSTRATE_LOG_ENV: &str = "error,evm=debug,sc_rpc_server=info,runtime::revive=debug";
|
||||
const PROXY_LOG_ENV: &str = "info,eth-rpc=debug";
|
||||
|
||||
pub const KITCHENSINK_EXPORT_CHAINSPEC_COMMAND: &str = "export-chain-spec";
|
||||
pub const REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND: &str = "build-spec";
|
||||
|
||||
pub fn new(
|
||||
@@ -333,7 +332,7 @@ impl SubstrateNode {
|
||||
trace!("Waiting for chainspec export");
|
||||
if !output.status.success() {
|
||||
anyhow::bail!(
|
||||
"Substrate-node export-chain-spec failed: {}",
|
||||
"substrate-node export-chain-spec failed: {}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
}
|
||||
@@ -578,6 +577,7 @@ impl EthereumNode for SubstrateNode {
|
||||
proof_size: block_proof_size,
|
||||
max_proof_size,
|
||||
}),
|
||||
tx_counts: Default::default(),
|
||||
})
|
||||
}
|
||||
});
|
||||
@@ -799,8 +799,8 @@ mod tests {
|
||||
|
||||
let context = test_config();
|
||||
let mut node = SubstrateNode::new(
|
||||
context.kitchensink_configuration.path.clone(),
|
||||
SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND,
|
||||
context.revive_dev_node_configuration.path.clone(),
|
||||
SubstrateNode::REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND,
|
||||
None,
|
||||
&context,
|
||||
&[],
|
||||
@@ -822,6 +822,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
#[ignore = "Ignored since it takes a long time to run"]
|
||||
async fn node_mines_simple_transfer_transaction_and_returns_receipt() {
|
||||
// Arrange
|
||||
let (context, node) = shared_state();
|
||||
@@ -838,11 +839,14 @@ mod tests {
|
||||
.value(U256::from(100_000_000_000_000u128));
|
||||
|
||||
// Act
|
||||
let receipt = provider.send_transaction(transaction).await;
|
||||
let mut pending_transaction = provider
|
||||
.send_transaction(transaction)
|
||||
.await
|
||||
.expect("Submission failed");
|
||||
pending_transaction.set_timeout(Some(Duration::from_secs(60)));
|
||||
|
||||
// Assert
|
||||
let _ = receipt
|
||||
.expect("Failed to send the transfer transaction")
|
||||
let _ = pending_transaction
|
||||
.get_receipt()
|
||||
.await
|
||||
.expect("Failed to get the receipt for the transfer");
|
||||
@@ -866,8 +870,8 @@ mod tests {
|
||||
|
||||
let context = test_config();
|
||||
let mut dummy_node = SubstrateNode::new(
|
||||
context.kitchensink_configuration.path.clone(),
|
||||
SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND,
|
||||
context.revive_dev_node_configuration.path.clone(),
|
||||
SubstrateNode::REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND,
|
||||
None,
|
||||
&context,
|
||||
&[],
|
||||
@@ -960,7 +964,7 @@ mod tests {
|
||||
|
||||
assert!(
|
||||
version.starts_with("substrate-node"),
|
||||
"Expected Substrate-node version string, got: {version}"
|
||||
"Expected substrate-node version string, got: {version}"
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -210,6 +210,7 @@ impl ZombienetNode {
|
||||
.with_args(vec![
|
||||
("--pool-limit", u32::MAX.to_string().as_str()).into(),
|
||||
("--pool-kbytes", u32::MAX.to_string().as_str()).into(),
|
||||
("--dev-block-time", 12000u16.to_string().as_str()).into(),
|
||||
])
|
||||
})
|
||||
})
|
||||
@@ -355,7 +356,7 @@ impl ZombienetNode {
|
||||
|
||||
if !output.status.success() {
|
||||
anyhow::bail!(
|
||||
"Substrate-node export-chain-spec failed: {}",
|
||||
"substrate-node export-chain-spec failed: {}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
}
|
||||
@@ -599,6 +600,7 @@ impl EthereumNode for ZombienetNode {
|
||||
proof_size: block_proof_size,
|
||||
max_proof_size,
|
||||
}),
|
||||
tx_counts: Default::default(),
|
||||
})
|
||||
}
|
||||
});
|
||||
@@ -856,6 +858,7 @@ mod tests {
|
||||
#[tokio::test]
|
||||
#[ignore = "Ignored for the time being"]
|
||||
async fn test_transfer_transaction_should_return_receipt() {
|
||||
// Arrange
|
||||
let (ctx, node) = new_node().await;
|
||||
|
||||
let provider = node.provider().await.expect("Failed to create provider");
|
||||
@@ -864,9 +867,15 @@ mod tests {
|
||||
.to(account_address)
|
||||
.value(U256::from(100_000_000_000_000u128));
|
||||
|
||||
let receipt = provider.send_transaction(transaction).await;
|
||||
let _ = receipt
|
||||
.expect("Failed to send the transfer transaction")
|
||||
// Act
|
||||
let mut pending_transaction = provider
|
||||
.send_transaction(transaction)
|
||||
.await
|
||||
.expect("Submission failed");
|
||||
pending_transaction.set_timeout(Some(Duration::from_secs(60)));
|
||||
|
||||
// Assert
|
||||
let _ = pending_transaction
|
||||
.get_receipt()
|
||||
.await
|
||||
.expect("Failed to get the receipt for the transfer");
|
||||
|
||||
@@ -62,7 +62,10 @@ where
|
||||
) -> TransportResult<Self::Fillable> {
|
||||
match self.inner.prepare(provider, tx).await {
|
||||
Ok(fill) => Ok(Some(fill)),
|
||||
Err(_) => Ok(None),
|
||||
Err(err) => {
|
||||
tracing::debug!(error = ?err, "Gas Provider Estimation Failed, using fallback");
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -104,7 +104,7 @@ where
|
||||
};
|
||||
debug!(%tx_hash, "Submitted Transaction");
|
||||
|
||||
pending_transaction.set_timeout(Some(Duration::from_secs(120)));
|
||||
pending_transaction.set_timeout(Some(Duration::from_secs(240)));
|
||||
let tx_hash = pending_transaction.watch().await.context(format!(
|
||||
"Transaction inclusion watching timeout for {tx_hash}"
|
||||
))?;
|
||||
|
||||
@@ -41,7 +41,7 @@ pub struct ReportAggregator {
|
||||
impl ReportAggregator {
|
||||
pub fn new(context: Context) -> Self {
|
||||
let (runner_tx, runner_rx) = unbounded_channel::<RunnerEvent>();
|
||||
let (listener_tx, _) = channel::<ReporterEvent>(1024);
|
||||
let (listener_tx, _) = channel::<ReporterEvent>(0xFFFF);
|
||||
Self {
|
||||
report: Report::new(context),
|
||||
remaining_cases: Default::default(),
|
||||
@@ -64,7 +64,7 @@ impl ReportAggregator {
|
||||
debug!("Starting to aggregate report");
|
||||
|
||||
while let Some(event) = self.runner_rx.recv().await {
|
||||
debug!(?event, "Received Event");
|
||||
debug!(event = event.variant_name(), "Received Event");
|
||||
match event {
|
||||
RunnerEvent::SubscribeToEvents(event) => {
|
||||
self.handle_subscribe_to_events_event(*event);
|
||||
@@ -412,8 +412,8 @@ impl ReportAggregator {
|
||||
{
|
||||
block_information.sort_by(|a, b| {
|
||||
a.ethereum_block_information
|
||||
.block_timestamp
|
||||
.cmp(&b.ethereum_block_information.block_timestamp)
|
||||
.block_number
|
||||
.cmp(&b.ethereum_block_information.block_number)
|
||||
});
|
||||
|
||||
// Computing the TPS.
|
||||
@@ -466,7 +466,6 @@ impl ReportAggregator {
|
||||
.filter_map(|block| block.ref_time_block_fullness_percentage())
|
||||
.map(|v| v as u64)
|
||||
.collect::<Vec<_>>();
|
||||
dbg!(&reftime_block_fullness);
|
||||
if !reftime_block_fullness.is_empty() {
|
||||
report
|
||||
.metrics
|
||||
@@ -482,7 +481,6 @@ impl ReportAggregator {
|
||||
.filter_map(|block| block.proof_size_block_fullness_percentage())
|
||||
.map(|v| v as u64)
|
||||
.collect::<Vec<_>>();
|
||||
dbg!(&proof_size_block_fullness);
|
||||
if !proof_size_block_fullness.is_empty() {
|
||||
report
|
||||
.metrics
|
||||
@@ -803,8 +801,9 @@ where
|
||||
pub fn with_list(
|
||||
&mut self,
|
||||
platform_identifier: PlatformIdentifier,
|
||||
mut list: Vec<T>,
|
||||
original_list: Vec<T>,
|
||||
) -> &mut Self {
|
||||
let mut list = original_list.clone();
|
||||
list.sort();
|
||||
let Some(min) = list.first().copied() else {
|
||||
return self;
|
||||
@@ -842,7 +841,7 @@ where
|
||||
.insert(platform_identifier, median);
|
||||
self.raw
|
||||
.get_or_insert_default()
|
||||
.insert(platform_identifier, list);
|
||||
.insert(platform_identifier, original_list);
|
||||
|
||||
self
|
||||
}
|
||||
@@ -883,6 +882,7 @@ pub struct ContractInformation {
|
||||
pub struct MinedBlockInformation {
|
||||
pub ethereum_block_information: EthereumMinedBlockInformation,
|
||||
pub substrate_block_information: Option<SubstrateMinedBlockInformation>,
|
||||
pub tx_counts: BTreeMap<StepPath, usize>,
|
||||
}
|
||||
|
||||
impl MinedBlockInformation {
|
||||
|
||||
@@ -347,6 +347,16 @@ macro_rules! define_event {
|
||||
),*
|
||||
}
|
||||
|
||||
impl $ident {
|
||||
pub fn variant_name(&self) -> &'static str {
|
||||
match self {
|
||||
$(
|
||||
Self::$variant_ident { .. } => stringify!($variant_ident)
|
||||
),*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
$(
|
||||
#[derive(Debug)]
|
||||
$(#[$variant_meta])*
|
||||
|
||||
+1
-1
Submodule polkadot-sdk updated: dc3d0e5ab7...a44be635e6
+1
-1
Submodule resolc-compiler-tests updated: ce77cb1166...a9d1f54b74
+2
-5
@@ -22,7 +22,6 @@ POLKADOT_SDK_DIR="${1:-}"
|
||||
# Binary paths (default to names in $PATH)
|
||||
REVIVE_DEV_NODE_BIN="revive-dev-node"
|
||||
ETH_RPC_BIN="eth-rpc"
|
||||
SUBSTRATE_NODE_BIN="substrate-node"
|
||||
|
||||
echo -e "${GREEN}=== Revive Differential Tests Quick Start ===${NC}"
|
||||
echo ""
|
||||
@@ -50,14 +49,13 @@ if [ -n "$POLKADOT_SDK_DIR" ]; then
|
||||
|
||||
REVIVE_DEV_NODE_BIN="$POLKADOT_SDK_DIR/target/release/revive-dev-node"
|
||||
ETH_RPC_BIN="$POLKADOT_SDK_DIR/target/release/eth-rpc"
|
||||
SUBSTRATE_NODE_BIN="$POLKADOT_SDK_DIR/target/release/substrate-node"
|
||||
|
||||
if [ ! -x "$REVIVE_DEV_NODE_BIN" ] || [ ! -x "$ETH_RPC_BIN" ] || [ ! -x "$SUBSTRATE_NODE_BIN" ]; then
|
||||
if [ ! -x "$REVIVE_DEV_NODE_BIN" ] || [ ! -x "$ETH_RPC_BIN" ]; then
|
||||
echo -e "${YELLOW}Required binaries not found in release target. Building...${NC}"
|
||||
(cd "$POLKADOT_SDK_DIR" && cargo build --release --package staging-node-cli --package pallet-revive-eth-rpc --package revive-dev-node)
|
||||
fi
|
||||
|
||||
for bin in "$REVIVE_DEV_NODE_BIN" "$ETH_RPC_BIN" "$SUBSTRATE_NODE_BIN"; do
|
||||
for bin in "$REVIVE_DEV_NODE_BIN" "$ETH_RPC_BIN"; do
|
||||
if [ ! -x "$bin" ]; then
|
||||
echo -e "${RED}Expected binary not found after build: $bin${NC}"
|
||||
exit 1
|
||||
@@ -84,7 +82,6 @@ RUST_LOG="info,alloy_pubsub::service=error" ./target/release/retester test \
|
||||
--concurrency.number-of-threads 5 \
|
||||
--concurrency.number-of-concurrent-tasks 500 \
|
||||
--wallet.additional-keys 100000 \
|
||||
--kitchensink.path "$SUBSTRATE_NODE_BIN" \
|
||||
--revive-dev-node.path "$REVIVE_DEV_NODE_BIN" \
|
||||
--eth-rpc.path "$ETH_RPC_BIN" \
|
||||
> logs.log \
|
||||
|
||||
@@ -0,0 +1,246 @@
|
||||
"""
|
||||
Utilities to print benchmark metrics from a report JSON into CSV.
|
||||
|
||||
Usage:
|
||||
python scripts/print_benchmark_metrics_csv.py /absolute/path/to/report.json
|
||||
|
||||
The script prints, for each metadata path, case index, and mode combination,
|
||||
CSV rows aligned to mined blocks with the following columns:
|
||||
- block_number
|
||||
- number_of_txs
|
||||
- tps (transaction_per_second)
|
||||
- gps (gas_per_second)
|
||||
- gas_block_fullness
|
||||
- ref_time (if available)
|
||||
- max_ref_time (if available)
|
||||
- proof_size (if available)
|
||||
- max_proof_size (if available)
|
||||
- ref_time_block_fullness (if available)
|
||||
- proof_size_block_fullness (if available)
|
||||
|
||||
Important nuance: TPS and GPS arrays have (number_of_blocks - 1) items. The
|
||||
first block row has no TPS/GPS; the CSV leaves those cells empty for the first
|
||||
row and aligns subsequent values to their corresponding next block.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import sys
|
||||
import csv
|
||||
from typing import List, Mapping, TypedDict
|
||||
|
||||
|
||||
class EthereumMinedBlockInformation(TypedDict):
    """EVM block information extracted from the report.

    Mirrors the ``ethereum_block_information`` object serialized into the
    report JSON (see the Rust `MinedBlockInformation` model upstream).

    Attributes:
        block_number: The block height.
        block_timestamp: The UNIX timestamp of the block.
        mined_gas: Total gas used (mined) in the block.
        block_gas_limit: The gas limit of the block.
        transaction_hashes: List of transaction hashes included in the block.
    """

    block_number: int
    block_timestamp: int
    mined_gas: int
    block_gas_limit: int
    transaction_hashes: List[str]
|
||||
|
||||
|
||||
class SubstrateMinedBlockInformation(TypedDict):
    """Substrate-specific block resource usage fields.

    Attributes:
        ref_time: The consumed ref time in the block.
        max_ref_time: The maximum ref time allowed for the block.
        proof_size: The consumed proof size in the block.
        max_proof_size: The maximum proof size allowed for the block.
    """

    # All four values are plain JSON integers; block-fullness percentages are
    # derived from the consumed/max pairs by the report producer.
    ref_time: int
    max_ref_time: int
    proof_size: int
    max_proof_size: int
|
||||
|
||||
|
||||
class MinedBlockInformation(TypedDict):
    """Block-level information for a mined block with both EVM and optional Substrate fields."""

    ethereum_block_information: EthereumMinedBlockInformation
    # NOTE(review): the Rust producer models this field as
    # `Option<SubstrateMinedBlockInformation>`, so it may be null for
    # non-Substrate platforms even though the annotation here is non-optional
    # and `main` indexes it unconditionally — confirm against real reports.
    substrate_block_information: SubstrateMinedBlockInformation
|
||||
|
||||
|
||||
class Metric(TypedDict):
    """Metric data of integer values keyed by platform identifier.

    Every mapping is keyed by a platform identifier string (e.g. the
    node/compiler target such as ``revive-dev-node-revm-solc``).

    Attributes:
        minimum: Single scalar minimum per platform.
        maximum: Single scalar maximum per platform.
        mean: Single scalar mean per platform.
        median: Single scalar median per platform.
        raw: Time-series (or list) of values per platform.
    """

    minimum: Mapping[str, int]
    maximum: Mapping[str, int]
    mean: Mapping[str, int]
    median: Mapping[str, int]
    raw: Mapping[str, List[int]]
|
||||
|
||||
|
||||
class Metrics(TypedDict):
    """All metrics that may be present for a given execution report.

    Note that some metrics are optional and present only for specific platforms
    or execution modes.
    """

    # NOTE(review): this TypedDict is total, so type checkers treat every
    # metric as required even though the docstring says some are optional —
    # `total=False` or `NotRequired` would express that; confirm intent.
    transaction_per_second: Metric
    gas_per_second: Metric
    gas_block_fullness: Metric
    ref_time_block_fullness: Metric
    proof_size_block_fullness: Metric
|
||||
|
||||
|
||||
class ExecutionReport(TypedDict):
    """Execution report for a mode containing mined blocks and metrics.

    The per-metric ``raw`` series align with ``mined_block_information``;
    per the module docstring, TPS/GPS have one fewer entry than the block
    list (no value exists for the first block).

    Attributes:
        mined_block_information: Mapping from platform identifier to the list of
            mined blocks observed for that platform.
        metrics: The computed metrics for the execution.
    """

    mined_block_information: Mapping[str, List[MinedBlockInformation]]
    metrics: Metrics
|
||||
|
||||
|
||||
class CaseReport(TypedDict):
    """Report for a single case, keyed by mode string (e.g. "Y+ >=0.8.13")."""

    mode_execution_reports: Mapping[str, ExecutionReport]
|
||||
|
||||
|
||||
class MetadataFileReport(TypedDict):
    """Report subtree keyed by case indices (as strings) for a metadata file path."""

    case_reports: Mapping[str, CaseReport]
|
||||
|
||||
|
||||
class ReportRoot(TypedDict):
    """Top-level report schema with execution information keyed by metadata path."""

    execution_information: Mapping[str, MetadataFileReport]
|
||||
|
||||
|
||||
# The functional TypedDict form is required here: the keys contain spaces
# (they double as CSV column headers) and are not valid Python identifiers.
BlockInformation = TypedDict(
    "BlockInformation",
    {
        "Block Number": int,
        "Timestamp": int,
        # Placeholder column: `main` always emits None for it.
        "Datetime": None,
        "Transaction Count": int,
        # TPS/GPS are None for the first block of a run (the raw metric
        # arrays have one fewer entry than the block list).
        "TPS": int | None,
        "GPS": int | None,
        "Ref Time": int,
        "Max Ref Time": int,
        "Block Fullness Ref Time": int,
        "Proof Size": int,
        "Max Proof Size": int,
        "Block Fullness Proof Size": int,
    },
)
"""A typed dictionary used to hold all of the block information"""
|
||||
|
||||
|
||||
def load_report(path: str) -> ReportRoot:
    """Parse the JSON benchmark report stored at *path*.

    Args:
        path: Absolute or relative filesystem path to the JSON report file.

    Returns:
        The decoded report as the ``ReportRoot`` typed-dictionary structure.
    """

    with open(path, encoding="utf-8") as handle:
        return json.load(handle)
|
||||
|
||||
|
||||
def main() -> None:
    """Entry point: load the report named on the command line and print CSV.

    For every (metadata path, case index, mode) combination in the report,
    emits one CSV table (header plus one row per mined block) to stdout for
    the fixed target platform.
    """
    if len(sys.argv) < 2:
        # Fail with the documented usage instead of an opaque IndexError.
        print(
            "Usage: python print_benchmark_metrics_csv.py /path/to/report.json",
            file=sys.stderr,
        )
        raise SystemExit(2)
    report_path: str = sys.argv[1]
    report: ReportRoot = load_report(report_path)

    # TODO: Remove this in the future, but for now, the target is fixed.
    target: str = "revive-dev-node-revm-solc"

    # The original created an unused `csv.writer(sys.stdout)` here that was
    # immediately shadowed by the per-group DictWriter; it has been removed.
    for metadata_file_report in report["execution_information"].values():
        for case_report in metadata_file_report["case_reports"].values():
            for execution_report in case_report["mode_execution_reports"].values():
                blocks_information: list[MinedBlockInformation] = execution_report[
                    "mined_block_information"
                ][target]

                resolved_blocks: list[BlockInformation] = [
                    _resolve_block(execution_report, target, i, block_information)
                    for i, block_information in enumerate(blocks_information)
                ]

                if not resolved_blocks:
                    # No mined blocks for this combination: previously this
                    # raised IndexError on `resolved_blocks[0]`; skip instead.
                    continue

                csv_writer = csv.DictWriter(sys.stdout, resolved_blocks[0].keys())
                csv_writer.writeheader()
                csv_writer.writerows(resolved_blocks)


def _resolve_block(
    execution_report: ExecutionReport,
    target: str,
    i: int,
    block_information: MinedBlockInformation,
) -> BlockInformation:
    """Flatten one mined block (plus its per-block metrics) into a CSV row.

    The TPS/GPS raw arrays have (number_of_blocks - 1) entries, so the first
    block (``i == 0``) gets ``None`` in those cells; later blocks read index
    ``i - 1``.
    """
    ethereum = block_information["ethereum_block_information"]
    substrate = block_information["substrate_block_information"]
    metrics = execution_report["metrics"]
    return {
        "Block Number": ethereum["block_number"],
        "Timestamp": ethereum["block_timestamp"],
        # Placeholder column kept for downstream spreadsheet formulas.
        "Datetime": None,
        "Transaction Count": len(ethereum["transaction_hashes"]),
        "TPS": (
            None
            if i == 0
            else metrics["transaction_per_second"]["raw"][target][i - 1]
        ),
        "GPS": (
            None if i == 0 else metrics["gas_per_second"]["raw"][target][i - 1]
        ),
        "Ref Time": substrate["ref_time"],
        "Max Ref Time": substrate["max_ref_time"],
        "Block Fullness Ref Time": metrics["ref_time_block_fullness"]["raw"][
            target
        ][i],
        "Proof Size": substrate["proof_size"],
        "Max Proof Size": substrate["max_proof_size"],
        "Block Fullness Proof Size": metrics["proof_size_block_fullness"]["raw"][
            target
        ][i],
    }
|
||||
|
||||
|
||||
# Script entry point: run the CSV export only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,226 @@
|
||||
"""
|
||||
This script is used to turn the JSON report produced by the revive differential tests tool into an
|
||||
easy to consume markdown document for the purpose of reporting this information in the Polkadot SDK
|
||||
CI. The full models used in the JSON report can be found in the revive differential tests repo and
|
||||
the models used in this script are just a partial reproduction of the full report models.
|
||||
"""
|
||||
|
||||
from typing import TypedDict, Literal, Union
|
||||
|
||||
import json, io
|
||||
|
||||
|
||||
class Report(TypedDict):
    """Top-level report model: run context plus per-file execution results.

    ``execution_information`` is keyed by metadata file path, then by mode
    string, then by case index (as a string).
    """

    context: "Context"
    execution_information: dict[
        "MetadataFilePathString",
        dict["ModeString", dict["CaseIdxString", "CaseReport"]],
    ]
|
||||
|
||||
|
||||
class Context(TypedDict):
    """Run context; the report nests the test context under the "Test" key."""

    Test: "TestContext"
|
||||
|
||||
|
||||
class TestContext(TypedDict):
    """Context of a test run; exposes the corpus configuration used."""

    corpus_configuration: "CorpusConfiguration"
|
||||
|
||||
|
||||
class CorpusConfiguration(TypedDict):
    """Corpus configuration: the list of test specifiers that were run."""

    test_specifiers: list["TestSpecifier"]
|
||||
|
||||
|
||||
class CaseReport(TypedDict):
    """Report for a single test case; carries the case's final status."""

    status: "CaseStatus"
|
||||
|
||||
|
||||
class CaseStatusSuccess(TypedDict):
    """Status payload for a case that succeeded."""

    status: Literal["Succeeded"]
    # Number of steps the case executed.
    steps_executed: int
|
||||
|
||||
|
||||
class CaseStatusFailure(TypedDict):
    """Status payload for a failed case, with a human-readable reason."""

    status: Literal["Failed"]
    reason: str
|
||||
|
||||
|
||||
class CaseStatusIgnored(TypedDict):
    """Status payload for an ignored case, with the reason it was skipped."""

    status: Literal["Ignored"]
    reason: str
|
||||
|
||||
|
||||
# Type aliases used throughout the report models. The plain-string aliases
# carry no runtime checking; they exist purely for readability of signatures.
CaseStatus = Union[CaseStatusSuccess, CaseStatusFailure, CaseStatusIgnored]
"""A union type of all of the possible statuses that could be reported for a case."""

TestSpecifier = str
"""A test specifier string. For example resolc-compiler-tests/fixtures/solidity/test.json::0::Y+"""

ModeString = str
"""The mode string. For example Y+ >=0.8.13"""

MetadataFilePathString = str
"""The path to a metadata file. For example resolc-compiler-tests/fixtures/solidity/test.json"""

CaseIdxString = str
"""The index of a case as a string. For example '0'"""
|
||||
|
||||
|
||||
def path_relative_to_resolc_compiler_test_directory(path: str) -> str:
    """Return *path* relative to the resolc-compiler-tests fixture directory.

    Example:
        Input:  ~/polkadot-sdk/revive-differential-tests/resolc-compiler-tests/fixtures/solidity/test.json
        Output: test.json

    A path that does not contain the marker directory is returned unchanged
    (apart from stripping leading/trailing slashes).
    """

    # `split(...)[-1]` keeps everything after the marker directory; the
    # redundant f-string wrapper the original had around this already-str
    # expression has been dropped.
    return path.split("resolc-compiler-tests/fixtures/solidity")[-1].strip("/")
|
||||
|
||||
|
||||
def main() -> None:
    """Render ``report.json`` into the ``report.md`` summary document.

    The document contains the list of specified tests, aggregate
    success/failure/ignore counts, and a collapsible table with one row per
    failed case.
    """
    with open("report.json", "r") as file:
        report: Report = json.load(file)

    # Using `with` (instead of the original manual close) guarantees the
    # markdown file is flushed and closed even if processing raises part-way.
    with open("report.md", "w") as markdown_document:
        print("# Differential Tests Results", file=markdown_document)

        # Getting all of the test specifiers from the report and making them
        # relative to the tests dir.
        test_specifiers: list[str] = [
            path_relative_to_resolc_compiler_test_directory(specifier)
            for specifier in report["context"]["Test"]["corpus_configuration"][
                "test_specifiers"
            ]
        ]
        print("## Specified Tests", file=markdown_document)
        for test_specifier in test_specifiers:
            print(f"* `{test_specifier}`", file=markdown_document)

        # Counting the total number of test cases, successes, failures, and
        # ignored tests.
        total_number_of_cases: int = 0
        total_number_of_successes: int = 0
        total_number_of_failures: int = 0
        total_number_of_ignores: int = 0
        for mode_to_case_mapping in report["execution_information"].values():
            for case_idx_to_report_mapping in mode_to_case_mapping.values():
                for case_report in case_idx_to_report_mapping.values():
                    status: CaseStatus = case_report["status"]

                    total_number_of_cases += 1
                    if status["status"] == "Succeeded":
                        total_number_of_successes += 1
                    elif status["status"] == "Failed":
                        total_number_of_failures += 1
                    elif status["status"] == "Ignored":
                        total_number_of_ignores += 1
                    else:
                        raise Exception(
                            f"Encountered a status that's unknown to the script: {status}"
                        )

        print("## Counts", file=markdown_document)
        print(
            f"* **Total Number of Test Cases:** {total_number_of_cases}",
            file=markdown_document,
        )
        print(
            f"* **Total Number of Successes:** {total_number_of_successes}",
            file=markdown_document,
        )
        print(
            f"* **Total Number of Failures:** {total_number_of_failures}",
            file=markdown_document,
        )
        print(
            f"* **Total Number of Ignores:** {total_number_of_ignores}",
            file=markdown_document,
        )

        # Grouping the successful cases so that each failure can be annotated
        # with the compilation modes in which the same case passed.
        successful_cases: dict[
            MetadataFilePathString, dict[CaseIdxString, set[ModeString]]
        ] = {}
        for metadata_file_path, mode_to_case_mapping in report[
            "execution_information"
        ].items():
            for mode_string, case_idx_to_report_mapping in mode_to_case_mapping.items():
                for case_idx_string, case_report in case_idx_to_report_mapping.items():
                    status = case_report["status"]
                    if status["status"] != "Succeeded":
                        continue
                    relative_path = path_relative_to_resolc_compiler_test_directory(
                        metadata_file_path
                    )
                    normalized_mode = mode_string.replace(" M3", "+").replace(
                        " M0", "-"
                    )
                    successful_cases.setdefault(relative_path, {}).setdefault(
                        case_idx_string, set()
                    ).add(normalized_mode)

        print("## Failures", file=markdown_document)
        # The original built these messages with backslash continuations inside
        # the string literal, which leaked the source indentation into the
        # emitted markdown; implicit concatenation keeps the spacing clean.
        print(
            "The test specifiers seen in this section have the format "
            "'path::case_idx::compilation_mode' and they're compatible with the "
            "revive differential tests framework and can be specified to it "
            "directly in the same way that they're provided through the `--test` "
            "argument of the framework.\n",
            file=markdown_document,
        )
        print(
            "The failures are provided in an expandable section to ensure that "
            "the PR does not get polluted with information. Please click on the "
            "section below for more information",
            file=markdown_document,
        )
        print(
            "<details><summary>Detailed Differential Tests Failure Information</summary>\n\n",
            file=markdown_document,
        )
        print("| Test Specifier | Failure Reason | Note |", file=markdown_document)
        print("| -- | -- | -- |", file=markdown_document)

        for metadata_file_path, mode_to_case_mapping in report[
            "execution_information"
        ].items():
            for mode_string, case_idx_to_report_mapping in mode_to_case_mapping.items():
                for case_idx_string, case_report in case_idx_to_report_mapping.items():
                    status = case_report["status"]
                    if status["status"] != "Failed":
                        continue

                    relative_path = path_relative_to_resolc_compiler_test_directory(
                        metadata_file_path
                    )
                    normalized_mode = mode_string.replace(" M3", "+").replace(
                        " M0", "-"
                    )

                    # Escape pipes so an arbitrary failure reason cannot break
                    # the markdown table layout.
                    failure_reason: str = (
                        status["reason"].replace("\n", " ").replace("|", "\\|")
                    )

                    # Read-only lookup: the original used nested `setdefault`
                    # here, mutating `successful_cases` while reporting.
                    modes_where_this_case_succeeded: set[ModeString] = (
                        successful_cases.get(relative_path, {}).get(
                            case_idx_string, set()
                        )
                    )
                    note: str = ""
                    if modes_where_this_case_succeeded:
                        note = (
                            "This test case succeeded with other compilation "
                            f"modes: {modes_where_this_case_succeeded}"
                        )

                    test_specifier = (
                        f"{relative_path}::{case_idx_string}::{normalized_mode}"
                    )
                    print(
                        f"| `{test_specifier}` | `{failure_reason}` | {note} |",
                        file=markdown_document,
                    )
        print("\n\n</details>", file=markdown_document)
|
||||
|
||||
|
||||
# Script entry point: generate the markdown report only when executed
# directly, not when imported as a module.
if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user