mirror of
https://github.com/pezkuwichain/revive-differential-tests.git
synced 2026-04-22 21:57:58 +00:00
Compare commits
9 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| b6f1f6e6af | |||
| 6e8187b135 | |||
| 310925de07 | |||
| bb98c96c9d | |||
| 1d481b314c | |||
| 36ef669341 | |||
| e7ebd0c034 | |||
| ad20b99e0a | |||
| 234e59bbea |
@@ -51,15 +51,15 @@ jobs:
|
|||||||
uses: actions/cache@v3
|
uses: actions/cache@v3
|
||||||
with:
|
with:
|
||||||
path: |
|
path: |
|
||||||
~/.cargo/bin/substrate-node
|
~/.cargo/bin/revive-dev-node
|
||||||
~/.cargo/bin/eth-rpc
|
~/.cargo/bin/eth-rpc
|
||||||
key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}
|
key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}-with-dev-node
|
||||||
|
|
||||||
- name: Build substrate-node
|
- name: Build revive-dev-node
|
||||||
if: steps.cache.outputs.cache-hit != 'true'
|
if: steps.cache.outputs.cache-hit != 'true'
|
||||||
run: |
|
run: |
|
||||||
cd polkadot-sdk
|
cd polkadot-sdk
|
||||||
cargo install --locked --force --profile=production --path substrate/bin/node/cli --bin substrate-node --features cli
|
cargo install --locked --force --profile=production --path substrate/frame/revive/dev-node/node --bin revive-dev-node
|
||||||
|
|
||||||
- name: Build eth-rpc
|
- name: Build eth-rpc
|
||||||
if: steps.cache.outputs.cache-hit != 'true'
|
if: steps.cache.outputs.cache-hit != 'true'
|
||||||
@@ -109,14 +109,16 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- name: Checkout repo
|
- name: Checkout repo
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: recursive
|
||||||
|
|
||||||
- name: Restore binaries from cache
|
- name: Restore binaries from cache
|
||||||
uses: actions/cache@v3
|
uses: actions/cache@v3
|
||||||
with:
|
with:
|
||||||
path: |
|
path: |
|
||||||
~/.cargo/bin/substrate-node
|
~/.cargo/bin/revive-dev-node
|
||||||
~/.cargo/bin/eth-rpc
|
~/.cargo/bin/eth-rpc
|
||||||
key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}
|
key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}-with-dev-node
|
||||||
|
|
||||||
- name: Restore downloaded Polkadot binaries from cache
|
- name: Restore downloaded Polkadot binaries from cache
|
||||||
uses: actions/cache@v3
|
uses: actions/cache@v3
|
||||||
@@ -202,8 +204,13 @@ jobs:
|
|||||||
sudo apt update
|
sudo apt update
|
||||||
sudo apt install kurtosis-cli
|
sudo apt install kurtosis-cli
|
||||||
|
|
||||||
|
- name: Install cargo-machete
|
||||||
|
uses: clechasseur/rs-cargo@v2
|
||||||
|
with:
|
||||||
|
command: install
|
||||||
|
args: cargo-machete@0.7.0
|
||||||
- name: Machete
|
- name: Machete
|
||||||
uses: bnjbvr/cargo-machete@v0.7.1
|
run: cargo machete crates
|
||||||
|
|
||||||
- name: Format
|
- name: Format
|
||||||
run: make format
|
run: make format
|
||||||
@@ -211,8 +218,8 @@ jobs:
|
|||||||
- name: Clippy
|
- name: Clippy
|
||||||
run: make clippy
|
run: make clippy
|
||||||
|
|
||||||
- name: Check substrate-node version
|
- name: Check revive-dev-node version
|
||||||
run: substrate-node --version
|
run: revive-dev-node --version
|
||||||
|
|
||||||
- name: Check eth-rpc version
|
- name: Check eth-rpc version
|
||||||
run: eth-rpc --version
|
run: eth-rpc --version
|
||||||
|
|||||||
@@ -9,7 +9,7 @@
|
|||||||
This project compiles and executes declarative smart-contract tests against multiple platforms, then compares behavior (status, return data, events, and state diffs). Today it supports:
|
This project compiles and executes declarative smart-contract tests against multiple platforms, then compares behavior (status, return data, events, and state diffs). Today it supports:
|
||||||
|
|
||||||
- Geth (EVM reference implementation)
|
- Geth (EVM reference implementation)
|
||||||
- Revive Kitchensink (Substrate-based PolkaVM + `eth-rpc` proxy)
|
- Revive Dev Node (Substrate-based PolkaVM + `eth-rpc` proxy)
|
||||||
|
|
||||||
Use it to:
|
Use it to:
|
||||||
|
|
||||||
@@ -39,9 +39,9 @@ This repository contains none of the tests and only contains the testing framewo
|
|||||||
This section describes the required dependencies that this framework requires to run. Compiling this framework is pretty straightforward and no additional dependencies beyond what's specified in the `Cargo.toml` file should be required.
|
This section describes the required dependencies that this framework requires to run. Compiling this framework is pretty straightforward and no additional dependencies beyond what's specified in the `Cargo.toml` file should be required.
|
||||||
|
|
||||||
- Stable Rust
|
- Stable Rust
|
||||||
- Geth - When doing differential testing against the PVM we submit transactions to a Geth node and to Kitchensink to compare them.
|
- Geth - When doing differential testing against the PVM we submit transactions to a Geth node and to Revive Dev Node to compare them.
|
||||||
- Kitchensink - When doing differential testing against the PVM we submit transactions to a Geth node and to Kitchensink to compare them.
|
- Revive Dev Node - When doing differential testing against the PVM we submit transactions to a Geth node and to Revive Dev Node to compare them.
|
||||||
- ETH-RPC - All communication with Kitchensink is done through the ETH RPC.
|
- ETH-RPC - All communication with Revive Dev Node is done through the ETH RPC.
|
||||||
- Solc - This is actually a transitive dependency, while this tool doesn't require solc as it downloads the versions that it requires, resolc requires that Solc is installed and available in the path.
|
- Solc - This is actually a transitive dependency, while this tool doesn't require solc as it downloads the versions that it requires, resolc requires that Solc is installed and available in the path.
|
||||||
- Resolc - This is required to compile the contracts to PolkaVM bytecode.
|
- Resolc - This is required to compile the contracts to PolkaVM bytecode.
|
||||||
- Kurtosis - The Kurtosis CLI tool is required for the production Ethereum mainnet-like node configuration with Geth as the execution layer and lighthouse as the consensus layer. Kurtosis also requires docker to be installed since it runs everything inside of docker containers.
|
- Kurtosis - The Kurtosis CLI tool is required for the production Ethereum mainnet-like node configuration with Geth as the execution layer and lighthouse as the consensus layer. Kurtosis also requires docker to be installed since it runs everything inside of docker containers.
|
||||||
|
|||||||
@@ -31,10 +31,6 @@ pub enum PlatformIdentifier {
|
|||||||
GethEvmSolc,
|
GethEvmSolc,
|
||||||
/// The Lighthouse Go-ethereum reference full node EVM implementation with the solc compiler.
|
/// The Lighthouse Go-ethereum reference full node EVM implementation with the solc compiler.
|
||||||
LighthouseGethEvmSolc,
|
LighthouseGethEvmSolc,
|
||||||
/// The kitchensink node with the PolkaVM backend with the resolc compiler.
|
|
||||||
KitchensinkPolkavmResolc,
|
|
||||||
/// The kitchensink node with the REVM backend with the solc compiler.
|
|
||||||
KitchensinkRevmSolc,
|
|
||||||
/// The revive dev node with the PolkaVM backend with the resolc compiler.
|
/// The revive dev node with the PolkaVM backend with the resolc compiler.
|
||||||
ReviveDevNodePolkavmResolc,
|
ReviveDevNodePolkavmResolc,
|
||||||
/// The revive dev node with the REVM backend with the solc compiler.
|
/// The revive dev node with the REVM backend with the solc compiler.
|
||||||
@@ -95,8 +91,6 @@ pub enum NodeIdentifier {
|
|||||||
Geth,
|
Geth,
|
||||||
/// The go-ethereum node implementation.
|
/// The go-ethereum node implementation.
|
||||||
LighthouseGeth,
|
LighthouseGeth,
|
||||||
/// The Kitchensink node implementation.
|
|
||||||
Kitchensink,
|
|
||||||
/// The revive dev node implementation.
|
/// The revive dev node implementation.
|
||||||
ReviveDevNode,
|
ReviveDevNode,
|
||||||
/// A zombienet spawned nodes
|
/// A zombienet spawned nodes
|
||||||
|
|||||||
@@ -131,17 +131,6 @@ impl AsRef<PolkadotParachainConfiguration> for Context {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AsRef<KitchensinkConfiguration> for Context {
|
|
||||||
fn as_ref(&self) -> &KitchensinkConfiguration {
|
|
||||||
match self {
|
|
||||||
Self::Test(context) => context.as_ref().as_ref(),
|
|
||||||
Self::Benchmark(context) => context.as_ref().as_ref(),
|
|
||||||
Self::ExportGenesis(context) => context.as_ref().as_ref(),
|
|
||||||
Self::ExportJsonSchema => unreachable!(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AsRef<ReviveDevNodeConfiguration> for Context {
|
impl AsRef<ReviveDevNodeConfiguration> for Context {
|
||||||
fn as_ref(&self) -> &ReviveDevNodeConfiguration {
|
fn as_ref(&self) -> &ReviveDevNodeConfiguration {
|
||||||
match self {
|
match self {
|
||||||
@@ -283,10 +272,6 @@ pub struct TestExecutionContext {
|
|||||||
#[clap(flatten, next_help_heading = "Lighthouse Configuration")]
|
#[clap(flatten, next_help_heading = "Lighthouse Configuration")]
|
||||||
pub lighthouse_configuration: KurtosisConfiguration,
|
pub lighthouse_configuration: KurtosisConfiguration,
|
||||||
|
|
||||||
/// Configuration parameters for the Kitchensink.
|
|
||||||
#[clap(flatten, next_help_heading = "Kitchensink Configuration")]
|
|
||||||
pub kitchensink_configuration: KitchensinkConfiguration,
|
|
||||||
|
|
||||||
/// Configuration parameters for the Revive Dev Node.
|
/// Configuration parameters for the Revive Dev Node.
|
||||||
#[clap(flatten, next_help_heading = "Revive Dev Node Configuration")]
|
#[clap(flatten, next_help_heading = "Revive Dev Node Configuration")]
|
||||||
pub revive_dev_node_configuration: ReviveDevNodeConfiguration,
|
pub revive_dev_node_configuration: ReviveDevNodeConfiguration,
|
||||||
@@ -409,10 +394,6 @@ pub struct BenchmarkingContext {
|
|||||||
#[clap(flatten, next_help_heading = "Lighthouse Configuration")]
|
#[clap(flatten, next_help_heading = "Lighthouse Configuration")]
|
||||||
pub lighthouse_configuration: KurtosisConfiguration,
|
pub lighthouse_configuration: KurtosisConfiguration,
|
||||||
|
|
||||||
/// Configuration parameters for the Kitchensink.
|
|
||||||
#[clap(flatten, next_help_heading = "Kitchensink Configuration")]
|
|
||||||
pub kitchensink_configuration: KitchensinkConfiguration,
|
|
||||||
|
|
||||||
/// Configuration parameters for the Polkadot Parachain.
|
/// Configuration parameters for the Polkadot Parachain.
|
||||||
#[clap(flatten, next_help_heading = "Polkadot Parachain Configuration")]
|
#[clap(flatten, next_help_heading = "Polkadot Parachain Configuration")]
|
||||||
pub polkadot_parachain_configuration: PolkadotParachainConfiguration,
|
pub polkadot_parachain_configuration: PolkadotParachainConfiguration,
|
||||||
@@ -491,10 +472,6 @@ pub struct ExportGenesisContext {
|
|||||||
#[clap(flatten, next_help_heading = "Lighthouse Configuration")]
|
#[clap(flatten, next_help_heading = "Lighthouse Configuration")]
|
||||||
pub lighthouse_configuration: KurtosisConfiguration,
|
pub lighthouse_configuration: KurtosisConfiguration,
|
||||||
|
|
||||||
/// Configuration parameters for the Kitchensink.
|
|
||||||
#[clap(flatten, next_help_heading = "Kitchensink Configuration")]
|
|
||||||
pub kitchensink_configuration: KitchensinkConfiguration,
|
|
||||||
|
|
||||||
/// Configuration parameters for the Polkadot Parachain.
|
/// Configuration parameters for the Polkadot Parachain.
|
||||||
#[clap(flatten, next_help_heading = "Polkadot Parachain Configuration")]
|
#[clap(flatten, next_help_heading = "Polkadot Parachain Configuration")]
|
||||||
pub polkadot_parachain_configuration: PolkadotParachainConfiguration,
|
pub polkadot_parachain_configuration: PolkadotParachainConfiguration,
|
||||||
@@ -556,12 +533,6 @@ impl AsRef<KurtosisConfiguration> for TestExecutionContext {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AsRef<KitchensinkConfiguration> for TestExecutionContext {
|
|
||||||
fn as_ref(&self) -> &KitchensinkConfiguration {
|
|
||||||
&self.kitchensink_configuration
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AsRef<ReviveDevNodeConfiguration> for TestExecutionContext {
|
impl AsRef<ReviveDevNodeConfiguration> for TestExecutionContext {
|
||||||
fn as_ref(&self) -> &ReviveDevNodeConfiguration {
|
fn as_ref(&self) -> &ReviveDevNodeConfiguration {
|
||||||
&self.revive_dev_node_configuration
|
&self.revive_dev_node_configuration
|
||||||
@@ -658,12 +629,6 @@ impl AsRef<PolkadotParachainConfiguration> for BenchmarkingContext {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AsRef<KitchensinkConfiguration> for BenchmarkingContext {
|
|
||||||
fn as_ref(&self) -> &KitchensinkConfiguration {
|
|
||||||
&self.kitchensink_configuration
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AsRef<ReviveDevNodeConfiguration> for BenchmarkingContext {
|
impl AsRef<ReviveDevNodeConfiguration> for BenchmarkingContext {
|
||||||
fn as_ref(&self) -> &ReviveDevNodeConfiguration {
|
fn as_ref(&self) -> &ReviveDevNodeConfiguration {
|
||||||
&self.revive_dev_node_configuration
|
&self.revive_dev_node_configuration
|
||||||
@@ -718,12 +683,6 @@ impl AsRef<KurtosisConfiguration> for ExportGenesisContext {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AsRef<KitchensinkConfiguration> for ExportGenesisContext {
|
|
||||||
fn as_ref(&self) -> &KitchensinkConfiguration {
|
|
||||||
&self.kitchensink_configuration
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl AsRef<PolkadotParachainConfiguration> for ExportGenesisContext {
|
impl AsRef<PolkadotParachainConfiguration> for ExportGenesisContext {
|
||||||
fn as_ref(&self) -> &PolkadotParachainConfiguration {
|
fn as_ref(&self) -> &PolkadotParachainConfiguration {
|
||||||
&self.polkadot_parachain_configuration
|
&self.polkadot_parachain_configuration
|
||||||
@@ -842,30 +801,6 @@ pub struct KurtosisConfiguration {
|
|||||||
pub path: PathBuf,
|
pub path: PathBuf,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A set of configuration parameters for Kitchensink.
|
|
||||||
#[derive(Clone, Debug, Parser, Serialize, Deserialize)]
|
|
||||||
pub struct KitchensinkConfiguration {
|
|
||||||
/// Specifies the path of the kitchensink node to be used by the tool.
|
|
||||||
///
|
|
||||||
/// If this is not specified, then the tool assumes that it should use the kitchensink binary
|
|
||||||
/// that's provided in the user's $PATH.
|
|
||||||
#[clap(
|
|
||||||
id = "kitchensink.path",
|
|
||||||
long = "kitchensink.path",
|
|
||||||
default_value = "substrate-node"
|
|
||||||
)]
|
|
||||||
pub path: PathBuf,
|
|
||||||
|
|
||||||
/// The amount of time to wait upon startup before considering that the node timed out.
|
|
||||||
#[clap(
|
|
||||||
id = "kitchensink.start-timeout-ms",
|
|
||||||
long = "kitchensink.start-timeout-ms",
|
|
||||||
default_value = "30000",
|
|
||||||
value_parser = parse_duration
|
|
||||||
)]
|
|
||||||
pub start_timeout_ms: Duration,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A set of configuration parameters for the revive dev node.
|
/// A set of configuration parameters for the revive dev node.
|
||||||
#[derive(Clone, Debug, Parser, Serialize, Deserialize)]
|
#[derive(Clone, Debug, Parser, Serialize, Deserialize)]
|
||||||
pub struct ReviveDevNodeConfiguration {
|
pub struct ReviveDevNodeConfiguration {
|
||||||
@@ -1161,35 +1096,6 @@ fn parse_duration(s: &str) -> anyhow::Result<Duration> {
|
|||||||
.map_err(Into::into)
|
.map_err(Into::into)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The Solidity compatible node implementation.
|
|
||||||
///
|
|
||||||
/// This describes the solutions to be tested against on a high level.
|
|
||||||
#[derive(
|
|
||||||
Clone,
|
|
||||||
Copy,
|
|
||||||
Debug,
|
|
||||||
PartialEq,
|
|
||||||
Eq,
|
|
||||||
PartialOrd,
|
|
||||||
Ord,
|
|
||||||
Hash,
|
|
||||||
Serialize,
|
|
||||||
ValueEnum,
|
|
||||||
EnumString,
|
|
||||||
Display,
|
|
||||||
AsRefStr,
|
|
||||||
IntoStaticStr,
|
|
||||||
)]
|
|
||||||
#[strum(serialize_all = "kebab-case")]
|
|
||||||
pub enum TestingPlatform {
|
|
||||||
/// The go-ethereum reference full node EVM implementation.
|
|
||||||
Geth,
|
|
||||||
/// The kitchensink runtime provides the PolkaVM (PVM) based node implementation.
|
|
||||||
Kitchensink,
|
|
||||||
/// A polkadot/Substrate based network
|
|
||||||
Zombienet,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The output format to use for the test execution output.
|
/// The output format to use for the test execution output.
|
||||||
#[derive(
|
#[derive(
|
||||||
Clone,
|
Clone,
|
||||||
|
|||||||
@@ -353,7 +353,8 @@ where
|
|||||||
.execute_account_allocation(step_path, step.as_ref())
|
.execute_account_allocation(step_path, step.as_ref())
|
||||||
.await
|
.await
|
||||||
.context("Account Allocation Step Failed"),
|
.context("Account Allocation Step Failed"),
|
||||||
}?;
|
}
|
||||||
|
.context(format!("Failure on step {step_path}"))?;
|
||||||
self.steps_executed += steps_executed;
|
self.steps_executed += steps_executed;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -172,134 +172,6 @@ impl Platform for LighthouseGethEvmSolcPlatform {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash)]
|
|
||||||
pub struct KitchensinkPolkavmResolcPlatform;
|
|
||||||
|
|
||||||
impl Platform for KitchensinkPolkavmResolcPlatform {
|
|
||||||
fn platform_identifier(&self) -> PlatformIdentifier {
|
|
||||||
PlatformIdentifier::KitchensinkPolkavmResolc
|
|
||||||
}
|
|
||||||
|
|
||||||
fn node_identifier(&self) -> NodeIdentifier {
|
|
||||||
NodeIdentifier::Kitchensink
|
|
||||||
}
|
|
||||||
|
|
||||||
fn vm_identifier(&self) -> VmIdentifier {
|
|
||||||
VmIdentifier::PolkaVM
|
|
||||||
}
|
|
||||||
|
|
||||||
fn compiler_identifier(&self) -> CompilerIdentifier {
|
|
||||||
CompilerIdentifier::Resolc
|
|
||||||
}
|
|
||||||
|
|
||||||
fn new_node(
|
|
||||||
&self,
|
|
||||||
context: Context,
|
|
||||||
) -> anyhow::Result<JoinHandle<anyhow::Result<Box<dyn EthereumNode + Send + Sync>>>> {
|
|
||||||
let genesis_configuration = AsRef::<GenesisConfiguration>::as_ref(&context);
|
|
||||||
let kitchensink_path = AsRef::<KitchensinkConfiguration>::as_ref(&context)
|
|
||||||
.path
|
|
||||||
.clone();
|
|
||||||
let genesis = genesis_configuration.genesis()?.clone();
|
|
||||||
Ok(thread::spawn(move || {
|
|
||||||
let node = SubstrateNode::new(
|
|
||||||
kitchensink_path,
|
|
||||||
SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND,
|
|
||||||
None,
|
|
||||||
context,
|
|
||||||
&[],
|
|
||||||
);
|
|
||||||
let node = spawn_node(node, genesis)?;
|
|
||||||
Ok(Box::new(node) as Box<_>)
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn new_compiler(
|
|
||||||
&self,
|
|
||||||
context: Context,
|
|
||||||
version: Option<VersionOrRequirement>,
|
|
||||||
) -> Pin<Box<dyn Future<Output = anyhow::Result<Box<dyn SolidityCompiler>>>>> {
|
|
||||||
Box::pin(async move {
|
|
||||||
let compiler = Resolc::new(context, version).await;
|
|
||||||
compiler.map(|compiler| Box::new(compiler) as Box<dyn SolidityCompiler>)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn export_genesis(&self, context: Context) -> anyhow::Result<serde_json::Value> {
|
|
||||||
let kitchensink_path = AsRef::<KitchensinkConfiguration>::as_ref(&context)
|
|
||||||
.path
|
|
||||||
.as_path();
|
|
||||||
let wallet = AsRef::<WalletConfiguration>::as_ref(&context).wallet();
|
|
||||||
let export_chainspec_command = SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND;
|
|
||||||
|
|
||||||
SubstrateNode::node_genesis(kitchensink_path, export_chainspec_command, &wallet)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash)]
|
|
||||||
pub struct KitchensinkRevmSolcPlatform;
|
|
||||||
|
|
||||||
impl Platform for KitchensinkRevmSolcPlatform {
|
|
||||||
fn platform_identifier(&self) -> PlatformIdentifier {
|
|
||||||
PlatformIdentifier::KitchensinkRevmSolc
|
|
||||||
}
|
|
||||||
|
|
||||||
fn node_identifier(&self) -> NodeIdentifier {
|
|
||||||
NodeIdentifier::Kitchensink
|
|
||||||
}
|
|
||||||
|
|
||||||
fn vm_identifier(&self) -> VmIdentifier {
|
|
||||||
VmIdentifier::Evm
|
|
||||||
}
|
|
||||||
|
|
||||||
fn compiler_identifier(&self) -> CompilerIdentifier {
|
|
||||||
CompilerIdentifier::Solc
|
|
||||||
}
|
|
||||||
|
|
||||||
fn new_node(
|
|
||||||
&self,
|
|
||||||
context: Context,
|
|
||||||
) -> anyhow::Result<JoinHandle<anyhow::Result<Box<dyn EthereumNode + Send + Sync>>>> {
|
|
||||||
let genesis_configuration = AsRef::<GenesisConfiguration>::as_ref(&context);
|
|
||||||
let kitchensink_path = AsRef::<KitchensinkConfiguration>::as_ref(&context)
|
|
||||||
.path
|
|
||||||
.clone();
|
|
||||||
let genesis = genesis_configuration.genesis()?.clone();
|
|
||||||
Ok(thread::spawn(move || {
|
|
||||||
let node = SubstrateNode::new(
|
|
||||||
kitchensink_path,
|
|
||||||
SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND,
|
|
||||||
None,
|
|
||||||
context,
|
|
||||||
&[],
|
|
||||||
);
|
|
||||||
let node = spawn_node(node, genesis)?;
|
|
||||||
Ok(Box::new(node) as Box<_>)
|
|
||||||
}))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn new_compiler(
|
|
||||||
&self,
|
|
||||||
context: Context,
|
|
||||||
version: Option<VersionOrRequirement>,
|
|
||||||
) -> Pin<Box<dyn Future<Output = anyhow::Result<Box<dyn SolidityCompiler>>>>> {
|
|
||||||
Box::pin(async move {
|
|
||||||
let compiler = Solc::new(context, version).await;
|
|
||||||
compiler.map(|compiler| Box::new(compiler) as Box<dyn SolidityCompiler>)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn export_genesis(&self, context: Context) -> anyhow::Result<serde_json::Value> {
|
|
||||||
let kitchensink_path = AsRef::<KitchensinkConfiguration>::as_ref(&context)
|
|
||||||
.path
|
|
||||||
.as_path();
|
|
||||||
let wallet = AsRef::<WalletConfiguration>::as_ref(&context).wallet();
|
|
||||||
let export_chainspec_command = SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND;
|
|
||||||
|
|
||||||
SubstrateNode::node_genesis(kitchensink_path, export_chainspec_command, &wallet)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash)]
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Default, Hash)]
|
||||||
pub struct ReviveDevNodePolkavmResolcPlatform;
|
pub struct ReviveDevNodePolkavmResolcPlatform;
|
||||||
|
|
||||||
@@ -557,12 +429,6 @@ impl From<PlatformIdentifier> for Box<dyn Platform> {
|
|||||||
PlatformIdentifier::LighthouseGethEvmSolc => {
|
PlatformIdentifier::LighthouseGethEvmSolc => {
|
||||||
Box::new(LighthouseGethEvmSolcPlatform) as Box<_>
|
Box::new(LighthouseGethEvmSolcPlatform) as Box<_>
|
||||||
}
|
}
|
||||||
PlatformIdentifier::KitchensinkPolkavmResolc => {
|
|
||||||
Box::new(KitchensinkPolkavmResolcPlatform) as Box<_>
|
|
||||||
}
|
|
||||||
PlatformIdentifier::KitchensinkRevmSolc => {
|
|
||||||
Box::new(KitchensinkRevmSolcPlatform) as Box<_>
|
|
||||||
}
|
|
||||||
PlatformIdentifier::ReviveDevNodePolkavmResolc => {
|
PlatformIdentifier::ReviveDevNodePolkavmResolc => {
|
||||||
Box::new(ReviveDevNodePolkavmResolcPlatform) as Box<_>
|
Box::new(ReviveDevNodePolkavmResolcPlatform) as Box<_>
|
||||||
}
|
}
|
||||||
@@ -584,12 +450,6 @@ impl From<PlatformIdentifier> for &dyn Platform {
|
|||||||
PlatformIdentifier::LighthouseGethEvmSolc => {
|
PlatformIdentifier::LighthouseGethEvmSolc => {
|
||||||
&LighthouseGethEvmSolcPlatform as &dyn Platform
|
&LighthouseGethEvmSolcPlatform as &dyn Platform
|
||||||
}
|
}
|
||||||
PlatformIdentifier::KitchensinkPolkavmResolc => {
|
|
||||||
&KitchensinkPolkavmResolcPlatform as &dyn Platform
|
|
||||||
}
|
|
||||||
PlatformIdentifier::KitchensinkRevmSolc => {
|
|
||||||
&KitchensinkRevmSolcPlatform as &dyn Platform
|
|
||||||
}
|
|
||||||
PlatformIdentifier::ReviveDevNodePolkavmResolc => {
|
PlatformIdentifier::ReviveDevNodePolkavmResolc => {
|
||||||
&ReviveDevNodePolkavmResolcPlatform as &dyn Platform
|
&ReviveDevNodePolkavmResolcPlatform as &dyn Platform
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -92,7 +92,6 @@ impl SubstrateNode {
|
|||||||
const SUBSTRATE_LOG_ENV: &str = "error,evm=debug,sc_rpc_server=info,runtime::revive=debug";
|
const SUBSTRATE_LOG_ENV: &str = "error,evm=debug,sc_rpc_server=info,runtime::revive=debug";
|
||||||
const PROXY_LOG_ENV: &str = "info,eth-rpc=debug";
|
const PROXY_LOG_ENV: &str = "info,eth-rpc=debug";
|
||||||
|
|
||||||
pub const KITCHENSINK_EXPORT_CHAINSPEC_COMMAND: &str = "export-chain-spec";
|
|
||||||
pub const REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND: &str = "build-spec";
|
pub const REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND: &str = "build-spec";
|
||||||
|
|
||||||
pub fn new(
|
pub fn new(
|
||||||
@@ -333,7 +332,7 @@ impl SubstrateNode {
|
|||||||
trace!("Waiting for chainspec export");
|
trace!("Waiting for chainspec export");
|
||||||
if !output.status.success() {
|
if !output.status.success() {
|
||||||
anyhow::bail!(
|
anyhow::bail!(
|
||||||
"Substrate-node export-chain-spec failed: {}",
|
"substrate-node export-chain-spec failed: {}",
|
||||||
String::from_utf8_lossy(&output.stderr)
|
String::from_utf8_lossy(&output.stderr)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -800,8 +799,8 @@ mod tests {
|
|||||||
|
|
||||||
let context = test_config();
|
let context = test_config();
|
||||||
let mut node = SubstrateNode::new(
|
let mut node = SubstrateNode::new(
|
||||||
context.kitchensink_configuration.path.clone(),
|
context.revive_dev_node_configuration.path.clone(),
|
||||||
SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND,
|
SubstrateNode::REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND,
|
||||||
None,
|
None,
|
||||||
&context,
|
&context,
|
||||||
&[],
|
&[],
|
||||||
@@ -823,6 +822,7 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
|
#[ignore = "Ignored since it takes a long time to run"]
|
||||||
async fn node_mines_simple_transfer_transaction_and_returns_receipt() {
|
async fn node_mines_simple_transfer_transaction_and_returns_receipt() {
|
||||||
// Arrange
|
// Arrange
|
||||||
let (context, node) = shared_state();
|
let (context, node) = shared_state();
|
||||||
@@ -839,11 +839,14 @@ mod tests {
|
|||||||
.value(U256::from(100_000_000_000_000u128));
|
.value(U256::from(100_000_000_000_000u128));
|
||||||
|
|
||||||
// Act
|
// Act
|
||||||
let receipt = provider.send_transaction(transaction).await;
|
let mut pending_transaction = provider
|
||||||
|
.send_transaction(transaction)
|
||||||
|
.await
|
||||||
|
.expect("Submission failed");
|
||||||
|
pending_transaction.set_timeout(Some(Duration::from_secs(60)));
|
||||||
|
|
||||||
// Assert
|
// Assert
|
||||||
let _ = receipt
|
let _ = pending_transaction
|
||||||
.expect("Failed to send the transfer transaction")
|
|
||||||
.get_receipt()
|
.get_receipt()
|
||||||
.await
|
.await
|
||||||
.expect("Failed to get the receipt for the transfer");
|
.expect("Failed to get the receipt for the transfer");
|
||||||
@@ -867,8 +870,8 @@ mod tests {
|
|||||||
|
|
||||||
let context = test_config();
|
let context = test_config();
|
||||||
let mut dummy_node = SubstrateNode::new(
|
let mut dummy_node = SubstrateNode::new(
|
||||||
context.kitchensink_configuration.path.clone(),
|
context.revive_dev_node_configuration.path.clone(),
|
||||||
SubstrateNode::KITCHENSINK_EXPORT_CHAINSPEC_COMMAND,
|
SubstrateNode::REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND,
|
||||||
None,
|
None,
|
||||||
&context,
|
&context,
|
||||||
&[],
|
&[],
|
||||||
@@ -961,7 +964,7 @@ mod tests {
|
|||||||
|
|
||||||
assert!(
|
assert!(
|
||||||
version.starts_with("substrate-node"),
|
version.starts_with("substrate-node"),
|
||||||
"Expected Substrate-node version string, got: {version}"
|
"Expected substrate-node version string, got: {version}"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -356,7 +356,7 @@ impl ZombienetNode {
|
|||||||
|
|
||||||
if !output.status.success() {
|
if !output.status.success() {
|
||||||
anyhow::bail!(
|
anyhow::bail!(
|
||||||
"Substrate-node export-chain-spec failed: {}",
|
"substrate-node export-chain-spec failed: {}",
|
||||||
String::from_utf8_lossy(&output.stderr)
|
String::from_utf8_lossy(&output.stderr)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -858,6 +858,7 @@ mod tests {
|
|||||||
#[tokio::test]
|
#[tokio::test]
|
||||||
#[ignore = "Ignored for the time being"]
|
#[ignore = "Ignored for the time being"]
|
||||||
async fn test_transfer_transaction_should_return_receipt() {
|
async fn test_transfer_transaction_should_return_receipt() {
|
||||||
|
// Arrange
|
||||||
let (ctx, node) = new_node().await;
|
let (ctx, node) = new_node().await;
|
||||||
|
|
||||||
let provider = node.provider().await.expect("Failed to create provider");
|
let provider = node.provider().await.expect("Failed to create provider");
|
||||||
@@ -866,9 +867,15 @@ mod tests {
|
|||||||
.to(account_address)
|
.to(account_address)
|
||||||
.value(U256::from(100_000_000_000_000u128));
|
.value(U256::from(100_000_000_000_000u128));
|
||||||
|
|
||||||
let receipt = provider.send_transaction(transaction).await;
|
// Act
|
||||||
let _ = receipt
|
let mut pending_transaction = provider
|
||||||
.expect("Failed to send the transfer transaction")
|
.send_transaction(transaction)
|
||||||
|
.await
|
||||||
|
.expect("Submission failed");
|
||||||
|
pending_transaction.set_timeout(Some(Duration::from_secs(60)));
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
let _ = pending_transaction
|
||||||
.get_receipt()
|
.get_receipt()
|
||||||
.await
|
.await
|
||||||
.expect("Failed to get the receipt for the transfer");
|
.expect("Failed to get the receipt for the transfer");
|
||||||
|
|||||||
+1
-1
Submodule polkadot-sdk updated: dc3d0e5ab7...f268e32768
+2
-5
@@ -22,7 +22,6 @@ POLKADOT_SDK_DIR="${1:-}"
|
|||||||
# Binary paths (default to names in $PATH)
|
# Binary paths (default to names in $PATH)
|
||||||
REVIVE_DEV_NODE_BIN="revive-dev-node"
|
REVIVE_DEV_NODE_BIN="revive-dev-node"
|
||||||
ETH_RPC_BIN="eth-rpc"
|
ETH_RPC_BIN="eth-rpc"
|
||||||
SUBSTRATE_NODE_BIN="substrate-node"
|
|
||||||
|
|
||||||
echo -e "${GREEN}=== Revive Differential Tests Quick Start ===${NC}"
|
echo -e "${GREEN}=== Revive Differential Tests Quick Start ===${NC}"
|
||||||
echo ""
|
echo ""
|
||||||
@@ -50,14 +49,13 @@ if [ -n "$POLKADOT_SDK_DIR" ]; then
|
|||||||
|
|
||||||
REVIVE_DEV_NODE_BIN="$POLKADOT_SDK_DIR/target/release/revive-dev-node"
|
REVIVE_DEV_NODE_BIN="$POLKADOT_SDK_DIR/target/release/revive-dev-node"
|
||||||
ETH_RPC_BIN="$POLKADOT_SDK_DIR/target/release/eth-rpc"
|
ETH_RPC_BIN="$POLKADOT_SDK_DIR/target/release/eth-rpc"
|
||||||
SUBSTRATE_NODE_BIN="$POLKADOT_SDK_DIR/target/release/substrate-node"
|
|
||||||
|
|
||||||
if [ ! -x "$REVIVE_DEV_NODE_BIN" ] || [ ! -x "$ETH_RPC_BIN" ] || [ ! -x "$SUBSTRATE_NODE_BIN" ]; then
|
if [ ! -x "$REVIVE_DEV_NODE_BIN" ] || [ ! -x "$ETH_RPC_BIN" ]; then
|
||||||
echo -e "${YELLOW}Required binaries not found in release target. Building...${NC}"
|
echo -e "${YELLOW}Required binaries not found in release target. Building...${NC}"
|
||||||
(cd "$POLKADOT_SDK_DIR" && cargo build --release --package staging-node-cli --package pallet-revive-eth-rpc --package revive-dev-node)
|
(cd "$POLKADOT_SDK_DIR" && cargo build --release --package staging-node-cli --package pallet-revive-eth-rpc --package revive-dev-node)
|
||||||
fi
|
fi
|
||||||
|
|
||||||
for bin in "$REVIVE_DEV_NODE_BIN" "$ETH_RPC_BIN" "$SUBSTRATE_NODE_BIN"; do
|
for bin in "$REVIVE_DEV_NODE_BIN" "$ETH_RPC_BIN"; do
|
||||||
if [ ! -x "$bin" ]; then
|
if [ ! -x "$bin" ]; then
|
||||||
echo -e "${RED}Expected binary not found after build: $bin${NC}"
|
echo -e "${RED}Expected binary not found after build: $bin${NC}"
|
||||||
exit 1
|
exit 1
|
||||||
@@ -84,7 +82,6 @@ RUST_LOG="info,alloy_pubsub::service=error" ./target/release/retester test \
|
|||||||
--concurrency.number-of-threads 5 \
|
--concurrency.number-of-threads 5 \
|
||||||
--concurrency.number-of-concurrent-tasks 500 \
|
--concurrency.number-of-concurrent-tasks 500 \
|
||||||
--wallet.additional-keys 100000 \
|
--wallet.additional-keys 100000 \
|
||||||
--kitchensink.path "$SUBSTRATE_NODE_BIN" \
|
|
||||||
--revive-dev-node.path "$REVIVE_DEV_NODE_BIN" \
|
--revive-dev-node.path "$REVIVE_DEV_NODE_BIN" \
|
||||||
--eth-rpc.path "$ETH_RPC_BIN" \
|
--eth-rpc.path "$ETH_RPC_BIN" \
|
||||||
> logs.log \
|
> logs.log \
|
||||||
|
|||||||
@@ -0,0 +1,226 @@
|
|||||||
|
"""
|
||||||
|
This script is used to turn the JSON report produced by the revive differential tests tool into an
|
||||||
|
easy to consume markdown document for the purpose of reporting this information in the Polkadot SDK
|
||||||
|
CI. The full models used in the JSON report can be found in the revive differential tests repo and
|
||||||
|
the models used in this script are just a partial reproduction of the full report models.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import TypedDict, Literal, Union
|
||||||
|
|
||||||
|
import json, io
|
||||||
|
|
||||||
|
|
||||||
|
class Report(TypedDict):
    """Top-level model of the JSON report produced by the differential tests tool.

    This is a partial reproduction of the full report model; only the fields that
    this script consumes are declared.
    """

    # The context the tool was run in (only the test context is modeled here).
    context: "Context"
    # Nested mapping: metadata file path -> mode string -> case index -> case report.
    execution_information: dict[
        "MetadataFilePathString",
        dict["ModeString", dict["CaseIdxString", "CaseReport"]],
    ]
class Context(TypedDict):
    """The report's context object; only the `Test` context is of interest here."""

    # Note: the key is capitalized in the JSON report, hence the non-snake_case name.
    Test: "TestContext"
class TestContext(TypedDict):
    """The test-run context; only the corpus configuration is used by this script."""

    corpus_configuration: "CorpusConfiguration"
class CorpusConfiguration(TypedDict):
    """The corpus configuration; carries the test specifiers the run was given."""

    test_specifiers: list["TestSpecifier"]
class CaseReport(TypedDict):
    """The report for a single test case; only its status is consumed here."""

    status: "CaseStatus"
class CaseStatusSuccess(TypedDict):
    """Status variant for a case that succeeded."""

    # Discriminant used to tell the variants of `CaseStatus` apart.
    status: Literal["Succeeded"]
    steps_executed: int
class CaseStatusFailure(TypedDict):
    """Status variant for a case that failed, with a human-readable reason."""

    # Discriminant used to tell the variants of `CaseStatus` apart.
    status: Literal["Failed"]
    reason: str
class CaseStatusIgnored(TypedDict):
    """Status variant for a case that was ignored, with the reason it was skipped."""

    # Discriminant used to tell the variants of `CaseStatus` apart.
    status: Literal["Ignored"]
    reason: str
# Type aliases used purely for readability in the nested report mappings above.
# The variants of `CaseStatus` are discriminated by their literal `status` field.

CaseStatus = Union[CaseStatusSuccess, CaseStatusFailure, CaseStatusIgnored]
"""A union type of all of the possible statuses that could be reported for a case."""

TestSpecifier = str
"""A test specifier string. For example resolc-compiler-tests/fixtures/solidity/test.json::0::Y+"""

ModeString = str
"""The mode string. For example Y+ >=0.8.13"""

MetadataFilePathString = str
"""The path to a metadata file. For example resolc-compiler-tests/fixtures/solidity/test.json"""

CaseIdxString = str
"""The index of a case as a string. For example '0'"""
def path_relative_to_resolc_compiler_test_directory(path: str) -> str:
    """
    Return the given path relative to the resolc-compiler-tests solidity directory.

    Example:
        Input: ~/polkadot-sdk/revive-differential-tests/resolc-compiler-tests/fixtures/solidity/test.json
        Output: test.json

    If the marker directory does not occur in the path, the path is returned
    unchanged apart from leading/trailing slashes being stripped, which also makes
    this function idempotent.
    """
    # Everything after the last occurrence of the marker is the relative path; the
    # surrounding slashes are stripped so the result never starts with "/".
    return path.split("resolc-compiler-tests/fixtures/solidity")[-1].strip("/")
def _normalized_mode(mode_string: str) -> str:
    """Translate the optimization-level suffixes into the shorthand used in test specifiers."""
    return mode_string.replace(" M3", "+").replace(" M0", "-")


def _iter_cases(report: "Report"):
    """Yield (metadata_file_path, mode_string, case_idx_string, case_report) for every case."""
    for metadata_file_path, mode_to_case_mapping in report["execution_information"].items():
        for mode_string, case_idx_to_report_mapping in mode_to_case_mapping.items():
            for case_idx_string, case_report in case_idx_to_report_mapping.items():
                yield metadata_file_path, mode_string, case_idx_string, case_report


def main() -> None:
    """
    Read `report.json` (produced by the differential tests tool) from the current
    directory and write a markdown summary of it to `report.md`.

    Raises:
        Exception: if a case status unknown to this script is encountered.
    """
    with open("report.json", "r") as file:
        report: Report = json.load(file)

    # `with` guarantees the document is flushed and closed even on error.
    with open("report.md", "w") as markdown_document:
        print("# Differential Tests Results", file=markdown_document)

        # All of the test specifiers from the report, relative to the tests directory.
        test_specifiers: list[str] = [
            path_relative_to_resolc_compiler_test_directory(specifier)
            for specifier in report["context"]["Test"]["corpus_configuration"][
                "test_specifiers"
            ]
        ]
        print("## Specified Tests", file=markdown_document)
        for test_specifier in test_specifiers:
            print(f"* `{test_specifier}`", file=markdown_document)

        # Tally the case statuses, failing loudly on any status this script doesn't know.
        counts: dict[str, int] = {"Succeeded": 0, "Failed": 0, "Ignored": 0}
        for _, _, _, case_report in _iter_cases(report):
            status: CaseStatus = case_report["status"]
            if status["status"] not in counts:
                raise Exception(
                    f"Encountered a status that's unknown to the script: {status}"
                )
            counts[status["status"]] += 1

        print("## Counts", file=markdown_document)
        print(
            f"* **Total Number of Test Cases:** {sum(counts.values())}",
            file=markdown_document,
        )
        print(
            f"* **Total Number of Successes:** {counts['Succeeded']}",
            file=markdown_document,
        )
        print(
            f"* **Total Number of Failures:** {counts['Failed']}",
            file=markdown_document,
        )
        print(
            f"* **Total Number of Ignores:** {counts['Ignored']}",
            file=markdown_document,
        )

        # Group the successful cases as (relative metadata path -> case idx -> modes) so
        # that a failure in one mode can note the modes where the same case succeeded.
        successful_cases: dict[
            MetadataFilePathString, dict[CaseIdxString, set[ModeString]]
        ] = {}
        for metadata_file_path, mode_string, case_idx_string, case_report in _iter_cases(
            report
        ):
            if case_report["status"]["status"] != "Succeeded":
                continue
            relative_path = path_relative_to_resolc_compiler_test_directory(
                metadata_file_path
            )
            successful_cases.setdefault(relative_path, {}).setdefault(
                case_idx_string, set()
            ).add(_normalized_mode(mode_string))

        print("## Failures", file=markdown_document)
        # Implicit string concatenation (instead of backslash continuations inside the
        # literal) keeps source indentation out of the emitted markdown.
        print(
            "The test specifiers seen in this section have the format "
            "'path::case_idx::compilation_mode' and they're compatible with the revive "
            "differential tests framework and can be specified to it directly in the "
            "same way that they're provided through the `--test` argument of the "
            "framework.\n",
            file=markdown_document,
        )
        print(
            "The failures are provided in an expandable section to ensure that the PR "
            "does not get polluted with information. Please click on the section below "
            "for more information",
            file=markdown_document,
        )
        print(
            "<details><summary>Detailed Differential Tests Failure Information</summary>\n\n",
            file=markdown_document,
        )
        print("| Test Specifier | Failure Reason | Note |", file=markdown_document)
        print("| -- | -- | -- |", file=markdown_document)

        for metadata_file_path, mode_string, case_idx_string, case_report in _iter_cases(
            report
        ):
            status = case_report["status"]
            if status["status"] != "Failed":
                continue

            relative_path = path_relative_to_resolc_compiler_test_directory(
                metadata_file_path
            )
            mode = _normalized_mode(mode_string)
            # Newlines would break the markdown table layout.
            failure_reason = status["reason"].replace("\n", " ")

            # Pure lookup with `.get` — unlike `setdefault`, this doesn't mutate the
            # grouping built above.
            succeeded_modes = successful_cases.get(relative_path, {}).get(
                case_idx_string, set()
            )
            note = (
                f"This test case succeeded with other compilation modes: {succeeded_modes}"
                if succeeded_modes
                else ""
            )

            test_specifier = f"{relative_path}::{case_idx_string}::{mode}"
            print(
                f"| `{test_specifier}` | `{failure_reason}` | {note} |",
                file=markdown_document,
            )

        print("\n\n</details>", file=markdown_document)
# Only run the conversion when this file is executed as a script (not on import).
if __name__ == "__main__":
    main()
Reference in New Issue
Block a user