Compare commits


28 Commits

Author SHA1 Message Date
Cyrill Leutwiler c5f87e9b2a remove commented out code
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-31 10:22:01 +02:00
Cyrill Leutwiler c0f57466c1 update dependencies
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-31 10:20:42 +02:00
Cyrill Leutwiler 9505e30fe1 the localsigner wallet
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-31 10:19:12 +02:00
Cyrill Leutwiler ea17166448 make node interactions generic
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-31 09:50:55 +02:00
Cyrill Leutwiler 3edd72850f the solidity comment metadata parser
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-26 15:57:04 +01:00
Cyrill Leutwiler 95d2afde05 the node pool
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-26 11:58:38 +01:00
Cyrill Leutwiler 34b8879b15 deploy tx
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-25 17:54:40 +01:00
Cyrill Leutwiler a835754d41 check the supported solc version
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-25 11:20:00 +01:00
Cyrill Leutwiler 382b944bd1 set file permissions for downloaded solc
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-25 07:33:24 +01:00
Cyrill Leutwiler c69a87238d the solc download per target helper
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-24 22:33:37 +01:00
Cyrill Leutwiler 97156ed21e complete the M-L format parser
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-24 16:49:07 +01:00
xermicus ad4901550d the test driver
Signed-off-by: xermicus <bigcyrill@hotmail.com>
2025-03-24 15:48:26 +01:00
xermicus 9bba37b7a9 the geth version
Signed-off-by: xermicus <bigcyrill@hotmail.com>
2025-03-24 09:42:51 +01:00
Cyrill Leutwiler 33c5adbc22 add dev account to config
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-24 09:18:30 +01:00
xermicus bfb96bf67d inject workdir
Signed-off-by: xermicus <bigcyrill@hotmail.com>
2025-03-23 15:36:49 +01:00
Cyrill Leutwiler 487eefe908 spawn geth node
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-23 14:12:08 +01:00
xermicus 6cd4519d89 initialize geth via the standard json
Signed-off-by: xermicus <bigcyrill@hotmail.com>
2025-03-23 00:05:53 +01:00
xermicus f9a0542d49 the node interaction interface
Signed-off-by: xermicus <bigcyrill@hotmail.com>
2025-03-22 19:41:24 +01:00
Cyrill Leutwiler 3b713ad2cb building for EVM works with complex cases
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-21 18:10:17 +01:00
xermicus 11bd08df4e the solc github releases downloader
Signed-off-by: xermicus <bigcyrill@hotmail.com>
2025-03-21 15:20:50 +01:00
xermicus 84a5647a8b the solc binaries list downloader
Signed-off-by: xermicus <bigcyrill@hotmail.com>
2025-03-21 14:32:15 +01:00
xermicus eb685fc668 wip
Signed-off-by: xermicus <bigcyrill@hotmail.com>
2025-03-21 09:16:31 +01:00
xermicus 6a0d705371 the compiler interface
Signed-off-by: xermicus <bigcyrill@hotmail.com>
2025-03-20 23:26:39 +01:00
xermicus d6c2535853 shorter names
Signed-off-by: xermicus <bigcyrill@hotmail.com>
2025-03-20 13:10:07 +01:00
xermicus cf83a8e34b init node interaction crate
Signed-off-by: xermicus <bigcyrill@hotmail.com>
2025-03-20 12:48:04 +01:00
Cyrill Leutwiler 67f068ca12 parsing complex tests works modulo the contract addresses in calldata
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-19 16:23:04 +01:00
Cyrill Leutwiler d08d6fd66f the metadata parser
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-19 09:49:45 +01:00
Cyrill Leutwiler 42d6f04f2d the mode mini parser
Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
2025-03-18 18:16:40 +01:00
35 changed files with 587 additions and 4106 deletions
-128
@@ -1,128 +0,0 @@
name: Test workflow
on:
push:
branches:
- main
pull_request:
branches:
- main
types: [opened, synchronize]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
env:
CARGO_TERM_COLOR: always
jobs:
cache-polkadot:
name: Build and cache Polkadot binaries on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-24.04, macos-14]
steps:
- name: Checkout repo and submodules
uses: actions/checkout@v4
with:
submodules: recursive
- name: Install dependencies (Linux)
if: matrix.os == 'ubuntu-24.04'
run: |
sudo apt-get update
sudo apt-get install -y protobuf-compiler clang libclang-dev
rustup target add wasm32-unknown-unknown
rustup component add rust-src
- name: Install dependencies (macOS)
if: matrix.os == 'macos-14'
run: |
brew install protobuf
rustup target add wasm32-unknown-unknown
rustup component add rust-src
- name: Cache binaries
id: cache
uses: actions/cache@v3
with:
path: |
~/.cargo/bin/substrate-node
~/.cargo/bin/eth-rpc
key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}
- name: Build substrate-node
if: steps.cache.outputs.cache-hit != 'true'
run: |
cd polkadot-sdk
cargo install --locked --force --profile=production --path substrate/bin/node/cli --bin substrate-node --features cli
- name: Build eth-rpc
if: steps.cache.outputs.cache-hit != 'true'
run: |
cd polkadot-sdk
cargo install --path substrate/frame/revive/rpc --bin eth-rpc
ci:
name: CI on ${{ matrix.os }}
needs: cache-polkadot
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-24.04, macos-14]
steps:
- name: Checkout repo
uses: actions/checkout@v4
- name: Restore binaries from cache
uses: actions/cache@v3
with:
path: |
~/.cargo/bin/substrate-node
~/.cargo/bin/eth-rpc
key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}
- name: Setup Rust toolchain
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
rustflags: ""
- name: Add wasm32 target
run: |
rustup target add wasm32-unknown-unknown
rustup component add rust-src
- name: Install Geth on Ubuntu
if: matrix.os == 'ubuntu-24.04'
run: |
sudo add-apt-repository -y ppa:ethereum/ethereum
sudo apt-get update
sudo apt-get install -y ethereum protobuf-compiler
- name: Install Geth on macOS
if: matrix.os == 'macos-14'
run: |
brew tap ethereum/ethereum
brew install ethereum protobuf
- name: Machete
uses: bnjbvr/cargo-machete@v0.7.1
- name: Format
run: make format
- name: Clippy
run: make clippy
- name: Check substrate-node version
run: substrate-node --version
- name: Check eth-rpc version
run: eth-rpc --version
- name: Test cargo workspace
run: make test
-3
@@ -1,3 +0,0 @@
[submodule "polkadot-sdk"]
path = polkadot-sdk
url = https://github.com/paritytech/polkadot-sdk.git
Generated
+447 -2300
File diff suppressed because it is too large
+8 -13
@@ -20,35 +20,30 @@ revive-dt-format = { version = "0.1.0", path = "crates/format" }
revive-dt-node = { version = "0.1.0", path = "crates/node" }
revive-dt-node-interaction = { version = "0.1.0", path = "crates/node-interaction" }
revive-dt-node-pool = { version = "0.1.0", path = "crates/node-pool" }
revive-dt-report = { version = "0.1.0", path = "crates/report" }
revive-dt-solc-binaries = { version = "0.1.0", path = "crates/solc-binaries" }
anyhow = "1.0"
clap = { version = "4", features = ["derive"] }
env_logger = "0.11.8"
env_logger = "0.11.7"
hex = "0.4.3"
reqwest = { version = "0.12.15", features = ["blocking", "json"] }
log = "0.4.27"
log = "0.4.26"
once_cell = "1.21"
rayon = { version = "1.10" }
semver = { version = "1.0", features = ["serde"] }
serde = { version = "1.0", default-features = false, features = ["derive"] }
serde_json = { version = "1.0", default-features = false, features = ["arbitrary_precision", "std"] }
sha2 = { version = "0.10.9" }
sp-core = "36.1.0"
sp-runtime = "41.1.0"
temp-dir = { version = "0.1.16" }
tempfile = "3.3"
sha2 = { version = "0.10.8" }
temp-dir = { version = "0.1.14" }
tokio = { version = "1", default-features = false, features = ["rt-multi-thread"] }
uuid = { version = "1.8", features = ["v4"] }
# revive compiler
revive-solc-json-interface = { git = "https://github.com/paritytech/revive", rev = "3389865af7c3ff6f29a586d82157e8bc573c1a8e" }
revive-common = { git = "https://github.com/paritytech/revive", rev = "3389865af7c3ff6f29a586d82157e8bc573c1a8e" }
revive-differential = { git = "https://github.com/paritytech/revive", rev = "3389865af7c3ff6f29a586d82157e8bc573c1a8e" }
revive-solc-json-interface = { git = "https://github.com/paritytech/revive", rev = "497dae2494dabe12d1af32d6d687122903cb2ada" }
revive-common = { git = "https://github.com/paritytech/revive", rev = "497dae2494dabe12d1af32d6d687122903cb2ada" }
revive-differential = { git = "https://github.com/paritytech/revive", rev = "497dae2494dabe12d1af32d6d687122903cb2ada" }
[workspace.dependencies.alloy]
version = "1.0"
version = "0.13.0"
default-features = false
features = [
"json-abi",
-15
@@ -1,15 +0,0 @@
.PHONY: format clippy test machete
format:
cargo fmt --all -- --check
clippy:
cargo clippy --all-features --workspace -- --deny warnings
machete:
cargo install cargo-machete
cargo machete crates
test: format clippy machete
cargo test --workspace -- --nocapture
+1 -51
@@ -1,52 +1,2 @@
# revive-differential-tests
The revive differential testing framework lets you define smart contract tests declaratively, in order to compile and execute them against different Ethereum-compatible blockchain implementations. This is useful to:
- Analyze observable differences in contract compilation and execution across different blockchain implementations, including contract storage, account balances, transaction output, and emitted events on a per-transaction basis.
- Collect and compare benchmark metrics such as code size, gas usage, or transactions per second (TPS) across different blockchain implementations.
- Ensure reproducible contract builds across multiple compiler implementations or multiple host platforms.
- Implement end-to-end regression tests for Ethereum-compatible smart contract stacks.
# Declarative test format
For now, the format used to write tests is the [matter-labs era compiler format](https://github.com/matter-labs/era-compiler-tests?tab=readme-ov-file#matter-labs-simplecomplex-format). This allows us to re-use many tests from their corpora.
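To make the shape of such tests concrete, here is a minimal sketch of how a declarative test file deserializes. The struct definitions are simplified assumptions for illustration; only the field names `cases`, `inputs`, `instance`, and `method` mirror the real parser types in the `revive-dt-format` crate shown further down this diff.
```rust
use serde::Deserialize;

// Simplified, hypothetical mirror of the test metadata layout.
#[derive(Debug, Deserialize)]
struct Metadata {
    cases: Vec<Case>,
}

#[derive(Debug, Deserialize)]
struct Case {
    inputs: Vec<Input>,
}

#[derive(Debug, Deserialize)]
struct Input {
    instance: String,
    method: String,
}

fn main() -> anyhow::Result<()> {
    let raw = r#"{"cases":[{"inputs":[{"instance":"Test","method":"deployer"}]}]}"#;
    let metadata: Metadata = serde_json::from_str(raw)?;
    println!("{metadata:?}");
    Ok(())
}
```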
# Dependencies
The following is needed to execute `geth` vs. substrate node differential tests:
- [`geth` node](https://github.com/ethereum/go-ethereum/)
- `kitchensink` node and revive ETH RPC binary
- [`solc` compiler](https://github.com/ethereum/solidity)
- [`resolc` compiler](https://github.com/paritytech/revive)
For the compilers and the `geth` node, please check the project help on how to obtain them.
To install the `kitchensink` node and the ETH RPC binary:
```
git submodule update --init --recursive
cd polkadot-sdk
cargo install --locked --force --profile=production --path substrate/bin/node/cli --bin substrate-node --features cli
cargo install --path substrate/frame/revive/rpc --bin eth-rpc
```
# The `retester` utility
The `retester` helper utility is used to run the tests. To get an idea of what `retester` can do, consult its command line help:
```
cargo run -p revive-dt-core -- --help
```
For example, to run the [complex Solidity tests](https://github.com/matter-labs/era-compiler-tests/tree/main/solidity/complex), define a corpus structure as follows:
```json
{
"name": "ML Solidity Complex",
"path": "/path/to/era-compiler-tests/solidity/complex"
}
```
Assuming this is saved in an `ml-solidity-complex.json` file, the following command will try to compile and execute the tests found inside the corpus:
```bash
RUST_LOG=debug cargo r --release -p revive-dt-core -- --corpus ml-solidity-complex.json
```
revive differential testing framework
-3
@@ -11,9 +11,6 @@ rust-version.workspace = true
[dependencies]
anyhow = { workspace = true }
revive-solc-json-interface = { workspace = true }
revive-dt-config = { workspace = true }
revive-dt-solc-binaries = { workspace = true }
revive-common = { workspace = true }
semver = { workspace = true }
serde_json = { workspace = true }
log = { workspace = true }
-14
@@ -9,8 +9,6 @@ use std::{
path::{Path, PathBuf},
};
use revive_dt_config::Arguments;
use revive_common::EVMVersion;
use revive_solc_json_interface::{
SolcStandardJsonInput, SolcStandardJsonInputLanguage, SolcStandardJsonInputSettings,
@@ -35,8 +33,6 @@ pub trait SolidityCompiler {
) -> anyhow::Result<CompilerOutput<Self::Options>>;
fn new(solc_executable: PathBuf) -> Self;
fn get_compiler_executable(config: &Arguments, version: Version) -> anyhow::Result<PathBuf>;
}
/// The generic compilation input configuration.
@@ -48,12 +44,8 @@ pub struct CompilerInput<T: PartialEq + Eq + Hash> {
/// The generic compilation output configuration.
pub struct CompilerOutput<T: PartialEq + Eq + Hash> {
/// The solc standard JSON input.
pub input: CompilerInput<T>,
/// The produced solc standard JSON output.
pub output: SolcStandardJsonOutput,
/// The error message in case the compiler returns abnormally.
pub error: Option<String>,
}
impl<T> PartialEq for CompilerInput<T>
@@ -114,7 +106,6 @@ where
false,
),
None,
None,
),
},
extra_options: Default::default(),
@@ -161,9 +152,4 @@ where
input: self.input,
})
}
/// Returns the compiler JSON input.
pub fn input(&self) -> SolcStandardJsonInput {
self.input.clone()
}
}
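For orientation, the following hypothetical helper sketches how a `SolidityCompiler` implementation is meant to be driven. The helper itself is an assumption; only the three trait methods it calls, and the imports, appear in this diff.
```rust
use revive_dt_compiler::{CompilerInput, CompilerOutput, SolidityCompiler};
use revive_dt_config::Arguments;

// Hypothetical driver helper, shown for illustration only.
fn compile_with<C: SolidityCompiler>(
    config: &Arguments,
    version: semver::Version,
    input: CompilerInput<C::Options>,
) -> anyhow::Result<CompilerOutput<C::Options>> {
    // Resolve the compiler binary for the requested solc version,
    // wrap it in the concrete implementation, and run the build.
    let executable = C::get_compiler_executable(config, version)?;
    C::new(executable).build(input)
}
```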
+2 -86
@@ -1,86 +1,2 @@
//! Implements the [SolidityCompiler] trait with `resolc` for
//! compiling contracts to PolkaVM (PVM) bytecode.
use std::{
path::PathBuf,
process::{Command, Stdio},
};
use crate::{CompilerInput, CompilerOutput, SolidityCompiler};
use revive_dt_config::Arguments;
use revive_solc_json_interface::SolcStandardJsonOutput;
/// A wrapper around the `resolc` binary, emitting PVM-compatible bytecode.
pub struct Resolc {
/// Path to the `resolc` executable
resolc_path: PathBuf,
}
impl SolidityCompiler for Resolc {
type Options = Vec<String>;
fn build(
&self,
input: CompilerInput<Self::Options>,
) -> anyhow::Result<CompilerOutput<Self::Options>> {
let mut child = Command::new(&self.resolc_path)
.arg("--standard-json")
.args(&input.extra_options)
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()?;
let stdin_pipe = child.stdin.as_mut().expect("stdin must be piped");
serde_json::to_writer(stdin_pipe, &input.input)?;
let json_in = serde_json::to_string_pretty(&input.input)?;
let output = child.wait_with_output()?;
let stdout = output.stdout;
let stderr = output.stderr;
if !output.status.success() {
let message = String::from_utf8_lossy(&stderr);
log::error!(
"resolc failed exit={} stderr={} JSON-in={} ",
output.status,
&message,
json_in,
);
return Ok(CompilerOutput {
input,
output: Default::default(),
error: Some(message.into()),
});
}
let parsed: SolcStandardJsonOutput = serde_json::from_slice(&stdout).map_err(|e| {
anyhow::anyhow!(
"failed to parse resolc JSON output: {e}\nstderr: {}",
String::from_utf8_lossy(&stderr)
)
})?;
Ok(CompilerOutput {
input,
output: parsed,
error: None,
})
}
fn new(resolc_path: PathBuf) -> Self {
Resolc { resolc_path }
}
fn get_compiler_executable(
config: &Arguments,
_version: semver::Version,
) -> anyhow::Result<PathBuf> {
if !config.resolc.as_os_str().is_empty() {
return Ok(config.resolc.clone());
}
Ok(PathBuf::from("resolc"))
}
}
//! Implements the [crate::SolidityCompiler] trait with resolc for
//! compiling contracts to PVM bytecode.
+2 -23
@@ -7,8 +7,6 @@ use std::{
};
use crate::{CompilerInput, CompilerOutput, SolidityCompiler};
use revive_dt_config::Arguments;
use revive_dt_solc_binaries::download_solc;
pub struct Solc {
solc_path: PathBuf,
@@ -30,34 +28,15 @@ impl SolidityCompiler for Solc {
let stdin = child.stdin.as_mut().expect("should be piped");
serde_json::to_writer(stdin, &input.input)?;
let output = child.wait_with_output()?;
if !output.status.success() {
let message = String::from_utf8_lossy(&output.stderr);
log::error!("solc failed exit={} stderr={}", output.status, &message);
return Ok(CompilerOutput {
input,
output: Default::default(),
error: Some(message.into()),
});
}
let output = child.wait_with_output()?.stdout;
Ok(CompilerOutput {
input,
output: serde_json::from_slice(&output.stdout)?,
error: None,
output: serde_json::from_slice(&output)?,
})
}
fn new(solc_path: PathBuf) -> Self {
Self { solc_path }
}
fn get_compiler_executable(
config: &Arguments,
version: semver::Version,
) -> anyhow::Result<PathBuf> {
let path = download_solc(config.directory(), version, config.wasm)?;
Ok(path)
}
}
-2
@@ -13,5 +13,3 @@ alloy = { workspace = true }
clap = { workspace = true }
semver = { workspace = true }
temp-dir = { workspace = true }
serde = { workspace = true }
+3 -33
@@ -1,17 +1,13 @@
//! The global configuration used across all revive differential testing crates.
use std::{
fmt::Display,
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};
use alloy::{network::EthereumWallet, signers::local::PrivateKeySigner};
use clap::{Parser, ValueEnum};
use semver::Version;
use serde::{Deserialize, Serialize};
use temp_dir::TempDir;
#[derive(Debug, Parser, Clone, Serialize, Deserialize)]
#[derive(Debug, Parser, Clone)]
#[command(name = "retester")]
pub struct Arguments {
/// The `solc` version to use if the test didn't specify it explicitly.
@@ -44,7 +40,6 @@ pub struct Arguments {
///
/// We attach it here because [TempDir] prunes itself on drop.
#[clap(skip)]
#[serde(skip)]
pub temp_dir: Option<&'static TempDir>,
/// The path to the `geth` executable.
@@ -88,22 +83,6 @@ pub struct Arguments {
/// Determines the amount of tests that are executed in parallel.
#[arg(long = "workers", default_value = "12")]
pub workers: usize,
/// Extract problems back to the test corpus.
#[arg(short, long = "extract-problems")]
pub extract_problems: bool,
/// The path to the `kitchensink` executable.
///
/// By default it uses `substrate-node` binary found in `$PATH`.
#[arg(short, long = "kitchensink", default_value = "substrate-node")]
pub kitchensink: PathBuf,
/// The path to the `eth_proxy` executable.
///
/// By default it uses `eth-rpc` binary found in `$PATH`.
#[arg(short = 'p', long = "eth_proxy", default_value = "eth-rpc")]
pub eth_proxy: PathBuf,
}
impl Arguments {
@@ -145,7 +124,7 @@ impl Default for Arguments {
/// The Solidity compatible node implementation.
///
/// This describes, at a high level, the solutions to be tested against.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, ValueEnum, Serialize, Deserialize)]
#[derive(Clone, Debug, Eq, Hash, PartialEq, ValueEnum)]
#[clap(rename_all = "lower")]
pub enum TestingPlatform {
/// The go-ethereum reference full node EVM implementation.
@@ -153,12 +132,3 @@ pub enum TestingPlatform {
/// The kitchensink runtime provides the PolkaVM (PVM) based node implementation.
Kitchensink,
}
impl Display for TestingPlatform {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Geth => f.write_str("geth"),
Self::Kitchensink => f.write_str("revive"),
}
}
}
+4 -1
@@ -18,7 +18,7 @@ revive-dt-config = { workspace = true }
revive-dt-format = { workspace = true }
revive-dt-node = { workspace = true }
revive-dt-node-interaction = { workspace = true }
revive-dt-report = { workspace = true }
revive-dt-solc-binaries = { workspace = true }
alloy = { workspace = true }
anyhow = { workspace = true }
@@ -27,4 +27,7 @@ log = { workspace = true }
env_logger = { workspace = true }
rayon = { workspace = true }
revive-solc-json-interface = { workspace = true }
semver = { workspace = true }
serde = { workspace = true, features = [ "derive" ] }
serde_json = { workspace = true }
temp-dir = { workspace = true }
+29 -289
@@ -1,19 +1,14 @@
//! The test driver handles the compilation and execution of the test cases.
use alloy::primitives::Bytes;
use alloy::rpc::types::TransactionInput;
use alloy::{
primitives::{Address, TxKind, map::HashMap},
rpc::types::{
TransactionReceipt, TransactionRequest,
trace::geth::{AccountState, DiffMode, GethTrace},
},
primitives::{Address, map::HashMap},
rpc::types::trace::geth::GethTrace,
};
use revive_dt_compiler::{Compiler, CompilerInput, SolidityCompiler};
use revive_dt_config::Arguments;
use revive_dt_format::{input::Input, metadata::Metadata, mode::SolcMode};
use revive_dt_node_interaction::EthereumNode;
use revive_dt_report::reporter::{CompilationTask, Report, Span};
use revive_dt_solc_binaries::download_solc;
use revive_solc_json_interface::SolcStandardJsonOutput;
use crate::Platform;
@@ -25,7 +20,6 @@ type Contracts<T> = HashMap<
pub struct State<'a, T: Platform> {
config: &'a Arguments,
span: Span,
contracts: Contracts<T>,
deployed_contracts: HashMap<String, Address>,
}
@@ -34,249 +28,50 @@ impl<'a, T> State<'a, T>
where
T: Platform,
{
pub fn new(config: &'a Arguments, span: Span) -> Self {
pub fn new(config: &'a Arguments) -> Self {
Self {
config,
span,
contracts: Default::default(),
deployed_contracts: Default::default(),
}
}
/// Returns a copy of the current span.
fn span(&self) -> Span {
self.span
}
pub fn build_contracts(&mut self, mode: &SolcMode, metadata: &Metadata) -> anyhow::Result<()> {
let mut span = self.span();
span.next_metadata(
metadata
.file_path
.as_ref()
.expect("metadata should have been read from a file")
.clone(),
);
let Some(version) = mode.last_patch_version(&self.config.solc) else {
anyhow::bail!("unsupported solc version: {:?}", &mode.solc_version);
anyhow::bail!("unsupported solc version: {:?}", mode.solc_version);
};
let mut compiler = Compiler::<T::Compiler>::new()
.base_path(metadata.directory()?.display().to_string())
.solc_optimizer(mode.solc_optimize());
for (file, _contract) in metadata.contract_sources()?.values() {
let sources = metadata.contract_sources()?;
let base_path = metadata.directory()?.display().to_string();
let mut compiler = Compiler::<T::Compiler>::new().base_path(base_path.clone());
for (file, _contract) in sources.values() {
log::debug!("contract source {}", file.display());
compiler = compiler.with_source(file)?;
}
let mut task = CompilationTask {
json_input: compiler.input(),
json_output: None,
mode: mode.clone(),
compiler_version: format!("{}", &version),
error: None,
};
let solc_path = download_solc(self.config.directory(), version, self.config.wasm)?;
let output = compiler
.solc_optimizer(mode.solc_optimize())
.try_build(solc_path)?;
let compiler_path = T::Compiler::get_compiler_executable(self.config, version)?;
match compiler.try_build(compiler_path) {
Ok(output) => {
task.json_output = Some(output.output.clone());
task.error = output.error;
self.contracts.insert(output.input, output.output);
self.contracts.insert(output.input, output.output);
if let Some(last_output) = self.contracts.values().last() {
if let Some(contracts) = &last_output.contracts {
for (file, contracts_map) in contracts {
for contract_name in contracts_map.keys() {
log::debug!("Compiled contract: {contract_name} from file: {file}");
}
}
} else {
log::warn!("Compiled contracts field is None");
}
}
Report::compilation(span, T::config_id(), task);
Ok(())
}
Err(error) => {
log::error!("Failed to compile contract: {:?}", error.to_string());
task.error = Some(error.to_string());
Err(error)
}
}
Ok(())
}
pub fn execute_input(
&mut self,
input: &Input,
node: &T::Blockchain,
) -> anyhow::Result<(TransactionReceipt, GethTrace, DiffMode)> {
log::trace!("Calling execute_input for input: {input:?}");
let nonce = node.fetch_add_nonce(input.caller)?;
log::debug!(
"Nonce calculated on the execute contract, calculated nonce {}, for contract {}, having address {} on node: {}",
&nonce,
&input.instance,
&input.caller,
std::any::type_name::<T>()
);
let tx =
match input.legacy_transaction(self.config.network_id, nonce, &self.deployed_contracts)
{
Ok(tx) => tx,
Err(err) => {
log::error!("Failed to construct legacy transaction: {err:?}");
return Err(err);
}
};
log::trace!("Executing transaction for input: {input:?}");
let receipt = match node.execute_transaction(tx) {
Ok(receipt) => receipt,
Err(err) => {
log::error!(
"Failed to execute transaction when executing the contract: {}, {:?}",
&input.instance,
err
);
return Err(err);
}
};
log::trace!(
"Transaction receipt for executed contract: {} - {:?}",
&input.instance,
receipt,
);
let trace = node.trace_transaction(receipt.clone())?;
log::trace!(
"Trace result for contract: {} - {:?}",
&input.instance,
trace
);
let diff = node.state_diff(receipt.clone())?;
Ok((receipt, trace, diff))
}
pub fn deploy_contracts(&mut self, input: &Input, node: &T::Blockchain) -> anyhow::Result<()> {
log::debug!(
"Deploying contracts {}, having address {} on node: {}",
&input.instance,
&input.caller,
std::any::type_name::<T>()
);
for output in self.contracts.values() {
let Some(contract_map) = &output.contracts else {
log::debug!(
"No contracts in output — skipping deployment for this input {}",
&input.instance
);
continue;
};
for contracts in contract_map.values() {
for (contract_name, contract) in contracts {
log::debug!(
"Contract name is: {:?} and the input name is: {:?}",
&contract_name,
&input.instance
);
if contract_name != &input.instance {
continue;
}
let bytecode = contract
.evm
.as_ref()
.and_then(|evm| evm.bytecode.as_ref())
.map(|b| b.object.clone());
let Some(code) = bytecode else {
log::error!("no bytecode for contract {contract_name}");
continue;
};
let nonce = node.fetch_add_nonce(input.caller)?;
log::debug!(
"Calculated nonce {}, for contract {}, having address {} on node: {}",
&nonce,
&input.instance,
&input.caller,
std::any::type_name::<T>()
);
let tx = TransactionRequest {
from: Some(input.caller),
to: Some(TxKind::Create),
gas_price: Some(5_000_000),
gas: Some(5_000_000),
chain_id: Some(self.config.network_id),
nonce: Some(nonce),
input: TransactionInput::new(Bytes::from(code.into_bytes())),
..Default::default()
};
let receipt = match node.execute_transaction(tx) {
Ok(receipt) => receipt,
Err(err) => {
log::error!(
"Failed to execute transaction when deploying the contract on node : {:?}, {:?}, {:?}",
std::any::type_name::<T>(),
&contract_name,
err
);
return Err(err);
}
};
log::debug!(
"Deployment tx sent for {} with nonce {} → tx hash: {:?}, on node: {:?}",
contract_name,
nonce,
receipt.transaction_hash,
std::any::type_name::<T>(),
);
log::trace!(
"Deployed transaction receipt for contract: {} - {:?}, on node: {:?}",
&contract_name,
receipt,
std::any::type_name::<T>(),
);
let Some(address) = receipt.contract_address else {
log::error!(
"contract {contract_name} deployment did not return an address"
);
continue;
};
self.deployed_contracts
.insert(contract_name.clone(), address);
log::trace!(
"deployed contract `{}` at {:?}, on node {:?}",
contract_name,
address,
std::any::type_name::<T>()
);
}
}
}
log::debug!("Available contracts: {:?}", self.deployed_contracts.keys());
Ok(())
) -> anyhow::Result<GethTrace> {
let receipt = node.execute_transaction(input.legacy_transaction(
self.config.network_id,
0,
&self.deployed_contracts,
)?)?;
dbg!(&receipt);
//node.trace_transaction(receipt)
todo!()
}
}
@@ -306,73 +101,18 @@ where
}
}
pub fn trace_diff_mode(label: &str, diff: &DiffMode) {
log::trace!("{label} - PRE STATE:");
for (addr, state) in &diff.pre {
Self::trace_account_state(" [pre]", addr, state);
}
log::trace!("{label} - POST STATE:");
for (addr, state) in &diff.post {
Self::trace_account_state(" [post]", addr, state);
}
}
fn trace_account_state(prefix: &str, addr: &Address, state: &AccountState) {
log::trace!("{prefix} 0x{addr:x}");
if let Some(balance) = &state.balance {
log::trace!("{prefix} balance: {balance}");
}
if let Some(nonce) = &state.nonce {
log::trace!("{prefix} nonce: {nonce}");
}
if let Some(code) = &state.code {
log::trace!("{prefix} code: {code}");
}
}
pub fn execute(&mut self, span: Span) -> anyhow::Result<()> {
pub fn execute(&mut self) -> anyhow::Result<()> {
for mode in self.metadata.solc_modes() {
let mut leader_state = State::<L>::new(self.config, span);
let mut leader_state = State::<L>::new(self.config);
leader_state.build_contracts(&mode, self.metadata)?;
let mut follower_state = State::<F>::new(self.config, span);
let mut follower_state = State::<F>::new(self.config);
follower_state.build_contracts(&mode, self.metadata)?;
for case in &self.metadata.cases {
for input in &case.inputs {
log::debug!("Starting deploying contract {}", &input.instance);
leader_state.deploy_contracts(input, self.leader_node)?;
follower_state.deploy_contracts(input, self.follower_node)?;
log::debug!("Starting executing contract {}", &input.instance);
let (leader_receipt, _, leader_diff) =
leader_state.execute_input(input, self.leader_node)?;
let (follower_receipt, _, follower_diff) =
follower_state.execute_input(input, self.follower_node)?;
if leader_diff == follower_diff {
log::debug!("State diffs match between leader and follower.");
} else {
log::debug!("State diffs mismatch between leader and follower.");
Self::trace_diff_mode("Leader", &leader_diff);
Self::trace_diff_mode("Follower", &follower_diff);
}
if leader_receipt.logs() != follower_receipt.logs() {
log::debug!("Log/event mismatch between leader and follower.");
log::trace!("Leader logs: {:?}", leader_receipt.logs());
log::trace!("Follower logs: {:?}", follower_receipt.logs());
}
if leader_receipt.status() != follower_receipt.status() {
log::debug!(
"Mismatch in status: leader = {}, follower = {}",
leader_receipt.status(),
follower_receipt.status()
);
}
let _ = leader_state.execute_input(input, self.leader_node)?;
let _ = follower_state.execute_input(input, self.follower_node)?;
}
}
}
+4 -16
@@ -3,9 +3,8 @@
//! This crate defines the testing configuration and
//! provides a helper utility to execute tests.
use revive_dt_compiler::{SolidityCompiler, revive_resolc, solc};
use revive_dt_config::TestingPlatform;
use revive_dt_node::{geth, kitchensink::KitchensinkNode};
use revive_dt_compiler::{SolidityCompiler, solc};
use revive_dt_node::geth;
use revive_dt_node_interaction::EthereumNode;
pub mod driver;
@@ -16,9 +15,6 @@ pub mod driver;
pub trait Platform {
type Blockchain: EthereumNode;
type Compiler: SolidityCompiler;
/// Returns the matching [TestingPlatform] of the [revive_dt_config::Arguments].
fn config_id() -> TestingPlatform;
}
#[derive(Default)]
@@ -27,20 +23,12 @@ pub struct Geth;
impl Platform for Geth {
type Blockchain = geth::Instance;
type Compiler = solc::Solc;
fn config_id() -> TestingPlatform {
TestingPlatform::Geth
}
}
#[derive(Default)]
pub struct Kitchensink;
impl Platform for Kitchensink {
type Blockchain = KitchensinkNode;
type Compiler = revive_resolc::Resolc;
fn config_id() -> TestingPlatform {
TestingPlatform::Kitchensink
}
type Blockchain = geth::Instance;
type Compiler = solc::Solc;
}
+42 -64
@@ -5,12 +5,11 @@ use rayon::{ThreadPoolBuilder, prelude::*};
use revive_dt_config::*;
use revive_dt_core::{
Geth, Kitchensink, Platform,
Geth,
driver::{Driver, State},
};
use revive_dt_format::{corpus::Corpus, metadata::Metadata};
use revive_dt_node::pool::NodePool;
use revive_dt_report::reporter::{Report, Span};
use temp_dir::TempDir;
static TEMP_DIR: LazyLock<TempDir> = LazyLock::new(|| TempDir::new().unwrap());
@@ -18,15 +17,18 @@ static TEMP_DIR: LazyLock<TempDir> = LazyLock::new(|| TempDir::new().unwrap());
fn main() -> anyhow::Result<()> {
let args = init_cli()?;
for (corpus, tests) in collect_corpora(&args)? {
let span = Span::new(corpus, args.clone())?;
let corpora = collect_corpora(&args)?;
match &args.compile_only {
Some(platform) => compile_corpus(&args, &tests, platform, span),
None => execute_corpus(&args, &tests, span)?,
if let Some(platform) = &args.compile_only {
for tests in corpora.values() {
main_compile_only(&args, tests, platform)?;
}
Report::save()?;
return Ok(());
}
for tests in corpora.values() {
main_execute_differential(&args, tests)?;
}
Ok(())
@@ -36,26 +38,17 @@ fn init_cli() -> anyhow::Result<Arguments> {
env_logger::init();
let mut args = Arguments::parse();
if args.corpus.is_empty() {
anyhow::bail!("no test corpus specified");
}
match args.working_directory.as_ref() {
Some(dir) => {
if !dir.exists() {
anyhow::bail!("workdir {} does not exist", dir.display());
}
}
None => {
args.temp_dir = Some(&TEMP_DIR);
}
if args.working_directory.is_none() {
args.temp_dir = Some(&TEMP_DIR);
}
log::info!("workdir: {}", args.directory().display());
ThreadPoolBuilder::new()
.num_threads(args.workers)
.build_global()?;
.build_global()
.unwrap();
Ok(args)
}
@@ -74,30 +67,28 @@ fn collect_corpora(args: &Arguments) -> anyhow::Result<HashMap<Corpus, Vec<Metad
Ok(corpora)
}
fn run_driver<L, F>(args: &Arguments, tests: &[Metadata], span: Span) -> anyhow::Result<()>
where
L: Platform,
F: Platform,
L::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
F::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
{
let leader_nodes = NodePool::<L::Blockchain>::new(args)?;
let follower_nodes = NodePool::<F::Blockchain>::new(args)?;
fn main_execute_differential(args: &Arguments, tests: &[Metadata]) -> anyhow::Result<()> {
let leader_nodes = NodePool::new(args)?;
let follower_nodes = NodePool::new(args)?;
tests.par_iter().for_each(|metadata| {
let mut driver = Driver::<L, F>::new(
metadata,
args,
leader_nodes.round_robbin(),
follower_nodes.round_robbin(),
);
let mut driver = match (&args.leader, &args.follower) {
(TestingPlatform::Geth, TestingPlatform::Kitchensink) => Driver::<Geth, Geth>::new(
metadata,
args,
leader_nodes.round_robbin(),
follower_nodes.round_robbin(),
),
_ => unimplemented!(),
};
match driver.execute(span) {
Ok(_) => {
match driver.execute() {
Ok(build) => {
log::info!(
"metadata {} success",
metadata.directory().as_ref().unwrap().display()
);
build
}
Err(error) => {
log::warn!(
@@ -111,33 +102,20 @@ where
Ok(())
}
fn execute_corpus(args: &Arguments, tests: &[Metadata], span: Span) -> anyhow::Result<()> {
match (&args.leader, &args.follower) {
(TestingPlatform::Geth, TestingPlatform::Kitchensink) => {
run_driver::<Geth, Kitchensink>(args, tests, span)?
fn main_compile_only(
config: &Arguments,
tests: &[Metadata],
platform: &TestingPlatform,
) -> anyhow::Result<()> {
tests.par_iter().for_each(|metadata| {
for mode in &metadata.solc_modes() {
let mut state = match platform {
TestingPlatform::Geth => State::<Geth>::new(config),
_ => todo!(),
};
let _ = state.build_contracts(mode, metadata);
}
(TestingPlatform::Geth, TestingPlatform::Geth) => {
run_driver::<Geth, Geth>(args, tests, span)?
}
_ => unimplemented!(),
}
});
Ok(())
}
fn compile_corpus(config: &Arguments, tests: &[Metadata], platform: &TestingPlatform, span: Span) {
tests.par_iter().for_each(|metadata| {
for mode in &metadata.solc_modes() {
match platform {
TestingPlatform::Geth => {
let mut state = State::<Geth>::new(config, span);
let _ = state.build_contracts(mode, metadata);
}
TestingPlatform::Kitchensink => {
let mut state = State::<Kitchensink>::new(config, span);
let _ = state.build_contracts(mode, metadata);
}
};
}
});
}
+2 -2
@@ -3,11 +3,11 @@ use std::{
path::{Path, PathBuf},
};
use serde::{Deserialize, Serialize};
use serde::Deserialize;
use crate::metadata::Metadata;
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq, Hash)]
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Hash)]
pub struct Corpus {
pub name: String,
pub path: PathBuf,
+10 -15
@@ -1,8 +1,7 @@
use std::collections::HashMap;
use alloy::{
json_abi::Function,
primitives::{Address, TxKind},
json_abi::Function, network::TransactionBuilder, primitives::Address,
rpc::types::TransactionRequest,
};
use semver::VersionReq;
@@ -110,21 +109,17 @@ impl Input {
deployed_contracts: &HashMap<String, Address>,
) -> anyhow::Result<TransactionRequest> {
let to = match self.method {
Method::Deployer => Some(TxKind::Create),
_ => Some(TxKind::Call(
self.instance_to_address(&self.instance, deployed_contracts)?,
)),
Method::Deployer => Address::ZERO,
_ => self.instance_to_address(&self.instance, deployed_contracts)?,
};
Ok(TransactionRequest {
from: Some(self.caller),
to,
nonce: Some(nonce),
chain_id: Some(chain_id),
gas_price: Some(5_000_000),
gas: Some(5_000_000),
..Default::default()
})
Ok(TransactionRequest::default()
.with_from(self.caller)
.with_to(to)
.with_nonce(nonce)
.with_chain_id(chain_id)
.with_gas_price(20_000_000_000)
.with_gas_limit(20_000_000_000))
}
}
+4 -10
@@ -132,7 +132,7 @@ impl Metadata {
}
fn try_from_solidity(path: &Path) -> Option<Self> {
let spec = read_to_string(path)
let buf = read_to_string(path)
.inspect_err(|error| {
log::error!(
"opening JSON test metadata file '{}' error: {error}",
@@ -147,24 +147,18 @@ impl Metadata {
buf
});
if spec.is_empty() {
if buf.is_empty() {
return None;
}
match serde_json::from_str::<Self>(&spec) {
match serde_json::from_str::<Self>(&buf) {
Ok(mut metadata) => {
metadata.file_path = Some(path.to_path_buf());
let name = path
.file_name()
.expect("this should be the path to a Solidity file")
.to_str()
.expect("the file name should be valid UTF-8k");
metadata.contracts = Some([(String::from("Test"), format!("{name}:Test"))].into());
Some(metadata)
}
Err(error) => {
log::error!(
"parsing Solidity test metadata file '{}' error: '{error}' from data: {spec}",
"parsing Solidity test metadata file '{}' error: {error}",
path.display()
);
None
+3 -3
@@ -1,16 +1,16 @@
use semver::Version;
use serde::Deserialize;
use serde::de::Deserializer;
use serde::{Deserialize, Serialize};
/// Specifies the compilation mode of the test artifact.
#[derive(Hash, Debug, Clone, Eq, PartialEq)]
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Mode {
Solidity(SolcMode),
Unknown(String),
}
/// Specify Solidity specific compiler options.
#[derive(Hash, Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)]
#[derive(Debug, Default, Clone, Eq, PartialEq)]
pub struct SolcMode {
pub solc_version: Option<semver::VersionReq>,
solc_optimize: Option<bool>,
+2
@@ -11,6 +11,8 @@ rust-version.workspace = true
[dependencies]
alloy = { workspace = true }
anyhow = { workspace = true }
hex = { workspace = true }
log = { workspace = true }
once_cell = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true }
+1 -9
@@ -1,11 +1,9 @@
//! This crate implements all node interactions.
use alloy::primitives::Address;
use alloy::rpc::types::trace::geth::{DiffMode, GethTrace};
use alloy::rpc::types::trace::geth::GethTrace;
use alloy::rpc::types::{TransactionReceipt, TransactionRequest};
use tokio_runtime::TO_TOKIO;
pub mod nonce;
mod tokio_runtime;
pub mod trace;
pub mod transaction;
@@ -20,10 +18,4 @@ pub trait EthereumNode {
/// Trace the transaction in the [TransactionReceipt] and return a [GethTrace].
fn trace_transaction(&self, transaction: TransactionReceipt) -> anyhow::Result<GethTrace>;
/// Returns the state diff of the transaction hash in the [TransactionReceipt].
fn state_diff(&self, transaction: TransactionReceipt) -> anyhow::Result<DiffMode>;
/// Returns the next available nonce for the given [Address].
fn fetch_add_nonce(&self, address: Address) -> anyhow::Result<u64>;
}
-55
@@ -1,55 +0,0 @@
use std::pin::Pin;
use alloy::{
primitives::Address,
providers::{Provider, ProviderBuilder},
};
use tokio::sync::oneshot;
use crate::{TO_TOKIO, tokio_runtime::AsyncNodeInteraction};
pub type Task = Pin<Box<dyn Future<Output = anyhow::Result<u64>> + Send>>;
pub(crate) struct Nonce {
sender: oneshot::Sender<anyhow::Result<u64>>,
task: Task,
}
impl AsyncNodeInteraction for Nonce {
type Output = anyhow::Result<u64>;
fn split(
self,
) -> (
std::pin::Pin<Box<dyn Future<Output = Self::Output> + Send>>,
oneshot::Sender<Self::Output>,
) {
(self.task, self.sender)
}
}
/// This is like `trace_transaction`, just for nonces.
pub fn fetch_onchain_nonce(
connection: String,
wallet: alloy::network::EthereumWallet,
address: Address,
) -> anyhow::Result<u64> {
let sender = TO_TOKIO.lock().unwrap().nonce_sender.clone();
let (tx, rx) = oneshot::channel();
let task: Task = Box::pin(async move {
let provider = ProviderBuilder::new()
.wallet(wallet)
.connect(&connection)
.await?;
let onchain = provider.get_transaction_count(address).await?;
Ok(onchain)
});
sender
.blocking_send(Nonce { task, sender: tx })
.expect("not in async context");
rx.blocking_recv()
.unwrap_or_else(|err| anyhow::bail!("nonce fetch failed: {err}"))
}
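The removed `nonce.rs` above follows the crate's sync-to-async bridge pattern: a boxed future plus a oneshot reply channel is handed over an mpsc channel to a long-lived tokio runtime thread, and the synchronous caller blocks on the reply. Below is a minimal, self-contained sketch of that pattern with generic names, not the crate's actual types.
```rust
use std::{future::Future, pin::Pin, thread};
use tokio::{
    runtime::Runtime,
    sync::{mpsc, oneshot},
};

// A queued job: the async work to run, and where to send its result.
type Task = Pin<Box<dyn Future<Output = u64> + Send>>;
struct Job {
    task: Task,
    reply: oneshot::Sender<u64>,
}

fn main() {
    let (tx, mut rx) = mpsc::channel::<Job>(16);

    // Long-lived runtime thread, analogous to TokioRuntime in this crate.
    thread::spawn(move || {
        Runtime::new().unwrap().block_on(async move {
            while let Some(job) = rx.recv().await {
                // Run the task and report back; ignore a dropped receiver.
                let _ = job.reply.send(job.task.await);
            }
        });
    });

    // Synchronous caller side, analogous to fetch_onchain_nonce.
    let (reply_tx, reply_rx) = oneshot::channel();
    tx.blocking_send(Job {
        task: Box::pin(async { 42 }),
        reply: reply_tx,
    })
    .expect("runtime thread alive");
    assert_eq!(reply_rx.blocking_recv().unwrap(), 42);
}
```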
@@ -10,7 +10,6 @@ use tokio::spawn;
use tokio::sync::{mpsc, oneshot};
use tokio::task::JoinError;
use crate::nonce::Nonce;
use crate::trace::Trace;
use crate::transaction::Transaction;
@@ -34,7 +33,6 @@ pub(crate) trait AsyncNodeInteraction: Send + 'static {
pub(crate) struct TokioRuntime {
pub(crate) transaction_sender: mpsc::Sender<Transaction>,
pub(crate) trace_sender: mpsc::Sender<Trace>,
pub(crate) nonce_sender: mpsc::Sender<Nonce>,
}
impl TokioRuntime {
@@ -42,13 +40,11 @@ impl TokioRuntime {
let rt = Runtime::new().expect("should be able to create the tokio runtime");
let (transaction_sender, transaction_receiver) = mpsc::channel::<Transaction>(1024);
let (trace_sender, trace_receiver) = mpsc::channel::<Trace>(1024);
let (nonce_sender, nonce_receiver) = mpsc::channel::<Nonce>(1024);
thread::spawn(move || {
rt.block_on(async move {
let transaction_task = spawn(interaction::<Transaction>(transaction_receiver));
let trace_task = spawn(interaction::<Trace>(trace_receiver));
let nonce_task = spawn(interaction::<Nonce>(nonce_receiver));
if let Err(error) = transaction_task.await {
log::error!("tokio transaction task failed: {error}");
@@ -56,16 +52,12 @@ impl TokioRuntime {
if let Err(error) = trace_task.await {
log::error!("tokio trace transaction task failed: {error}");
}
if let Err(error) = nonce_task.await {
log::error!("tokio nonce task failed: {error}");
}
});
});
Self {
transaction_sender,
trace_sender,
nonce_sender,
}
}
}
+2 -6
@@ -12,14 +12,10 @@ rust-version.workspace = true
anyhow = { workspace = true }
alloy = { workspace = true }
log = { workspace = true }
serde_json = { workspace = true }
revive-dt-node-interaction = { workspace = true }
revive-dt-config = { workspace = true }
serde_json = { workspace = true }
sp-core = { workspace = true }
sp-runtime = { workspace = true }
[dev-dependencies]
temp-dir = { workspace = true }
temp-dir = { workspace = true }
+15 -35
@@ -5,17 +5,13 @@ use std::{
io::{BufRead, BufReader, Read, Write},
path::PathBuf,
process::{Child, Command, Stdio},
sync::{
Mutex,
atomic::{AtomicU32, Ordering},
},
sync::atomic::{AtomicU32, Ordering},
thread,
time::{Duration, Instant},
};
use alloy::{
network::EthereumWallet,
primitives::{Address, map::HashMap},
providers::{Provider, ProviderBuilder, ext::DebugApi},
rpc::types::{
TransactionReceipt, TransactionRequest,
@@ -24,8 +20,7 @@ use alloy::{
};
use revive_dt_config::Arguments;
use revive_dt_node_interaction::{
EthereumNode, nonce::fetch_onchain_nonce, trace::trace_transaction,
transaction::execute_transaction,
EthereumNode, trace::trace_transaction, transaction::execute_transaction,
};
use crate::Node;
@@ -50,7 +45,6 @@ pub struct Instance {
network_id: u64,
start_timeout: u64,
wallet: EthereumWallet,
nonces: Mutex<HashMap<Address, u64>>,
}
impl Instance {
@@ -191,32 +185,6 @@ impl EthereumNode for Instance {
.await?)
}))
}
fn state_diff(
&self,
transaction: alloy::rpc::types::TransactionReceipt,
) -> anyhow::Result<DiffMode> {
match self
.trace_transaction(transaction)?
.try_into_pre_state_frame()?
{
PreStateFrame::Diff(diff) => Ok(diff),
_ => anyhow::bail!("expected a diff mode trace"),
}
}
fn fetch_add_nonce(&self, address: Address) -> anyhow::Result<u64> {
let connection_string = self.connection_string.clone();
let wallet = self.wallet.clone();
let onchain_nonce = fetch_onchain_nonce(connection_string, wallet, address)?;
let mut nonces = self.nonces.lock().unwrap();
let current = nonces.entry(address).or_insert(onchain_nonce);
let value = *current;
*current += 1;
Ok(value)
}
}
impl Node for Instance {
@@ -235,7 +203,6 @@ impl Node for Instance {
network_id: config.network_id,
start_timeout: config.geth_start_timeout,
wallet: config.wallet(),
nonces: Mutex::new(HashMap::new()),
}
}
@@ -252,6 +219,19 @@ impl Node for Instance {
Ok(())
}
fn state_diff(
&self,
transaction: alloy::rpc::types::TransactionReceipt,
) -> anyhow::Result<DiffMode> {
match self
.trace_transaction(transaction)?
.try_into_pre_state_frame()?
{
PreStateFrame::Diff(diff) => Ok(diff),
_ => anyhow::bail!("expected a diff mode trace"),
}
}
fn version(&self) -> anyhow::Result<String> {
let output = Command::new(&self.geth)
.arg("--version")
-558
@@ -1,558 +0,0 @@
use std::{
fs::create_dir_all,
io::BufRead,
path::PathBuf,
process::{Child, Command, Stdio},
sync::{
Mutex,
atomic::{AtomicU32, Ordering},
},
time::Duration,
};
use alloy::{
hex,
network::EthereumWallet,
primitives::{Address, map::HashMap},
providers::{Provider, ProviderBuilder, ext::DebugApi},
rpc::types::{
TransactionReceipt,
trace::geth::{DiffMode, GethDebugTracingOptions, PreStateConfig, PreStateFrame},
},
};
use serde_json::{Value as JsonValue, json};
use sp_core::crypto::Ss58Codec;
use sp_runtime::AccountId32;
use revive_dt_config::Arguments;
use revive_dt_node_interaction::{
EthereumNode, nonce::fetch_onchain_nonce, trace::trace_transaction,
transaction::execute_transaction,
};
use crate::Node;
static NODE_COUNT: AtomicU32 = AtomicU32::new(0);
#[derive(Debug)]
pub struct KitchensinkNode {
id: u32,
substrate_binary: PathBuf,
eth_proxy_binary: PathBuf,
rpc_url: String,
wallet: EthereumWallet,
base_directory: PathBuf,
process_substrate: Option<Child>,
process_proxy: Option<Child>,
nonces: Mutex<HashMap<Address, u64>>,
}
impl KitchensinkNode {
const BASE_DIRECTORY: &str = "kitchensink";
const SUBSTRATE_READY_MARKER: &str = "Running JSON-RPC server";
const ETH_PROXY_READY_MARKER: &str = "Running JSON-RPC server";
const CHAIN_SPEC_JSON_FILE: &str = "template_chainspec.json";
const BASE_SUBSTRATE_RPC_PORT: u16 = 9944;
const BASE_PROXY_RPC_PORT: u16 = 8545;
const SUBSTRATE_LOG_ENV: &str = "error,evm=debug,sc_rpc_server=info,runtime::revive=debug";
const PROXY_LOG_ENV: &str = "info,eth-rpc=debug";
fn init(&mut self, genesis: &str) -> anyhow::Result<&mut Self> {
create_dir_all(&self.base_directory)?;
let template_chainspec_path = self.base_directory.join(Self::CHAIN_SPEC_JSON_FILE);
let output = Command::new(&self.substrate_binary)
.arg("export-chain-spec")
.arg("--chain")
.arg("dev")
.output()?;
if !output.status.success() {
anyhow::bail!(
"substrate-node export-chain-spec failed: {}",
String::from_utf8_lossy(&output.stderr)
);
}
let content = String::from_utf8(output.stdout)?;
let mut chainspec_json: JsonValue = serde_json::from_str(&content)?;
let existing_chainspec_balances =
chainspec_json["genesis"]["runtimeGenesis"]["patch"]["balances"]["balances"]
.as_array()
.cloned()
.unwrap_or_default();
let mut merged_balances: Vec<(String, u128)> = existing_chainspec_balances
.into_iter()
.filter_map(|val| {
if let Some(arr) = val.as_array() {
if arr.len() == 2 {
let account = arr[0].as_str()?.to_string();
let balance = arr[1].as_f64()? as u128;
return Some((account, balance));
}
}
None
})
.collect();
let mut eth_balances = self.extract_balance_from_genesis_file(genesis)?;
merged_balances.append(&mut eth_balances);
chainspec_json["genesis"]["runtimeGenesis"]["patch"]["balances"]["balances"] =
json!(merged_balances);
serde_json::to_writer_pretty(
std::fs::File::create(&template_chainspec_path)?,
&chainspec_json,
)?;
Ok(self)
}
fn spawn_process(&mut self) -> anyhow::Result<()> {
let substrate_rpc_port = Self::BASE_SUBSTRATE_RPC_PORT + self.id as u16;
let proxy_rpc_port = Self::BASE_PROXY_RPC_PORT + self.id as u16;
self.rpc_url = format!("http://127.0.0.1:{proxy_rpc_port}");
let chainspec_path = self.base_directory.join(Self::CHAIN_SPEC_JSON_FILE);
// Start Substrate node
let mut substrate_process = Command::new(&self.substrate_binary)
.arg("--chain")
.arg(chainspec_path)
.arg("--base-path")
.arg(&self.base_directory)
.arg("--rpc-port")
.arg(substrate_rpc_port.to_string())
.arg("--name")
.arg(format!("revive-kitchensink-{}", self.id))
.arg("--force-authoring")
.arg("--rpc-methods")
.arg("Unsafe")
.arg("--rpc-cors")
.arg("all")
.env("RUST_LOG", Self::SUBSTRATE_LOG_ENV)
.stdout(Stdio::null())
.stderr(Stdio::piped())
.spawn()?;
// Give the node a moment to boot
Self::wait_ready(
&mut substrate_process,
Self::SUBSTRATE_READY_MARKER,
Duration::from_secs(30),
)?;
let mut proxy_process = Command::new(&self.eth_proxy_binary)
.arg("--dev")
.arg("--rpc-port")
.arg(proxy_rpc_port.to_string())
.arg("--node-rpc-url")
.arg(format!("ws://127.0.0.1:{substrate_rpc_port}"))
.env("RUST_LOG", Self::PROXY_LOG_ENV)
.stdout(Stdio::null())
.stderr(Stdio::piped())
.spawn()?;
Self::wait_ready(
&mut proxy_process,
Self::ETH_PROXY_READY_MARKER,
Duration::from_secs(30),
)?;
self.process_substrate = Some(substrate_process);
self.process_proxy = Some(proxy_process);
Ok(())
}
fn extract_balance_from_genesis_file(
&self,
genesis_str: &str,
) -> anyhow::Result<Vec<(String, u128)>> {
let genesis_json: JsonValue = serde_json::from_str(genesis_str)?;
let alloc = genesis_json
.get("alloc")
.and_then(|a| a.as_object())
.ok_or_else(|| anyhow::anyhow!("Missing 'alloc' in genesis"))?;
let mut balances = Vec::new();
for (eth_addr, obj) in alloc.iter() {
let balance_str = obj.get("balance").and_then(|b| b.as_str()).unwrap_or("0");
let balance = if balance_str.starts_with("0x") {
u128::from_str_radix(balance_str.trim_start_matches("0x"), 16)?
} else {
balance_str.parse::<u128>()?
};
let substrate_addr = Self::eth_to_substrate_address(eth_addr)?;
balances.push((substrate_addr.clone(), balance));
}
Ok(balances)
}
fn eth_to_substrate_address(eth_addr: &str) -> anyhow::Result<String> {
let eth_bytes = hex::decode(eth_addr.trim_start_matches("0x"))?;
if eth_bytes.len() != 20 {
anyhow::bail!(
"Invalid Ethereum address length: expected 20 bytes, got {}",
eth_bytes.len()
);
}
let mut padded = [0xEEu8; 32];
padded[..20].copy_from_slice(&eth_bytes);
let account_id = AccountId32::from(padded);
Ok(account_id.to_ss58check())
}
fn wait_ready(child: &mut Child, marker: &str, timeout: Duration) -> anyhow::Result<()> {
let start_time = std::time::Instant::now();
let stderr = child.stderr.take().expect("stderr must be piped");
let mut lines = std::io::BufReader::new(stderr).lines();
loop {
if let Some(Ok(line)) = lines.next() {
println!("Kitchensink log: {line:?}");
if line.contains(marker) {
std::thread::spawn(move || for _ in lines.by_ref() {});
return Ok(());
}
}
if start_time.elapsed() > timeout {
let _ = child.kill();
anyhow::bail!("Timeout waiting for process readiness: {marker}");
}
}
}
pub fn eth_rpc_version(&self) -> anyhow::Result<String> {
let output = Command::new(&self.eth_proxy_binary)
.arg("--version")
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::null())
.spawn()?
.wait_with_output()?
.stdout;
Ok(String::from_utf8_lossy(&output).trim().to_string())
}
}
impl EthereumNode for KitchensinkNode {
fn execute_transaction(
&self,
transaction: alloy::rpc::types::TransactionRequest,
) -> anyhow::Result<TransactionReceipt> {
let url = self.rpc_url.clone();
let wallet = self.wallet.clone();
execute_transaction(Box::pin(async move {
Ok(ProviderBuilder::new()
.wallet(wallet)
.connect(&url)
.await?
.send_transaction(transaction)
.await?
.get_receipt()
.await?)
}))
}
fn trace_transaction(
&self,
transaction: TransactionReceipt,
) -> anyhow::Result<alloy::rpc::types::trace::geth::GethTrace> {
let url = self.rpc_url.clone();
let trace_options = GethDebugTracingOptions::prestate_tracer(PreStateConfig {
diff_mode: Some(true),
disable_code: None,
disable_storage: None,
});
let wallet = self.wallet.clone();
trace_transaction(Box::pin(async move {
Ok(ProviderBuilder::new()
.wallet(wallet)
.connect(&url)
.await?
.debug_trace_transaction(transaction.transaction_hash, trace_options)
.await?)
}))
}
fn state_diff(&self, transaction: TransactionReceipt) -> anyhow::Result<DiffMode> {
match self
.trace_transaction(transaction)?
.try_into_pre_state_frame()?
{
PreStateFrame::Diff(diff) => Ok(diff),
_ => anyhow::bail!("expected a diff mode trace"),
}
}
fn fetch_add_nonce(&self, address: Address) -> anyhow::Result<u64> {
let url = self.rpc_url.clone();
let wallet = self.wallet.clone();
let onchain_nonce = fetch_onchain_nonce(url, wallet, address)?;
let mut nonces = self.nonces.lock().unwrap();
let current = nonces.entry(address).or_insert(onchain_nonce);
let value = *current;
*current += 1;
Ok(value)
}
}
impl Node for KitchensinkNode {
fn new(config: &Arguments) -> Self {
let kitchensink_directory = config.directory().join(Self::BASE_DIRECTORY);
let id = NODE_COUNT.fetch_add(1, Ordering::SeqCst);
let base_directory = kitchensink_directory.join(id.to_string());
Self {
id,
substrate_binary: config.kitchensink.clone(),
eth_proxy_binary: config.eth_proxy.clone(),
rpc_url: String::new(),
wallet: config.wallet(),
base_directory,
process_substrate: None,
process_proxy: None,
nonces: Mutex::new(HashMap::new()),
}
}
fn connection_string(&self) -> String {
self.rpc_url.clone()
}
fn shutdown(mut self) -> anyhow::Result<()> {
if let Some(mut child) = self.process_proxy.take() {
let _ = child.kill();
}
if let Some(mut child) = self.process_substrate.take() {
let _ = child.kill();
}
Ok(())
}
fn spawn(&mut self, genesis: String) -> anyhow::Result<()> {
self.init(&genesis)?.spawn_process()
}
fn version(&self) -> anyhow::Result<String> {
let output = Command::new(&self.substrate_binary)
.arg("--version")
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::null())
.spawn()?
.wait_with_output()?
.stdout;
Ok(String::from_utf8_lossy(&output).into())
}
}
impl Drop for KitchensinkNode {
fn drop(&mut self) {
if let Some(mut child) = self.process_proxy.take() {
let _ = child.kill();
}
if let Some(mut child) = self.process_substrate.take() {
let _ = child.kill();
}
}
}
#[cfg(test)]
mod tests {
use revive_dt_config::Arguments;
use std::path::PathBuf;
use temp_dir::TempDir;
use std::fs;
use super::KitchensinkNode;
use crate::{GENESIS_JSON, Node};
fn test_config() -> (Arguments, TempDir) {
let mut config = Arguments::default();
let temp_dir = TempDir::new().unwrap();
config.working_directory = temp_dir.path().to_path_buf().into();
config.kitchensink = PathBuf::from("substrate-node");
config.eth_proxy = PathBuf::from("eth-rpc");
(config, temp_dir)
}
#[test]
fn test_init_generates_chainspec_with_balances() {
let genesis_content = r#"
{
"alloc": {
"90F8bf6A479f320ead074411a4B0e7944Ea8c9C1": {
"balance": "1000000000000000000"
},
"Ab8483F64d9C6d1EcF9b849Ae677dD3315835cb2": {
"balance": "2000000000000000000"
}
}
}
"#;
let mut dummy_node = KitchensinkNode::new(&test_config().0);
// Call `init()`
dummy_node.init(genesis_content).expect("init failed");
// Check that the patched chainspec file was generated
let final_chainspec_path = dummy_node
.base_directory
.join(KitchensinkNode::CHAIN_SPEC_JSON_FILE);
assert!(final_chainspec_path.exists(), "Chainspec file should exist");
let contents = fs::read_to_string(&final_chainspec_path).expect("Failed to read chainspec");
// Validate that the Substrate addresses derived from the Ethereum addresses are in the file
let first_eth_addr =
KitchensinkNode::eth_to_substrate_address("90F8bf6A479f320ead074411a4B0e7944Ea8c9C1")
.unwrap();
let second_eth_addr =
KitchensinkNode::eth_to_substrate_address("Ab8483F64d9C6d1EcF9b849Ae677dD3315835cb2")
.unwrap();
assert!(
contents.contains(&first_eth_addr),
"Chainspec should contain Substrate address for first Ethereum account"
);
assert!(
contents.contains(&second_eth_addr),
"Chainspec should contain Substrate address for second Ethereum account"
);
}
#[test]
fn test_parse_genesis_alloc() {
// Create test genesis file
let genesis_json = r#"
{
"alloc": {
"0x90F8bf6A479f320ead074411a4B0e7944Ea8c9C1": { "balance": "1000000000000000000" },
"0x0000000000000000000000000000000000000000": { "balance": "0xDE0B6B3A7640000" },
"0xffffffffffffffffffffffffffffffffffffffff": { "balance": "123456789" }
}
}
"#;
let node = KitchensinkNode::new(&test_config().0);
let result = node
.extract_balance_from_genesis_file(genesis_json)
.unwrap();
let result_map: std::collections::HashMap<_, _> = result.into_iter().collect();
assert_eq!(
result_map.get("5FLneRcWAfk3X3tg6PuGyLNGAquPAZez5gpqvyuf3yUK8VaV"),
Some(&1_000_000_000_000_000_000u128)
);
assert_eq!(
result_map.get("5C4hrfjw9DjXZTzV3MwzrrAr9P1MLDHajjSidz9bR544LEq1"),
Some(&1_000_000_000_000_000_000u128)
);
assert_eq!(
result_map.get("5HrN7fHLXWcFiXPwwtq2EkSGns9eMmoUQnbVKweNz3VVr6N4"),
Some(&123_456_789u128)
);
}
#[test]
fn print_eth_to_substrate_mappings() {
let eth_addresses = vec![
"0x90F8bf6A479f320ead074411a4B0e7944Ea8c9C1",
"0xffffffffffffffffffffffffffffffffffffffff",
"90F8bf6A479f320ead074411a4B0e7944Ea8c9C1",
];
for eth_addr in eth_addresses {
let ss58 = KitchensinkNode::eth_to_substrate_address(eth_addr).unwrap();
println!("Ethereum: {eth_addr} -> Substrate SS58: {ss58}");
}
}
#[test]
fn test_eth_to_substrate_address() {
let cases = vec![
(
"0x90F8bf6A479f320ead074411a4B0e7944Ea8c9C1",
"5FLneRcWAfk3X3tg6PuGyLNGAquPAZez5gpqvyuf3yUK8VaV",
),
(
"90F8bf6A479f320ead074411a4B0e7944Ea8c9C1",
"5FLneRcWAfk3X3tg6PuGyLNGAquPAZez5gpqvyuf3yUK8VaV",
),
(
"0x0000000000000000000000000000000000000000",
"5C4hrfjw9DjXZTzV3MwzrrAr9P1MLDHajjSidz9bR544LEq1",
),
(
"0xffffffffffffffffffffffffffffffffffffffff",
"5HrN7fHLXWcFiXPwwtq2EkSGns9eMmoUQnbVKweNz3VVr6N4",
),
];
for (eth_addr, expected_ss58) in cases {
let result = KitchensinkNode::eth_to_substrate_address(eth_addr).unwrap();
assert_eq!(
result, expected_ss58,
"Mismatch for Ethereum address {eth_addr}"
);
}
}
#[test]
fn spawn_works() {
let (config, _temp_dir) = test_config();
let mut node = KitchensinkNode::new(&config);
node.spawn(GENESIS_JSON.to_string()).unwrap();
}
#[test]
fn version_works() {
let (config, _temp_dir) = test_config();
let node = KitchensinkNode::new(&config);
let version = node.version().unwrap();
assert!(
version.starts_with("substrate-node"),
"Expected substrate-node version string, got: {version}"
);
}
#[test]
fn eth_rpc_version_works() {
let (config, _temp_dir) = test_config();
let node = KitchensinkNode::new(&config);
let version = node.eth_rpc_version().unwrap();
assert!(
version.starts_with("pallet-revive-eth-rpc"),
"Expected eth-rpc version string, got: {version}"
);
}
}
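For orientation, the assertions above are consistent with right-padding the 20-byte Ethereum address with 12 zero bytes and SS58-encoding the resulting 32-byte account (the all-zero address maps to the well-known all-zero SS58 account). A minimal sketch of that presumed scheme, using sp-core and the hex crate as stand-ins for whatever the node crate actually uses:

use sp_core::crypto::{AccountId32, Ss58Codec};

// Hypothetical re-implementation of the presumed mapping; the real helper is
// KitchensinkNode::eth_to_substrate_address. Assumes zero-padding (inferred
// from the all-zero test vector) and the default SS58 prefix (42).
fn eth_to_ss58(eth_addr: &str) -> anyhow::Result<String> {
    let bytes = hex::decode(eth_addr.trim_start_matches("0x"))?;
    anyhow::ensure!(bytes.len() == 20, "expected a 20-byte Ethereum address");
    let mut account = [0u8; 32];
    account[..20].copy_from_slice(&bytes);
    Ok(AccountId32::from(account).to_ss58check())
}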
+4 -1
@@ -1,10 +1,10 @@
//! This crate implements the testing nodes.
use alloy::rpc::types::{TransactionReceipt, trace::geth::DiffMode};
use revive_dt_config::Arguments;
use revive_dt_node_interaction::EthereumNode;
pub mod geth;
pub mod kitchensink;
pub mod pool;
/// The default genesis configuration.
@@ -28,6 +28,9 @@ pub trait Node: EthereumNode {
/// Returns the nodes connection string.
fn connection_string(&self) -> String;
/// Returns the state diff of the transaction hash in the [TransactionReceipt].
fn state_diff(&self, transaction: TransactionReceipt) -> anyhow::Result<DiffMode>;
/// Returns the node version.
fn version(&self) -> anyhow::Result<String>;
}
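With version() now required alongside connection_string() and state_diff(), generic driver code can report on any backend uniformly. A small hypothetical helper, assuming the crate's Node trait is in scope (log_node itself is illustrative, not part of the crate):

// Hypothetical generic helper over the extended trait.
fn log_node<N: Node>(node: &N) -> anyhow::Result<()> {
    let version = node.version()?;
    println!("node {} at {}", version, node.connection_string());
    Ok(())
}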
-19
@@ -1,19 +0,0 @@
[package]
name = "revive-dt-report"
version.workspace = true
authors.workspace = true
license.workspace = true
edition.workspace = true
repository.workspace = true
rust-version.workspace = true
[dependencies]
revive-dt-config = { workspace = true }
revive-dt-format = { workspace = true }
anyhow = { workspace = true }
log = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
revive-solc-json-interface = { workspace = true }
-94
@@ -1,94 +0,0 @@
//! The report analyzer enriches the raw report data.
use serde::{Deserialize, Serialize};
use crate::reporter::CompilationTask;
/// Provides insights into how well the compilers perform.
#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, PartialOrd)]
pub struct CompilerStatistics {
/// The total number of contracts observed.
pub n_contracts: usize,
/// The mean size of compiled contracts.
pub mean_code_size: usize,
/// The mean size of the optimized YUL IR.
pub mean_yul_size: usize,
/// A proxy metric only, since the YUL text also contains many comments.
pub yul_to_bytecode_size_ratio: f32,
}
impl CompilerStatistics {
/// Cumulatively update the statistics with the next compiler task.
pub fn sample(&mut self, compilation_task: &CompilationTask) {
let Some(output) = &compilation_task.json_output else {
return;
};
let Some(contracts) = &output.contracts else {
return;
};
for (_solidity, contracts) in contracts.iter() {
for (_name, contract) in contracts.iter() {
let Some(evm) = &contract.evm else {
continue;
};
let Some(deploy_code) = &evm.deployed_bytecode else {
continue;
};
// The EVM bytecode can be unlinked and thus is not necessarily a decodable hex
// string; for our statistics this is a good enough approximation.
let bytecode_size = deploy_code.object.len() / 2;
let yul_size = contract
.ir_optimized
.as_ref()
.expect("if the contract has deploy code it should also have the optimized IR")
.len();
self.update_sizes(bytecode_size, yul_size);
}
}
}
/// Updates the size statistics cumulatively.
fn update_sizes(&mut self, bytecode_size: usize, yul_size: usize) {
let n_previous = self.n_contracts;
let n_current = self.n_contracts + 1;
self.n_contracts = n_current;
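// Incremental mean: new_mean = (n_prev * old_mean + sample) / n_new.
// Integer division truncates, so the means can drift slightly low (see the test below).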
self.mean_code_size = (n_previous * self.mean_code_size + bytecode_size) / n_current;
self.mean_yul_size = (n_previous * self.mean_yul_size + yul_size) / n_current;
if self.mean_code_size > 0 {
self.yul_to_bytecode_size_ratio =
self.mean_yul_size as f32 / self.mean_code_size as f32;
}
}
}
#[cfg(test)]
mod tests {
use super::CompilerStatistics;
#[test]
fn compiler_statistics() {
let mut received = CompilerStatistics::default();
received.update_sizes(0, 0);
received.update_sizes(3, 37);
received.update_sizes(123, 456);
let mean_code_size = 41; // (2 * 1 + 123) / 3, truncated by integer division
let mean_yul_size = 164; // (2 * 18 + 456) / 3, exact
let expected = CompilerStatistics {
n_contracts: 3,
mean_code_size,
mean_yul_size,
yul_to_bytecode_size_ratio: mean_yul_size as f32 / mean_code_size as f32,
};
assert_eq!(received, expected);
}
}
-4
@@ -1,4 +0,0 @@
//! The revive differential tests reporting facility.
pub mod analyzer;
pub mod reporter;
-243
@@ -1,243 +0,0 @@
//! The reporter is the central place observing test execution by collecting data.
//!
//! The data collected gives useful insights into the outcome of the test run
//! and helps identify and reproduce failing cases.
use std::{
collections::HashMap,
fs::{self, File, create_dir_all},
path::PathBuf,
sync::{Mutex, OnceLock},
time::{SystemTime, UNIX_EPOCH},
};
use anyhow::Context;
use serde::{Deserialize, Serialize};
use revive_dt_config::{Arguments, TestingPlatform};
use revive_dt_format::{corpus::Corpus, mode::SolcMode};
use revive_solc_json_interface::{SolcStandardJsonInput, SolcStandardJsonOutput};
use crate::analyzer::CompilerStatistics;
pub(crate) static REPORTER: OnceLock<Mutex<Report>> = OnceLock::new();
/// The `Report` data structure stores all relevant information required for generating reports.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct Report {
/// The configuration used during the test.
pub config: Arguments,
/// The observed test corpora.
pub corpora: Vec<Corpus>,
/// The observed test definitions.
pub metadata_files: Vec<PathBuf>,
/// The observed compilation results.
pub compiler_results: HashMap<TestingPlatform, Vec<CompilationResult>>,
/// The observed compilation statistics.
pub compiler_statistics: HashMap<TestingPlatform, CompilerStatistics>,
/// The file name this is serialized to.
#[serde(skip)]
directory: PathBuf,
}
/// Contains a compiled contract.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CompilationTask {
/// The observed compiler input.
pub json_input: SolcStandardJsonInput,
/// The observed compiler output.
pub json_output: Option<SolcStandardJsonOutput>,
/// The observed compiler mode.
pub mode: SolcMode,
/// The observed compiler version.
pub compiler_version: String,
/// The observed error, if any.
pub error: Option<String>,
}
/// Represents a report about a compilation task.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct CompilationResult {
/// The observed compilation task.
pub compilation_task: CompilationTask,
/// The linked span.
pub span: Span,
}
/// The [Span] struct indicates the context of what is being reported.
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct Span {
/// The corpus index this belongs to.
corpus: usize,
/// The metadata file this belongs to.
metadata_file: usize,
/// The index of the case definition this belongs to.
case: usize,
/// The index of the case input this belongs to.
input: usize,
}
impl Report {
/// The file name where this report will be written to.
pub const FILE_NAME: &str = "report.json";
/// The [Span] is expected to initialize the reporter by providing the config.
const INITIALIZED_VIA_SPAN: &str = "requires a Span which initializes the reporter";
/// Create a new [Report].
fn new(config: Arguments) -> anyhow::Result<Self> {
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_millis();
let directory = config.directory().join("report").join(format!("{now}"));
if !directory.exists() {
create_dir_all(&directory)?;
}
Ok(Self {
config,
directory,
..Default::default()
})
}
/// Add a compilation task to the report.
pub fn compilation(span: Span, platform: TestingPlatform, compilation_task: CompilationTask) {
let mut report = REPORTER
.get()
.expect(Report::INITIALIZED_VIA_SPAN)
.lock()
.unwrap();
report
.compiler_statistics
.entry(platform)
.or_default()
.sample(&compilation_task);
report
.compiler_results
.entry(platform)
.or_default()
.push(CompilationResult {
compilation_task,
span,
});
}
/// Write the report to disk.
pub fn save() -> anyhow::Result<()> {
let Some(reporter) = REPORTER.get() else {
return Ok(());
};
let report = reporter.lock().unwrap();
if let Err(error) = report.write_to_file() {
anyhow::bail!("cannot write report: {error}");
}
if report.config.extract_problems {
if let Err(error) = report.save_compiler_problems() {
anyhow::bail!("cannot write compiler problems: {error}");
}
}
Ok(())
}
/// Write compiler problems to disk for later debugging.
pub fn save_compiler_problems(&self) -> anyhow::Result<()> {
for (platform, results) in self.compiler_results.iter() {
for result in results {
// ignore if there were no errors
if result.compilation_task.error.is_none()
&& result
.compilation_task
.json_output
.as_ref()
.and_then(|output| output.errors.as_ref())
.map(|errors| errors.is_empty())
.unwrap_or(true)
{
continue;
}
let path = &self.metadata_files[result.span.metadata_file]
.parent()
.unwrap()
.join(format!("{platform}_errors"));
if !path.exists() {
create_dir_all(path)?;
}
if let Some(error) = result.compilation_task.error.as_ref() {
fs::write(path.join("compiler_error.txt"), error)?;
}
if let Some(errors) = result.compilation_task.json_output.as_ref() {
let file = File::create(path.join("compiler_output.txt"))?;
serde_json::to_writer_pretty(file, &errors)?;
}
}
}
Ok(())
}
fn write_to_file(&self) -> anyhow::Result<()> {
let path = self.directory.join(Self::FILE_NAME);
let file = File::create(&path).context(path.display().to_string())?;
serde_json::to_writer_pretty(file, &self)?;
log::info!("report written to: {}", path.display());
Ok(())
}
}
impl Span {
/// Create a new [Span] with case and input index at 0.
///
/// Initializes the reporting facility on the first call.
pub fn new(corpus: Corpus, config: Arguments) -> anyhow::Result<Self> {
let report = Mutex::new(Report::new(config)?);
let mut reporter = REPORTER.get_or_init(|| report).lock().unwrap();
reporter.corpora.push(corpus);
Ok(Self {
corpus: reporter.corpora.len() - 1,
metadata_file: 0,
case: 0,
input: 0,
})
}
/// Advance to the next metadata file: resets the case and input indices to 0.
pub fn next_metadata(&mut self, metadata_file: PathBuf) {
let mut reporter = REPORTER
.get()
.expect(Report::INITIALIZED_VIA_SPAN)
.lock()
.unwrap();
reporter.metadata_files.push(metadata_file);
self.metadata_file = reporter.metadata_files.len() - 1;
self.case = 0;
self.input = 0;
}
/// Advance to the next case: increases the case index by one and resets the input index to 0.
pub fn next_case(&mut self) {
self.case += 1;
self.input = 0;
}
/// Advance to the next input.
pub fn next_input(&mut self) {
self.input += 1;
}
}
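End to end, the intended call pattern appears to be: the first Span::new initializes the global REPORTER, the span is advanced as the driver walks corpora and metadata files, and Report::save flushes everything to disk. A hypothetical driver sketch; the TestingPlatform variant and all concrete values are placeholders:

// Hypothetical driver, not part of the deleted crate.
fn run_and_report(corpus: Corpus, config: Arguments, task: CompilationTask) -> anyhow::Result<()> {
    let mut span = Span::new(corpus, config)?; // first call initializes REPORTER
    span.next_metadata(PathBuf::from("tests/example.json"));
    Report::compilation(span, TestingPlatform::Geth, task); // variant name assumed
    Report::save() // writes <workdir>/report/<millis>/report.json
}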
+1 -1
@@ -1,6 +1,6 @@
[package]
name = "revive-dt-solc-binaries"
description = "Download and cache solc binaries"
version.workspace = true
authors.workspace = true
license.workspace = true
+1 -1
@@ -60,7 +60,7 @@ fn download_to_file(path: &Path, downloader: &GHDownloader) -> anyhow::Result<()
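// macOS quarantines downloaded binaries; removing the attribute lets solc execute.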
std::process::Command::new("xattr")
.arg("-d")
.arg("com.apple.quarantine")
.arg(path)
.arg(&path)
.stderr(std::process::Stdio::null())
.stdout(std::process::Stdio::null())
Submodule polkadot-sdk deleted from dc3d0e5ab7