mirror of https://github.com/pezkuwichain/revive-differential-tests.git
synced 2026-04-22 10:17:56 +00:00

Compare commits

3 Commits

| SHA1 |
|---|
| 49cbc51546 |
| c2526e48e7 |
| 7878f68c26 |
+3 -1

@@ -10,4 +10,6 @@ node_modules

profile.json.gz
resolc-compiler-tests
workdir
workdir

!/schema.json

Generated
+32 -5

@@ -4501,9 +4501,12 @@ name = "revive-dt-config"
version = "0.1.0"
dependencies = [
 "alloy",
 "anyhow",
 "clap",
 "semver 1.0.26",
 "serde",
 "serde_json",
 "strum",
 "temp-dir",
]

@@ -4518,7 +4521,6 @@ dependencies = [
 "clap",
 "futures",
 "indexmap 2.10.0",
 "once_cell",
 "revive-dt-common",
 "revive-dt-compiler",
 "revive-dt-config",
@@ -4526,11 +4528,10 @@ dependencies = [
 "revive-dt-node",
 "revive-dt-node-interaction",
 "revive-dt-report",
 "schemars 1.0.4",
 "semver 1.0.26",
 "serde",
 "serde_json",
 "temp-dir",
 "tempfile",
 "tokio",
 "tracing",
 "tracing-appender",
@@ -4549,6 +4550,7 @@ dependencies = [
 "regex",
 "revive-common",
 "revive-dt-common",
 "schemars 1.0.4",
 "semver 1.0.26",
 "serde",
 "serde_json",
@@ -4872,10 +4874,24 @@ checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0"
dependencies = [
 "dyn-clone",
 "ref-cast",
 "schemars_derive",
 "semver 1.0.26",
 "serde",
 "serde_json",
]

[[package]]
name = "schemars_derive"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33d020396d1d138dc19f1165df7545479dcd58d93810dc5d646a16e55abefa80"
dependencies = [
 "proc-macro2",
 "quote",
 "serde_derive_internals",
 "syn 2.0.101",
]

[[package]]
name = "schnellru"
version = "0.2.4"
@@ -5060,6 +5076,17 @@ dependencies = [
 "syn 2.0.101",
]

[[package]]
name = "serde_derive_internals"
version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711"
dependencies = [
 "proc-macro2",
 "quote",
 "syn 2.0.101",
]

[[package]]
name = "serde_json"
version = "1.0.140"
@@ -5692,9 +5719,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"

[[package]]
name = "strum"
version = "0.27.1"
version = "0.27.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f64def088c51c9510a8579e3c5d67c65349dcf755e5479ad3d010aa6454e2c32"
checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf"
dependencies = [
 "strum_macros",
]
@@ -37,6 +37,7 @@ moka = "0.12.10"
paste = "1.0.15"
reqwest = { version = "0.12.15", features = ["json"] }
once_cell = "1.21"
schemars = { version = "1.0.4", features = ["semver1"] }
semver = { version = "1.0", features = ["serde"] }
serde = { version = "1.0", default-features = false, features = ["derive"] }
serde_json = { version = "1.0", default-features = false, features = [
@@ -48,6 +49,7 @@ serde_with = { version = "3.14.0" }
sha2 = { version = "0.10.9" }
sp-core = "36.1.0"
sp-runtime = "41.1.0"
strum = { version = "0.27.2", features = ["derive"] }
temp-dir = { version = "0.1.16" }
tempfile = "3.3"
thiserror = "2"
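The new workspace dependency here is `schemars` with the `semver1` feature, which is what lets the test-format types derive a JSON schema even though they embed `semver::Version`. A minimal sketch of that capability; the struct name is hypothetical and not part of this change:

```rust
use schemars::{JsonSchema, schema_for};
use semver::Version;

// Hypothetical stand-in for the test-format types that gain a derived schema.
#[derive(JsonSchema)]
struct ExampleMode {
    // The `semver1` feature provides the `JsonSchema` impl for `Version`.
    version: Option<Version>,
}

fn main() {
    let schema = schema_for!(ExampleMode);
    println!("{}", serde_json::to_string_pretty(&schema).unwrap());
}
```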
@@ -187,10 +187,11 @@ The above corpus file instructs the tool to look for all of the test cases conta
The simplest command to run this tool is the following:

```bash
RUST_LOG="info" cargo run --release -- \
RUST_LOG="info" cargo run --release -- execute-tests \
    --follower geth \
    --corpus path_to_your_corpus_file.json \
    --workdir path_to_a_temporary_directory_to_cache_things_in \
    --number-of-nodes 5 \
    --working-directory path_to_a_temporary_directory_to_cache_things_in \
    --concurrency.number-of-nodes 5 \
    > logs.log \
    2> output.log
```
@@ -0,0 +1,21 @@
/// An iterator that could be either of two iterators.
#[derive(Clone, Debug)]
pub enum EitherIter<A, B> {
    A(A),
    B(B),
}

impl<A, B, T> Iterator for EitherIter<A, B>
where
    A: Iterator<Item = T>,
    B: Iterator<Item = T>,
{
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        match self {
            EitherIter::A(iter) => iter.next(),
            EitherIter::B(iter) => iter.next(),
        }
    }
}
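A small usage sketch, not part of the change itself, showing why this type is handy: the two branches produce different concrete iterator types, yet the function still has a single return type (the function name here is hypothetical):

```rust
fn digits(reversed: bool) -> EitherIter<std::ops::Range<u32>, std::iter::Rev<std::ops::Range<u32>>> {
    if reversed {
        // Reversing changes the concrete iterator type, hence the B branch.
        EitherIter::B((0..10).rev())
    } else {
        EitherIter::A(0..10)
    }
}

fn main() {
    assert_eq!(digits(false).next(), Some(0));
    assert_eq!(digits(true).next(), Some(9));
}
```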
@@ -1,3 +1,5 @@
mod either_iter;
mod files_with_extension_iterator;

pub use either_iter::*;
pub use files_with_extension_iterator::*;
@@ -3,6 +3,7 @@ use semver::Version;
use serde::{Deserialize, Serialize};
use std::fmt::Display;
use std::str::FromStr;
use std::sync::LazyLock;

/// This represents a mode that a given test should be run with, if possible.
///
@@ -34,14 +35,19 @@ impl Display for Mode {

impl Mode {
    /// Return all of the available mode combinations.
    pub fn all() -> impl Iterator<Item = Mode> {
        ModePipeline::test_cases().flat_map(|pipeline| {
            ModeOptimizerSetting::test_cases().map(move |optimize_setting| Mode {
                pipeline,
                optimize_setting,
                version: None,
            })
        })
    pub fn all() -> impl Iterator<Item = &'static Mode> {
        static ALL_MODES: LazyLock<Vec<Mode>> = LazyLock::new(|| {
            ModePipeline::test_cases()
                .flat_map(|pipeline| {
                    ModeOptimizerSetting::test_cases().map(move |optimize_setting| Mode {
                        pipeline,
                        optimize_setting,
                        version: None,
                    })
                })
                .collect::<Vec<_>>()
        });
        ALL_MODES.iter()
    }

    /// Resolves the [`Mode`]'s solidity version requirement into a [`VersionOrRequirement`] if

@@ -1,4 +0,0 @@
use semver::Version;

/// This is the first version of solc that supports the `--via-ir` flag / "viaIR" input JSON.
pub const SOLC_VERSION_SUPPORTING_VIA_YUL_IR: Version = Version::new(0, 8, 13);
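The `Mode::all` rewrite above follows a common pattern: build the full list once inside a `static LazyLock` and hand out `&'static` references, instead of allocating a fresh `Vec` for every caller. A minimal, self-contained sketch of that pattern with a hypothetical stand-in type:

```rust
use std::sync::LazyLock;

#[derive(Debug)]
struct Mode(u8);

fn all_modes() -> impl Iterator<Item = &'static Mode> {
    // Built exactly once, on first use; later calls only iterate over references.
    static ALL: LazyLock<Vec<Mode>> = LazyLock::new(|| (0..4u8).map(Mode).collect());
    ALL.iter()
}

fn main() {
    assert_eq!(all_modes().count(), 4);
    assert_eq!(all_modes().count(), 4); // the cached Vec is reused
}
```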
+33
-53
@@ -3,8 +3,6 @@
|
||||
//! - Polkadot revive resolc compiler
|
||||
//! - Polkadot revive Wasm compiler
|
||||
|
||||
mod constants;
|
||||
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
hash::Hash,
|
||||
@@ -13,14 +11,14 @@ use std::{
|
||||
|
||||
use alloy::json_abi::JsonAbi;
|
||||
use alloy_primitives::Address;
|
||||
use anyhow::Context;
|
||||
use anyhow::{Context as _, Result};
|
||||
use semver::Version;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use revive_common::EVMVersion;
|
||||
use revive_dt_common::cached_fs::read_to_string;
|
||||
use revive_dt_common::types::VersionOrRequirement;
|
||||
use revive_dt_config::Arguments;
|
||||
use revive_dt_config::{ResolcConfiguration, SolcConfiguration, WorkingDirectoryConfiguration};
|
||||
|
||||
// Re-export this as it's a part of the compiler interface.
|
||||
pub use revive_dt_common::types::{Mode, ModeOptimizerSetting, ModePipeline};
|
||||
@@ -30,36 +28,38 @@ pub mod revive_resolc;
|
||||
pub mod solc;
|
||||
|
||||
/// A common interface for all supported Solidity compilers.
|
||||
pub trait SolidityCompiler {
|
||||
/// Extra options specific to the compiler.
|
||||
type Options: Default + PartialEq + Eq + Hash;
|
||||
pub trait SolidityCompiler: Sized {
|
||||
/// Instantiates a new compiler object.
|
||||
///
|
||||
/// Based on the given [`Context`] and [`VersionOrRequirement`] this function instantiates a
|
||||
/// new compiler object. Certain implementations of this trait might choose to cache the
|
||||
/// compiler objects and return the same ones over and over again.
|
||||
fn new(
|
||||
context: impl AsRef<SolcConfiguration>
|
||||
+ AsRef<ResolcConfiguration>
|
||||
+ AsRef<WorkingDirectoryConfiguration>,
|
||||
version: impl Into<Option<VersionOrRequirement>>,
|
||||
) -> impl Future<Output = Result<Self>>;
|
||||
|
||||
/// Returns the version of the compiler.
|
||||
fn version(&self) -> &Version;
|
||||
|
||||
/// Returns the path of the compiler executable.
|
||||
fn path(&self) -> &Path;
|
||||
|
||||
/// The low-level compiler interface.
|
||||
fn build(
|
||||
&self,
|
||||
input: CompilerInput,
|
||||
additional_options: Self::Options,
|
||||
) -> impl Future<Output = anyhow::Result<CompilerOutput>>;
|
||||
fn build(&self, input: CompilerInput) -> impl Future<Output = Result<CompilerOutput>>;
|
||||
|
||||
fn new(solc_executable: PathBuf) -> Self;
|
||||
|
||||
fn get_compiler_executable(
|
||||
config: &Arguments,
|
||||
version: impl Into<VersionOrRequirement>,
|
||||
) -> impl Future<Output = anyhow::Result<PathBuf>>;
|
||||
|
||||
fn version(&self) -> impl Future<Output = anyhow::Result<Version>>;
|
||||
|
||||
/// Does the compiler support the provided mode and version settings?
|
||||
/// Does the compiler support the provided mode and version settings.
|
||||
fn supports_mode(
|
||||
compiler_version: &Version,
|
||||
optimize_setting: ModeOptimizerSetting,
|
||||
&self,
|
||||
optimizer_setting: ModeOptimizerSetting,
|
||||
pipeline: ModePipeline,
|
||||
) -> bool;
|
||||
}
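To make the reworked trait surface concrete, here is a hedged sketch of a generic caller as it might be written inside this crate. The helper itself is hypothetical; `ExecutionContext` is the configuration type introduced later in this change, and the trait items used are exactly the ones listed above.

```rust
use anyhow::Result;
use revive_dt_common::types::VersionOrRequirement;
use revive_dt_config::ExecutionContext;

// Assumes this lives next to the trait, so `SolidityCompiler`, `CompilerInput`
// and `CompilerOutput` are already in scope.
async fn compile_with<C: SolidityCompiler>(
    ctx: &ExecutionContext,
    input: CompilerInput,
) -> Result<CompilerOutput> {
    // `new` resolves the requested version and may hand back a cached instance.
    let compiler = C::new(ctx, None::<VersionOrRequirement>).await?;
    println!("using {} at {}", compiler.version(), compiler.path().display());
    compiler.build(input).await
}
```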
|
||||
|
||||
/// The generic compilation input configuration.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
|
||||
pub struct CompilerInput {
|
||||
pub pipeline: Option<ModePipeline>,
|
||||
pub optimization: Option<ModeOptimizerSetting>,
|
||||
@@ -80,21 +80,12 @@ pub struct CompilerOutput {
|
||||
}
|
||||
|
||||
/// A generic builder style interface for configuring the supported compiler options.
|
||||
pub struct Compiler<T: SolidityCompiler> {
|
||||
#[derive(Default)]
|
||||
pub struct Compiler {
|
||||
input: CompilerInput,
|
||||
additional_options: T::Options,
|
||||
}
|
||||
|
||||
impl Default for Compiler<solc::Solc> {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Compiler<T>
|
||||
where
|
||||
T: SolidityCompiler,
|
||||
{
|
||||
impl Compiler {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
input: CompilerInput {
|
||||
@@ -107,7 +98,6 @@ where
|
||||
libraries: Default::default(),
|
||||
revert_string_handling: Default::default(),
|
||||
},
|
||||
additional_options: T::Options::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -136,7 +126,7 @@ where
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_source(mut self, path: impl AsRef<Path>) -> anyhow::Result<Self> {
|
||||
pub fn with_source(mut self, path: impl AsRef<Path>) -> Result<Self> {
|
||||
self.input.sources.insert(
|
||||
path.as_ref().to_path_buf(),
|
||||
read_to_string(path.as_ref()).context("Failed to read the contract source")?,
|
||||
@@ -166,11 +156,6 @@ where
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_additional_options(mut self, options: impl Into<T::Options>) -> Self {
|
||||
self.additional_options = options.into();
|
||||
self
|
||||
}
|
||||
|
||||
pub fn then(self, callback: impl FnOnce(Self) -> Self) -> Self {
|
||||
callback(self)
|
||||
}
|
||||
@@ -179,17 +164,12 @@ where
|
||||
callback(self)
|
||||
}
|
||||
|
||||
pub async fn try_build(
|
||||
self,
|
||||
compiler_path: impl AsRef<Path>,
|
||||
) -> anyhow::Result<CompilerOutput> {
|
||||
T::new(compiler_path.as_ref().to_path_buf())
|
||||
.build(self.input, self.additional_options)
|
||||
.await
|
||||
pub async fn try_build(self, compiler: &impl SolidityCompiler) -> Result<CompilerOutput> {
|
||||
compiler.build(self.input).await
|
||||
}
|
||||
|
||||
pub fn input(&self) -> CompilerInput {
|
||||
self.input.clone()
|
||||
pub fn input(&self) -> &CompilerInput {
|
||||
&self.input
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,39 +3,78 @@
|
||||
|
||||
use std::{
|
||||
path::PathBuf,
|
||||
process::{Command, Stdio},
|
||||
sync::LazyLock,
|
||||
process::Stdio,
|
||||
sync::{Arc, LazyLock},
|
||||
};
|
||||
|
||||
use dashmap::DashMap;
|
||||
use revive_dt_common::types::VersionOrRequirement;
|
||||
use revive_dt_config::Arguments;
|
||||
use revive_dt_config::{ResolcConfiguration, SolcConfiguration, WorkingDirectoryConfiguration};
|
||||
use revive_solc_json_interface::{
|
||||
SolcStandardJsonInput, SolcStandardJsonInputLanguage, SolcStandardJsonInputSettings,
|
||||
SolcStandardJsonInputSettingsOptimizer, SolcStandardJsonInputSettingsSelection,
|
||||
SolcStandardJsonOutput,
|
||||
};
|
||||
|
||||
use crate::{CompilerInput, CompilerOutput, ModeOptimizerSetting, ModePipeline, SolidityCompiler};
|
||||
use crate::{
|
||||
CompilerInput, CompilerOutput, ModeOptimizerSetting, ModePipeline, SolidityCompiler, solc::Solc,
|
||||
};
|
||||
|
||||
use alloy::json_abi::JsonAbi;
|
||||
use anyhow::Context;
|
||||
use anyhow::{Context as _, Result};
|
||||
use semver::Version;
|
||||
use tokio::{io::AsyncWriteExt, process::Command as AsyncCommand};
|
||||
|
||||
// TODO: I believe that we need to also pass the solc compiler to resolc so that resolc uses the
|
||||
// specified solc compiler. I believe that currently we completely ignore the specified solc binary
|
||||
// when invoking resolc which doesn't seem right if we're using solc as a compiler frontend.
|
||||
|
||||
/// A wrapper around the `resolc` binary, emitting PVM-compatible bytecode.
|
||||
#[derive(Debug)]
|
||||
pub struct Resolc {
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct Resolc(Arc<ResolcInner>);
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
struct ResolcInner {
|
||||
/// The internal solc compiler that the resolc compiler uses as a compiler frontend.
|
||||
solc: Solc,
|
||||
/// Path to the `resolc` executable
|
||||
resolc_path: PathBuf,
|
||||
}
|
||||
|
||||
impl SolidityCompiler for Resolc {
|
||||
type Options = Vec<String>;
|
||||
async fn new(
|
||||
context: impl AsRef<SolcConfiguration>
|
||||
+ AsRef<ResolcConfiguration>
|
||||
+ AsRef<WorkingDirectoryConfiguration>,
|
||||
version: impl Into<Option<VersionOrRequirement>>,
|
||||
) -> Result<Self> {
|
||||
/// This is a cache of all of the resolc compiler objects. Since we do not currently support
|
||||
/// multiple resolc compiler versions, the cache simply maps the solc compiler and
|
||||
/// its version to the resolc compiler.
|
||||
static COMPILERS_CACHE: LazyLock<DashMap<Solc, Resolc>> = LazyLock::new(Default::default);
|
||||
|
||||
let resolc_configuration = AsRef::<ResolcConfiguration>::as_ref(&context);
|
||||
|
||||
let solc = Solc::new(&context, version)
|
||||
.await
|
||||
.context("Failed to create the solc compiler frontend for resolc")?;
|
||||
|
||||
Ok(COMPILERS_CACHE
|
||||
.entry(solc.clone())
|
||||
.or_insert_with(|| {
|
||||
Self(Arc::new(ResolcInner {
|
||||
solc,
|
||||
resolc_path: resolc_configuration.path.clone(),
|
||||
}))
|
||||
})
|
||||
.clone())
|
||||
}
|
||||
|
||||
fn version(&self) -> &Version {
|
||||
// We currently return the solc compiler version since we do not support multiple resolc
|
||||
// compiler versions.
|
||||
self.0.solc.version()
|
||||
}
|
||||
|
||||
fn path(&self) -> &std::path::Path {
|
||||
&self.0.resolc_path
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", ret)]
|
||||
async fn build(
|
||||
@@ -52,8 +91,7 @@ impl SolidityCompiler for Resolc {
|
||||
// resolc. So, we need to go back to this later once it's supported.
|
||||
revert_string_handling: _,
|
||||
}: CompilerInput,
|
||||
additional_options: Self::Options,
|
||||
) -> anyhow::Result<CompilerOutput> {
|
||||
) -> Result<CompilerOutput> {
|
||||
if !matches!(pipeline, None | Some(ModePipeline::ViaYulIR)) {
|
||||
anyhow::bail!(
|
||||
"Resolc only supports the Y (via Yul IR) pipeline, but the provided pipeline is {pipeline:?}"
|
||||
@@ -100,7 +138,7 @@ impl SolidityCompiler for Resolc {
|
||||
},
|
||||
};
|
||||
|
||||
let mut command = AsyncCommand::new(&self.resolc_path);
|
||||
let mut command = AsyncCommand::new(self.path());
|
||||
command
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
@@ -121,7 +159,7 @@ impl SolidityCompiler for Resolc {
|
||||
}
|
||||
let mut child = command
|
||||
.spawn()
|
||||
.with_context(|| format!("Failed to spawn resolc at {}", self.resolc_path.display()))?;
|
||||
.with_context(|| format!("Failed to spawn resolc at {}", self.path().display()))?;
|
||||
|
||||
let stdin_pipe = child.stdin.as_mut().expect("stdin must be piped");
|
||||
let serialized_input = serde_json::to_vec(&input)
|
||||
@@ -238,108 +276,11 @@ impl SolidityCompiler for Resolc {
|
||||
Ok(compiler_output)
|
||||
}
|
||||
|
||||
fn new(resolc_path: PathBuf) -> Self {
|
||||
Resolc { resolc_path }
|
||||
}
|
||||
|
||||
async fn get_compiler_executable(
|
||||
config: &Arguments,
|
||||
_version: impl Into<VersionOrRequirement>,
|
||||
) -> anyhow::Result<PathBuf> {
|
||||
if !config.resolc.as_os_str().is_empty() {
|
||||
return Ok(config.resolc.clone());
|
||||
}
|
||||
|
||||
Ok(PathBuf::from("resolc"))
|
||||
}
|
||||
|
||||
async fn version(&self) -> anyhow::Result<semver::Version> {
|
||||
/// This is a cache of the path of the compiler to the version number of the compiler. We
|
||||
/// choose to cache the version in this way rather than through a field on the struct since
|
||||
/// compiler objects are being created all the time from the path and the compiler object is
|
||||
/// not reused over time.
|
||||
static VERSION_CACHE: LazyLock<DashMap<PathBuf, Version>> = LazyLock::new(Default::default);
|
||||
|
||||
match VERSION_CACHE.entry(self.resolc_path.clone()) {
|
||||
dashmap::Entry::Occupied(occupied_entry) => Ok(occupied_entry.get().clone()),
|
||||
dashmap::Entry::Vacant(vacant_entry) => {
|
||||
let output = Command::new(self.resolc_path.as_path())
|
||||
.arg("--version")
|
||||
.stdout(Stdio::piped())
|
||||
.spawn()
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"Failed to spawn resolc at {} to get version",
|
||||
self.resolc_path.display()
|
||||
)
|
||||
})?
|
||||
.wait_with_output()
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"Failed waiting for resolc at {} to finish --version",
|
||||
self.resolc_path.display()
|
||||
)
|
||||
})?
|
||||
.stdout;
|
||||
|
||||
let output = String::from_utf8_lossy(&output);
|
||||
let version_string = output
|
||||
.split("version ")
|
||||
.nth(1)
|
||||
.context("Version parsing failed")?
|
||||
.split("+")
|
||||
.next()
|
||||
.context("Version parsing failed")?;
|
||||
|
||||
let version = Version::parse(version_string).with_context(|| {
|
||||
format!("Failed to parse resolc semver from '{version_string}'")
|
||||
})?;
|
||||
|
||||
vacant_entry.insert(version.clone());
|
||||
|
||||
Ok(version)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn supports_mode(
|
||||
_compiler_version: &Version,
|
||||
_optimize_setting: ModeOptimizerSetting,
|
||||
&self,
|
||||
optimize_setting: ModeOptimizerSetting,
|
||||
pipeline: ModePipeline,
|
||||
) -> bool {
|
||||
// We only support the Y (IE compile via Yul IR) mode here, which also means that we can
|
||||
// only use solc version 0.8.13 and above. We must always compile via Yul IR as resolc
|
||||
// needs this to translate to LLVM IR and then RISCV.
|
||||
|
||||
// Note: the original implementation of this function looked like the following:
|
||||
// ```
|
||||
// pipeline == ModePipeline::ViaYulIR && compiler_version >= &SOLC_VERSION_SUPPORTING_VIA_YUL_IR
|
||||
// ```
|
||||
// However, that implementation is sadly incorrect since the version that's passed into this
|
||||
// function is not the version of solc but the version of resolc. This is despite the fact
|
||||
// that resolc depends on Solc for the initial Yul codegen. Therefore, we have skipped the
|
||||
// version check until we have a better integration between resolc and solc.
|
||||
pipeline == ModePipeline::ViaYulIR
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[tokio::test]
|
||||
async fn compiler_version_can_be_obtained() {
|
||||
// Arrange
|
||||
let args = Arguments::default();
|
||||
let path = Resolc::get_compiler_executable(&args, Version::new(0, 7, 6))
|
||||
.await
|
||||
.unwrap();
|
||||
let compiler = Resolc::new(path);
|
||||
|
||||
// Act
|
||||
let version = compiler.version().await;
|
||||
|
||||
// Assert
|
||||
let _ = version.expect("Failed to get version");
|
||||
pipeline == ModePipeline::ViaYulIR && self.0.solc.supports_mode(optimize_setting, pipeline)
|
||||
}
|
||||
}
|
||||
|
||||
+68
-129
@@ -3,19 +3,18 @@
|
||||
|
||||
use std::{
|
||||
path::PathBuf,
|
||||
process::{Command, Stdio},
|
||||
sync::LazyLock,
|
||||
process::Stdio,
|
||||
sync::{Arc, LazyLock},
|
||||
};
|
||||
|
||||
use dashmap::DashMap;
|
||||
use revive_dt_common::types::VersionOrRequirement;
|
||||
use revive_dt_config::Arguments;
|
||||
use revive_dt_config::{ResolcConfiguration, SolcConfiguration, WorkingDirectoryConfiguration};
|
||||
use revive_dt_solc_binaries::download_solc;
|
||||
|
||||
use super::constants::SOLC_VERSION_SUPPORTING_VIA_YUL_IR;
|
||||
use crate::{CompilerInput, CompilerOutput, ModeOptimizerSetting, ModePipeline, SolidityCompiler};
|
||||
|
||||
use anyhow::Context;
|
||||
use anyhow::{Context as _, Result};
|
||||
use foundry_compilers_artifacts::{
|
||||
output_selection::{
|
||||
BytecodeOutputSelection, ContractOutputSelection, EvmOutputSelection, OutputSelection,
|
||||
@@ -26,13 +25,64 @@ use foundry_compilers_artifacts::{
|
||||
use semver::Version;
|
||||
use tokio::{io::AsyncWriteExt, process::Command as AsyncCommand};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Solc {
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct Solc(Arc<SolcInner>);
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
struct SolcInner {
|
||||
/// The path of the solidity compiler executable that this object uses.
|
||||
solc_path: PathBuf,
|
||||
/// The version of the solidity compiler executable that this object uses.
|
||||
solc_version: Version,
|
||||
}
|
||||
|
||||
impl SolidityCompiler for Solc {
|
||||
type Options = ();
|
||||
async fn new(
|
||||
context: impl AsRef<SolcConfiguration>
|
||||
+ AsRef<ResolcConfiguration>
|
||||
+ AsRef<WorkingDirectoryConfiguration>,
|
||||
version: impl Into<Option<VersionOrRequirement>>,
|
||||
) -> Result<Self> {
|
||||
// This is a cache for the compiler objects so that whenever the same compiler version is
|
||||
// requested the same object is returned. We do this as we do not want to keep cloning the
|
||||
// compiler around.
|
||||
static COMPILERS_CACHE: LazyLock<DashMap<(PathBuf, Version), Solc>> =
|
||||
LazyLock::new(Default::default);
|
||||
|
||||
let working_directory_configuration =
|
||||
AsRef::<WorkingDirectoryConfiguration>::as_ref(&context);
|
||||
let solc_configuration = AsRef::<SolcConfiguration>::as_ref(&context);
|
||||
|
||||
// We attempt to download the solc binary. Note the following: this call does the version
|
||||
// resolution for us. Therefore, even if the download didn't proceed, this function will
|
||||
// resolve the version requirement into a canonical version of the compiler. It's then up
|
||||
// to us to either use the provided path or not.
|
||||
let version = version
|
||||
.into()
|
||||
.unwrap_or_else(|| solc_configuration.version.clone().into());
|
||||
let (version, path) =
|
||||
download_solc(working_directory_configuration.as_path(), version, false)
|
||||
.await
|
||||
.context("Failed to download/get path to solc binary")?;
|
||||
|
||||
Ok(COMPILERS_CACHE
|
||||
.entry((path.clone(), version.clone()))
|
||||
.or_insert_with(|| {
|
||||
Self(Arc::new(SolcInner {
|
||||
solc_path: path,
|
||||
solc_version: version,
|
||||
}))
|
||||
})
|
||||
.clone())
|
||||
}
|
||||
|
||||
fn version(&self) -> &Version {
|
||||
&self.0.solc_version
|
||||
}
|
||||
|
||||
fn path(&self) -> &std::path::Path {
|
||||
&self.0.solc_path
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", ret)]
|
||||
async fn build(
|
||||
@@ -47,19 +97,12 @@ impl SolidityCompiler for Solc {
|
||||
libraries,
|
||||
revert_string_handling,
|
||||
}: CompilerInput,
|
||||
_: Self::Options,
|
||||
) -> anyhow::Result<CompilerOutput> {
|
||||
let compiler_supports_via_ir = self
|
||||
.version()
|
||||
.await
|
||||
.context("Failed to query solc version to determine via-ir support")?
|
||||
>= SOLC_VERSION_SUPPORTING_VIA_YUL_IR;
|
||||
|
||||
) -> Result<CompilerOutput> {
|
||||
// Be careful to entirely omit the viaIR field if the compiler does not support it,
|
||||
// as it will error if you provide fields it does not know about. Because
|
||||
// `supports_mode` is called prior to instantiating a compiler, we should never
|
||||
// ask for something which is invalid.
|
||||
let via_ir = match (pipeline, compiler_supports_via_ir) {
|
||||
let via_ir = match (pipeline, self.compiler_supports_yul()) {
|
||||
(pipeline, true) => pipeline.map(|p| p.via_yul_ir()),
|
||||
(_pipeline, false) => None,
|
||||
};
|
||||
@@ -119,7 +162,7 @@ impl SolidityCompiler for Solc {
|
||||
},
|
||||
};
|
||||
|
||||
let mut command = AsyncCommand::new(&self.solc_path);
|
||||
let mut command = AsyncCommand::new(self.path());
|
||||
command
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
@@ -140,7 +183,7 @@ impl SolidityCompiler for Solc {
|
||||
}
|
||||
let mut child = command
|
||||
.spawn()
|
||||
.with_context(|| format!("Failed to spawn solc at {}", self.solc_path.display()))?;
|
||||
.with_context(|| format!("Failed to spawn solc at {}", self.path().display()))?;
|
||||
|
||||
let stdin = child.stdin.as_mut().expect("should be piped");
|
||||
let serialized_input = serde_json::to_vec(&input)
|
||||
@@ -220,125 +263,21 @@ impl SolidityCompiler for Solc {
|
||||
Ok(compiler_output)
|
||||
}
|
||||
|
||||
fn new(solc_path: PathBuf) -> Self {
|
||||
Self { solc_path }
|
||||
}
|
||||
|
||||
async fn get_compiler_executable(
|
||||
config: &Arguments,
|
||||
version: impl Into<VersionOrRequirement>,
|
||||
) -> anyhow::Result<PathBuf> {
|
||||
let path = download_solc(config.directory(), version, config.wasm)
|
||||
.await
|
||||
.context("Failed to download/get path to solc binary")?;
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
async fn version(&self) -> anyhow::Result<semver::Version> {
|
||||
/// This is a cache of the path of the compiler to the version number of the compiler. We
|
||||
/// choose to cache the version in this way rather than through a field on the struct since
|
||||
/// compiler objects are being created all the time from the path and the compiler object is
|
||||
/// not reused over time.
|
||||
static VERSION_CACHE: LazyLock<DashMap<PathBuf, Version>> = LazyLock::new(Default::default);
|
||||
|
||||
match VERSION_CACHE.entry(self.solc_path.clone()) {
|
||||
dashmap::Entry::Occupied(occupied_entry) => Ok(occupied_entry.get().clone()),
|
||||
dashmap::Entry::Vacant(vacant_entry) => {
|
||||
// The following is the parsing code for the version from the solc version strings
|
||||
// which look like the following:
|
||||
// ```
|
||||
// solc, the solidity compiler commandline interface
|
||||
// Version: 0.8.30+commit.73712a01.Darwin.appleclang
|
||||
// ```
|
||||
let child = Command::new(self.solc_path.as_path())
|
||||
.arg("--version")
|
||||
.stdout(Stdio::piped())
|
||||
.spawn()
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"Failed to spawn solc at {} to get version",
|
||||
self.solc_path.display()
|
||||
)
|
||||
})?;
|
||||
let output = child.wait_with_output().with_context(|| {
|
||||
format!(
|
||||
"Failed waiting for solc at {} to finish --version",
|
||||
self.solc_path.display()
|
||||
)
|
||||
})?;
|
||||
let output = String::from_utf8_lossy(&output.stdout);
|
||||
let version_line = output
|
||||
.split("Version: ")
|
||||
.nth(1)
|
||||
.context("Version parsing failed")?;
|
||||
let version_string = version_line
|
||||
.split("+")
|
||||
.next()
|
||||
.context("Version parsing failed")?;
|
||||
|
||||
let version = Version::parse(version_string).with_context(|| {
|
||||
format!("Failed to parse solc semver from '{version_string}'")
|
||||
})?;
|
||||
|
||||
vacant_entry.insert(version.clone());
|
||||
|
||||
Ok(version)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn supports_mode(
|
||||
compiler_version: &Version,
|
||||
&self,
|
||||
_optimize_setting: ModeOptimizerSetting,
|
||||
pipeline: ModePipeline,
|
||||
) -> bool {
|
||||
// solc 0.8.13 and above supports --via-ir, and less than that does not. Thus, we support mode E
|
||||
// (i.e. no Yul IR) in either case, but only support Y (via Yul IR) if the compiler is new enough.
|
||||
pipeline == ModePipeline::ViaEVMAssembly
|
||||
|| (pipeline == ModePipeline::ViaYulIR
|
||||
&& compiler_version >= &SOLC_VERSION_SUPPORTING_VIA_YUL_IR)
|
||||
|| (pipeline == ModePipeline::ViaYulIR && self.compiler_supports_yul())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[tokio::test]
|
||||
async fn compiler_version_can_be_obtained() {
|
||||
// Arrange
|
||||
let args = Arguments::default();
|
||||
let path = Solc::get_compiler_executable(&args, Version::new(0, 7, 6))
|
||||
.await
|
||||
.unwrap();
|
||||
let compiler = Solc::new(path);
|
||||
|
||||
// Act
|
||||
let version = compiler.version().await;
|
||||
|
||||
// Assert
|
||||
assert_eq!(
|
||||
version.expect("Failed to get version"),
|
||||
Version::new(0, 7, 6)
|
||||
)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn compiler_version_can_be_obtained1() {
|
||||
// Arrange
|
||||
let args = Arguments::default();
|
||||
let path = Solc::get_compiler_executable(&args, Version::new(0, 4, 21))
|
||||
.await
|
||||
.unwrap();
|
||||
let compiler = Solc::new(path);
|
||||
|
||||
// Act
|
||||
let version = compiler.version().await;
|
||||
|
||||
// Assert
|
||||
assert_eq!(
|
||||
version.expect("Failed to get version"),
|
||||
Version::new(0, 4, 21)
|
||||
)
|
||||
impl Solc {
|
||||
fn compiler_supports_yul(&self) -> bool {
|
||||
const SOLC_VERSION_SUPPORTING_VIA_YUL_IR: Version = Version::new(0, 8, 13);
|
||||
self.version() >= &SOLC_VERSION_SUPPORTING_VIA_YUL_IR
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,24 +1,25 @@
use std::path::PathBuf;

use revive_dt_common::types::VersionOrRequirement;
use revive_dt_compiler::{Compiler, SolidityCompiler, revive_resolc::Resolc, solc::Solc};
use revive_dt_config::Arguments;
use revive_dt_config::ExecutionContext;
use semver::Version;

#[tokio::test]
async fn contracts_can_be_compiled_with_solc() {
    // Arrange
    let args = Arguments::default();
    let compiler_path = Solc::get_compiler_executable(&args, Version::new(0, 8, 30))
    let args = ExecutionContext::default();
    let solc = Solc::new(&args, VersionOrRequirement::Version(Version::new(0, 8, 30)))
        .await
        .unwrap();

    // Act
    let output = Compiler::<Solc>::new()
    let output = Compiler::new()
        .with_source("./tests/assets/array_one_element/callable.sol")
        .unwrap()
        .with_source("./tests/assets/array_one_element/main.sol")
        .unwrap()
        .try_build(compiler_path)
        .try_build(&solc)
        .await;

    // Assert
@@ -48,18 +49,18 @@ async fn contracts_can_be_compiled_with_solc()
#[tokio::test]
async fn contracts_can_be_compiled_with_resolc() {
    // Arrange
    let args = Arguments::default();
    let compiler_path = Resolc::get_compiler_executable(&args, Version::new(0, 8, 30))
    let args = ExecutionContext::default();
    let resolc = Resolc::new(&args, VersionOrRequirement::Version(Version::new(0, 8, 30)))
        .await
        .unwrap();

    // Act
    let output = Compiler::<Resolc>::new()
    let output = Compiler::new()
        .with_source("./tests/assets/array_one_element/callable.sol")
        .unwrap()
        .with_source("./tests/assets/array_one_element/main.sol")
        .unwrap()
        .try_build(compiler_path)
        .try_build(&resolc)
        .await;

    // Assert

@@ -10,10 +10,13 @@ rust-version.workspace = true

[dependencies]
alloy = { workspace = true }
anyhow = { workspace = true }
clap = { workspace = true }
semver = { workspace = true }
temp-dir = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
strum = { workspace = true }

[lints]
workspace = true
+484
-138
@@ -2,219 +2,565 @@
|
||||
|
||||
use std::{
|
||||
fmt::Display,
|
||||
fs::read_to_string,
|
||||
ops::Deref,
|
||||
path::{Path, PathBuf},
|
||||
sync::LazyLock,
|
||||
str::FromStr,
|
||||
sync::{Arc, LazyLock, OnceLock},
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use alloy::{network::EthereumWallet, signers::local::PrivateKeySigner};
|
||||
use clap::{Parser, ValueEnum};
|
||||
use alloy::{
|
||||
genesis::Genesis,
|
||||
hex::ToHexExt,
|
||||
network::EthereumWallet,
|
||||
primitives::{FixedBytes, U256},
|
||||
signers::local::PrivateKeySigner,
|
||||
};
|
||||
use clap::{Parser, ValueEnum, ValueHint};
|
||||
use semver::Version;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde::{Serialize, Serializer};
|
||||
use strum::{AsRefStr, Display, EnumString, IntoStaticStr};
|
||||
use temp_dir::TempDir;
|
||||
|
||||
#[derive(Debug, Parser, Clone, Serialize, Deserialize)]
|
||||
#[derive(Clone, Debug, Parser, Serialize)]
|
||||
#[command(name = "retester")]
|
||||
pub struct Arguments {
|
||||
/// The `solc` version to use if the test didn't specify it explicitly.
|
||||
#[arg(long = "solc", short, default_value = "0.8.29")]
|
||||
pub solc: Version,
|
||||
pub enum Context {
|
||||
/// Executes tests in the MatterLabs format differentially against a leader and a follower.
|
||||
ExecuteTests(Box<ExecutionContext>),
|
||||
/// Exports the JSON schema of the MatterLabs test format used by the tool.
|
||||
ExportJsonSchema,
|
||||
}
|
||||
|
||||
/// Use the Wasm compiler versions.
|
||||
#[arg(long = "wasm")]
|
||||
pub wasm: bool,
|
||||
impl Context {
|
||||
pub fn working_directory_configuration(&self) -> &WorkingDirectoryConfiguration {
|
||||
self.as_ref()
|
||||
}
|
||||
|
||||
/// The path to the `resolc` executable to be tested.
|
||||
pub fn report_configuration(&self) -> &ReportConfiguration {
|
||||
self.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<WorkingDirectoryConfiguration> for Context {
|
||||
fn as_ref(&self) -> &WorkingDirectoryConfiguration {
|
||||
match self {
|
||||
Context::ExecuteTests(execution_context) => &execution_context.working_directory,
|
||||
Context::ExportJsonSchema => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<ReportConfiguration> for Context {
|
||||
fn as_ref(&self) -> &ReportConfiguration {
|
||||
match self {
|
||||
Context::ExecuteTests(execution_context) => &execution_context.report_configuration,
|
||||
Context::ExportJsonSchema => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Parser, Serialize)]
|
||||
pub struct ExecutionContext {
|
||||
/// The working directory that the program will use for all of the temporary artifacts needed at
|
||||
/// runtime.
|
||||
///
|
||||
/// By default it uses the `resolc` binary found in `$PATH`.
|
||||
///
|
||||
/// If `--wasm` is set, this should point to the resolc Wasm file.
|
||||
#[arg(long = "resolc", short, default_value = "resolc")]
|
||||
pub resolc: PathBuf,
|
||||
/// If not specified, then a temporary directory will be created and used by the program for all
|
||||
/// temporary artifacts.
|
||||
#[clap(
|
||||
short,
|
||||
long,
|
||||
default_value = "",
|
||||
value_hint = ValueHint::DirPath,
|
||||
)]
|
||||
pub working_directory: WorkingDirectoryConfiguration,
|
||||
|
||||
/// The differential testing leader node implementation.
|
||||
#[arg(short, long = "leader", default_value_t = TestingPlatform::Geth)]
|
||||
pub leader: TestingPlatform,
|
||||
|
||||
/// The differential testing follower node implementation.
|
||||
#[arg(short, long = "follower", default_value_t = TestingPlatform::Kitchensink)]
|
||||
pub follower: TestingPlatform,
|
||||
|
||||
/// A list of test corpus JSON files to be tested.
|
||||
#[arg(long = "corpus", short)]
|
||||
pub corpus: Vec<PathBuf>,
|
||||
|
||||
/// A place to store temporary artifacts during test execution.
|
||||
///
|
||||
/// Creates a temporary dir if not specified.
|
||||
#[arg(long = "workdir", short)]
|
||||
pub working_directory: Option<PathBuf>,
|
||||
/// Configuration parameters for the solc compiler.
|
||||
#[clap(flatten, next_help_heading = "Solc Configuration")]
|
||||
pub solc_configuration: SolcConfiguration,
|
||||
|
||||
/// Add a tempdir manually if `working_directory` was not given.
|
||||
/// Configuration parameters for the resolc compiler.
|
||||
#[clap(flatten, next_help_heading = "Resolc Configuration")]
|
||||
pub resolc_configuration: ResolcConfiguration,
|
||||
|
||||
/// Configuration parameters for the geth node.
|
||||
#[clap(flatten, next_help_heading = "Geth Configuration")]
|
||||
pub geth_configuration: GethConfiguration,
|
||||
|
||||
/// Configuration parameters for the Kitchensink.
|
||||
#[clap(flatten, next_help_heading = "Kitchensink Configuration")]
|
||||
pub kitchensink_configuration: KitchensinkConfiguration,
|
||||
|
||||
/// Configuration parameters for the Revive Dev Node.
|
||||
#[clap(flatten, next_help_heading = "Revive Dev Node Configuration")]
|
||||
pub revive_dev_node_configuration: ReviveDevNodeConfiguration,
|
||||
|
||||
/// Configuration parameters for the Eth Rpc.
|
||||
#[clap(flatten, next_help_heading = "Eth RPC Configuration")]
|
||||
pub eth_rpc_configuration: EthRpcConfiguration,
|
||||
|
||||
/// Configuration parameters for the genesis.
|
||||
#[clap(flatten, next_help_heading = "Genesis Configuration")]
|
||||
pub genesis_configuration: GenesisConfiguration,
|
||||
|
||||
/// Configuration parameters for the wallet.
|
||||
#[clap(flatten, next_help_heading = "Wallet Configuration")]
|
||||
pub wallet_configuration: WalletConfiguration,
|
||||
|
||||
/// Configuration parameters for concurrency.
|
||||
#[clap(flatten, next_help_heading = "Concurrency Configuration")]
|
||||
pub concurrency_configuration: ConcurrencyConfiguration,
|
||||
|
||||
/// Configuration parameters for the compilers and compilation.
|
||||
#[clap(flatten, next_help_heading = "Compilation Configuration")]
|
||||
pub compilation_configuration: CompilationConfiguration,
|
||||
|
||||
/// Configuration parameters for the report.
|
||||
#[clap(flatten, next_help_heading = "Report Configuration")]
|
||||
pub report_configuration: ReportConfiguration,
|
||||
}
|
||||
|
||||
impl Default for ExecutionContext {
|
||||
fn default() -> Self {
|
||||
Self::parse_from(["execution-context"])
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<WorkingDirectoryConfiguration> for ExecutionContext {
|
||||
fn as_ref(&self) -> &WorkingDirectoryConfiguration {
|
||||
&self.working_directory
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<SolcConfiguration> for ExecutionContext {
|
||||
fn as_ref(&self) -> &SolcConfiguration {
|
||||
&self.solc_configuration
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<ResolcConfiguration> for ExecutionContext {
|
||||
fn as_ref(&self) -> &ResolcConfiguration {
|
||||
&self.resolc_configuration
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<GethConfiguration> for ExecutionContext {
|
||||
fn as_ref(&self) -> &GethConfiguration {
|
||||
&self.geth_configuration
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<KitchensinkConfiguration> for ExecutionContext {
|
||||
fn as_ref(&self) -> &KitchensinkConfiguration {
|
||||
&self.kitchensink_configuration
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<ReviveDevNodeConfiguration> for ExecutionContext {
|
||||
fn as_ref(&self) -> &ReviveDevNodeConfiguration {
|
||||
&self.revive_dev_node_configuration
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<EthRpcConfiguration> for ExecutionContext {
|
||||
fn as_ref(&self) -> &EthRpcConfiguration {
|
||||
&self.eth_rpc_configuration
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<GenesisConfiguration> for ExecutionContext {
|
||||
fn as_ref(&self) -> &GenesisConfiguration {
|
||||
&self.genesis_configuration
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<WalletConfiguration> for ExecutionContext {
|
||||
fn as_ref(&self) -> &WalletConfiguration {
|
||||
&self.wallet_configuration
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<ConcurrencyConfiguration> for ExecutionContext {
|
||||
fn as_ref(&self) -> &ConcurrencyConfiguration {
|
||||
&self.concurrency_configuration
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<CompilationConfiguration> for ExecutionContext {
|
||||
fn as_ref(&self) -> &CompilationConfiguration {
|
||||
&self.compilation_configuration
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<ReportConfiguration> for ExecutionContext {
|
||||
fn as_ref(&self) -> &ReportConfiguration {
|
||||
&self.report_configuration
|
||||
}
|
||||
}
|
||||
|
||||
/// A set of configuration parameters for Solc.
|
||||
#[derive(Clone, Debug, Parser, Serialize)]
|
||||
pub struct SolcConfiguration {
|
||||
/// Specifies the default version of the Solc compiler that should be used if there is no
|
||||
/// override specified by one of the test cases.
|
||||
#[clap(long = "solc.version", default_value = "0.8.29")]
|
||||
pub version: Version,
|
||||
}
|
||||
|
||||
/// A set of configuration parameters for Resolc.
|
||||
#[derive(Clone, Debug, Parser, Serialize)]
|
||||
pub struct ResolcConfiguration {
|
||||
/// Specifies the path of the resolc compiler to be used by the tool.
|
||||
///
|
||||
/// We attach it here because [TempDir] prunes itself on drop.
|
||||
/// If this is not specified, then the tool assumes that it should use the resolc binary that's
|
||||
/// provided in the user's $PATH.
|
||||
#[clap(id = "resolc.path", long = "resolc.path", default_value = "resolc")]
|
||||
pub path: PathBuf,
|
||||
}
|
||||
|
||||
/// A set of configuration parameters for Geth.
|
||||
#[derive(Clone, Debug, Parser, Serialize)]
|
||||
pub struct GethConfiguration {
|
||||
/// Specifies the path of the geth node to be used by the tool.
|
||||
///
|
||||
/// If this is not specified, then the tool assumes that it should use the geth binary that's
|
||||
/// provided in the user's $PATH.
|
||||
#[clap(id = "geth.path", long = "geth.path", default_value = "geth")]
|
||||
pub path: PathBuf,
|
||||
|
||||
/// The amount of time to wait upon startup before considering that the node timed out.
|
||||
#[clap(
|
||||
id = "geth.start-timeout-ms",
|
||||
long = "geth.start-timeout-ms",
|
||||
default_value = "5000",
|
||||
value_parser = parse_duration
|
||||
)]
|
||||
pub start_timeout_ms: Duration,
|
||||
}
|
||||
|
||||
/// A set of configuration parameters for Kitchensink.
|
||||
#[derive(Clone, Debug, Parser, Serialize)]
|
||||
pub struct KitchensinkConfiguration {
|
||||
/// Specifies the path of the kitchensink node to be used by the tool.
|
||||
///
|
||||
/// If this is not specified, then the tool assumes that it should use the kitchensink binary
|
||||
/// that's provided in the user's $PATH.
|
||||
#[clap(
|
||||
id = "kitchensink.path",
|
||||
long = "kitchensink.path",
|
||||
default_value = "substrate-node"
|
||||
)]
|
||||
pub path: PathBuf,
|
||||
|
||||
/// The amount of time to wait upon startup before considering that the node timed out.
|
||||
#[clap(
|
||||
id = "kitchensink.start-timeout-ms",
|
||||
long = "kitchensink.start-timeout-ms",
|
||||
default_value = "5000",
|
||||
value_parser = parse_duration
|
||||
)]
|
||||
pub start_timeout_ms: Duration,
|
||||
|
||||
/// This configures the tool to use Kitchensink instead of using the revive-dev-node.
|
||||
#[clap(long = "kitchensink.dont-use-dev-node")]
|
||||
pub use_kitchensink: bool,
|
||||
}
|
||||
|
||||
/// A set of configuration parameters for the revive dev node.
|
||||
#[derive(Clone, Debug, Parser, Serialize)]
|
||||
pub struct ReviveDevNodeConfiguration {
|
||||
/// Specifies the path of the revive dev node to be used by the tool.
|
||||
///
|
||||
/// If this is not specified, then the tool assumes that it should use the revive dev node binary
|
||||
/// that's provided in the user's $PATH.
|
||||
#[clap(
|
||||
id = "revive-dev-node.path",
|
||||
long = "revive-dev-node.path",
|
||||
default_value = "revive-dev-node"
|
||||
)]
|
||||
pub path: PathBuf,
|
||||
|
||||
/// The amount of time to wait upon startup before considering that the node timed out.
|
||||
#[clap(
|
||||
id = "revive-dev-node.start-timeout-ms",
|
||||
long = "revive-dev-node.start-timeout-ms",
|
||||
default_value = "5000",
|
||||
value_parser = parse_duration
|
||||
)]
|
||||
pub start_timeout_ms: Duration,
|
||||
}
|
||||
|
||||
/// A set of configuration parameters for the ETH RPC.
|
||||
#[derive(Clone, Debug, Parser, Serialize)]
|
||||
pub struct EthRpcConfiguration {
|
||||
/// Specifies the path of the ETH RPC to be used by the tool.
|
||||
///
|
||||
/// If this is not specified, then the tool assumes that it should use the ETH RPC binary
|
||||
/// that's provided in the user's $PATH.
|
||||
#[clap(id = "eth-rpc.path", long = "eth-rpc.path", default_value = "eth-rpc")]
|
||||
pub path: PathBuf,
|
||||
|
||||
/// The amount of time to wait upon startup before considering that the node timed out.
|
||||
#[clap(
|
||||
id = "eth-rpc.start-timeout-ms",
|
||||
long = "eth-rpc.start-timeout-ms",
|
||||
default_value = "5000",
|
||||
value_parser = parse_duration
|
||||
)]
|
||||
pub start_timeout_ms: Duration,
|
||||
}
|
||||
|
||||
/// A set of configuration parameters for the genesis.
|
||||
#[derive(Clone, Debug, Parser, Serialize)]
|
||||
pub struct GenesisConfiguration {
|
||||
/// Specifies the path of the genesis file to use for the nodes that are started.
|
||||
///
|
||||
/// This is expected to be the path of a JSON geth genesis file.
|
||||
#[clap(id = "genesis.path", long = "genesis.path")]
|
||||
path: Option<PathBuf>,
|
||||
|
||||
/// The genesis object found at the provided path.
|
||||
#[clap(skip)]
|
||||
#[serde(skip)]
|
||||
pub temp_dir: Option<&'static TempDir>,
|
||||
genesis: OnceLock<Genesis>,
|
||||
}
|
||||
|
||||
/// The path to the `geth` executable.
|
||||
///
|
||||
/// By default it uses `geth` binary found in `$PATH`.
|
||||
#[arg(short, long = "geth", default_value = "geth")]
|
||||
pub geth: PathBuf,
|
||||
impl GenesisConfiguration {
|
||||
pub fn genesis(&self) -> anyhow::Result<&Genesis> {
|
||||
static DEFAULT_GENESIS: LazyLock<Genesis> = LazyLock::new(|| {
|
||||
let genesis = include_str!("../../../genesis.json");
|
||||
serde_json::from_str(genesis).unwrap()
|
||||
});
|
||||
|
||||
/// The maximum time in milliseconds to wait for geth to start.
|
||||
#[arg(long = "geth-start-timeout", default_value = "5000")]
|
||||
pub geth_start_timeout: u64,
|
||||
match self.genesis.get() {
|
||||
Some(genesis) => Ok(genesis),
|
||||
None => {
|
||||
let genesis = match self.path.as_ref() {
|
||||
Some(genesis_path) => {
|
||||
let genesis_content = read_to_string(genesis_path)?;
|
||||
serde_json::from_str(genesis_content.as_str())?
|
||||
}
|
||||
None => DEFAULT_GENESIS.clone(),
|
||||
};
|
||||
Ok(self.genesis.get_or_init(|| genesis))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
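The `genesis()` accessor above reads and parses the genesis file at most once while still letting errors bubble up to the caller, which is why it checks `get()` first and only calls `get_or_init` with an already-validated value. A stripped-down sketch of that fallible lazy-initialisation pattern, with hypothetical fields and types:

```rust
use std::sync::OnceLock;

struct LazyNumber {
    raw: Option<String>,
    parsed: OnceLock<u64>,
}

impl LazyNumber {
    fn value(&self) -> anyhow::Result<&u64> {
        if let Some(value) = self.parsed.get() {
            return Ok(value);
        }
        // Parse outside of `get_or_init` so `?` can propagate the error;
        // `get_or_init` then only ever stores a value that parsed successfully.
        let parsed = match &self.raw {
            Some(raw) => raw.trim().parse::<u64>()?,
            None => 0,
        };
        Ok(self.parsed.get_or_init(|| parsed))
    }
}
```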
|
||||
|
||||
/// Configure nodes according to this genesis.json file.
|
||||
#[arg(long = "genesis", default_value = "genesis.json")]
|
||||
pub genesis_file: PathBuf,
|
||||
|
||||
/// The signing account private key.
|
||||
#[arg(
|
||||
short,
|
||||
long = "account",
|
||||
/// A set of configuration parameters for the wallet.
|
||||
#[derive(Clone, Debug, Parser, Serialize)]
|
||||
pub struct WalletConfiguration {
|
||||
/// The private key of the default signer.
|
||||
#[clap(
|
||||
long = "wallet.default-private-key",
|
||||
default_value = "0x4f3edf983ac636a65a842ce7c78d9aa706d3b113bce9c46f30d7d21715b23b1d"
|
||||
)]
|
||||
pub account: String,
|
||||
#[serde(serialize_with = "serialize_private_key")]
|
||||
default_key: PrivateKeySigner,
|
||||
|
||||
/// This argument controls which private keys the nodes should have access to and be added to
|
||||
/// its wallet signers. With a value of N, private keys (0, N] will be added to the signer set
|
||||
/// of the node.
|
||||
#[arg(long = "private-keys-count", default_value_t = 100_000)]
|
||||
pub private_keys_to_add: usize,
|
||||
#[clap(long = "wallet.additional-keys", default_value_t = 100_000)]
|
||||
additional_keys: usize,
|
||||
|
||||
/// The differential testing leader node implementation.
|
||||
#[arg(short, long = "leader", default_value = "geth")]
|
||||
pub leader: TestingPlatform,
|
||||
/// The wallet object that will be used.
|
||||
#[clap(skip)]
|
||||
#[serde(skip)]
|
||||
wallet: OnceLock<Arc<EthereumWallet>>,
|
||||
}
|
||||
|
||||
/// The differential testing follower node implementation.
|
||||
#[arg(short, long = "follower", default_value = "kitchensink")]
|
||||
pub follower: TestingPlatform,
|
||||
impl WalletConfiguration {
|
||||
pub fn wallet(&self) -> Arc<EthereumWallet> {
|
||||
self.wallet
|
||||
.get_or_init(|| {
|
||||
let mut wallet = EthereumWallet::new(self.default_key.clone());
|
||||
for signer in (1..=self.additional_keys)
|
||||
.map(|id| U256::from(id))
|
||||
.map(|id| id.to_be_bytes::<32>())
|
||||
.map(|id| PrivateKeySigner::from_bytes(&FixedBytes(id)).unwrap())
|
||||
{
|
||||
wallet.register_signer(signer);
|
||||
}
|
||||
Arc::new(wallet)
|
||||
})
|
||||
.clone()
|
||||
}
|
||||
}
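The additional signers registered above are derived from the integers `1..=additional_keys`: the key material is simply each integer's big-endian 32-byte encoding, starting at 1 because zero is not a valid secp256k1 private key. A hedged sketch of that derivation in isolation (the helper name is hypothetical):

```rust
use alloy::primitives::{FixedBytes, U256};
use alloy::signers::local::PrivateKeySigner;

/// Mirrors the derivation used in `wallet()` above.
fn signer_for(id: usize) -> PrivateKeySigner {
    let bytes = U256::from(id).to_be_bytes::<32>();
    PrivateKeySigner::from_bytes(&FixedBytes(bytes)).expect("non-zero ids are valid keys")
}

fn main() {
    // id 1 corresponds to the private key 0x00...01.
    println!("first derived signer: {}", signer_for(1).address());
}
```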
|
||||
|
||||
/// Only compile against this testing platform (doesn't execute the tests).
|
||||
#[arg(long = "compile-only")]
|
||||
pub compile_only: Option<TestingPlatform>,
|
||||
fn serialize_private_key<S>(value: &PrivateKeySigner, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
value.to_bytes().encode_hex().serialize(serializer)
|
||||
}
|
||||
|
||||
/// A set of configuration for concurrency.
|
||||
#[derive(Clone, Debug, Parser, Serialize)]
|
||||
pub struct ConcurrencyConfiguration {
|
||||
/// Determines the amount of nodes that will be spawned for each chain.
|
||||
#[arg(long, default_value = "1")]
|
||||
#[clap(long = "concurrency.number-of-nodes", default_value_t = 5)]
|
||||
pub number_of_nodes: usize,
|
||||
|
||||
/// Determines the number of tokio worker threads that will be used.
|
||||
#[arg(
|
||||
long,
|
||||
long = "concurrency.number-of-threads",
|
||||
default_value_t = std::thread::available_parallelism()
|
||||
.map(|n| n.get())
|
||||
.unwrap_or(1)
|
||||
)]
|
||||
pub number_of_threads: usize,
|
||||
|
||||
/// Determines the amount of concurrent tasks that will be spawned to run tests. Defaults to 10 x the number of nodes.
|
||||
#[arg(long)]
|
||||
pub number_concurrent_tasks: Option<usize>,
|
||||
|
||||
/// Extract problems back to the test corpus.
|
||||
#[arg(short, long = "extract-problems")]
|
||||
pub extract_problems: bool,
|
||||
|
||||
/// The path to the `kitchensink` executable.
|
||||
/// Determines the amount of concurrent tasks that will be spawned to run tests.
|
||||
///
|
||||
/// By default it uses `substrate-node` binary found in `$PATH`.
|
||||
#[arg(short, long = "kitchensink", default_value = "substrate-node")]
|
||||
pub kitchensink: PathBuf,
|
||||
/// Defaults to 10 x the number of nodes.
|
||||
#[arg(long = "concurrency.number-of-concurrent-tasks")]
|
||||
number_concurrent_tasks: Option<usize>,
|
||||
|
||||
/// The path to the `revive-dev-node` executable.
|
||||
///
|
||||
/// By default it uses `revive-dev-node` binary found in `$PATH`.
|
||||
#[arg(long = "revive-dev-node", default_value = "revive-dev-node")]
|
||||
pub revive_dev_node: PathBuf,
|
||||
/// Determines if the concurrency limit should be ignored or not.
|
||||
#[arg(long = "concurrency.ignore-concurrency-limit")]
|
||||
ignore_concurrency_limit: bool,
|
||||
}
|
||||
|
||||
/// By default the tool uses the revive-dev-node when it's running differential tests against
|
||||
/// PolkaVM since the dev-node is much faster than kitchensink. This flag allows the caller to
|
||||
/// configure the tool to use kitchensink rather than the dev-node.
|
||||
#[arg(long)]
|
||||
pub use_kitchensink_not_dev_node: bool,
|
||||
|
||||
/// The path to the `eth_proxy` executable.
|
||||
///
|
||||
/// By default it uses `eth-rpc` binary found in `$PATH`.
|
||||
#[arg(short = 'p', long = "eth_proxy", default_value = "eth-rpc")]
|
||||
pub eth_proxy: PathBuf,
|
||||
impl ConcurrencyConfiguration {
|
||||
pub fn concurrency_limit(&self) -> Option<usize> {
|
||||
match self.ignore_concurrency_limit {
|
||||
true => None,
|
||||
false => Some(
|
||||
self.number_concurrent_tasks
|
||||
.unwrap_or(20 * self.number_of_nodes),
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Parser, Serialize)]
|
||||
pub struct CompilationConfiguration {
|
||||
/// Controls if the compilation cache should be invalidated or not.
|
||||
#[arg(short, long)]
|
||||
#[arg(long = "compilation.invalidate-cache")]
|
||||
pub invalidate_compilation_cache: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Parser, Serialize)]
|
||||
pub struct ReportConfiguration {
|
||||
/// Controls if the compiler input is included in the final report.
|
||||
#[clap(long = "report.include-compiler-input")]
|
||||
pub report_include_compiler_input: bool,
|
||||
pub include_compiler_input: bool,
|
||||
|
||||
/// Controls if the compiler output is included in the final report.
|
||||
#[clap(long = "report.include-compiler-output")]
|
||||
pub report_include_compiler_output: bool,
|
||||
pub include_compiler_output: bool,
|
||||
}
|
||||
|
||||
impl Arguments {
|
||||
/// Return the configured working directory with the following precedence:
|
||||
/// 1. `self.working_directory` if it was provided.
|
||||
/// 2. `self.temp_dir` if it was provided.
|
||||
/// 3. Panic.
|
||||
pub fn directory(&self) -> &Path {
|
||||
if let Some(path) = &self.working_directory {
|
||||
return path.as_path();
|
||||
}
|
||||
/// Represents the working directory that the program uses.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum WorkingDirectoryConfiguration {
|
||||
/// A temporary directory is used as the working directory. This will be removed when dropped.
|
||||
TemporaryDirectory(Arc<TempDir>),
|
||||
/// A directory with a path is used as the working directory.
|
||||
Path(PathBuf),
|
||||
}
|
||||
|
||||
if let Some(temp_dir) = &self.temp_dir {
|
||||
return temp_dir.path();
|
||||
}
|
||||
|
||||
panic!("should have a workdir configured")
|
||||
}
|
||||
|
||||
/// Return the number of concurrent tasks to run. This is provided via the
|
||||
/// `--number-concurrent-tasks` argument, and otherwise defaults to --number-of-nodes * 20.
|
||||
pub fn number_of_concurrent_tasks(&self) -> usize {
|
||||
self.number_concurrent_tasks
|
||||
.unwrap_or(20 * self.number_of_nodes)
|
||||
}
|
||||
|
||||
/// Try to parse `self.account` into a [PrivateKeySigner],
|
||||
/// panicking on error.
|
||||
pub fn wallet(&self) -> EthereumWallet {
|
||||
let signer = self
|
||||
.account
|
||||
.parse::<PrivateKeySigner>()
|
||||
.unwrap_or_else(|error| {
|
||||
panic!("private key '{}' parsing error: {error}", self.account);
|
||||
});
|
||||
EthereumWallet::new(signer)
|
||||
impl WorkingDirectoryConfiguration {
|
||||
pub fn as_path(&self) -> &Path {
|
||||
self.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Arguments {
|
||||
impl Deref for WorkingDirectoryConfiguration {
|
||||
type Target = Path;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
self.as_path()
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<Path> for WorkingDirectoryConfiguration {
|
||||
fn as_ref(&self) -> &Path {
|
||||
match self {
|
||||
WorkingDirectoryConfiguration::TemporaryDirectory(temp_dir) => temp_dir.path(),
|
||||
WorkingDirectoryConfiguration::Path(path) => path.as_path(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for WorkingDirectoryConfiguration {
|
||||
fn default() -> Self {
|
||||
static TEMP_DIR: LazyLock<TempDir> = LazyLock::new(|| TempDir::new().unwrap());
|
||||
TempDir::new()
|
||||
.map(Arc::new)
|
||||
.map(Self::TemporaryDirectory)
|
||||
.expect("Failed to create the temporary directory")
|
||||
}
|
||||
}
|
||||
|
||||
let default = Arguments::parse_from(["retester"]);
|
||||
impl FromStr for WorkingDirectoryConfiguration {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
Arguments {
|
||||
temp_dir: Some(&TEMP_DIR),
|
||||
..default
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s {
|
||||
"" => Ok(Default::default()),
|
||||
_ => Ok(Self::Path(PathBuf::from(s))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for WorkingDirectoryConfiguration {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
Display::fmt(&self.as_path().display(), f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for WorkingDirectoryConfiguration {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
self.as_path().serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_duration(s: &str) -> anyhow::Result<Duration> {
|
||||
u64::from_str(s)
|
||||
.map(Duration::from_millis)
|
||||
.map_err(Into::into)
|
||||
}
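A usage sketch for the helper above (hedged; the input value is made up, and the string is interpreted as milliseconds per the `Duration::from_millis` call):

    let timeout = parse_duration("1500").expect("not a valid millisecond count");
    assert_eq!(timeout, std::time::Duration::from_millis(1500));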
|
||||
|
||||
/// The Solidity compatible node implementation.
|
||||
///
|
||||
/// This describes the solutions to be tested against at a high level.
|
||||
#[derive(
|
||||
Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, ValueEnum, Serialize, Deserialize,
|
||||
Clone,
|
||||
Copy,
|
||||
Debug,
|
||||
PartialEq,
|
||||
Eq,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
Hash,
|
||||
Serialize,
|
||||
ValueEnum,
|
||||
EnumString,
|
||||
Display,
|
||||
AsRefStr,
|
||||
IntoStaticStr,
|
||||
)]
|
||||
#[clap(rename_all = "lower")]
|
||||
#[strum(serialize_all = "kebab-case")]
|
||||
pub enum TestingPlatform {
|
||||
/// The go-ethereum reference full node EVM implementation.
|
||||
Geth,
|
||||
/// The kitchensink runtime provides the PolkaVM (PVM) based node implentation.
|
||||
/// The kitchensink runtime provides the PolkaVM (PVM) based node implementation.
|
||||
Kitchensink,
|
||||
}
|
||||
|
||||
impl Display for TestingPlatform {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Geth => f.write_str("geth"),
|
||||
Self::Kitchensink => f.write_str("revive"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,16 +28,14 @@ cacache = { workspace = true }
clap = { workspace = true }
futures = { workspace = true }
indexmap = { workspace = true }
once_cell = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
tracing-appender = { workspace = true }
tracing-subscriber = { workspace = true }
schemars = { workspace = true }
semver = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
temp-dir = { workspace = true }
tempfile = { workspace = true }

[lints]
workspace = true

+120
-128
@@ -2,6 +2,7 @@
|
||||
//! be reused between runs.
|
||||
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
collections::HashMap,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
@@ -9,13 +10,13 @@ use std::{
|
||||
|
||||
use futures::FutureExt;
|
||||
use revive_dt_common::iterators::FilesWithExtensionIterator;
|
||||
use revive_dt_compiler::{Compiler, CompilerInput, CompilerOutput, Mode, SolidityCompiler};
|
||||
use revive_dt_config::Arguments;
|
||||
use revive_dt_compiler::{Compiler, CompilerOutput, Mode, SolidityCompiler};
|
||||
use revive_dt_config::TestingPlatform;
|
||||
use revive_dt_format::metadata::{ContractIdent, ContractInstance, Metadata};
|
||||
|
||||
use alloy::{hex::ToHexExt, json_abi::JsonAbi, primitives::Address};
|
||||
use anyhow::{Context as _, Error, Result};
|
||||
use once_cell::sync::Lazy;
|
||||
use revive_dt_report::ExecutionSpecificReporter;
|
||||
use semver::Version;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::sync::{Mutex, RwLock};
|
||||
@@ -23,9 +24,17 @@ use tracing::{Instrument, debug, debug_span, instrument};
|
||||
|
||||
use crate::Platform;
|
||||
|
||||
pub struct CachedCompiler(ArtifactsCache);
|
||||
pub struct CachedCompiler<'a> {
|
||||
/// The cache that stores the compiled contracts.
|
||||
artifacts_cache: ArtifactsCache,
|
||||
|
||||
impl CachedCompiler {
|
||||
/// This is a mechanism that the cached compiler uses so that if multiple compilation requests
/// come in for the same contract we only compile it once; all other tasks that request the
/// same compilation concurrently get the cached version.
|
||||
cache_key_lock: RwLock<HashMap<CacheKey<'a>, Arc<Mutex<()>>>>,
|
||||
}
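The `cache_key_lock` field above enables a double-checked, per-key locking pattern: look up an existing mutex under the read lock and only take the write lock to insert one on a miss. A minimal sketch of that pattern, assuming tokio's `RwLock`/`Mutex` and a plain `String` key for simplicity (names are illustrative, not the crate's API):

    use std::{collections::HashMap, sync::Arc};
    use tokio::sync::{Mutex, RwLock};

    async fn lock_for_key(
        locks: &RwLock<HashMap<String, Arc<Mutex<()>>>>,
        key: &str,
    ) -> Arc<Mutex<()>> {
        // Fast path: the mutex for this key already exists.
        if let Some(lock) = locks.read().await.get(key).cloned() {
            return lock;
        }
        // Slow path: take the write lock and create the mutex on a miss.
        locks.write().await.entry(key.to_owned()).or_default().clone()
    }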
|
||||
|
||||
impl<'a> CachedCompiler<'a> {
|
||||
pub async fn new(path: impl AsRef<Path>, invalidate_cache: bool) -> Result<Self> {
|
||||
let mut cache = ArtifactsCache::new(path);
|
||||
if invalidate_cache {
|
||||
@@ -34,7 +43,10 @@ impl CachedCompiler {
|
||||
.await
|
||||
.context("Failed to invalidate compilation cache directory")?;
|
||||
}
|
||||
Ok(Self(cache))
|
||||
Ok(Self {
|
||||
artifacts_cache: cache,
|
||||
cache_key_lock: Default::default(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Compiles or gets the compilation artifacts from the cache.
|
||||
@@ -43,7 +55,7 @@ impl CachedCompiler {
|
||||
level = "debug",
|
||||
skip_all,
|
||||
fields(
|
||||
metadata_file_path = %metadata_file_path.as_ref().display(),
|
||||
metadata_file_path = %metadata_file_path.display(),
|
||||
%mode,
|
||||
platform = P::config_id().to_string()
|
||||
),
|
||||
@@ -51,76 +63,33 @@ impl CachedCompiler {
|
||||
)]
|
||||
pub async fn compile_contracts<P: Platform>(
|
||||
&self,
|
||||
metadata: &Metadata,
|
||||
metadata_file_path: impl AsRef<Path>,
|
||||
mode: &Mode,
|
||||
config: &Arguments,
|
||||
metadata: &'a Metadata,
|
||||
metadata_file_path: &'a Path,
|
||||
mode: Cow<'a, Mode>,
|
||||
deployed_libraries: Option<&HashMap<ContractInstance, (ContractIdent, Address, JsonAbi)>>,
|
||||
compilation_success_report_callback: impl Fn(
|
||||
Version,
|
||||
PathBuf,
|
||||
bool,
|
||||
Option<CompilerInput>,
|
||||
CompilerOutput,
|
||||
) + Clone,
|
||||
compilation_failure_report_callback: impl Fn(
|
||||
Option<Version>,
|
||||
Option<PathBuf>,
|
||||
Option<CompilerInput>,
|
||||
String,
|
||||
),
|
||||
) -> Result<(CompilerOutput, Version)> {
|
||||
static CACHE_KEY_LOCK: Lazy<RwLock<HashMap<CacheKey, Arc<Mutex<()>>>>> =
|
||||
Lazy::new(Default::default);
|
||||
|
||||
let compiler_version_or_requirement = mode.compiler_version_to_use(config.solc.clone());
|
||||
let compiler_path = <P::Compiler as SolidityCompiler>::get_compiler_executable(
|
||||
config,
|
||||
compiler_version_or_requirement,
|
||||
)
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
compilation_failure_report_callback(None, None, None, format!("{err:#}"))
|
||||
})
|
||||
.context("Failed to obtain compiler executable path")?;
|
||||
let compiler_version = <P::Compiler as SolidityCompiler>::new(compiler_path.clone())
|
||||
.version()
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
compilation_failure_report_callback(
|
||||
None,
|
||||
Some(compiler_path.clone()),
|
||||
None,
|
||||
format!("{err:#}"),
|
||||
)
|
||||
})
|
||||
.context("Failed to query compiler version")?;
|
||||
|
||||
compiler: &P::Compiler,
|
||||
reporter: &ExecutionSpecificReporter,
|
||||
) -> Result<CompilerOutput> {
|
||||
let cache_key = CacheKey {
|
||||
platform_key: P::config_id().to_string(),
|
||||
compiler_version: compiler_version.clone(),
|
||||
metadata_file_path: metadata_file_path.as_ref().to_path_buf(),
|
||||
platform_key: P::config_id(),
|
||||
compiler_version: compiler.version().clone(),
|
||||
metadata_file_path,
|
||||
solc_mode: mode.clone(),
|
||||
};
|
||||
|
||||
let compilation_callback = || {
|
||||
let compiler_path = compiler_path.clone();
|
||||
let compiler_version = compiler_version.clone();
|
||||
let compilation_success_report_callback = compilation_success_report_callback.clone();
|
||||
async move {
|
||||
compile_contracts::<P>(
|
||||
metadata
|
||||
.directory()
|
||||
.context("Failed to get metadata directory while preparing compilation")?,
|
||||
compiler_path,
|
||||
compiler_version,
|
||||
metadata
|
||||
.files_to_compile()
|
||||
.context("Failed to enumerate files to compile from metadata")?,
|
||||
mode,
|
||||
&mode,
|
||||
deployed_libraries,
|
||||
compilation_success_report_callback,
|
||||
compilation_failure_report_callback,
|
||||
compiler,
|
||||
reporter,
|
||||
)
|
||||
.map(|compilation_result| compilation_result.map(CacheValue::new))
|
||||
.await
|
||||
@@ -153,12 +122,15 @@ impl CachedCompiler {
|
||||
// Lock this specific cache key so that we do not end up in an inconsistent state. When
// multiple cases come in asking for the same compilation artifacts, we don't want them all
// to trigger a compilation on a cache miss. Hence, the lock here.
|
||||
let read_guard = CACHE_KEY_LOCK.read().await;
|
||||
let read_guard = self.cache_key_lock.read().await;
|
||||
let mutex = match read_guard.get(&cache_key).cloned() {
|
||||
Some(value) => value,
|
||||
Some(value) => {
|
||||
drop(read_guard);
|
||||
value
|
||||
}
|
||||
None => {
|
||||
drop(read_guard);
|
||||
CACHE_KEY_LOCK
|
||||
self.cache_key_lock
|
||||
.write()
|
||||
.await
|
||||
.entry(cache_key.clone())
|
||||
@@ -168,15 +140,29 @@ impl CachedCompiler {
|
||||
};
|
||||
let _guard = mutex.lock().await;
|
||||
|
||||
match self.0.get(&cache_key).await {
|
||||
match self.artifacts_cache.get(&cache_key).await {
|
||||
Some(cache_value) => {
|
||||
compilation_success_report_callback(
|
||||
compiler_version.clone(),
|
||||
compiler_path,
|
||||
true,
|
||||
None,
|
||||
cache_value.compiler_output.clone(),
|
||||
);
|
||||
if deployed_libraries.is_some() {
|
||||
reporter
|
||||
.report_post_link_contracts_compilation_succeeded_event(
|
||||
compiler.version().clone(),
|
||||
compiler.path(),
|
||||
true,
|
||||
None,
|
||||
cache_value.compiler_output.clone(),
|
||||
)
|
||||
.expect("Can't happen");
|
||||
} else {
|
||||
reporter
|
||||
.report_pre_link_contracts_compilation_succeeded_event(
|
||||
compiler.version().clone(),
|
||||
compiler.path(),
|
||||
true,
|
||||
None,
|
||||
cache_value.compiler_output.clone(),
|
||||
)
|
||||
.expect("Can't happen");
|
||||
}
|
||||
cache_value.compiler_output
|
||||
}
|
||||
None => {
|
||||
@@ -189,38 +175,24 @@ impl CachedCompiler {
|
||||
}
|
||||
};
|
||||
|
||||
Ok((compiled_contracts, compiler_version))
|
||||
Ok(compiled_contracts)
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
async fn compile_contracts<P: Platform>(
|
||||
metadata_directory: impl AsRef<Path>,
|
||||
compiler_path: impl AsRef<Path>,
|
||||
compiler_version: Version,
|
||||
mut files_to_compile: impl Iterator<Item = PathBuf>,
|
||||
mode: &Mode,
|
||||
deployed_libraries: Option<&HashMap<ContractInstance, (ContractIdent, Address, JsonAbi)>>,
|
||||
compilation_success_report_callback: impl Fn(
|
||||
Version,
|
||||
PathBuf,
|
||||
bool,
|
||||
Option<CompilerInput>,
|
||||
CompilerOutput,
|
||||
),
|
||||
compilation_failure_report_callback: impl Fn(
|
||||
Option<Version>,
|
||||
Option<PathBuf>,
|
||||
Option<CompilerInput>,
|
||||
String,
|
||||
),
|
||||
compiler: &P::Compiler,
|
||||
reporter: &ExecutionSpecificReporter,
|
||||
) -> Result<CompilerOutput> {
|
||||
let all_sources_in_dir = FilesWithExtensionIterator::new(metadata_directory.as_ref())
|
||||
.with_allowed_extension("sol")
|
||||
.with_use_cached_fs(true)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let compiler = Compiler::<P::Compiler>::new()
|
||||
let compilation = Compiler::new()
|
||||
.with_allow_path(metadata_directory)
|
||||
// Handling the modes
|
||||
.with_optimization(mode.optimize_setting)
|
||||
@@ -228,14 +200,6 @@ async fn compile_contracts<P: Platform>(
|
||||
// Adding the contract sources to the compiler.
|
||||
.try_then(|compiler| {
|
||||
files_to_compile.try_fold(compiler, |compiler, path| compiler.with_source(path))
|
||||
})
|
||||
.inspect_err(|err| {
|
||||
compilation_failure_report_callback(
|
||||
Some(compiler_version.clone()),
|
||||
Some(compiler_path.as_ref().to_path_buf()),
|
||||
None,
|
||||
format!("{err:#}"),
|
||||
)
|
||||
})?
|
||||
// Adding the deployed libraries to the compiler.
|
||||
.then(|compiler| {
|
||||
@@ -253,27 +217,55 @@ async fn compile_contracts<P: Platform>(
|
||||
})
|
||||
});
|
||||
|
||||
let compiler_input = compiler.input();
|
||||
let compiler_output = compiler
|
||||
.try_build(compiler_path.as_ref())
|
||||
.await
|
||||
.inspect_err(|err| {
|
||||
compilation_failure_report_callback(
|
||||
Some(compiler_version.clone()),
|
||||
Some(compiler_path.as_ref().to_path_buf()),
|
||||
Some(compiler_input.clone()),
|
||||
format!("{err:#}"),
|
||||
)
|
||||
})
|
||||
.context("Failed to configure compiler with sources and options")?;
|
||||
compilation_success_report_callback(
|
||||
compiler_version,
|
||||
compiler_path.as_ref().to_path_buf(),
|
||||
false,
|
||||
Some(compiler_input),
|
||||
compiler_output.clone(),
|
||||
);
|
||||
Ok(compiler_output)
|
||||
let input = compilation.input().clone();
|
||||
let output = compilation.try_build(compiler).await;
|
||||
|
||||
match (output.as_ref(), deployed_libraries.is_some()) {
|
||||
(Ok(output), true) => {
|
||||
reporter
|
||||
.report_post_link_contracts_compilation_succeeded_event(
|
||||
compiler.version().clone(),
|
||||
compiler.path(),
|
||||
false,
|
||||
input,
|
||||
output.clone(),
|
||||
)
|
||||
.expect("Can't happen");
|
||||
}
|
||||
(Ok(output), false) => {
|
||||
reporter
|
||||
.report_pre_link_contracts_compilation_succeeded_event(
|
||||
compiler.version().clone(),
|
||||
compiler.path(),
|
||||
false,
|
||||
input,
|
||||
output.clone(),
|
||||
)
|
||||
.expect("Can't happen");
|
||||
}
|
||||
(Err(err), true) => {
|
||||
reporter
|
||||
.report_post_link_contracts_compilation_failed_event(
|
||||
compiler.version().clone(),
|
||||
compiler.path().to_path_buf(),
|
||||
input,
|
||||
format!("{err:#}"),
|
||||
)
|
||||
.expect("Can't happen");
|
||||
}
|
||||
(Err(err), false) => {
|
||||
reporter
|
||||
.report_pre_link_contracts_compilation_failed_event(
|
||||
compiler.version().clone(),
|
||||
compiler.path().to_path_buf(),
|
||||
input,
|
||||
format!("{err:#}"),
|
||||
)
|
||||
.expect("Can't happen");
|
||||
}
|
||||
}
|
||||
|
||||
output
|
||||
}
|
||||
|
||||
struct ArtifactsCache {
|
||||
@@ -297,7 +289,7 @@ impl ArtifactsCache {
|
||||
}
|
||||
|
||||
#[instrument(level = "debug", skip_all, err)]
|
||||
pub async fn insert(&self, key: &CacheKey, value: &CacheValue) -> Result<()> {
|
||||
pub async fn insert(&self, key: &CacheKey<'_>, value: &CacheValue) -> Result<()> {
|
||||
let key = bson::to_vec(key).context("Failed to serialize cache key (bson)")?;
|
||||
let value = bson::to_vec(value).context("Failed to serialize cache value (bson)")?;
|
||||
cacache::write(self.path.as_path(), key.encode_hex(), value)
|
||||
@@ -308,7 +300,7 @@ impl ArtifactsCache {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get(&self, key: &CacheKey) -> Option<CacheValue> {
|
||||
pub async fn get(&self, key: &CacheKey<'_>) -> Option<CacheValue> {
|
||||
let key = bson::to_vec(key).ok()?;
|
||||
let value = cacache::read(self.path.as_path(), key.encode_hex())
|
||||
.await
|
||||
@@ -320,7 +312,7 @@ impl ArtifactsCache {
|
||||
#[instrument(level = "debug", skip_all, err)]
|
||||
pub async fn get_or_insert_with(
|
||||
&self,
|
||||
key: &CacheKey,
|
||||
key: &CacheKey<'_>,
|
||||
callback: impl AsyncFnOnce() -> Result<CacheValue>,
|
||||
) -> Result<CacheValue> {
|
||||
match self.get(key).await {
|
||||
@@ -338,20 +330,20 @@ impl ArtifactsCache {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
struct CacheKey {
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
|
||||
struct CacheKey<'a> {
|
||||
/// The platform name that this artifact was compiled for. For example, this could be EVM or
|
||||
/// PVM.
|
||||
platform_key: String,
|
||||
platform_key: &'a TestingPlatform,
|
||||
|
||||
/// The version of the compiler that was used to compile the artifacts.
|
||||
compiler_version: Version,
|
||||
|
||||
/// The path of the metadata file that the compilation artifacts are for.
|
||||
metadata_file_path: PathBuf,
|
||||
metadata_file_path: &'a Path,
|
||||
|
||||
/// The mode that the compilation artifacts were compiled with.
|
||||
solc_mode: Mode,
|
||||
solc_mode: Cow<'a, Mode>,
|
||||
}
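For context, the get/insert methods below turn such a key into a cacache lookup key by bson-serializing it and hex-encoding the bytes. A hedged sketch of that flow, mirroring the calls visible in this diff (`cache_key` stands for a value of the struct above; `encode_hex` comes from `alloy::hex::ToHexExt`, which this module imports):

    let key_bytes = bson::to_vec(&cache_key).expect("bson serialization failed");
    let cacache_key = key_bytes.encode_hex(); // hex string used as the cacache entry key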
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
|
||||
@@ -18,7 +18,7 @@ use alloy::{
|
||||
primitives::Address,
|
||||
rpc::types::{TransactionRequest, trace::geth::DiffMode},
|
||||
};
|
||||
use anyhow::Context;
|
||||
use anyhow::Context as _;
|
||||
use futures::TryStreamExt;
|
||||
use indexmap::IndexMap;
|
||||
use revive_dt_format::traits::{ResolutionContext, ResolverApi};
|
||||
|
||||
@@ -19,7 +19,7 @@ pub trait Platform {
|
||||
type Compiler: SolidityCompiler;
|
||||
|
||||
/// Returns the matching [TestingPlatform] of the [revive_dt_config::Arguments].
|
||||
fn config_id() -> TestingPlatform;
|
||||
fn config_id() -> &'static TestingPlatform;
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
@@ -29,8 +29,8 @@ impl Platform for Geth {
|
||||
type Blockchain = geth::GethNode;
|
||||
type Compiler = solc::Solc;
|
||||
|
||||
fn config_id() -> TestingPlatform {
|
||||
TestingPlatform::Geth
|
||||
fn config_id() -> &'static TestingPlatform {
|
||||
&TestingPlatform::Geth
|
||||
}
|
||||
}
|
||||
|
||||
@@ -41,7 +41,7 @@ impl Platform for Kitchensink {
|
||||
type Blockchain = KitchensinkNode;
|
||||
type Compiler = revive_resolc::Resolc;
|
||||
|
||||
fn config_id() -> TestingPlatform {
|
||||
TestingPlatform::Kitchensink
|
||||
fn config_id() -> &'static TestingPlatform {
|
||||
&TestingPlatform::Kitchensink
|
||||
}
|
||||
}
|
||||
|
||||
+341
-538
File diff suppressed because it is too large
@@ -20,6 +20,7 @@ anyhow = { workspace = true }
futures = { workspace = true }
regex = { workspace = true }
tracing = { workspace = true }
schemars = { workspace = true }
semver = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true }

@@ -1,3 +1,4 @@
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use revive_dt_common::{macros::define_wrapper_type, types::Mode};
|
||||
@@ -7,26 +8,48 @@ use crate::{
|
||||
mode::ParsedMode,
|
||||
};
|
||||
|
||||
#[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq)]
|
||||
#[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq, JsonSchema)]
|
||||
pub struct Case {
|
||||
/// An optional name of the test case.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub name: Option<String>,
|
||||
|
||||
/// An optional comment on the case which has no impact on the execution in any way.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub comment: Option<String>,
|
||||
|
||||
/// This represents a mode that has been parsed from test metadata.
|
||||
///
|
||||
/// Mode strings can take the following form (in pseudo-regex):
|
||||
///
|
||||
/// ```text
|
||||
/// [YEILV][+-]? (M[0123sz])? <semver>?
|
||||
/// ```
|
||||
///
|
||||
/// If this is provided then it takes higher priority than the modes specified in the metadata
|
||||
/// file.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub modes: Option<Vec<ParsedMode>>,
|
||||
|
||||
/// The set of steps to run as part of this test case.
|
||||
#[serde(rename = "inputs")]
|
||||
pub steps: Vec<Step>,
|
||||
|
||||
/// An optional name of the group of tests that this test belongs to.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub group: Option<String>,
|
||||
|
||||
/// An optional set of expectations and assertions to make about the transaction after it ran.
|
||||
///
|
||||
/// If this is not specified then the only assertion that will be run is that the transaction
|
||||
/// was successful.
|
||||
///
|
||||
/// This expectation that's on the case itself will be attached to the final step of the case.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub expected: Option<Expected>,
|
||||
|
||||
/// An optional boolean which defines if the case as a whole should be ignored. If null then the
|
||||
/// case will not be ignored.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub ignore: Option<bool>,
|
||||
}
|
||||
@@ -64,7 +87,7 @@ impl Case {
|
||||
pub fn solc_modes(&self) -> Vec<Mode> {
|
||||
match &self.modes {
|
||||
Some(modes) => ParsedMode::many_to_modes(modes.iter()).collect(),
|
||||
None => Mode::all().collect(),
|
||||
None => Mode::all().cloned().collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
+61
-16
@@ -8,8 +8,9 @@ use alloy::{
|
||||
rpc::types::TransactionRequest,
|
||||
};
|
||||
use alloy_primitives::{FixedBytes, utils::parse_units};
|
||||
use anyhow::Context;
|
||||
use anyhow::Context as _;
|
||||
use futures::{FutureExt, StreamExt, TryFutureExt, TryStreamExt, stream};
|
||||
use schemars::JsonSchema;
|
||||
use semver::VersionReq;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -23,7 +24,7 @@ use crate::{metadata::ContractInstance, traits::ResolutionContext};
|
||||
///
|
||||
/// A test step can be anything. It could be an invocation to a function, an assertion, or any other
|
||||
/// action that needs to be run or executed on the nodes used in the tests.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq, JsonSchema)]
|
||||
#[serde(untagged)]
|
||||
pub enum Step {
|
||||
/// A function call or an invocation to some function on some smart contract.
|
||||
@@ -39,36 +40,51 @@ define_wrapper_type!(
|
||||
pub struct StepIdx(usize) impl Display;
|
||||
);
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
|
||||
/// This is an input step which is a transaction description that the framework translates into a
|
||||
/// transaction and executes on the nodes.
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq, JsonSchema)]
|
||||
pub struct Input {
|
||||
/// The address of the account performing the call and paying the fees for it.
|
||||
#[serde(default = "Input::default_caller")]
|
||||
#[schemars(with = "String")]
|
||||
pub caller: Address,
|
||||
|
||||
/// An optional comment on the step which has no impact on the execution in any way.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub comment: Option<String>,
|
||||
|
||||
/// The contract instance that's being called in this transaction step.
|
||||
#[serde(default = "Input::default_instance")]
|
||||
pub instance: ContractInstance,
|
||||
|
||||
/// The method that's being called in this step.
|
||||
pub method: Method,
|
||||
|
||||
/// The calldata that the function should be invoked with.
|
||||
#[serde(default)]
|
||||
pub calldata: Calldata,
|
||||
|
||||
/// A set of assertions and expectations to have for the transaction.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub expected: Option<Expected>,
|
||||
|
||||
/// An optional value to provide as part of the transaction.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub value: Option<EtherValue>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[schemars(skip)]
|
||||
pub storage: Option<HashMap<String, Calldata>>,
|
||||
|
||||
/// Variable assignments to perform in the framework, allowing us to reference them again later on
|
||||
/// during the execution.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub variable_assignments: Option<VariableAssignments>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
|
||||
/// This represents a balance assertion step where the framework needs to query the balance of some
|
||||
/// account or contract and assert that it's some amount.
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq, JsonSchema)]
|
||||
pub struct BalanceAssertion {
|
||||
/// An optional comment on the balance assertion.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
@@ -82,11 +98,13 @@ pub struct BalanceAssertion {
|
||||
/// followed in the calldata.
|
||||
pub address: String,
|
||||
|
||||
/// The amount of balance to assert that the account or contract has.
|
||||
/// The amount of balance to assert that the account or contract has. This is a 256 bit value
/// that's serialized to and deserialized from a decimal string.
|
||||
#[schemars(with = "String")]
|
||||
pub expected_balance: U256,
|
||||
}
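A hedged sketch of what such an assertion might look like in metadata JSON (the address string and amount are made up; the balance uses the decimal form described above):

    // {
    //     "comment": "the deployer should hold one token unit",
    //     "address": "<resolved according to the calldata resolution rules>",
    //     "expected_balance": "1000000000000000000"
    // }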
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq, JsonSchema)]
|
||||
pub struct StorageEmptyAssertion {
|
||||
/// An optional comment on the storage empty assertion.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
@@ -104,31 +122,52 @@ pub struct StorageEmptyAssertion {
|
||||
pub is_storage_empty: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
|
||||
/// A set of expectations and assertions to make about the transaction after it ran.
|
||||
///
|
||||
/// If this is not specified then the only assertion that will be run is that the transaction
|
||||
/// was successful.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Eq, PartialEq)]
|
||||
#[serde(untagged)]
|
||||
pub enum Expected {
|
||||
/// An assertion that the transaction succeeded and returned the provided set of data.
|
||||
Calldata(Calldata),
|
||||
/// A more complex assertion.
|
||||
Expected(ExpectedOutput),
|
||||
/// A set of assertions.
|
||||
ExpectedMany(Vec<ExpectedOutput>),
|
||||
}
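Since `Expected` is `#[serde(untagged)]`, the variant is picked purely from the JSON shape. A hedged sketch (field values are made up; `ExpectedOutput`'s optional fields and defaulted `exception` flag are shown in the struct below):

    // A bare calldata string matches Expected::Calldata ...
    let simple: Expected = serde_json::from_value(serde_json::json!("0x2a")).unwrap();
    // ... an object matches Expected::Expected ...
    let single: Expected = serde_json::from_value(serde_json::json!({
        "return_data": "0x2a",
        "exception": false
    })).unwrap();
    // ... and an array of such objects matches Expected::ExpectedMany.
    let many: Expected = serde_json::from_value(serde_json::json!([{ "exception": true }])).unwrap();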
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
|
||||
/// A set of assertions to run on the transaction.
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, Eq, PartialEq)]
|
||||
pub struct ExpectedOutput {
|
||||
/// An optional compiler version that's required in order for this assertion to run.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[schemars(with = "Option<String>")]
|
||||
pub compiler_version: Option<VersionReq>,
|
||||
|
||||
/// An optional field of the expected returns from the invocation.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub return_data: Option<Calldata>,
|
||||
|
||||
/// An optional set of assertions to run on the emitted events from the transaction.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub events: Option<Vec<Event>>,
|
||||
|
||||
/// A boolean which defines whether we expect the transaction to succeed or fail.
|
||||
#[serde(default)]
|
||||
pub exception: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema, Eq, PartialEq)]
|
||||
pub struct Event {
|
||||
/// An optional field of the address of the emitter of the event.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub address: Option<String>,
|
||||
|
||||
/// The set of topics to expect the event to have.
|
||||
pub topics: Vec<String>,
|
||||
|
||||
/// The set of values to expect the event to have.
|
||||
pub values: Calldata,
|
||||
}
|
||||
|
||||
@@ -183,16 +222,17 @@ pub struct Event {
|
||||
/// [`Single`]: Calldata::Single
|
||||
/// [`Compound`]: Calldata::Compound
|
||||
/// [reverse polish notation]: https://en.wikipedia.org/wiki/Reverse_Polish_notation
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, Eq, PartialEq)]
|
||||
#[serde(untagged)]
|
||||
pub enum Calldata {
|
||||
Single(Bytes),
|
||||
Single(#[schemars(with = "String")] Bytes),
|
||||
Compound(Vec<CalldataItem>),
|
||||
}
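A hedged sketch of the two shapes in metadata JSON (values are made up; compound items are plain strings that the framework resolves later, e.g. reverse-polish arithmetic as described above):

    // Single: one hex blob used verbatim.
    let single: Calldata = serde_json::from_value(serde_json::json!("0x2a")).unwrap();
    // Compound: a list of items, here "1 2 +" in reverse polish notation.
    let compound: Calldata = serde_json::from_value(serde_json::json!(["1", "2", "+"])).unwrap();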
|
||||
|
||||
define_wrapper_type! {
|
||||
/// This represents an item in the [`Calldata::Compound`] variant.
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
|
||||
/// This represents an item in the [`Calldata::Compound`] variant. Each item will be resolved
|
||||
/// according to the resolution rules of the tool.
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(transparent)]
|
||||
pub struct CalldataItem(String) impl Display;
|
||||
}
|
||||
@@ -217,7 +257,7 @@ enum Operation {
|
||||
}
|
||||
|
||||
/// Specify how the contract is called.
|
||||
#[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq)]
|
||||
#[derive(Debug, Default, Serialize, Deserialize, JsonSchema, Clone, Eq, PartialEq)]
|
||||
pub enum Method {
|
||||
/// Initiate a deploy transaction, calling contracts constructor.
|
||||
///
|
||||
@@ -238,11 +278,16 @@ pub enum Method {
|
||||
}
|
||||
|
||||
define_wrapper_type!(
|
||||
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
/// Defines an Ether value.
|
||||
///
|
||||
/// This is an unsigned 256 bit integer that's followed by some denomination which can either be
|
||||
/// eth, ether, gwei, or wei.
|
||||
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, JsonSchema)]
|
||||
#[schemars(with = "String")]
|
||||
pub struct EtherValue(U256) impl Display;
|
||||
);
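For reference, the denominations mentioned above relate as follows (a hedged note; the exact textual format accepted by the parser is not shown in this hunk):

    // 1 wei             = 1
    // 1 gwei            = 1_000_000_000 wei (10^9)
    // 1 ether (or eth)  = 1_000_000_000_000_000_000 wei (10^18)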
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq, JsonSchema)]
|
||||
pub struct VariableAssignments {
|
||||
/// A vector of the variable names to assign to the return data.
|
||||
///
|
||||
|
||||
@@ -8,6 +8,7 @@ use std::{
|
||||
str::FromStr,
|
||||
};
|
||||
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use revive_common::EVMVersion;
|
||||
@@ -56,30 +57,62 @@ impl Deref for MetadataFile {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq)]
|
||||
/// A MatterLabs metadata file.
|
||||
///
|
||||
/// This defines the structure that the MatterLabs metadata files follow for defining the tests or
|
||||
/// the workloads.
|
||||
///
|
||||
/// Each metadata file is composed of multiple test cases where each test case is isolated from the
|
||||
/// others and runs in a completely different address space. Each test case is composed of a number
|
||||
/// of steps and assertions that should be performed as part of the test case.
|
||||
#[derive(Debug, Default, Serialize, Deserialize, JsonSchema, Clone, Eq, PartialEq)]
|
||||
pub struct Metadata {
|
||||
/// A comment on the test case that's added for human-readability.
|
||||
/// This is an optional comment on the metadata file which has no impact on the execution in any
|
||||
/// way.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub comment: Option<String>,
|
||||
|
||||
/// An optional boolean which defines if the metadata file as a whole should be ignored. If null
|
||||
/// then the metadata file will not be ignored.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub ignore: Option<bool>,
|
||||
|
||||
/// An optional vector of targets that this Metadata file's cases can be executed on. As an
|
||||
/// example, if we wish for the metadata file's cases to only be run on PolkaVM then we'd
|
||||
/// specify a target of "PolkaVM" in here.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub targets: Option<Vec<String>>,
|
||||
|
||||
/// A vector of the test cases and workloads contained within the metadata file. This is their
|
||||
/// primary description.
|
||||
pub cases: Vec<Case>,
|
||||
|
||||
/// A map of all of the contracts that the test requires to run.
|
||||
///
|
||||
/// This is a map where the key is the name of the contract instance and the value is the
|
||||
/// contract's path and ident in the file.
|
||||
///
|
||||
/// If any contract is to be used by the test then it must be included in here first so that the
|
||||
/// framework is aware of its path, compiles it, and prepares it.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub contracts: Option<BTreeMap<ContractInstance, ContractPathAndIdent>>,
|
||||
|
||||
/// The set of libraries that this metadata file requires.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub libraries: Option<BTreeMap<PathBuf, BTreeMap<ContractIdent, ContractInstance>>>,
|
||||
|
||||
/// This represents a mode that has been parsed from test metadata.
|
||||
///
|
||||
/// Mode strings can take the following form (in pseudo-regex):
|
||||
///
|
||||
/// ```text
|
||||
/// [YEILV][+-]? (M[0123sz])? <semver>?
|
||||
/// ```
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub modes: Option<Vec<ParsedMode>>,
|
||||
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
#[schemars(skip)]
|
||||
pub file_path: Option<PathBuf>,
|
||||
|
||||
/// This field specifies an EVM version requirement that the test case has where the test might
|
||||
@@ -87,9 +120,9 @@ pub struct Metadata {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub required_evm_version: Option<EvmVersionRequirement>,
|
||||
|
||||
/// A set of compilation directives that will be passed to the compiler whenever the contracts for
|
||||
/// the test are being compiled. Note that this differs from the [`Mode`]s in that a [`Mode`] is
|
||||
/// just a filter for when a test can run whereas this is an instruction to the compiler.
|
||||
/// A set of compilation directives that will be passed to the compiler whenever the contracts
|
||||
/// for the test are being compiled. Note that this differs from the [`Mode`]s in that a [`Mode`]
|
||||
/// is just a filter for when a test can run whereas this is an instruction to the compiler.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub compiler_directives: Option<CompilationDirectives>,
|
||||
}
|
||||
@@ -99,7 +132,7 @@ impl Metadata {
|
||||
pub fn solc_modes(&self) -> Vec<Mode> {
|
||||
match &self.modes {
|
||||
Some(modes) => ParsedMode::many_to_modes(modes.iter()).collect(),
|
||||
None => Mode::all().collect(),
|
||||
None => Mode::all().cloned().collect(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -262,7 +295,7 @@ define_wrapper_type!(
|
||||
///
|
||||
/// Typically, this is used as the key to the "contracts" field of metadata files.
|
||||
#[derive(
|
||||
Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize,
|
||||
Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, JsonSchema
|
||||
)]
|
||||
#[serde(transparent)]
|
||||
pub struct ContractInstance(String) impl Display;
|
||||
@@ -273,7 +306,7 @@ define_wrapper_type!(
|
||||
///
|
||||
/// A contract identifier is the name of the contract in the source code.
|
||||
#[derive(
|
||||
Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize,
|
||||
Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, JsonSchema
|
||||
)]
|
||||
#[serde(transparent)]
|
||||
pub struct ContractIdent(String) impl Display;
|
||||
@@ -286,7 +319,9 @@ define_wrapper_type!(
|
||||
/// ```text
|
||||
/// ${path}:${contract_ident}
|
||||
/// ```
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
|
||||
#[derive(
|
||||
Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, JsonSchema,
|
||||
)]
|
||||
#[serde(try_from = "String", into = "String")]
|
||||
pub struct ContractPathAndIdent {
|
||||
/// The path of the contract source code relative to the directory containing the metadata file.
|
||||
@@ -363,9 +398,15 @@ impl From<ContractPathAndIdent> for String {
|
||||
}
|
||||
}
|
||||
|
||||
/// An EVM version requirement that the test case has. This gets serialized and
|
||||
/// deserialized from and into [`String`].
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
|
||||
/// An EVM version requirement that the test case has. This gets serialized and deserialized from
|
||||
/// and into [`String`]. This follows a simple format of (>=|<=|=|>|<) followed by a string of the
|
||||
/// EVM version.
|
||||
///
|
||||
/// When specified, the framework will only run the test if the node's EVM version matches that
|
||||
/// required by the metadata file.
|
||||
#[derive(
|
||||
Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, JsonSchema,
|
||||
)]
|
||||
#[serde(try_from = "String", into = "String")]
|
||||
pub struct EvmVersionRequirement {
|
||||
ordering: Ordering,
|
||||
@@ -493,7 +534,18 @@ impl From<EvmVersionRequirement> for String {
|
||||
/// just a filter for when a test can run whereas this is an instruction to the compiler.
|
||||
/// Defines how the compiler should handle revert strings.
|
||||
#[derive(
|
||||
Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Serialize, Deserialize,
|
||||
Clone,
|
||||
Debug,
|
||||
Copy,
|
||||
PartialEq,
|
||||
Eq,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
Hash,
|
||||
Default,
|
||||
Serialize,
|
||||
Deserialize,
|
||||
JsonSchema,
|
||||
)]
|
||||
pub struct CompilationDirectives {
|
||||
/// Defines how the revert strings should be handled.
|
||||
@@ -502,14 +554,29 @@ pub struct CompilationDirectives {
|
||||
|
||||
/// Defines how the compiler should handle revert strings.
|
||||
#[derive(
|
||||
Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Serialize, Deserialize,
|
||||
Clone,
|
||||
Debug,
|
||||
Copy,
|
||||
PartialEq,
|
||||
Eq,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
Hash,
|
||||
Default,
|
||||
Serialize,
|
||||
Deserialize,
|
||||
JsonSchema,
|
||||
)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub enum RevertString {
|
||||
/// The default handling of the revert strings.
|
||||
#[default]
|
||||
Default,
|
||||
/// The debug handling of the revert strings.
|
||||
Debug,
|
||||
/// Strip the revert strings.
|
||||
Strip,
|
||||
/// Provide verbose debug strings for the revert string.
|
||||
VerboseDebug,
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use anyhow::Context;
|
||||
use anyhow::Context as _;
|
||||
use regex::Regex;
|
||||
use revive_dt_common::iterators::EitherIter;
|
||||
use revive_dt_common::types::{Mode, ModeOptimizerSetting, ModePipeline};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashSet;
|
||||
use std::fmt::Display;
|
||||
@@ -16,7 +18,7 @@ use std::sync::LazyLock;
|
||||
/// ```
|
||||
///
|
||||
/// We can parse valid mode strings into [`ParsedMode`] using [`ParsedMode::from_str`].
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize, JsonSchema)]
|
||||
#[serde(try_from = "String", into = "String")]
|
||||
pub struct ParsedMode {
|
||||
pub pipeline: Option<ModePipeline>,
|
||||
@@ -176,27 +178,6 @@ impl ParsedMode {
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator that could be either of two iterators.
|
||||
#[derive(Clone, Debug)]
|
||||
enum EitherIter<A, B> {
|
||||
A(A),
|
||||
B(B),
|
||||
}
|
||||
|
||||
impl<A, B> Iterator for EitherIter<A, B>
|
||||
where
|
||||
A: Iterator,
|
||||
B: Iterator<Item = A::Item>,
|
||||
{
|
||||
type Item = A::Item;
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match self {
|
||||
EitherIter::A(iter) => iter.next(),
|
||||
EitherIter::B(iter) => iter.next(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
+46
-65
@@ -17,9 +17,7 @@ use alloy::{
|
||||
eips::BlockNumberOrTag,
|
||||
genesis::{Genesis, GenesisAccount},
|
||||
network::{Ethereum, EthereumWallet, NetworkWallet},
|
||||
primitives::{
|
||||
Address, BlockHash, BlockNumber, BlockTimestamp, FixedBytes, StorageKey, TxHash, U256,
|
||||
},
|
||||
primitives::{Address, BlockHash, BlockNumber, BlockTimestamp, StorageKey, TxHash, U256},
|
||||
providers::{
|
||||
Provider, ProviderBuilder,
|
||||
ext::DebugApi,
|
||||
@@ -29,9 +27,8 @@ use alloy::{
|
||||
EIP1186AccountProofResponse, TransactionReceipt, TransactionRequest,
|
||||
trace::geth::{DiffMode, GethDebugTracingOptions, PreStateConfig, PreStateFrame},
|
||||
},
|
||||
signers::local::PrivateKeySigner,
|
||||
};
|
||||
use anyhow::Context;
|
||||
use anyhow::Context as _;
|
||||
use revive_common::EVMVersion;
|
||||
use tracing::{Instrument, instrument};
|
||||
|
||||
@@ -39,7 +36,7 @@ use revive_dt_common::{
|
||||
fs::clear_directory,
|
||||
futures::{PollingWaitBehavior, poll},
|
||||
};
|
||||
use revive_dt_config::Arguments;
|
||||
use revive_dt_config::*;
|
||||
use revive_dt_format::traits::ResolverApi;
|
||||
use revive_dt_node_interaction::EthereumNode;
|
||||
|
||||
@@ -64,7 +61,7 @@ pub struct GethNode {
|
||||
geth: PathBuf,
|
||||
id: u32,
|
||||
handle: Option<Child>,
|
||||
start_timeout: u64,
|
||||
start_timeout: Duration,
|
||||
wallet: Arc<EthereumWallet>,
|
||||
nonce_manager: CachedNonceManager,
|
||||
chain_id_filler: ChainIdFiller,
|
||||
@@ -97,7 +94,7 @@ impl GethNode {
|
||||
|
||||
/// Create the node directory and call `geth init` to configure the genesis.
|
||||
#[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
|
||||
fn init(&mut self, genesis: String) -> anyhow::Result<&mut Self> {
|
||||
fn init(&mut self, mut genesis: Genesis) -> anyhow::Result<&mut Self> {
|
||||
let _ = clear_directory(&self.base_directory);
|
||||
let _ = clear_directory(&self.logs_directory);
|
||||
|
||||
@@ -106,8 +103,6 @@ impl GethNode {
|
||||
create_dir_all(&self.logs_directory)
|
||||
.context("Failed to create logs directory for geth node")?;
|
||||
|
||||
let mut genesis = serde_json::from_str::<Genesis>(&genesis)
|
||||
.context("Failed to deserialize geth genesis JSON")?;
|
||||
for signer_address in
|
||||
<EthereumWallet as NetworkWallet<Ethereum>>::signer_addresses(&self.wallet)
|
||||
{
|
||||
@@ -240,7 +235,7 @@ impl GethNode {
|
||||
.open(self.geth_stderr_log_file_path())
|
||||
.context("Failed to open geth stderr logs file for readiness check")?;
|
||||
|
||||
let maximum_wait_time = Duration::from_millis(self.start_timeout);
|
||||
let maximum_wait_time = self.start_timeout;
|
||||
let mut stderr = BufReader::new(logs_file).lines();
|
||||
let mut lines = vec![];
|
||||
loop {
|
||||
@@ -256,7 +251,7 @@ impl GethNode {
|
||||
if Instant::now().duration_since(start_time) > maximum_wait_time {
|
||||
anyhow::bail!(
|
||||
"Timeout in starting geth: took longer than {}ms. stdout:\n\n{}\n",
|
||||
self.start_timeout,
|
||||
self.start_timeout.as_millis(),
|
||||
lines.join("\n")
|
||||
);
|
||||
}
|
||||
@@ -556,30 +551,40 @@ impl ResolverApi for GethNode {
|
||||
}
|
||||
|
||||
impl Node for GethNode {
|
||||
fn new(config: &Arguments) -> Self {
|
||||
let geth_directory = config.directory().join(Self::BASE_DIRECTORY);
|
||||
fn new(
|
||||
context: impl AsRef<WorkingDirectoryConfiguration>
|
||||
+ AsRef<ConcurrencyConfiguration>
|
||||
+ AsRef<GenesisConfiguration>
|
||||
+ AsRef<WalletConfiguration>
|
||||
+ AsRef<GethConfiguration>
|
||||
+ AsRef<KitchensinkConfiguration>
|
||||
+ AsRef<ReviveDevNodeConfiguration>
|
||||
+ AsRef<EthRpcConfiguration>
|
||||
+ Clone,
|
||||
) -> Self {
|
||||
let working_directory_configuration =
|
||||
AsRef::<WorkingDirectoryConfiguration>::as_ref(&context);
|
||||
let wallet_configuration = AsRef::<WalletConfiguration>::as_ref(&context);
|
||||
let geth_configuration = AsRef::<GethConfiguration>::as_ref(&context);
|
||||
|
||||
let geth_directory = working_directory_configuration
|
||||
.as_path()
|
||||
.join(Self::BASE_DIRECTORY);
|
||||
let id = NODE_COUNT.fetch_add(1, Ordering::SeqCst);
|
||||
let base_directory = geth_directory.join(id.to_string());
|
||||
|
||||
let mut wallet = config.wallet();
|
||||
for signer in (1..=config.private_keys_to_add)
|
||||
.map(|id| U256::from(id))
|
||||
.map(|id| id.to_be_bytes::<32>())
|
||||
.map(|id| PrivateKeySigner::from_bytes(&FixedBytes(id)).unwrap())
|
||||
{
|
||||
wallet.register_signer(signer);
|
||||
}
|
||||
let wallet = wallet_configuration.wallet();
|
||||
|
||||
Self {
|
||||
connection_string: base_directory.join(Self::IPC_FILE).display().to_string(),
|
||||
data_directory: base_directory.join(Self::DATA_DIRECTORY),
|
||||
logs_directory: base_directory.join(Self::LOGS_DIRECTORY),
|
||||
base_directory,
|
||||
geth: config.geth.clone(),
|
||||
geth: geth_configuration.path.clone(),
|
||||
id,
|
||||
handle: None,
|
||||
start_timeout: config.geth_start_timeout,
|
||||
wallet: Arc::new(wallet),
|
||||
start_timeout: geth_configuration.start_timeout_ms,
|
||||
wallet: wallet.clone(),
|
||||
chain_id_filler: Default::default(),
|
||||
nonce_manager: Default::default(),
|
||||
// We know that we only need to be storing 2 files so we can specify that when creating
|
||||
@@ -621,7 +626,7 @@ impl Node for GethNode {
|
||||
}
|
||||
|
||||
#[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
|
||||
fn spawn(&mut self, genesis: String) -> anyhow::Result<()> {
|
||||
fn spawn(&mut self, genesis: Genesis) -> anyhow::Result<()> {
|
||||
self.init(genesis)?.spawn_process()?;
|
||||
Ok(())
|
||||
}
|
||||
@@ -662,49 +667,25 @@ impl Drop for GethNode {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use revive_dt_config::Arguments;
|
||||
|
||||
use temp_dir::TempDir;
|
||||
|
||||
use crate::{GENESIS_JSON, Node};
|
||||
|
||||
use super::*;
|
||||
|
||||
fn test_config() -> (Arguments, TempDir) {
|
||||
let mut config = Arguments::default();
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
config.working_directory = temp_dir.path().to_path_buf().into();
|
||||
|
||||
(config, temp_dir)
|
||||
fn test_config() -> ExecutionContext {
|
||||
ExecutionContext::default()
|
||||
}
|
||||
|
||||
fn new_node() -> (GethNode, TempDir) {
|
||||
let (args, temp_dir) = test_config();
|
||||
let mut node = GethNode::new(&args);
|
||||
node.init(GENESIS_JSON.to_owned())
|
||||
fn new_node() -> (ExecutionContext, GethNode) {
|
||||
let context = test_config();
|
||||
let mut node = GethNode::new(&context);
|
||||
node.init(context.genesis_configuration.genesis().unwrap().clone())
|
||||
.expect("Failed to initialize the node")
|
||||
.spawn_process()
|
||||
.expect("Failed to spawn the node process");
|
||||
(node, temp_dir)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn init_works() {
|
||||
GethNode::new(&test_config().0)
|
||||
.init(GENESIS_JSON.to_string())
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn spawn_works() {
|
||||
GethNode::new(&test_config().0)
|
||||
.spawn(GENESIS_JSON.to_string())
|
||||
.unwrap();
|
||||
(context, node)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn version_works() {
|
||||
let version = GethNode::new(&test_config().0).version().unwrap();
|
||||
let version = GethNode::new(&test_config()).version().unwrap();
|
||||
assert!(
|
||||
version.starts_with("geth version"),
|
||||
"expected version string, got: '{version}'"
|
||||
@@ -714,7 +695,7 @@ mod tests {
|
||||
#[tokio::test]
|
||||
async fn can_get_chain_id_from_node() {
|
||||
// Arrange
|
||||
let (node, _temp_dir) = new_node();
|
||||
let (_context, node) = new_node();
|
||||
|
||||
// Act
|
||||
let chain_id = node.chain_id().await;
|
||||
@@ -727,7 +708,7 @@ mod tests {
|
||||
#[tokio::test]
|
||||
async fn can_get_gas_limit_from_node() {
|
||||
// Arrange
|
||||
let (node, _temp_dir) = new_node();
|
||||
let (_context, node) = new_node();
|
||||
|
||||
// Act
|
||||
let gas_limit = node.block_gas_limit(BlockNumberOrTag::Latest).await;
|
||||
@@ -740,7 +721,7 @@ mod tests {
|
||||
#[tokio::test]
|
||||
async fn can_get_coinbase_from_node() {
|
||||
// Arrange
|
||||
let (node, _temp_dir) = new_node();
|
||||
let (_context, node) = new_node();
|
||||
|
||||
// Act
|
||||
let coinbase = node.block_coinbase(BlockNumberOrTag::Latest).await;
|
||||
@@ -753,7 +734,7 @@ mod tests {
|
||||
#[tokio::test]
|
||||
async fn can_get_block_difficulty_from_node() {
|
||||
// Arrange
|
||||
let (node, _temp_dir) = new_node();
|
||||
let (_context, node) = new_node();
|
||||
|
||||
// Act
|
||||
let block_difficulty = node.block_difficulty(BlockNumberOrTag::Latest).await;
|
||||
@@ -766,7 +747,7 @@ mod tests {
|
||||
#[tokio::test]
|
||||
async fn can_get_block_hash_from_node() {
|
||||
// Arrange
|
||||
let (node, _temp_dir) = new_node();
|
||||
let (_context, node) = new_node();
|
||||
|
||||
// Act
|
||||
let block_hash = node.block_hash(BlockNumberOrTag::Latest).await;
|
||||
@@ -778,7 +759,7 @@ mod tests {
|
||||
#[tokio::test]
|
||||
async fn can_get_block_timestamp_from_node() {
|
||||
// Arrange
|
||||
let (node, _temp_dir) = new_node();
|
||||
let (_context, node) = new_node();
|
||||
|
||||
// Act
|
||||
let block_timestamp = node.block_timestamp(BlockNumberOrTag::Latest).await;
|
||||
@@ -790,7 +771,7 @@ mod tests {
|
||||
#[tokio::test]
|
||||
async fn can_get_block_number_from_node() {
|
||||
// Arrange
|
||||
let (node, _temp_dir) = new_node();
|
||||
let (_context, node) = new_node();
|
||||
|
||||
// Act
|
||||
let block_number = node.last_block_number().await;
|
||||
|
||||
@@ -19,8 +19,8 @@ use alloy::{
|
||||
TransactionBuilderError, UnbuiltTransactionError,
|
||||
},
|
||||
primitives::{
|
||||
Address, B64, B256, BlockHash, BlockNumber, BlockTimestamp, Bloom, Bytes, FixedBytes,
|
||||
StorageKey, TxHash, U256,
|
||||
Address, B64, B256, BlockHash, BlockNumber, BlockTimestamp, Bloom, Bytes, StorageKey,
|
||||
TxHash, U256,
|
||||
},
|
||||
providers::{
|
||||
Provider, ProviderBuilder,
|
||||
@@ -32,9 +32,8 @@ use alloy::{
|
||||
eth::{Block, Header, Transaction},
|
||||
trace::geth::{DiffMode, GethDebugTracingOptions, PreStateConfig, PreStateFrame},
|
||||
},
|
||||
signers::local::PrivateKeySigner,
|
||||
};
|
||||
use anyhow::Context;
|
||||
use anyhow::Context as _;
|
||||
use revive_common::EVMVersion;
|
||||
use revive_dt_common::fs::clear_directory;
|
||||
use revive_dt_format::traits::ResolverApi;
|
||||
@@ -43,7 +42,7 @@ use serde_json::{Value as JsonValue, json};
|
||||
use sp_core::crypto::Ss58Codec;
|
||||
use sp_runtime::AccountId32;
|
||||
|
||||
use revive_dt_config::Arguments;
|
||||
use revive_dt_config::*;
|
||||
use revive_dt_node_interaction::EthereumNode;
|
||||
|
||||
use crate::{Node, common::FallbackGasFiller, constants::INITIAL_BALANCE};
|
||||
@@ -92,7 +91,7 @@ impl KitchensinkNode {
|
||||
const PROXY_STDOUT_LOG_FILE_NAME: &str = "proxy_stdout.log";
|
||||
const PROXY_STDERR_LOG_FILE_NAME: &str = "proxy_stderr.log";
|
||||
|
||||
fn init(&mut self, genesis: &str) -> anyhow::Result<&mut Self> {
|
||||
fn init(&mut self, mut genesis: Genesis) -> anyhow::Result<&mut Self> {
|
||||
let _ = clear_directory(&self.base_directory);
|
||||
let _ = clear_directory(&self.logs_directory);
|
||||
|
||||
@@ -153,8 +152,6 @@ impl KitchensinkNode {
|
||||
})
|
||||
.collect();
|
||||
let mut eth_balances = {
|
||||
let mut genesis = serde_json::from_str::<Genesis>(genesis)
|
||||
.context("Failed to deserialize EVM genesis JSON for kitchensink")?;
|
||||
for signer_address in
|
||||
<EthereumWallet as NetworkWallet<Ethereum>>::signer_addresses(&self.wallet)
|
||||
{
|
||||
@@ -586,35 +583,47 @@ impl ResolverApi for KitchensinkNode {
|
||||
}
|
||||
|
||||
impl Node for KitchensinkNode {
|
||||
fn new(config: &Arguments) -> Self {
|
||||
let kitchensink_directory = config.directory().join(Self::BASE_DIRECTORY);
|
||||
fn new(
|
||||
context: impl AsRef<WorkingDirectoryConfiguration>
|
||||
+ AsRef<ConcurrencyConfiguration>
|
||||
+ AsRef<GenesisConfiguration>
|
||||
+ AsRef<WalletConfiguration>
|
||||
+ AsRef<GethConfiguration>
|
||||
+ AsRef<KitchensinkConfiguration>
|
||||
+ AsRef<ReviveDevNodeConfiguration>
|
||||
+ AsRef<EthRpcConfiguration>
|
||||
+ Clone,
|
||||
) -> Self {
|
||||
let kitchensink_configuration = AsRef::<KitchensinkConfiguration>::as_ref(&context);
|
||||
let dev_node_configuration = AsRef::<ReviveDevNodeConfiguration>::as_ref(&context);
|
||||
let eth_rpc_configuration = AsRef::<EthRpcConfiguration>::as_ref(&context);
|
||||
let working_directory_configuration =
|
||||
AsRef::<WorkingDirectoryConfiguration>::as_ref(&context);
|
||||
let wallet_configuration = AsRef::<WalletConfiguration>::as_ref(&context);
|
||||
|
||||
let kitchensink_directory = working_directory_configuration
|
||||
.as_path()
|
||||
.join(Self::BASE_DIRECTORY);
|
||||
let id = NODE_COUNT.fetch_add(1, Ordering::SeqCst);
|
||||
let base_directory = kitchensink_directory.join(id.to_string());
|
||||
let logs_directory = base_directory.join(Self::LOGS_DIRECTORY);
|
||||
|
||||
let mut wallet = config.wallet();
|
||||
for signer in (1..=config.private_keys_to_add)
|
||||
.map(|id| U256::from(id))
|
||||
.map(|id| id.to_be_bytes::<32>())
|
||||
.map(|id| PrivateKeySigner::from_bytes(&FixedBytes(id)).unwrap())
|
||||
{
|
||||
wallet.register_signer(signer);
|
||||
}
|
||||
let wallet = wallet_configuration.wallet();
|
||||
|
||||
Self {
|
||||
id,
|
||||
substrate_binary: config.kitchensink.clone(),
|
||||
dev_node_binary: config.revive_dev_node.clone(),
|
||||
eth_proxy_binary: config.eth_proxy.clone(),
|
||||
substrate_binary: kitchensink_configuration.path.clone(),
|
||||
dev_node_binary: dev_node_configuration.path.clone(),
|
||||
eth_proxy_binary: eth_rpc_configuration.path.clone(),
|
||||
rpc_url: String::new(),
|
||||
base_directory,
|
||||
logs_directory,
|
||||
process_substrate: None,
|
||||
process_proxy: None,
|
||||
wallet: Arc::new(wallet),
|
||||
wallet: wallet.clone(),
|
||||
chain_id_filler: Default::default(),
|
||||
nonce_manager: Default::default(),
|
||||
use_kitchensink_not_dev_node: config.use_kitchensink_not_dev_node,
|
||||
use_kitchensink_not_dev_node: kitchensink_configuration.use_kitchensink,
|
||||
// We know that we only need to be storing 4 files so we can specify that when creating
|
||||
// the vector. It's the stdout and stderr of the substrate-node and the eth-rpc.
|
||||
logs_file_to_flush: Vec::with_capacity(4),
|
||||
@@ -655,8 +664,8 @@ impl Node for KitchensinkNode {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn spawn(&mut self, genesis: String) -> anyhow::Result<()> {
|
||||
self.init(&genesis)?.spawn_process()
|
||||
fn spawn(&mut self, genesis: Genesis) -> anyhow::Result<()> {
|
||||
self.init(genesis)?.spawn_process()
|
||||
}
|
||||
|
||||
fn version(&self) -> anyhow::Result<String> {
|
||||
@@ -1121,25 +1130,20 @@ impl BlockHeader for KitchenSinkHeader {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use alloy::rpc::types::TransactionRequest;
|
||||
use revive_dt_config::Arguments;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::{LazyLock, Mutex};
|
||||
|
||||
use std::fs;
|
||||
|
||||
use super::*;
|
||||
use crate::{GENESIS_JSON, Node};
|
||||
use crate::Node;
|
||||
|
||||
fn test_config() -> Arguments {
|
||||
Arguments {
|
||||
kitchensink: PathBuf::from("substrate-node"),
|
||||
eth_proxy: PathBuf::from("eth-rpc"),
|
||||
use_kitchensink_not_dev_node: true,
|
||||
..Default::default()
|
||||
}
|
||||
fn test_config() -> ExecutionContext {
|
||||
let mut context = ExecutionContext::default();
|
||||
context.kitchensink_configuration.use_kitchensink = true;
|
||||
context
|
||||
}
|
||||
|
||||
fn new_node() -> (KitchensinkNode, Arguments) {
|
||||
fn new_node() -> (ExecutionContext, KitchensinkNode) {
|
||||
// Note: When we run the tests in the CI we found that if they're all
|
||||
// run in parallel then the CI is unable to start all of the nodes in
|
||||
// time and their start up times-out. Therefore, we want all of the
|
||||
@@ -1158,32 +1162,36 @@ mod tests {
        static NODE_START_MUTEX: Mutex<()> = Mutex::new(());
        let _guard = NODE_START_MUTEX.lock().unwrap();

        let args = test_config();
        let mut node = KitchensinkNode::new(&args);
        node.init(GENESIS_JSON)
        let context = test_config();
        let mut node = KitchensinkNode::new(&context);
        node.init(context.genesis_configuration.genesis().unwrap().clone())
            .expect("Failed to initialize the node")
            .spawn_process()
            .expect("Failed to spawn the node process");
        (node, args)
        (context, node)
    }

    /// A shared node that multiple tests can use. It starts up once.
    fn shared_node() -> &'static KitchensinkNode {
        static NODE: LazyLock<(KitchensinkNode, Arguments)> = LazyLock::new(|| {
            let (node, args) = new_node();
            (node, args)
        static NODE: LazyLock<(ExecutionContext, KitchensinkNode)> = LazyLock::new(|| {
            let (context, node) = new_node();
            (context, node)
        });
        &NODE.0
        &NODE.1
    }

    #[tokio::test]
    async fn node_mines_simple_transfer_transaction_and_returns_receipt() {
        // Arrange
        let (node, args) = new_node();
        let (context, node) = new_node();

        let provider = node.provider().await.expect("Failed to create provider");

        let account_address = args.wallet().default_signer().address();
        let account_address = context
            .wallet_configuration
            .wallet()
            .default_signer()
            .address();
        let transaction = TransactionRequest::default()
            .to(account_address)
            .value(U256::from(100_000_000_000_000u128));
@@ -1217,7 +1225,9 @@ mod tests {
        let mut dummy_node = KitchensinkNode::new(&test_config());

        // Call `init()`
        dummy_node.init(genesis_content).expect("init failed");
        dummy_node
            .init(serde_json::from_str(genesis_content).unwrap())
            .expect("init failed");

        // Check that the patched chainspec file was generated
        let final_chainspec_path = dummy_node
@@ -1327,20 +1337,10 @@ mod tests {
        }
    }

    #[test]
    fn spawn_works() {
        let config = test_config();

        let mut node = KitchensinkNode::new(&config);

        node.spawn(GENESIS_JSON.to_string()).unwrap();
    }

    #[test]
    fn version_works() {
        let config = test_config();
        let node = shared_node();

        let node = KitchensinkNode::new(&config);
        let version = node.version().unwrap();

        assert!(
@@ -1351,9 +1351,8 @@ mod tests {

    #[test]
    fn eth_rpc_version_works() {
        let config = test_config();
        let node = shared_node();

        let node = KitchensinkNode::new(&config);
        let version = node.eth_rpc_version().unwrap();

        assert!(

+14
-6
@@ -1,7 +1,8 @@
//! This crate implements the testing nodes.

use alloy::genesis::Genesis;
use revive_common::EVMVersion;
use revive_dt_config::Arguments;
use revive_dt_config::*;
use revive_dt_node_interaction::EthereumNode;

pub mod common;
@@ -10,13 +11,20 @@ pub mod geth;
pub mod kitchensink;
pub mod pool;

/// The default genesis configuration.
pub const GENESIS_JSON: &str = include_str!("../../../genesis.json");

/// An abstract interface for testing nodes.
pub trait Node: EthereumNode {
    /// Create a new uninitialized instance.
    fn new(config: &Arguments) -> Self;
    fn new(
        context: impl AsRef<WorkingDirectoryConfiguration>
            + AsRef<ConcurrencyConfiguration>
            + AsRef<GenesisConfiguration>
            + AsRef<WalletConfiguration>
            + AsRef<GethConfiguration>
            + AsRef<KitchensinkConfiguration>
            + AsRef<ReviveDevNodeConfiguration>
            + AsRef<EthRpcConfiguration>
            + Clone,
    ) -> Self;

    /// Returns the identifier of the node.
    fn id(&self) -> usize;
@@ -24,7 +32,7 @@ pub trait Node: EthereumNode {
    /// Spawns a node configured according to the genesis json.
    ///
    /// Blocking until it's ready to accept transactions.
    fn spawn(&mut self, genesis: String) -> anyhow::Result<()>;
    fn spawn(&mut self, genesis: Genesis) -> anyhow::Result<()>;

    /// Prune the node instance and related data.
    ///

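A minimal sketch of how an implementor can pull the individual sections back out of the composite `context` bound; the accessors shown (`as_path`, `path`, `wallet`) appear elsewhere in this diff, while the function itself is purely illustrative:

```rust
use revive_dt_config::{
    KitchensinkConfiguration, WalletConfiguration, WorkingDirectoryConfiguration,
};

fn unpack_context(
    context: impl AsRef<WorkingDirectoryConfiguration>
        + AsRef<KitchensinkConfiguration>
        + AsRef<WalletConfiguration>,
) {
    // Fully-qualified `as_ref` calls disambiguate which configuration section is wanted.
    let workdir = AsRef::<WorkingDirectoryConfiguration>::as_ref(&context);
    let kitchensink = AsRef::<KitchensinkConfiguration>::as_ref(&context);
    let wallet_config = AsRef::<WalletConfiguration>::as_ref(&context);

    let _base_directory = workdir.as_path();
    let _substrate_binary = kitchensink.path.clone();
    let _wallet = wallet_config.wallet();
}
```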
+42
-14
@@ -5,10 +5,13 @@ use std::{
    thread,
};

use revive_dt_common::cached_fs::read_to_string;

use anyhow::Context;
use revive_dt_config::Arguments;
use alloy::genesis::Genesis;
use anyhow::Context as _;
use revive_dt_config::{
    ConcurrencyConfiguration, EthRpcConfiguration, GenesisConfiguration, GethConfiguration,
    KitchensinkConfiguration, ReviveDevNodeConfiguration, WalletConfiguration,
    WorkingDirectoryConfiguration,
};
use tracing::info;

use crate::Node;
@@ -25,18 +28,31 @@ where
    T: Node + Send + 'static,
{
    /// Create a new Pool. This will start as many nodes as there are workers in `config`.
    pub fn new(config: &Arguments) -> anyhow::Result<Self> {
        let nodes = config.number_of_nodes;
        let genesis = read_to_string(&config.genesis_file).context(format!(
            "can not read genesis file: {}",
            config.genesis_file.display()
        ))?;
    pub fn new(
        context: impl AsRef<WorkingDirectoryConfiguration>
            + AsRef<ConcurrencyConfiguration>
            + AsRef<GenesisConfiguration>
            + AsRef<WalletConfiguration>
            + AsRef<GethConfiguration>
            + AsRef<KitchensinkConfiguration>
            + AsRef<ReviveDevNodeConfiguration>
            + AsRef<EthRpcConfiguration>
            + Send
            + Sync
            + Clone
            + 'static,
    ) -> anyhow::Result<Self> {
        let concurrency_configuration = AsRef::<ConcurrencyConfiguration>::as_ref(&context);
        let genesis_configuration = AsRef::<GenesisConfiguration>::as_ref(&context);

        let nodes = concurrency_configuration.number_of_nodes;
        let genesis = genesis_configuration.genesis()?;

        let mut handles = Vec::with_capacity(nodes);
        for _ in 0..nodes {
            let config = config.clone();
            let context = context.clone();
            let genesis = genesis.clone();
            handles.push(thread::spawn(move || spawn_node::<T>(&config, genesis)));
            handles.push(thread::spawn(move || spawn_node::<T>(context, genesis)));
        }

        let mut nodes = Vec::with_capacity(nodes);
@@ -64,8 +80,20 @@ where
    }
}

fn spawn_node<T: Node + Send>(args: &Arguments, genesis: String) -> anyhow::Result<T> {
    let mut node = T::new(args);
fn spawn_node<T: Node + Send>(
    context: impl AsRef<WorkingDirectoryConfiguration>
        + AsRef<ConcurrencyConfiguration>
        + AsRef<GenesisConfiguration>
        + AsRef<WalletConfiguration>
        + AsRef<GethConfiguration>
        + AsRef<KitchensinkConfiguration>
        + AsRef<ReviveDevNodeConfiguration>
        + AsRef<EthRpcConfiguration>
        + Clone
        + 'static,
    genesis: Genesis,
) -> anyhow::Result<T> {
    let mut node = T::new(context);
    info!(
        id = node.id(),
        connection_string = node.connection_string(),

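A hedged usage sketch of the new entry point: any type satisfying the full set of `AsRef<...>` bounds plus `Send + Sync + Clone + 'static` can drive the pool. The module paths and the assumption that the parsed CLI `Context` satisfies those bounds are illustrative, not taken from the diff:

```rust
use revive_dt_config::Context;
use revive_dt_node::{kitchensink::KitchensinkNode, pool::Pool};

fn start_pool(context: Context) -> anyhow::Result<Pool<KitchensinkNode>> {
    // Pool::new spawns one node per configured worker, handing each thread a
    // clone of the context and of the parsed genesis.
    Pool::new(context)
}
```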
@@ -12,7 +12,7 @@ use alloy_primitives::Address;
use anyhow::{Context as _, Result};
use indexmap::IndexMap;
use revive_dt_compiler::{CompilerInput, CompilerOutput, Mode};
use revive_dt_config::{Arguments, TestingPlatform};
use revive_dt_config::{Context, TestingPlatform};
use revive_dt_format::{case::CaseIdx, corpus::Corpus, metadata::ContractInstance};
use semver::Version;
use serde::Serialize;
@@ -36,11 +36,11 @@ pub struct ReportAggregator {
}

impl ReportAggregator {
    pub fn new(config: Arguments) -> Self {
    pub fn new(context: Context) -> Self {
        let (runner_tx, runner_rx) = unbounded_channel::<RunnerEvent>();
        let (listener_tx, _) = channel::<ReporterEvent>(1024);
        Self {
            report: Report::new(config),
            report: Report::new(context),
            remaining_cases: Default::default(),
            runner_tx: Some(runner_tx),
            runner_rx,
@@ -121,7 +121,12 @@ impl ReportAggregator {
            file_name.push_str(".json");
            file_name
        };
        let file_path = self.report.config.directory().join(file_name);
        let file_path = self
            .report
            .context
            .working_directory_configuration()
            .as_path()
            .join(file_name);
        let file = OpenOptions::new()
            .create(true)
            .write(true)
@@ -282,8 +287,16 @@ impl ReportAggregator {
        &mut self,
        event: PreLinkContractsCompilationSucceededEvent,
    ) {
        let include_input = self.report.config.report_include_compiler_input;
        let include_output = self.report.config.report_include_compiler_output;
        let include_input = self
            .report
            .context
            .report_configuration()
            .include_compiler_input;
        let include_output = self
            .report
            .context
            .report_configuration()
            .include_compiler_output;

        let execution_information = self.execution_information(&event.execution_specifier);

@@ -311,8 +324,16 @@ impl ReportAggregator {
        &mut self,
        event: PostLinkContractsCompilationSucceededEvent,
    ) {
        let include_input = self.report.config.report_include_compiler_input;
        let include_output = self.report.config.report_include_compiler_output;
        let include_input = self
            .report
            .context
            .report_configuration()
            .include_compiler_input;
        let include_output = self
            .report
            .context
            .report_configuration()
            .include_compiler_output;

        let execution_information = self.execution_information(&event.execution_specifier);

@@ -340,21 +361,13 @@ impl ReportAggregator {
        &mut self,
        event: PreLinkContractsCompilationFailedEvent,
    ) {
        let include_input = self.report.config.report_include_compiler_input;

        let execution_information = self.execution_information(&event.execution_specifier);

        let compiler_input = if include_input {
            event.compiler_input
        } else {
            None
        };

        execution_information.pre_link_compilation_status = Some(CompilationStatus::Failure {
            reason: event.reason,
            compiler_version: event.compiler_version,
            compiler_path: event.compiler_path,
            compiler_input,
            compiler_input: event.compiler_input,
        });
    }

@@ -362,21 +375,13 @@ impl ReportAggregator {
        &mut self,
        event: PostLinkContractsCompilationFailedEvent,
    ) {
        let include_input = self.report.config.report_include_compiler_input;

        let execution_information = self.execution_information(&event.execution_specifier);

        let compiler_input = if include_input {
            event.compiler_input
        } else {
            None
        };

        execution_information.post_link_compilation_status = Some(CompilationStatus::Failure {
            reason: event.reason,
            compiler_version: event.compiler_version,
            compiler_path: event.compiler_path,
            compiler_input,
            compiler_input: event.compiler_input,
        });
    }

@@ -422,12 +427,8 @@ impl ReportAggregator {
#[serde_as]
#[derive(Clone, Debug, Serialize)]
pub struct Report {
    /// The configuration that the tool was started up with.
    pub config: Arguments,
    /// The platform of the leader chain.
    pub leader_platform: TestingPlatform,
    /// The platform of the follower chain.
    pub follower_platform: TestingPlatform,
    /// The context that the tool was started up with.
    pub context: Context,
    /// The list of corpus files that the tool found.
    pub corpora: Vec<Corpus>,
    /// The list of metadata files that were found by the tool.
@@ -439,11 +440,9 @@ pub struct Report {
}

impl Report {
    pub fn new(config: Arguments) -> Self {
    pub fn new(context: Context) -> Self {
        Self {
            leader_platform: config.leader,
            follower_platform: config.follower,
            config,
            context,
            corpora: Default::default(),
            metadata_files: Default::default(),
            test_case_information: Default::default(),
@@ -533,12 +532,12 @@ pub enum CompilationStatus {
        /// The path of the compiler used to compile the contracts.
        compiler_path: PathBuf,
        /// The input provided to the compiler to compile the contracts. This is only included if
        /// the appropriate flag is set in the CLI configuration and if the contracts were not
        /// cached and the compiler was invoked.
        /// the appropriate flag is set in the CLI context and if the contracts were not cached and
        /// the compiler was invoked.
        #[serde(skip_serializing_if = "Option::is_none")]
        compiler_input: Option<CompilerInput>,
        /// The output of the compiler. This is only included if the appropriate flag is set in the
        /// CLI configurations.
        /// CLI contexts.
        #[serde(skip_serializing_if = "Option::is_none")]
        compiler_output: Option<CompilerOutput>,
    },
@@ -553,8 +552,8 @@ pub enum CompilationStatus {
        #[serde(skip_serializing_if = "Option::is_none")]
        compiler_path: Option<PathBuf>,
        /// The input provided to the compiler to compile the contracts. This is only included if
        /// the appropriate flag is set in the CLI configuration and if the contracts were not
        /// cached and the compiler was invoked.
        /// the appropriate flag is set in the CLI context and if the contracts were not cached and
        /// the compiler was invoked.
        #[serde(skip_serializing_if = "Option::is_none")]
        compiler_input: Option<CompilerInput>,
    },

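A small sketch of the new accessor chain used throughout the hunks above; the field names (`include_compiler_input`, `include_compiler_output`) come from the diff, while the helper function itself is illustrative:

```rust
use revive_dt_config::Context;

// Returns (include_input, include_output) as read from the report configuration.
fn compiler_io_flags(context: &Context) -> (bool, bool) {
    let report = context.report_configuration();
    (report.include_compiler_input, report.include_compiler_output)
}
```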
@@ -9,10 +9,11 @@ use std::{
    sync::LazyLock,
};

use semver::Version;
use tokio::sync::Mutex;

use crate::download::SolcDownloader;
use anyhow::Context;
use anyhow::Context as _;

pub const SOLC_CACHE_DIRECTORY: &str = "solc";
pub(crate) static SOLC_CACHER: LazyLock<Mutex<HashSet<PathBuf>>> = LazyLock::new(Default::default);
@@ -20,7 +21,7 @@ pub(crate) static SOLC_CACHER: LazyLock<Mutex<HashSet<PathBuf>>> = LazyLock::new
pub(crate) async fn get_or_download(
    working_directory: &Path,
    downloader: &SolcDownloader,
) -> anyhow::Result<PathBuf> {
) -> anyhow::Result<(Version, PathBuf)> {
    let target_directory = working_directory
        .join(SOLC_CACHE_DIRECTORY)
        .join(downloader.version.to_string());
@@ -29,7 +30,7 @@ pub(crate) async fn get_or_download(
    let mut cache = SOLC_CACHER.lock().await;
    if cache.contains(&target_file) {
        tracing::debug!("using cached solc: {}", target_file.display());
        return Ok(target_file);
        return Ok((downloader.version.clone(), target_file));
    }

    create_dir_all(&target_directory).with_context(|| {
@@ -48,7 +49,7 @@ pub(crate) async fn get_or_download(
    })?;
    cache.insert(target_file.clone());

    Ok(target_file)
    Ok((downloader.version.clone(), target_file))
}

async fn download_to_file(path: &Path, downloader: &SolcDownloader) -> anyhow::Result<()> {

@@ -11,7 +11,7 @@ use semver::Version;
use sha2::{Digest, Sha256};

use crate::list::List;
use anyhow::Context;
use anyhow::Context as _;

pub static LIST_CACHE: LazyLock<Mutex<HashMap<&'static str, List>>> =
    LazyLock::new(Default::default);

@@ -5,11 +5,12 @@

use std::path::{Path, PathBuf};

use anyhow::Context;
use anyhow::Context as _;
use cache::get_or_download;
use download::SolcDownloader;

use revive_dt_common::types::VersionOrRequirement;
use semver::Version;

pub mod cache;
pub mod download;
@@ -24,7 +25,7 @@ pub async fn download_solc(
    cache_directory: &Path,
    version: impl Into<VersionOrRequirement>,
    wasm: bool,
) -> anyhow::Result<PathBuf> {
) -> anyhow::Result<(Version, PathBuf)> {
    let downloader = if wasm {
        SolcDownloader::wasm(version).await
    } else if cfg!(target_os = "linux") {

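Callers of `download_solc` (and of the internal `get_or_download`) now receive the resolved compiler version alongside the binary path. A hedged usage sketch, assuming a plain `semver::Version` converts into `VersionOrRequirement` and that the function is called from within the same crate:

```rust
use std::path::Path;

async fn solc_for_tests(cache_directory: &Path) -> anyhow::Result<()> {
    let requested = semver::Version::new(0, 8, 29);
    // `false` selects the native (non-wasm) build, per the signature above.
    let (resolved, binary) = download_solc(cache_directory, requested, false).await?;
    tracing::info!(version = %resolved, path = %binary.display(), "solc ready");
    Ok(())
}
```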
+6
-6
@@ -89,13 +89,13 @@ echo "This may take a while..."
echo ""

# Run the tool
RUST_LOG="error" cargo run --release -- \
RUST_LOG="error" cargo run --release -- execute-tests \
    --corpus "$CORPUS_FILE" \
    --workdir "$WORKDIR" \
    --number-of-nodes 5 \
    --kitchensink "$SUBSTRATE_NODE_BIN" \
    --revive-dev-node "$REVIVE_DEV_NODE_BIN" \
    --eth_proxy "$ETH_RPC_BIN" \
    --working-directory "$WORKDIR" \
    --concurrency.number-of-nodes 5 \
    --kitchensink.path "$SUBSTRATE_NODE_BIN" \
    --revive-dev-node.path "$REVIVE_DEV_NODE_BIN" \
    --eth-rpc.path "$ETH_RPC_BIN" \
    > logs.log \
    2> output.log

+497
@@ -0,0 +1,497 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"title": "Metadata",
"description": "A MatterLabs metadata file.\n\nThis defines the structure that the MatterLabs metadata files follow for defining the tests or\nthe workloads.\n\nEach metadata file is composed of multiple test cases where each test case is isolated from the\nothers and runs in a completely different address space. Each test case is composed of a number\nof steps and assertions that should be performed as part of the test case.",
"type": "object",
"properties": {
"comment": {
"description": "This is an optional comment on the metadata file which has no impact on the execution in any\nway.",
"type": [
"string",
"null"
]
},
"ignore": {
"description": "An optional boolean which defines if the metadata file as a whole should be ignored. If null\nthen the metadata file will not be ignored.",
"type": [
"boolean",
"null"
]
},
"targets": {
"description": "An optional vector of targets that this Metadata file's cases can be executed on. As an\nexample, if we wish for the metadata file's cases to only be run on PolkaVM then we'd\nspecify a target of \"PolkaVM\" in here.",
"type": [
"array",
"null"
],
"items": {
"type": "string"
}
},
"cases": {
"description": "A vector of the test cases and workloads contained within the metadata file. This is their\nprimary description.",
"type": "array",
"items": {
"$ref": "#/$defs/Case"
}
},
"contracts": {
"description": "A map of all of the contracts that the test requires to run.\n\nThis is a map where the key is the name of the contract instance and the value is the\ncontract's path and ident in the file.\n\nIf any contract is to be used by the test then it must be included in here first so that the\nframework is aware of its path, compiles it, and prepares it.",
"type": [
"object",
"null"
],
"additionalProperties": {
"$ref": "#/$defs/ContractPathAndIdent"
}
},
"libraries": {
"description": "The set of libraries that this metadata file requires.",
"type": [
"object",
"null"
],
"additionalProperties": {
"type": "object",
"additionalProperties": {
"$ref": "#/$defs/ContractInstance"
}
}
},
"modes": {
"description": "This represents a mode that has been parsed from test metadata.\n\nMode strings can take the following form (in pseudo-regex):\n\n```text\n[YEILV][+-]? (M[0123sz])? <semver>?\n```",
"type": [
"array",
"null"
],
"items": {
"$ref": "#/$defs/ParsedMode"
}
},
"required_evm_version": {
"description": "This field specifies an EVM version requirement that the test case has where the test might\nbe run of the evm version of the nodes match the evm version specified here.",
"anyOf": [
{
"$ref": "#/$defs/EvmVersionRequirement"
},
{
"type": "null"
}
]
},
"compiler_directives": {
"description": "A set of compilation directives that will be passed to the compiler whenever the contracts\nfor the test are being compiled. Note that this differs from the [`Mode`]s in that a [`Mode`]\nis just a filter for when a test can run whereas this is an instruction to the compiler.",
"anyOf": [
{
"$ref": "#/$defs/CompilationDirectives"
},
{
"type": "null"
}
]
}
},
"required": [
"cases"
],
"$defs": {
"Case": {
"type": "object",
"properties": {
"name": {
"description": "An optional name of the test case.",
"type": [
"string",
"null"
]
},
"comment": {
"description": "An optional comment on the case which has no impact on the execution in any way.",
"type": [
"string",
"null"
]
},
"modes": {
"description": "This represents a mode that has been parsed from test metadata.\n\nMode strings can take the following form (in pseudo-regex):\n\n```text\n[YEILV][+-]? (M[0123sz])? <semver>?\n```\n\nIf this is provided then it takes higher priority than the modes specified in the metadata\nfile.",
"type": [
"array",
"null"
],
"items": {
"$ref": "#/$defs/ParsedMode"
}
},
"inputs": {
"description": "The set of steps to run as part of this test case.",
"type": "array",
"items": {
"$ref": "#/$defs/Step"
}
},
"group": {
"description": "An optional name of the group of tests that this test belongs to.",
"type": [
"string",
"null"
]
},
"expected": {
"description": "An optional set of expectations and assertions to make about the transaction after it ran.\n\nIf this is not specified then the only assertion that will be ran is that the transaction\nwas successful.\n\nThis expectation that's on the case itself will be attached to the final step of the case.",
"anyOf": [
{
"$ref": "#/$defs/Expected"
},
{
"type": "null"
}
]
},
"ignore": {
"description": "An optional boolean which defines if the case as a whole should be ignored. If null then the\ncase will not be ignored.",
"type": [
"boolean",
"null"
]
}
},
"required": [
"inputs"
]
},
"ParsedMode": {
"description": "This represents a mode that has been parsed from test metadata.\n\nMode strings can take the following form (in pseudo-regex):\n\n```text\n[YEILV][+-]? (M[0123sz])? <semver>?\n```\n\nWe can parse valid mode strings into [`ParsedMode`] using [`ParsedMode::from_str`].",
"type": "string"
},
"Step": {
"description": "A test step.\n\nA test step can be anything. It could be an invocation to a function, an assertion, or any other\naction that needs to be run or executed on the nodes used in the tests.",
"anyOf": [
{
"description": "A function call or an invocation to some function on some smart contract.",
"$ref": "#/$defs/Input"
},
{
"description": "A step for performing a balance assertion on some account or contract.",
"$ref": "#/$defs/BalanceAssertion"
},
{
"description": "A step for asserting that the storage of some contract or account is empty.",
"$ref": "#/$defs/StorageEmptyAssertion"
}
]
},
"Input": {
"description": "This is an input step which is a transaction description that the framework translates into a\ntransaction and executes on the nodes.",
"type": "object",
"properties": {
"caller": {
"description": "The address of the account performing the call and paying the fees for it.",
"type": "string",
"default": "0x90f8bf6a479f320ead074411a4b0e7944ea8c9c1"
},
"comment": {
"description": "An optional comment on the step which has no impact on the execution in any way.",
"type": [
"string",
"null"
]
},
"instance": {
"description": "The contract instance that's being called in this transaction step.",
"$ref": "#/$defs/ContractInstance",
"default": "Test"
},
"method": {
"description": "The method that's being called in this step.",
"$ref": "#/$defs/Method"
},
"calldata": {
"description": "The calldata that the function should be invoked with.",
"$ref": "#/$defs/Calldata",
"default": []
},
"expected": {
"description": "A set of assertions and expectations to have for the transaction.",
"anyOf": [
{
"$ref": "#/$defs/Expected"
},
{
"type": "null"
}
]
},
"value": {
"description": "An optional value to provide as part of the transaction.",
"anyOf": [
{
"$ref": "#/$defs/EtherValue"
},
{
"type": "null"
}
]
},
"variable_assignments": {
"description": "Variable assignment to perform in the framework allowing us to reference them again later on\nduring the execution.",
"anyOf": [
{
"$ref": "#/$defs/VariableAssignments"
},
{
"type": "null"
}
]
}
},
"required": [
"method"
]
},
"ContractInstance": {
"description": "Represents a contract instance found a metadata file.\n\nTypically, this is used as the key to the \"contracts\" field of metadata files.",
"type": "string"
},
"Method": {
"description": "Specify how the contract is called.",
"anyOf": [
{
"description": "Initiate a deploy transaction, calling contracts constructor.\n\nIndicated by `#deployer`.",
"type": "string",
"const": "#deployer"
},
{
"description": "Does not calculate and insert a function selector.\n\nIndicated by `#fallback`.",
"type": "string",
"const": "#fallback"
},
{
"description": "Call the public function with the given name.",
"type": "string"
}
]
},
"Calldata": {
"description": "A type definition for the calldata supported by the testing framework.\n\nWe choose to document all of the types used in [`Calldata`] in this one doc comment to elaborate\non why they exist and consolidate all of the documentation for calldata in a single place where\nit can be viewed and understood.\n\nThe [`Single`] variant of this enum is quite simple and straightforward: it's a hex-encoded byte\narray of the calldata.\n\nThe [`Compound`] type is more intricate and allows for capabilities such as resolution and some\nsimple arithmetic operations. It houses a vector of [`CalldataItem`]s which is just a wrapper\naround an owned string.\n\nA [`CalldataItem`] could be a simple hex string of a single calldata argument, but it could also\nbe something that requires resolution such as `MyContract.address` which is a variable that is\nunderstood by the resolution logic to mean \"Lookup the address of this particular contract\ninstance\".\n\nIn addition to the above, the format supports some simple arithmetic operations like add, sub,\ndivide, multiply, bitwise AND, bitwise OR, and bitwise XOR. Our parser understands the [reverse\npolish notation] simply because it's easy to write a calculator for that notation and since we\ndo not have plans to use arithmetic too often in tests. In reverse polish notation a typical\n`2 + 4` would be written as `2 4 +` which makes this notation very simple to implement through\na stack.\n\nCombining the above, a single [`CalldataItem`] could employ both resolution and arithmetic at\nthe same time. For example, a [`CalldataItem`] of `$BLOCK_NUMBER $BLOCK_NUMBER +` means that\nthe block number should be retrieved and then it should be added to itself.\n\nInternally, we split the [`CalldataItem`] by spaces. Therefore, `$BLOCK_NUMBER $BLOCK_NUMBER+`\nis invalid but `$BLOCK_NUMBER $BLOCK_NUMBER +` is valid and can be understood by the parser and\ncalculator. After the split is done, each token is parsed into a [`CalldataToken<&str>`] forming\nan [`Iterator`] over [`CalldataToken<&str>`]. A [`CalldataToken<&str>`] can then be resolved\ninto a [`CalldataToken<U256>`] through the resolution logic. Finally, after resolution is done,\nthis iterator of [`CalldataToken<U256>`] is collapsed into the final result by applying the\narithmetic operations requested.\n\nFor example, supplying a [`Compound`] calldata of `0xdeadbeef` produces an iterator of a single\n[`CalldataToken<&str>`] items of the value [`CalldataToken::Item`] of the string value 12 which\nwe can then resolve into the appropriate [`U256`] value and convert into calldata.\n\nIn summary, the various types used in [`Calldata`] represent the following:\n- [`CalldataItem`]: A calldata string from the metadata files.\n- [`CalldataToken<&str>`]: Typically used in an iterator of items from the space splitted\n [`CalldataItem`] and represents a token that has not yet been resolved into its value.\n- [`CalldataToken<U256>`]: Represents a token that's been resolved from being a string and into\n the word-size calldata argument on which we can perform arithmetic.\n\n[`Single`]: Calldata::Single\n[`Compound`]: Calldata::Compound\n[reverse polish notation]: https://en.wikipedia.org/wiki/Reverse_Polish_notation",
"anyOf": [
{
"type": "string"
},
{
"type": "array",
"items": {
"$ref": "#/$defs/CalldataItem"
}
}
]
},
"CalldataItem": {
"description": "This represents an item in the [`Calldata::Compound`] variant. Each item will be resolved\naccording to the resolution rules of the tool.",
"type": "string"
},
"Expected": {
"description": "A set of expectations and assertions to make about the transaction after it ran.\n\nIf this is not specified then the only assertion that will be ran is that the transaction\nwas successful.",
"anyOf": [
{
"description": "An assertion that the transaction succeeded and returned the provided set of data.",
"$ref": "#/$defs/Calldata"
},
{
"description": "A more complex assertion.",
"$ref": "#/$defs/ExpectedOutput"
},
{
"description": "A set of assertions.",
"type": "array",
"items": {
"$ref": "#/$defs/ExpectedOutput"
}
}
]
},
"ExpectedOutput": {
"description": "A set of assertions to run on the transaction.",
"type": "object",
"properties": {
"compiler_version": {
"description": "An optional compiler version that's required in order for this assertion to run.",
"type": [
"string",
"null"
]
},
"return_data": {
"description": "An optional field of the expected returns from the invocation.",
"anyOf": [
{
"$ref": "#/$defs/Calldata"
},
{
"type": "null"
}
]
},
"events": {
"description": "An optional set of assertions to run on the emitted events from the transaction.",
"type": [
"array",
"null"
],
"items": {
"$ref": "#/$defs/Event"
}
},
"exception": {
"description": "A boolean which defines whether we expect the transaction to succeed or fail.",
"type": "boolean",
"default": false
}
}
},
"Event": {
"type": "object",
"properties": {
"address": {
"description": "An optional field of the address of the emitter of the event.",
"type": [
"string",
"null"
]
},
"topics": {
"description": "The set of topics to expect the event to have.",
"type": "array",
"items": {
"type": "string"
}
},
"values": {
"description": "The set of values to expect the event to have.",
"$ref": "#/$defs/Calldata"
}
},
"required": [
"topics",
"values"
]
},
"EtherValue": {
"description": "Defines an Ether value.\n\nThis is an unsigned 256 bit integer that's followed by some denomination which can either be\neth, ether, gwei, or wei.",
"type": "string"
},
"VariableAssignments": {
"type": "object",
"properties": {
"return_data": {
"description": "A vector of the variable names to assign to the return data.\n\nExample: `UniswapV3PoolAddress`",
"type": "array",
"items": {
"type": "string"
}
}
},
"required": [
"return_data"
]
},
"BalanceAssertion": {
"description": "This represents a balance assertion step where the framework needs to query the balance of some\naccount or contract and assert that it's some amount.",
"type": "object",
"properties": {
"comment": {
"description": "An optional comment on the balance assertion.",
"type": [
"string",
"null"
]
},
"address": {
"description": "The address that the balance assertion should be done on.\n\nThis is a string which will be resolved into an address when being processed. Therefore,\nthis could be a normal hex address, a variable such as `Test.address`, or perhaps even a\nfull on variable like `$VARIABLE:Uniswap`. It follows the same resolution rules that are\nfollowed in the calldata.",
"type": "string"
},
"expected_balance": {
"description": "The amount of balance to assert that the account or contract has. This is a 256 bit string\nthat's serialized and deserialized into a decimal string.",
"type": "string"
}
},
"required": [
"address",
"expected_balance"
]
},
"StorageEmptyAssertion": {
"type": "object",
"properties": {
"comment": {
"description": "An optional comment on the storage empty assertion.",
"type": [
"string",
"null"
]
},
"address": {
"description": "The address that the balance assertion should be done on.\n\nThis is a string which will be resolved into an address when being processed. Therefore,\nthis could be a normal hex address, a variable such as `Test.address`, or perhaps even a\nfull on variable like `$VARIABLE:Uniswap`. It follows the same resolution rules that are\nfollowed in the calldata.",
"type": "string"
},
"is_storage_empty": {
"description": "A boolean of whether the storage of the address is empty or not.",
"type": "boolean"
}
},
"required": [
"address",
"is_storage_empty"
]
},
"ContractPathAndIdent": {
"description": "Represents an identifier used for contracts.\n\nThe type supports serialization from and into the following string format:\n\n```text\n${path}:${contract_ident}\n```",
"type": "string"
},
"EvmVersionRequirement": {
"description": "An EVM version requirement that the test case has. This gets serialized and deserialized from\nand into [`String`]. This follows a simple format of (>=|<=|=|>|<) followed by a string of the\nEVM version.\n\nWhen specified, the framework will only run the test if the node's EVM version matches that\nrequired by the metadata file.",
"type": "string"
},
"CompilationDirectives": {
"description": "A set of compilation directives that will be passed to the compiler whenever the contracts for\nthe test are being compiled. Note that this differs from the [`Mode`]s in that a [`Mode`] is\njust a filter for when a test can run whereas this is an instruction to the compiler.\nDefines how the compiler should handle revert strings.",
"type": "object",
"properties": {
"revert_string_handling": {
"description": "Defines how the revert strings should be handled.",
"anyOf": [
{
"$ref": "#/$defs/RevertString"
},
{
"type": "null"
}
]
}
}
},
"RevertString": {
"description": "Defines how the compiler should handle revert strings.",
"oneOf": [
{
"description": "The default handling of the revert strings.",
"type": "string",
"const": "default"
},
{
"description": "The debug handling of the revert strings.",
"type": "string",
"const": "debug"
},
{
"description": "Strip the revert strings.",
"type": "string",
"const": "strip"
},
{
"description": "Provide verbose debug strings for the revert string.",
"type": "string",
"const": "verboseDebug"
}
]
}
}
}
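To make the schema concrete, here is a sketch of a minimal metadata document that satisfies it: only `cases`, each case's `inputs`, and each input's `method` are required. The contract path, method name, and calldata below are purely illustrative:

```rust
use serde_json::json;

fn minimal_metadata() -> serde_json::Value {
    json!({
        "comment": "illustrative only",
        "contracts": {
            // ContractPathAndIdent takes the form "${path}:${contract_ident}".
            "Test": "contracts/Test.sol:Test"
        },
        "cases": [
            {
                "name": "deploy_and_read",
                "inputs": [
                    { "instance": "Test", "method": "#deployer" },
                    {
                        "instance": "Test",
                        "method": "get",
                        // Expected given as plain calldata: the call must succeed
                        // and return exactly this word.
                        "expected": ["0x0000000000000000000000000000000000000000000000000000000000000000"]
                    }
                ]
            }
        ]
    })
}
```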