Compare commits

..

4 Commits

Author SHA1 Message Date
Omar Abdulla 7247eca2e8 Add a cached fs abstraction 2025-08-14 17:38:12 +03:00
Omar f2045db0e9 Add compiler directives to metadata (#139) 2025-08-14 07:38:56 +00:00
Omar 5a11f44673 Misc features/improvements (#138)
* Implement various needed features and improvements

* Reorder the metadata struct

* Format comments
2025-08-13 13:50:06 +00:00
James Wilson 46aea0890d Split reporter and case runner, use channels to pass test reports (#137)
* Use channels to send data to reporting thread and avoid hangs / mutex / duration. Limit max concurrent tasks to avoid too many open files

* More appropriate name for driver/reporter task fns

* Back to parallelise individual cases, report individual cases, address grumbles

* newline before 'Failures' title in report
2025-08-13 13:10:26 +00:00
14 changed files with 499 additions and 197 deletions
Generated
+140 -2
@@ -1644,6 +1644,15 @@ version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
[[package]]
name = "crossbeam-channel"
version = "0.5.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "crossbeam-deque"
version = "0.8.6"
@@ -2399,6 +2408,20 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9"
[[package]]
name = "generator"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d18470a76cb7f8ff746cf1f7470914f900252ec36bbc40b569d74b1258446827"
dependencies = [
"cc",
"cfg-if",
"libc",
"log",
"rustversion",
"windows",
]
[[package]]
name = "generic-array"
version = "0.14.7"
@@ -3165,6 +3188,19 @@ version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
[[package]]
name = "loom"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
dependencies = [
"cfg-if",
"generator",
"scoped-tls",
"tracing",
"tracing-subscriber",
]
[[package]]
name = "lru"
version = "0.13.0"
@@ -3247,6 +3283,25 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "moka"
version = "0.12.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9321642ca94a4282428e6ea4af8cc2ca4eac48ac7a6a4ea8f33f76d0ce70926"
dependencies = [
"crossbeam-channel",
"crossbeam-epoch",
"crossbeam-utils",
"loom",
"parking_lot",
"portable-atomic",
"rustc_version 0.4.1",
"smallvec",
"tagptr",
"thiserror 1.0.69",
"uuid",
]
[[package]]
name = "native-tls"
version = "0.2.14"
@@ -3646,6 +3701,12 @@ dependencies = [
"syn 2.0.101",
]
[[package]]
name = "portable-atomic"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
[[package]]
name = "potential_utf"
version = "0.1.2"
@@ -4029,6 +4090,8 @@ name = "revive-dt-common"
version = "0.1.0"
dependencies = [
"anyhow",
"moka",
"once_cell",
"semver 1.0.26",
"tokio",
]
@@ -4424,6 +4487,12 @@ dependencies = [
"zeroize",
]
[[package]]
name = "scoped-tls"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
[[package]]
name = "scopeguard"
version = "1.2.0"
@@ -5276,6 +5345,12 @@ dependencies = [
"libc",
]
[[package]]
name = "tagptr"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417"
[[package]]
name = "tap"
version = "1.0.1"
@@ -5808,6 +5883,17 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "uuid"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f33196643e165781c20a5ead5582283a7dacbb87855d867fbc2df3f81eddc1be"
dependencies = [
"getrandom 0.3.3",
"js-sys",
"wasm-bindgen",
]
[[package]]
name = "valuable"
version = "0.1.1"
@@ -6069,6 +6155,28 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows"
version = "0.61.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893"
dependencies = [
"windows-collections",
"windows-core",
"windows-future",
"windows-link",
"windows-numerics",
]
[[package]]
name = "windows-collections"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8"
dependencies = [
"windows-core",
]
[[package]]
name = "windows-core"
version = "0.61.2"
@@ -6082,6 +6190,17 @@ dependencies = [
"windows-strings 0.4.2",
]
[[package]]
name = "windows-future"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e"
dependencies = [
"windows-core",
"windows-link",
"windows-threading",
]
[[package]]
name = "windows-implement"
version = "0.60.0"
@@ -6106,9 +6225,19 @@ dependencies = [
[[package]]
name = "windows-link"
version = "0.1.1"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38"
checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
[[package]]
name = "windows-numerics"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1"
dependencies = [
"windows-core",
"windows-link",
]
[[package]]
name = "windows-registry"
@@ -6198,6 +6327,15 @@ dependencies = [
"windows_x86_64_msvc 0.53.0",
]
[[package]]
name = "windows-threading"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6"
dependencies = [
"windows-link",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
+1
@@ -29,6 +29,7 @@ clap = { version = "4", features = ["derive"] }
foundry-compilers-artifacts = { version = "0.18.0" }
futures = { version = "0.3.31" }
hex = "0.4.3"
moka = "0.12.10"
reqwest = { version = "0.12.15", features = ["json"] }
once_cell = "1.21"
semver = { version = "1.0", features = ["serde"] }
+2
@@ -10,5 +10,7 @@ rust-version.workspace = true
[dependencies]
anyhow = { workspace = true }
moka = { workspace = true, features = ["sync"] }
once_cell = { workspace = true }
semver = { workspace = true }
tokio = { workspace = true, default-features = false, features = ["time"] }
+49
@@ -0,0 +1,49 @@
//! This module implements a cached file system allowing for results to be stored in-memory rather
//! than being queried from the file system again.
use std::fs;
use std::io::{Error, Result};
use std::path::{Path, PathBuf};
use moka::sync::Cache;
use once_cell::sync::Lazy;
pub fn read(path: impl AsRef<Path>) -> Result<Vec<u8>> {
static READ_CACHE: Lazy<Cache<PathBuf, Vec<u8>>> = Lazy::new(|| Cache::new(10_000));
let path = path.as_ref().canonicalize()?;
match READ_CACHE.get(path.as_path()) {
Some(content) => Ok(content),
None => {
let content = fs::read(path.as_path())?;
READ_CACHE.insert(path, content.clone());
Ok(content)
}
}
}
pub fn read_to_string(path: impl AsRef<Path>) -> Result<String> {
let content = read(path)?;
String::from_utf8(content).map_err(|_| {
Error::new(
std::io::ErrorKind::InvalidData,
"The contents of the file are not valid UTF8",
)
})
}
pub fn read_dir(path: impl AsRef<Path>) -> Result<Box<dyn Iterator<Item = Result<PathBuf>>>> {
static READ_DIR_CACHE: Lazy<Cache<PathBuf, Vec<PathBuf>>> = Lazy::new(|| Cache::new(10_000));
let path = path.as_ref().canonicalize()?;
match READ_DIR_CACHE.get(path.as_path()) {
Some(entries) => Ok(Box::new(entries.into_iter().map(Ok)) as Box<_>),
None => {
let entries = fs::read_dir(path.as_path())?
.flat_map(|maybe_entry| maybe_entry.map(|entry| entry.path()))
.collect();
READ_DIR_CACHE.insert(path.clone(), entries);
Ok(read_dir(path).unwrap())
}
}
}
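
For orientation, a minimal sketch of how the new wrappers are meant to be called from other crates in the workspace (the `revive_dt_common::cached_fs` path comes from the later hunks in this compare; the file paths here are made up). Note that entries are keyed by canonical path and never invalidated, so the cache is only appropriate when the files are not expected to change during a run.

use std::path::PathBuf;

use revive_dt_common::cached_fs;

fn main() -> std::io::Result<()> {
    // The first call hits the real file system and populates the in-memory cache.
    let first = cached_fs::read_to_string("tests/corpus/Example.sol")?;

    // Later calls for the same canonical path are served from the cache, even if
    // the file has changed on disk in the meantime.
    let second = cached_fs::read_to_string("tests/corpus/Example.sol")?;
    assert_eq!(first, second);

    // Directory listings are cached in the same way and yield plain `PathBuf`s.
    let entries = cached_fs::read_dir("tests/corpus")?
        .collect::<std::io::Result<Vec<PathBuf>>>()?;
    println!("{} cached entries", entries.len());
    Ok(())
}
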
@@ -19,6 +19,11 @@ pub struct FilesWithExtensionIterator {
/// this vector then they will be returned when the [`Iterator::next`] method is called. If not
/// then we visit one of the remaining directories to search.
files_matching_allowed_extensions: Vec<PathBuf>,
/// This option controls whether the cached file system should be used. This can be better
/// for cases where the entries in the directories do not change and caching is therefore
/// safe to use.
use_cached_fs: bool,
}
impl FilesWithExtensionIterator {
@@ -27,6 +32,7 @@ impl FilesWithExtensionIterator {
allowed_extensions: Default::default(),
directories_to_search: vec![root_directory.as_ref().to_path_buf()],
files_matching_allowed_extensions: Default::default(),
use_cached_fs: Default::default(),
}
}
@@ -37,6 +43,11 @@ impl FilesWithExtensionIterator {
self.allowed_extensions.insert(allowed_extension.into());
self
}
pub fn with_use_cached_fs(mut self, use_cached_fs: bool) -> Self {
self.use_cached_fs = use_cached_fs;
self
}
}
impl Iterator for FilesWithExtensionIterator {
@@ -49,16 +60,19 @@ impl Iterator for FilesWithExtensionIterator {
let directory_to_search = self.directories_to_search.pop()?;
// Read all of the entries in the directory. If we failed to read this dir's entries then we
// elect to just ignore it and look in the next directory; we do that by calling the next
// method again on the iterator, which is an intentional decision made here instead of
// panicking.
let Ok(dir_entries) = std::fs::read_dir(directory_to_search) else {
return self.next();
let iterator = if self.use_cached_fs {
let Ok(dir_entries) = crate::cached_fs::read_dir(directory_to_search.as_path()) else {
return self.next();
};
Box::new(dir_entries) as Box<dyn Iterator<Item = std::io::Result<PathBuf>>>
} else {
let Ok(dir_entries) = std::fs::read_dir(directory_to_search) else {
return self.next();
};
Box::new(dir_entries.map(|maybe_entry| maybe_entry.map(|entry| entry.path()))) as Box<_>
};
for entry in dir_entries.flatten() {
let entry_path = entry.path();
for entry_path in iterator.flatten() {
if entry_path.is_dir() {
self.directories_to_search.push(entry_path)
} else if entry_path.is_file()
+1
@@ -1,6 +1,7 @@
//! This crate provides common concepts, functionality, types, macros, and more that other crates in
//! the workspace can benefit from.
pub mod cached_fs;
pub mod fs;
pub mod futures;
pub mod iterators;
+23 -1
@@ -5,7 +5,6 @@
use std::{
collections::HashMap,
fs::read_to_string,
hash::Hash,
path::{Path, PathBuf},
};
@@ -16,6 +15,7 @@ use semver::Version;
use serde::{Deserialize, Serialize};
use revive_common::EVMVersion;
use revive_dt_common::cached_fs::read_to_string;
use revive_dt_common::types::VersionOrRequirement;
use revive_dt_config::Arguments;
@@ -55,6 +55,7 @@ pub struct CompilerInput {
pub base_path: Option<PathBuf>,
pub sources: HashMap<PathBuf, String>,
pub libraries: HashMap<PathBuf, HashMap<String, Address>>,
pub revert_string_handling: Option<RevertString>,
}
/// The generic compilation output configuration.
@@ -91,6 +92,7 @@ where
base_path: Default::default(),
sources: Default::default(),
libraries: Default::default(),
revert_string_handling: Default::default(),
},
additional_options: T::Options::default(),
}
@@ -142,6 +144,14 @@ where
self
}
pub fn with_revert_string_handling(
mut self,
revert_string_handling: impl Into<Option<RevertString>>,
) -> Self {
self.input.revert_string_handling = revert_string_handling.into();
self
}
pub fn with_additional_options(mut self, options: impl Into<T::Options>) -> Self {
self.additional_options = options.into();
self
@@ -160,3 +170,15 @@ where
self.input.clone()
}
}
/// Defines how the compiler should handle revert strings.
#[derive(
Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Serialize, Deserialize,
)]
pub enum RevertString {
#[default]
Default,
Debug,
Strip,
VerboseDebug,
}
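
The builder's `impl Into<Option<RevertString>>` parameter is an ergonomics choice: callers can pass either a bare variant or `None` without writing `Some(...)`. A simplified, self-contained sketch of that idiom (the real `CompilerInput` builder in this diff is generic over the compiler and carries many more fields):

#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
enum RevertString {
    #[default]
    Default,
    Debug,
    Strip,
    VerboseDebug,
}

#[derive(Default)]
struct InputBuilder {
    revert_string_handling: Option<RevertString>,
}

impl InputBuilder {
    // `impl Into<Option<RevertString>>` accepts both a bare variant (via the
    // blanket `From<T> for Option<T>` impl) and an explicit `Option`.
    fn with_revert_string_handling(
        mut self,
        revert_string_handling: impl Into<Option<RevertString>>,
    ) -> Self {
        self.revert_string_handling = revert_string_handling.into();
        self
    }
}

fn main() {
    let debug = InputBuilder::default().with_revert_string_handling(RevertString::Debug);
    let unset = InputBuilder::default().with_revert_string_handling(None);
    assert_eq!(debug.revert_string_handling, Some(RevertString::Debug));
    assert_eq!(unset.revert_string_handling, None);
}
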
+3
@@ -47,6 +47,9 @@ impl SolidityCompiler for Resolc {
base_path,
sources,
libraries,
// TODO: this is currently not being handled since there is no way to pass it into
// resolc. So, we need to go back to this later once it's supported.
revert_string_handling: _,
}: CompilerInput,
additional_options: Self::Options,
) -> anyhow::Result<CompilerOutput> {
+10
@@ -42,6 +42,7 @@ impl SolidityCompiler for Solc {
base_path,
sources,
libraries,
revert_string_handling,
}: CompilerInput,
_: Self::Options,
) -> anyhow::Result<CompilerOutput> {
@@ -87,6 +88,15 @@ impl SolidityCompiler for Solc {
})
.collect(),
},
debug: revert_string_handling.map(|revert_string_handling| DebuggingSettings {
revert_strings: match revert_string_handling {
crate::RevertString::Default => Some(RevertStrings::Default),
crate::RevertString::Debug => Some(RevertStrings::Debug),
crate::RevertString::Strip => Some(RevertStrings::Strip),
crate::RevertString::VerboseDebug => Some(RevertStrings::VerboseDebug),
},
debug_info: Default::default(),
}),
..Default::default()
},
};
+18 -2
@@ -96,10 +96,19 @@ pub struct Arguments {
#[arg(long, default_value = "1")]
pub number_of_nodes: usize,
/// Determines the number of threads that will be used.
#[arg(long, default_value = "12")]
/// Determines the number of tokio worker threads that will be used.
#[arg(
long,
default_value_t = std::thread::available_parallelism()
.map(|n| n.get())
.unwrap_or(1)
)]
pub number_of_threads: usize,
/// Determines the number of concurrent tasks that will be spawned to run tests. Defaults to 20 x the number of nodes.
#[arg(long)]
pub number_concurrent_tasks: Option<usize>,
/// Extract problems back to the test corpus.
#[arg(short, long = "extract-problems")]
pub extract_problems: bool,
@@ -134,6 +143,13 @@ impl Arguments {
panic!("should have a workdir configured")
}
/// Return the number of concurrent tasks to run. This is provided via the
/// `--number-concurrent-tasks` argument, and otherwise defaults to `--number-of-nodes` * 20.
pub fn number_of_concurrent_tasks(&self) -> usize {
self.number_concurrent_tasks
.unwrap_or(20 * self.number_of_nodes)
}
/// Try to parse `self.account` into a [PrivateKeySigner],
/// panicking on error.
pub fn wallet(&self) -> EthereumWallet {
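
A stripped-down sketch of how the two new defaults resolve at runtime, using clap's derive API (flag and field names are taken from the diff; the struct is otherwise simplified and is not the crate's actual `Arguments` type):

use clap::Parser;

#[derive(Parser, Debug)]
struct Args {
    /// Tokio worker threads; falls back to the machine's available parallelism.
    #[arg(
        long,
        default_value_t = std::thread::available_parallelism()
            .map(|n| n.get())
            .unwrap_or(1)
    )]
    number_of_threads: usize,

    /// Number of nodes to spin up.
    #[arg(long, default_value = "1")]
    number_of_nodes: usize,

    /// Cap on concurrently running test cases; defaults to 20 per node.
    #[arg(long)]
    number_concurrent_tasks: Option<usize>,
}

impl Args {
    /// Mirrors `Arguments::number_of_concurrent_tasks` from the diff.
    fn number_of_concurrent_tasks(&self) -> usize {
        self.number_concurrent_tasks
            .unwrap_or(20 * self.number_of_nodes)
    }
}

fn main() {
    // e.g. `prog --number-of-nodes 4` => 80 concurrent tasks unless overridden.
    let args = Args::parse();
    println!(
        "threads = {}, concurrent tasks = {}",
        args.number_of_threads,
        args.number_of_concurrent_tasks()
    );
}
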
+186 -176
@@ -1,6 +1,6 @@
use std::{
collections::HashMap,
path::Path,
path::{Path, PathBuf},
sync::{Arc, LazyLock},
time::Instant,
};
@@ -18,7 +18,7 @@ use revive_dt_common::iterators::FilesWithExtensionIterator;
use revive_dt_node_interaction::EthereumNode;
use semver::Version;
use temp_dir::TempDir;
use tokio::sync::{Mutex, RwLock};
use tokio::sync::{Mutex, RwLock, mpsc};
use tracing::{Instrument, Level};
use tracing_subscriber::{EnvFilter, FmtSubscriber};
@@ -41,15 +41,28 @@ use revive_dt_report::reporter::{Report, Span};
static TEMP_DIR: LazyLock<TempDir> = LazyLock::new(|| TempDir::new().unwrap());
type CompilationCache<'a> = Arc<
type CompilationCache = Arc<
RwLock<
HashMap<
(&'a Path, SolcMode, TestingPlatform),
(PathBuf, SolcMode, TestingPlatform),
Arc<Mutex<Option<Arc<(Version, CompilerOutput)>>>>,
>,
>,
>;
/// This represents a single "test": a metadata file, a mode, a path, and one case from that file.
#[derive(Clone)]
struct Test {
metadata: Metadata,
path: PathBuf,
mode: SolcMode,
case_idx: usize,
case: Case,
}
/// This represents the result that we gather from running a single test case.
type CaseResult = Result<usize, anyhow::Error>;
fn main() -> anyhow::Result<()> {
let args = init_cli()?;
@@ -120,7 +133,7 @@ fn collect_corpora(args: &Arguments) -> anyhow::Result<HashMap<Corpus, Vec<Metad
async fn run_driver<L, F>(
args: &Arguments,
tests: &[MetadataFile],
metadata_files: &[MetadataFile],
span: Span,
) -> anyhow::Result<()>
where
@@ -129,10 +142,25 @@ where
L::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
F::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
{
let leader_nodes = NodePool::<L::Blockchain>::new(args)?;
let follower_nodes = NodePool::<F::Blockchain>::new(args)?;
let (report_tx, report_rx) = mpsc::unbounded_channel::<(Test, CaseResult)>();
let test_cases = tests
let tests = prepare_tests::<L, F>(metadata_files);
let driver_task = start_driver_task::<L, F>(args, tests, span, report_tx)?;
let status_reporter_task = start_reporter_task(report_rx);
tokio::join!(status_reporter_task, driver_task);
Ok(())
}
fn prepare_tests<L, F>(metadata_files: &[MetadataFile]) -> impl Iterator<Item = Test>
where
L: Platform,
F: Platform,
L::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
F::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
{
metadata_files
.iter()
.flat_map(
|MetadataFile {
@@ -198,188 +226,159 @@ where
}
None => true,
})
.collect::<Vec<_>>();
let metadata_case_status = Arc::new(RwLock::new(test_cases.iter().fold(
HashMap::<_, HashMap<_, _>>::new(),
|mut map, (path, _, case_idx, case, solc_mode)| {
map.entry((path.to_path_buf(), solc_mode.clone()))
.or_default()
.insert((CaseIdx::new(*case_idx), case.name.clone()), None::<bool>);
map
},
)));
let status_reporter_task = {
let metadata_case_status = metadata_case_status.clone();
let start = Instant::now();
async move {
const GREEN: &str = "\x1B[32m";
const RED: &str = "\x1B[31m";
const RESET: &str = "\x1B[0m";
let mut entries_to_delete = Vec::new();
let mut number_of_successes = 0;
let mut number_of_failures = 0;
loop {
let metadata_case_status_read = metadata_case_status.read().await;
if metadata_case_status_read.is_empty() {
break;
}
for ((metadata_file_path, solc_mode), case_status) in
metadata_case_status_read.iter()
{
if case_status.values().any(|value| value.is_none()) {
continue;
}
let contains_failures = case_status
.values()
.any(|value| value.is_some_and(|value| !value));
if !contains_failures {
eprintln!(
"{}Succeeded:{} {} - {:?}",
GREEN,
RESET,
metadata_file_path.display(),
solc_mode
)
} else {
eprintln!(
"{}Failed:{} {} - {:?}",
RED,
RESET,
metadata_file_path.display(),
solc_mode
)
};
number_of_successes += case_status
.values()
.filter(|value| value.is_some_and(|value| value))
.count();
number_of_failures += case_status
.values()
.filter(|value| value.is_some_and(|value| !value))
.count();
let mut case_status = case_status
.iter()
.map(|((case_idx, case_name), case_status)| {
(case_idx.into_inner(), case_name, case_status.unwrap())
})
.collect::<Vec<_>>();
case_status.sort_by(|a, b| a.0.cmp(&b.0));
for (case_idx, case_name, case_status) in case_status.into_iter() {
if case_status {
eprintln!(
" {GREEN}Case Succeeded:{RESET} {} - Case Idx: {case_idx}",
case_name
.as_ref()
.map(|string| string.as_str())
.unwrap_or("Unnamed case")
)
} else {
eprintln!(
" {RED}Case Failed:{RESET} {} - Case Idx: {case_idx}",
case_name
.as_ref()
.map(|string| string.as_str())
.unwrap_or("Unnamed case")
)
};
}
eprintln!();
entries_to_delete.push((metadata_file_path.clone(), solc_mode.clone()));
}
drop(metadata_case_status_read);
let mut metadata_case_status_write = metadata_case_status.write().await;
for entry in entries_to_delete.drain(..) {
metadata_case_status_write.remove(&entry);
}
tokio::time::sleep(std::time::Duration::from_secs(3)).await;
.map(|(metadata_file_path, metadata, case_idx, case, solc_mode)| {
Test {
metadata: metadata.clone(),
path: metadata_file_path.to_path_buf(),
mode: solc_mode,
case_idx,
case: case.clone(),
}
})
}
let elapsed = start.elapsed();
eprintln!(
"{GREEN}{}{RESET} cases succeeded, {RED}{}{RESET} cases failed in {} seconds",
number_of_successes,
number_of_failures,
elapsed.as_secs()
);
}
};
fn start_driver_task<L, F>(
args: &Arguments,
tests: impl Iterator<Item = Test>,
span: Span,
report_tx: mpsc::UnboundedSender<(Test, CaseResult)>,
) -> anyhow::Result<impl Future<Output = ()>>
where
L: Platform,
F: Platform,
L::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
F::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
{
let leader_nodes = Arc::new(NodePool::<L::Blockchain>::new(args)?);
let follower_nodes = Arc::new(NodePool::<F::Blockchain>::new(args)?);
let compilation_cache = Arc::new(RwLock::new(HashMap::new()));
let driver_task = futures::stream::iter(test_cases).for_each_concurrent(
None,
|(metadata_file_path, metadata, case_idx, case, solc_mode)| {
let number_concurrent_tasks = args.number_of_concurrent_tasks();
Ok(futures::stream::iter(tests).for_each_concurrent(
// We want to limit the concurrent tasks here because:
//
// 1. We don't want to overwhelm the nodes with too many requests, leading to responses timing out.
// 2. We don't want to open too many files at once, leading to the OS running out of file descriptors.
//
// By default, we allow a maximum of 20 concurrent tasks per node in order to limit (1), and assume that
// this number will automatically be low enough to address (2). The user can override this.
Some(number_concurrent_tasks),
move |test| {
let leader_nodes = leader_nodes.clone();
let follower_nodes = follower_nodes.clone();
let compilation_cache = compilation_cache.clone();
let leader_node = leader_nodes.round_robbin();
let follower_node = follower_nodes.round_robbin();
let tracing_span = tracing::span!(
Level::INFO,
"Running driver",
metadata_file_path = %metadata_file_path.display(),
case_idx = case_idx,
solc_mode = ?solc_mode,
);
let metadata_case_status = metadata_case_status.clone();
let report_tx = report_tx.clone();
async move {
let leader_node = leader_nodes.round_robbin();
let follower_node = follower_nodes.round_robbin();
let tracing_span = tracing::span!(
Level::INFO,
"Running driver",
metadata_file_path = %test.path.display(),
case_idx = ?test.case_idx,
solc_mode = ?test.mode,
);
let result = handle_case_driver::<L, F>(
metadata_file_path.as_path(),
metadata,
case_idx.into(),
case,
solc_mode.clone(),
&test.path,
&test.metadata,
test.case_idx.into(),
&test.case,
test.mode.clone(),
args,
compilation_cache.clone(),
leader_node,
follower_node,
span,
)
.instrument(tracing_span)
.await;
let mut metadata_case_status = metadata_case_status.write().await;
match result {
Ok(inputs_executed) => {
tracing::info!(inputs_executed, "Execution succeeded");
metadata_case_status
.entry((metadata_file_path.clone(), solc_mode))
.or_default()
.insert((CaseIdx::new(case_idx), case.name.clone()), Some(true));
}
Err(error) => {
metadata_case_status
.entry((metadata_file_path.clone(), solc_mode))
.or_default()
.insert((CaseIdx::new(case_idx), case.name.clone()), Some(false));
tracing::error!(%error, "Execution failed")
}
}
tracing::info!("Execution completed");
report_tx
.send((test, result))
.expect("Failed to send report");
}
.instrument(tracing_span)
},
))
}
async fn start_reporter_task(mut report_rx: mpsc::UnboundedReceiver<(Test, CaseResult)>) {
let start = Instant::now();
const GREEN: &str = "\x1B[32m";
const RED: &str = "\x1B[31m";
const COLOUR_RESET: &str = "\x1B[0m";
const BOLD: &str = "\x1B[1m";
const BOLD_RESET: &str = "\x1B[22m";
let mut number_of_successes = 0;
let mut number_of_failures = 0;
let mut failures = vec![];
// Wait for reports to come from our test runner. When the channel closes, this ends.
while let Some((test, case_result)) = report_rx.recv().await {
let case_name = test.case.name.as_deref().unwrap_or("unnamed_case");
let case_idx = test.case_idx;
let test_path = test.path.display();
let test_mode = test.mode.clone();
match case_result {
Ok(_inputs) => {
number_of_successes += 1;
eprintln!(
"{GREEN}Case Succeeded:{COLOUR_RESET} {test_path} -> {case_name}:{case_idx} (mode: {test_mode:?})"
);
}
Err(err) => {
number_of_failures += 1;
eprintln!(
"{RED}Case Failed:{COLOUR_RESET} {test_path} -> {case_name}:{case_idx} (mode: {test_mode:?})"
);
failures.push((test, err));
}
}
}
eprintln!();
let elapsed = start.elapsed();
// Now, log the failures with more complete errors at the bottom, like `cargo test` does, so
// that we don't have to scroll through the entire output to find them.
if !failures.is_empty() {
eprintln!("{BOLD}Failures:{BOLD_RESET}\n");
for failure in failures {
let (test, err) = failure;
let case_name = test.case.name.as_deref().unwrap_or("unnamed_case");
let case_idx = test.case_idx;
let test_path = test.path.display();
let test_mode = test.mode.clone();
eprintln!(
"---- {RED}Case Failed:{COLOUR_RESET} {test_path} -> {case_name}:{case_idx} (mode: {test_mode:?}) ----\n\n{err}\n"
);
}
}
// Summary at the end.
eprintln!(
"{} cases: {GREEN}{number_of_successes}{COLOUR_RESET} cases succeeded, {RED}{number_of_failures}{COLOUR_RESET} cases failed in {} seconds",
number_of_successes + number_of_failures,
elapsed.as_secs()
);
tokio::join!(status_reporter_task, driver_task);
Ok(())
}
#[allow(clippy::too_many_arguments)]
async fn handle_case_driver<'a, L, F>(
metadata_file_path: &'a Path,
metadata: &'a Metadata,
async fn handle_case_driver<L, F>(
metadata_file_path: &Path,
metadata: &Metadata,
case_idx: CaseIdx,
case: &Case,
mode: SolcMode,
config: &Arguments,
compilation_cache: CompilationCache<'a>,
compilation_cache: CompilationCache,
leader_node: &L::Blockchain,
follower_node: &F::Blockchain,
_: Span,
@@ -520,11 +519,9 @@ where
);
let Some(leader_library_address) = leader_receipt.contract_address else {
tracing::error!("Contract deployment transaction didn't return an address");
anyhow::bail!("Contract deployment didn't return an address");
};
let Some(follower_library_address) = follower_receipt.contract_address else {
tracing::error!("Contract deployment transaction didn't return an address");
anyhow::bail!("Contract deployment didn't return an address");
};
@@ -554,8 +551,16 @@ where
.any(|(code, _)| !code.chars().all(|char| char.is_ascii_hexdigit()));
let (leader_compiled_contracts, follower_compiled_contracts) =
if metadata_file_contains_libraries && compiled_contracts_require_linking {
let leader_key = (metadata_file_path, mode.clone(), L::config_id());
let follower_key = (metadata_file_path, mode.clone(), L::config_id());
let leader_key = (
metadata_file_path.to_path_buf(),
mode.clone(),
L::config_id(),
);
let follower_key = (
metadata_file_path.to_path_buf(),
mode.clone(),
F::config_id(),
);
{
let mut cache = compilation_cache.write().await;
cache.remove(&leader_key);
@@ -609,15 +614,19 @@ where
driver.execute().await
}
async fn get_or_build_contracts<'a, P: Platform>(
metadata: &'a Metadata,
metadata_file_path: &'a Path,
async fn get_or_build_contracts<P: Platform>(
metadata: &Metadata,
metadata_file_path: &Path,
mode: SolcMode,
config: &Arguments,
compilation_cache: CompilationCache<'a>,
compilation_cache: CompilationCache,
deployed_libraries: &HashMap<ContractInstance, (Address, JsonAbi)>,
) -> anyhow::Result<Arc<(Version, CompilerOutput)>> {
let key = (metadata_file_path, mode.clone(), P::config_id());
let key = (
metadata_file_path.to_path_buf(),
mode.clone(),
P::config_id(),
);
if let Some(compilation_artifact) = compilation_cache.read().await.get(&key).cloned() {
let mut compilation_artifact = compilation_artifact.lock().await;
match *compilation_artifact {
@@ -705,6 +714,7 @@ async fn compile_contracts<P: Platform>(
// library.
compiler = FilesWithExtensionIterator::new(metadata.directory()?)
.with_allowed_extension("sol")
.with_use_cached_fs(true)
.fold(compiler, |compiler, path| {
compiler.with_library(&path, library_ident.as_str(), *library_address)
});
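
The restructuring above boils down to one pattern: the driver fans cases out over a bounded `for_each_concurrent`, each task sends its outcome through an mpsc channel, and a single reporter task owns all printing plus the final summary. A stripped-down, self-contained sketch of that shape (the `(Test, CaseResult)` payload and the 20-tasks-per-node bound come from the diff; the rest is simplified stand-in code):

use futures::StreamExt;
use tokio::sync::mpsc;

#[derive(Debug)]
struct Test {
    path: String,
    case_idx: usize,
}

type CaseResult = Result<usize, anyhow::Error>;

#[tokio::main]
async fn main() {
    let (report_tx, mut report_rx) = mpsc::unbounded_channel::<(Test, CaseResult)>();

    let tests = (0..50).map(|case_idx| Test {
        path: "corpus/Example.json".to_string(),
        case_idx,
    });

    // Driver: run at most `max_concurrent` cases at once and never print from here;
    // every outcome goes through the channel instead of a shared map behind a lock.
    let max_concurrent = 20;
    let driver = async move {
        futures::stream::iter(tests)
            .for_each_concurrent(Some(max_concurrent), |test| {
                let report_tx = report_tx.clone();
                async move {
                    // Stand-in for `handle_case_driver`: fail every seventh case.
                    let result: CaseResult = if test.case_idx % 7 == 0 {
                        Err(anyhow::anyhow!("simulated failure"))
                    } else {
                        Ok(1)
                    };
                    report_tx.send((test, result)).expect("reporter hung up");
                }
            })
            .await;
        // `report_tx` drops here, closing the channel and ending the reporter loop.
    };

    // Reporter: the single consumer owns all output, so lines never interleave and
    // nothing blocks on a mutex while tests are running.
    let reporter = async move {
        let (mut ok, mut failed) = (0usize, 0usize);
        while let Some((test, result)) = report_rx.recv().await {
            match result {
                Ok(_) => ok += 1,
                Err(error) => {
                    failed += 1;
                    eprintln!("failed: {}:{} ({error})", test.path, test.case_idx);
                }
            }
        }
        eprintln!("{ok} cases succeeded, {failed} cases failed");
    };

    tokio::join!(driver, reporter);
}

(Crates assumed: tokio with the "macros", "rt-multi-thread" and "sync" features, futures, and anyhow.)
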
+4 -4
@@ -3,6 +3,7 @@ use std::{
path::{Path, PathBuf},
};
use revive_dt_common::cached_fs::read_dir;
use serde::{Deserialize, Serialize};
use crate::metadata::MetadataFile;
@@ -54,7 +55,7 @@ impl Corpus {
/// `path` is expected to be a directory.
pub fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
if path.is_dir() {
let dir_entry = match std::fs::read_dir(path) {
let dir_entry = match read_dir(path) {
Ok(dir_entry) => dir_entry,
Err(error) => {
tracing::error!("failed to read dir '{}': {error}", path.display());
@@ -62,8 +63,8 @@ pub fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
}
};
for entry in dir_entry {
let entry = match entry {
for path in dir_entry {
let path = match path {
Ok(entry) => entry,
Err(error) => {
tracing::error!("error reading dir entry: {error}");
@@ -71,7 +72,6 @@ pub fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
}
};
let path = entry.path();
if path.is_dir() {
collect_metadata(&path, tests);
continue;
+38 -3
@@ -2,7 +2,7 @@ use std::{
cmp::Ordering,
collections::BTreeMap,
fmt::Display,
fs::{File, read_to_string},
fs::File,
ops::Deref,
path::{Path, PathBuf},
str::FromStr,
@@ -11,7 +11,9 @@ use std::{
use serde::{Deserialize, Serialize};
use revive_common::EVMVersion;
use revive_dt_common::{iterators::FilesWithExtensionIterator, macros::define_wrapper_type};
use revive_dt_common::{
cached_fs::read_to_string, iterators::FilesWithExtensionIterator, macros::define_wrapper_type,
};
use crate::{
case::Case,
@@ -75,6 +77,12 @@ pub struct Metadata {
/// be run if the evm version of the nodes matches the evm version specified here.
#[serde(skip_serializing_if = "Option::is_none")]
pub required_evm_version: Option<EvmVersionRequirement>,
/// A set of compilation directives that will be passed to the compiler whenever the contracts for
/// the test are being compiled. Note that this differs from the [`Mode`]s in that a [`Mode`] is
/// just a filter for when a test can run whereas this is an instruction to the compiler.
#[serde(skip_serializing_if = "Option::is_none")]
pub compiler_directives: Option<CompilationDirectives>,
}
impl Metadata {
@@ -253,7 +261,9 @@ impl Metadata {
Ok(Box::new(std::iter::once(metadata_file_path.clone())))
} else {
Ok(Box::new(
FilesWithExtensionIterator::new(self.directory()?).with_allowed_extension("sol"),
FilesWithExtensionIterator::new(self.directory()?)
.with_allowed_extension("sol")
.with_use_cached_fs(true),
))
}
}
@@ -490,6 +500,31 @@ impl From<EvmVersionRequirement> for String {
}
}
/// A set of compilation directives that will be passed to the compiler whenever the contracts for
/// the test are being compiled. Note that this differs from the [`Mode`]s in that a [`Mode`] is
/// just a filter for when a test can run whereas this is an instruction to the compiler.
#[derive(
Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Serialize, Deserialize,
)]
pub struct CompilationDirectives {
/// Defines how the revert strings should be handled.
pub revert_string_handling: Option<RevertString>,
}
/// Defines how the compiler should handle revert strings.
#[derive(
Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Serialize, Deserialize,
)]
#[serde(rename_all = "camelCase")]
pub enum RevertString {
#[default]
Default,
Debug,
Strip,
VerboseDebug,
}
#[cfg(test)]
mod test {
use super::*;
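
Because `RevertString` is tagged `#[serde(rename_all = "camelCase")]`, its variants round-trip as camelCase strings. A small sketch of what the new directives block serialises to (types adapted from the diff and trimmed to the serde-relevant derives; serde_json is used here only to show the rename behaviour, and the surrounding metadata format may differ):

use serde::{Deserialize, Serialize};

/// How the compiler should handle revert strings (adapted from the diff).
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
enum RevertString {
    #[default]
    Default,
    Debug,
    Strip,
    VerboseDebug,
}

/// The directives block attached to a metadata file (adapted from the diff).
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize)]
struct CompilationDirectives {
    revert_string_handling: Option<RevertString>,
}

fn main() -> serde_json::Result<()> {
    let directives = CompilationDirectives {
        revert_string_handling: Some(RevertString::VerboseDebug),
    };
    // Prints: {"revert_string_handling":"verboseDebug"}
    println!("{}", serde_json::to_string(&directives)?);

    let parsed: CompilationDirectives =
        serde_json::from_str(r#"{"revert_string_handling":"strip"}"#)?;
    assert_eq!(parsed.revert_string_handling, Some(RevertString::Strip));
    Ok(())
}
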
+2 -1
@@ -1,11 +1,12 @@
//! This crate implements concurrent handling of testing nodes.
use std::{
fs::read_to_string,
sync::atomic::{AtomicUsize, Ordering},
thread,
};
use revive_dt_common::cached_fs::read_to_string;
use anyhow::Context;
use revive_dt_config::Arguments;