Compare commits

..

1 Commits

Author SHA1 Message Date
Omar Abdulla 7247eca2e8 Add a cached fs abstraction 2025-08-14 17:38:12 +03:00
13 changed files with 265 additions and 96 deletions
Generated
+140 -2
View File
@@ -1644,6 +1644,15 @@ version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
[[package]]
name = "crossbeam-channel"
version = "0.5.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
dependencies = [
"crossbeam-utils",
]
[[package]] [[package]]
name = "crossbeam-deque" name = "crossbeam-deque"
version = "0.8.6" version = "0.8.6"
@@ -2399,6 +2408,20 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9"
[[package]]
name = "generator"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d18470a76cb7f8ff746cf1f7470914f900252ec36bbc40b569d74b1258446827"
dependencies = [
"cc",
"cfg-if",
"libc",
"log",
"rustversion",
"windows",
]
[[package]] [[package]]
name = "generic-array" name = "generic-array"
version = "0.14.7" version = "0.14.7"
@@ -3165,6 +3188,19 @@ version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
[[package]]
name = "loom"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
dependencies = [
"cfg-if",
"generator",
"scoped-tls",
"tracing",
"tracing-subscriber",
]
[[package]] [[package]]
name = "lru" name = "lru"
version = "0.13.0" version = "0.13.0"
@@ -3247,6 +3283,25 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "moka"
version = "0.12.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9321642ca94a4282428e6ea4af8cc2ca4eac48ac7a6a4ea8f33f76d0ce70926"
dependencies = [
"crossbeam-channel",
"crossbeam-epoch",
"crossbeam-utils",
"loom",
"parking_lot",
"portable-atomic",
"rustc_version 0.4.1",
"smallvec",
"tagptr",
"thiserror 1.0.69",
"uuid",
]
[[package]] [[package]]
name = "native-tls" name = "native-tls"
version = "0.2.14" version = "0.2.14"
@@ -3646,6 +3701,12 @@ dependencies = [
"syn 2.0.101", "syn 2.0.101",
] ]
[[package]]
name = "portable-atomic"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
[[package]] [[package]]
name = "potential_utf" name = "potential_utf"
version = "0.1.2" version = "0.1.2"
@@ -4029,6 +4090,8 @@ name = "revive-dt-common"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"moka",
"once_cell",
"semver 1.0.26", "semver 1.0.26",
"tokio", "tokio",
] ]
@@ -4424,6 +4487,12 @@ dependencies = [
"zeroize", "zeroize",
] ]
[[package]]
name = "scoped-tls"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
[[package]] [[package]]
name = "scopeguard" name = "scopeguard"
version = "1.2.0" version = "1.2.0"
@@ -5276,6 +5345,12 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "tagptr"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417"
[[package]] [[package]]
name = "tap" name = "tap"
version = "1.0.1" version = "1.0.1"
@@ -5808,6 +5883,17 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "uuid"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f33196643e165781c20a5ead5582283a7dacbb87855d867fbc2df3f81eddc1be"
dependencies = [
"getrandom 0.3.3",
"js-sys",
"wasm-bindgen",
]
[[package]] [[package]]
name = "valuable" name = "valuable"
version = "0.1.1" version = "0.1.1"
@@ -6069,6 +6155,28 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows"
version = "0.61.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893"
dependencies = [
"windows-collections",
"windows-core",
"windows-future",
"windows-link",
"windows-numerics",
]
[[package]]
name = "windows-collections"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8"
dependencies = [
"windows-core",
]
[[package]] [[package]]
name = "windows-core" name = "windows-core"
version = "0.61.2" version = "0.61.2"
@@ -6082,6 +6190,17 @@ dependencies = [
"windows-strings 0.4.2", "windows-strings 0.4.2",
] ]
[[package]]
name = "windows-future"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e"
dependencies = [
"windows-core",
"windows-link",
"windows-threading",
]
[[package]] [[package]]
name = "windows-implement" name = "windows-implement"
version = "0.60.0" version = "0.60.0"
@@ -6106,9 +6225,19 @@ dependencies = [
[[package]] [[package]]
name = "windows-link" name = "windows-link"
version = "0.1.1" version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
[[package]]
name = "windows-numerics"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1"
dependencies = [
"windows-core",
"windows-link",
]
[[package]] [[package]]
name = "windows-registry" name = "windows-registry"
@@ -6198,6 +6327,15 @@ dependencies = [
"windows_x86_64_msvc 0.53.0", "windows_x86_64_msvc 0.53.0",
] ]
[[package]]
name = "windows-threading"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6"
dependencies = [
"windows-link",
]
[[package]] [[package]]
name = "windows_aarch64_gnullvm" name = "windows_aarch64_gnullvm"
version = "0.52.6" version = "0.52.6"
+1
View File
@@ -29,6 +29,7 @@ clap = { version = "4", features = ["derive"] }
foundry-compilers-artifacts = { version = "0.18.0" } foundry-compilers-artifacts = { version = "0.18.0" }
futures = { version = "0.3.31" } futures = { version = "0.3.31" }
hex = "0.4.3" hex = "0.4.3"
moka = "0.12.10"
reqwest = { version = "0.12.15", features = ["json"] } reqwest = { version = "0.12.15", features = ["json"] }
once_cell = "1.21" once_cell = "1.21"
semver = { version = "1.0", features = ["serde"] } semver = { version = "1.0", features = ["serde"] }
+2
View File
@@ -10,5 +10,7 @@ rust-version.workspace = true
[dependencies] [dependencies]
anyhow = { workspace = true } anyhow = { workspace = true }
moka = { workspace = true, features = ["sync"] }
once_cell = { workspace = true }
semver = { workspace = true } semver = { workspace = true }
tokio = { workspace = true, default-features = false, features = ["time"] } tokio = { workspace = true, default-features = false, features = ["time"] }
+49
View File
@@ -0,0 +1,49 @@
//! This module implements a cached file system allowing for results to be stored in-memory rather
//! than being queried from the file system again.
use std::fs;
use std::io::{Error, Result};
use std::path::{Path, PathBuf};
use moka::sync::Cache;
use once_cell::sync::Lazy;
/// Reads the entire contents of the file at `path`, serving repeated reads of
/// the same file from an in-memory cache instead of the file system.
///
/// The path is canonicalized before the cache lookup so that different
/// spellings of the same file share a single cache entry. Contents are cached
/// on first read; later modifications of the file on disk will not be
/// observed through this function.
///
/// # Errors
/// Returns an error if the path cannot be canonicalized or the file cannot be
/// read.
pub fn read(path: impl AsRef<Path>) -> Result<Vec<u8>> {
    // Bounded to 10k entries; moka evicts entries beyond that capacity.
    static READ_CACHE: Lazy<Cache<PathBuf, Vec<u8>>> = Lazy::new(|| Cache::new(10_000));
    let canonical = path.as_ref().canonicalize()?;
    if let Some(cached) = READ_CACHE.get(canonical.as_path()) {
        return Ok(cached);
    }
    let content = fs::read(canonical.as_path())?;
    READ_CACHE.insert(canonical, content.clone());
    Ok(content)
}
/// Reads the file at `path` through the cached [`read`] and converts its
/// contents to a `String`.
///
/// # Errors
/// Returns any error produced by [`read`], or an error of kind
/// [`std::io::ErrorKind::InvalidData`] if the contents are not valid UTF-8.
/// The original `FromUtf8Error` is preserved as the error's source so callers
/// can inspect exactly where the invalid byte sequence occurred, rather than
/// receiving only a fixed message.
pub fn read_to_string(path: impl AsRef<Path>) -> Result<String> {
    let content = read(path)?;
    String::from_utf8(content)
        .map_err(|error| Error::new(std::io::ErrorKind::InvalidData, error))
}
/// Reads the entries of the directory at `path`, caching the resulting list of
/// entry paths so that subsequent reads of the same directory are served from
/// memory instead of hitting the file system again.
///
/// The path is canonicalized first so that different spellings of the same
/// directory share one cache entry. Entries are cached on first read: changes
/// to the directory made afterwards will not be observed.
///
/// # Errors
/// Returns an error if the path cannot be canonicalized or the directory
/// cannot be read. Errors for individual entries are silently skipped, since
/// the cached representation cannot store per-entry errors anyway.
pub fn read_dir(path: impl AsRef<Path>) -> Result<Box<dyn Iterator<Item = Result<PathBuf>>>> {
    static READ_DIR_CACHE: Lazy<Cache<PathBuf, Vec<PathBuf>>> = Lazy::new(|| Cache::new(10_000));
    let path = path.as_ref().canonicalize()?;
    let entries = match READ_DIR_CACHE.get(path.as_path()) {
        Some(entries) => entries,
        None => {
            let entries: Vec<PathBuf> = fs::read_dir(path.as_path())?
                .flat_map(|maybe_entry| maybe_entry.map(|entry| entry.path()))
                .collect();
            READ_DIR_CACHE.insert(path, entries.clone());
            // Return the freshly collected entries directly instead of
            // recursively re-invoking `read_dir(path).unwrap()`: the recursion
            // canonicalized and looked up the path a second time, and the
            // `unwrap` could panic if the directory disappeared between the
            // insert and the re-read.
            entries
        }
    };
    Ok(Box::new(entries.into_iter().map(Ok)))
}
@@ -1,38 +0,0 @@
//! Implements a cached file system that allows for files to be read once into memory and then when
//! they're requested to be read again they will be returned from the cache.
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::{Arc, LazyLock},
};
use anyhow::Result;
use tokio::sync::RwLock;
#[allow(clippy::type_complexity)]
static CACHE: LazyLock<Arc<RwLock<HashMap<PathBuf, Vec<u8>>>>> = LazyLock::new(Default::default);
pub struct CachedFileSystem;
impl CachedFileSystem {
pub async fn read(path: impl AsRef<Path>) -> Result<Vec<u8>> {
let cache_read_lock = CACHE.read().await;
match cache_read_lock.get(path.as_ref()) {
Some(entry) => Ok(entry.clone()),
None => {
drop(cache_read_lock);
let content = std::fs::read(&path)?;
let mut cache_write_lock = CACHE.write().await;
cache_write_lock.insert(path.as_ref().to_path_buf(), content.clone());
Ok(content)
}
}
}
pub async fn read_to_string(path: impl AsRef<Path>) -> Result<String> {
let content = Self::read(path).await?;
String::from_utf8(content).map_err(Into::into)
}
}
-2
View File
@@ -1,5 +1,3 @@
mod cached_file_system;
mod clear_dir; mod clear_dir;
pub use cached_file_system::*;
pub use clear_dir::*; pub use clear_dir::*;
@@ -19,6 +19,11 @@ pub struct FilesWithExtensionIterator {
/// this vector then they will be returned when the [`Iterator::next`] method is called. If not /// this vector then they will be returned when the [`Iterator::next`] method is called. If not
/// then we visit one of the next directories to visit. /// then we visit one of the next directories to visit.
files_matching_allowed_extensions: Vec<PathBuf>, files_matching_allowed_extensions: Vec<PathBuf>,
/// This option controls whether the cached file system should be used or not. This could be
/// better for certain cases where the entries in the directories do not change and therefore
/// caching can be used.
use_cached_fs: bool,
} }
impl FilesWithExtensionIterator { impl FilesWithExtensionIterator {
@@ -27,6 +32,7 @@ impl FilesWithExtensionIterator {
allowed_extensions: Default::default(), allowed_extensions: Default::default(),
directories_to_search: vec![root_directory.as_ref().to_path_buf()], directories_to_search: vec![root_directory.as_ref().to_path_buf()],
files_matching_allowed_extensions: Default::default(), files_matching_allowed_extensions: Default::default(),
use_cached_fs: Default::default(),
} }
} }
@@ -37,6 +43,11 @@ impl FilesWithExtensionIterator {
self.allowed_extensions.insert(allowed_extension.into()); self.allowed_extensions.insert(allowed_extension.into());
self self
} }
pub fn with_use_cached_fs(mut self, use_cached_fs: bool) -> Self {
self.use_cached_fs = use_cached_fs;
self
}
} }
impl Iterator for FilesWithExtensionIterator { impl Iterator for FilesWithExtensionIterator {
@@ -49,16 +60,19 @@ impl Iterator for FilesWithExtensionIterator {
let directory_to_search = self.directories_to_search.pop()?; let directory_to_search = self.directories_to_search.pop()?;
// Read all of the entries in the directory. If we failed to read this dir's entires then we let iterator = if self.use_cached_fs {
// elect to just ignore it and look in the next directory, we do that by calling the next let Ok(dir_entries) = crate::cached_fs::read_dir(directory_to_search.as_path()) else {
// method again on the iterator, which is an intentional decision that we made here instead return self.next();
// of panicking. };
let Ok(dir_entries) = std::fs::read_dir(directory_to_search) else { Box::new(dir_entries) as Box<dyn Iterator<Item = std::io::Result<PathBuf>>>
return self.next(); } else {
let Ok(dir_entries) = std::fs::read_dir(directory_to_search) else {
return self.next();
};
Box::new(dir_entries.map(|maybe_entry| maybe_entry.map(|entry| entry.path()))) as Box<_>
}; };
for entry in dir_entries.flatten() { for entry_path in iterator.flatten() {
let entry_path = entry.path();
if entry_path.is_dir() { if entry_path.is_dir() {
self.directories_to_search.push(entry_path) self.directories_to_search.push(entry_path)
} else if entry_path.is_file() } else if entry_path.is_file()
+1
View File
@@ -1,6 +1,7 @@
//! This crate provides common concepts, functionality, types, macros, and more that other crates in //! This crate provides common concepts, functionality, types, macros, and more that other crates in
//! the workspace can benefit from. //! the workspace can benefit from.
pub mod cached_fs;
pub mod fs; pub mod fs;
pub mod futures; pub mod futures;
pub mod iterators; pub mod iterators;
+6 -6
View File
@@ -15,7 +15,8 @@ use semver::Version;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use revive_common::EVMVersion; use revive_common::EVMVersion;
use revive_dt_common::{fs::CachedFileSystem, types::VersionOrRequirement}; use revive_dt_common::cached_fs::read_to_string;
use revive_dt_common::types::VersionOrRequirement;
use revive_dt_config::Arguments; use revive_dt_config::Arguments;
pub mod revive_js; pub mod revive_js;
@@ -122,11 +123,10 @@ where
self self
} }
pub async fn with_source(mut self, path: impl AsRef<Path>) -> anyhow::Result<Self> { pub fn with_source(mut self, path: impl AsRef<Path>) -> anyhow::Result<Self> {
self.input.sources.insert( self.input
path.as_ref().to_path_buf(), .sources
CachedFileSystem::read_to_string(path.as_ref()).await?, .insert(path.as_ref().to_path_buf(), read_to_string(path.as_ref())?);
);
Ok(self) Ok(self)
} }
+12 -11
View File
@@ -67,7 +67,7 @@ fn main() -> anyhow::Result<()> {
let args = init_cli()?; let args = init_cli()?;
let body = async { let body = async {
for (corpus, tests) in collect_corpora(&args).await? { for (corpus, tests) in collect_corpora(&args)? {
let span = Span::new(corpus, args.clone())?; let span = Span::new(corpus, args.clone())?;
match &args.compile_only { match &args.compile_only {
Some(platform) => compile_corpus(&args, &tests, platform, span).await, Some(platform) => compile_corpus(&args, &tests, platform, span).await,
@@ -117,13 +117,13 @@ fn init_cli() -> anyhow::Result<Arguments> {
Ok(args) Ok(args)
} }
async fn collect_corpora(args: &Arguments) -> anyhow::Result<HashMap<Corpus, Vec<MetadataFile>>> { fn collect_corpora(args: &Arguments) -> anyhow::Result<HashMap<Corpus, Vec<MetadataFile>>> {
let mut corpora = HashMap::new(); let mut corpora = HashMap::new();
for path in &args.corpus { for path in &args.corpus {
let corpus = Corpus::try_from_path(path)?; let corpus = Corpus::try_from_path(path)?;
tracing::info!("found corpus: {}", path.display()); tracing::info!("found corpus: {}", path.display());
let tests = corpus.enumerate_tests().await; let tests = corpus.enumerate_tests();
tracing::info!("corpus '{}' contains {} tests", &corpus.name, tests.len()); tracing::info!("corpus '{}' contains {} tests", &corpus.name, tests.len());
corpora.insert(corpus, tests); corpora.insert(corpus, tests);
} }
@@ -145,7 +145,7 @@ where
let (report_tx, report_rx) = mpsc::unbounded_channel::<(Test, CaseResult)>(); let (report_tx, report_rx) = mpsc::unbounded_channel::<(Test, CaseResult)>();
let tests = prepare_tests::<L, F>(metadata_files); let tests = prepare_tests::<L, F>(metadata_files);
let driver_task = start_driver_task::<L, F>(args, tests, span, report_tx).await?; let driver_task = start_driver_task::<L, F>(args, tests, span, report_tx)?;
let status_reporter_task = start_reporter_task(report_rx); let status_reporter_task = start_reporter_task(report_rx);
tokio::join!(status_reporter_task, driver_task); tokio::join!(status_reporter_task, driver_task);
@@ -237,7 +237,7 @@ where
}) })
} }
async fn start_driver_task<L, F>( fn start_driver_task<L, F>(
args: &Arguments, args: &Arguments,
tests: impl Iterator<Item = Test>, tests: impl Iterator<Item = Test>,
span: Span, span: Span,
@@ -249,8 +249,8 @@ where
L::Blockchain: revive_dt_node::Node + Send + Sync + 'static, L::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
F::Blockchain: revive_dt_node::Node + Send + Sync + 'static, F::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
{ {
let leader_nodes = Arc::new(NodePool::<L::Blockchain>::new(args).await?); let leader_nodes = Arc::new(NodePool::<L::Blockchain>::new(args)?);
let follower_nodes = Arc::new(NodePool::<F::Blockchain>::new(args).await?); let follower_nodes = Arc::new(NodePool::<F::Blockchain>::new(args)?);
let compilation_cache = Arc::new(RwLock::new(HashMap::new())); let compilation_cache = Arc::new(RwLock::new(HashMap::new()));
let number_concurrent_tasks = args.number_of_concurrent_tasks(); let number_concurrent_tasks = args.number_of_concurrent_tasks();
@@ -693,12 +693,12 @@ async fn compile_contracts<P: Platform>(
"Compiling contracts" "Compiling contracts"
); );
let mut compiler = Compiler::<P::Compiler>::new() let compiler = Compiler::<P::Compiler>::new()
.with_allow_path(metadata.directory()?) .with_allow_path(metadata.directory()?)
.with_optimization(mode.solc_optimize()); .with_optimization(mode.solc_optimize());
for path in metadata.files_to_compile()? { let mut compiler = metadata
compiler = compiler.with_source(path).await?; .files_to_compile()?
} .try_fold(compiler, |compiler, path| compiler.with_source(&path))?;
for (library_instance, (library_address, _)) in deployed_libraries.iter() { for (library_instance, (library_address, _)) in deployed_libraries.iter() {
let library_ident = &metadata let library_ident = &metadata
.contracts .contracts
@@ -714,6 +714,7 @@ async fn compile_contracts<P: Platform>(
// library. // library.
compiler = FilesWithExtensionIterator::new(metadata.directory()?) compiler = FilesWithExtensionIterator::new(metadata.directory()?)
.with_allowed_extension("sol") .with_allowed_extension("sol")
.with_use_cached_fs(true)
.fold(compiler, |compiler, path| { .fold(compiler, |compiler, path| {
compiler.with_library(&path, library_ident.as_str(), *library_address) compiler.with_library(&path, library_ident.as_str(), *library_address)
}); });
+10 -10
View File
@@ -3,6 +3,7 @@ use std::{
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
use revive_dt_common::cached_fs::read_dir;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::metadata::MetadataFile; use crate::metadata::MetadataFile;
@@ -39,9 +40,9 @@ impl Corpus {
} }
/// Scan the corpus base directory and return all tests found. /// Scan the corpus base directory and return all tests found.
pub async fn enumerate_tests(&self) -> Vec<MetadataFile> { pub fn enumerate_tests(&self) -> Vec<MetadataFile> {
let mut tests = Vec::new(); let mut tests = Vec::new();
collect_metadata(&self.path, &mut tests).await; collect_metadata(&self.path, &mut tests);
tests tests
} }
} }
@@ -52,9 +53,9 @@ impl Corpus {
/// Found tests are inserted into `tests`. /// Found tests are inserted into `tests`.
/// ///
/// `path` is expected to be a directory. /// `path` is expected to be a directory.
pub async fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) { pub fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
if path.is_dir() { if path.is_dir() {
let dir_entry = match std::fs::read_dir(path) { let dir_entry = match read_dir(path) {
Ok(dir_entry) => dir_entry, Ok(dir_entry) => dir_entry,
Err(error) => { Err(error) => {
tracing::error!("failed to read dir '{}': {error}", path.display()); tracing::error!("failed to read dir '{}': {error}", path.display());
@@ -62,8 +63,8 @@ pub async fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
} }
}; };
for entry in dir_entry { for path in dir_entry {
let entry = match entry { let path = match path {
Ok(entry) => entry, Ok(entry) => entry,
Err(error) => { Err(error) => {
tracing::error!("error reading dir entry: {error}"); tracing::error!("error reading dir entry: {error}");
@@ -71,14 +72,13 @@ pub async fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
} }
}; };
let path = entry.path();
if path.is_dir() { if path.is_dir() {
Box::pin(collect_metadata(&path, tests)).await; collect_metadata(&path, tests);
continue; continue;
} }
if path.is_file() { if path.is_file() {
if let Some(metadata) = MetadataFile::try_from_file(&path).await { if let Some(metadata) = MetadataFile::try_from_file(&path) {
tests.push(metadata) tests.push(metadata)
} }
} }
@@ -89,7 +89,7 @@ pub async fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
return; return;
}; };
if extension.eq_ignore_ascii_case("sol") || extension.eq_ignore_ascii_case("json") { if extension.eq_ignore_ascii_case("sol") || extension.eq_ignore_ascii_case("json") {
if let Some(metadata) = MetadataFile::try_from_file(path).await { if let Some(metadata) = MetadataFile::try_from_file(path) {
tests.push(metadata) tests.push(metadata)
} }
} else { } else {
+15 -14
View File
@@ -2,6 +2,7 @@ use std::{
cmp::Ordering, cmp::Ordering,
collections::BTreeMap, collections::BTreeMap,
fmt::Display, fmt::Display,
fs::File,
ops::Deref, ops::Deref,
path::{Path, PathBuf}, path::{Path, PathBuf},
str::FromStr, str::FromStr,
@@ -11,7 +12,7 @@ use serde::{Deserialize, Serialize};
use revive_common::EVMVersion; use revive_common::EVMVersion;
use revive_dt_common::{ use revive_dt_common::{
fs::CachedFileSystem, iterators::FilesWithExtensionIterator, macros::define_wrapper_type, cached_fs::read_to_string, iterators::FilesWithExtensionIterator, macros::define_wrapper_type,
}; };
use crate::{ use crate::{
@@ -30,8 +31,8 @@ pub struct MetadataFile {
} }
impl MetadataFile { impl MetadataFile {
pub async fn try_from_file(path: &Path) -> Option<Self> { pub fn try_from_file(path: &Path) -> Option<Self> {
Metadata::try_from_file(path).await.map(|metadata| Self { Metadata::try_from_file(path).map(|metadata| Self {
path: path.to_owned(), path: path.to_owned(),
content: metadata, content: metadata,
}) })
@@ -152,7 +153,7 @@ impl Metadata {
/// ///
/// # Panics /// # Panics
/// Expects the supplied `path` to be a file. /// Expects the supplied `path` to be a file.
pub async fn try_from_file(path: &Path) -> Option<Self> { pub fn try_from_file(path: &Path) -> Option<Self> {
assert!(path.is_file(), "not a file: {}", path.display()); assert!(path.is_file(), "not a file: {}", path.display());
let Some(file_extension) = path.extension() else { let Some(file_extension) = path.extension() else {
@@ -161,20 +162,19 @@ impl Metadata {
}; };
if file_extension == METADATA_FILE_EXTENSION { if file_extension == METADATA_FILE_EXTENSION {
return Self::try_from_json(path).await; return Self::try_from_json(path);
} }
if file_extension == SOLIDITY_CASE_FILE_EXTENSION { if file_extension == SOLIDITY_CASE_FILE_EXTENSION {
return Self::try_from_solidity(path).await; return Self::try_from_solidity(path);
} }
tracing::debug!("ignoring invalid corpus file: {}", path.display()); tracing::debug!("ignoring invalid corpus file: {}", path.display());
None None
} }
async fn try_from_json(path: &Path) -> Option<Self> { fn try_from_json(path: &Path) -> Option<Self> {
let content = CachedFileSystem::read(path) let file = File::open(path)
.await
.inspect_err(|error| { .inspect_err(|error| {
tracing::error!( tracing::error!(
"opening JSON test metadata file '{}' error: {error}", "opening JSON test metadata file '{}' error: {error}",
@@ -183,7 +183,7 @@ impl Metadata {
}) })
.ok()?; .ok()?;
match serde_json::from_slice::<Metadata>(content.as_slice()) { match serde_json::from_reader::<_, Metadata>(file) {
Ok(mut metadata) => { Ok(mut metadata) => {
metadata.file_path = Some(path.to_path_buf()); metadata.file_path = Some(path.to_path_buf());
Some(metadata) Some(metadata)
@@ -198,9 +198,8 @@ impl Metadata {
} }
} }
async fn try_from_solidity(path: &Path) -> Option<Self> { fn try_from_solidity(path: &Path) -> Option<Self> {
let spec = CachedFileSystem::read_to_string(path) let spec = read_to_string(path)
.await
.inspect_err(|error| { .inspect_err(|error| {
tracing::error!( tracing::error!(
"opening JSON test metadata file '{}' error: {error}", "opening JSON test metadata file '{}' error: {error}",
@@ -262,7 +261,9 @@ impl Metadata {
Ok(Box::new(std::iter::once(metadata_file_path.clone()))) Ok(Box::new(std::iter::once(metadata_file_path.clone())))
} else { } else {
Ok(Box::new( Ok(Box::new(
FilesWithExtensionIterator::new(self.directory()?).with_allowed_extension("sol"), FilesWithExtensionIterator::new(self.directory()?)
.with_allowed_extension("sol")
.with_use_cached_fs(true),
)) ))
} }
} }
+7 -5
View File
@@ -5,8 +5,9 @@ use std::{
thread, thread,
}; };
use revive_dt_common::cached_fs::read_to_string;
use anyhow::Context; use anyhow::Context;
use revive_dt_common::fs::CachedFileSystem;
use revive_dt_config::Arguments; use revive_dt_config::Arguments;
use crate::Node; use crate::Node;
@@ -23,11 +24,12 @@ where
T: Node + Send + 'static, T: Node + Send + 'static,
{ {
/// Create a new Pool. This will start as many nodes as there are workers in `config`. /// Create a new Pool. This will start as many nodes as there are workers in `config`.
pub async fn new(config: &Arguments) -> anyhow::Result<Self> { pub fn new(config: &Arguments) -> anyhow::Result<Self> {
let nodes = config.number_of_nodes; let nodes = config.number_of_nodes;
let genesis = CachedFileSystem::read_to_string(&config.genesis_file) let genesis = read_to_string(&config.genesis_file).context(format!(
.await "can not read genesis file: {}",
.context("Failed to read genesis file")?; config.genesis_file.display()
))?;
let mut handles = Vec::with_capacity(nodes); let mut handles = Vec::with_capacity(nodes);
for _ in 0..nodes { for _ in 0..nodes {