Add a cached fs abstraction

This commit is contained in:
Omar Abdulla
2025-08-14 13:12:23 +03:00
parent f2045db0e9
commit 7f4fadf7b1
7 changed files with 83 additions and 41 deletions
@@ -0,0 +1,38 @@
//! Implements a cached file system: each file is read from disk at most once into
//! memory, and any subsequent read of the same path is served from the in-memory cache.
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::{Arc, LazyLock},
};
use anyhow::Result;
use tokio::sync::RwLock;
// Process-wide cache mapping a file path to the file's raw bytes. `LazyLock`
// defers construction of the map until the first access, and the tokio
// `RwLock` allows many concurrent cache readers while serializing writers.
// NOTE(review): the `Arc` looks redundant for a `static` — a static already
// lives for the whole program and is shared by reference; consider dropping
// it (the import of `Arc` would need to go with it).
#[allow(clippy::type_complexity)]
static CACHE: LazyLock<Arc<RwLock<HashMap<PathBuf, Vec<u8>>>>> = LazyLock::new(Default::default);
// Zero-sized type used purely as a namespace for the cached read operations.
pub struct CachedFileSystem;
impl CachedFileSystem {
pub async fn read(path: impl AsRef<Path>) -> Result<Vec<u8>> {
let cache_read_lock = CACHE.read().await;
match cache_read_lock.get(path.as_ref()) {
Some(entry) => Ok(entry.clone()),
None => {
drop(cache_read_lock);
let content = std::fs::read(&path)?;
let mut cache_write_lock = CACHE.write().await;
cache_write_lock.insert(path.as_ref().to_path_buf(), content.clone());
Ok(content)
}
}
}
pub async fn read_to_string(path: impl AsRef<Path>) -> Result<String> {
let content = Self::read(path).await?;
String::from_utf8(content).map_err(Into::into)
}
}
+2
View File
@@ -1,3 +1,5 @@
mod cached_file_system;
mod clear_dir;
pub use cached_file_system::*;
pub use clear_dir::*;
+6 -6
View File
@@ -5,7 +5,6 @@
use std::{
collections::HashMap,
fs::read_to_string,
hash::Hash,
path::{Path, PathBuf},
};
@@ -16,7 +15,7 @@ use semver::Version;
use serde::{Deserialize, Serialize};
use revive_common::EVMVersion;
use revive_dt_common::types::VersionOrRequirement;
use revive_dt_common::{fs::CachedFileSystem, types::VersionOrRequirement};
use revive_dt_config::Arguments;
pub mod revive_js;
@@ -123,10 +122,11 @@ where
self
}
pub fn with_source(mut self, path: impl AsRef<Path>) -> anyhow::Result<Self> {
self.input
.sources
.insert(path.as_ref().to_path_buf(), read_to_string(path.as_ref())?);
pub async fn with_source(mut self, path: impl AsRef<Path>) -> anyhow::Result<Self> {
self.input.sources.insert(
path.as_ref().to_path_buf(),
CachedFileSystem::read_to_string(path.as_ref()).await?,
);
Ok(self)
}
+11 -11
View File
@@ -67,7 +67,7 @@ fn main() -> anyhow::Result<()> {
let args = init_cli()?;
let body = async {
for (corpus, tests) in collect_corpora(&args)? {
for (corpus, tests) in collect_corpora(&args).await? {
let span = Span::new(corpus, args.clone())?;
match &args.compile_only {
Some(platform) => compile_corpus(&args, &tests, platform, span).await,
@@ -117,13 +117,13 @@ fn init_cli() -> anyhow::Result<Arguments> {
Ok(args)
}
fn collect_corpora(args: &Arguments) -> anyhow::Result<HashMap<Corpus, Vec<MetadataFile>>> {
async fn collect_corpora(args: &Arguments) -> anyhow::Result<HashMap<Corpus, Vec<MetadataFile>>> {
let mut corpora = HashMap::new();
for path in &args.corpus {
let corpus = Corpus::try_from_path(path)?;
tracing::info!("found corpus: {}", path.display());
let tests = corpus.enumerate_tests();
let tests = corpus.enumerate_tests().await;
tracing::info!("corpus '{}' contains {} tests", &corpus.name, tests.len());
corpora.insert(corpus, tests);
}
@@ -145,7 +145,7 @@ where
let (report_tx, report_rx) = mpsc::unbounded_channel::<(Test, CaseResult)>();
let tests = prepare_tests::<L, F>(metadata_files);
let driver_task = start_driver_task::<L, F>(args, tests, span, report_tx)?;
let driver_task = start_driver_task::<L, F>(args, tests, span, report_tx).await?;
let status_reporter_task = start_reporter_task(report_rx);
tokio::join!(status_reporter_task, driver_task);
@@ -237,7 +237,7 @@ where
})
}
fn start_driver_task<L, F>(
async fn start_driver_task<L, F>(
args: &Arguments,
tests: impl Iterator<Item = Test>,
span: Span,
@@ -249,8 +249,8 @@ where
L::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
F::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
{
let leader_nodes = Arc::new(NodePool::<L::Blockchain>::new(args)?);
let follower_nodes = Arc::new(NodePool::<F::Blockchain>::new(args)?);
let leader_nodes = Arc::new(NodePool::<L::Blockchain>::new(args).await?);
let follower_nodes = Arc::new(NodePool::<F::Blockchain>::new(args).await?);
let compilation_cache = Arc::new(RwLock::new(HashMap::new()));
let number_concurrent_tasks = args.number_of_concurrent_tasks();
@@ -693,12 +693,12 @@ async fn compile_contracts<P: Platform>(
"Compiling contracts"
);
let compiler = Compiler::<P::Compiler>::new()
let mut compiler = Compiler::<P::Compiler>::new()
.with_allow_path(metadata.directory()?)
.with_optimization(mode.solc_optimize());
let mut compiler = metadata
.files_to_compile()?
.try_fold(compiler, |compiler, path| compiler.with_source(&path))?;
for path in metadata.files_to_compile()? {
compiler = compiler.with_source(path).await?;
}
for (library_instance, (library_address, _)) in deployed_libraries.iter() {
let library_ident = &metadata
.contracts
+6 -6
View File
@@ -39,9 +39,9 @@ impl Corpus {
}
/// Scan the corpus base directory and return all tests found.
pub fn enumerate_tests(&self) -> Vec<MetadataFile> {
pub async fn enumerate_tests(&self) -> Vec<MetadataFile> {
let mut tests = Vec::new();
collect_metadata(&self.path, &mut tests);
collect_metadata(&self.path, &mut tests).await;
tests
}
}
@@ -52,7 +52,7 @@ impl Corpus {
/// Found tests are inserted into `tests`.
///
/// `path` is expected to be a directory.
pub fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
pub async fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
if path.is_dir() {
let dir_entry = match std::fs::read_dir(path) {
Ok(dir_entry) => dir_entry,
@@ -73,12 +73,12 @@ pub fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
let path = entry.path();
if path.is_dir() {
collect_metadata(&path, tests);
Box::pin(collect_metadata(&path, tests)).await;
continue;
}
if path.is_file() {
if let Some(metadata) = MetadataFile::try_from_file(&path) {
if let Some(metadata) = MetadataFile::try_from_file(&path).await {
tests.push(metadata)
}
}
@@ -89,7 +89,7 @@ pub fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
return;
};
if extension.eq_ignore_ascii_case("sol") || extension.eq_ignore_ascii_case("json") {
if let Some(metadata) = MetadataFile::try_from_file(path) {
if let Some(metadata) = MetadataFile::try_from_file(path).await {
tests.push(metadata)
}
} else {
+15 -12
View File
@@ -2,7 +2,6 @@ use std::{
cmp::Ordering,
collections::BTreeMap,
fmt::Display,
fs::{File, read_to_string},
ops::Deref,
path::{Path, PathBuf},
str::FromStr,
@@ -11,7 +10,9 @@ use std::{
use serde::{Deserialize, Serialize};
use revive_common::EVMVersion;
use revive_dt_common::{iterators::FilesWithExtensionIterator, macros::define_wrapper_type};
use revive_dt_common::{
fs::CachedFileSystem, iterators::FilesWithExtensionIterator, macros::define_wrapper_type,
};
use crate::{
case::Case,
@@ -29,8 +30,8 @@ pub struct MetadataFile {
}
impl MetadataFile {
pub fn try_from_file(path: &Path) -> Option<Self> {
Metadata::try_from_file(path).map(|metadata| Self {
pub async fn try_from_file(path: &Path) -> Option<Self> {
Metadata::try_from_file(path).await.map(|metadata| Self {
path: path.to_owned(),
content: metadata,
})
@@ -151,7 +152,7 @@ impl Metadata {
///
/// # Panics
/// Expects the supplied `path` to be a file.
pub fn try_from_file(path: &Path) -> Option<Self> {
pub async fn try_from_file(path: &Path) -> Option<Self> {
assert!(path.is_file(), "not a file: {}", path.display());
let Some(file_extension) = path.extension() else {
@@ -160,19 +161,20 @@ impl Metadata {
};
if file_extension == METADATA_FILE_EXTENSION {
return Self::try_from_json(path);
return Self::try_from_json(path).await;
}
if file_extension == SOLIDITY_CASE_FILE_EXTENSION {
return Self::try_from_solidity(path);
return Self::try_from_solidity(path).await;
}
tracing::debug!("ignoring invalid corpus file: {}", path.display());
None
}
fn try_from_json(path: &Path) -> Option<Self> {
let file = File::open(path)
async fn try_from_json(path: &Path) -> Option<Self> {
let content = CachedFileSystem::read(path)
.await
.inspect_err(|error| {
tracing::error!(
"opening JSON test metadata file '{}' error: {error}",
@@ -181,7 +183,7 @@ impl Metadata {
})
.ok()?;
match serde_json::from_reader::<_, Metadata>(file) {
match serde_json::from_slice::<Metadata>(content.as_slice()) {
Ok(mut metadata) => {
metadata.file_path = Some(path.to_path_buf());
Some(metadata)
@@ -196,8 +198,9 @@ impl Metadata {
}
}
fn try_from_solidity(path: &Path) -> Option<Self> {
let spec = read_to_string(path)
async fn try_from_solidity(path: &Path) -> Option<Self> {
let spec = CachedFileSystem::read_to_string(path)
.await
.inspect_err(|error| {
tracing::error!(
"opening JSON test metadata file '{}' error: {error}",
+5 -6
View File
@@ -1,12 +1,12 @@
//! This crate implements concurrent handling of testing node.
use std::{
fs::read_to_string,
sync::atomic::{AtomicUsize, Ordering},
thread,
};
use anyhow::Context;
use revive_dt_common::fs::CachedFileSystem;
use revive_dt_config::Arguments;
use crate::Node;
@@ -23,12 +23,11 @@ where
T: Node + Send + 'static,
{
/// Create a new Pool. This will start as many nodes as there are workers in `config`.
pub fn new(config: &Arguments) -> anyhow::Result<Self> {
pub async fn new(config: &Arguments) -> anyhow::Result<Self> {
let nodes = config.number_of_nodes;
let genesis = read_to_string(&config.genesis_file).context(format!(
"can not read genesis file: {}",
config.genesis_file.display()
))?;
let genesis = CachedFileSystem::read_to_string(&config.genesis_file)
.await
.context("Failed to read genesis file")?;
let mut handles = Vec::with_capacity(nodes);
for _ in 0..nodes {