From 09d56f5177595eb405022bb6c5cc50437f7cdaaa Mon Sep 17 00:00:00 2001 From: James Wilson Date: Sat, 16 Aug 2025 12:38:17 +0100 Subject: [PATCH 1/3] Redo how we parse and use modes (#125) * WIP redo how we parse and use modes * test expanding, too * WIP integrate new Mode/ParsedMode into rest of code * First pass integrated new mode bits * fmt * clippy * Remove mode we no longer support from test metadata * Address nits * Add ability for compiler to opt out if it can't work with some Mode/version * Elide viaIR input if compiler does not support it * Improve test output a little; string modes and list ignored tests * Move Mode to common crate * constants.mod, and Display for CaseIdx to use it * fmt * Rename ModePipeline::E/Y * Re-arrange Mode things; ParsedMode in format and Mode etc in common * Move compile check to prepare_tests * Remove now-unused deps * clippy nits * Update fallback tx weights to avoid out of gas errors * Update kitchensink weights too and fmt * Bump default geth timeout to 10s * 30s timeout * Improve geth stdout logging on failure * fix line logging * remove --networkid and arg, back to 5s timeout for geth --- Cargo.lock | 3 + Cargo.toml | 2 + assets/test_metadata.json | 3 +- crates/common/Cargo.toml | 1 + crates/common/src/types/mod.rs | 2 + crates/common/src/types/mode.rs | 167 +++++++++++++ crates/compiler/src/constants.rs | 4 + crates/compiler/src/lib.rs | 28 ++- crates/compiler/src/revive_resolc.rs | 30 ++- crates/compiler/src/solc.rs | 32 ++- crates/config/src/lib.rs | 4 - crates/core/src/main.rs | 120 ++++++--- crates/format/Cargo.toml | 1 + crates/format/src/case.rs | 10 +- crates/format/src/metadata.rs | 29 +-- crates/format/src/mode.rs | 359 +++++++++++++++++++-------- crates/node/src/geth.rs | 18 +- crates/node/src/kitchensink.rs | 6 +- crates/report/Cargo.toml | 1 + crates/report/src/reporter.rs | 17 +- 20 files changed, 627 insertions(+), 210 deletions(-) create mode 100644 crates/common/src/types/mode.rs create mode 100644 
crates/compiler/src/constants.rs diff --git a/Cargo.lock b/Cargo.lock index 501d257..0b375d7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4093,6 +4093,7 @@ dependencies = [ "moka", "once_cell", "semver 1.0.26", + "serde", "tokio", ] @@ -4159,6 +4160,7 @@ dependencies = [ "alloy-primitives", "alloy-sol-types", "anyhow", + "regex", "revive-common", "revive-dt-common", "semver 1.0.26", @@ -4201,6 +4203,7 @@ name = "revive-dt-report" version = "0.1.0" dependencies = [ "anyhow", + "revive-dt-common", "revive-dt-compiler", "revive-dt-config", "revive-dt-format", diff --git a/Cargo.toml b/Cargo.toml index 34ae899..8669e9b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,6 +29,7 @@ clap = { version = "4", features = ["derive"] } foundry-compilers-artifacts = { version = "0.18.0" } futures = { version = "0.3.31" } hex = "0.4.3" +regex = "1" moka = "0.12.10" reqwest = { version = "0.12.15", features = ["json"] } once_cell = "1.21" @@ -44,6 +45,7 @@ sp-core = "36.1.0" sp-runtime = "41.1.0" temp-dir = { version = "0.1.16" } tempfile = "3.3" +thiserror = "2" tokio = { version = "1.47.0", default-features = false, features = [ "rt-multi-thread", "process", diff --git a/assets/test_metadata.json b/assets/test_metadata.json index 127e808..14c1fde 100644 --- a/assets/test_metadata.json +++ b/assets/test_metadata.json @@ -1,8 +1,7 @@ { "modes": [ "Y >=0.8.9", - "E", - "I" + "E" ], "cases": [ { diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 52d7d8a..128e464 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -13,4 +13,5 @@ anyhow = { workspace = true } moka = { workspace = true, features = ["sync"] } once_cell = { workspace = true } semver = { workspace = true } +serde = { workspace = true } tokio = { workspace = true, default-features = false, features = ["time"] } diff --git a/crates/common/src/types/mod.rs b/crates/common/src/types/mod.rs index 4cd063a..0e1c34f 100644 --- a/crates/common/src/types/mod.rs +++ 
b/crates/common/src/types/mod.rs @@ -1,3 +1,5 @@ +mod mode; mod version_or_requirement; +pub use mode::*; pub use version_or_requirement::*; diff --git a/crates/common/src/types/mode.rs b/crates/common/src/types/mode.rs new file mode 100644 index 0000000..c380209 --- /dev/null +++ b/crates/common/src/types/mode.rs @@ -0,0 +1,167 @@ +use crate::types::VersionOrRequirement; +use semver::Version; +use serde::{Deserialize, Serialize}; +use std::fmt::Display; +use std::str::FromStr; + +/// This represents a mode that a given test should be run with, if possible. +/// +/// We obtain this by taking a [`ParsedMode`], which may be looser or more strict +/// in its requirements, and then expanding it out into a list of [`Mode`]s. +/// +/// Use [`ParsedMode::to_test_modes()`] to do this. +#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)] +pub struct Mode { + pub pipeline: ModePipeline, + pub optimize_setting: ModeOptimizerSetting, + pub version: Option, +} + +impl Display for Mode { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.pipeline.fmt(f)?; + f.write_str(" ")?; + self.optimize_setting.fmt(f)?; + + if let Some(version) = &self.version { + f.write_str(" ")?; + version.fmt(f)?; + } + + Ok(()) + } +} + +impl Mode { + /// Return all of the available mode combinations. + pub fn all() -> impl Iterator { + ModePipeline::test_cases().flat_map(|pipeline| { + ModeOptimizerSetting::test_cases().map(move |optimize_setting| Mode { + pipeline, + optimize_setting, + version: None, + }) + }) + } + + /// Resolves the [`Mode`]'s solidity version requirement into a [`VersionOrRequirement`] if + /// the requirement is present on the object. Otherwise, the passed default version is used. + pub fn compiler_version_to_use(&self, default: Version) -> VersionOrRequirement { + match self.version { + Some(ref requirement) => requirement.clone().into(), + None => default.into(), + } + } +} + +/// What do we want the compiler to do? 
+#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] +pub enum ModePipeline { + /// Compile Solidity code via Yul IR + ViaYulIR, + /// Compile Solidity direct to assembly + ViaEVMAssembly, +} + +impl FromStr for ModePipeline { + type Err = anyhow::Error; + fn from_str(s: &str) -> Result { + match s { + // via Yul IR + "Y" => Ok(ModePipeline::ViaYulIR), + // Don't go via Yul IR + "E" => Ok(ModePipeline::ViaEVMAssembly), + // Anything else that we see isn't a mode at all + _ => Err(anyhow::anyhow!( + "Unsupported pipeline '{s}': expected 'Y' or 'E'" + )), + } + } +} + +impl Display for ModePipeline { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ModePipeline::ViaYulIR => f.write_str("Y"), + ModePipeline::ViaEVMAssembly => f.write_str("E"), + } + } +} + +impl ModePipeline { + /// Should we go via Yul IR? + pub fn via_yul_ir(&self) -> bool { + matches!(self, ModePipeline::ViaYulIR) + } + + /// An iterator over the available pipelines that we'd like to test, + /// when an explicit pipeline was not specified. 
+ pub fn test_cases() -> impl Iterator + Clone { + [ModePipeline::ViaYulIR, ModePipeline::ViaEVMAssembly].into_iter() + } +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] +pub enum ModeOptimizerSetting { + /// 0 / -: Don't apply any optimizations + M0, + /// 1: Apply less than default optimizations + M1, + /// 2: Apply the default optimizations + M2, + /// 3 / +: Apply aggressive optimizations + M3, + /// s: Optimize for size + Ms, + /// z: Aggressively optimize for size + Mz, +} + +impl FromStr for ModeOptimizerSetting { + type Err = anyhow::Error; + fn from_str(s: &str) -> Result { + match s { + "M0" => Ok(ModeOptimizerSetting::M0), + "M1" => Ok(ModeOptimizerSetting::M1), + "M2" => Ok(ModeOptimizerSetting::M2), + "M3" => Ok(ModeOptimizerSetting::M3), + "Ms" => Ok(ModeOptimizerSetting::Ms), + "Mz" => Ok(ModeOptimizerSetting::Mz), + _ => Err(anyhow::anyhow!( + "Unsupported optimizer setting '{s}': expected 'M0', 'M1', 'M2', 'M3', 'Ms' or 'Mz'" + )), + } + } +} + +impl Display for ModeOptimizerSetting { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + ModeOptimizerSetting::M0 => f.write_str("M0"), + ModeOptimizerSetting::M1 => f.write_str("M1"), + ModeOptimizerSetting::M2 => f.write_str("M2"), + ModeOptimizerSetting::M3 => f.write_str("M3"), + ModeOptimizerSetting::Ms => f.write_str("Ms"), + ModeOptimizerSetting::Mz => f.write_str("Mz"), + } + } +} + +impl ModeOptimizerSetting { + /// An iterator over the available optimizer settings that we'd like to test, + /// when an explicit optimizer setting was not specified. + pub fn test_cases() -> impl Iterator + Clone { + [ + // No optimizations: + ModeOptimizerSetting::M0, + // Aggressive optimizations: + ModeOptimizerSetting::M3, + ] + .into_iter() + } + + /// Are any optimizations enabled? 
+ pub fn optimizations_enabled(&self) -> bool { + !matches!(self, ModeOptimizerSetting::M0) + } +} diff --git a/crates/compiler/src/constants.rs b/crates/compiler/src/constants.rs new file mode 100644 index 0000000..bdb87f1 --- /dev/null +++ b/crates/compiler/src/constants.rs @@ -0,0 +1,4 @@ +use semver::Version; + +/// This is the first version of solc that supports the `--via-ir` flag / "viaIR" input JSON. +pub const SOLC_VERSION_SUPPORTING_VIA_YUL_IR: Version = Version::new(0, 8, 13); diff --git a/crates/compiler/src/lib.rs b/crates/compiler/src/lib.rs index dc00113..551d101 100644 --- a/crates/compiler/src/lib.rs +++ b/crates/compiler/src/lib.rs @@ -3,6 +3,8 @@ //! - Polkadot revive resolc compiler //! - Polkadot revive Wasm compiler +mod constants; + use std::{ collections::HashMap, hash::Hash, @@ -19,6 +21,9 @@ use revive_dt_common::cached_fs::read_to_string; use revive_dt_common::types::VersionOrRequirement; use revive_dt_config::Arguments; +// Re-export this as it's a part of the compiler interface. +pub use revive_dt_common::types::{Mode, ModeOptimizerSetting, ModePipeline}; + pub mod revive_js; pub mod revive_resolc; pub mod solc; @@ -43,13 +48,20 @@ pub trait SolidityCompiler { ) -> impl Future>; fn version(&self) -> anyhow::Result; + + /// Does the compiler support the provided mode and version settings? + fn supports_mode( + compiler_version: &Version, + optimize_setting: ModeOptimizerSetting, + pipeline: ModePipeline, + ) -> bool; } /// The generic compilation input configuration. 
#[derive(Debug, Clone, Serialize, Deserialize)] pub struct CompilerInput { - pub enable_optimization: Option, - pub via_ir: Option, + pub pipeline: Option, + pub optimization: Option, pub evm_version: Option, pub allow_paths: Vec, pub base_path: Option, @@ -85,8 +97,8 @@ where pub fn new() -> Self { Self { input: CompilerInput { - enable_optimization: Default::default(), - via_ir: Default::default(), + pipeline: Default::default(), + optimization: Default::default(), evm_version: Default::default(), allow_paths: Default::default(), base_path: Default::default(), @@ -98,13 +110,13 @@ where } } - pub fn with_optimization(mut self, value: impl Into>) -> Self { - self.input.enable_optimization = value.into(); + pub fn with_optimization(mut self, value: impl Into>) -> Self { + self.input.optimization = value.into(); self } - pub fn with_via_ir(mut self, value: impl Into>) -> Self { - self.input.via_ir = value.into(); + pub fn with_pipeline(mut self, value: impl Into>) -> Self { + self.input.pipeline = value.into(); self } diff --git a/crates/compiler/src/revive_resolc.rs b/crates/compiler/src/revive_resolc.rs index 0261de8..efa0812 100644 --- a/crates/compiler/src/revive_resolc.rs +++ b/crates/compiler/src/revive_resolc.rs @@ -14,7 +14,8 @@ use revive_solc_json_interface::{ SolcStandardJsonOutput, }; -use crate::{CompilerInput, CompilerOutput, SolidityCompiler}; +use super::constants::SOLC_VERSION_SUPPORTING_VIA_YUL_IR; +use crate::{CompilerInput, CompilerOutput, ModeOptimizerSetting, ModePipeline, SolidityCompiler}; use alloy::json_abi::JsonAbi; use anyhow::Context; @@ -39,9 +40,8 @@ impl SolidityCompiler for Resolc { async fn build( &self, CompilerInput { - enable_optimization, - // Ignored and not honored since this is required for the resolc compilation. 
- via_ir: _via_ir, + pipeline, + optimization, evm_version, allow_paths, base_path, @@ -53,6 +53,12 @@ impl SolidityCompiler for Resolc { }: CompilerInput, additional_options: Self::Options, ) -> anyhow::Result { + if !matches!(pipeline, None | Some(ModePipeline::ViaYulIR)) { + anyhow::bail!( + "Resolc only supports the Y (via Yul IR) pipeline, but the provided pipeline is {pipeline:?}" + ); + } + let input = SolcStandardJsonInput { language: SolcStandardJsonInputLanguage::Solidity, sources: sources @@ -81,7 +87,9 @@ impl SolidityCompiler for Resolc { output_selection: Some(SolcStandardJsonInputSettingsSelection::new_required()), via_ir: Some(true), optimizer: SolcStandardJsonInputSettingsOptimizer::new( - enable_optimization.unwrap_or(false), + optimization + .unwrap_or(ModeOptimizerSetting::M0) + .optimizations_enabled(), None, &Version::new(0, 0, 0), false, @@ -232,6 +240,18 @@ impl SolidityCompiler for Resolc { Version::parse(version_string).map_err(Into::into) } + + fn supports_mode( + compiler_version: &Version, + _optimize_setting: ModeOptimizerSetting, + pipeline: ModePipeline, + ) -> bool { + // We only support the Y (IE compile via Yul IR) mode here, which also means that we can + // only use solc version 0.8.13 and above. We must always compile via Yul IR as resolc + // needs this to translate to LLVM IR and then RISCV. 
+ pipeline == ModePipeline::ViaYulIR + && compiler_version >= &SOLC_VERSION_SUPPORTING_VIA_YUL_IR + } } #[cfg(test)] diff --git a/crates/compiler/src/solc.rs b/crates/compiler/src/solc.rs index f714857..b785b06 100644 --- a/crates/compiler/src/solc.rs +++ b/crates/compiler/src/solc.rs @@ -10,7 +10,8 @@ use revive_dt_common::types::VersionOrRequirement; use revive_dt_config::Arguments; use revive_dt_solc_binaries::download_solc; -use crate::{CompilerInput, CompilerOutput, SolidityCompiler}; +use super::constants::SOLC_VERSION_SUPPORTING_VIA_YUL_IR; +use crate::{CompilerInput, CompilerOutput, ModeOptimizerSetting, ModePipeline, SolidityCompiler}; use anyhow::Context; use foundry_compilers_artifacts::{ @@ -35,8 +36,8 @@ impl SolidityCompiler for Solc { async fn build( &self, CompilerInput { - enable_optimization, - via_ir, + pipeline, + optimization, evm_version, allow_paths, base_path, @@ -46,6 +47,17 @@ impl SolidityCompiler for Solc { }: CompilerInput, _: Self::Options, ) -> anyhow::Result { + let compiler_supports_via_ir = self.version()? >= SOLC_VERSION_SUPPORTING_VIA_YUL_IR; + + // Be careful to entirely omit the viaIR field if the compiler does not support it, + // as it will error if you provide fields it does not know about. Because + // `supports_mode` is called prior to instantiating a compiler, we should never + // ask for something which is invalid. 
+ let via_ir = match (pipeline, compiler_supports_via_ir) { + (pipeline, true) => pipeline.map(|p| p.via_yul_ir()), + (_pipeline, false) => None, + }; + let input = SolcInput { language: SolcLanguage::Solidity, sources: Sources( @@ -56,7 +68,7 @@ impl SolidityCompiler for Solc { ), settings: Settings { optimizer: Optimizer { - enabled: enable_optimization, + enabled: optimization.map(|o| o.optimizations_enabled()), details: Some(Default::default()), ..Default::default() }, @@ -222,6 +234,18 @@ impl SolidityCompiler for Solc { Version::parse(version_string).map_err(Into::into) } + + fn supports_mode( + compiler_version: &Version, + _optimize_setting: ModeOptimizerSetting, + pipeline: ModePipeline, + ) -> bool { + // solc 0.8.13 and above supports --via-ir, and less than that does not. Thus, we support mode E + // (ie no Yul IR) in either case, but only support Y (via Yul IR) if the compiler is new enough. + pipeline == ModePipeline::ViaEVMAssembly + || (pipeline == ModePipeline::ViaYulIR + && compiler_version >= &SOLC_VERSION_SUPPORTING_VIA_YUL_IR) + } } #[cfg(test)] diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index b7871fb..e4fb409 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -58,10 +58,6 @@ pub struct Arguments { #[arg(long = "geth-start-timeout", default_value = "5000")] pub geth_start_timeout: u64, - /// The test network chain ID. - #[arg(short, long = "network-id", default_value = "420420420")] - pub network_id: u64, - /// Configure nodes according to this genesis.json file. 
#[arg(long = "genesis", default_value = "genesis.json")] pub genesis_file: PathBuf, diff --git a/crates/core/src/main.rs b/crates/core/src/main.rs index 8cad37a..a1a9429 100644 --- a/crates/core/src/main.rs +++ b/crates/core/src/main.rs @@ -13,7 +13,8 @@ use alloy::{ }; use anyhow::Context; use clap::Parser; -use futures::StreamExt; +use futures::stream::futures_unordered::FuturesUnordered; +use futures::{Stream, StreamExt}; use revive_dt_common::iterators::FilesWithExtensionIterator; use revive_dt_node_interaction::EthereumNode; use semver::Version; @@ -22,6 +23,7 @@ use tokio::sync::{Mutex, RwLock, mpsc}; use tracing::{Instrument, Level}; use tracing_subscriber::{EnvFilter, FmtSubscriber}; +use revive_dt_common::types::Mode; use revive_dt_compiler::SolidityCompiler; use revive_dt_compiler::{Compiler, CompilerOutput}; use revive_dt_config::*; @@ -34,7 +36,6 @@ use revive_dt_format::{ corpus::Corpus, input::{Input, Step}, metadata::{ContractInstance, ContractPathAndIdent, Metadata, MetadataFile}, - mode::SolcMode, }; use revive_dt_node::pool::NodePool; use revive_dt_report::reporter::{Report, Span}; @@ -44,7 +45,7 @@ static TEMP_DIR: LazyLock = LazyLock::new(|| TempDir::new().unwrap()); type CompilationCache = Arc< RwLock< HashMap< - (PathBuf, SolcMode, TestingPlatform), + (PathBuf, Mode, TestingPlatform), Arc>>>, >, >, @@ -55,8 +56,8 @@ type CompilationCache = Arc< struct Test { metadata: Metadata, path: PathBuf, - mode: SolcMode, - case_idx: usize, + mode: Mode, + case_idx: CaseIdx, case: Case, } @@ -144,7 +145,7 @@ where { let (report_tx, report_rx) = mpsc::unbounded_channel::<(Test, CaseResult)>(); - let tests = prepare_tests::(metadata_files); + let tests = prepare_tests::(args, metadata_files); let driver_task = start_driver_task::(args, tests, span, report_tx)?; let status_reporter_task = start_reporter_task(report_rx); @@ -153,7 +154,10 @@ where Ok(()) } -fn prepare_tests(metadata_files: &[MetadataFile]) -> impl Iterator +fn prepare_tests( + args: 
&Arguments, + metadata_files: &[MetadataFile], +) -> impl Stream<Item = Test> where L: Platform, F: Platform, @@ -231,15 +235,53 @@ where metadata: metadata.clone(), path: metadata_file_path.to_path_buf(), mode: solc_mode, - case_idx, + case_idx: case_idx.into(), case: case.clone(), } }) + .map(async |test| test) + .collect::<FuturesUnordered<_>>() + .filter_map(async move |test| { + // Check that both compilers support this test, else we skip it + let is_supported = does_compiler_support_mode::<L>(args, &test.mode).await.ok().unwrap_or(false) && + does_compiler_support_mode::<F>(args, &test.mode).await.ok().unwrap_or(false); + + // We filter_map to avoid needing to clone `test`, but return it as-is. + if is_supported { + Some(test) + } else { + // Only warn about tests that we are actually skipping. + tracing::warn!( + metadata_file_path = %test.path.display(), + case_idx = %test.case_idx, + case_name = ?test.case.name, + mode = %test.mode, + "Skipping test as one or both of the compilers don't support it" + ); + None + } + }) +} + +async fn does_compiler_support_mode<P: Platform>( + args: &Arguments, + mode: &Mode, +) -> anyhow::Result<bool> { + let compiler_version_or_requirement = mode.compiler_version_to_use(args.solc.clone()); + let compiler_path = + P::Compiler::get_compiler_executable(args, compiler_version_or_requirement).await?; + let compiler_version = P::Compiler::new(compiler_path.clone()).version()?; + + Ok(P::Compiler::supports_mode( + &compiler_version, + mode.optimize_setting, + mode.pipeline, + )) } fn start_driver_task<L, F>( args: &Arguments, - tests: impl Iterator<Item = Test>, + tests: impl Stream<Item = Test>, span: Span, report_tx: mpsc::UnboundedSender<(Test, CaseResult)>, ) -> anyhow::Result<impl Future<Output = ()>> where @@ -254,7 +296,7 @@ where let compilation_cache = Arc::new(RwLock::new(HashMap::new())); let number_concurrent_tasks = args.number_of_concurrent_tasks(); - Ok(futures::stream::iter(tests).for_each_concurrent( + Ok(tests.for_each_concurrent( // We want to limit the concurrent tasks here because: // // 1.
We don't want to overwhelm the nodes with too many requests, leading to responses timing out. @@ -284,7 +326,7 @@ where let result = handle_case_driver::( &test.path, &test.metadata, - test.case_idx.into(), + test.case_idx, &test.case, test.mode.clone(), args, @@ -328,13 +370,13 @@ async fn start_reporter_task(mut report_rx: mpsc::UnboundedReceiver<(Test, CaseR Ok(_inputs) => { number_of_successes += 1; eprintln!( - "{GREEN}Case Succeeded:{COLOUR_RESET} {test_path} -> {case_name}:{case_idx} (mode: {test_mode:?})" + "{GREEN}Case Succeeded:{COLOUR_RESET} {test_path} -> {case_name}:{case_idx} (mode: {test_mode})" ); } Err(err) => { number_of_failures += 1; eprintln!( - "{RED}Case Failed:{COLOUR_RESET} {test_path} -> {case_name}:{case_idx} (mode: {test_mode:?})" + "{RED}Case Failed:{COLOUR_RESET} {test_path} -> {case_name}:{case_idx} (mode: {test_mode})" ); failures.push((test, err)); } @@ -357,7 +399,7 @@ async fn start_reporter_task(mut report_rx: mpsc::UnboundedReceiver<(Test, CaseR let test_mode = test.mode.clone(); eprintln!( - "---- {RED}Case Failed:{COLOUR_RESET} {test_path} -> {case_name}:{case_idx} (mode: {test_mode:?}) ----\n\n{err}\n" + "---- {RED}Case Failed:{COLOUR_RESET} {test_path} -> {case_name}:{case_idx} (mode: {test_mode}) ----\n\n{err}\n" ); } } @@ -376,7 +418,7 @@ async fn handle_case_driver( metadata: &Metadata, case_idx: CaseIdx, case: &Case, - mode: SolcMode, + mode: Mode, config: &Arguments, compilation_cache: CompilationCache, leader_node: &L::Blockchain, @@ -617,7 +659,7 @@ where async fn get_or_build_contracts( metadata: &Metadata, metadata_file_path: &Path, - mode: SolcMode, + mode: Mode, config: &Arguments, compilation_cache: CompilationCache, deployed_libraries: &HashMap, @@ -636,16 +678,16 @@ async fn get_or_build_contracts( } None => { tracing::debug!(?key, "Compiled contracts cache miss"); - let compiled_contracts = Arc::new( - compile_contracts::

( - metadata, - metadata_file_path, - &mode, - config, - deployed_libraries, - ) - .await?, - ); + let compiled_contracts = compile_contracts::

( + metadata, + metadata_file_path, + &mode, + config, + deployed_libraries, + ) + .await?; + let compiled_contracts = Arc::new(compiled_contracts); + *compilation_artifact = Some(compiled_contracts.clone()); return Ok(compiled_contracts.clone()); } @@ -660,16 +702,17 @@ async fn get_or_build_contracts( mutex }; let mut compilation_artifact = mutex.lock().await; - let compiled_contracts = Arc::new( - compile_contracts::

( - metadata, - metadata_file_path, - &mode, - config, - deployed_libraries, - ) - .await?, - ); + + let compiled_contracts = compile_contracts::

( + metadata, + metadata_file_path, + &mode, + config, + deployed_libraries, + ) + .await?; + let compiled_contracts = Arc::new(compiled_contracts); + *compilation_artifact = Some(compiled_contracts.clone()); Ok(compiled_contracts.clone()) } @@ -677,7 +720,7 @@ async fn get_or_build_contracts( async fn compile_contracts( metadata: &Metadata, metadata_file_path: &Path, - mode: &SolcMode, + mode: &Mode, config: &Arguments, deployed_libraries: &HashMap, ) -> anyhow::Result<(Version, CompilerOutput)> { @@ -695,7 +738,8 @@ async fn compile_contracts( let compiler = Compiler::::new() .with_allow_path(metadata.directory()?) - .with_optimization(mode.solc_optimize()); + .with_optimization(mode.optimize_setting) + .with_pipeline(mode.pipeline); let mut compiler = metadata .files_to_compile()? .try_fold(compiler, |compiler, path| compiler.with_source(&path))?; diff --git a/crates/format/Cargo.toml b/crates/format/Cargo.toml index 0f50758..0e5745e 100644 --- a/crates/format/Cargo.toml +++ b/crates/format/Cargo.toml @@ -17,6 +17,7 @@ alloy = { workspace = true } alloy-primitives = { workspace = true } alloy-sol-types = { workspace = true } anyhow = { workspace = true } +regex = { workspace = true } tracing = { workspace = true } semver = { workspace = true } serde = { workspace = true, features = ["derive"] } diff --git a/crates/format/src/case.rs b/crates/format/src/case.rs index aafe914..b1bd234 100644 --- a/crates/format/src/case.rs +++ b/crates/format/src/case.rs @@ -4,7 +4,7 @@ use revive_dt_common::macros::define_wrapper_type; use crate::{ input::{Expected, Step}, - mode::Mode, + mode::ParsedMode, }; #[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq)] @@ -16,7 +16,7 @@ pub struct Case { pub comment: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub modes: Option>, + pub modes: Option>, #[serde(rename = "inputs")] pub steps: Vec, @@ -67,3 +67,9 @@ define_wrapper_type!( #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] 
pub struct CaseIdx(usize); ); + +impl std::fmt::Display for CaseIdx { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} diff --git a/crates/format/src/metadata.rs b/crates/format/src/metadata.rs index d5c55be..5e6e07d 100644 --- a/crates/format/src/metadata.rs +++ b/crates/format/src/metadata.rs @@ -13,12 +13,10 @@ use serde::{Deserialize, Serialize}; use revive_common::EVMVersion; use revive_dt_common::{ cached_fs::read_to_string, iterators::FilesWithExtensionIterator, macros::define_wrapper_type, + types::Mode, }; -use crate::{ - case::Case, - mode::{Mode, SolcMode}, -}; +use crate::{case::Case, mode::ParsedMode}; pub const METADATA_FILE_EXTENSION: &str = "json"; pub const SOLIDITY_CASE_FILE_EXTENSION: &str = "sol"; @@ -68,7 +66,7 @@ pub struct Metadata { pub libraries: Option>>, #[serde(skip_serializing_if = "Option::is_none")] - pub modes: Option>, + pub modes: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub file_path: Option, @@ -86,21 +84,12 @@ pub struct Metadata { } impl Metadata { - /// Returns the solc modes of this metadata, inserting a default mode if not present. - pub fn solc_modes(&self) -> Vec { - self.modes - .to_owned() - .unwrap_or_else(|| vec![Mode::Solidity(Default::default())]) - .iter() - .filter_map(|mode| match mode { - Mode::Solidity(solc_mode) => Some(solc_mode), - Mode::Unknown(mode) => { - tracing::debug!("compiler: ignoring unknown mode '{mode}'"); - None - } - }) - .cloned() - .collect() + /// Returns the modes that we should test from this metadata. + pub fn solc_modes(&self) -> Vec { + match &self.modes { + Some(modes) => ParsedMode::many_to_modes(modes.iter()).collect(), + None => Mode::all().collect(), + } } /// Returns the base directory of this metadata. 
diff --git a/crates/format/src/mode.rs b/crates/format/src/mode.rs index 8b1f4c0..7e6dfc8 100644 --- a/crates/format/src/mode.rs +++ b/crates/format/src/mode.rs @@ -1,123 +1,262 @@ -use revive_dt_common::types::VersionOrRequirement; -use semver::Version; -use serde::de::Deserializer; +use regex::Regex; +use revive_dt_common::types::{Mode, ModeOptimizerSetting, ModePipeline}; use serde::{Deserialize, Serialize}; +use std::collections::HashSet; +use std::fmt::Display; +use std::str::FromStr; +use std::sync::LazyLock; -/// Specifies the compilation mode of the test artifact. -#[derive(Hash, Debug, Clone, Eq, PartialEq)] -pub enum Mode { - Solidity(SolcMode), - Unknown(String), +/// This represents a mode that has been parsed from test metadata. +/// +/// Mode strings can take the following form (in pseudo-regex): +/// +/// ```text +/// [YEILV][+-]? (M[0123sz])? <version requirement>? +/// ``` +/// +/// We can parse valid mode strings into [`ParsedMode`] using [`ParsedMode::from_str`]. +#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)] +#[serde(try_from = "String", into = "String")] +pub struct ParsedMode { + pub pipeline: Option<ModePipeline>, + pub optimize_flag: Option<bool>, + pub optimize_setting: Option<ModeOptimizerSetting>, + pub version: Option<semver::VersionReq>, } -/// Specify Solidity specific compiler options. -#[derive(Hash, Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] -pub struct SolcMode { - pub solc_version: Option<semver::VersionReq>, - solc_optimize: Option<bool>, - pub llvm_optimizer_settings: Vec<String>, - mode_string: String, -} +impl FromStr for ParsedMode { + type Err = anyhow::Error; + fn from_str(s: &str) -> Result<Self, Self::Err> { + static REGEX: LazyLock<Regex> = LazyLock::new(|| { + Regex::new(r"(?x) + ^ + (?:(?P<pipeline>[YEILV])(?P<optimize_flag>[+-])?)? # Pipeline to use eg Y, E+, E- + \s* + (?P<optimize_setting>M[a-zA-Z0-9])? # Optimize setting eg M0, Ms, Mz + \s* + (?P<version>[>=<]*\d+(?:\.\d+)*)? # Optional semver version eg >=0.8.0, 0.7, <0.8 + $ + ").unwrap() + }); -impl SolcMode { - /// Try to parse a mode string into a solc mode.
- /// Returns `None` if the string wasn't a solc YUL mode string. - /// - /// The mode string is expected to start with the `Y` ID (YUL ID), - /// optionally followed by `+` or `-` for the solc optimizer settings. - /// - /// Options can be separated by a whitespace contain the following - /// - A solc `SemVer version requirement` string - /// - One or more `-OX` where X is a supposed to be an LLVM opt mode - pub fn parse_from_mode_string(mode_string: &str) -> Option { - let mut result = Self { - mode_string: mode_string.to_string(), - ..Default::default() + let Some(caps) = REGEX.captures(s) else { + anyhow::bail!("Cannot parse mode '{s}' from string"); }; - let mut parts = mode_string.trim().split(" "); - - match parts.next()? { - "Y" => {} - "Y+" => result.solc_optimize = Some(true), - "Y-" => result.solc_optimize = Some(false), - _ => return None, - } - - for part in parts { - if let Ok(solc_version) = semver::VersionReq::parse(part) { - result.solc_version = Some(solc_version); - continue; - } - if let Some(level) = part.strip_prefix("-O") { - result.llvm_optimizer_settings.push(level.to_string()); - continue; - } - panic!("the YUL mode string {mode_string} failed to parse, invalid part: {part}") - } - - Some(result) - } - - /// Returns whether to enable the solc optimizer. - pub fn solc_optimize(&self) -> bool { - self.solc_optimize.unwrap_or(true) - } - - /// Calculate the latest matching solc patch version. Returns: - /// - `latest_supported` if no version request was specified. - /// - A matching version with the same minor version as `latest_supported`, if any. - /// - `None` if no minor version of the `latest_supported` version matches. 
- pub fn last_patch_version(&self, latest_supported: &Version) -> Option { - let Some(version_req) = self.solc_version.as_ref() else { - return Some(latest_supported.to_owned()); + let pipeline = match caps.name("pipeline") { + Some(m) => Some(ModePipeline::from_str(m.as_str())?), + None => None, }; - // lgtm - for patch in (0..latest_supported.patch + 1).rev() { - let version = Version::new(0, latest_supported.minor, patch); - if version_req.matches(&version) { - return Some(version); - } - } + let optimize_flag = caps.name("optimize_flag").map(|m| m.as_str() == "+"); - None - } - - /// Resolves the [`SolcMode`]'s solidity version requirement into a [`VersionOrRequirement`] if - /// the requirement is present on the object. Otherwise, the passed default version is used. - pub fn compiler_version_to_use(&self, default: Version) -> VersionOrRequirement { - match self.solc_version { - Some(ref requirement) => requirement.clone().into(), - None => default.into(), - } - } -} - -impl<'de> Deserialize<'de> for Mode { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let mode_string = String::deserialize(deserializer)?; - - if let Some(solc_mode) = SolcMode::parse_from_mode_string(&mode_string) { - return Ok(Self::Solidity(solc_mode)); - } - - Ok(Self::Unknown(mode_string)) - } -} - -impl Serialize for Mode { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - let string = match self { - Mode::Solidity(solc_mode) => &solc_mode.mode_string, - Mode::Unknown(string) => string, + let optimize_setting = match caps.name("optimize_setting") { + Some(m) => Some(ModeOptimizerSetting::from_str(m.as_str())?), + None => None, }; - string.serialize(serializer) + + let version = match caps.name("version") { + Some(m) => Some(semver::VersionReq::parse(m.as_str()).map_err(|e| { + anyhow::anyhow!("Cannot parse the version requirement '{}': {e}", m.as_str()) + })?), + None => None, + }; + + Ok(ParsedMode { + pipeline, + 
optimize_flag, + optimize_setting, + version, + }) + } +} + +impl Display for ParsedMode { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut has_written = false; + + if let Some(pipeline) = self.pipeline { + pipeline.fmt(f)?; + if let Some(optimize_flag) = self.optimize_flag { + f.write_str(if optimize_flag { "+" } else { "-" })?; + } + has_written = true; + } + + if let Some(optimize_setting) = self.optimize_setting { + if has_written { + f.write_str(" ")?; + } + optimize_setting.fmt(f)?; + has_written = true; + } + + if let Some(version) = &self.version { + if has_written { + f.write_str(" ")?; + } + version.fmt(f)?; + } + + Ok(()) + } +} + +impl From for String { + fn from(parsed_mode: ParsedMode) -> Self { + parsed_mode.to_string() + } +} + +impl TryFrom for ParsedMode { + type Error = anyhow::Error; + fn try_from(value: String) -> Result { + ParsedMode::from_str(&value) + } +} + +impl ParsedMode { + /// This takes a [`ParsedMode`] and expands it into a list of [`Mode`]s that we should try. 
+ pub fn to_modes(&self) -> impl Iterator { + let pipeline_iter = self.pipeline.as_ref().map_or_else( + || EitherIter::A(ModePipeline::test_cases()), + |p| EitherIter::B(std::iter::once(*p)), + ); + + let optimize_flag_setting = self.optimize_flag.map(|flag| { + if flag { + ModeOptimizerSetting::M3 + } else { + ModeOptimizerSetting::M0 + } + }); + + let optimize_flag_iter = match optimize_flag_setting { + Some(setting) => EitherIter::A(std::iter::once(setting)), + None => EitherIter::B(ModeOptimizerSetting::test_cases()), + }; + + let optimize_settings_iter = self.optimize_setting.as_ref().map_or_else( + || EitherIter::A(optimize_flag_iter), + |s| EitherIter::B(std::iter::once(*s)), + ); + + pipeline_iter.flat_map(move |pipeline| { + optimize_settings_iter + .clone() + .map(move |optimize_setting| Mode { + pipeline, + optimize_setting, + version: self.version.clone(), + }) + }) + } + + /// Return a set of [`Mode`]s that correspond to the given [`ParsedMode`]s. + /// This avoids any duplicate entries. + pub fn many_to_modes<'a>( + parsed: impl Iterator, + ) -> impl Iterator { + let modes: HashSet<_> = parsed.flat_map(|p| p.to_modes()).collect(); + modes.into_iter() + } +} + +/// An iterator that could be either of two iterators. 
+#[derive(Clone, Debug)] +enum EitherIter { + A(A), + B(B), +} + +impl Iterator for EitherIter +where + A: Iterator, + B: Iterator, +{ + type Item = A::Item; + fn next(&mut self) -> Option { + match self { + EitherIter::A(iter) => iter.next(), + EitherIter::B(iter) => iter.next(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_parsed_mode_from_str() { + let strings = vec![ + ("Mz", "Mz"), + ("Y", "Y"), + ("Y+", "Y+"), + ("Y-", "Y-"), + ("E", "E"), + ("E+", "E+"), + ("E-", "E-"), + ("Y M0", "Y M0"), + ("Y M1", "Y M1"), + ("Y M2", "Y M2"), + ("Y M3", "Y M3"), + ("Y Ms", "Y Ms"), + ("Y Mz", "Y Mz"), + ("E M0", "E M0"), + ("E M1", "E M1"), + ("E M2", "E M2"), + ("E M3", "E M3"), + ("E Ms", "E Ms"), + ("E Mz", "E Mz"), + // When stringifying semver again, 0.8.0 becomes ^0.8.0 (same meaning) + ("Y 0.8.0", "Y ^0.8.0"), + ("E+ 0.8.0", "E+ ^0.8.0"), + ("Y M3 >=0.8.0", "Y M3 >=0.8.0"), + ("E Mz <0.7.0", "E Mz <0.7.0"), + // We can parse +- _and_ M1/M2 but the latter takes priority. + ("Y+ M1 0.8.0", "Y+ M1 ^0.8.0"), + ("E- M2 0.7.0", "E- M2 ^0.7.0"), + // We don't see this in the wild but it is parsed. 
+ ("<=0.8", "<=0.8"), + ]; + + for (actual, expected) in strings { + let parsed = ParsedMode::from_str(actual) + .expect(format!("Failed to parse mode string '{actual}'").as_str()); + assert_eq!( + expected, + parsed.to_string(), + "Mode string '{actual}' did not parse to '{expected}': got '{parsed}'" + ); + } + } + + #[test] + fn test_parsed_mode_to_test_modes() { + let strings = vec![ + ("Mz", vec!["Y Mz", "E Mz"]), + ("Y", vec!["Y M0", "Y M3"]), + ("E", vec!["E M0", "E M3"]), + ("Y+", vec!["Y M3"]), + ("Y-", vec!["Y M0"]), + ("Y <=0.8", vec!["Y M0 <=0.8", "Y M3 <=0.8"]), + ( + "<=0.8", + vec!["Y M0 <=0.8", "Y M3 <=0.8", "E M0 <=0.8", "E M3 <=0.8"], + ), + ]; + + for (actual, expected) in strings { + let parsed = ParsedMode::from_str(actual) + .expect(format!("Failed to parse mode string '{actual}'").as_str()); + let expected_set: HashSet<_> = expected.into_iter().map(|s| s.to_owned()).collect(); + let actual_set: HashSet<_> = parsed.to_modes().map(|m| m.to_string()).collect(); + + assert_eq!( + expected_set, actual_set, + "Mode string '{actual}' did not expand to '{expected_set:?}': got '{actual_set:?}'" + ); + } } } diff --git a/crates/node/src/geth.rs b/crates/node/src/geth.rs index 09cf6c6..6617351 100644 --- a/crates/node/src/geth.rs +++ b/crates/node/src/geth.rs @@ -60,7 +60,6 @@ pub struct GethNode { geth: PathBuf, id: u32, handle: Option, - network_id: u64, start_timeout: u64, wallet: EthereumWallet, nonce_manager: CachedNonceManager, @@ -165,8 +164,6 @@ impl GethNode { .arg(&self.data_directory) .arg("--ipcpath") .arg(&self.connection_string) - .arg("--networkid") - .arg(self.network_id.to_string()) .arg("--nodiscover") .arg("--maxpeers") .arg("0") @@ -213,6 +210,7 @@ impl GethNode { let maximum_wait_time = Duration::from_millis(self.start_timeout); let mut stderr = BufReader::new(logs_file).lines(); + let mut lines = vec![]; loop { if let Some(Ok(line)) = stderr.next() { if line.contains(Self::ERROR_MARKER) { @@ -221,9 +219,14 @@ impl GethNode { if 
line.contains(Self::READY_MARKER) { return Ok(self); } + lines.push(line); } if Instant::now().duration_since(start_time) > maximum_wait_time { - anyhow::bail!("Timeout in starting geth"); + anyhow::bail!( + "Timeout in starting geth: took longer than {}ms. stdout:\n\n{}\n", + self.start_timeout, + lines.join("\n") + ); } } } @@ -257,7 +260,11 @@ impl GethNode { Box::pin(async move { ProviderBuilder::new() .disable_recommended_fillers() - .filler(FallbackGasFiller::new(500_000_000, 500_000_000, 1)) + .filler(FallbackGasFiller::new( + 25_000_000, + 1_000_000_000, + 1_000_000_000, + )) .filler(ChainIdFiller::default()) .filler(NonceFiller::new(nonce_manager)) .wallet(wallet) @@ -517,7 +524,6 @@ impl Node for GethNode { geth: config.geth.clone(), id, handle: None, - network_id: config.network_id, start_timeout: config.geth_start_timeout, wallet, // We know that we only need to be storing 2 files so we can specify that when creating diff --git a/crates/node/src/kitchensink.rs b/crates/node/src/kitchensink.rs index 03a86ff..f5a9e0a 100644 --- a/crates/node/src/kitchensink.rs +++ b/crates/node/src/kitchensink.rs @@ -367,9 +367,9 @@ impl KitchensinkNode { .disable_recommended_fillers() .network::() .filler(FallbackGasFiller::new( - 30_000_000, - 200_000_000_000, - 3_000_000_000, + 25_000_000, + 1_000_000_000, + 1_000_000_000, )) .filler(ChainIdFiller::default()) .filler(NonceFiller::new(nonce_manager)) diff --git a/crates/report/Cargo.toml b/crates/report/Cargo.toml index 1ffb7b8..d18caab 100644 --- a/crates/report/Cargo.toml +++ b/crates/report/Cargo.toml @@ -8,6 +8,7 @@ repository.workspace = true rust-version.workspace = true [dependencies] +revive-dt-common = { workspace = true } revive-dt-config = { workspace = true } revive-dt-format = { workspace = true } revive-dt-compiler = { workspace = true } diff --git a/crates/report/src/reporter.rs b/crates/report/src/reporter.rs index 9b9303d..e5d0d1f 100644 --- a/crates/report/src/reporter.rs +++ 
b/crates/report/src/reporter.rs @@ -12,18 +12,19 @@ use std::{ }; use anyhow::Context; -use revive_dt_compiler::{CompilerInput, CompilerOutput}; -use serde::{Deserialize, Serialize}; +use serde::Serialize; +use revive_dt_common::types::Mode; +use revive_dt_compiler::{CompilerInput, CompilerOutput}; use revive_dt_config::{Arguments, TestingPlatform}; -use revive_dt_format::{corpus::Corpus, mode::SolcMode}; +use revive_dt_format::corpus::Corpus; use crate::analyzer::CompilerStatistics; pub(crate) static REPORTER: OnceLock> = OnceLock::new(); /// The `Report` datastructure stores all relevant inforamtion required for generating reports. -#[derive(Clone, Debug, Default, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, Serialize)] pub struct Report { /// The configuration used during the test. pub config: Arguments, @@ -41,14 +42,14 @@ pub struct Report { } /// Contains a compiled contract. -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize)] pub struct CompilationTask { /// The observed compiler input. pub json_input: CompilerInput, /// The observed compiler output. pub json_output: Option, /// The observed compiler mode. - pub mode: SolcMode, + pub mode: Mode, /// The observed compiler version. pub compiler_version: String, /// The observed error, if any. @@ -56,7 +57,7 @@ pub struct CompilationTask { } /// Represents a report about a compilation task. -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Serialize)] pub struct CompilationResult { /// The observed compilation task. pub compilation_task: CompilationTask, @@ -65,7 +66,7 @@ pub struct CompilationResult { } /// The [Span] struct indicates the context of what is being reported. -#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +#[derive(Clone, Copy, Debug, Serialize)] pub struct Span { /// The corpus index this belongs to. 
corpus: usize, From 185edcfad9ab1055064be6689b86fc2d39f89ad2 Mon Sep 17 00:00:00 2001 From: Omar Date: Sat, 16 Aug 2025 19:04:13 +0300 Subject: [PATCH 2/3] Cached compiler artifacts (#143) * WIP compilation cache * Implement a persistent compilation cache * Correct the key and value encoding for the cache --- Cargo.lock | 454 ++++++++++++++++++++++++++++- Cargo.toml | 2 + crates/common/src/types/mode.rs | 2 +- crates/compiler/src/lib.rs | 8 + crates/config/src/lib.rs | 4 + crates/core/Cargo.toml | 5 + crates/core/src/cached_compiler.rs | 261 +++++++++++++++++ crates/core/src/driver/mod.rs | 29 +- crates/core/src/main.rs | 359 +++++++---------------- crates/format/src/input.rs | 20 +- crates/format/src/traits.rs | 21 +- crates/node/src/geth.rs | 3 - 12 files changed, 869 insertions(+), 299 deletions(-) create mode 100644 crates/core/src/cached_compiler.rs diff --git a/Cargo.lock b/Cargo.lock index 0b375d7..20b1631 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -659,7 +659,7 @@ checksum = "4f317d20f047b3de4d9728c556e2e9a92c9a507702d2016424cd8be13a74ca5e" dependencies = [ "alloy-json-rpc", "alloy-primitives", - "base64", + "base64 0.22.1", "derive_more 2.0.1", "futures", "futures-utils-wasm", @@ -1189,6 +1189,151 @@ dependencies = [ "serde", ] +[[package]] +name = "async-channel" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" +dependencies = [ + "concurrent-queue", + "event-listener 2.5.3", + "futures-core", +] + +[[package]] +name = "async-channel" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-executor" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bb812ffb58524bdd10860d7d974e2f01cc0950c2438a74ee5ec2e2280c6c4ffa" +dependencies = [ + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "pin-project-lite", + "slab", +] + +[[package]] +name = "async-global-executor" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" +dependencies = [ + "async-channel 2.5.0", + "async-executor", + "async-io", + "async-lock", + "blocking", + "futures-lite", + "once_cell", +] + +[[package]] +name = "async-io" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1237c0ae75a0f3765f58910ff9cdd0a12eeb39ab2f4c7de23262f337f0aacbb3" +dependencies = [ + "async-lock", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix", + "slab", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "async-lock" +version = "3.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc" +dependencies = [ + "event-listener 5.4.1", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-process" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65daa13722ad51e6ab1a1b9c01299142bc75135b337923cfa10e79bbbd669f00" +dependencies = [ + "async-channel 2.5.0", + "async-io", + "async-lock", + "async-signal", + "async-task", + "blocking", + "cfg-if", + "event-listener 5.4.1", + "futures-lite", + "rustix", +] + +[[package]] +name = "async-signal" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7605a4e50d4b06df3898d5a70bf5fde51ed9059b0434b73105193bc27acce0d" +dependencies = [ + "async-io", + "async-lock", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix", + "signal-hook-registry", + "slab", + 
"windows-sys 0.59.0", +] + +[[package]] +name = "async-std" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c8e079a4ab67ae52b7403632e4618815d6db36d2a010cfe41b02c1b1578f93b" +dependencies = [ + "async-channel 1.9.0", + "async-global-executor", + "async-io", + "async-lock", + "async-process", + "crossbeam-utils", + "futures-channel", + "futures-core", + "futures-io", + "futures-lite", + "gloo-timers", + "kv-log-macro", + "log", + "memchr", + "once_cell", + "pin-project-lite", + "pin-utils", + "slab", + "wasm-bindgen-futures", +] + [[package]] name = "async-stream" version = "0.3.6" @@ -1211,6 +1356,12 @@ dependencies = [ "syn 2.0.101", ] +[[package]] +name = "async-task" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" + [[package]] name = "async-trait" version = "0.1.88" @@ -1266,6 +1417,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + [[package]] name = "base64" version = "0.22.1" @@ -1404,6 +1561,19 @@ dependencies = [ "generic-array", ] +[[package]] +name = "blocking" +version = "1.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21" +dependencies = [ + "async-channel 2.5.0", + "async-task", + "futures-io", + "futures-lite", + "piper", +] + [[package]] name = "blst" version = "0.3.14" @@ -1437,6 +1607,29 @@ dependencies = [ "tinyvec", ] +[[package]] +name = "bson" +version = "2.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7969a9ba84b0ff843813e7249eed1678d9b6607ce5a3b8f0a47af3fcf7978e6e" +dependencies = [ + "ahash", + "base64 0.22.1", + "bitvec", + "getrandom 0.2.16", + "getrandom 0.3.3", + "hex", + "indexmap 2.10.0", + "js-sys", + "once_cell", + "rand 0.9.2", + "serde", + "serde_bytes", + "serde_json", + "time", + "uuid", +] + [[package]] name = "bumpalo" version = "3.17.0" @@ -1479,6 +1672,32 @@ dependencies = [ "serde", ] +[[package]] +name = "cacache" +version = "13.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5063741c7b2e260bbede781cf4679632dd90e2718e99f7715e46824b65670b" +dependencies = [ + "async-std", + "digest 0.10.7", + "either", + "futures", + "hex", + "libc", + "memmap2", + "miette", + "reflink-copy", + "serde", + "serde_derive", + "serde_json", + "sha1", + "sha2 0.10.9", + "ssri", + "tempfile", + "thiserror 1.0.69", + "walkdir", +] + [[package]] name = "cc" version = "1.2.25" @@ -1559,6 +1778,15 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2382f75942f4b3be3690fe4f86365e9c853c1587d6ee58212cebf6e2a9ccd101" +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "const-hex" version = "1.14.1" @@ -2120,6 +2348,33 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = 
"event-listener-strategy" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" +dependencies = [ + "event-listener 5.4.1", + "pin-project-lite", +] + [[package]] name = "expander" version = "2.2.1" @@ -2361,6 +2616,19 @@ version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" +[[package]] +name = "futures-lite" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] + [[package]] name = "futures-macro" version = "0.3.31" @@ -2440,8 +2708,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", ] [[package]] @@ -2451,9 +2721,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", + "js-sys", "libc", "r-efi", "wasi 0.14.2+wasi-0.2.4", + "wasm-bindgen", ] [[package]] @@ -2478,6 +2750,18 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" +[[package]] +name = "gloo-timers" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + [[package]] name = "group" version = "0.13.0" @@ -2572,6 +2856,12 @@ version = "0.3.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" +[[package]] +name = "hermit-abi" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" + [[package]] name = "hex" version = "0.4.3" @@ -2724,7 +3014,7 @@ version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1c293b6b3d21eca78250dc7dbebd6b9210ec5530e038cbfe0661b5c47ab06e8" dependencies = [ - "base64", + "base64 0.22.1", "bytes", "futures-channel", "futures-core", @@ -3094,6 +3384,15 @@ dependencies = [ "sha3-asm", ] +[[package]] +name = "kv-log-macro" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" +dependencies = [ + "log", +] + [[package]] name = "lazy_static" version = "1.5.0" @@ -3119,7 +3418,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e79019718125edc905a079a70cfa5f3820bc76139fc91d6f9abc27ea2a887139" dependencies = [ "arrayref", - "base64", + "base64 0.22.1", "digest 0.9.0", "hmac-drbg", "libsecp256k1-core", @@ -3187,6 +3486,9 @@ name = "log" version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" +dependencies = [ + "value-bag", +] [[package]] name = "loom" @@ -3236,6 +3538,15 @@ version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +[[package]] +name = "memmap2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" +dependencies = [ + "libc", +] + [[package]] name = "memory-db" version = "0.32.0" @@ -3257,6 
+3568,29 @@ dependencies = [ "zeroize", ] +[[package]] +name = "miette" +version = "5.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59bb584eaeeab6bd0226ccf3509a69d7936d148cf3d036ad350abe35e8c6856e" +dependencies = [ + "miette-derive", + "once_cell", + "thiserror 1.0.69", + "unicode-width", +] + +[[package]] +name = "miette-derive" +version = "5.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.101", +] + [[package]] name = "mime" version = "0.3.17" @@ -3386,7 +3720,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.9", "libc", ] @@ -3543,6 +3877,12 @@ dependencies = [ "syn 2.0.101", ] +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + [[package]] name = "parking_lot" version = "0.12.4" @@ -3648,6 +3988,17 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" +[[package]] +name = "piper" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" +dependencies = [ + "atomic-waker", + "fastrand", + "futures-io", +] + [[package]] name = "pkcs8" version = "0.10.2" @@ -3701,6 +4052,21 @@ dependencies = [ "syn 2.0.101", ] +[[package]] +name = "polling" +version = "3.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b53a684391ad002dd6a596ceb6c74fd004fdce75f4be2e3f615068abbea5fd50" +dependencies = [ + "cfg-if", + 
"concurrent-queue", + "hermit-abi 0.5.2", + "pin-project-lite", + "rustix", + "tracing", + "windows-sys 0.59.0", +] + [[package]] name = "portable-atomic" version = "1.11.1" @@ -3988,6 +4354,18 @@ dependencies = [ "syn 2.0.101", ] +[[package]] +name = "reflink-copy" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c81d000a2c524133cc00d2f92f019d399e57906c3b7119271a2495354fe895" +dependencies = [ + "cfg-if", + "libc", + "rustix", + "windows", +] + [[package]] name = "regex" version = "1.11.1" @@ -4038,7 +4416,7 @@ version = "0.12.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e98ff6b0dbbe4d5a37318f433d4fc82babd21631f194d370409ceb2e40b2f0b5" dependencies = [ - "base64", + "base64 0.22.1", "bytes", "encoding_rs", "futures-core", @@ -4134,9 +4512,12 @@ version = "0.1.0" dependencies = [ "alloy", "anyhow", + "bson", + "cacache", "clap", "futures", "indexmap 2.10.0", + "once_cell", "revive-dt-common", "revive-dt-compiler", "revive-dt-config", @@ -4145,8 +4526,10 @@ dependencies = [ "revive-dt-node-interaction", "revive-dt-report", "semver 1.0.26", + "serde", "serde_json", "temp-dir", + "tempfile", "tokio", "tracing", "tracing-subscriber", @@ -4650,6 +5033,7 @@ version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" dependencies = [ + "indexmap 2.10.0", "itoa", "memchr", "ryu", @@ -4693,7 +5077,7 @@ version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa" dependencies = [ - "base64", + "base64 0.22.1", "chrono", "hex", "indexmap 1.9.3", @@ -4727,6 +5111,28 @@ dependencies = [ "serde", ] +[[package]] +name = "sha-1" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" 
+dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + [[package]] name = "sha2" version = "0.9.9" @@ -5201,6 +5607,23 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "ssri" +version = "9.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da7a2b3c2bc9693bcb40870c4e9b5bf0d79f9cb46273321bf855ec513e919082" +dependencies = [ + "base64 0.21.7", + "digest 0.10.7", + "hex", + "miette", + "serde", + "sha-1", + "sha2 0.10.9", + "thiserror 1.0.69", + "xxhash-rust", +] + [[package]] name = "stable_deref_trait" version = "1.2.0" @@ -5851,6 +6274,12 @@ dependencies = [ "tinyvec", ] +[[package]] +name = "unicode-width" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + [[package]] name = "unicode-xid" version = "0.2.6" @@ -5894,6 +6323,7 @@ checksum = "f33196643e165781c20a5ead5582283a7dacbb87855d867fbc2df3f81eddc1be" dependencies = [ "getrandom 0.3.3", "js-sys", + "serde", "wasm-bindgen", ] @@ -5903,6 +6333,12 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" +[[package]] +name = "value-bag" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "943ce29a8a743eb10d6082545d861b24f9d1b160b7d741e0f2cdf726bec909c5" + [[package]] name = "vcpkg" version = "0.2.15" @@ -6468,6 +6904,12 @@ dependencies = [ "tap", ] +[[package]] +name = "xxhash-rust" +version = "0.8.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3" + [[package]] name = "yansi" version = "1.0.1" diff --git a/Cargo.toml b/Cargo.toml index 8669e9b..598234d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,6 +25,8 @@ revive-dt-solc-binaries = { version = "0.1.0", path = "crates/solc-binaries" } alloy-primitives = "1.2.1" alloy-sol-types = "1.2.1" anyhow = "1.0" +bson = { version = "2.15.0" } +cacache = { version = "13.1.0" } clap = { version = "4", features = ["derive"] } foundry-compilers-artifacts = { version = "0.18.0" } futures = { version = "0.3.31" } diff --git a/crates/common/src/types/mode.rs b/crates/common/src/types/mode.rs index c380209..535add1 100644 --- a/crates/common/src/types/mode.rs +++ b/crates/common/src/types/mode.rs @@ -10,7 +10,7 @@ use std::str::FromStr; /// in its requirements, and then expanding it out into a list of [`Mode`]s. /// /// Use [`ParsedMode::to_test_modes()`] to do this. -#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)] +#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct Mode { pub pipeline: ModePipeline, pub optimize_setting: ModeOptimizerSetting, diff --git a/crates/compiler/src/lib.rs b/crates/compiler/src/lib.rs index 551d101..799124e 100644 --- a/crates/compiler/src/lib.rs +++ b/crates/compiler/src/lib.rs @@ -169,6 +169,14 @@ where self } + pub fn then(self, callback: impl FnOnce(Self) -> Self) -> Self { + callback(self) + } + + pub fn try_then(self, callback: impl FnOnce(Self) -> Result) -> Result { + callback(self) + } + pub async fn try_build( self, compiler_path: impl AsRef, diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index e4fb409..86ba19e 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -120,6 +120,10 @@ pub struct Arguments { /// By default it uses `eth-rpc` binary found in `$PATH`. 
#[arg(short = 'p', long = "eth_proxy", default_value = "eth-rpc")] pub eth_proxy: PathBuf, + + /// Controls if the compilation cache should be invalidated or not. + #[arg(short, long)] + pub invalidate_compilation_cache: bool, } impl Arguments { diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index 9c7d91a..b747bc1 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -23,12 +23,17 @@ revive-dt-report = { workspace = true } alloy = { workspace = true } anyhow = { workspace = true } +bson = { workspace = true } +cacache = { workspace = true } clap = { workspace = true } futures = { workspace = true } indexmap = { workspace = true } +once_cell = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } semver = { workspace = true } +serde = { workspace = true } serde_json = { workspace = true } temp-dir = { workspace = true } +tempfile = { workspace = true } diff --git a/crates/core/src/cached_compiler.rs b/crates/core/src/cached_compiler.rs new file mode 100644 index 0000000..ee05428 --- /dev/null +++ b/crates/core/src/cached_compiler.rs @@ -0,0 +1,261 @@ +//! A wrapper around the compiler which allows for caching of compilation artifacts so that they can +//! be reused between runs. 
+ +use std::{ + collections::HashMap, + path::{Path, PathBuf}, + sync::Arc, +}; + +use futures::FutureExt; +use revive_dt_common::iterators::FilesWithExtensionIterator; +use revive_dt_compiler::{Compiler, CompilerOutput, Mode, SolidityCompiler}; +use revive_dt_config::Arguments; +use revive_dt_format::metadata::{ContractIdent, ContractInstance, Metadata}; + +use alloy::{hex::ToHexExt, json_abi::JsonAbi, primitives::Address}; +use anyhow::{Error, Result}; +use once_cell::sync::Lazy; +use semver::Version; +use serde::{Deserialize, Serialize}; +use tokio::sync::{Mutex, RwLock}; +use tracing::{Instrument, debug, debug_span, instrument}; + +use crate::Platform; + +pub struct CachedCompiler(ArtifactsCache); + +impl CachedCompiler { + pub async fn new(path: impl AsRef, invalidate_cache: bool) -> Result { + let mut cache = ArtifactsCache::new(path); + if invalidate_cache { + cache = cache.with_invalidated_cache().await?; + } + Ok(Self(cache)) + } + + /// Compiles or gets the compilation artifacts from the cache. 
+ #[instrument( + level = "debug", + skip_all, + fields( + metadata_file_path = %metadata_file_path.as_ref().display(), + %mode, + platform = P::config_id().to_string() + ), + err + )] + pub async fn compile_contracts( + &self, + metadata: &Metadata, + metadata_file_path: impl AsRef, + mode: &Mode, + config: &Arguments, + deployed_libraries: Option<&HashMap>, + ) -> Result<(CompilerOutput, Version)> { + static CACHE_KEY_LOCK: Lazy>>>> = + Lazy::new(Default::default); + + let compiler_version_or_requirement = mode.compiler_version_to_use(config.solc.clone()); + let compiler_path = ::get_compiler_executable( + config, + compiler_version_or_requirement, + ) + .await?; + let compiler_version = + ::new(compiler_path.clone()).version()?; + + let cache_key = CacheKey { + platform_key: P::config_id().to_string(), + compiler_version: compiler_version.clone(), + metadata_file_path: metadata_file_path.as_ref().to_path_buf(), + solc_mode: mode.clone(), + }; + + let compilation_callback = || { + async move { + compile_contracts::

( + metadata.directory()?, + compiler_path, + metadata.files_to_compile()?, + mode, + deployed_libraries, + ) + .map(|compilation_result| compilation_result.map(CacheValue::new)) + .await + } + .instrument(debug_span!( + "Running compilation for the cache key", + cache_key.platform_key = %cache_key.platform_key, + cache_key.compiler_version = %cache_key.compiler_version, + cache_key.metadata_file_path = %cache_key.metadata_file_path.display(), + cache_key.solc_mode = %cache_key.solc_mode, + )) + }; + + let compiled_contracts = match deployed_libraries { + // If deployed libraries have been specified then we will re-compile the contract as it + // means that linking is required in this case. + Some(_) => { + debug!("Deployed libraries defined, recompilation must take place"); + debug!("Cache miss"); + compilation_callback().await?.compiler_output + } + // If no deployed libraries are specified then we can follow the cached flow and attempt + // to lookup the compilation artifacts in the cache. + None => { + debug!("Deployed libraries undefined, attempting to make use of cache"); + + // Lock this specific cache key such that we do not get inconsistent state. We want + // that when multiple cases come in asking for the compilation artifacts then they + // don't all trigger a compilation if there's a cache miss. Hence, the lock here. + let read_guard = CACHE_KEY_LOCK.read().await; + let mutex = match read_guard.get(&cache_key).cloned() { + Some(value) => value, + None => { + drop(read_guard); + CACHE_KEY_LOCK + .write() + .await + .entry(cache_key.clone()) + .or_default() + .clone() + } + }; + let _guard = mutex.lock().await; + + self.0 + .get_or_insert_with(&cache_key, compilation_callback) + .await + .map(|value| value.compiler_output)? 
+ } + }; + + Ok((compiled_contracts, compiler_version)) + } +} + +async fn compile_contracts( + metadata_directory: impl AsRef, + compiler_path: impl AsRef, + mut files_to_compile: impl Iterator, + mode: &Mode, + deployed_libraries: Option<&HashMap>, +) -> Result { + let all_sources_in_dir = FilesWithExtensionIterator::new(metadata_directory.as_ref()) + .with_allowed_extension("sol") + .with_use_cached_fs(true) + .collect::>(); + + Compiler::::new() + .with_allow_path(metadata_directory) + // Handling the modes + .with_optimization(mode.optimize_setting) + .with_pipeline(mode.pipeline) + // Adding the contract sources to the compiler. + .try_then(|compiler| { + files_to_compile.try_fold(compiler, |compiler, path| compiler.with_source(path)) + })? + // Adding the deployed libraries to the compiler. + .then(|compiler| { + deployed_libraries + .iter() + .flat_map(|value| value.iter()) + .map(|(instance, (ident, address, abi))| (instance, ident, address, abi)) + .flat_map(|(_, ident, address, _)| { + all_sources_in_dir + .iter() + .map(move |path| (ident, address, path)) + }) + .fold(compiler, |compiler, (ident, address, path)| { + compiler.with_library(path, ident.as_str(), *address) + }) + }) + .try_build(compiler_path) + .await +} + +struct ArtifactsCache { + path: PathBuf, +} + +impl ArtifactsCache { + pub fn new(path: impl AsRef) -> Self { + Self { + path: path.as_ref().to_path_buf(), + } + } + + #[instrument(level = "debug", skip_all, err)] + pub async fn with_invalidated_cache(self) -> Result { + cacache::clear(self.path.as_path()) + .await + .map_err(Into::::into)?; + Ok(self) + } + + #[instrument(level = "debug", skip_all, err)] + pub async fn insert(&self, key: &CacheKey, value: &CacheValue) -> Result<()> { + let key = bson::to_vec(key)?; + let value = bson::to_vec(value)?; + cacache::write(self.path.as_path(), key.encode_hex(), value).await?; + Ok(()) + } + + pub async fn get(&self, key: &CacheKey) -> Option { + let key = bson::to_vec(key).ok()?; + let value 
= cacache::read(self.path.as_path(), key.encode_hex()) + .await + .ok()?; + let value = bson::from_slice::(&value).ok()?; + Some(value) + } + + #[instrument(level = "debug", skip_all, err)] + pub async fn get_or_insert_with( + &self, + key: &CacheKey, + callback: impl AsyncFnOnce() -> Result, + ) -> Result { + match self.get(key).await { + Some(value) => { + debug!("Cache hit"); + Ok(value) + } + None => { + debug!("Cache miss"); + let value = callback().await?; + self.insert(key, &value).await?; + Ok(value) + } + } + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] +struct CacheKey { + /// The platform name that this artifact was compiled for. For example, this could be EVM or + /// PVM. + platform_key: String, + + /// The version of the compiler that was used to compile the artifacts. + compiler_version: Version, + + /// The path of the metadata file that the compilation artifacts are for. + metadata_file_path: PathBuf, + + /// The mode that the compilation artifacts where compiled with. + solc_mode: Mode, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +struct CacheValue { + /// The compiler output from the compilation run. 
+ compiler_output: CompilerOutput, +} + +impl CacheValue { + pub fn new(compiler_output: CompilerOutput) -> Self { + Self { compiler_output } + } +} diff --git a/crates/core/src/driver/mod.rs b/crates/core/src/driver/mod.rs index a8e908e..35ef782 100644 --- a/crates/core/src/driver/mod.rs +++ b/crates/core/src/driver/mod.rs @@ -31,7 +31,7 @@ use revive_dt_format::input::{ BalanceAssertion, Calldata, EtherValue, Expected, ExpectedOutput, Input, Method, StorageEmptyAssertion, }; -use revive_dt_format::metadata::{ContractInstance, ContractPathAndIdent}; +use revive_dt_format::metadata::{ContractIdent, ContractInstance, ContractPathAndIdent}; use revive_dt_format::{input::Step, metadata::Metadata}; use revive_dt_node::Node; use revive_dt_node_interaction::EthereumNode; @@ -44,7 +44,7 @@ pub struct CaseState { compiled_contracts: HashMap>, /// This map stores the contracts deployments for this case. - deployed_contracts: HashMap, + deployed_contracts: HashMap, /// This map stores the variables used for each one of the cases contained in the metadata /// file. 
@@ -63,7 +63,7 @@ where pub fn new( compiler_version: Version, compiled_contracts: HashMap>, - deployed_contracts: HashMap, + deployed_contracts: HashMap, ) -> Self { Self { compiled_contracts, @@ -155,17 +155,10 @@ where async fn handle_input_contract_deployment( &mut self, metadata: &Metadata, - case_idx: CaseIdx, + _: CaseIdx, input: &Input, node: &T::Blockchain, ) -> anyhow::Result> { - let span = tracing::debug_span!( - "Handling contract deployment", - ?case_idx, - instance = ?input.instance - ); - let _guard = span.enter(); - let mut instances_we_must_deploy = IndexMap::::new(); for instance in input.find_all_contract_instances().into_iter() { if !self.deployed_contracts.contains_key(&instance) { @@ -316,9 +309,6 @@ where resolver: &impl ResolverApi, tracing_result: &CallFrame, ) -> anyhow::Result<()> { - let span = tracing::info_span!("Handling input expectations"); - let _guard = span.enter(); - // Resolving the `input.expected` into a series of expectations that we can then assert on. 
let mut expectations = match input { Input { @@ -508,9 +498,6 @@ where execution_receipt: TransactionReceipt, node: &T::Blockchain, ) -> anyhow::Result<(TransactionReceipt, GethTrace, DiffMode)> { - let span = tracing::info_span!("Handling input diff"); - let _guard = span.enter(); - let trace_options = GethDebugTracingOptions::prestate_tracer(PreStateConfig { diff_mode: Some(true), disable_code: None, @@ -662,7 +649,7 @@ where value: Option, node: &T::Blockchain, ) -> anyhow::Result<(Address, JsonAbi, Option)> { - if let Some((address, abi)) = self.deployed_contracts.get(contract_instance) { + if let Some((_, address, abi)) = self.deployed_contracts.get(contract_instance) { return Ok((*address, abi.clone(), None)); } @@ -746,8 +733,10 @@ where "Deployed contract" ); - self.deployed_contracts - .insert(contract_instance.clone(), (address, abi.clone())); + self.deployed_contracts.insert( + contract_instance.clone(), + (contract_ident, address, abi.clone()), + ); Ok((address, abi, Some(receipt))) } diff --git a/crates/core/src/main.rs b/crates/core/src/main.rs index a1a9429..63fb90c 100644 --- a/crates/core/src/main.rs +++ b/crates/core/src/main.rs @@ -1,3 +1,5 @@ +mod cached_compiler; + use std::{ collections::HashMap, path::{Path, PathBuf}, @@ -6,26 +8,21 @@ use std::{ }; use alloy::{ - json_abi::JsonAbi, network::{Ethereum, TransactionBuilder}, - primitives::Address, rpc::types::TransactionRequest, }; use anyhow::Context; use clap::Parser; use futures::stream::futures_unordered::FuturesUnordered; use futures::{Stream, StreamExt}; -use revive_dt_common::iterators::FilesWithExtensionIterator; use revive_dt_node_interaction::EthereumNode; -use semver::Version; use temp_dir::TempDir; -use tokio::sync::{Mutex, RwLock, mpsc}; +use tokio::sync::mpsc; use tracing::{Instrument, Level}; use tracing_subscriber::{EnvFilter, FmtSubscriber}; use revive_dt_common::types::Mode; use revive_dt_compiler::SolidityCompiler; -use revive_dt_compiler::{Compiler, CompilerOutput}; use 
revive_dt_config::*; use revive_dt_core::{ Geth, Kitchensink, Platform, @@ -35,21 +32,14 @@ use revive_dt_format::{ case::{Case, CaseIdx}, corpus::Corpus, input::{Input, Step}, - metadata::{ContractInstance, ContractPathAndIdent, Metadata, MetadataFile}, + metadata::{ContractPathAndIdent, Metadata, MetadataFile}, }; use revive_dt_node::pool::NodePool; use revive_dt_report::reporter::{Report, Span}; -static TEMP_DIR: LazyLock = LazyLock::new(|| TempDir::new().unwrap()); +use crate::cached_compiler::CachedCompiler; -type CompilationCache = Arc< - RwLock< - HashMap< - (PathBuf, Mode, TestingPlatform), - Arc>>>, - >, - >, ->; +static TEMP_DIR: LazyLock = LazyLock::new(|| TempDir::new().unwrap()); /// this represents a single "test"; a mode, path and collection of cases. #[derive(Clone)] @@ -146,7 +136,7 @@ where let (report_tx, report_rx) = mpsc::unbounded_channel::<(Test, CaseResult)>(); let tests = prepare_tests::(args, metadata_files); - let driver_task = start_driver_task::(args, tests, span, report_tx)?; + let driver_task = start_driver_task::(args, tests, span, report_tx).await?; let status_reporter_task = start_reporter_task(report_rx); tokio::join!(status_reporter_task, driver_task); @@ -246,18 +236,17 @@ where let is_supported = does_compiler_support_mode::(args, &test.mode).await.ok().unwrap_or(false) && does_compiler_support_mode::(args, &test.mode).await.ok().unwrap_or(false); - tracing::warn!( - metadata_file_path = %test.path.display(), - case_idx = %test.case_idx, - case_name = ?test.case.name, - mode = %test.mode, - "Skipping test as one or both of the compilers don't support it" - ); - // We filter_map to avoid needing to clone `test`, but return it as-is. 
if is_supported { Some(test) } else { + tracing::warn!( + metadata_file_path = %test.path.display(), + case_idx = %test.case_idx, + case_name = ?test.case.name, + mode = %test.mode, + "Skipping test as one or both of the compilers don't support it" + ); None } }) @@ -279,7 +268,7 @@ async fn does_compiler_support_mode( )) } -fn start_driver_task( +async fn start_driver_task( args: &Arguments, tests: impl Stream, span: Span, @@ -293,8 +282,14 @@ where { let leader_nodes = Arc::new(NodePool::::new(args)?); let follower_nodes = Arc::new(NodePool::::new(args)?); - let compilation_cache = Arc::new(RwLock::new(HashMap::new())); let number_concurrent_tasks = args.number_of_concurrent_tasks(); + let cached_compiler = Arc::new( + CachedCompiler::new( + args.directory().join("compilation_cache"), + args.invalidate_compilation_cache, + ) + .await?, + ); Ok(tests.for_each_concurrent( // We want to limit the concurrent tasks here because: @@ -308,8 +303,8 @@ where move |test| { let leader_nodes = leader_nodes.clone(); let follower_nodes = follower_nodes.clone(); - let compilation_cache = compilation_cache.clone(); let report_tx = report_tx.clone(); + let cached_compiler = cached_compiler.clone(); async move { let leader_node = leader_nodes.round_robbin(); @@ -330,7 +325,7 @@ where &test.case, test.mode.clone(), args, - compilation_cache.clone(), + cached_compiler, leader_node, follower_node, span, @@ -420,7 +415,7 @@ async fn handle_case_driver( case: &Case, mode: Mode, config: &Arguments, - compilation_cache: CompilationCache, + cached_compiler: Arc, leader_node: &L::Blockchain, follower_node: &F::Blockchain, _: Span, @@ -431,27 +426,19 @@ where L::Blockchain: revive_dt_node::Node + Send + Sync + 'static, F::Blockchain: revive_dt_node::Node + Send + Sync + 'static, { - let leader_pre_link_contracts = get_or_build_contracts::( - metadata, - metadata_file_path, - mode.clone(), - config, - compilation_cache.clone(), - &HashMap::new(), - ) - .await?; - let 
follower_pre_link_contracts = get_or_build_contracts::( - metadata, - metadata_file_path, - mode.clone(), - config, - compilation_cache.clone(), - &HashMap::new(), - ) - .await?; + let leader_pre_link_contracts = cached_compiler + .compile_contracts::(metadata, metadata_file_path, &mode, config, None) + .await? + .0 + .contracts; + let follower_pre_link_contracts = cached_compiler + .compile_contracts::(metadata, metadata_file_path, &mode, config, None) + .await? + .0 + .contracts; - let mut leader_deployed_libraries = HashMap::new(); - let mut follower_deployed_libraries = HashMap::new(); + let mut leader_deployed_libraries = None::>; + let mut follower_deployed_libraries = None::>; let mut contract_sources = metadata.contract_sources()?; for library_instance in metadata .libraries @@ -467,14 +454,10 @@ where .context("Failed to find the contract source")?; let (leader_code, leader_abi) = leader_pre_link_contracts - .1 - .contracts .get(&library_source_path) .and_then(|contracts| contracts.get(library_ident.as_str())) .context("Declared library was not compiled")?; let (follower_code, follower_abi) = follower_pre_link_contracts - .1 - .contracts .get(&library_source_path) .and_then(|contracts| contracts.get(library_ident.as_str())) .context("Declared library was not compiled")?; @@ -567,81 +550,52 @@ where anyhow::bail!("Contract deployment didn't return an address"); }; - leader_deployed_libraries.insert( + leader_deployed_libraries.get_or_insert_default().insert( library_instance.clone(), - (leader_library_address, leader_abi.clone()), + ( + library_ident.clone(), + leader_library_address, + leader_abi.clone(), + ), ); - follower_deployed_libraries.insert( + follower_deployed_libraries.get_or_insert_default().insert( library_instance.clone(), - (follower_library_address, follower_abi.clone()), + ( + library_ident, + follower_library_address, + follower_abi.clone(), + ), ); } - let metadata_file_contains_libraries = metadata - .libraries - .iter() - 
.flat_map(|map| map.iter()) - .flat_map(|(_, value)| value.iter()) - .next() - .is_some(); - let compiled_contracts_require_linking = leader_pre_link_contracts - .1 - .contracts - .values() - .chain(follower_pre_link_contracts.1.contracts.values()) - .flat_map(|value| value.values()) - .any(|(code, _)| !code.chars().all(|char| char.is_ascii_hexdigit())); - let (leader_compiled_contracts, follower_compiled_contracts) = - if metadata_file_contains_libraries && compiled_contracts_require_linking { - let leader_key = ( - metadata_file_path.to_path_buf(), - mode.clone(), - L::config_id(), - ); - let follower_key = ( - metadata_file_path.to_path_buf(), - mode.clone(), - F::config_id(), - ); - { - let mut cache = compilation_cache.write().await; - cache.remove(&leader_key); - cache.remove(&follower_key); - } - - let leader_post_link_contracts = get_or_build_contracts::( - metadata, - metadata_file_path, - mode.clone(), - config, - compilation_cache.clone(), - &leader_deployed_libraries, - ) - .await?; - let follower_post_link_contracts = get_or_build_contracts::( - metadata, - metadata_file_path, - mode.clone(), - config, - compilation_cache, - &follower_deployed_libraries, - ) - .await?; - - (leader_post_link_contracts, follower_post_link_contracts) - } else { - (leader_pre_link_contracts, follower_pre_link_contracts) - }; + let (leader_post_link_contracts, leader_compiler_version) = cached_compiler + .compile_contracts::( + metadata, + metadata_file_path, + &mode, + config, + leader_deployed_libraries.as_ref(), + ) + .await?; + let (follower_post_link_contracts, follower_compiler_version) = cached_compiler + .compile_contracts::( + metadata, + metadata_file_path, + &mode, + config, + follower_deployed_libraries.as_ref(), + ) + .await?; let leader_state = CaseState::::new( - leader_compiled_contracts.0.clone(), - leader_compiled_contracts.1.contracts.clone(), - leader_deployed_libraries, + leader_compiler_version, + leader_post_link_contracts.contracts, + 
leader_deployed_libraries.unwrap_or_default(), ); let follower_state = CaseState::::new( - follower_compiled_contracts.0.clone(), - follower_compiled_contracts.1.contracts.clone(), - follower_deployed_libraries, + follower_compiler_version, + follower_post_link_contracts.contracts, + follower_deployed_libraries.unwrap_or_default(), ); let mut driver = CaseDriver::::new( @@ -656,119 +610,6 @@ where driver.execute().await } -async fn get_or_build_contracts( - metadata: &Metadata, - metadata_file_path: &Path, - mode: Mode, - config: &Arguments, - compilation_cache: CompilationCache, - deployed_libraries: &HashMap, -) -> anyhow::Result> { - let key = ( - metadata_file_path.to_path_buf(), - mode.clone(), - P::config_id(), - ); - if let Some(compilation_artifact) = compilation_cache.read().await.get(&key).cloned() { - let mut compilation_artifact = compilation_artifact.lock().await; - match *compilation_artifact { - Some(ref compiled_contracts) => { - tracing::debug!(?key, "Compiled contracts cache hit"); - return Ok(compiled_contracts.clone()); - } - None => { - tracing::debug!(?key, "Compiled contracts cache miss"); - let compiled_contracts = compile_contracts::

( - metadata, - metadata_file_path, - &mode, - config, - deployed_libraries, - ) - .await?; - let compiled_contracts = Arc::new(compiled_contracts); - - *compilation_artifact = Some(compiled_contracts.clone()); - return Ok(compiled_contracts.clone()); - } - } - }; - - tracing::debug!(?key, "Compiled contracts cache miss"); - let mutex = { - let mut compilation_cache = compilation_cache.write().await; - let mutex = Arc::new(Mutex::new(None)); - compilation_cache.insert(key, mutex.clone()); - mutex - }; - let mut compilation_artifact = mutex.lock().await; - - let compiled_contracts = compile_contracts::

( - metadata, - metadata_file_path, - &mode, - config, - deployed_libraries, - ) - .await?; - let compiled_contracts = Arc::new(compiled_contracts); - - *compilation_artifact = Some(compiled_contracts.clone()); - Ok(compiled_contracts.clone()) -} - -async fn compile_contracts( - metadata: &Metadata, - metadata_file_path: &Path, - mode: &Mode, - config: &Arguments, - deployed_libraries: &HashMap, -) -> anyhow::Result<(Version, CompilerOutput)> { - let compiler_version_or_requirement = mode.compiler_version_to_use(config.solc.clone()); - let compiler_path = - P::Compiler::get_compiler_executable(config, compiler_version_or_requirement).await?; - let compiler_version = P::Compiler::new(compiler_path.clone()).version()?; - - tracing::info!( - %compiler_version, - metadata_file_path = %metadata_file_path.display(), - mode = ?mode, - "Compiling contracts" - ); - - let compiler = Compiler::::new() - .with_allow_path(metadata.directory()?) - .with_optimization(mode.optimize_setting) - .with_pipeline(mode.pipeline); - let mut compiler = metadata - .files_to_compile()? - .try_fold(compiler, |compiler, path| compiler.with_source(&path))?; - for (library_instance, (library_address, _)) in deployed_libraries.iter() { - let library_ident = &metadata - .contracts - .as_ref() - .and_then(|contracts| contracts.get(library_instance)) - .expect("Impossible for library to not be found in contracts") - .contract_ident; - - // Note the following: we need to tell solc which files require the libraries to be linked - // into them. We do not have access to this information and therefore we choose an easier, - // yet more compute intensive route, of telling solc that all of the files need to link the - // library and it will only perform the linking for the files that do actually need the - // library. - compiler = FilesWithExtensionIterator::new(metadata.directory()?) 
- .with_allowed_extension("sol") - .with_use_cached_fs(true) - .fold(compiler, |compiler, path| { - compiler.with_library(&path, library_ident.as_str(), *library_address) - }); - } - - let compiler_output = compiler.try_build(compiler_path).await?; - - Ok((compiler_version, compiler_output)) -} - async fn execute_corpus( args: &Arguments, tests: &[MetadataFile], @@ -800,28 +641,40 @@ async fn compile_corpus( .map(move |solc_mode| (metadata, solc_mode)) }); + let file = tempfile::NamedTempFile::new().expect("Failed to create temp file"); + let cached_compiler = CachedCompiler::new(file.path(), false) + .await + .map(Arc::new) + .expect("Failed to create the cached compiler"); + futures::stream::iter(tests) - .for_each_concurrent(None, |(metadata, mode)| async move { - match platform { - TestingPlatform::Geth => { - let _ = compile_contracts::( - &metadata.content, - &metadata.path, - &mode, - config, - &Default::default(), - ) - .await; - } - TestingPlatform::Kitchensink => { - let _ = compile_contracts::( - &metadata.content, - &metadata.path, - &mode, - config, - &Default::default(), - ) - .await; + .for_each_concurrent(None, |(metadata, mode)| { + let cached_compiler = cached_compiler.clone(); + + async move { + match platform { + TestingPlatform::Geth => { + let _ = cached_compiler + .compile_contracts::( + metadata, + metadata.path.as_path(), + &mode, + config, + None, + ) + .await; + } + TestingPlatform::Kitchensink => { + let _ = cached_compiler + .compile_contracts::( + metadata, + metadata.path.as_path(), + &mode, + config, + None, + ) + .await; + } } } }) diff --git a/crates/format/src/input.rs b/crates/format/src/input.rs index aace626..36899b0 100644 --- a/crates/format/src/input.rs +++ b/crates/format/src/input.rs @@ -739,12 +739,14 @@ impl<'de> Deserialize<'de> for EtherValue { #[cfg(test)] mod tests { - use super::*; use alloy::{eips::BlockNumberOrTag, json_abi::JsonAbi}; use alloy_primitives::{BlockHash, BlockNumber, BlockTimestamp, ChainId, TxHash, 
address}; use alloy_sol_types::SolValue; use std::collections::HashMap; + use super::*; + use crate::metadata::ContractIdent; + struct MockResolver; impl ResolverApi for MockResolver { @@ -818,11 +820,11 @@ mod tests { let mut contracts = HashMap::new(); contracts.insert( ContractInstance::new("Contract"), - (Address::ZERO, parsed_abi), + (ContractIdent::new("Contract"), Address::ZERO, parsed_abi), ); let resolver = MockResolver; - let context = ResolutionContext::new_from_parts(&contracts, None, None, None); + let context = ResolutionContext::default().with_deployed_contracts(&contracts); let encoded = input.encoded_input(&resolver, context).await.unwrap(); assert!(encoded.0.starts_with(&selector)); @@ -862,11 +864,11 @@ mod tests { let mut contracts = HashMap::new(); contracts.insert( ContractInstance::new("Contract"), - (Address::ZERO, parsed_abi), + (ContractIdent::new("Contract"), Address::ZERO, parsed_abi), ); let resolver = MockResolver; - let context = ResolutionContext::new_from_parts(&contracts, None, None, None); + let context = ResolutionContext::default().with_deployed_contracts(&contracts); let encoded = input.encoded_input(&resolver, context).await.unwrap(); assert!(encoded.0.starts_with(&selector)); @@ -909,11 +911,11 @@ mod tests { let mut contracts = HashMap::new(); contracts.insert( ContractInstance::new("Contract"), - (Address::ZERO, parsed_abi), + (ContractIdent::new("Contract"), Address::ZERO, parsed_abi), ); let resolver = MockResolver; - let context = ResolutionContext::new_from_parts(&contracts, None, None, None); + let context = ResolutionContext::default().with_deployed_contracts(&contracts); let encoded = input.encoded_input(&resolver, context).await.unwrap(); assert!(encoded.0.starts_with(&selector)); @@ -927,10 +929,10 @@ mod tests { async fn resolve_calldata_item( input: &str, - deployed_contracts: &HashMap, + deployed_contracts: &HashMap, resolver: &impl ResolverApi, ) -> anyhow::Result { - let context = 
ResolutionContext::new_from_parts(deployed_contracts, None, None, None); + let context = ResolutionContext::default().with_deployed_contracts(deployed_contracts); CalldataItem::new(input).resolve(resolver, context).await } diff --git a/crates/format/src/traits.rs b/crates/format/src/traits.rs index 057239c..1ad6cc3 100644 --- a/crates/format/src/traits.rs +++ b/crates/format/src/traits.rs @@ -6,7 +6,7 @@ use alloy::primitives::{Address, BlockHash, BlockNumber, BlockTimestamp, ChainId use alloy_primitives::TxHash; use anyhow::Result; -use crate::metadata::ContractInstance; +use crate::metadata::{ContractIdent, ContractInstance}; /// A trait of the interface are required to implement to be used by the resolution logic that this /// crate implements to go from string calldata and into the bytes calldata. @@ -48,7 +48,7 @@ pub trait ResolverApi { /// Contextual information required by the code that's performing the resolution. pub struct ResolutionContext<'a> { /// When provided the contracts provided here will be used for resolutions. - deployed_contracts: Option<&'a HashMap>, + deployed_contracts: Option<&'a HashMap>, /// When provided the variables in here will be used for performing resolutions. 
variables: Option<&'a HashMap>, @@ -66,7 +66,9 @@ impl<'a> ResolutionContext<'a> { } pub fn new_from_parts( - deployed_contracts: impl Into>>, + deployed_contracts: impl Into< + Option<&'a HashMap>, + >, variables: impl Into>>, block_number: impl Into>, transaction_hash: impl Into>, @@ -81,7 +83,9 @@ impl<'a> ResolutionContext<'a> { pub fn with_deployed_contracts( mut self, - deployed_contracts: impl Into>>, + deployed_contracts: impl Into< + Option<&'a HashMap>, + >, ) -> Self { self.deployed_contracts = deployed_contracts.into(); self @@ -122,17 +126,20 @@ impl<'a> ResolutionContext<'a> { } } - pub fn deployed_contract(&self, instance: &ContractInstance) -> Option<&(Address, JsonAbi)> { + pub fn deployed_contract( + &self, + instance: &ContractInstance, + ) -> Option<&(ContractIdent, Address, JsonAbi)> { self.deployed_contracts .and_then(|deployed_contracts| deployed_contracts.get(instance)) } pub fn deployed_contract_address(&self, instance: &ContractInstance) -> Option<&Address> { - self.deployed_contract(instance).map(|(a, _)| a) + self.deployed_contract(instance).map(|(_, a, _)| a) } pub fn deployed_contract_abi(&self, instance: &ContractInstance) -> Option<&JsonAbi> { - self.deployed_contract(instance).map(|(_, a)| a) + self.deployed_contract(instance).map(|(_, _, a)| a) } pub fn variable(&self, name: impl AsRef) -> Option<&U256> { diff --git a/crates/node/src/geth.rs b/crates/node/src/geth.rs index 6617351..c034ba5 100644 --- a/crates/node/src/geth.rs +++ b/crates/node/src/geth.rs @@ -281,9 +281,6 @@ impl EthereumNode for GethNode { &self, transaction: TransactionRequest, ) -> anyhow::Result { - let span = tracing::debug_span!("Submitting transaction", ?transaction); - let _guard = span.enter(); - let provider = Arc::new(self.provider().await?); let transaction_hash = *provider.send_transaction(transaction).await?.tx_hash(); From c58551803d36c818f61e0c72d7edb05617238504 Mon Sep 17 00:00:00 2001 From: Omar Date: Sat, 16 Aug 2025 19:04:17 +0300 Subject: 
[PATCH 3/3] Allow multiple files in corpus (#144) --- crates/core/src/main.rs | 2 +- crates/format/src/corpus.rs | 81 +++++++++++++++++++++++++------------ 2 files changed, 57 insertions(+), 26 deletions(-) diff --git a/crates/core/src/main.rs b/crates/core/src/main.rs index 63fb90c..2c7b5a5 100644 --- a/crates/core/src/main.rs +++ b/crates/core/src/main.rs @@ -115,7 +115,7 @@ fn collect_corpora(args: &Arguments) -> anyhow::Result }, } impl Corpus { - /// Try to read and parse the corpus definition file at given `path`. - pub fn try_from_path(path: &Path) -> anyhow::Result { - let file = File::open(path)?; - let mut corpus: Corpus = serde_json::from_reader(file)?; + pub fn try_from_path(file_path: impl AsRef) -> anyhow::Result { + let mut corpus = File::open(file_path.as_ref()) + .map_err(Into::::into) + .and_then(|file| serde_json::from_reader::<_, Corpus>(file).map_err(Into::into))?; - // Ensure that the path mentioned in the corpus is relative to the corpus file. - // Canonicalizing also helps make the path in any errors unambiguous. - corpus.path = path - .parent() - .ok_or_else(|| { - anyhow::anyhow!("Corpus path '{}' does not point to a file", path.display()) - })? - .canonicalize() - .map_err(|error| { - anyhow::anyhow!( - "Failed to canonicalize path to corpus '{}': {error}", - path.display() - ) - })? - .join(corpus.path); + for path in corpus.paths_iter_mut() { + *path = file_path + .as_ref() + .parent() + .ok_or_else(|| { + anyhow::anyhow!("Corpus path '{}' does not point to a file", path.display()) + })? + .canonicalize() + .map_err(|error| { + anyhow::anyhow!( + "Failed to canonicalize path to corpus '{}': {error}", + path.display() + ) + })? + .join(path.as_path()) + } Ok(corpus) } - /// Scan the corpus base directory and return all tests found. 
pub fn enumerate_tests(&self) -> Vec { let mut tests = Vec::new(); - collect_metadata(&self.path, &mut tests); + for path in self.paths_iter() { + collect_metadata(path, &mut tests); + } tests } + + pub fn name(&self) -> &str { + match self { + Corpus::SinglePath { name, .. } | Corpus::MultiplePaths { name, .. } => name.as_str(), + } + } + + pub fn paths_iter(&self) -> impl Iterator { + match self { + Corpus::SinglePath { path, .. } => { + Box::new(std::iter::once(path.as_path())) as Box> + } + Corpus::MultiplePaths { paths, .. } => { + Box::new(paths.iter().map(|path| path.as_path())) as Box> + } + } + } + + pub fn paths_iter_mut(&mut self) -> impl Iterator { + match self { + Corpus::SinglePath { path, .. } => { + Box::new(std::iter::once(path)) as Box> + } + Corpus::MultiplePaths { paths, .. } => { + Box::new(paths.iter_mut()) as Box> + } + } + } } /// Recursively walks `path` and parses any JSON or Solidity file into a test