diff --git a/Cargo.lock b/Cargo.lock index 8b71cb9..7a216bd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2984,8 +2984,8 @@ dependencies = [ [[package]] name = "revive-common" -version = "0.1.0-dev.12" -source = "git+https://github.com/paritytech/revive?rev=497dae2494dabe12d1af32d6d687122903cb2ada#497dae2494dabe12d1af32d6d687122903cb2ada" +version = "0.1.0-dev.16" +source = "git+https://github.com/paritytech/revive?rev=3389865af7c3ff6f29a586d82157e8bc573c1a8e#3389865af7c3ff6f29a586d82157e8bc573c1a8e" dependencies = [ "anyhow", "serde", @@ -3014,6 +3014,7 @@ dependencies = [ "alloy", "clap", "semver 1.0.26", + "serde", "temp-dir", ] @@ -3032,6 +3033,7 @@ dependencies = [ "revive-dt-format", "revive-dt-node", "revive-dt-node-interaction", + "revive-dt-report", "revive-solc-json-interface", "temp-dir", ] @@ -3071,6 +3073,19 @@ dependencies = [ "tokio", ] +[[package]] +name = "revive-dt-report" +version = "0.1.0" +dependencies = [ + "anyhow", + "log", + "revive-dt-config", + "revive-dt-format", + "revive-solc-json-interface", + "serde", + "serde_json", +] + [[package]] name = "revive-dt-solc-binaries" version = "0.1.0" @@ -3086,8 +3101,8 @@ dependencies = [ [[package]] name = "revive-solc-json-interface" -version = "0.1.0-dev.12" -source = "git+https://github.com/paritytech/revive?rev=497dae2494dabe12d1af32d6d687122903cb2ada#497dae2494dabe12d1af32d6d687122903cb2ada" +version = "0.1.0-dev.16" +source = "git+https://github.com/paritytech/revive?rev=3389865af7c3ff6f29a586d82157e8bc573c1a8e#3389865af7c3ff6f29a586d82157e8bc573c1a8e" dependencies = [ "anyhow", "rayon", diff --git a/Cargo.toml b/Cargo.toml index 31bfb70..1c3e677 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,6 +20,7 @@ revive-dt-format = { version = "0.1.0", path = "crates/format" } revive-dt-node = { version = "0.1.0", path = "crates/node" } revive-dt-node-interaction = { version = "0.1.0", path = "crates/node-interaction" } revive-dt-node-pool = { version = "0.1.0", path = "crates/node-pool" } 
+revive-dt-report = { version = "0.1.0", path = "crates/report" } revive-dt-solc-binaries = { version = "0.1.0", path = "crates/solc-binaries" } anyhow = "1.0" @@ -38,9 +39,9 @@ temp-dir = { version = "0.1.14" } tokio = { version = "1", default-features = false, features = ["rt-multi-thread"] } # revive compiler -revive-solc-json-interface = { git = "https://github.com/paritytech/revive", rev = "497dae2494dabe12d1af32d6d687122903cb2ada" } -revive-common = { git = "https://github.com/paritytech/revive", rev = "497dae2494dabe12d1af32d6d687122903cb2ada" } -revive-differential = { git = "https://github.com/paritytech/revive", rev = "497dae2494dabe12d1af32d6d687122903cb2ada" } +revive-solc-json-interface = { git = "https://github.com/paritytech/revive", rev = "3389865af7c3ff6f29a586d82157e8bc573c1a8e" } +revive-common = { git = "https://github.com/paritytech/revive", rev = "3389865af7c3ff6f29a586d82157e8bc573c1a8e" } +revive-differential = { git = "https://github.com/paritytech/revive", rev = "3389865af7c3ff6f29a586d82157e8bc573c1a8e" } [workspace.dependencies.alloy] version = "0.13.0" diff --git a/crates/compiler/src/lib.rs b/crates/compiler/src/lib.rs index 6820c27..7e92cf0 100644 --- a/crates/compiler/src/lib.rs +++ b/crates/compiler/src/lib.rs @@ -48,8 +48,12 @@ pub struct CompilerInput { /// The generic compilation output configuration. pub struct CompilerOutput { + /// The solc standard JSON input. pub input: CompilerInput, + /// The produced solc standard JSON output. pub output: SolcStandardJsonOutput, + /// The error message in case the compiler returns abnormally. + pub error: Option, } impl PartialEq for CompilerInput @@ -110,6 +114,7 @@ where false, ), None, + None, ), }, extra_options: Default::default(), @@ -156,4 +161,9 @@ where input: self.input, }) } + + /// Returns the compiler JSON input. 
+ pub fn input(&self) -> SolcStandardJsonInput { + self.input.clone() + } } diff --git a/crates/compiler/src/revive_resolc.rs b/crates/compiler/src/revive_resolc.rs index cae774b..706205a 100644 --- a/crates/compiler/src/revive_resolc.rs +++ b/crates/compiler/src/revive_resolc.rs @@ -41,12 +41,18 @@ impl SolidityCompiler for Resolc { let stderr = output.stderr; if !output.status.success() { + let message = String::from_utf8_lossy(&stderr); log::error!( "resolc failed exit={} stderr={} JSON-in={} ", output.status, - String::from_utf8_lossy(&stderr), + &message, json_in, ); + return Ok(CompilerOutput { + input, + output: Default::default(), + error: Some(message.into()), + }); } let parsed: SolcStandardJsonOutput = serde_json::from_slice(&stdout).map_err(|e| { @@ -59,6 +65,7 @@ impl SolidityCompiler for Resolc { Ok(CompilerOutput { input, output: parsed, + error: None, }) } diff --git a/crates/compiler/src/solc.rs b/crates/compiler/src/solc.rs index c33e244..e88fdc7 100644 --- a/crates/compiler/src/solc.rs +++ b/crates/compiler/src/solc.rs @@ -30,11 +30,22 @@ impl SolidityCompiler for Solc { let stdin = child.stdin.as_mut().expect("should be piped"); serde_json::to_writer(stdin, &input.input)?; + let output = child.wait_with_output()?; + + if !output.status.success() { + let message = String::from_utf8_lossy(&output.stderr); + log::error!("solc failed exit={} stderr={}", output.status, &message); + return Ok(CompilerOutput { + input, + output: Default::default(), + error: Some(message.into()), + }); + } - let output = child.wait_with_output()?.stdout; Ok(CompilerOutput { input, - output: serde_json::from_slice(&output)?, + output: serde_json::from_slice(&output.stdout)?, + error: None, }) } diff --git a/crates/config/Cargo.toml b/crates/config/Cargo.toml index 5df52e9..e58c747 100644 --- a/crates/config/Cargo.toml +++ b/crates/config/Cargo.toml @@ -13,3 +13,5 @@ alloy = { workspace = true } clap = { workspace = true } semver = { workspace = true } temp-dir = { 
workspace = true } +serde = { workspace = true } + diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index b186416..c9593ce 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -1,13 +1,17 @@ //! The global configuration used accross all revive differential testing crates. -use std::path::{Path, PathBuf}; +use std::{ + fmt::Display, + path::{Path, PathBuf}, +}; use alloy::{network::EthereumWallet, signers::local::PrivateKeySigner}; use clap::{Parser, ValueEnum}; use semver::Version; +use serde::{Deserialize, Serialize}; use temp_dir::TempDir; -#[derive(Debug, Parser, Clone)] +#[derive(Debug, Parser, Clone, Serialize, Deserialize)] #[command(name = "retester")] pub struct Arguments { /// The `solc` version to use if the test didn't specify it explicitly. @@ -40,6 +44,7 @@ pub struct Arguments { /// /// We attach it here because [TempDir] prunes itself on drop. #[clap(skip)] + #[serde(skip)] pub temp_dir: Option<&'static TempDir>, /// The path to the `geth` executable. @@ -83,6 +88,10 @@ pub struct Arguments { /// Determines the amount of tests that are executed in parallel. #[arg(long = "workers", default_value = "12")] pub workers: usize, + + /// Extract problems back to the test corpus. + #[arg(short, long = "extract-problems")] + pub extract_problems: bool, } impl Arguments { @@ -124,7 +133,7 @@ impl Default for Arguments { /// The Solidity compatible node implementation. /// /// This describes the solutions to be tested against on a high level. -#[derive(Clone, Debug, Eq, Hash, PartialEq, ValueEnum)] +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, ValueEnum, Serialize, Deserialize)] #[clap(rename_all = "lower")] pub enum TestingPlatform { /// The go-ethereum reference full node EVM implementation. @@ -132,3 +141,12 @@ pub enum TestingPlatform { /// The kitchensink runtime provides the PolkaVM (PVM) based node implentation. 
Kitchensink, } + +impl Display for TestingPlatform { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Geth => f.write_str("geth"), + Self::Kitchensink => f.write_str("revive"), + } + } +} diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index 0fdce9c..7d2f946 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -18,6 +18,7 @@ revive-dt-config = { workspace = true } revive-dt-format = { workspace = true } revive-dt-node = { workspace = true } revive-dt-node-interaction = { workspace = true } +revive-dt-report = { workspace = true } alloy = { workspace = true } anyhow = { workspace = true } diff --git a/crates/core/src/driver/mod.rs b/crates/core/src/driver/mod.rs index 6865a48..7f77511 100644 --- a/crates/core/src/driver/mod.rs +++ b/crates/core/src/driver/mod.rs @@ -8,6 +8,7 @@ use revive_dt_compiler::{Compiler, CompilerInput, SolidityCompiler}; use revive_dt_config::Arguments; use revive_dt_format::{input::Input, metadata::Metadata, mode::SolcMode}; use revive_dt_node_interaction::EthereumNode; +use revive_dt_report::reporter::{CompilationTask, Report, Span}; use revive_solc_json_interface::SolcStandardJsonOutput; use crate::Platform; @@ -19,6 +20,7 @@ type Contracts = HashMap< pub struct State<'a, T: Platform> { config: &'a Arguments, + span: Span, contracts: Contracts, deployed_contracts: HashMap, } @@ -27,37 +29,65 @@ impl<'a, T> State<'a, T> where T: Platform, { - pub fn new(config: &'a Arguments) -> Self { + pub fn new(config: &'a Arguments, span: Span) -> Self { Self { config, + span, contracts: Default::default(), deployed_contracts: Default::default(), } } + /// Returns a copy of the current span. 
+ fn span(&self) -> Span { + self.span + } + pub fn build_contracts(&mut self, mode: &SolcMode, metadata: &Metadata) -> anyhow::Result<()> { + let mut span = self.span(); + span.next_metadata( + metadata + .file_path + .as_ref() + .expect("metadata should have been read from a file") + .clone(), + ); + let Some(version) = mode.last_patch_version(&self.config.solc) else { - anyhow::bail!("unsupported solc version: {:?}", mode.solc_version); + anyhow::bail!("unsupported solc version: {:?}", &mode.solc_version); }; - let sources = metadata.contract_sources()?; - let base_path = metadata.directory()?.display().to_string(); + let mut compiler = Compiler::::new() + .base_path(metadata.directory()?.display().to_string()) + .solc_optimizer(mode.solc_optimize()); - let mut compiler = Compiler::::new().base_path(base_path.clone()); - for (file, _contract) in sources.values() { + for (file, _contract) in metadata.contract_sources()?.values() { log::debug!("contract source {}", file.display()); compiler = compiler.with_source(file)?; } + let mut task = CompilationTask { + json_input: compiler.input(), + json_output: None, + mode: mode.clone(), + compiler_version: format!("{}", &version), + error: None, + }; + let compiler_path = T::Compiler::get_compiler_executable(self.config, version)?; - - let output = compiler - .solc_optimizer(mode.solc_optimize()) - .try_build(compiler_path)?; - - self.contracts.insert(output.input, output.output); - - Ok(()) + match compiler.try_build(compiler_path) { + Ok(output) => { + task.json_output = Some(output.output.clone()); + task.error = output.error; + self.contracts.insert(output.input, output.output); + Report::compilation(span, T::config_id(), task); + Ok(()) + } + Err(error) => { + task.error = Some(error.to_string()); + Err(error) + } + } } pub fn execute_input( @@ -102,12 +132,12 @@ where } } - pub fn execute(&mut self) -> anyhow::Result<()> { + pub fn execute(&mut self, span: Span) -> anyhow::Result<()> { for mode in 
self.metadata.solc_modes() { - let mut leader_state = State::::new(self.config); + let mut leader_state = State::::new(self.config, span); leader_state.build_contracts(&mode, self.metadata)?; - let mut follower_state = State::::new(self.config); + let mut follower_state = State::::new(self.config, span); follower_state.build_contracts(&mode, self.metadata)?; for case in &self.metadata.cases { diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 0eda722..bc289a0 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -4,6 +4,7 @@ //! provides a helper utilty to execute tests. use revive_dt_compiler::{SolidityCompiler, revive_resolc, solc}; +use revive_dt_config::TestingPlatform; use revive_dt_node::geth; use revive_dt_node_interaction::EthereumNode; @@ -15,6 +16,9 @@ pub mod driver; pub trait Platform { type Blockchain: EthereumNode; type Compiler: SolidityCompiler; + + /// Returns the matching [TestingPlatform] of the [revive_dt_config::Arguments]. + fn config_id() -> TestingPlatform; } #[derive(Default)] @@ -23,6 +27,10 @@ pub struct Geth; impl Platform for Geth { type Blockchain = geth::Instance; type Compiler = solc::Solc; + + fn config_id() -> TestingPlatform { + TestingPlatform::Geth + } } #[derive(Default)] @@ -31,4 +39,8 @@ pub struct Kitchensink; impl Platform for Kitchensink { type Blockchain = geth::Instance; type Compiler = revive_resolc::Resolc; + + fn config_id() -> TestingPlatform { + TestingPlatform::Kitchensink + } } diff --git a/crates/core/src/main.rs b/crates/core/src/main.rs index c66bfe4..523d3ea 100644 --- a/crates/core/src/main.rs +++ b/crates/core/src/main.rs @@ -10,6 +10,7 @@ use revive_dt_core::{ }; use revive_dt_format::{corpus::Corpus, metadata::Metadata}; use revive_dt_node::pool::NodePool; +use revive_dt_report::reporter::{Report, Span}; use temp_dir::TempDir; static TEMP_DIR: LazyLock = LazyLock::new(|| TempDir::new().unwrap()); @@ -17,18 +18,15 @@ static TEMP_DIR: LazyLock = LazyLock::new(|| 
TempDir::new().unwrap()); fn main() -> anyhow::Result<()> { let args = init_cli()?; - let corpora = collect_corpora(&args)?; + for (corpus, tests) in collect_corpora(&args)? { + let span = Span::new(corpus, args.clone())?; - if let Some(platform) = &args.compile_only { - for tests in corpora.values() { - main_compile_only(&args, tests, platform)?; + match &args.compile_only { + Some(platform) => compile_corpus(&args, &tests, platform, span), + None => execute_corpus(&args, &tests, span)?, } - return Ok(()); - } - - for tests in corpora.values() { - main_execute_differential(&args, tests)?; + Report::save()?; } Ok(()) @@ -38,17 +36,26 @@ fn init_cli() -> anyhow::Result { env_logger::init(); let mut args = Arguments::parse(); + if args.corpus.is_empty() { anyhow::bail!("no test corpus specified"); } - if args.working_directory.is_none() { - args.temp_dir = Some(&TEMP_DIR); + + match args.working_directory.as_ref() { + Some(dir) => { + if !dir.exists() { + anyhow::bail!("workdir {} does not exist", dir.display()); + } + } + None => { + args.temp_dir = Some(&TEMP_DIR); + } } + log::info!("workdir: {}", args.directory().display()); ThreadPoolBuilder::new() .num_threads(args.workers) - .build_global() - .unwrap(); + .build_global()?; Ok(args) } @@ -67,7 +74,7 @@ fn collect_corpora(args: &Arguments) -> anyhow::Result anyhow::Result<()> { +fn execute_corpus(args: &Arguments, tests: &[Metadata], span: Span) -> anyhow::Result<()> { let leader_nodes = NodePool::new(args)?; let follower_nodes = NodePool::new(args)?; @@ -82,7 +89,7 @@ fn main_execute_differential(args: &Arguments, tests: &[Metadata]) -> anyhow::Re _ => unimplemented!(), }; - match driver.execute() { + match driver.execute(span) { Ok(build) => { log::info!( "metadata {} success", @@ -102,25 +109,19 @@ fn main_execute_differential(args: &Arguments, tests: &[Metadata]) -> anyhow::Re Ok(()) } -fn main_compile_only( - config: &Arguments, - tests: &[Metadata], - platform: &TestingPlatform, -) -> anyhow::Result<()> { 
+fn compile_corpus(config: &Arguments, tests: &[Metadata], platform: &TestingPlatform, span: Span) { tests.par_iter().for_each(|metadata| { for mode in &metadata.solc_modes() { match platform { TestingPlatform::Geth => { - let mut state = State::::new(config); + let mut state = State::::new(config, span); let _ = state.build_contracts(mode, metadata); } TestingPlatform::Kitchensink => { - let mut state = State::::new(config); + let mut state = State::::new(config, span); let _ = state.build_contracts(mode, metadata); } }; } }); - - Ok(()) } diff --git a/crates/format/src/corpus.rs b/crates/format/src/corpus.rs index 5f0fe5a..9caaa0e 100644 --- a/crates/format/src/corpus.rs +++ b/crates/format/src/corpus.rs @@ -3,11 +3,11 @@ use std::{ path::{Path, PathBuf}, }; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use crate::metadata::Metadata; -#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Hash)] +#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq, Hash)] pub struct Corpus { pub name: String, pub path: PathBuf, diff --git a/crates/format/src/mode.rs b/crates/format/src/mode.rs index 638bf67..a99a5e8 100644 --- a/crates/format/src/mode.rs +++ b/crates/format/src/mode.rs @@ -1,16 +1,16 @@ use semver::Version; -use serde::Deserialize; use serde::de::Deserializer; +use serde::{Deserialize, Serialize}; /// Specifies the compilation mode of the test artifact. -#[derive(Debug, Clone, Eq, PartialEq)] +#[derive(Hash, Debug, Clone, Eq, PartialEq)] pub enum Mode { Solidity(SolcMode), Unknown(String), } /// Specify Solidity specific compiler options. 
-#[derive(Debug, Default, Clone, Eq, PartialEq)] +#[derive(Hash, Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] pub struct SolcMode { pub solc_version: Option, solc_optimize: Option, diff --git a/crates/report/Cargo.toml b/crates/report/Cargo.toml new file mode 100644 index 0000000..a602f00 --- /dev/null +++ b/crates/report/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "revive-dt-report" +version.workspace = true +authors.workspace = true +license.workspace = true +edition.workspace = true +repository.workspace = true +rust-version.workspace = true + +[dependencies] +revive-dt-config = { workspace = true } +revive-dt-format = { workspace = true } + +anyhow = { workspace = true } +log = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +revive-solc-json-interface = { workspace = true } + diff --git a/crates/report/src/analyzer.rs b/crates/report/src/analyzer.rs new file mode 100644 index 0000000..81d2409 --- /dev/null +++ b/crates/report/src/analyzer.rs @@ -0,0 +1,94 @@ +//! The report analyzer enriches the raw report data. + +use serde::{Deserialize, Serialize}; + +use crate::reporter::CompilationTask; + +/// Provides insights into how well the compilers perform. +#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, PartialOrd)] +pub struct CompilerStatistics { + /// The sum of contracts observed. + pub n_contracts: usize, + /// The mean size of compiled contracts. + pub mean_code_size: usize, + /// The mean size of the optimized YUL IR. + pub mean_yul_size: usize, + /// Is a proxy because the YUL also contains a lot of comments. + pub yul_to_bytecode_size_ratio: f32, +} + +impl CompilerStatistics { + /// Cumulatively update the statistics with the next compiler task.
+ pub fn sample(&mut self, compilation_task: &CompilationTask) { + let Some(output) = &compilation_task.json_output else { + return; + }; + + let Some(contracts) = &output.contracts else { + return; + }; + + for (_solidity, contracts) in contracts.iter() { + for (_name, contract) in contracts.iter() { + let Some(evm) = &contract.evm else { + continue; + }; + let Some(deploy_code) = &evm.deployed_bytecode else { + continue; + }; + + // The EVM bytecode can be unlinked and thus is not necessarily a decodable hex + // string; for our statistics this is a good enough approximation. + let bytecode_size = deploy_code.object.len() / 2; + + let yul_size = contract + .ir_optimized + .as_ref() + .expect("if the contract has a deploy code it should also have the optimized IR") + .len(); + + self.update_sizes(bytecode_size, yul_size); + } + } + } + + /// Updates the size statistics cumulatively. + fn update_sizes(&mut self, bytecode_size: usize, yul_size: usize) { + let n_previous = self.n_contracts; + let n_current = self.n_contracts + 1; + + self.n_contracts = n_current; + + self.mean_code_size = (n_previous * self.mean_code_size + bytecode_size) / n_current; + self.mean_yul_size = (n_previous * self.mean_yul_size + yul_size) / n_current; + + if self.mean_code_size > 0 { + self.yul_to_bytecode_size_ratio = + self.mean_yul_size as f32 / self.mean_code_size as f32; + } + } +} + +#[cfg(test)] +mod tests { + use super::CompilerStatistics; + + #[test] + fn compiler_statistics() { + let mut received = CompilerStatistics::default(); + received.update_sizes(0, 0); + received.update_sizes(3, 37); + received.update_sizes(123, 456); + + let mean_code_size = 41; // rounding error from integer truncation + let mean_yul_size = 164; + let expected = CompilerStatistics { + n_contracts: 3, + mean_code_size, + mean_yul_size, + yul_to_bytecode_size_ratio: mean_yul_size as f32 / mean_code_size as f32, + }; + + assert_eq!(received, expected); + } +} diff --git a/crates/report/src/lib.rs
b/crates/report/src/lib.rs new file mode 100644 index 0000000..04ceeed --- /dev/null +++ b/crates/report/src/lib.rs @@ -0,0 +1,4 @@ +//! The revive differential tests reporting facility. + +pub mod analyzer; +pub mod reporter; diff --git a/crates/report/src/reporter.rs b/crates/report/src/reporter.rs new file mode 100644 index 0000000..02bd6a4 --- /dev/null +++ b/crates/report/src/reporter.rs @@ -0,0 +1,243 @@ +//! The reporter is the central place observing test execution by collecting data. +//! +//! The data collected gives useful insights into the outcome of the test run +//! and helps identifying and reproducing failing cases. + +use std::{ + collections::HashMap, + fs::{self, File, create_dir_all}, + path::PathBuf, + sync::{Mutex, OnceLock}, + time::{SystemTime, UNIX_EPOCH}, +}; + +use anyhow::Context; +use serde::{Deserialize, Serialize}; + +use revive_dt_config::{Arguments, TestingPlatform}; +use revive_dt_format::{corpus::Corpus, mode::SolcMode}; +use revive_solc_json_interface::{SolcStandardJsonInput, SolcStandardJsonOutput}; + +use crate::analyzer::CompilerStatistics; + +pub(crate) static REPORTER: OnceLock> = OnceLock::new(); + +/// The `Report` data structure stores all relevant information required for generating reports. +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +pub struct Report { + /// The configuration used during the test. + pub config: Arguments, + /// The observed test corpora. + pub corpora: Vec, + /// The observed test definitions. + pub metadata_files: Vec, + /// The observed compilation results. + pub compiler_results: HashMap>, + /// The observed compilation statistics. + pub compiler_statistics: HashMap, + /// The directory this report is serialized to. + #[serde(skip)] + directory: PathBuf, +} + +/// Contains a compiled contract. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CompilationTask { + /// The observed compiler input. + pub json_input: SolcStandardJsonInput, + /// The observed compiler output.
+ pub json_output: Option, + /// The observed compiler mode. + pub mode: SolcMode, + /// The observed compiler version. + pub compiler_version: String, + /// The observed error, if any. + pub error: Option, +} + +/// Represents a report about a compilation task. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct CompilationResult { + /// The observed compilation task. + pub compilation_task: CompilationTask, + /// The linked span. + pub span: Span, +} + +/// The [Span] struct indicates the context of what is being reported. +#[derive(Clone, Copy, Debug, Serialize, Deserialize)] +pub struct Span { + /// The corpus index this belongs to. + corpus: usize, + /// The metadata file this belongs to. + metadata_file: usize, + /// The index of the case definition this belongs to. + case: usize, + /// The index of the case input this belongs to. + input: usize, +} + +impl Report { + /// The file name where this report will be written to. + pub const FILE_NAME: &str = "report.json"; + + /// The [Span] is expected to initialize the reporter by providing the config. + const INITIALIZED_VIA_SPAN: &str = "requires a Span which initializes the reporter"; + + /// Create a new [Report]. + fn new(config: Arguments) -> anyhow::Result { + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap() + .as_millis(); + + let directory = config.directory().join("report").join(format!("{now}")); + if !directory.exists() { + create_dir_all(&directory)?; + } + + Ok(Self { + config, + directory, + ..Default::default() + }) + } + + /// Add a compilation task to the report. 
+ pub fn compilation(span: Span, platform: TestingPlatform, compilation_task: CompilationTask) { + let mut report = REPORTER + .get() + .expect(Report::INITIALIZED_VIA_SPAN) + .lock() + .unwrap(); + + report + .compiler_statistics + .entry(platform) + .or_default() + .sample(&compilation_task); + + report + .compiler_results + .entry(platform) + .or_default() + .push(CompilationResult { + compilation_task, + span, + }); + } + + /// Write the report to disk. + pub fn save() -> anyhow::Result<()> { + let Some(reporter) = REPORTER.get() else { + return Ok(()); + }; + let report = reporter.lock().unwrap(); + + if let Err(error) = report.write_to_file() { + anyhow::bail!("can not write report: {error}"); + } + + if report.config.extract_problems { + if let Err(error) = report.save_compiler_problems() { + anyhow::bail!("can not write compiler problems: {error}"); + } + } + + Ok(()) + } + + /// Write compiler problems to disk for later debugging. + pub fn save_compiler_problems(&self) -> anyhow::Result<()> { + for (platform, results) in self.compiler_results.iter() { + for result in results { + // ignore if there were no errors + if result.compilation_task.error.is_none() + && result + .compilation_task + .json_output + .as_ref() + .and_then(|output| output.errors.as_ref()) + .map(|errors| errors.is_empty()) + .unwrap_or(true) + { + continue; + } + + let path = &self.metadata_files[result.span.metadata_file] + .parent() + .unwrap() + .join(format!("{platform}_errors")); + if !path.exists() { + create_dir_all(path)?; + } + + if let Some(error) = result.compilation_task.error.as_ref() { + fs::write(path.join("compiler_error.txt"), error)?; + } + + if let Some(errors) = result.compilation_task.json_output.as_ref() { + let file = File::create(path.join("compiler_output.txt"))?; + serde_json::to_writer_pretty(file, &errors)?; + } + } + } + + Ok(()) + } + + fn write_to_file(&self) -> anyhow::Result<()> { + let path = self.directory.join(Self::FILE_NAME); + + let file = 
File::create(&path).context(path.display().to_string())?; + serde_json::to_writer_pretty(file, &self)?; + + log::info!("report written to: {}", path.display()); + + Ok(()) + } +} + +impl Span { + /// Create a new [Span] with case and input index at 0. + /// + /// Initializes the reporting facility on the first call. + pub fn new(corpus: Corpus, config: Arguments) -> anyhow::Result { + let report = Mutex::new(Report::new(config)?); + let mut reporter = REPORTER.get_or_init(|| report).lock().unwrap(); + reporter.corpora.push(corpus); + + Ok(Self { + corpus: reporter.corpora.len() - 1, + metadata_file: 0, + case: 0, + input: 0, + }) + } + + /// Advance to the next metadata file: Resets the case and input indices to 0. + pub fn next_metadata(&mut self, metadata_file: PathBuf) { + let mut reporter = REPORTER + .get() + .expect(Report::INITIALIZED_VIA_SPAN) + .lock() + .unwrap(); + + reporter.metadata_files.push(metadata_file); + + self.metadata_file = reporter.metadata_files.len() - 1; + self.case = 0; + self.input = 0; + } + + /// Advance to the next case: Increases the case index by one and resets the input index to 0. + pub fn next_case(&mut self) { + self.case += 1; + self.input = 0; + } + + /// Advance to the next input. + pub fn next_input(&mut self) { + self.input += 1; + } +} diff --git a/crates/solc-binaries/Cargo.toml b/crates/solc-binaries/Cargo.toml index e694d3b..a238617 100644 --- a/crates/solc-binaries/Cargo.toml +++ b/crates/solc-binaries/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "revive-dt-solc-binaries" -dependencies = "Download and cache solc binaries" +description = "Download and cache solc binaries" version.workspace = true authors.workspace = true license.workspace = true