diff --git a/Cargo.lock b/Cargo.lock index a04261a..7e0f75d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4588,13 +4588,20 @@ dependencies = [ name = "revive-dt-report" version = "0.1.0" dependencies = [ + "alloy-primitives", "anyhow", + "indexmap 2.10.0", + "paste", "revive-dt-common", "revive-dt-compiler", "revive-dt-config", "revive-dt-format", + "semver 1.0.26", "serde", "serde_json", + "serde_with", + "tokio", + "tracing", ] [[package]] @@ -4845,6 +4852,30 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "schemars" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "schemars" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + [[package]] name = "schnellru" version = "0.2.4" @@ -5075,15 +5106,17 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.12.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa" +checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5" dependencies = [ "base64 0.22.1", "chrono", "hex", "indexmap 1.9.3", "indexmap 2.10.0", + "schemars 0.9.0", + "schemars 1.0.4", "serde", "serde_derive", "serde_json", @@ -5093,9 +5126,9 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.12.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d00caa5193a3c8362ac2b73be6b9e768aa5a4b2f721d8f4b339600c3cb51f8e" +checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f" dependencies = [ "darling", "proc-macro2", diff 
--git a/Cargo.toml b/Cargo.toml index d8b4213..fb7cbcf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -34,6 +34,7 @@ futures = { version = "0.3.31" } hex = "0.4.3" regex = "1" moka = "0.12.10" +paste = "1.0.15" reqwest = { version = "0.12.15", features = ["json"] } once_cell = "1.21" semver = { version = "1.0", features = ["serde"] } @@ -43,6 +44,7 @@ serde_json = { version = "1.0", default-features = false, features = [ "std", "unbounded_depth", ] } +serde_with = { version = "3.14.0" } sha2 = { version = "0.10.9" } sp-core = "36.1.0" sp-runtime = "41.1.0" diff --git a/crates/common/src/macros/define_wrapper_type.rs b/crates/common/src/macros/define_wrapper_type.rs index 2196595..a5f1098 100644 --- a/crates/common/src/macros/define_wrapper_type.rs +++ b/crates/common/src/macros/define_wrapper_type.rs @@ -1,12 +1,23 @@ #[macro_export] macro_rules! impl_for_wrapper { (Display, $ident: ident) => { + #[automatically_derived] impl std::fmt::Display for $ident { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(&self.0, f) } } }; + (FromStr, $ident: ident) => { + #[automatically_derived] + impl std::str::FromStr for $ident { + type Err = anyhow::Error; + + fn from_str(s: &str) -> anyhow::Result { + s.parse().map(Self).map_err(Into::into) + } + } + }; } /// Defines wrappers around types. diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index 86ba19e..1f62dd1 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -124,6 +124,14 @@ pub struct Arguments { /// Controls if the compilation cache should be invalidated or not. #[arg(short, long)] pub invalidate_compilation_cache: bool, + + /// Controls if the compiler input is included in the final report. + #[clap(long = "report.include-compiler-input")] + pub report_include_compiler_input: bool, + + /// Controls if the compiler output is included in the final report. 
+ #[clap(long = "report.include-compiler-output")] + pub report_include_compiler_output: bool, } impl Arguments { diff --git a/crates/core/src/cached_compiler.rs b/crates/core/src/cached_compiler.rs index b94b1f3..188befb 100644 --- a/crates/core/src/cached_compiler.rs +++ b/crates/core/src/cached_compiler.rs @@ -9,7 +9,7 @@ use std::{ use futures::FutureExt; use revive_dt_common::iterators::FilesWithExtensionIterator; -use revive_dt_compiler::{Compiler, CompilerOutput, Mode, SolidityCompiler}; +use revive_dt_compiler::{Compiler, CompilerInput, CompilerOutput, Mode, SolidityCompiler}; use revive_dt_config::Arguments; use revive_dt_format::metadata::{ContractIdent, ContractInstance, Metadata}; @@ -35,6 +35,7 @@ impl CachedCompiler { } /// Compiles or gets the compilation artifacts from the cache. + #[allow(clippy::too_many_arguments)] #[instrument( level = "debug", skip_all, @@ -52,6 +53,19 @@ impl CachedCompiler { mode: &Mode, config: &Arguments, deployed_libraries: Option<&HashMap>, + compilation_success_report_callback: impl Fn( + Version, + PathBuf, + bool, + Option, + CompilerOutput, + ) + Clone, + compilation_failure_report_callback: impl Fn( + Option, + Option, + Option, + String, + ), ) -> Result<(CompilerOutput, Version)> { static CACHE_KEY_LOCK: Lazy>>>> = Lazy::new(Default::default); @@ -61,10 +75,21 @@ impl CachedCompiler { config, compiler_version_or_requirement, ) - .await?; + .await + .inspect_err(|err| { + compilation_failure_report_callback(None, None, None, err.to_string()) + })?; let compiler_version = ::new(compiler_path.clone()) .version() - .await?; + .await + .inspect_err(|err| { + compilation_failure_report_callback( + None, + Some(compiler_path.clone()), + None, + err.to_string(), + ) + })?; let cache_key = CacheKey { platform_key: P::config_id().to_string(), @@ -74,13 +99,19 @@ impl CachedCompiler { }; let compilation_callback = || { + let compiler_path = compiler_path.clone(); + let compiler_version = compiler_version.clone(); + let 
compilation_success_report_callback = compilation_success_report_callback.clone(); async move { compile_contracts::

( metadata.directory()?, compiler_path, + compiler_version, metadata.files_to_compile()?, mode, deployed_libraries, + compilation_success_report_callback, + compilation_failure_report_callback, ) .map(|compilation_result| compilation_result.map(CacheValue::new)) .await @@ -125,10 +156,19 @@ impl CachedCompiler { }; let _guard = mutex.lock().await; - self.0 - .get_or_insert_with(&cache_key, compilation_callback) - .await - .map(|value| value.compiler_output)? + match self.0.get(&cache_key).await { + Some(cache_value) => { + compilation_success_report_callback( + compiler_version.clone(), + compiler_path, + true, + None, + cache_value.compiler_output.clone(), + ); + cache_value.compiler_output + } + None => compilation_callback().await?.compiler_output, + } } }; @@ -136,19 +176,34 @@ impl CachedCompiler { } } +#[allow(clippy::too_many_arguments)] async fn compile_contracts( metadata_directory: impl AsRef, compiler_path: impl AsRef, + compiler_version: Version, mut files_to_compile: impl Iterator, mode: &Mode, deployed_libraries: Option<&HashMap>, + compilation_success_report_callback: impl Fn( + Version, + PathBuf, + bool, + Option, + CompilerOutput, + ), + compilation_failure_report_callback: impl Fn( + Option, + Option, + Option, + String, + ), ) -> Result { let all_sources_in_dir = FilesWithExtensionIterator::new(metadata_directory.as_ref()) .with_allowed_extension("sol") .with_use_cached_fs(true) .collect::>(); - Compiler::::new() + let compiler = Compiler::::new() .with_allow_path(metadata_directory) // Handling the modes .with_optimization(mode.optimize_setting) @@ -156,6 +211,14 @@ async fn compile_contracts( // Adding the contract sources to the compiler. .try_then(|compiler| { files_to_compile.try_fold(compiler, |compiler, path| compiler.with_source(path)) + }) + .inspect_err(|err| { + compilation_failure_report_callback( + Some(compiler_version.clone()), + Some(compiler_path.as_ref().to_path_buf()), + None, + err.to_string(), + ) })? 
// Adding the deployed libraries to the compiler. .then(|compiler| { @@ -171,9 +234,28 @@ async fn compile_contracts( .fold(compiler, |compiler, (ident, address, path)| { compiler.with_library(path, ident.as_str(), *address) }) - }) - .try_build(compiler_path) + }); + + let compiler_input = compiler.input(); + let compiler_output = compiler + .try_build(compiler_path.as_ref()) .await + .inspect_err(|err| { + compilation_failure_report_callback( + Some(compiler_version.clone()), + Some(compiler_path.as_ref().to_path_buf()), + Some(compiler_input.clone()), + err.to_string(), + ) + })?; + compilation_success_report_callback( + compiler_version, + compiler_path.as_ref().to_path_buf(), + false, + Some(compiler_input), + compiler_output.clone(), + ); + Ok(compiler_output) } struct ArtifactsCache { diff --git a/crates/core/src/driver/mod.rs b/crates/core/src/driver/mod.rs index 4912b9a..d06ea52 100644 --- a/crates/core/src/driver/mod.rs +++ b/crates/core/src/driver/mod.rs @@ -22,6 +22,7 @@ use anyhow::Context; use futures::TryStreamExt; use indexmap::IndexMap; use revive_dt_format::traits::{ResolutionContext, ResolverApi}; +use revive_dt_report::ExecutionSpecificReporter; use semver::Version; use revive_dt_format::case::Case; @@ -51,6 +52,9 @@ pub struct CaseState { /// Stores the version used for the current case. compiler_version: Version, + /// The execution reporter. 
+ execution_reporter: ExecutionSpecificReporter, + phantom: PhantomData, } @@ -62,12 +66,14 @@ where compiler_version: Version, compiled_contracts: HashMap>, deployed_contracts: HashMap, + execution_reporter: ExecutionSpecificReporter, ) -> Self { Self { compiled_contracts, deployed_contracts, variables: Default::default(), compiler_version, + execution_reporter, phantom: PhantomData, } } @@ -718,6 +724,8 @@ where instance_address = ?address, "Deployed contract" ); + self.execution_reporter + .report_contract_deployed_event(contract_instance.clone(), address)?; self.deployed_contracts.insert( contract_instance.clone(), diff --git a/crates/core/src/main.rs b/crates/core/src/main.rs index f74101d..43ae3be 100644 --- a/crates/core/src/main.rs +++ b/crates/core/src/main.rs @@ -18,8 +18,12 @@ use futures::stream; use futures::{Stream, StreamExt}; use indexmap::IndexMap; use revive_dt_node_interaction::EthereumNode; +use revive_dt_report::{ + NodeDesignation, ReportAggregator, Reporter, ReporterEvent, TestCaseStatus, + TestSpecificReporter, TestSpecifier, +}; use temp_dir::TempDir; -use tokio::{sync::mpsc, try_join}; +use tokio::{join, try_join}; use tracing::{debug, info, info_span, instrument}; use tracing_appender::non_blocking::WorkerGuard; use tracing_subscriber::{EnvFilter, FmtSubscriber}; @@ -39,7 +43,6 @@ use revive_dt_format::{ mode::ParsedMode, }; use revive_dt_node::{Node, pool::NodePool}; -use revive_dt_report::reporter::{Report, Span}; use crate::cached_compiler::CachedCompiler; @@ -53,11 +56,9 @@ struct Test<'a> { mode: Mode, case_idx: CaseIdx, case: &'a Case, + reporter: TestSpecificReporter, } -/// This represents the results that we gather from running test cases. -type CaseResult = Result; - fn main() -> anyhow::Result<()> { let (args, _guard) = init_cli()?; info!( @@ -69,20 +70,39 @@ fn main() -> anyhow::Result<()> { "Differential testing tool has been initialized" ); - let body = async { - for (corpus, tests) in collect_corpora(&args)? 
{ - let span = Span::new(corpus, args.clone())?; - match &args.compile_only { - Some(platform) => compile_corpus(&args, &tests, platform, span).await, - None => execute_corpus(&args, &tests, span).await?, + let (reporter, report_aggregator_task) = ReportAggregator::new(args.clone()).into_task(); + + let number_of_threads = args.number_of_threads; + let body = async move { + let tests = collect_corpora(&args)? + .into_iter() + .inspect(|(corpus, _)| { + reporter + .report_corpus_file_discovery_event(corpus.clone()) + .expect("Can't fail") + }) + .flat_map(|(_, files)| files.into_iter()) + .inspect(|metadata_file| { + reporter + .report_metadata_file_discovery_event( + metadata_file.metadata_file_path.clone(), + metadata_file.content.clone(), + ) + .expect("Can't fail") + }) + .collect::>(); + + match &args.compile_only { + Some(platform) => { + compile_corpus(&args, &tests, platform, reporter, report_aggregator_task).await } - Report::save()?; + None => execute_corpus(&args, &tests, reporter, report_aggregator_task).await?, } Ok(()) }; tokio::runtime::Builder::new_multi_thread() - .worker_threads(args.number_of_threads) + .worker_threads(number_of_threads) .enable_all() .build() .expect("Failed building the Runtime") @@ -153,7 +173,8 @@ fn collect_corpora(args: &Arguments) -> anyhow::Result( args: &Arguments, metadata_files: &[MetadataFile], - span: Span, + reporter: Reporter, + report_aggregator_task: impl Future>, ) -> anyhow::Result<()> where L: Platform, @@ -161,13 +182,12 @@ where L::Blockchain: revive_dt_node::Node + Send + Sync + 'static, F::Blockchain: revive_dt_node::Node + Send + Sync + 'static, { - let (report_tx, report_rx) = mpsc::unbounded_channel::<(Test<'_>, CaseResult)>(); + let tests = prepare_tests::(args, metadata_files, reporter.clone()); + let driver_task = start_driver_task::(args, tests).await?; + let cli_reporting_task = start_cli_reporting_task(reporter); - let tests = prepare_tests::(args, metadata_files); - let driver_task = 
start_driver_task::(args, tests, span, report_tx).await?; - let status_reporter_task = start_reporter_task(report_rx); - - tokio::join!(status_reporter_task, driver_task); + let (_, _, rtn) = tokio::join!(cli_reporting_task, driver_task, report_aggregator_task); + rtn?; Ok(()) } @@ -175,6 +195,7 @@ where fn prepare_tests<'a, L, F>( args: &Arguments, metadata_files: &'a [MetadataFile], + reporter: Reporter, ) -> impl Stream> where L: Platform, @@ -201,27 +222,25 @@ where .into_iter() .map(move |mode| (metadata_file, case_idx, case, mode)) }) - .fold( - IndexMap::<_, BTreeMap<_, Vec<_>>>::new(), - |mut map, (metadata_file, case_idx, case, mode)| { - let test = Test { - metadata: metadata_file, - metadata_file_path: metadata_file.metadata_file_path.as_path(), - mode: mode.clone(), - case_idx: CaseIdx::new(case_idx), - case, - }; - map.entry(mode) - .or_default() - .entry(test.case_idx) - .or_default() - .push(test); - map - }, - ) - .into_values() - .flatten() - .flat_map(|(_, value)| value.into_iter()) + .map(move |(metadata_file, case_idx, case, mode)| Test { + metadata: metadata_file, + metadata_file_path: metadata_file.metadata_file_path.as_path(), + mode: mode.clone(), + case_idx: CaseIdx::new(case_idx), + case, + reporter: reporter.test_specific_reporter(Arc::new(TestSpecifier { + solc_mode: mode.clone(), + metadata_file_path: metadata_file.metadata_file_path.clone(), + case_idx: CaseIdx::new(case_idx), + })), + }) + .inspect(|test| { + test.reporter + .report_test_case_discovery_event() + .expect("Can't fail") + }) + .collect::>() + .into_iter() // Filter the test out if the leader and follower do not support the target. 
.filter(|test| { let leader_support = @@ -236,7 +255,30 @@ where leader_support, follower_support, "Target is not supported, throwing metadata file out" - ) + ); + test + .reporter + .report_test_ignored_event( + "Either the leader or the follower do not support the target desired by the test", + IndexMap::from_iter([ + ( + "test_desired_targets".to_string(), + serde_json::to_value(test.metadata.targets.as_ref()) + .expect("Can't fail") + ), + ( + "leader_support".to_string(), + serde_json::to_value(leader_support) + .expect("Can't fail") + ), + ( + "follower_support".to_string(), + serde_json::to_value(follower_support) + .expect("Can't fail") + ) + ]) + ) + .expect("Can't fail"); } is_allowed @@ -248,6 +290,13 @@ where file_path = %test.metadata.relative_path().display(), "Metadata file is ignored, throwing case out" ); + test + .reporter + .report_test_ignored_event( + "Metadata file is ignored, therefore all cases are ignored", + IndexMap::new(), + ) + .expect("Can't fail"); false } else { true @@ -261,6 +310,13 @@ where case_idx = %test.case_idx, "Case is ignored, throwing case out" ); + test + .reporter + .report_test_ignored_event( + "Case is ignored", + IndexMap::new(), + ) + .expect("Can't fail"); false } else { true @@ -283,6 +339,29 @@ where follower_compatibility, "EVM Version is incompatible, throwing case out" ); + test + .reporter + .report_test_ignored_event( + "EVM version is incompatible with either the leader or the follower", + IndexMap::from_iter([ + ( + "test_desired_evm_version".to_string(), + serde_json::to_value(test.metadata.required_evm_version) + .expect("Can't fail") + ), + ( + "leader_compatibility".to_string(), + serde_json::to_value(leader_compatibility) + .expect("Can't fail") + ), + ( + "follower_compatibility".to_string(), + serde_json::to_value(follower_compatibility) + .expect("Can't fail") + ) + ]) + ) + .expect("Can't fail"); } is_allowed @@ -311,6 +390,24 @@ where follower_support, "Compilers do not support this, throwing 
case out" ); + test + .reporter + .report_test_ignored_event( + "Compilers do not support this mode either for the leader or for the follower.", + IndexMap::from_iter([ + ( + "leader_support".to_string(), + serde_json::to_value(leader_support) + .expect("Can't fail") + ), + ( + "follower_support".to_string(), + serde_json::to_value(follower_support) + .expect("Can't fail") + ) + ]) + ) + .expect("Can't fail"); } is_allowed.then_some(test) @@ -336,8 +433,6 @@ async fn does_compiler_support_mode( async fn start_driver_task<'a, L, F>( args: &Arguments, tests: impl Stream>, - span: Span, - report_tx: mpsc::UnboundedSender<(Test<'a>, CaseResult)>, ) -> anyhow::Result> where L: Platform, @@ -345,6 +440,8 @@ where L::Blockchain: revive_dt_node::Node + Send + Sync + 'static, F::Blockchain: revive_dt_node::Node + Send + Sync + 'static, { + info!("Starting driver task"); + let leader_nodes = Arc::new(NodePool::::new(args)?); let follower_nodes = Arc::new(NodePool::::new(args)?); let number_concurrent_tasks = args.number_of_concurrent_tasks(); @@ -368,103 +465,121 @@ where move |test| { let leader_nodes = leader_nodes.clone(); let follower_nodes = follower_nodes.clone(); - let report_tx = report_tx.clone(); let cached_compiler = cached_compiler.clone(); async move { let leader_node = leader_nodes.round_robbin(); let follower_node = follower_nodes.round_robbin(); + test.reporter + .report_leader_node_assigned_event( + leader_node.id(), + L::config_id(), + leader_node.connection_string(), + ) + .expect("Can't fail"); + test.reporter + .report_follower_node_assigned_event( + follower_node.id(), + F::config_id(), + follower_node.connection_string(), + ) + .expect("Can't fail"); + + let reporter = test.reporter.clone(); let result = handle_case_driver::( - test.metadata_file_path, - test.metadata, - test.case_idx, - test.case, - test.mode.clone(), + test, args, cached_compiler, leader_node, follower_node, - span, ) .await; - report_tx - .send((test, result)) - .expect("Failed to 
send report"); + match result { + Ok(steps_executed) => reporter + .report_test_succeeded_event(steps_executed) + .expect("Can't fail"), + Err(error) => reporter + .report_test_failed_event(error.to_string()) + .expect("Can't fail"), + } } }, )) } -async fn start_reporter_task(mut report_rx: mpsc::UnboundedReceiver<(Test<'_>, CaseResult)>) { +#[allow(clippy::uninlined_format_args)] +#[allow(irrefutable_let_patterns)] +async fn start_cli_reporting_task(reporter: Reporter) { + let mut aggregator_events_rx = reporter.subscribe().await.expect("Can't fail"); + drop(reporter); + let start = Instant::now(); const GREEN: &str = "\x1B[32m"; const RED: &str = "\x1B[31m"; - const COLOUR_RESET: &str = "\x1B[0m"; + const GREY: &str = "\x1B[90m"; + const COLOR_RESET: &str = "\x1B[0m"; const BOLD: &str = "\x1B[1m"; const BOLD_RESET: &str = "\x1B[22m"; let mut number_of_successes = 0; let mut number_of_failures = 0; - let mut failures = vec![]; - // Wait for reports to come from our test runner. When the channel closes, this ends. 
let mut buf = BufWriter::new(stderr()); - while let Some((test, case_result)) = report_rx.recv().await { - let case_name = test.case.name.as_deref().unwrap_or("unnamed_case"); - let case_idx = test.case_idx; - let test_path = test.metadata_file_path.display(); - let test_mode = test.mode.clone(); + while let Ok(event) = aggregator_events_rx.recv().await { + let ReporterEvent::MetadataFileSolcModeCombinationExecutionCompleted { + metadata_file_path, + mode, + case_status, + } = event + else { + continue; + }; - match case_result { - Ok(_inputs) => { - number_of_successes += 1; - let _ = writeln!( + let _ = writeln!(buf, "{} - {}", mode, metadata_file_path.display()); + for (case_idx, case_status) in case_status.into_iter() { + let _ = write!(buf, "\tCase Index {case_idx:>3}: "); + let _ = match case_status { + TestCaseStatus::Succeeded { steps_executed } => { + number_of_successes += 1; + writeln!( + buf, + "{}{}Case Succeeded{}{} - Steps Executed: {}", + GREEN, BOLD, BOLD_RESET, COLOR_RESET, steps_executed + ) + } + TestCaseStatus::Failed { reason } => { + number_of_failures += 1; + writeln!( + buf, + "{}{}Case Failed{}{} - Reason: {}", + RED, BOLD, BOLD_RESET, COLOR_RESET, reason + ) + } + TestCaseStatus::Ignored { reason, .. } => writeln!( buf, - "{GREEN}Case Succeeded:{COLOUR_RESET} {test_path} -> {case_name}:{case_idx} (mode: {test_mode})" - ); - } - Err(err) => { - number_of_failures += 1; - let _ = writeln!( - buf, - "{RED}Case Failed:{COLOUR_RESET} {test_path} -> {case_name}:{case_idx} (mode: {test_mode})" - ); - failures.push((test, err)); - } - } - } - - let _ = writeln!(buf,); - let elapsed = start.elapsed(); - - // Now, log the failures with more complete errors at the bottom, like `cargo test` does, so - // that we don't have to scroll through the entire output to find them. 
- if !failures.is_empty() { - let _ = writeln!(buf, "{BOLD}Failures:{BOLD_RESET}\n"); - - for failure in failures { - let (test, err) = failure; - let case_name = test.case.name.as_deref().unwrap_or("unnamed_case"); - let case_idx = test.case_idx; - let test_path = test.metadata_file_path.display(); - let test_mode = test.mode.clone(); - - let _ = writeln!( - buf, - "---- {RED}Case Failed:{COLOUR_RESET} {test_path} -> {case_name}:{case_idx} (mode: {test_mode}) ----\n\n{err}\n" - ); + "{}{}Case Ignored{}{} - Reason: {}", + GREY, BOLD, BOLD_RESET, COLOR_RESET, reason + ), + }; } + let _ = writeln!(buf); } // Summary at the end. let _ = writeln!( buf, - "{} cases: {GREEN}{number_of_successes}{COLOUR_RESET} cases succeeded, {RED}{number_of_failures}{COLOUR_RESET} cases failed in {} seconds", + "{} cases: {}{}{} cases succeeded, {}{}{} cases failed in {} seconds", number_of_successes + number_of_failures, - elapsed.as_secs() + GREEN, + number_of_successes, + COLOR_RESET, + RED, + number_of_failures, + COLOR_RESET, + start.elapsed().as_secs() ); } @@ -474,25 +589,20 @@ async fn start_reporter_task(mut report_rx: mpsc::UnboundedReceiver<(Test<'_>, C name = "Handling Case" skip_all, fields( - metadata_file_path = %metadata.relative_path().display(), - mode = %mode, - %case_idx, - case_name = case.name.as_deref().unwrap_or("Unnamed Case"), + metadata_file_path = %test.metadata.relative_path().display(), + mode = %test.mode, + case_idx = %test.case_idx, + case_name = test.case.name.as_deref().unwrap_or("Unnamed Case"), leader_node = leader_node.id(), follower_node = follower_node.id(), ) )] async fn handle_case_driver( - metadata_file_path: &Path, - metadata: &MetadataFile, - case_idx: CaseIdx, - case: &Case, - mode: Mode, + test: Test<'_>, config: &Arguments, cached_compiler: Arc, leader_node: &L::Blockchain, follower_node: &F::Blockchain, - _: Span, ) -> anyhow::Result where L: Platform, @@ -500,6 +610,13 @@ where L::Blockchain: revive_dt_node::Node + Send + Sync + 
'static, F::Blockchain: revive_dt_node::Node + Send + Sync + 'static, { + let leader_reporter = test + .reporter + .execution_specific_reporter(leader_node.id(), NodeDesignation::Leader); + let follower_reporter = test + .reporter + .execution_specific_reporter(follower_node.id(), NodeDesignation::Follower); + let ( ( CompilerOutput { @@ -514,14 +631,69 @@ where _, ), ) = try_join!( - cached_compiler.compile_contracts::(metadata, metadata_file_path, &mode, config, None), - cached_compiler.compile_contracts::(metadata, metadata_file_path, &mode, config, None) + cached_compiler.compile_contracts::( + test.metadata, + test.metadata_file_path, + &test.mode, + config, + None, + |compiler_version, compiler_path, is_cached, compiler_input, compiler_output| { + leader_reporter + .report_pre_link_contracts_compilation_succeeded_event( + compiler_version, + compiler_path, + is_cached, + compiler_input, + compiler_output, + ) + .expect("Can't fail") + }, + |compiler_version, compiler_path, compiler_input, failure_reason| { + leader_reporter + .report_pre_link_contracts_compilation_failed_event( + compiler_version, + compiler_path, + compiler_input, + failure_reason, + ) + .expect("Can't fail") + } + ), + cached_compiler.compile_contracts::( + test.metadata, + test.metadata_file_path, + &test.mode, + config, + None, + |compiler_version, compiler_path, is_cached, compiler_input, compiler_output| { + follower_reporter + .report_pre_link_contracts_compilation_succeeded_event( + compiler_version, + compiler_path, + is_cached, + compiler_input, + compiler_output, + ) + .expect("Can't fail") + }, + |compiler_version, compiler_path, compiler_input, failure_reason| { + follower_reporter + .report_pre_link_contracts_compilation_failed_event( + compiler_version, + compiler_path, + compiler_input, + failure_reason, + ) + .expect("Can't fail") + } + ) )?; let mut leader_deployed_libraries = None::>; let mut follower_deployed_libraries = None::>; - let mut contract_sources = 
metadata.contract_sources()?; - for library_instance in metadata + let mut contract_sources = test.metadata.contract_sources()?; + for library_instance in test + .metadata .libraries .iter() .flatten() @@ -561,7 +733,8 @@ where // Getting the deployer address from the cases themselves. This is to ensure that we're // doing the deployments from different accounts and therefore we're not slowed down by // the nonce. - let deployer_address = case + let deployer_address = test + .case .steps .iter() .filter_map(|step| match step { @@ -620,6 +793,24 @@ where ), ); } + if let Some(ref leader_deployed_libraries) = leader_deployed_libraries { + leader_reporter.report_libraries_deployed_event( + leader_deployed_libraries + .clone() + .into_iter() + .map(|(key, (_, address, _))| (key, address)) + .collect::>(), + )?; + } + if let Some(ref follower_deployed_libraries) = follower_deployed_libraries { + follower_reporter.report_libraries_deployed_event( + follower_deployed_libraries + .clone() + .into_iter() + .map(|(key, (_, address, _))| (key, address)) + .collect::>(), + )?; + } let ( ( @@ -636,18 +827,60 @@ where ), ) = try_join!( cached_compiler.compile_contracts::( - metadata, - metadata_file_path, - &mode, + test.metadata, + test.metadata_file_path, + &test.mode, config, - leader_deployed_libraries.as_ref() + leader_deployed_libraries.as_ref(), + |compiler_version, compiler_path, is_cached, compiler_input, compiler_output| { + leader_reporter + .report_post_link_contracts_compilation_succeeded_event( + compiler_version, + compiler_path, + is_cached, + compiler_input, + compiler_output, + ) + .expect("Can't fail") + }, + |compiler_version, compiler_path, compiler_input, failure_reason| { + leader_reporter + .report_post_link_contracts_compilation_failed_event( + compiler_version, + compiler_path, + compiler_input, + failure_reason, + ) + .expect("Can't fail") + } ), cached_compiler.compile_contracts::( - metadata, - metadata_file_path, - &mode, + test.metadata, + 
test.metadata_file_path, + &test.mode, config, - follower_deployed_libraries.as_ref() + follower_deployed_libraries.as_ref(), + |compiler_version, compiler_path, is_cached, compiler_input, compiler_output| { + follower_reporter + .report_post_link_contracts_compilation_succeeded_event( + compiler_version, + compiler_path, + is_cached, + compiler_input, + compiler_output, + ) + .expect("Can't fail") + }, + |compiler_version, compiler_path, compiler_input, failure_reason| { + follower_reporter + .report_post_link_contracts_compilation_failed_event( + compiler_version, + compiler_path, + compiler_input, + failure_reason, + ) + .expect("Can't fail") + } ) )?; @@ -655,16 +888,18 @@ where leader_compiler_version, leader_post_link_contracts, leader_deployed_libraries.unwrap_or_default(), + leader_reporter, ); let follower_state = CaseState::::new( follower_compiler_version, follower_post_link_contracts, follower_deployed_libraries.unwrap_or_default(), + follower_reporter, ); let mut driver = CaseDriver::::new( - metadata, - case, + test.metadata, + test.case, leader_node, follower_node, leader_state, @@ -679,14 +914,15 @@ where async fn execute_corpus( args: &Arguments, tests: &[MetadataFile], - span: Span, + reporter: Reporter, + report_aggregator_task: impl Future>, ) -> anyhow::Result<()> { match (&args.leader, &args.follower) { (TestingPlatform::Geth, TestingPlatform::Kitchensink) => { - run_driver::(args, tests, span).await? + run_driver::(args, tests, reporter, report_aggregator_task).await? } (TestingPlatform::Geth, TestingPlatform::Geth) => { - run_driver::(args, tests, span).await? + run_driver::(args, tests, reporter, report_aggregator_task).await? 
} _ => unimplemented!(), } @@ -698,7 +934,8 @@ async fn compile_corpus( config: &Arguments, tests: &[MetadataFile], platform: &TestingPlatform, - _: Span, + _: Reporter, + report_aggregator_task: impl Future>, ) { let tests = tests.iter().flat_map(|metadata| { metadata @@ -713,8 +950,8 @@ async fn compile_corpus( .map(Arc::new) .expect("Failed to create the cached compiler"); - futures::stream::iter(tests) - .for_each_concurrent(None, |(metadata, mode)| { + let compilation_task = + futures::stream::iter(tests).for_each_concurrent(None, |(metadata, mode)| { let cached_compiler = cached_compiler.clone(); async move { @@ -727,6 +964,8 @@ async fn compile_corpus( &mode, config, None, + |_, _, _, _, _| {}, + |_, _, _, _| {}, ) .await; } @@ -738,11 +977,13 @@ async fn compile_corpus( &mode, config, None, + |_, _, _, _, _| {}, + |_, _, _, _| {}, ) .await; } } } - }) - .await; + }); + let _ = join!(compilation_task, report_aggregator_task); } diff --git a/crates/format/src/case.rs b/crates/format/src/case.rs index 2ef9ead..c98ac46 100644 --- a/crates/format/src/case.rs +++ b/crates/format/src/case.rs @@ -71,6 +71,7 @@ impl Case { define_wrapper_type!( /// A wrapper type for the index of test cases found in metadata file. 
- #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] - pub struct CaseIdx(usize) impl Display; + #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] + #[serde(transparent)] + pub struct CaseIdx(usize) impl Display, FromStr; ); diff --git a/crates/node/src/pool.rs b/crates/node/src/pool.rs index 015c004..d195988 100644 --- a/crates/node/src/pool.rs +++ b/crates/node/src/pool.rs @@ -9,6 +9,7 @@ use revive_dt_common::cached_fs::read_to_string; use anyhow::Context; use revive_dt_config::Arguments; +use tracing::info; use crate::Node; @@ -63,6 +64,16 @@ where fn spawn_node(args: &Arguments, genesis: String) -> anyhow::Result { let mut node = T::new(args); + info!( + id = node.id(), + connection_string = node.connection_string(), + "Spawning node" + ); node.spawn(genesis)?; + info!( + id = node.id(), + connection_string = node.connection_string(), + "Spawned node" + ); Ok(node) } diff --git a/crates/report/Cargo.toml b/crates/report/Cargo.toml index 0e6e896..eae7fa7 100644 --- a/crates/report/Cargo.toml +++ b/crates/report/Cargo.toml @@ -13,9 +13,16 @@ revive-dt-config = { workspace = true } revive-dt-format = { workspace = true } revive-dt-compiler = { workspace = true } +alloy-primitives = { workspace = true } anyhow = { workspace = true } +paste = { workspace = true } +indexmap = { workspace = true, features = ["serde"] } +semver = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } +serde_with = { workspace = true } +tokio = { workspace = true } +tracing = { workspace = true } [lints] workspace = true diff --git a/crates/report/src/aggregator.rs b/crates/report/src/aggregator.rs new file mode 100644 index 0000000..1f7af7e --- /dev/null +++ b/crates/report/src/aggregator.rs @@ -0,0 +1,550 @@ +//! Implementation of the report aggregator task which consumes the events sent by the various +//! reporters and combines them into a single unified report. 
+ +use std::{ + collections::{BTreeMap, BTreeSet, HashMap, HashSet}, + fs::OpenOptions, + path::PathBuf, + time::{SystemTime, UNIX_EPOCH}, +}; + +use alloy_primitives::Address; +use anyhow::Result; +use indexmap::IndexMap; +use revive_dt_compiler::{CompilerInput, CompilerOutput, Mode}; +use revive_dt_config::{Arguments, TestingPlatform}; +use revive_dt_format::{case::CaseIdx, corpus::Corpus, metadata::ContractInstance}; +use semver::Version; +use serde::Serialize; +use serde_with::{DisplayFromStr, serde_as}; +use tokio::sync::{ + broadcast::{Sender, channel}, + mpsc::{UnboundedReceiver, UnboundedSender, unbounded_channel}, +}; +use tracing::debug; + +use crate::*; + +pub struct ReportAggregator { + /* Internal Report State */ + report: Report, + remaining_cases: HashMap>>, + /* Channels */ + runner_tx: Option>, + runner_rx: UnboundedReceiver, + listener_tx: Sender, +} + +impl ReportAggregator { + pub fn new(config: Arguments) -> Self { + let (runner_tx, runner_rx) = unbounded_channel::(); + let (listener_tx, _) = channel::(1024); + Self { + report: Report::new(config), + remaining_cases: Default::default(), + runner_tx: Some(runner_tx), + runner_rx, + listener_tx, + } + } + + pub fn into_task(mut self) -> (Reporter, impl Future>) { + let reporter = self + .runner_tx + .take() + .map(Into::into) + .expect("Can't fail since this can only be called once"); + (reporter, async move { self.aggregate().await }) + } + + async fn aggregate(mut self) -> Result<()> { + debug!("Starting to aggregate report"); + + while let Some(event) = self.runner_rx.recv().await { + debug!(?event, "Received Event"); + match event { + RunnerEvent::SubscribeToEvents(event) => { + self.handle_subscribe_to_events_event(*event); + } + RunnerEvent::CorpusFileDiscovery(event) => { + self.handle_corpus_file_discovered_event(*event) + } + RunnerEvent::MetadataFileDiscovery(event) => { + self.handle_metadata_file_discovery_event(*event); + } + RunnerEvent::TestCaseDiscovery(event) => { + 
self.handle_test_case_discovery(*event); + } + RunnerEvent::TestSucceeded(event) => { + self.handle_test_succeeded_event(*event); + } + RunnerEvent::TestFailed(event) => { + self.handle_test_failed_event(*event); + } + RunnerEvent::TestIgnored(event) => { + self.handle_test_ignored_event(*event); + } + RunnerEvent::LeaderNodeAssigned(event) => { + self.handle_leader_node_assigned_event(*event); + } + RunnerEvent::FollowerNodeAssigned(event) => { + self.handle_follower_node_assigned_event(*event); + } + RunnerEvent::PreLinkContractsCompilationSucceeded(event) => { + self.handle_pre_link_contracts_compilation_succeeded_event(*event) + } + RunnerEvent::PostLinkContractsCompilationSucceeded(event) => { + self.handle_post_link_contracts_compilation_succeeded_event(*event) + } + RunnerEvent::PreLinkContractsCompilationFailed(event) => { + self.handle_pre_link_contracts_compilation_failed_event(*event) + } + RunnerEvent::PostLinkContractsCompilationFailed(event) => { + self.handle_post_link_contracts_compilation_failed_event(*event) + } + RunnerEvent::LibrariesDeployed(event) => { + self.handle_libraries_deployed_event(*event); + } + RunnerEvent::ContractDeployed(event) => { + self.handle_contract_deployed_event(*event); + } + } + } + debug!("Report aggregation completed"); + + let file_name = { + let current_timestamp = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs(); + let mut file_name = current_timestamp.to_string(); + file_name.push_str(".json"); + file_name + }; + let file_path = self.report.config.directory().join(file_name); + let file = OpenOptions::new() + .create(true) + .write(true) + .truncate(true) + .read(false) + .open(file_path)?; + serde_json::to_writer_pretty(file, &self.report)?; + + Ok(()) + } + + fn handle_subscribe_to_events_event(&self, event: SubscribeToEventsEvent) { + let _ = event.tx.send(self.listener_tx.subscribe()); + } + + fn handle_corpus_file_discovered_event(&mut self, event: CorpusFileDiscoveryEvent) { + 
+ self.report.corpora.push(event.corpus); + } + + fn handle_metadata_file_discovery_event(&mut self, event: MetadataFileDiscoveryEvent) { + self.report.metadata_files.insert(event.path.clone()); + } + + fn handle_test_case_discovery(&mut self, event: TestCaseDiscoveryEvent) { + self.remaining_cases + .entry(event.test_specifier.metadata_file_path.clone().into()) + .or_default() + .entry(event.test_specifier.solc_mode.clone()) + .or_default() + .insert(event.test_specifier.case_idx); + } + + fn handle_test_succeeded_event(&mut self, event: TestSucceededEvent) { + // Remove this from the set of cases we're tracking since it has completed. + self.remaining_cases + .entry(event.test_specifier.metadata_file_path.clone().into()) + .or_default() + .entry(event.test_specifier.solc_mode.clone()) + .or_default() + .remove(&event.test_specifier.case_idx); + + // Add information on the fact that the case succeeded to the report. + let test_case_report = self.test_case_report(&event.test_specifier); + test_case_report.status = Some(TestCaseStatus::Succeeded { + steps_executed: event.steps_executed, + }); + self.handle_post_test_case_status_update(&event.test_specifier); + } + + fn handle_test_failed_event(&mut self, event: TestFailedEvent) { + // Remove this from the set of cases we're tracking since it has completed. + self.remaining_cases + .entry(event.test_specifier.metadata_file_path.clone().into()) + .or_default() + .entry(event.test_specifier.solc_mode.clone()) + .or_default() + .remove(&event.test_specifier.case_idx); + + // Add information on the fact that the case failed to the report. + let test_case_report = self.test_case_report(&event.test_specifier); + test_case_report.status = Some(TestCaseStatus::Failed { + reason: event.reason, + }); + self.handle_post_test_case_status_update(&event.test_specifier); + } + + fn handle_test_ignored_event(&mut self, event: TestIgnoredEvent) { + // Remove this from the set of cases we're tracking since it has completed. 
+ self.remaining_cases + .entry(event.test_specifier.metadata_file_path.clone().into()) + .or_default() + .entry(event.test_specifier.solc_mode.clone()) + .or_default() + .remove(&event.test_specifier.case_idx); + + // Add information on the fact that the case was ignored to the report. + let test_case_report = self.test_case_report(&event.test_specifier); + test_case_report.status = Some(TestCaseStatus::Ignored { + reason: event.reason, + additional_fields: event.additional_fields, + }); + self.handle_post_test_case_status_update(&event.test_specifier); + } + + fn handle_post_test_case_status_update(&mut self, specifier: &TestSpecifier) { + let remaining_cases = self + .remaining_cases + .entry(specifier.metadata_file_path.clone().into()) + .or_default() + .entry(specifier.solc_mode.clone()) + .or_default(); + if !remaining_cases.is_empty() { + return; + } + + let case_status = self + .report + .test_case_information + .entry(specifier.metadata_file_path.clone().into()) + .or_default() + .entry(specifier.solc_mode.clone()) + .or_default() + .iter() + .map(|(case_idx, case_report)| { + ( + *case_idx, + case_report.status.clone().expect("Can't be uninitialized"), + ) + }) + .collect::>(); + let event = ReporterEvent::MetadataFileSolcModeCombinationExecutionCompleted { + metadata_file_path: specifier.metadata_file_path.clone().into(), + mode: specifier.solc_mode.clone(), + case_status, + }; + + // According to the documentation on send, the sending fails if there are no more receiver + // handles. Therefore, this isn't an error that we want to bubble up or anything. If we fail + // to send then we ignore the error. 
+ let _ = self.listener_tx.send(event); + } + + fn handle_leader_node_assigned_event(&mut self, event: LeaderNodeAssignedEvent) { + let execution_information = self.execution_information(&ExecutionSpecifier { + test_specifier: event.test_specifier, + node_id: event.id, + node_designation: NodeDesignation::Leader, + }); + execution_information.node = Some(TestCaseNodeInformation { + id: event.id, + platform: event.platform, + connection_string: event.connection_string, + }); + } + + fn handle_follower_node_assigned_event(&mut self, event: FollowerNodeAssignedEvent) { + let execution_information = self.execution_information(&ExecutionSpecifier { + test_specifier: event.test_specifier, + node_id: event.id, + node_designation: NodeDesignation::Follower, + }); + execution_information.node = Some(TestCaseNodeInformation { + id: event.id, + platform: event.platform, + connection_string: event.connection_string, + }); + } + + fn handle_pre_link_contracts_compilation_succeeded_event( + &mut self, + event: PreLinkContractsCompilationSucceededEvent, + ) { + let include_input = self.report.config.report_include_compiler_input; + let include_output = self.report.config.report_include_compiler_output; + + let execution_information = self.execution_information(&event.execution_specifier); + + let compiler_input = if include_input { + event.compiler_input + } else { + None + }; + let compiler_output = if include_output { + Some(event.compiler_output) + } else { + None + }; + + execution_information.pre_link_compilation_status = Some(CompilationStatus::Success { + is_cached: event.is_cached, + compiler_version: event.compiler_version, + compiler_path: event.compiler_path, + compiler_input, + compiler_output, + }); + } + + fn handle_post_link_contracts_compilation_succeeded_event( + &mut self, + event: PostLinkContractsCompilationSucceededEvent, + ) { + let include_input = self.report.config.report_include_compiler_input; + let include_output = 
self.report.config.report_include_compiler_output; + + let execution_information = self.execution_information(&event.execution_specifier); + + let compiler_input = if include_input { + event.compiler_input + } else { + None + }; + let compiler_output = if include_output { + Some(event.compiler_output) + } else { + None + }; + + execution_information.post_link_compilation_status = Some(CompilationStatus::Success { + is_cached: event.is_cached, + compiler_version: event.compiler_version, + compiler_path: event.compiler_path, + compiler_input, + compiler_output, + }); + } + + fn handle_pre_link_contracts_compilation_failed_event( + &mut self, + event: PreLinkContractsCompilationFailedEvent, + ) { + let include_input = self.report.config.report_include_compiler_input; + + let execution_information = self.execution_information(&event.execution_specifier); + + let compiler_input = if include_input { + event.compiler_input + } else { + None + }; + + execution_information.pre_link_compilation_status = Some(CompilationStatus::Failure { + reason: event.reason, + compiler_version: event.compiler_version, + compiler_path: event.compiler_path, + compiler_input, + }); + } + + fn handle_post_link_contracts_compilation_failed_event( + &mut self, + event: PostLinkContractsCompilationFailedEvent, + ) { + let include_input = self.report.config.report_include_compiler_input; + + let execution_information = self.execution_information(&event.execution_specifier); + + let compiler_input = if include_input { + event.compiler_input + } else { + None + }; + + execution_information.post_link_compilation_status = Some(CompilationStatus::Failure { + reason: event.reason, + compiler_version: event.compiler_version, + compiler_path: event.compiler_path, + compiler_input, + }); + } + + fn handle_libraries_deployed_event(&mut self, event: LibrariesDeployedEvent) { + self.execution_information(&event.execution_specifier) + .deployed_libraries = Some(event.libraries); + } + + fn 
handle_contract_deployed_event(&mut self, event: ContractDeployedEvent) { + self.execution_information(&event.execution_specifier) + .deployed_contracts + .get_or_insert_default() + .insert(event.contract_instance, event.address); + } + + fn test_case_report(&mut self, specifier: &TestSpecifier) -> &mut TestCaseReport { + self.report + .test_case_information + .entry(specifier.metadata_file_path.clone().into()) + .or_default() + .entry(specifier.solc_mode.clone()) + .or_default() + .entry(specifier.case_idx) + .or_default() + } + + fn execution_information( + &mut self, + specifier: &ExecutionSpecifier, + ) -> &mut ExecutionInformation { + let test_case_report = self.test_case_report(&specifier.test_specifier); + match specifier.node_designation { + NodeDesignation::Leader => test_case_report + .leader_execution_information + .get_or_insert_default(), + NodeDesignation::Follower => test_case_report + .follower_execution_information + .get_or_insert_default(), + } + } +} + +#[serde_as] +#[derive(Clone, Debug, Serialize)] +pub struct Report { + /// The configuration that the tool was started up with. + pub config: Arguments, + /// The platform of the leader chain. + pub leader_platform: TestingPlatform, + /// The platform of the follower chain. + pub follower_platform: TestingPlatform, + /// The list of corpus files that the tool found. + pub corpora: Vec, + /// The list of metadata files that were found by the tool. + pub metadata_files: BTreeSet, + /// Information relating to each test case. 
+ #[serde_as(as = "BTreeMap<_, HashMap>>")] + pub test_case_information: + BTreeMap>>, +} + +impl Report { + pub fn new(config: Arguments) -> Self { + Self { + leader_platform: config.leader, + follower_platform: config.follower, + config, + corpora: Default::default(), + metadata_files: Default::default(), + test_case_information: Default::default(), + } + } +} + +#[derive(Clone, Debug, Serialize, Default)] +pub struct TestCaseReport { + /// Information on the status of the test case and whether it succeeded, failed, or was ignored. + #[serde(skip_serializing_if = "Option::is_none")] + pub status: Option, + /// Information related to the execution on the leader. + #[serde(skip_serializing_if = "Option::is_none")] + pub leader_execution_information: Option, + /// Information related to the execution on the follower. + #[serde(skip_serializing_if = "Option::is_none")] + pub follower_execution_information: Option, +} + +/// Information related to the status of the test. Could be that the test succeeded, failed, or that +/// it was ignored. +#[derive(Clone, Debug, Serialize)] +#[serde(tag = "status")] +pub enum TestCaseStatus { + /// The test case succeeded. + Succeeded { + /// The number of steps of the case that were executed. + steps_executed: usize, + }, + /// The test case failed. + Failed { + /// The reason for the failure of the test case. + reason: String, + }, + /// The test case was ignored. This variant carries information related to why it was ignored. + Ignored { + /// The reason behind the test case being ignored. + reason: String, + /// Additional fields that describe more information on why the test case is ignored. + #[serde(flatten)] + additional_fields: IndexMap, + }, +} + +/// Information related to the leader or follower node that's being used to execute the step. +#[derive(Clone, Debug, Serialize)] +pub struct TestCaseNodeInformation { + /// The ID of the node that this case is being executed on. + pub id: usize, + /// The platform of the node. 
+ pub platform: TestingPlatform, + /// The connection string of the node. + pub connection_string: String, +} + +/// Execution information tied to the leader or the follower. +#[derive(Clone, Debug, Default, Serialize)] +pub struct ExecutionInformation { + /// Information related to the node assigned to this test case. + #[serde(skip_serializing_if = "Option::is_none")] + pub node: Option, + /// Information on the pre-link compiled contracts. + #[serde(skip_serializing_if = "Option::is_none")] + pub pre_link_compilation_status: Option, + /// Information on the post-link compiled contracts. + #[serde(skip_serializing_if = "Option::is_none")] + pub post_link_compilation_status: Option, + /// Information on the deployed libraries. + #[serde(skip_serializing_if = "Option::is_none")] + pub deployed_libraries: Option>, + /// Information on the deployed contracts. + #[serde(skip_serializing_if = "Option::is_none")] + pub deployed_contracts: Option>, +} + +/// Information related to compilation +#[derive(Clone, Debug, Serialize)] +#[serde(tag = "status")] +pub enum CompilationStatus { + /// The compilation was successful. + Success { + /// A flag with information on whether the compilation artifacts were cached or not. + is_cached: bool, + /// The version of the compiler used to compile the contracts. + compiler_version: Version, + /// The path of the compiler used to compile the contracts. + compiler_path: PathBuf, + /// The input provided to the compiler to compile the contracts. This is only included if + /// the appropriate flag is set in the CLI configuration and if the contracts were not + /// cached and the compiler was invoked. + #[serde(skip_serializing_if = "Option::is_none")] + compiler_input: Option, + /// The output of the compiler. This is only included if the appropriate flag is set in the + /// CLI configurations. + #[serde(skip_serializing_if = "Option::is_none")] + compiler_output: Option, + }, + /// The compilation failed. 
+ Failure { + /// The failure reason. + reason: String, + /// The version of the compiler used to compile the contracts. + #[serde(skip_serializing_if = "Option::is_none")] + compiler_version: Option, + /// The path of the compiler used to compile the contracts. + #[serde(skip_serializing_if = "Option::is_none")] + compiler_path: Option, + /// The input provided to the compiler to compile the contracts. This is only included if + /// the appropriate flag is set in the CLI configuration and if the contracts were not + /// cached and the compiler was invoked. + #[serde(skip_serializing_if = "Option::is_none")] + compiler_input: Option, + }, +} diff --git a/crates/report/src/analyzer.rs b/crates/report/src/analyzer.rs deleted file mode 100644 index 52fd360..0000000 --- a/crates/report/src/analyzer.rs +++ /dev/null @@ -1,81 +0,0 @@ -//! The report analyzer enriches the raw report data. - -use revive_dt_compiler::CompilerOutput; -use serde::{Deserialize, Serialize}; - -use crate::reporter::CompilationTask; - -/// Provides insights into how well the compilers perform. -#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, PartialOrd)] -pub struct CompilerStatistics { - /// The sum of contracts observed. - pub n_contracts: usize, - /// The mean size of compiled contracts. - pub mean_code_size: usize, - /// The mean size of the optimized YUL IR. - pub mean_yul_size: usize, - /// Is a proxy because the YUL also contains a lot of comments. - pub yul_to_bytecode_size_ratio: f32, -} - -impl CompilerStatistics { - /// Cumulatively update the statistics with the next compiler task. 
- pub fn sample(&mut self, compilation_task: &CompilationTask) { - let Some(CompilerOutput { contracts }) = &compilation_task.json_output else { - return; - }; - - for (_solidity, contracts) in contracts.iter() { - for (_name, (bytecode, _)) in contracts.iter() { - // The EVM bytecode can be unlinked and thus is not necessarily a decodable hex - // string; for our statistics this is a good enough approximation. - let bytecode_size = bytecode.len() / 2; - - // TODO: for the time being we set the yul_size to be zero. We need to change this - // when we overhaul the reporting. - - self.update_sizes(bytecode_size, 0); - } - } - } - - /// Updates the size statistics cumulatively. - fn update_sizes(&mut self, bytecode_size: usize, yul_size: usize) { - let n_previous = self.n_contracts; - let n_current = self.n_contracts + 1; - - self.n_contracts = n_current; - - self.mean_code_size = (n_previous * self.mean_code_size + bytecode_size) / n_current; - self.mean_yul_size = (n_previous * self.mean_yul_size + yul_size) / n_current; - - if self.mean_code_size > 0 { - self.yul_to_bytecode_size_ratio = - self.mean_yul_size as f32 / self.mean_code_size as f32; - } - } -} - -#[cfg(test)] -mod tests { - use super::CompilerStatistics; - - #[test] - fn compiler_statistics() { - let mut received = CompilerStatistics::default(); - received.update_sizes(0, 0); - received.update_sizes(3, 37); - received.update_sizes(123, 456); - - let mean_code_size = 41; // rounding error from integer truncation - let mean_yul_size = 164; - let expected = CompilerStatistics { - n_contracts: 3, - mean_code_size, - mean_yul_size, - yul_to_bytecode_size_ratio: mean_yul_size as f32 / mean_code_size as f32, - }; - - assert_eq!(received, expected); - } -} diff --git a/crates/report/src/common.rs b/crates/report/src/common.rs new file mode 100644 index 0000000..5b6e3f1 --- /dev/null +++ b/crates/report/src/common.rs @@ -0,0 +1,43 @@ +//! Common types and functions used throughout the crate. 
+ +use std::{path::PathBuf, sync::Arc}; + +use revive_dt_common::define_wrapper_type; +use revive_dt_compiler::Mode; +use revive_dt_format::{case::CaseIdx, input::StepIdx}; +use serde::{Deserialize, Serialize}; + +define_wrapper_type!( + #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] + #[serde(transparent)] + pub struct MetadataFilePath(PathBuf); +); + +/// An absolute specifier for a test. +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct TestSpecifier { + pub solc_mode: Mode, + pub metadata_file_path: PathBuf, + pub case_idx: CaseIdx, +} + +/// An absolute path for a test that also includes information about the node that it's assigned to +/// and whether it's the leader or follower. +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct ExecutionSpecifier { + pub test_specifier: Arc, + pub node_id: usize, + pub node_designation: NodeDesignation, +} + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum NodeDesignation { + Leader, + Follower, +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct StepExecutionSpecifier { + pub execution_specifier: Arc, + pub step_idx: StepIdx, +} diff --git a/crates/report/src/lib.rs b/crates/report/src/lib.rs index 04ceeed..7a6aa78 100644 --- a/crates/report/src/lib.rs +++ b/crates/report/src/lib.rs @@ -1,4 +1,11 @@ -//! The revive differential tests reporting facility. +//! This crate implements the reporting infrastructure for the differential testing tool. -pub mod analyzer; -pub mod reporter; +mod aggregator; +mod common; +mod reporter_event; +mod runner_event; + +pub use aggregator::*; +pub use common::*; +pub use reporter_event::*; +pub use runner_event::*; diff --git a/crates/report/src/reporter.rs b/crates/report/src/reporter.rs deleted file mode 100644 index 5313ac7..0000000 --- a/crates/report/src/reporter.rs +++ /dev/null @@ -1,234 +0,0 @@ -//! The reporter is the central place observing test execution by collecting data. -//! -//! 
The data collected gives useful insights into the outcome of the test run -//! and helps identifying and reproducing failing cases. - -use std::{ - collections::HashMap, - fs::{self, File, create_dir_all}, - path::PathBuf, - sync::{Mutex, OnceLock}, - time::{SystemTime, UNIX_EPOCH}, -}; - -use anyhow::Context; -use serde::Serialize; - -use revive_dt_common::types::Mode; -use revive_dt_compiler::{CompilerInput, CompilerOutput}; -use revive_dt_config::{Arguments, TestingPlatform}; -use revive_dt_format::corpus::Corpus; - -use crate::analyzer::CompilerStatistics; - -pub(crate) static REPORTER: OnceLock> = OnceLock::new(); - -/// The `Report` datastructure stores all relevant inforamtion required for generating reports. -#[derive(Clone, Debug, Default, Serialize)] -pub struct Report { - /// The configuration used during the test. - pub config: Arguments, - /// The observed test corpora. - pub corpora: Vec, - /// The observed test definitions. - pub metadata_files: Vec, - /// The observed compilation results. - pub compiler_results: HashMap>, - /// The observed compilation statistics. - pub compiler_statistics: HashMap, - /// The file name this is serialized to. - #[serde(skip)] - directory: PathBuf, -} - -/// Contains a compiled contract. -#[derive(Clone, Debug, Serialize)] -pub struct CompilationTask { - /// The observed compiler input. - pub json_input: CompilerInput, - /// The observed compiler output. - pub json_output: Option, - /// The observed compiler mode. - pub mode: Mode, - /// The observed compiler version. - pub compiler_version: String, - /// The observed error, if any. - pub error: Option, -} - -/// Represents a report about a compilation task. -#[derive(Clone, Debug, Serialize)] -pub struct CompilationResult { - /// The observed compilation task. - pub compilation_task: CompilationTask, - /// The linked span. - pub span: Span, -} - -/// The [Span] struct indicates the context of what is being reported. 
-#[derive(Clone, Copy, Debug, Serialize)] -pub struct Span { - /// The corpus index this belongs to. - corpus: usize, - /// The metadata file this belongs to. - metadata_file: usize, - /// The index of the case definition this belongs to. - case: usize, - /// The index of the case input this belongs to. - input: usize, -} - -impl Report { - /// The file name where this report will be written to. - pub const FILE_NAME: &str = "report.json"; - - /// The [Span] is expected to initialize the reporter by providing the config. - const INITIALIZED_VIA_SPAN: &str = "requires a Span which initializes the reporter"; - - /// Create a new [Report]. - fn new(config: Arguments) -> anyhow::Result { - let now = SystemTime::now() - .duration_since(UNIX_EPOCH) - .unwrap() - .as_millis(); - - let directory = config.directory().join("report").join(format!("{now}")); - if !directory.exists() { - create_dir_all(&directory)?; - } - - Ok(Self { - config, - directory, - ..Default::default() - }) - } - - /// Add a compilation task to the report. - pub fn compilation(span: Span, platform: TestingPlatform, compilation_task: CompilationTask) { - let mut report = REPORTER - .get() - .expect(Report::INITIALIZED_VIA_SPAN) - .lock() - .unwrap(); - - report - .compiler_statistics - .entry(platform) - .or_default() - .sample(&compilation_task); - - report - .compiler_results - .entry(platform) - .or_default() - .push(CompilationResult { - compilation_task, - span, - }); - } - - /// Write the report to disk. - pub fn save() -> anyhow::Result<()> { - let Some(reporter) = REPORTER.get() else { - return Ok(()); - }; - let report = reporter.lock().unwrap(); - - if let Err(error) = report.write_to_file() { - anyhow::bail!("can not write report: {error}"); - } - - if report.config.extract_problems { - if let Err(error) = report.save_compiler_problems() { - anyhow::bail!("can not write compiler problems: {error}"); - } - } - - Ok(()) - } - - /// Write compiler problems to disk for later debugging. 
- pub fn save_compiler_problems(&self) -> anyhow::Result<()> { - for (platform, results) in self.compiler_results.iter() { - for result in results { - // ignore if there were no errors - if result.compilation_task.error.is_none() { - continue; - } - - let path = &self.metadata_files[result.span.metadata_file] - .parent() - .unwrap() - .join(format!("{platform}_errors")); - if !path.exists() { - create_dir_all(path)?; - } - - if let Some(error) = result.compilation_task.error.as_ref() { - fs::write(path.join("compiler_error.txt"), error)?; - } - - if let Some(errors) = result.compilation_task.json_output.as_ref() { - let file = File::create(path.join("compiler_output.txt"))?; - serde_json::to_writer_pretty(file, &errors)?; - } - } - } - - Ok(()) - } - - fn write_to_file(&self) -> anyhow::Result<()> { - let path = self.directory.join(Self::FILE_NAME); - - let file = File::create(&path).context(path.display().to_string())?; - serde_json::to_writer_pretty(file, &self)?; - - Ok(()) - } -} - -impl Span { - /// Create a new [Span] with case and input index at 0. - /// - /// Initializes the reporting facility on the first call. - pub fn new(corpus: Corpus, config: Arguments) -> anyhow::Result { - let report = Mutex::new(Report::new(config)?); - let mut reporter = REPORTER.get_or_init(|| report).lock().unwrap(); - reporter.corpora.push(corpus); - - Ok(Self { - corpus: reporter.corpora.len() - 1, - metadata_file: 0, - case: 0, - input: 0, - }) - } - - /// Advance to the next metadata file: Resets the case input index to 0. - pub fn next_metadata(&mut self, metadata_file: PathBuf) { - let mut reporter = REPORTER - .get() - .expect(Report::INITIALIZED_VIA_SPAN) - .lock() - .unwrap(); - - reporter.metadata_files.push(metadata_file); - - self.metadata_file = reporter.metadata_files.len() - 1; - self.case = 0; - self.input = 0; - } - - /// Advance to the next case: Increas the case index by one and resets the input index to 0. 
- pub fn next_case(&mut self) { - self.case += 1; - self.input = 0; - } - - /// Advance to the next input. - pub fn next_input(&mut self) { - self.input += 1; - } -} diff --git a/crates/report/src/reporter_event.rs b/crates/report/src/reporter_event.rs new file mode 100644 index 0000000..0211e64 --- /dev/null +++ b/crates/report/src/reporter_event.rs @@ -0,0 +1,22 @@ +//! A reporter event sent by the report aggregator to the various listeners. + +use std::collections::BTreeMap; + +use revive_dt_compiler::Mode; +use revive_dt_format::case::CaseIdx; + +use crate::{MetadataFilePath, TestCaseStatus}; + +#[derive(Clone, Debug)] +pub enum ReporterEvent { + /// An event sent by the reporter once an entire metadata file and solc mode combination has + /// finished execution. + MetadataFileSolcModeCombinationExecutionCompleted { + /// The path of the metadata file. + metadata_file_path: MetadataFilePath, + /// The Solc mode that this metadata file was executed in. + mode: Mode, + /// The status of each one of the cases. + case_status: BTreeMap, + }, +} diff --git a/crates/report/src/runner_event.rs b/crates/report/src/runner_event.rs new file mode 100644 index 0000000..ddb67f9 --- /dev/null +++ b/crates/report/src/runner_event.rs @@ -0,0 +1,640 @@ +//! The types associated with the events sent by the runner to the reporter. +#![allow(dead_code)] + +use std::{collections::BTreeMap, path::PathBuf, sync::Arc}; + +use alloy_primitives::Address; +use indexmap::IndexMap; +use revive_dt_compiler::{CompilerInput, CompilerOutput}; +use revive_dt_config::TestingPlatform; +use revive_dt_format::metadata::Metadata; +use revive_dt_format::{corpus::Corpus, metadata::ContractInstance}; +use semver::Version; +use tokio::sync::{broadcast, oneshot}; + +use crate::{ExecutionSpecifier, ReporterEvent, TestSpecifier, common::MetadataFilePath}; + +macro_rules! 
__report_gen_emit_test_specific { + ( + $ident:ident, + $variant_ident:ident, + $skip_field:ident; + $( $bname:ident : $bty:ty, )* + ; + $( $aname:ident : $aty:ty, )* + ) => { + paste::paste! { + pub fn [< report_ $variant_ident:snake _event >]( + &self + $(, $bname: impl Into<$bty> )* + $(, $aname: impl Into<$aty> )* + ) -> anyhow::Result<()> { + self.report([< $variant_ident Event >] { + $skip_field: self.test_specifier.clone() + $(, $bname: $bname.into() )* + $(, $aname: $aname.into() )* + }) + } + } + }; +} + +macro_rules! __report_gen_emit_test_specific_by_parse { + ( + $ident:ident, + $variant_ident:ident, + $skip_field:ident; + $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* + ) => { + __report_gen_emit_test_specific!( + $ident, $variant_ident, $skip_field; + $( $bname : $bty, )* ; $( $aname : $aty, )* + ); + }; +} + +macro_rules! __report_gen_scan_before { + ( + $ident:ident, $variant_ident:ident; + $( $before:ident : $bty:ty, )* + ; + test_specifier : $skip_ty:ty, + $( $after:ident : $aty:ty, )* + ; + ) => { + __report_gen_emit_test_specific_by_parse!( + $ident, $variant_ident, test_specifier; + $( $before : $bty, )* ; $( $after : $aty, )* + ); + }; + ( + $ident:ident, $variant_ident:ident; + $( $before:ident : $bty:ty, )* + ; + $name:ident : $ty:ty, $( $after:ident : $aty:ty, )* + ; + ) => { + __report_gen_scan_before!( + $ident, $variant_ident; + $( $before : $bty, )* $name : $ty, + ; + $( $after : $aty, )* + ; + ); + }; + ( + $ident:ident, $variant_ident:ident; + $( $before:ident : $bty:ty, )* + ; + ; + ) => {}; +} + +macro_rules! __report_gen_for_variant { + ( + $ident:ident, + $variant_ident:ident; + ) => {}; + ( + $ident:ident, + $variant_ident:ident; + $( $field_ident:ident : $field_ty:ty ),+ $(,)? + ) => { + __report_gen_scan_before!( + $ident, $variant_ident; + ; + $( $field_ident : $field_ty, )* + ; + ); + }; +} + +macro_rules! 
__report_gen_emit_execution_specific { + ( + $ident:ident, + $variant_ident:ident, + $skip_field:ident; + $( $bname:ident : $bty:ty, )* + ; + $( $aname:ident : $aty:ty, )* + ) => { + paste::paste! { + pub fn [< report_ $variant_ident:snake _event >]( + &self + $(, $bname: impl Into<$bty> )* + $(, $aname: impl Into<$aty> )* + ) -> anyhow::Result<()> { + self.report([< $variant_ident Event >] { + $skip_field: self.execution_specifier.clone() + $(, $bname: $bname.into() )* + $(, $aname: $aname.into() )* + }) + } + } + }; +} + +macro_rules! __report_gen_emit_execution_specific_by_parse { + ( + $ident:ident, + $variant_ident:ident, + $skip_field:ident; + $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* + ) => { + __report_gen_emit_execution_specific!( + $ident, $variant_ident, $skip_field; + $( $bname : $bty, )* ; $( $aname : $aty, )* + ); + }; +} + +macro_rules! __report_gen_scan_before_exec { + ( + $ident:ident, $variant_ident:ident; + $( $before:ident : $bty:ty, )* + ; + execution_specifier : $skip_ty:ty, + $( $after:ident : $aty:ty, )* + ; + ) => { + __report_gen_emit_execution_specific_by_parse!( + $ident, $variant_ident, execution_specifier; + $( $before : $bty, )* ; $( $after : $aty, )* + ); + }; + ( + $ident:ident, $variant_ident:ident; + $( $before:ident : $bty:ty, )* + ; + $name:ident : $ty:ty, $( $after:ident : $aty:ty, )* + ; + ) => { + __report_gen_scan_before_exec!( + $ident, $variant_ident; + $( $before : $bty, )* $name : $ty, + ; + $( $after : $aty, )* + ; + ); + }; + ( + $ident:ident, $variant_ident:ident; + $( $before:ident : $bty:ty, )* + ; + ; + ) => {}; +} + +macro_rules! __report_gen_for_variant_exec { + ( + $ident:ident, + $variant_ident:ident; + ) => {}; + ( + $ident:ident, + $variant_ident:ident; + $( $field_ident:ident : $field_ty:ty ),+ $(,)? + ) => { + __report_gen_scan_before_exec!( + $ident, $variant_ident; + ; + $( $field_ident : $field_ty, )* + ; + ); + }; +} + +macro_rules! 
__report_gen_emit_step_execution_specific { + ( + $ident:ident, + $variant_ident:ident, + $skip_field:ident; + $( $bname:ident : $bty:ty, )* + ; + $( $aname:ident : $aty:ty, )* + ) => { + paste::paste! { + pub fn [< report_ $variant_ident:snake _event >]( + &self + $(, $bname: impl Into<$bty> )* + $(, $aname: impl Into<$aty> )* + ) -> anyhow::Result<()> { + self.report([< $variant_ident Event >] { + $skip_field: self.step_specifier.clone() + $(, $bname: $bname.into() )* + $(, $aname: $aname.into() )* + }) + } + } + }; +} + +macro_rules! __report_gen_emit_step_execution_specific_by_parse { + ( + $ident:ident, + $variant_ident:ident, + $skip_field:ident; + $( $bname:ident : $bty:ty, )* ; $( $aname:ident : $aty:ty, )* + ) => { + __report_gen_emit_step_execution_specific!( + $ident, $variant_ident, $skip_field; + $( $bname : $bty, )* ; $( $aname : $aty, )* + ); + }; +} + +macro_rules! __report_gen_scan_before_step { + ( + $ident:ident, $variant_ident:ident; + $( $before:ident : $bty:ty, )* + ; + step_specifier : $skip_ty:ty, + $( $after:ident : $aty:ty, )* + ; + ) => { + __report_gen_emit_step_execution_specific_by_parse!( + $ident, $variant_ident, step_specifier; + $( $before : $bty, )* ; $( $after : $aty, )* + ); + }; + ( + $ident:ident, $variant_ident:ident; + $( $before:ident : $bty:ty, )* + ; + $name:ident : $ty:ty, $( $after:ident : $aty:ty, )* + ; + ) => { + __report_gen_scan_before_step!( + $ident, $variant_ident; + $( $before : $bty, )* $name : $ty, + ; + $( $after : $aty, )* + ; + ); + }; + ( + $ident:ident, $variant_ident:ident; + $( $before:ident : $bty:ty, )* + ; + ; + ) => {}; +} + +macro_rules! __report_gen_for_variant_step { + ( + $ident:ident, + $variant_ident:ident; + ) => {}; + ( + $ident:ident, + $variant_ident:ident; + $( $field_ident:ident : $field_ty:ty ),+ $(,)? 
+ ) => { + __report_gen_scan_before_step!( + $ident, $variant_ident; + ; + $( $field_ident : $field_ty, )* + ; + ); + }; +} + +/// Defines the runner-event which is sent from the test runners to the report aggregator. +/// +/// This macro defines a number of things related to the reporting infrastructure and the interface +/// used. First of all, it defines the enum of all of the possible events that the runners can send +/// to the aggregator. For each one of the variants it defines a separate struct for it to allow the +/// variant field in the enum to be put in a [`Box`]. +/// +/// In addition to the above, it defines [`From`] implementations for the various event types for +/// the [`RunnerEvent`] enum essentially allowing for events such as [`CorpusFileDiscoveryEvent`] to +/// be converted into a [`RunnerEvent`]. +/// +/// In addition to the above, it also defines the [`RunnerEventReporter`] which is a wrapper around +/// an [`UnboundedSender`] allowing for events to be sent to the report aggregator. +/// +/// With the above description, we can see that this macro defines almost all of the interface of +/// the reporting infrastructure, from the enum itself, to its associated types, and also to the +/// reporter that's used to report events to the aggregator. +/// +/// [`UnboundedSender`]: tokio::sync::mpsc::UnboundedSender +macro_rules! define_event { + ( + $(#[$enum_meta: meta])* + $vis: vis enum $ident: ident { + $( + $(#[$variant_meta: meta])* + $variant_ident: ident { + $( + $(#[$field_meta: meta])* + $field_ident: ident: $field_ty: ty + ),* $(,)? + } + ),* $(,)? + } + ) => { + paste::paste! 
{ + $(#[$enum_meta])* + #[derive(Debug)] + $vis enum $ident { + $( + $(#[$variant_meta])* + $variant_ident(Box<[<$variant_ident Event>]>) + ),* + } + + $( + #[derive(Debug)] + $(#[$variant_meta])* + $vis struct [<$variant_ident Event>] { + $( + $(#[$field_meta])* + $vis $field_ident: $field_ty + ),* + } + )* + + $( + impl From<[<$variant_ident Event>]> for $ident { + fn from(value: [<$variant_ident Event>]) -> Self { + Self::$variant_ident(Box::new(value)) + } + } + )* + + /// Provides a way to report events to the aggregator. + /// + /// Under the hood, this is a wrapper around an [`UnboundedSender`] which abstracts away + /// the fact that channels are used and that implements high-level methods for reporting + /// various events to the aggregator. + #[derive(Clone, Debug)] + pub struct [< $ident Reporter >]($vis tokio::sync::mpsc::UnboundedSender<$ident>); + + impl From> for [< $ident Reporter >] { + fn from(value: tokio::sync::mpsc::UnboundedSender<$ident>) -> Self { + Self(value) + } + } + + impl [< $ident Reporter >] { + pub fn test_specific_reporter( + &self, + test_specifier: impl Into> + ) -> [< $ident TestSpecificReporter >] { + [< $ident TestSpecificReporter >] { + reporter: self.clone(), + test_specifier: test_specifier.into(), + } + } + + fn report(&self, event: impl Into<$ident>) -> anyhow::Result<()> { + self.0.send(event.into()).map_err(Into::into) + } + + $( + pub fn [< report_ $variant_ident:snake _event >](&self, $($field_ident: impl Into<$field_ty>),*) -> anyhow::Result<()> { + self.report([< $variant_ident Event >] { + $($field_ident: $field_ident.into()),* + }) + } + )* + } + + /// A reporter that's tied to a specific test case. 
+ #[derive(Clone, Debug)] + pub struct [< $ident TestSpecificReporter >] { + $vis reporter: [< $ident Reporter >], + $vis test_specifier: std::sync::Arc, + } + + impl [< $ident TestSpecificReporter >] { + pub fn execution_specific_reporter( + &self, + node_id: impl Into, + node_designation: impl Into<$crate::common::NodeDesignation> + ) -> [< $ident ExecutionSpecificReporter >] { + [< $ident ExecutionSpecificReporter >] { + reporter: self.reporter.clone(), + execution_specifier: Arc::new($crate::common::ExecutionSpecifier { + test_specifier: self.test_specifier.clone(), + node_id: node_id.into(), + node_designation: node_designation.into(), + }) + } + } + + fn report(&self, event: impl Into<$ident>) -> anyhow::Result<()> { + self.reporter.report(event) + } + + $( + __report_gen_for_variant! { $ident, $variant_ident; $( $field_ident : $field_ty ),* } + )* + } + + /// A reporter that's tied to a specific execution of the test case such as execution on + /// a specific node like the leader or follower. + #[derive(Clone, Debug)] + pub struct [< $ident ExecutionSpecificReporter >] { + $vis reporter: [< $ident Reporter >], + $vis execution_specifier: std::sync::Arc<$crate::common::ExecutionSpecifier>, + } + + impl [< $ident ExecutionSpecificReporter >] { + fn report(&self, event: impl Into<$ident>) -> anyhow::Result<()> { + self.reporter.report(event) + } + + $( + __report_gen_for_variant_exec! { $ident, $variant_ident; $( $field_ident : $field_ty ),* } + )* + } + + /// A reporter that's tied to a specific step execution + #[derive(Clone, Debug)] + pub struct [< $ident StepExecutionSpecificReporter >] { + $vis reporter: [< $ident Reporter >], + $vis step_specifier: std::sync::Arc<$crate::common::StepExecutionSpecifier>, + } + + impl [< $ident StepExecutionSpecificReporter >] { + fn report(&self, event: impl Into<$ident>) -> anyhow::Result<()> { + self.reporter.report(event) + } + + $( + __report_gen_for_variant_step! 
{ $ident, $variant_ident; $( $field_ident : $field_ty ),* } + )* + } + } + }; +} + +define_event! { + /// An event type that's sent by the test runners/drivers to the report aggregator. + pub(crate) enum RunnerEvent { + /// An event emitted by the reporter when it wishes to listen to events emitted by the + /// aggregator. + SubscribeToEvents { + /// The channel that the aggregator is to send the receive side of the channel on. + tx: oneshot::Sender<broadcast::Receiver<ReporterEvent>> + }, + /// An event emitted by runners when they've discovered a corpus file. + CorpusFileDiscovery { + /// The contents of the corpus file. + corpus: Corpus + }, + /// An event emitted by runners when they've discovered a metadata file. + MetadataFileDiscovery { + /// The path of the metadata file discovered. + path: MetadataFilePath, + /// The content of the metadata file. + metadata: Metadata + }, + /// An event emitted by the runners when they discover a test case. + TestCaseDiscovery { + /// A specifier for the test that was discovered. + test_specifier: Arc<TestSpecifier>, + }, + /// An event emitted by the runners when a test case is ignored. + TestIgnored { + /// A specifier for the test that's been ignored. + test_specifier: Arc<TestSpecifier>, + /// A reason for the test to be ignored. + reason: String, + /// Additional fields that describe more information on why the test was ignored. + additional_fields: IndexMap + }, + /// An event emitted by the runners when a test case has succeeded. + TestSucceeded { + /// A specifier for the test that succeeded. + test_specifier: Arc<TestSpecifier>, + /// The number of steps of the case that were executed by the driver. + steps_executed: usize, + }, + /// An event emitted by the runners when a test case has failed. + TestFailed { + /// A specifier for the test that failed. + test_specifier: Arc<TestSpecifier>, + /// A reason for the failure of the test. + reason: String, + }, + /// An event emitted when the test case is assigned a leader node. + LeaderNodeAssigned { + /// A specifier for the test that the assignment is for. 
+ test_specifier: Arc, + /// The ID of the node that this case is being executed on. + id: usize, + /// The platform of the node. + platform: TestingPlatform, + /// The connection string of the node. + connection_string: String, + }, + /// An event emitted when the test case is assigned a follower node. + FollowerNodeAssigned { + /// A specifier for the test that the assignment is for. + test_specifier: Arc, + /// The ID of the node that this case is being executed on. + id: usize, + /// The platform of the node. + platform: TestingPlatform, + /// The connection string of the node. + connection_string: String, + }, + /// An event emitted by the runners when the compilation of the contracts has succeeded + /// on the pre-link contracts. + PreLinkContractsCompilationSucceeded { + /// A specifier for the execution that's taking place. + execution_specifier: Arc, + /// The version of the compiler used to compile the contracts. + compiler_version: Version, + /// The path of the compiler used to compile the contracts. + compiler_path: PathBuf, + /// A flag of whether the contract bytecode and ABI were cached or if they were compiled + /// anew. + is_cached: bool, + /// The input provided to the compiler - this is optional and not provided if the + /// contracts were obtained from the cache. + compiler_input: Option, + /// The output of the compiler. + compiler_output: CompilerOutput + }, + /// An event emitted by the runners when the compilation of the contracts has succeeded + /// on the post-link contracts. + PostLinkContractsCompilationSucceeded { + /// A specifier for the execution that's taking place. + execution_specifier: Arc, + /// The version of the compiler used to compile the contracts. + compiler_version: Version, + /// The path of the compiler used to compile the contracts. + compiler_path: PathBuf, + /// A flag of whether the contract bytecode and ABI were cached or if they were compiled + /// anew. 
+ is_cached: bool, + /// The input provided to the compiler - this is optional and not provided if the + /// contracts were obtained from the cache. + compiler_input: Option, + /// The output of the compiler. + compiler_output: CompilerOutput + }, + /// An event emitted by the runners when the compilation of the pre-link contract has + /// failed. + PreLinkContractsCompilationFailed { + /// A specifier for the execution that's taking place. + execution_specifier: Arc, + /// The version of the compiler used to compile the contracts. + compiler_version: Option, + /// The path of the compiler used to compile the contracts. + compiler_path: Option, + /// The input provided to the compiler - this is optional and not provided if the + /// contracts were obtained from the cache. + compiler_input: Option, + /// The failure reason. + reason: String, + }, + /// An event emitted by the runners when the compilation of the post-link contract has + /// failed. + PostLinkContractsCompilationFailed { + /// A specifier for the execution that's taking place. + execution_specifier: Arc, + /// The version of the compiler used to compile the contracts. + compiler_version: Option, + /// The path of the compiler used to compile the contracts. + compiler_path: Option, + /// The input provided to the compiler - this is optional and not provided if the + /// contracts were obtained from the cache. + compiler_input: Option, + /// The failure reason. + reason: String, + }, + /// An event emitted by the runners when a library has been deployed. + LibrariesDeployed { + /// A specifier for the execution that's taking place. + execution_specifier: Arc, + /// The addresses of the libraries that were deployed. + libraries: BTreeMap + }, + /// An event emitted by the runners when they've deployed a new contract. + ContractDeployed { + /// A specifier for the execution that's taking place. + execution_specifier: Arc, + /// The instance name of the contract. 
+ contract_instance: ContractInstance, + /// The address of the contract. + address: Address + }, + } +} + +/// An extension to the [`Reporter`] implemented by the macro. +impl RunnerEventReporter { + pub async fn subscribe(&self) -> anyhow::Result<broadcast::Receiver<ReporterEvent>> { + let (tx, rx) = oneshot::channel::<broadcast::Receiver<ReporterEvent>>(); + self.report_subscribe_to_events_event(tx)?; + rx.await.map_err(Into::into) + } +} + +pub type Reporter = RunnerEventReporter; +pub type TestSpecificReporter = RunnerEventTestSpecificReporter; +pub type ExecutionSpecificReporter = RunnerEventExecutionSpecificReporter;