diff --git a/crates/ml-test-runner/src/main.rs b/crates/ml-test-runner/src/main.rs
index f18ff1c..c904258 100644
--- a/crates/ml-test-runner/src/main.rs
+++ b/crates/ml-test-runner/src/main.rs
@@ -62,17 +62,25 @@ struct MlTestRunnerArgs {
     /// RPC port to connect to when using existing node
     #[arg(long = "rpc-port", default_value = "8545")]
     rpc_port: u16,
+
+    /// Show verbose output including cached tests and detailed error messages
+    #[arg(long = "verbose", short = 'v')]
+    verbose: bool,
 }
 
 fn main() -> anyhow::Result<()> {
-    let subscriber = FmtSubscriber::builder()
-        .with_env_filter(EnvFilter::from_default_env())
-        .with_writer(std::io::stderr)
-        .finish();
-    tracing::subscriber::set_global_default(subscriber).expect("Failed to set tracing subscriber");
-
     let args = MlTestRunnerArgs::parse();
 
+    // Only set up tracing if RUST_LOG is explicitly set or --verbose is passed
+    if std::env::var("RUST_LOG").is_ok() || args.verbose {
+        let subscriber = FmtSubscriber::builder()
+            .with_env_filter(EnvFilter::from_default_env())
+            .with_writer(std::io::stderr)
+            .finish();
+        tracing::subscriber::set_global_default(subscriber)
+            .expect("Failed to set tracing subscriber");
+    }
+
     info!("ML test runner starting");
     info!("Platform: {:?}", args.platform);
     info!("Start platform: {}", args.start_platform);
@@ -185,7 +193,9 @@ async fn run(args: MlTestRunnerArgs) -> anyhow::Result<()> {
         {
             let cache = cached_passed.lock().await;
             if cache.contains(&file_display) {
-                println!("test {file_display} ... {YELLOW}cached{COLOUR_RESET}");
+                if args.verbose {
+                    println!("test {file_display} ... {YELLOW}cached{COLOUR_RESET}");
+                }
                 skipped_files += 1;
                 continue;
             }
         }
@@ -214,15 +224,20 @@ async fn run(args: MlTestRunnerArgs) -> anyhow::Result<()> {
                 println!("test {file_display} ... {GREEN}ok{COLOUR_RESET}");
                 passed_files += 1;
-                {
+                // Update cache
+                if let Some(cache_file) = &args.cached_passed {
                     let mut cache = cached_passed.lock().await;
                     cache.insert(file_display);
+                    if let Err(e) = save_cached_passed(cache_file, &cache) {
+                        info!("Failed to save cache: {}", e);
+                    }
                 }
             },
             Err(e) => {
                 println!("test {file_display} ... {RED}FAILED{COLOUR_RESET}");
                 failed_files += 1;
-                failures.push((file_display, format!("{:?}", e)));
+                let error_detail = if args.verbose { format!("{:?}", e) } else { format!("{}", e) };
+                failures.push((file_display, error_detail));
 
                 if args.bail {
                     info!("Bailing after first failure");
@@ -232,15 +247,9 @@ async fn run(args: MlTestRunnerArgs) -> anyhow::Result<()> {
         }
     }
 
-    if let Some(cache_file) = &args.cached_passed {
-        let cache = cached_passed.lock().await;
-        info!("Saving {} cached passed test(s)", cache.len());
-        save_cached_passed(cache_file, &cache)?;
-    }
-
     // Print summary
     println!();
-    if !failures.is_empty() {
+    if !failures.is_empty() && args.verbose {
         println!("{BOLD}failures:{BOLD_RESET}");
         println!();
         for (file, error) in &failures {
@@ -298,12 +307,43 @@ fn discover_test_files(path: &Path) -> anyhow::Result<Vec<PathBuf>> {
             _ => anyhow::bail!("Unsupported file extension: {}. Expected .sol or .json", extension),
         }
     } else if path.is_dir() {
-        // Walk directory recursively for .sol files
-        for entry in FilesWithExtensionIterator::new(path)
+        // First, find all test.json files
+        let mut test_json_dirs = HashSet::new();
+        for json_file in FilesWithExtensionIterator::new(path)
+            .with_allowed_extension("json")
+            .with_use_cached_fs(true)
+        {
+            if json_file.file_name().and_then(|s| s.to_str()) == Some("test.json") {
+                if let Some(parent) = json_file.parent() {
+                    test_json_dirs.insert(parent.to_path_buf());
+                }
+
+                // Try to parse as corpus file first, then as metadata file
+                if let Ok(corpus) = Corpus::try_from_path(&json_file) {
+                    // It's a corpus file - enumerate its tests
+                    let metadata_files = corpus.enumerate_tests();
+                    for metadata in metadata_files {
+                        files.push(metadata.metadata_file_path);
+                    }
+                } else {
+                    // It's a metadata file - use it directly
+                    files.push(json_file);
+                }
+            }
+        }
+
+        // Then, find .sol files that are NOT in directories with test.json
+        for sol_file in FilesWithExtensionIterator::new(path)
            .with_allowed_extension("sol")
            .with_use_cached_fs(true)
         {
-            files.push(entry);
+            if let Some(parent) = sol_file.parent() {
+                if !test_json_dirs.contains(parent) {
+                    files.push(sol_file);
+                }
+            } else {
+                files.push(sol_file);
+            }
         }
     } else {
         anyhow::bail!("Path is neither a file nor a directory: {}", path.display());
@@ -468,7 +508,7 @@ async fn build_test_definition<'a>(
     };
 
     if let Err((reason, _)) = test_definition.check_compatibility() {
-        println!(" Skipping case {}: {}", case_idx, reason);
+        info!("Skipping case {}: {}", case_idx, reason);
         return Ok(None);
     }
diff --git a/crates/node/src/node_implementations/geth.rs b/crates/node/src/node_implementations/geth.rs
index ac588fd..0b6faf4 100644
--- a/crates/node/src/node_implementations/geth.rs
+++ b/crates/node/src/node_implementations/geth.rs
@@ -94,8 +94,8 @@ impl GethNode {
     const TRANSACTION_INDEXING_ERROR: &str = "transaction indexing is in progress";
     const TRANSACTION_TRACING_ERROR: &str = "historical state not available in path scheme yet";
 
-    const RECEIPT_POLLING_DURATION: Duration = Duration::from_secs(5 * 60);
-    const TRACE_POLLING_DURATION: Duration = Duration::from_secs(60);
+    const RECEIPT_POLLING_DURATION: Duration = Duration::from_secs(10);
+    const TRACE_POLLING_DURATION: Duration = Duration::from_secs(10);
 
     pub fn new(
         context: impl AsRef
diff --git a/crates/node/src/node_implementations/lighthouse_geth.rs b/crates/node/src/node_implementations/lighthouse_geth.rs
index 1f3f7dd..7212a03 100644
--- a/crates/node/src/node_implementations/lighthouse_geth.rs
+++ b/crates/node/src/node_implementations/lighthouse_geth.rs
@@ -116,7 +116,7 @@ impl LighthouseGethNode {
     const TRANSACTION_INDEXING_ERROR: &str = "transaction indexing is in progress";
     const TRANSACTION_TRACING_ERROR: &str = "historical state not available in path scheme yet";
 
-    const RECEIPT_POLLING_DURATION: Duration = Duration::from_secs(5 * 60);
+    const RECEIPT_POLLING_DURATION: Duration = Duration::from_secs(30);
     const TRACE_POLLING_DURATION: Duration = Duration::from_secs(60);
     const VALIDATOR_MNEMONIC: &str = "giant issue aisle success illegal bike spike question tent bar rely arctic volcano long crawl hungry vocal artwork sniff fantasy very lucky have athlete";
diff --git a/crates/node/src/node_implementations/zombienet.rs b/crates/node/src/node_implementations/zombienet.rs
index a7023b8..1e6eb66 100644
--- a/crates/node/src/node_implementations/zombienet.rs
+++ b/crates/node/src/node_implementations/zombienet.rs
@@ -428,16 +428,20 @@ impl EthereumNode for ZombieNode {
         transaction: alloy::rpc::types::TransactionRequest,
     ) -> Pin> + '_>> {
         Box::pin(async move {
-            let receipt = self
+            let pending = self
                 .provider()
                 .await
                 .context("Failed to create provider for transaction submission")?
                 .send_transaction(transaction)
                 .await
-                .context("Failed to submit transaction to proxy")?
-                .get_receipt()
-                .await
-                .context("Failed to fetch transaction receipt from proxy")?;
+                .context("Failed to submit transaction to proxy")?;
+
+            let receipt =
+                tokio::time::timeout(std::time::Duration::from_secs(120), pending.get_receipt())
+                    .await
+                    .context("Timeout waiting for transaction receipt")?
+                    .context("Failed to fetch transaction receipt from proxy")?;
+
             Ok(receipt)
         })
     }
diff --git a/crates/node/src/provider_utils/provider.rs b/crates/node/src/provider_utils/provider.rs
index 8d70955..d2f276d 100644
--- a/crates/node/src/provider_utils/provider.rs
+++ b/crates/node/src/provider_utils/provider.rs
@@ -108,9 +108,10 @@ where
         .await
         .context(format!("Transaction inclusion watching timeout for {tx_hash}"))?;
 
-    poll(Duration::from_secs(60), PollingWaitBehavior::Constant(Duration::from_secs(3)), || {
-        let provider = provider.clone();
+    debug!(%tx_hash, "Transaction included, polling for receipt");
+    poll(Duration::from_secs(30), PollingWaitBehavior::Constant(Duration::from_secs(3)), || {
+        let provider = provider.clone();
         async move {
             match provider.get_transaction_receipt(tx_hash).await {
                 Ok(Some(receipt)) => Ok(ControlFlow::Break(receipt)),
@@ -119,5 +120,5 @@
         }
     })
     .await
-    .context(format!("Polling for receipt failed for {tx_hash}"))
+    .context(format!("Polling for receipt timed out for {tx_hash}"))
 }