From da31e66fb4c52f4adf3d362981ea5f8ffb461723 Mon Sep 17 00:00:00 2001 From: Omar Abdulla Date: Tue, 29 Jul 2025 19:12:34 +0300 Subject: [PATCH] Make compiler IO completely generic. Before this commit, the types that were used for the compiler input and output were the resolc compiler types which was a leaky abstraction as we have traits to abstract the compilers away but we expose their internal types out to other crates. This commit did the following: 1. Made the compiler IO types fully generic so that all of the logic for constructing the map of compiled contracts is all done by the compiler implementation and not by the consuming code. 2. Changed the input types used for Solc to be the forge standard JSON types for Solc instead of resolc. --- Cargo.lock | 82 +++++++- Cargo.toml | 1 + crates/compiler/Cargo.toml | 3 + crates/compiler/src/lib.rs | 180 ++++++---------- crates/compiler/src/revive_resolc.rs | 199 +++++++++++------- crates/compiler/src/solc.rs | 141 ++++++++++--- .../assets/array_one_element/callable.sol | 9 + .../tests/assets/array_one_element/main.sol | 13 ++ .../tests/assets/array_one_element/test.json | 21 ++ crates/compiler/tests/lib.rs | 81 +++++++ crates/core/Cargo.toml | 1 - crates/core/src/driver/mod.rs | 88 +++----- crates/report/Cargo.toml | 2 +- crates/report/src/analyzer.rs | 29 +-- crates/report/src/reporter.rs | 16 +- 15 files changed, 550 insertions(+), 316 deletions(-) create mode 100644 crates/compiler/tests/assets/array_one_element/callable.sol create mode 100644 crates/compiler/tests/assets/array_one_element/main.sol create mode 100644 crates/compiler/tests/assets/array_one_element/test.json create mode 100644 crates/compiler/tests/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 3e2f201..9bbf99e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -339,6 +339,7 @@ dependencies = [ "const-hex", "derive_more 2.0.1", "foldhash", + "getrandom 0.3.3", "hashbrown 0.15.3", "indexmap 2.10.0", "itoa", @@ -2227,6 +2228,66 @@ dependencies = [ 
"percent-encoding", ] +[[package]] +name = "foundry-compilers-artifacts" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2676d70082ed23680fe2d08c0b750d5f7f2438c6d946f1cb140a76c5e5e0392" +dependencies = [ + "foundry-compilers-artifacts-solc", + "foundry-compilers-artifacts-vyper", +] + +[[package]] +name = "foundry-compilers-artifacts-solc" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3ada94dc5946334bb08df574855ba345ab03ba8c6f233560c72c8d61fa9db80" +dependencies = [ + "alloy-json-abi", + "alloy-primitives", + "foundry-compilers-core", + "path-slash", + "regex", + "semver 1.0.26", + "serde", + "serde_json", + "thiserror 2.0.12", + "tracing", + "yansi", +] + +[[package]] +name = "foundry-compilers-artifacts-vyper" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "372052af72652e375a6e7eed22179bd8935114e25e1c5a8cca7f00e8f20bd94c" +dependencies = [ + "alloy-json-abi", + "alloy-primitives", + "foundry-compilers-artifacts-solc", + "foundry-compilers-core", + "path-slash", + "semver 1.0.26", + "serde", +] + +[[package]] +name = "foundry-compilers-core" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf0962c46855979300f6526ed57f987ccf6a025c2b92ce574b281d9cb2ef666b" +dependencies = [ + "alloy-primitives", + "cfg-if", + "dunce", + "path-slash", + "semver 1.0.26", + "serde", + "serde_json", + "thiserror 2.0.12", +] + [[package]] name = "fs-err" version = "2.11.0" @@ -3456,6 +3517,12 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +[[package]] +name = "path-slash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42" + [[package]] name = "pbkdf2" 
version = "0.12.2" @@ -3964,14 +4031,17 @@ dependencies = [ name = "revive-dt-compiler" version = "0.1.0" dependencies = [ + "alloy", "alloy-primitives", "anyhow", + "foundry-compilers-artifacts", "revive-common", "revive-dt-common", "revive-dt-config", "revive-dt-solc-binaries", "revive-solc-json-interface", "semver 1.0.26", + "serde", "serde_json", "tracing", ] @@ -4003,7 +4073,6 @@ dependencies = [ "revive-dt-node", "revive-dt-node-interaction", "revive-dt-report", - "revive-solc-json-interface", "semver 1.0.26", "serde_json", "temp-dir", @@ -4058,9 +4127,9 @@ name = "revive-dt-report" version = "0.1.0" dependencies = [ "anyhow", + "revive-dt-compiler", "revive-dt-config", "revive-dt-format", - "revive-solc-json-interface", "serde", "serde_json", "tracing", @@ -4171,6 +4240,9 @@ name = "rustc-hash" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" +dependencies = [ + "rand 0.8.5", +] [[package]] name = "rustc-hex" @@ -6221,6 +6293,12 @@ dependencies = [ "tap", ] +[[package]] +name = "yansi" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + [[package]] name = "yoke" version = "0.8.0" diff --git a/Cargo.toml b/Cargo.toml index d077231..9855d12 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,6 +26,7 @@ alloy-primitives = "1.2.1" alloy-sol-types = "1.2.1" anyhow = "1.0" clap = { version = "4", features = ["derive"] } +foundry-compilers-artifacts = { version = "0.18.0" } futures = { version = "0.3.31" } hex = "0.4.3" reqwest = { version = "0.12.15", features = ["blocking", "json"] } diff --git a/crates/compiler/Cargo.toml b/crates/compiler/Cargo.toml index 4a7d671..295a147 100644 --- a/crates/compiler/Cargo.toml +++ b/crates/compiler/Cargo.toml @@ -15,8 +15,11 @@ revive-dt-config = { workspace = true } revive-dt-solc-binaries = { workspace = 
true } revive-common = { workspace = true } +alloy = { workspace = true } alloy-primitives = { workspace = true } anyhow = { workspace = true } +foundry-compilers-artifacts = { workspace = true } semver = { workspace = true } +serde = { workspace = true } serde_json = { workspace = true } tracing = { workspace = true } diff --git a/crates/compiler/src/lib.rs b/crates/compiler/src/lib.rs index cb192b9..364359c 100644 --- a/crates/compiler/src/lib.rs +++ b/crates/compiler/src/lib.rs @@ -4,22 +4,20 @@ //! - Polkadot revive Wasm compiler use std::{ + collections::HashMap, fs::read_to_string, hash::Hash, path::{Path, PathBuf}, }; +use alloy::json_abi::JsonAbi; use alloy_primitives::Address; -use revive_dt_config::Arguments; +use semver::Version; +use serde::{Deserialize, Serialize}; use revive_common::EVMVersion; use revive_dt_common::types::VersionOrRequirement; -use revive_solc_json_interface::{ - SolcStandardJsonInput, SolcStandardJsonInputLanguage, SolcStandardJsonInputSettings, - SolcStandardJsonInputSettingsOptimizer, SolcStandardJsonInputSettingsSelection, - SolcStandardJsonOutput, -}; -use semver::Version; +use revive_dt_config::Arguments; pub mod revive_js; pub mod revive_resolc; @@ -33,8 +31,9 @@ pub trait SolidityCompiler { /// The low-level compiler interface. fn build( &self, - input: CompilerInput, - ) -> anyhow::Result>; + input: CompilerInput, + additional_options: Self::Options, + ) -> anyhow::Result; fn new(solc_executable: PathBuf) -> Self; @@ -47,54 +46,29 @@ pub trait SolidityCompiler { } /// The generic compilation input configuration. -#[derive(Debug)] -pub struct CompilerInput { - pub extra_options: T, - pub input: SolcStandardJsonInput, +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CompilerInput { + pub enable_optimization: Option, + pub via_ir: Option, + pub evm_version: Option, pub allow_paths: Vec, pub base_path: Option, + pub sources: HashMap, + pub libraries: HashMap>, } /// The generic compilation output configuration. 
-#[derive(Debug)] -pub struct CompilerOutput { - /// The solc standard JSON input. - pub input: CompilerInput, - /// The produced solc standard JSON output. - pub output: SolcStandardJsonOutput, - /// The error message in case the compiler returns abnormally. - pub error: Option, +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +pub struct CompilerOutput { + /// The compiled contracts. The bytecode of the contract is kept as a string in case linking is + /// required and the compiled source has placeholders. + pub contracts: HashMap>, } -impl PartialEq for CompilerInput -where - T: PartialEq + Eq + Hash, -{ - fn eq(&self, other: &Self) -> bool { - let self_input = serde_json::to_vec(&self.input).unwrap_or_default(); - let other_input = serde_json::to_vec(&self.input).unwrap_or_default(); - self.extra_options.eq(&other.extra_options) && self_input == other_input - } -} - -impl Eq for CompilerInput where T: PartialEq + Eq + Hash {} - -impl Hash for CompilerInput -where - T: PartialEq + Eq + Hash, -{ - fn hash(&self, state: &mut H) { - self.extra_options.hash(state); - state.write(&serde_json::to_vec(&self.input).unwrap_or_default()); - } -} - -/// A generic builder style interface for configuring all compiler options. +/// A generic builder style interface for configuring the supported compiler options. 
pub struct Compiler { - input: SolcStandardJsonInput, - extra_options: T::Options, - allow_paths: Vec, - base_path: Option, + input: CompilerInput, + additional_options: T::Options, } impl Default for Compiler { @@ -109,93 +83,75 @@ where { pub fn new() -> Self { Self { - input: SolcStandardJsonInput { - language: SolcStandardJsonInputLanguage::Solidity, + input: CompilerInput { + enable_optimization: Default::default(), + via_ir: Default::default(), + evm_version: Default::default(), + allow_paths: Default::default(), + base_path: Default::default(), sources: Default::default(), - settings: SolcStandardJsonInputSettings::new( - None, - Default::default(), - None, - SolcStandardJsonInputSettingsSelection::new_required(), - SolcStandardJsonInputSettingsOptimizer::new( - false, - None, - &Version::new(0, 0, 0), - false, - ), - None, - None, - ), + libraries: Default::default(), }, - extra_options: Default::default(), - allow_paths: Default::default(), - base_path: None, + additional_options: T::Options::default(), } } - pub fn solc_optimizer(mut self, enabled: bool) -> Self { - self.input.settings.optimizer.enabled = enabled; + pub fn with_optimization(mut self, value: impl Into>) -> Self { + self.input.enable_optimization = value.into(); self } - pub fn with_source(mut self, path: &Path) -> anyhow::Result { + pub fn with_via_ir(mut self, value: impl Into>) -> Self { + self.input.via_ir = value.into(); + self + } + + pub fn with_evm_version(mut self, version: impl Into>) -> Self { + self.input.evm_version = version.into(); + self + } + + pub fn with_allow_path(mut self, path: impl AsRef) -> Self { + self.input.allow_paths.push(path.as_ref().into()); + self + } + + pub fn with_base_path(mut self, path: impl Into>) -> Self { + self.input.base_path = path.into(); + self + } + + pub fn with_source(mut self, path: impl AsRef) -> anyhow::Result { self.input .sources - .insert(path.display().to_string(), read_to_string(path)?.into()); + .insert(path.as_ref().to_path_buf(), 
read_to_string(path.as_ref())?); Ok(self) } - pub fn evm_version(mut self, evm_version: EVMVersion) -> Self { - self.input.settings.evm_version = Some(evm_version); - self - } - - pub fn extra_options(mut self, extra_options: T::Options) -> Self { - self.extra_options = extra_options; - self - } - - pub fn allow_path(mut self, path: PathBuf) -> Self { - self.allow_paths.push(path); - self - } - - pub fn base_path(mut self, base_path: PathBuf) -> Self { - self.base_path = Some(base_path); - self - } - pub fn with_library( mut self, - scope: impl AsRef, - library_ident: impl AsRef, - library_address: Address, + path: impl AsRef, + name: impl AsRef, + address: Address, ) -> Self { self.input - .settings .libraries - .get_or_insert_with(Default::default) - .entry(scope.as_ref().display().to_string()) + .entry(path.as_ref().to_path_buf()) .or_default() - .insert( - library_ident.as_ref().to_owned(), - library_address.to_string(), - ); - + .insert(name.as_ref().into(), address); self } - pub fn try_build(self, solc_path: PathBuf) -> anyhow::Result> { - T::new(solc_path).build(CompilerInput { - extra_options: self.extra_options, - input: self.input, - allow_paths: self.allow_paths, - base_path: self.base_path, - }) + pub fn with_additional_options(mut self, options: impl Into) -> Self { + self.additional_options = options.into(); + self } - /// Returns the compiler JSON input. 
- pub fn input(&self) -> SolcStandardJsonInput { + pub fn try_build(self, compiler_path: impl AsRef) -> anyhow::Result { + T::new(compiler_path.as_ref().to_path_buf()).build(self.input, self.additional_options) + } + + pub fn input(&self) -> CompilerInput { self.input.clone() } } diff --git a/crates/compiler/src/revive_resolc.rs b/crates/compiler/src/revive_resolc.rs index c272ff4..507539e 100644 --- a/crates/compiler/src/revive_resolc.rs +++ b/crates/compiler/src/revive_resolc.rs @@ -6,9 +6,14 @@ use std::{ process::{Command, Stdio}, }; +use alloy::json_abi::JsonAbi; use revive_dt_common::types::VersionOrRequirement; use revive_dt_config::Arguments; -use revive_solc_json_interface::SolcStandardJsonOutput; +use revive_solc_json_interface::{ + SolcStandardJsonInput, SolcStandardJsonInputLanguage, SolcStandardJsonInputSettings, + SolcStandardJsonInputSettingsOptimizer, SolcStandardJsonInputSettingsSelection, + SolcStandardJsonOutput, +}; use crate::{CompilerInput, CompilerOutput, SolidityCompiler}; @@ -32,8 +37,56 @@ impl SolidityCompiler for Resolc { #[tracing::instrument(level = "debug", ret)] fn build( &self, - input: CompilerInput, - ) -> anyhow::Result> { + CompilerInput { + enable_optimization, + // Ignored and not honored since this is required for the resolc compilation. 
+ via_ir: _via_ir, + evm_version, + allow_paths, + base_path, + sources, + libraries, + }: CompilerInput, + additional_options: Self::Options, + ) -> anyhow::Result { + let input = SolcStandardJsonInput { + language: SolcStandardJsonInputLanguage::Solidity, + sources: sources + .into_iter() + .map(|(path, source)| (path.display().to_string(), source.into())) + .collect(), + settings: SolcStandardJsonInputSettings { + evm_version, + libraries: Some( + libraries + .into_iter() + .map(|(source_code, libraries_map)| { + ( + source_code.display().to_string(), + libraries_map + .into_iter() + .map(|(library_ident, library_address)| { + (library_ident, library_address.to_string()) + }) + .collect(), + ) + }) + .collect(), + ), + remappings: None, + output_selection: Some(SolcStandardJsonInputSettingsSelection::new_required()), + via_ir: Some(true), + optimizer: SolcStandardJsonInputSettingsOptimizer::new( + enable_optimization.unwrap_or(false), + None, + &Version::new(0, 0, 0), + false, + ), + metadata: None, + polkavm: None, + }, + }; + let mut command = Command::new(&self.resolc_path); command .stdin(Stdio::piped()) @@ -41,13 +94,12 @@ impl SolidityCompiler for Resolc { .stderr(Stdio::piped()) .arg("--standard-json"); - if let Some(ref base_path) = input.base_path { + if let Some(ref base_path) = base_path { command.arg("--base-path").arg(base_path); } - if !input.allow_paths.is_empty() { + if !allow_paths.is_empty() { command.arg("--allow-paths").arg( - input - .allow_paths + allow_paths .iter() .map(|path| path.display().to_string()) .collect::>() @@ -57,93 +109,86 @@ impl SolidityCompiler for Resolc { let mut child = command.spawn()?; let stdin_pipe = child.stdin.as_mut().expect("stdin must be piped"); - serde_json::to_writer(stdin_pipe, &input.input)?; - - let json_in = serde_json::to_string_pretty(&input.input)?; + serde_json::to_writer(stdin_pipe, &input)?; let output = child.wait_with_output()?; let stdout = output.stdout; let stderr = output.stderr; if 
!output.status.success() { + let json_in = serde_json::to_string_pretty(&input)?; let message = String::from_utf8_lossy(&stderr); tracing::error!( - "resolc failed exit={} stderr={} JSON-in={} ", - output.status, - &message, - json_in, + status = %output.status, + message = %message, + json_input = json_in, + "Compilation using resolc failed" ); - return Ok(CompilerOutput { - input, - output: Default::default(), - error: Some(message.into()), - }); + anyhow::bail!("Compilation failed with an error: {message}"); } - let mut parsed = - serde_json::from_slice::(&stdout).map_err(|e| { - anyhow::anyhow!( - "failed to parse resolc JSON output: {e}\nstderr: {}", - String::from_utf8_lossy(&stderr) - ) - })?; - - // Detecting if the compiler output contained errors and reporting them through logs and - // errors instead of returning the compiler output that might contain errors. - for error in parsed.errors.iter().flatten() { - if error.severity == "error" { - tracing::error!(?error, ?input, "Encountered an error in the compilation"); - anyhow::bail!("Encountered an error in the compilation: {error}") - } - } - - // We need to do some post processing on the output to make it in the same format that solc - // outputs. More specifically, for each contract, the `.metadata` field should be replaced - // with the `.metadata.solc_metadata` field which contains the ABI and other information - // about the compiled contracts. We do this because we do not want any downstream logic to - // need to differentiate between which compiler is being used when extracting the ABI of the - // contracts. - if let Some(ref mut contracts) = parsed.contracts { - for (contract_path, contracts_map) in contracts.iter_mut() { - for (contract_name, contract_info) in contracts_map.iter_mut() { - let Some(metadata) = contract_info.metadata.take() else { - continue; - }; - - // Get the `solc_metadata` in the metadata of the contract. 
- let Some(solc_metadata) = metadata - .get("solc_metadata") - .and_then(|metadata| metadata.as_str()) - else { - tracing::error!( - contract_path, - contract_name, - metadata = serde_json::to_string(&metadata).unwrap(), - "Encountered a contract compiled with resolc that has no solc_metadata" - ); - anyhow::bail!( - "Contract {} compiled with resolc that has no solc_metadata", - contract_name - ); - }; - - // Replace the original metadata with the new solc_metadata. - contract_info.metadata = - Some(serde_json::Value::String(solc_metadata.to_string())); - } - } - } + let parsed = serde_json::from_slice::(&stdout).map_err(|e| { + anyhow::anyhow!( + "failed to parse resolc JSON output: {e}\nstderr: {}", + String::from_utf8_lossy(&stderr) + ) + })?; tracing::debug!( output = %serde_json::to_string(&parsed).unwrap(), "Compiled successfully" ); - Ok(CompilerOutput { - input, - output: parsed, - error: None, - }) + // Detecting if the compiler output contained errors and reporting them through logs and + // errors instead of returning the compiler output that might contain errors. 
+ for error in parsed.errors.iter().flatten() { + if error.severity == "error" { + tracing::error!( + ?error, + ?input, + output = %serde_json::to_string(&parsed).unwrap(), + "Encountered an error in the compilation" + ); + anyhow::bail!("Encountered an error in the compilation: {error}") + } + } + + let Some(contracts) = parsed.contracts else { + anyhow::bail!("Unexpected error - resolc output doesn't have a contracts section"); + }; + + let mut compiler_output = CompilerOutput::default(); + for (source_path, contracts) in contracts.into_iter() { + let source_path = PathBuf::from(source_path).canonicalize()?; + + let map = compiler_output.contracts.entry(source_path).or_default(); + for (contract_name, contract_information) in contracts.into_iter() { + let bytecode = contract_information + .evm + .and_then(|evm| evm.bytecode.clone()) + .context("Unexpected - Contract compiled with resolc has no bytecode")?; + let abi = contract_information + .metadata + .as_ref() + .and_then(|metadata| metadata.as_object()) + .and_then(|metadata| metadata.get("solc_metadata")) + .and_then(|solc_metadata| solc_metadata.as_str()) + .and_then(|metadata| serde_json::from_str::(metadata).ok()) + .and_then(|metadata| { + metadata.get("output").and_then(|output| { + output + .get("abi") + .and_then(|abi| serde_json::from_value::(abi.clone()).ok()) + }) + }) + .context( + "Unexpected - Failed to get the ABI for a contract compiled with resolc", + )?; + map.insert(contract_name, (bytecode.object, abi)); + } + } + + Ok(compiler_output) } fn new(resolc_path: PathBuf) -> Self { diff --git a/crates/compiler/src/solc.rs b/crates/compiler/src/solc.rs index cc72247..20f5e6d 100644 --- a/crates/compiler/src/solc.rs +++ b/crates/compiler/src/solc.rs @@ -6,12 +6,20 @@ use std::{ process::{Command, Stdio}, }; -use crate::{CompilerInput, CompilerOutput, SolidityCompiler}; -use anyhow::Context; use revive_dt_common::types::VersionOrRequirement; use revive_dt_config::Arguments; use 
revive_dt_solc_binaries::download_solc; -use revive_solc_json_interface::SolcStandardJsonOutput; + +use crate::{CompilerInput, CompilerOutput, SolidityCompiler}; + +use anyhow::Context; +use foundry_compilers_artifacts::{ + output_selection::{ + BytecodeOutputSelection, ContractOutputSelection, EvmOutputSelection, OutputSelection, + }, + solc::CompilerOutput as SolcOutput, + solc::*, +}; use semver::Version; #[derive(Debug)] @@ -25,8 +33,63 @@ impl SolidityCompiler for Solc { #[tracing::instrument(level = "debug", ret)] fn build( &self, - input: CompilerInput, - ) -> anyhow::Result> { + CompilerInput { + enable_optimization, + via_ir, + evm_version, + allow_paths, + base_path, + sources, + libraries, + }: CompilerInput, + _: Self::Options, + ) -> anyhow::Result { + let input = SolcInput { + language: SolcLanguage::Solidity, + sources: Sources( + sources + .into_iter() + .map(|(source_path, source_code)| (source_path, Source::new(source_code))) + .collect(), + ), + settings: Settings { + optimizer: Optimizer { + enabled: enable_optimization, + details: Some(Default::default()), + ..Default::default() + }, + output_selection: OutputSelection::common_output_selection( + [ + ContractOutputSelection::Abi, + ContractOutputSelection::Evm(EvmOutputSelection::ByteCode( + BytecodeOutputSelection::Object, + )), + ] + .into_iter() + .map(|item| item.to_string()), + ), + evm_version: evm_version.map(|version| version.to_string().parse().unwrap()), + via_ir, + libraries: Libraries { + libs: libraries + .into_iter() + .map(|(file_path, libraries)| { + ( + file_path, + libraries + .into_iter() + .map(|(library_name, library_address)| { + (library_name, library_address.to_string()) + }) + .collect(), + ) + }) + .collect(), + }, + ..Default::default() + }, + }; + let mut command = Command::new(&self.solc_path); command .stdin(Stdio::piped()) @@ -34,13 +97,12 @@ impl SolidityCompiler for Solc { .stderr(Stdio::piped()) .arg("--standard-json"); - if let Some(ref base_path) = 
input.base_path { + if let Some(ref base_path) = base_path { command.arg("--base-path").arg(base_path); } - if !input.allow_paths.is_empty() { + if !allow_paths.is_empty() { command.arg("--allow-paths").arg( - input - .allow_paths + allow_paths .iter() .map(|path| path.display().to_string()) .collect::>() @@ -50,31 +112,32 @@ impl SolidityCompiler for Solc { let mut child = command.spawn()?; let stdin = child.stdin.as_mut().expect("should be piped"); - serde_json::to_writer(stdin, &input.input)?; + serde_json::to_writer(stdin, &input)?; let output = child.wait_with_output()?; if !output.status.success() { + let json_in = serde_json::to_string_pretty(&input)?; let message = String::from_utf8_lossy(&output.stderr); - tracing::error!("solc failed exit={} stderr={}", output.status, &message); - return Ok(CompilerOutput { - input, - output: Default::default(), - error: Some(message.into()), - }); + tracing::error!( + status = %output.status, + message = %message, + json_input = json_in, + "Compilation using solc failed" + ); + anyhow::bail!("Compilation failed with an error: {message}"); } - let parsed = - serde_json::from_slice::(&output.stdout).map_err(|e| { - anyhow::anyhow!( - "failed to parse resolc JSON output: {e}\nstderr: {}", - String::from_utf8_lossy(&output.stdout) - ) - })?; + let parsed = serde_json::from_slice::(&output.stdout).map_err(|e| { + anyhow::anyhow!( + "failed to parse resolc JSON output: {e}\nstderr: {}", + String::from_utf8_lossy(&output.stdout) + ) + })?; // Detecting if the compiler output contained errors and reporting them through logs and // errors instead of returning the compiler output that might contain errors. 
- for error in parsed.errors.iter().flatten() { - if error.severity == "error" { + for error in parsed.errors.iter() { + if error.severity == Severity::Error { tracing::error!(?error, ?input, "Encountered an error in the compilation"); anyhow::bail!("Encountered an error in the compilation: {error}") } @@ -85,11 +148,29 @@ impl SolidityCompiler for Solc { "Compiled successfully" ); - Ok(CompilerOutput { - input, - output: parsed, - error: None, - }) + let mut compiler_output = CompilerOutput::default(); + for (contract_path, contracts) in parsed.contracts { + let map = compiler_output + .contracts + .entry(contract_path.canonicalize()?) + .or_default(); + for (contract_name, contract_info) in contracts.into_iter() { + let source_code = contract_info + .evm + .and_then(|evm| evm.bytecode) + .map(|bytecode| match bytecode.object { + BytecodeObject::Bytecode(bytecode) => bytecode.to_string(), + BytecodeObject::Unlinked(unlinked) => unlinked, + }) + .context("Unexpected - contract compiled with solc has no source code")?; + let abi = contract_info + .abi + .context("Unexpected - contract compiled with solc as no ABI")?; + map.insert(contract_name, (source_code, abi)); + } + } + + Ok(compiler_output) } fn new(solc_path: PathBuf) -> Self { diff --git a/crates/compiler/tests/assets/array_one_element/callable.sol b/crates/compiler/tests/assets/array_one_element/callable.sol new file mode 100644 index 0000000..4c7bc55 --- /dev/null +++ b/crates/compiler/tests/assets/array_one_element/callable.sol @@ -0,0 +1,9 @@ +// SPDX-License-Identifier: MIT + +pragma solidity >=0.6.9; + +contract Callable { + function f(uint[1] memory p1) public pure returns(uint) { + return p1[0]; + } +} diff --git a/crates/compiler/tests/assets/array_one_element/main.sol b/crates/compiler/tests/assets/array_one_element/main.sol new file mode 100644 index 0000000..74789c9 --- /dev/null +++ b/crates/compiler/tests/assets/array_one_element/main.sol @@ -0,0 +1,13 @@ +// SPDX-License-Identifier: MIT + +// 
Report https://linear.app/matterlabs/issue/CPR-269/call-with-calldata-variable-bug + +pragma solidity >=0.6.9; + +import "./callable.sol"; + +contract Main { + function main(uint[1] calldata p1, Callable callable) public returns(uint) { + return callable.f(p1); + } +} diff --git a/crates/compiler/tests/assets/array_one_element/test.json b/crates/compiler/tests/assets/array_one_element/test.json new file mode 100644 index 0000000..718530b --- /dev/null +++ b/crates/compiler/tests/assets/array_one_element/test.json @@ -0,0 +1,21 @@ +{ "cases": [ { + "name": "first", + "inputs": [ + { + "instance": "Main", + "method": "main", + "calldata": [ + "1", + "Callable.address" + ] + } + ], + "expected": [ + "1" + ] +} ], + "contracts": { + "Main": "main.sol:Main", + "Callable": "callable.sol:Callable" + } +} \ No newline at end of file diff --git a/crates/compiler/tests/lib.rs b/crates/compiler/tests/lib.rs new file mode 100644 index 0000000..63e07e7 --- /dev/null +++ b/crates/compiler/tests/lib.rs @@ -0,0 +1,81 @@ +use std::path::PathBuf; + +use revive_dt_compiler::{Compiler, SolidityCompiler, revive_resolc::Resolc, solc::Solc}; +use revive_dt_config::Arguments; +use semver::Version; + +#[test] +fn contracts_can_be_compiled_with_solc() { + // Arrange + let args = Arguments::default(); + let compiler_path = Solc::get_compiler_executable(&args, Version::new(0, 8, 30)).unwrap(); + + // Act + let output = Compiler::::new() + .with_source("./tests/assets/array_one_element/callable.sol") + .unwrap() + .with_source("./tests/assets/array_one_element/main.sol") + .unwrap() + .try_build(compiler_path); + + // Assert + let output = output.expect("Failed to compile"); + assert_eq!(output.contracts.len(), 2); + + let main_file_contracts = output + .contracts + .get( + &PathBuf::from("./tests/assets/array_one_element/main.sol") + .canonicalize() + .unwrap(), + ) + .unwrap(); + let callable_file_contracts = output + .contracts + .get( + 
&PathBuf::from("./tests/assets/array_one_element/callable.sol") + .canonicalize() + .unwrap(), + ) + .unwrap(); + assert!(main_file_contracts.contains_key("Main")); + assert!(callable_file_contracts.contains_key("Callable")); +} + +#[test] +fn contracts_can_be_compiled_with_resolc() { + // Arrange + let args = Arguments::default(); + let compiler_path = Resolc::get_compiler_executable(&args, Version::new(0, 8, 30)).unwrap(); + + // Act + let output = Compiler::::new() + .with_source("./tests/assets/array_one_element/callable.sol") + .unwrap() + .with_source("./tests/assets/array_one_element/main.sol") + .unwrap() + .try_build(compiler_path); + + // Assert + let output = output.expect("Failed to compile"); + assert_eq!(output.contracts.len(), 2); + + let main_file_contracts = output + .contracts + .get( + &PathBuf::from("./tests/assets/array_one_element/main.sol") + .canonicalize() + .unwrap(), + ) + .unwrap(); + let callable_file_contracts = output + .contracts + .get( + &PathBuf::from("./tests/assets/array_one_element/callable.sol") + .canonicalize() + .unwrap(), + ) + .unwrap(); + assert!(main_file_contracts.contains_key("Main")); + assert!(callable_file_contracts.contains_key("Callable")); +} diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index 4aad88c..5b6d8dc 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -28,7 +28,6 @@ indexmap = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } rayon = { workspace = true } -revive-solc-json-interface = { workspace = true } serde_json = { workspace = true } semver = { workspace = true } temp-dir = { workspace = true } diff --git a/crates/core/src/driver/mod.rs b/crates/core/src/driver/mod.rs index 2bfa330..cac0aa8 100644 --- a/crates/core/src/driver/mod.rs +++ b/crates/core/src/driver/mod.rs @@ -3,6 +3,7 @@ use std::collections::HashMap; use std::fmt::Debug; use std::marker::PhantomData; +use std::path::PathBuf; use std::str::FromStr; use 
alloy::json_abi::JsonAbi; @@ -22,7 +23,6 @@ use alloy::{ use anyhow::Context; use indexmap::IndexMap; use semver::Version; -use serde_json::Value; use revive_dt_common::iterators::FilesWithExtensionIterator; use revive_dt_compiler::{Compiler, SolidityCompiler}; @@ -34,7 +34,6 @@ use revive_dt_format::{input::Input, metadata::Metadata, mode::SolcMode}; use revive_dt_node::Node; use revive_dt_node_interaction::EthereumNode; use revive_dt_report::reporter::{CompilationTask, Report, Span}; -use revive_solc_json_interface::SolcStandardJsonOutput; use crate::Platform; @@ -48,11 +47,8 @@ pub struct State<'a, T: Platform> { /// The [`Span`] used in reporting. span: Span, - /// A vector of all of the compiled contracts. Each call to [`build_contracts`] adds a new entry - /// to this vector. - /// - /// [`build_contracts`]: State::build_contracts - contracts: Vec, + /// A map of all of the compiled contracts for the given metadata file. + compiled_contracts: HashMap>, /// This map stores the contracts deployments that have been made for each case within a /// metadata file. Note, this means that the state can't be reused between different metadata @@ -79,7 +75,7 @@ where Self { config, span, - contracts: Default::default(), + compiled_contracts: Default::default(), deployed_contracts: Default::default(), deployed_libraries: Default::default(), compiler_version: Default::default(), @@ -116,8 +112,8 @@ where tracing::info!(%compiler_version, "Resolved the compiler version to use"); let compiler = Compiler::::new() - .allow_path(metadata.directory()?) - .solc_optimizer(mode.solc_optimize()); + .with_allow_path(metadata.directory()?) + .with_optimization(mode.solc_optimize()); let mut compiler = metadata .files_to_compile()? 
.try_fold(compiler, |compiler, path| compiler.with_source(&path))?; @@ -151,21 +147,21 @@ where match compiler.try_build(compiler_path) { Ok(output) => { - task.json_output = Some(output.output.clone()); - task.error = output.error; - self.contracts.push(output.output); + task.json_output = Some(output.clone()); - if let Some(last_output) = self.contracts.last() { - if let Some(contracts) = &last_output.contracts { - for (file, contracts_map) in contracts { - for contract_name in contracts_map.keys() { - tracing::debug!( - "Compiled contract: {contract_name} from file: {file}" - ); - } - } - } else { - tracing::warn!("Compiled contracts field is None"); + for (contract_path, contracts) in output.contracts.into_iter() { + let map = self + .compiled_contracts + .entry(contract_path.clone()) + .or_default(); + for (contract_name, contract_info) in contracts.into_iter() { + tracing::debug!( + contract_path = %contract_path.display(), + contract_name = contract_name, + "Compiled contract" + ); + + map.insert(contract_name, contract_info); } } @@ -601,30 +597,24 @@ where ) }; - let compiled_contract = self.contracts.iter().rev().find_map(|output| { - output - .contracts - .as_ref()? 
-                .get(&contract_source_path.display().to_string())
-                .and_then(|source_file_contracts| {
-                    source_file_contracts.get(contract_ident.as_ref())
-                })
-        });
-        let Some(code) = compiled_contract
-            .and_then(|contract| contract.evm.as_ref().and_then(|evm| evm.bytecode.as_ref()))
+        let Some((code, abi)) = self
+            .compiled_contracts
+            .get(&contract_source_path)
+            .and_then(|source_file_contracts| source_file_contracts.get(contract_ident.as_ref()))
+            .cloned()
         else {
             tracing::error!(
                 contract_source_path = contract_source_path.display().to_string(),
                 contract_ident = contract_ident.as_ref(),
-                "Failed to find bytecode for contract"
+                "Failed to find information for contract"
             );
             anyhow::bail!(
-                "Failed to find bytecode for contract {:?}",
+                "Failed to find information for contract {:?}",
                 contract_instance
             )
         };
 
-        let mut code = match alloy::hex::decode(&code.object) {
+        let mut code = match alloy::hex::decode(&code) {
             Ok(code) => code,
             Err(error) => {
                 tracing::error!(
@@ -637,28 +627,6 @@ where
             }
         };
 
-        let Some(Value::String(metadata)) =
-            compiled_contract.and_then(|contract| contract.metadata.as_ref())
-        else {
-            tracing::error!("Contract does not have a metadata field");
-            anyhow::bail!("Contract does not have a metadata field");
-        };
-
-        let Ok(metadata) = serde_json::from_str::<Value>(metadata) else {
-            tracing::error!(%metadata, "Failed to parse solc metadata into a structured value");
-            anyhow::bail!("Failed to parse solc metadata into a structured value {metadata}");
-        };
-
-        let Some(abi) = metadata.get("output").and_then(|value| value.get("abi")) else {
-            tracing::error!(%metadata, "Failed to access the .output.abi field of the solc metadata");
-            anyhow::bail!("Failed to access the .output.abi field of the solc metadata {metadata}");
-        };
-
-        let Ok(abi) = serde_json::from_value::<JsonAbi>(abi.clone()) else {
-            tracing::error!(%metadata, "Failed to deserialize ABI into a structured format");
-            anyhow::bail!("Failed to deserialize ABI into a structured format {metadata}");
-        };
-
if let Some(calldata) = calldata { let calldata = calldata.calldata(self.deployed_contracts(case_idx), node)?; code.extend(calldata); diff --git a/crates/report/Cargo.toml b/crates/report/Cargo.toml index 84c294e..1ffb7b8 100644 --- a/crates/report/Cargo.toml +++ b/crates/report/Cargo.toml @@ -10,9 +10,9 @@ rust-version.workspace = true [dependencies] revive-dt-config = { workspace = true } revive-dt-format = { workspace = true } +revive-dt-compiler = { workspace = true } anyhow = { workspace = true } tracing = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } -revive-solc-json-interface = { workspace = true } diff --git a/crates/report/src/analyzer.rs b/crates/report/src/analyzer.rs index 81d2409..52fd360 100644 --- a/crates/report/src/analyzer.rs +++ b/crates/report/src/analyzer.rs @@ -1,5 +1,6 @@ //! The report analyzer enriches the raw report data. +use revive_dt_compiler::CompilerOutput; use serde::{Deserialize, Serialize}; use crate::reporter::CompilationTask; @@ -13,41 +14,27 @@ pub struct CompilerStatistics { pub mean_code_size: usize, /// The mean size of the optimized YUL IR. pub mean_yul_size: usize, - /// Is a proxy because the YUL also containes a lot of comments. + /// Is a proxy because the YUL also contains a lot of comments. pub yul_to_bytecode_size_ratio: f32, } impl CompilerStatistics { /// Cumulatively update the statistics with the next compiler task. 
pub fn sample(&mut self, compilation_task: &CompilationTask) { - let Some(output) = &compilation_task.json_output else { - return; - }; - - let Some(contracts) = &output.contracts else { + let Some(CompilerOutput { contracts }) = &compilation_task.json_output else { return; }; for (_solidity, contracts) in contracts.iter() { - for (_name, contract) in contracts.iter() { - let Some(evm) = &contract.evm else { - continue; - }; - let Some(deploy_code) = &evm.deployed_bytecode else { - continue; - }; - + for (_name, (bytecode, _)) in contracts.iter() { // The EVM bytecode can be unlinked and thus is not necessarily a decodable hex // string; for our statistics this is a good enough approximation. - let bytecode_size = deploy_code.object.len() / 2; + let bytecode_size = bytecode.len() / 2; - let yul_size = contract - .ir_optimized - .as_ref() - .expect("if the contract has a deploy code it should also have the opimized IR") - .len(); + // TODO: for the time being we set the yul_size to be zero. We need to change this + // when we overhaul the reporting. - self.update_sizes(bytecode_size, yul_size); + self.update_sizes(bytecode_size, 0); } } } diff --git a/crates/report/src/reporter.rs b/crates/report/src/reporter.rs index 1a5c0a6..9b9303d 100644 --- a/crates/report/src/reporter.rs +++ b/crates/report/src/reporter.rs @@ -12,11 +12,11 @@ use std::{ }; use anyhow::Context; +use revive_dt_compiler::{CompilerInput, CompilerOutput}; use serde::{Deserialize, Serialize}; use revive_dt_config::{Arguments, TestingPlatform}; use revive_dt_format::{corpus::Corpus, mode::SolcMode}; -use revive_solc_json_interface::{SolcStandardJsonInput, SolcStandardJsonOutput}; use crate::analyzer::CompilerStatistics; @@ -44,9 +44,9 @@ pub struct Report { #[derive(Clone, Debug, Serialize, Deserialize)] pub struct CompilationTask { /// The observed compiler input. - pub json_input: SolcStandardJsonInput, + pub json_input: CompilerInput, /// The observed compiler output. 
-    pub json_output: Option<SolcStandardJsonOutput>,
+    pub json_output: Option<CompilerOutput>,
     /// The observed compiler mode.
     pub mode: SolcMode,
     /// The observed compiler version.
@@ -152,15 +152,7 @@ impl Report {
         for (platform, results) in self.compiler_results.iter() {
             for result in results {
                 // ignore if there were no errors
-                if result.compilation_task.error.is_none()
-                    && result
-                        .compilation_task
-                        .json_output
-                        .as_ref()
-                        .and_then(|output| output.errors.as_ref())
-                        .map(|errors| errors.is_empty())
-                        .unwrap_or(true)
-                {
+                if result.compilation_task.error.is_none() {
                     continue;
                 }