diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 0eeb93f..4b52009 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -99,9 +99,12 @@ jobs: - name: Install Geth on Ubuntu if: matrix.os == 'ubuntu-24.04' run: | + sudo add-apt-repository -y ppa:ethereum/ethereum sudo apt-get update sudo apt-get install -y protobuf-compiler + sudo apt-get install -y solc + # We were facing some issues in CI with the 1.16.* versions of geth, and specifically on # Ubuntu. Eventually, we found out that the last version of geth that worked in our CI was # version 1.15.11. Thus, this is the version that we want to use in CI. The PPA sadly does @@ -122,12 +125,22 @@ jobs: wget -qO- "$URL" | sudo tar xz -C /usr/local/bin --strip-components=1 geth --version + curl -sL https://github.com/paritytech/revive/releases/download/v0.3.0/resolc-x86_64-unknown-linux-musl -o resolc + chmod +x resolc + sudo mv resolc /usr/local/bin + - name: Install Geth on macOS if: matrix.os == 'macos-14' run: | brew tap ethereum/ethereum brew install ethereum protobuf + brew install solidity + + curl -sL https://github.com/paritytech/revive/releases/download/v0.3.0/resolc-universal-apple-darwin -o resolc + chmod +x resolc + sudo mv resolc /usr/local/bin + - name: Machete uses: bnjbvr/cargo-machete@v0.7.1 @@ -143,5 +156,8 @@ jobs: - name: Check eth-rpc version run: eth-rpc --version + - name: Check resolc version + run: resolc --version + - name: Test cargo workspace run: make test diff --git a/Cargo.lock b/Cargo.lock index 20507b9..160dc91 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -339,6 +339,7 @@ dependencies = [ "const-hex", "derive_more 2.0.1", "foldhash", + "getrandom 0.3.3", "hashbrown 0.15.3", "indexmap 2.10.0", "itoa", @@ -2227,6 +2228,66 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "foundry-compilers-artifacts" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c2676d70082ed23680fe2d08c0b750d5f7f2438c6d946f1cb140a76c5e5e0392" +dependencies = [ + "foundry-compilers-artifacts-solc", + "foundry-compilers-artifacts-vyper", +] + +[[package]] +name = "foundry-compilers-artifacts-solc" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3ada94dc5946334bb08df574855ba345ab03ba8c6f233560c72c8d61fa9db80" +dependencies = [ + "alloy-json-abi", + "alloy-primitives", + "foundry-compilers-core", + "path-slash", + "regex", + "semver 1.0.26", + "serde", + "serde_json", + "thiserror 2.0.12", + "tracing", + "yansi", +] + +[[package]] +name = "foundry-compilers-artifacts-vyper" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "372052af72652e375a6e7eed22179bd8935114e25e1c5a8cca7f00e8f20bd94c" +dependencies = [ + "alloy-json-abi", + "alloy-primitives", + "foundry-compilers-artifacts-solc", + "foundry-compilers-core", + "path-slash", + "semver 1.0.26", + "serde", +] + +[[package]] +name = "foundry-compilers-core" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf0962c46855979300f6526ed57f987ccf6a025c2b92ce574b281d9cb2ef666b" +dependencies = [ + "alloy-primitives", + "cfg-if", + "dunce", + "path-slash", + "semver 1.0.26", + "serde", + "serde_json", + "thiserror 2.0.12", +] + [[package]] name = "fs-err" version = "2.11.0" @@ -3456,6 +3517,12 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +[[package]] +name = "path-slash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42" + [[package]] name = "pbkdf2" version = "0.12.2" @@ -3955,6 +4022,7 @@ dependencies = [ "anyhow", "futures", "once_cell", + "semver 1.0.26", "tokio", "tracing", ] @@ -3963,13 +4031,17 @@ dependencies = [ 
name = "revive-dt-compiler" version = "0.1.0" dependencies = [ + "alloy", "alloy-primitives", "anyhow", + "foundry-compilers-artifacts", "revive-common", + "revive-dt-common", "revive-dt-config", "revive-dt-solc-binaries", "revive-solc-json-interface", "semver 1.0.26", + "serde", "serde_json", "tracing", ] @@ -4001,8 +4073,7 @@ dependencies = [ "revive-dt-node", "revive-dt-node-interaction", "revive-dt-report", - "revive-solc-json-interface", - "serde_json", + "semver 1.0.26", "temp-dir", "tracing", "tracing-subscriber", @@ -4055,9 +4126,9 @@ name = "revive-dt-report" version = "0.1.0" dependencies = [ "anyhow", + "revive-dt-compiler", "revive-dt-config", "revive-dt-format", - "revive-solc-json-interface", "serde", "serde_json", "tracing", @@ -4070,6 +4141,7 @@ dependencies = [ "anyhow", "hex", "reqwest", + "revive-dt-common", "semver 1.0.26", "serde", "sha2 0.10.9", @@ -4167,6 +4239,9 @@ name = "rustc-hash" version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" +dependencies = [ + "rand 0.8.5", +] [[package]] name = "rustc-hex" @@ -6217,6 +6292,12 @@ dependencies = [ "tap", ] +[[package]] +name = "yansi" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + [[package]] name = "yoke" version = "0.8.0" diff --git a/Cargo.toml b/Cargo.toml index d077231..9855d12 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,6 +26,7 @@ alloy-primitives = "1.2.1" alloy-sol-types = "1.2.1" anyhow = "1.0" clap = { version = "4", features = ["derive"] } +foundry-compilers-artifacts = { version = "0.18.0" } futures = { version = "0.3.31" } hex = "0.4.3" reqwest = { version = "0.12.15", features = ["blocking", "json"] } diff --git a/crates/common/Cargo.toml b/crates/common/Cargo.toml index 50d3d5e..7dc81c4 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml 
@@ -11,6 +11,7 @@ rust-version.workspace = true [dependencies] anyhow = { workspace = true } futures = { workspace = true } +semver = { workspace = true } tracing = { workspace = true } once_cell = { workspace = true } tokio = { workspace = true } diff --git a/crates/common/src/lib.rs b/crates/common/src/lib.rs index baee9f1..8b0ae35 100644 --- a/crates/common/src/lib.rs +++ b/crates/common/src/lib.rs @@ -4,3 +4,4 @@ pub mod concepts; pub mod iterators; pub mod macros; +pub mod types; diff --git a/crates/common/src/types/mod.rs b/crates/common/src/types/mod.rs new file mode 100644 index 0000000..4cd063a --- /dev/null +++ b/crates/common/src/types/mod.rs @@ -0,0 +1,3 @@ +mod version_or_requirement; + +pub use version_or_requirement::*; diff --git a/crates/common/src/types/version_or_requirement.rs b/crates/common/src/types/version_or_requirement.rs new file mode 100644 index 0000000..787a37d --- /dev/null +++ b/crates/common/src/types/version_or_requirement.rs @@ -0,0 +1,41 @@ +use semver::{Version, VersionReq}; + +#[derive(Clone, Debug)] +pub enum VersionOrRequirement { + Version(Version), + Requirement(VersionReq), +} + +impl From for VersionOrRequirement { + fn from(value: Version) -> Self { + Self::Version(value) + } +} + +impl From for VersionOrRequirement { + fn from(value: VersionReq) -> Self { + Self::Requirement(value) + } +} + +impl TryFrom for Version { + type Error = anyhow::Error; + + fn try_from(value: VersionOrRequirement) -> Result { + let VersionOrRequirement::Version(version) = value else { + anyhow::bail!("Version or requirement was not a version"); + }; + Ok(version) + } +} + +impl TryFrom for VersionReq { + type Error = anyhow::Error; + + fn try_from(value: VersionOrRequirement) -> Result { + let VersionOrRequirement::Requirement(requirement) = value else { + anyhow::bail!("Version or requirement was not a requirement"); + }; + Ok(requirement) + } +} diff --git a/crates/compiler/Cargo.toml b/crates/compiler/Cargo.toml index 05b02d4..295a147 
100644 --- a/crates/compiler/Cargo.toml +++ b/crates/compiler/Cargo.toml @@ -10,12 +10,16 @@ rust-version.workspace = true [dependencies] revive-solc-json-interface = { workspace = true } +revive-dt-common = { workspace = true } revive-dt-config = { workspace = true } revive-dt-solc-binaries = { workspace = true } revive-common = { workspace = true } +alloy = { workspace = true } alloy-primitives = { workspace = true } anyhow = { workspace = true } +foundry-compilers-artifacts = { workspace = true } semver = { workspace = true } +serde = { workspace = true } serde_json = { workspace = true } tracing = { workspace = true } diff --git a/crates/compiler/src/lib.rs b/crates/compiler/src/lib.rs index 63e496e..364359c 100644 --- a/crates/compiler/src/lib.rs +++ b/crates/compiler/src/lib.rs @@ -4,21 +4,20 @@ //! - Polkadot revive Wasm compiler use std::{ + collections::HashMap, fs::read_to_string, hash::Hash, path::{Path, PathBuf}, }; +use alloy::json_abi::JsonAbi; use alloy_primitives::Address; -use revive_dt_config::Arguments; +use semver::Version; +use serde::{Deserialize, Serialize}; use revive_common::EVMVersion; -use revive_solc_json_interface::{ - SolcStandardJsonInput, SolcStandardJsonInputLanguage, SolcStandardJsonInputSettings, - SolcStandardJsonInputSettingsOptimizer, SolcStandardJsonInputSettingsSelection, - SolcStandardJsonOutput, -}; -use semver::Version; +use revive_dt_common::types::VersionOrRequirement; +use revive_dt_config::Arguments; pub mod revive_js; pub mod revive_resolc; @@ -32,63 +31,44 @@ pub trait SolidityCompiler { /// The low-level compiler interface. 
fn build( &self, - input: CompilerInput, - ) -> anyhow::Result>; + input: CompilerInput, + additional_options: Self::Options, + ) -> anyhow::Result; fn new(solc_executable: PathBuf) -> Self; - fn get_compiler_executable(config: &Arguments, version: Version) -> anyhow::Result; + fn get_compiler_executable( + config: &Arguments, + version: impl Into, + ) -> anyhow::Result; + + fn version(&self) -> anyhow::Result; } /// The generic compilation input configuration. -#[derive(Debug)] -pub struct CompilerInput { - pub extra_options: T, - pub input: SolcStandardJsonInput, +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CompilerInput { + pub enable_optimization: Option, + pub via_ir: Option, + pub evm_version: Option, pub allow_paths: Vec, pub base_path: Option, + pub sources: HashMap, + pub libraries: HashMap>, } /// The generic compilation output configuration. -#[derive(Debug)] -pub struct CompilerOutput { - /// The solc standard JSON input. - pub input: CompilerInput, - /// The produced solc standard JSON output. - pub output: SolcStandardJsonOutput, - /// The error message in case the compiler returns abnormally. - pub error: Option, +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +pub struct CompilerOutput { + /// The compiled contracts. The bytecode of the contract is kept as a string incase linking is + /// required and the compiled source has placeholders. 
+ pub contracts: HashMap>, } -impl PartialEq for CompilerInput -where - T: PartialEq + Eq + Hash, -{ - fn eq(&self, other: &Self) -> bool { - let self_input = serde_json::to_vec(&self.input).unwrap_or_default(); - let other_input = serde_json::to_vec(&self.input).unwrap_or_default(); - self.extra_options.eq(&other.extra_options) && self_input == other_input - } -} - -impl Eq for CompilerInput where T: PartialEq + Eq + Hash {} - -impl Hash for CompilerInput -where - T: PartialEq + Eq + Hash, -{ - fn hash(&self, state: &mut H) { - self.extra_options.hash(state); - state.write(&serde_json::to_vec(&self.input).unwrap_or_default()); - } -} - -/// A generic builder style interface for configuring all compiler options. +/// A generic builder style interface for configuring the supported compiler options. pub struct Compiler { - input: SolcStandardJsonInput, - extra_options: T::Options, - allow_paths: Vec, - base_path: Option, + input: CompilerInput, + additional_options: T::Options, } impl Default for Compiler { @@ -103,93 +83,75 @@ where { pub fn new() -> Self { Self { - input: SolcStandardJsonInput { - language: SolcStandardJsonInputLanguage::Solidity, + input: CompilerInput { + enable_optimization: Default::default(), + via_ir: Default::default(), + evm_version: Default::default(), + allow_paths: Default::default(), + base_path: Default::default(), sources: Default::default(), - settings: SolcStandardJsonInputSettings::new( - None, - Default::default(), - None, - SolcStandardJsonInputSettingsSelection::new_required(), - SolcStandardJsonInputSettingsOptimizer::new( - false, - None, - &Version::new(0, 0, 0), - false, - ), - None, - None, - ), + libraries: Default::default(), }, - extra_options: Default::default(), - allow_paths: Default::default(), - base_path: None, + additional_options: T::Options::default(), } } - pub fn solc_optimizer(mut self, enabled: bool) -> Self { - self.input.settings.optimizer.enabled = enabled; + pub fn with_optimization(mut self, value: impl 
Into>) -> Self { + self.input.enable_optimization = value.into(); self } - pub fn with_source(mut self, path: &Path) -> anyhow::Result { + pub fn with_via_ir(mut self, value: impl Into>) -> Self { + self.input.via_ir = value.into(); + self + } + + pub fn with_evm_version(mut self, version: impl Into>) -> Self { + self.input.evm_version = version.into(); + self + } + + pub fn with_allow_path(mut self, path: impl AsRef) -> Self { + self.input.allow_paths.push(path.as_ref().into()); + self + } + + pub fn with_base_path(mut self, path: impl Into>) -> Self { + self.input.base_path = path.into(); + self + } + + pub fn with_source(mut self, path: impl AsRef) -> anyhow::Result { self.input .sources - .insert(path.display().to_string(), read_to_string(path)?.into()); + .insert(path.as_ref().to_path_buf(), read_to_string(path.as_ref())?); Ok(self) } - pub fn evm_version(mut self, evm_version: EVMVersion) -> Self { - self.input.settings.evm_version = Some(evm_version); - self - } - - pub fn extra_options(mut self, extra_options: T::Options) -> Self { - self.extra_options = extra_options; - self - } - - pub fn allow_path(mut self, path: PathBuf) -> Self { - self.allow_paths.push(path); - self - } - - pub fn base_path(mut self, base_path: PathBuf) -> Self { - self.base_path = Some(base_path); - self - } - pub fn with_library( mut self, - scope: impl AsRef, - library_ident: impl AsRef, - library_address: Address, + path: impl AsRef, + name: impl AsRef, + address: Address, ) -> Self { self.input - .settings .libraries - .get_or_insert_with(Default::default) - .entry(scope.as_ref().display().to_string()) + .entry(path.as_ref().to_path_buf()) .or_default() - .insert( - library_ident.as_ref().to_owned(), - library_address.to_string(), - ); - + .insert(name.as_ref().into(), address); self } - pub fn try_build(self, solc_path: PathBuf) -> anyhow::Result> { - T::new(solc_path).build(CompilerInput { - extra_options: self.extra_options, - input: self.input, - allow_paths: 
self.allow_paths, - base_path: self.base_path, - }) + pub fn with_additional_options(mut self, options: impl Into) -> Self { + self.additional_options = options.into(); + self } - /// Returns the compiler JSON input. - pub fn input(&self) -> SolcStandardJsonInput { + pub fn try_build(self, compiler_path: impl AsRef) -> anyhow::Result { + T::new(compiler_path.as_ref().to_path_buf()).build(self.input, self.additional_options) + } + + pub fn input(&self) -> CompilerInput { self.input.clone() } } diff --git a/crates/compiler/src/revive_resolc.rs b/crates/compiler/src/revive_resolc.rs index dad9ee9..507539e 100644 --- a/crates/compiler/src/revive_resolc.rs +++ b/crates/compiler/src/revive_resolc.rs @@ -6,9 +6,19 @@ use std::{ process::{Command, Stdio}, }; -use crate::{CompilerInput, CompilerOutput, SolidityCompiler}; +use alloy::json_abi::JsonAbi; +use revive_dt_common::types::VersionOrRequirement; use revive_dt_config::Arguments; -use revive_solc_json_interface::SolcStandardJsonOutput; +use revive_solc_json_interface::{ + SolcStandardJsonInput, SolcStandardJsonInputLanguage, SolcStandardJsonInputSettings, + SolcStandardJsonInputSettingsOptimizer, SolcStandardJsonInputSettingsSelection, + SolcStandardJsonOutput, +}; + +use crate::{CompilerInput, CompilerOutput, SolidityCompiler}; + +use anyhow::Context; +use semver::Version; // TODO: I believe that we need to also pass the solc compiler to resolc so that resolc uses the // specified solc compiler. I believe that currently we completely ignore the specified solc binary @@ -27,8 +37,56 @@ impl SolidityCompiler for Resolc { #[tracing::instrument(level = "debug", ret)] fn build( &self, - input: CompilerInput, - ) -> anyhow::Result> { + CompilerInput { + enable_optimization, + // Ignored and not honored since this is required for the resolc compilation. 
+ via_ir: _via_ir, + evm_version, + allow_paths, + base_path, + sources, + libraries, + }: CompilerInput, + additional_options: Self::Options, + ) -> anyhow::Result { + let input = SolcStandardJsonInput { + language: SolcStandardJsonInputLanguage::Solidity, + sources: sources + .into_iter() + .map(|(path, source)| (path.display().to_string(), source.into())) + .collect(), + settings: SolcStandardJsonInputSettings { + evm_version, + libraries: Some( + libraries + .into_iter() + .map(|(source_code, libraries_map)| { + ( + source_code.display().to_string(), + libraries_map + .into_iter() + .map(|(library_ident, library_address)| { + (library_ident, library_address.to_string()) + }) + .collect(), + ) + }) + .collect(), + ), + remappings: None, + output_selection: Some(SolcStandardJsonInputSettingsSelection::new_required()), + via_ir: Some(true), + optimizer: SolcStandardJsonInputSettingsOptimizer::new( + enable_optimization.unwrap_or(false), + None, + &Version::new(0, 0, 0), + false, + ), + metadata: None, + polkavm: None, + }, + }; + let mut command = Command::new(&self.resolc_path); command .stdin(Stdio::piped()) @@ -36,13 +94,12 @@ impl SolidityCompiler for Resolc { .stderr(Stdio::piped()) .arg("--standard-json"); - if let Some(ref base_path) = input.base_path { + if let Some(ref base_path) = base_path { command.arg("--base-path").arg(base_path); } - if !input.allow_paths.is_empty() { + if !allow_paths.is_empty() { command.arg("--allow-paths").arg( - input - .allow_paths + allow_paths .iter() .map(|path| path.display().to_string()) .collect::>() @@ -52,93 +109,86 @@ impl SolidityCompiler for Resolc { let mut child = command.spawn()?; let stdin_pipe = child.stdin.as_mut().expect("stdin must be piped"); - serde_json::to_writer(stdin_pipe, &input.input)?; - - let json_in = serde_json::to_string_pretty(&input.input)?; + serde_json::to_writer(stdin_pipe, &input)?; let output = child.wait_with_output()?; let stdout = output.stdout; let stderr = output.stderr; if 
!output.status.success() { + let json_in = serde_json::to_string_pretty(&input)?; let message = String::from_utf8_lossy(&stderr); tracing::error!( - "resolc failed exit={} stderr={} JSON-in={} ", - output.status, - &message, - json_in, + status = %output.status, + message = %message, + json_input = json_in, + "Compilation using resolc failed" ); - return Ok(CompilerOutput { - input, - output: Default::default(), - error: Some(message.into()), - }); + anyhow::bail!("Compilation failed with an error: {message}"); } - let mut parsed = - serde_json::from_slice::(&stdout).map_err(|e| { - anyhow::anyhow!( - "failed to parse resolc JSON output: {e}\nstderr: {}", - String::from_utf8_lossy(&stderr) - ) - })?; - - // Detecting if the compiler output contained errors and reporting them through logs and - // errors instead of returning the compiler output that might contain errors. - for error in parsed.errors.iter().flatten() { - if error.severity == "error" { - tracing::error!(?error, ?input, "Encountered an error in the compilation"); - anyhow::bail!("Encountered an error in the compilation: {error}") - } - } - - // We need to do some post processing on the output to make it in the same format that solc - // outputs. More specifically, for each contract, the `.metadata` field should be replaced - // with the `.metadata.solc_metadata` field which contains the ABI and other information - // about the compiled contracts. We do this because we do not want any downstream logic to - // need to differentiate between which compiler is being used when extracting the ABI of the - // contracts. - if let Some(ref mut contracts) = parsed.contracts { - for (contract_path, contracts_map) in contracts.iter_mut() { - for (contract_name, contract_info) in contracts_map.iter_mut() { - let Some(metadata) = contract_info.metadata.take() else { - continue; - }; - - // Get the `solc_metadata` in the metadata of the contract. 
- let Some(solc_metadata) = metadata - .get("solc_metadata") - .and_then(|metadata| metadata.as_str()) - else { - tracing::error!( - contract_path, - contract_name, - metadata = serde_json::to_string(&metadata).unwrap(), - "Encountered a contract compiled with resolc that has no solc_metadata" - ); - anyhow::bail!( - "Contract {} compiled with resolc that has no solc_metadata", - contract_name - ); - }; - - // Replace the original metadata with the new solc_metadata. - contract_info.metadata = - Some(serde_json::Value::String(solc_metadata.to_string())); - } - } - } + let parsed = serde_json::from_slice::(&stdout).map_err(|e| { + anyhow::anyhow!( + "failed to parse resolc JSON output: {e}\nstderr: {}", + String::from_utf8_lossy(&stderr) + ) + })?; tracing::debug!( output = %serde_json::to_string(&parsed).unwrap(), "Compiled successfully" ); - Ok(CompilerOutput { - input, - output: parsed, - error: None, - }) + // Detecting if the compiler output contained errors and reporting them through logs and + // errors instead of returning the compiler output that might contain errors. 
+ for error in parsed.errors.iter().flatten() { + if error.severity == "error" { + tracing::error!( + ?error, + ?input, + output = %serde_json::to_string(&parsed).unwrap(), + "Encountered an error in the compilation" + ); + anyhow::bail!("Encountered an error in the compilation: {error}") + } + } + + let Some(contracts) = parsed.contracts else { + anyhow::bail!("Unexpected error - resolc output doesn't have a contracts section"); + }; + + let mut compiler_output = CompilerOutput::default(); + for (source_path, contracts) in contracts.into_iter() { + let source_path = PathBuf::from(source_path).canonicalize()?; + + let map = compiler_output.contracts.entry(source_path).or_default(); + for (contract_name, contract_information) in contracts.into_iter() { + let bytecode = contract_information + .evm + .and_then(|evm| evm.bytecode.clone()) + .context("Unexpected - Contract compiled with resolc has no bytecode")?; + let abi = contract_information + .metadata + .as_ref() + .and_then(|metadata| metadata.as_object()) + .and_then(|metadata| metadata.get("solc_metadata")) + .and_then(|solc_metadata| solc_metadata.as_str()) + .and_then(|metadata| serde_json::from_str::(metadata).ok()) + .and_then(|metadata| { + metadata.get("output").and_then(|output| { + output + .get("abi") + .and_then(|abi| serde_json::from_value::(abi.clone()).ok()) + }) + }) + .context( + "Unexpected - Failed to get the ABI for a contract compiled with resolc", + )?; + map.insert(contract_name, (bytecode.object, abi)); + } + } + + Ok(compiler_output) } fn new(resolc_path: PathBuf) -> Self { @@ -147,7 +197,7 @@ impl SolidityCompiler for Resolc { fn get_compiler_executable( config: &Arguments, - _version: semver::Version, + _version: impl Into, ) -> anyhow::Result { if !config.resolc.as_os_str().is_empty() { return Ok(config.resolc.clone()); @@ -155,4 +205,45 @@ impl SolidityCompiler for Resolc { Ok(PathBuf::from("resolc")) } + + fn version(&self) -> anyhow::Result { + // Logic for parsing the resolc 
version from the following string: + // Solidity frontend for the revive compiler version 0.3.0+commit.b238913.llvm-18.1.8 + + let output = Command::new(self.resolc_path.as_path()) + .arg("--version") + .stdout(Stdio::piped()) + .spawn()? + .wait_with_output()? + .stdout; + let output = String::from_utf8_lossy(&output); + let version_string = output + .split("version ") + .nth(1) + .context("Version parsing failed")? + .split("+") + .next() + .context("Version parsing failed")?; + + Version::parse(version_string).map_err(Into::into) + } +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn compiler_version_can_be_obtained() { + // Arrange + let args = Arguments::default(); + let path = Resolc::get_compiler_executable(&args, Version::new(0, 7, 6)).unwrap(); + let compiler = Resolc::new(path); + + // Act + let version = compiler.version(); + + // Assert + let _ = version.expect("Failed to get version"); + } } diff --git a/crates/compiler/src/solc.rs b/crates/compiler/src/solc.rs index 8184b83..20f5e6d 100644 --- a/crates/compiler/src/solc.rs +++ b/crates/compiler/src/solc.rs @@ -6,10 +6,21 @@ use std::{ process::{Command, Stdio}, }; -use crate::{CompilerInput, CompilerOutput, SolidityCompiler}; +use revive_dt_common::types::VersionOrRequirement; use revive_dt_config::Arguments; use revive_dt_solc_binaries::download_solc; -use revive_solc_json_interface::SolcStandardJsonOutput; + +use crate::{CompilerInput, CompilerOutput, SolidityCompiler}; + +use anyhow::Context; +use foundry_compilers_artifacts::{ + output_selection::{ + BytecodeOutputSelection, ContractOutputSelection, EvmOutputSelection, OutputSelection, + }, + solc::CompilerOutput as SolcOutput, + solc::*, +}; +use semver::Version; #[derive(Debug)] pub struct Solc { @@ -22,8 +33,63 @@ impl SolidityCompiler for Solc { #[tracing::instrument(level = "debug", ret)] fn build( &self, - input: CompilerInput, - ) -> anyhow::Result> { + CompilerInput { + enable_optimization, + via_ir, + evm_version, + 
allow_paths, + base_path, + sources, + libraries, + }: CompilerInput, + _: Self::Options, + ) -> anyhow::Result { + let input = SolcInput { + language: SolcLanguage::Solidity, + sources: Sources( + sources + .into_iter() + .map(|(source_path, source_code)| (source_path, Source::new(source_code))) + .collect(), + ), + settings: Settings { + optimizer: Optimizer { + enabled: enable_optimization, + details: Some(Default::default()), + ..Default::default() + }, + output_selection: OutputSelection::common_output_selection( + [ + ContractOutputSelection::Abi, + ContractOutputSelection::Evm(EvmOutputSelection::ByteCode( + BytecodeOutputSelection::Object, + )), + ] + .into_iter() + .map(|item| item.to_string()), + ), + evm_version: evm_version.map(|version| version.to_string().parse().unwrap()), + via_ir, + libraries: Libraries { + libs: libraries + .into_iter() + .map(|(file_path, libraries)| { + ( + file_path, + libraries + .into_iter() + .map(|(library_name, library_address)| { + (library_name, library_address.to_string()) + }) + .collect(), + ) + }) + .collect(), + }, + ..Default::default() + }, + }; + let mut command = Command::new(&self.solc_path); command .stdin(Stdio::piped()) @@ -31,13 +97,12 @@ impl SolidityCompiler for Solc { .stderr(Stdio::piped()) .arg("--standard-json"); - if let Some(ref base_path) = input.base_path { + if let Some(ref base_path) = base_path { command.arg("--base-path").arg(base_path); } - if !input.allow_paths.is_empty() { + if !allow_paths.is_empty() { command.arg("--allow-paths").arg( - input - .allow_paths + allow_paths .iter() .map(|path| path.display().to_string()) .collect::>() @@ -47,31 +112,32 @@ impl SolidityCompiler for Solc { let mut child = command.spawn()?; let stdin = child.stdin.as_mut().expect("should be piped"); - serde_json::to_writer(stdin, &input.input)?; + serde_json::to_writer(stdin, &input)?; let output = child.wait_with_output()?; if !output.status.success() { + let json_in = serde_json::to_string_pretty(&input)?; 
let message = String::from_utf8_lossy(&output.stderr); - tracing::error!("solc failed exit={} stderr={}", output.status, &message); - return Ok(CompilerOutput { - input, - output: Default::default(), - error: Some(message.into()), - }); + tracing::error!( + status = %output.status, + message = %message, + json_input = json_in, + "Compilation using solc failed" + ); + anyhow::bail!("Compilation failed with an error: {message}"); } - let parsed = - serde_json::from_slice::(&output.stdout).map_err(|e| { - anyhow::anyhow!( - "failed to parse resolc JSON output: {e}\nstderr: {}", - String::from_utf8_lossy(&output.stdout) - ) - })?; + let parsed = serde_json::from_slice::(&output.stdout).map_err(|e| { + anyhow::anyhow!( + "failed to parse solc JSON output: {e}\nstdout: {}", + String::from_utf8_lossy(&output.stdout) + ) + })?; // Detecting if the compiler output contained errors and reporting them through logs and // errors instead of returning the compiler output that might contain errors. - for error in parsed.errors.iter().flatten() { - if error.severity == "error" { + for error in parsed.errors.iter() { + if error.severity == Severity::Error { tracing::error!(?error, ?input, "Encountered an error in the compilation"); anyhow::bail!("Encountered an error in the compilation: {error}") } @@ -82,11 +148,29 @@ impl SolidityCompiler for Solc { "Compiled successfully" ); - Ok(CompilerOutput { - input, - output: parsed, - error: None, - }) + let mut compiler_output = CompilerOutput::default(); + for (contract_path, contracts) in parsed.contracts { + let map = compiler_output + .contracts + .entry(contract_path.canonicalize()?)
+ .or_default(); + for (contract_name, contract_info) in contracts.into_iter() { + let source_code = contract_info + .evm + .and_then(|evm| evm.bytecode) + .map(|bytecode| match bytecode.object { + BytecodeObject::Bytecode(bytecode) => bytecode.to_string(), + BytecodeObject::Unlinked(unlinked) => unlinked, + }) + .context("Unexpected - contract compiled with solc has no source code")?; + let abi = contract_info + .abi + .context("Unexpected - contract compiled with solc has no ABI")?; + map.insert(contract_name, (source_code, abi)); + } + } + + Ok(compiler_output) } fn new(solc_path: PathBuf) -> Self { @@ -95,9 +179,57 @@ impl SolidityCompiler for Solc { fn get_compiler_executable( config: &Arguments, - version: semver::Version, + version: impl Into, ) -> anyhow::Result { let path = download_solc(config.directory(), version, config.wasm)?; Ok(path) } + + fn version(&self) -> anyhow::Result { + // The following is the parsing code for the version from the solc version strings which + // look like the following: + // ``` + // solc, the solidity compiler commandline interface + // Version: 0.8.30+commit.73712a01.Darwin.appleclang + // ``` + + let child = Command::new(self.solc_path.as_path()) + .arg("--version") + .stdout(Stdio::piped()) + .spawn()?; + let output = child.wait_with_output()?; + let output = String::from_utf8_lossy(&output.stdout); + let version_line = output + .split("Version: ") + .nth(1) + .context("Version parsing failed")?; + let version_string = version_line + .split("+") + .next() + .context("Version parsing failed")?; + + Version::parse(version_string).map_err(Into::into) + } +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn compiler_version_can_be_obtained() { + // Arrange + let args = Arguments::default(); + let path = Solc::get_compiler_executable(&args, Version::new(0, 7, 6)).unwrap(); + let compiler = Solc::new(path); + + // Act + let version = compiler.version(); + + // Assert + assert_eq!( + version.expect("Failed to get
version"), + Version::new(0, 7, 6) + ) + } } diff --git a/crates/compiler/tests/assets/array_one_element/callable.sol b/crates/compiler/tests/assets/array_one_element/callable.sol new file mode 100644 index 0000000..4c7bc55 --- /dev/null +++ b/crates/compiler/tests/assets/array_one_element/callable.sol @@ -0,0 +1,9 @@ +// SPDX-License-Identifier: MIT + +pragma solidity >=0.6.9; + +contract Callable { + function f(uint[1] memory p1) public pure returns(uint) { + return p1[0]; + } +} diff --git a/crates/compiler/tests/assets/array_one_element/main.sol b/crates/compiler/tests/assets/array_one_element/main.sol new file mode 100644 index 0000000..74789c9 --- /dev/null +++ b/crates/compiler/tests/assets/array_one_element/main.sol @@ -0,0 +1,13 @@ +// SPDX-License-Identifier: MIT + +// Report https://linear.app/matterlabs/issue/CPR-269/call-with-calldata-variable-bug + +pragma solidity >=0.6.9; + +import "./callable.sol"; + +contract Main { + function main(uint[1] calldata p1, Callable callable) public returns(uint) { + return callable.f(p1); + } +} diff --git a/crates/compiler/tests/assets/array_one_element/test.json b/crates/compiler/tests/assets/array_one_element/test.json new file mode 100644 index 0000000..718530b --- /dev/null +++ b/crates/compiler/tests/assets/array_one_element/test.json @@ -0,0 +1,21 @@ +{ "cases": [ { + "name": "first", + "inputs": [ + { + "instance": "Main", + "method": "main", + "calldata": [ + "1", + "Callable.address" + ] + } + ], + "expected": [ + "1" + ] +} ], + "contracts": { + "Main": "main.sol:Main", + "Callable": "callable.sol:Callable" + } +} \ No newline at end of file diff --git a/crates/compiler/tests/lib.rs b/crates/compiler/tests/lib.rs new file mode 100644 index 0000000..63e07e7 --- /dev/null +++ b/crates/compiler/tests/lib.rs @@ -0,0 +1,81 @@ +use std::path::PathBuf; + +use revive_dt_compiler::{Compiler, SolidityCompiler, revive_resolc::Resolc, solc::Solc}; +use revive_dt_config::Arguments; +use semver::Version; + +#[test] +fn 
contracts_can_be_compiled_with_solc() { + // Arrange + let args = Arguments::default(); + let compiler_path = Solc::get_compiler_executable(&args, Version::new(0, 8, 30)).unwrap(); + + // Act + let output = Compiler::::new() + .with_source("./tests/assets/array_one_element/callable.sol") + .unwrap() + .with_source("./tests/assets/array_one_element/main.sol") + .unwrap() + .try_build(compiler_path); + + // Assert + let output = output.expect("Failed to compile"); + assert_eq!(output.contracts.len(), 2); + + let main_file_contracts = output + .contracts + .get( + &PathBuf::from("./tests/assets/array_one_element/main.sol") + .canonicalize() + .unwrap(), + ) + .unwrap(); + let callable_file_contracts = output + .contracts + .get( + &PathBuf::from("./tests/assets/array_one_element/callable.sol") + .canonicalize() + .unwrap(), + ) + .unwrap(); + assert!(main_file_contracts.contains_key("Main")); + assert!(callable_file_contracts.contains_key("Callable")); +} + +#[test] +fn contracts_can_be_compiled_with_resolc() { + // Arrange + let args = Arguments::default(); + let compiler_path = Resolc::get_compiler_executable(&args, Version::new(0, 8, 30)).unwrap(); + + // Act + let output = Compiler::::new() + .with_source("./tests/assets/array_one_element/callable.sol") + .unwrap() + .with_source("./tests/assets/array_one_element/main.sol") + .unwrap() + .try_build(compiler_path); + + // Assert + let output = output.expect("Failed to compile"); + assert_eq!(output.contracts.len(), 2); + + let main_file_contracts = output + .contracts + .get( + &PathBuf::from("./tests/assets/array_one_element/main.sol") + .canonicalize() + .unwrap(), + ) + .unwrap(); + let callable_file_contracts = output + .contracts + .get( + &PathBuf::from("./tests/assets/array_one_element/callable.sol") + .canonicalize() + .unwrap(), + ) + .unwrap(); + assert!(main_file_contracts.contains_key("Main")); + assert!(callable_file_contracts.contains_key("Callable")); +} diff --git a/crates/config/src/lib.rs 
b/crates/config/src/lib.rs index 6b60f73..e275904 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -3,6 +3,7 @@ use std::{ fmt::Display, path::{Path, PathBuf}, + sync::LazyLock, }; use alloy::{network::EthereumWallet, signers::local::PrivateKeySigner}; @@ -144,7 +145,14 @@ impl Arguments { impl Default for Arguments { fn default() -> Self { - Arguments::parse_from(["retester"]) + static TEMP_DIR: LazyLock = LazyLock::new(|| TempDir::new().unwrap()); + + let default = Arguments::parse_from(["retester"]); + + Arguments { + temp_dir: Some(&TEMP_DIR), + ..default + } } } diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index 7c6c551..a1da05d 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -28,6 +28,5 @@ indexmap = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } rayon = { workspace = true } -revive-solc-json-interface = { workspace = true } -serde_json = { workspace = true } +semver = { workspace = true } temp-dir = { workspace = true } diff --git a/crates/core/src/driver/mod.rs b/crates/core/src/driver/mod.rs index 11c16ae..cac0aa8 100644 --- a/crates/core/src/driver/mod.rs +++ b/crates/core/src/driver/mod.rs @@ -3,6 +3,7 @@ use std::collections::HashMap; use std::fmt::Debug; use std::marker::PhantomData; +use std::path::PathBuf; use std::str::FromStr; use alloy::json_abi::JsonAbi; @@ -21,7 +22,7 @@ use alloy::{ }; use anyhow::Context; use indexmap::IndexMap; -use serde_json::Value; +use semver::Version; use revive_dt_common::iterators::FilesWithExtensionIterator; use revive_dt_compiler::{Compiler, SolidityCompiler}; @@ -33,7 +34,6 @@ use revive_dt_format::{input::Input, metadata::Metadata, mode::SolcMode}; use revive_dt_node::Node; use revive_dt_node_interaction::EthereumNode; use revive_dt_report::reporter::{CompilationTask, Report, Span}; -use revive_solc_json_interface::SolcStandardJsonOutput; use crate::Platform; @@ -47,11 +47,8 @@ pub struct State<'a, T: Platform> 
{ /// The [`Span`] used in reporting. span: Span, - /// A vector of all of the compiled contracts. Each call to [`build_contracts`] adds a new entry - /// to this vector. - /// - /// [`build_contracts`]: State::build_contracts - contracts: Vec, + /// A map of all of the compiled contracts for the given metadata file. + compiled_contracts: HashMap>, /// This map stores the contracts deployments that have been made for each case within a /// metadata file. Note, this means that the state can't be reused between different metadata @@ -64,6 +61,9 @@ pub struct State<'a, T: Platform> { /// the libraries with each case. deployed_libraries: HashMap, + /// Stores the version of the compiler used for the given Solc mode. + compiler_version: HashMap<&'a SolcMode, Version>, + phantom: PhantomData, } @@ -75,9 +75,10 @@ where Self { config, span, - contracts: Default::default(), + compiled_contracts: Default::default(), deployed_contracts: Default::default(), deployed_libraries: Default::default(), + compiler_version: Default::default(), phantom: Default::default(), } } @@ -87,7 +88,11 @@ where self.span } - pub fn build_contracts(&mut self, mode: &SolcMode, metadata: &Metadata) -> anyhow::Result<()> { + pub fn build_contracts( + &mut self, + mode: &'a SolcMode, + metadata: &Metadata, + ) -> anyhow::Result<()> { let mut span = self.span(); span.next_metadata( metadata @@ -97,34 +102,21 @@ where .clone(), ); - let Some(version) = mode.last_patch_version(&self.config.solc) else { - anyhow::bail!("unsupported solc version: {:?}", &mode.solc_version); - }; + let compiler_version_or_requirement = + mode.compiler_version_to_use(self.config.solc.clone()); + let compiler_path = + T::Compiler::get_compiler_executable(self.config, compiler_version_or_requirement)?; + let compiler_version = T::Compiler::new(compiler_path.clone()).version()?; + self.compiler_version.insert(mode, compiler_version.clone()); - // Note: if the metadata is contained within a solidity file then this is the only 
file that - // we wish to compile since this is a self-contained test. Otherwise, if it's a JSON file - // then we need to compile all of the contracts that are in the directory since imports are - // allowed in there. - let Some(ref metadata_file_path) = metadata.file_path else { - anyhow::bail!("The metadata file path is not defined"); - }; - let mut files_to_compile = if metadata_file_path - .extension() - .is_some_and(|extension| extension.eq_ignore_ascii_case("sol")) - { - Box::new(std::iter::once(metadata_file_path.clone())) as Box> - } else { - Box::new( - FilesWithExtensionIterator::new(metadata.directory()?) - .with_allowed_extension("sol"), - ) - }; + tracing::info!(%compiler_version, "Resolved the compiler version to use"); let compiler = Compiler::::new() - .allow_path(metadata.directory()?) - .solc_optimizer(mode.solc_optimize()); - let mut compiler = - files_to_compile.try_fold(compiler, |compiler, path| compiler.with_source(&path))?; + .with_allow_path(metadata.directory()?) + .with_optimization(mode.solc_optimize()); + let mut compiler = metadata + .files_to_compile()? 
+ .try_fold(compiler, |compiler, path| compiler.with_source(&path))?; for (library_instance, (library_address, _)) in self.deployed_libraries.iter() { let library_ident = &metadata .contracts @@ -149,28 +141,27 @@ where json_input: compiler.input(), json_output: None, mode: mode.clone(), - compiler_version: format!("{}", &version), + compiler_version: format!("{}", &compiler_version), error: None, }; - let compiler_path = T::Compiler::get_compiler_executable(self.config, version)?; match compiler.try_build(compiler_path) { Ok(output) => { - task.json_output = Some(output.output.clone()); - task.error = output.error; - self.contracts.push(output.output); + task.json_output = Some(output.clone()); - if let Some(last_output) = self.contracts.last() { - if let Some(contracts) = &last_output.contracts { - for (file, contracts_map) in contracts { - for contract_name in contracts_map.keys() { - tracing::debug!( - "Compiled contract: {contract_name} from file: {file}" - ); - } - } - } else { - tracing::warn!("Compiled contracts field is None"); + for (contract_path, contracts) in output.contracts.into_iter() { + let map = self + .compiled_contracts + .entry(contract_path.clone()) + .or_default(); + for (contract_name, contract_info) in contracts.into_iter() { + tracing::debug!( + contract_path = %contract_path.display(), + contract_name = contract_name, + "Compiled contract" + ); + + map.insert(contract_name, contract_info); } } @@ -188,7 +179,7 @@ where pub fn build_and_publish_libraries( &mut self, metadata: &Metadata, - mode: &SolcMode, + mode: &'a SolcMode, node: &T::Blockchain, ) -> anyhow::Result<()> { self.build_contracts(mode, metadata)?; @@ -405,10 +396,11 @@ where mode: &SolcMode, ) -> anyhow::Result<()> { if let Some(ref version_requirement) = expectation.compiler_version { - let Some(compiler_version) = mode.last_patch_version(&self.config.solc) else { - anyhow::bail!("unsupported solc version: {:?}", &mode.solc_version); - }; - if 
!version_requirement.matches(&compiler_version) { + let compiler_version = self + .compiler_version + .get(mode) + .context("Failed to find the compiler version for the solc mode")?; + if !version_requirement.matches(compiler_version) { return Ok(()); } } @@ -605,30 +597,24 @@ where ) }; - let compiled_contract = self.contracts.iter().rev().find_map(|output| { - output - .contracts - .as_ref()? - .get(&contract_source_path.display().to_string()) - .and_then(|source_file_contracts| { - source_file_contracts.get(contract_ident.as_ref()) - }) - }); - let Some(code) = compiled_contract - .and_then(|contract| contract.evm.as_ref().and_then(|evm| evm.bytecode.as_ref())) + let Some((code, abi)) = self + .compiled_contracts + .get(&contract_source_path) + .and_then(|source_file_contracts| source_file_contracts.get(contract_ident.as_ref())) + .cloned() else { tracing::error!( contract_source_path = contract_source_path.display().to_string(), contract_ident = contract_ident.as_ref(), - "Failed to find bytecode for contract" + "Failed to find information for contract" ); anyhow::bail!( - "Failed to find bytecode for contract {:?}", + "Failed to find information for contract {:?}", contract_instance ) }; - let mut code = match alloy::hex::decode(&code.object) { + let mut code = match alloy::hex::decode(&code) { Ok(code) => code, Err(error) => { tracing::error!( @@ -641,28 +627,6 @@ where } }; - let Some(Value::String(metadata)) = - compiled_contract.and_then(|contract| contract.metadata.as_ref()) - else { - tracing::error!("Contract does not have a metadata field"); - anyhow::bail!("Contract does not have a metadata field"); - }; - - let Ok(metadata) = serde_json::from_str::(metadata) else { - tracing::error!(%metadata, "Failed to parse solc metadata into a structured value"); - anyhow::bail!("Failed to parse solc metadata into a structured value {metadata}"); - }; - - let Some(abi) = metadata.get("output").and_then(|value| value.get("abi")) else { - tracing::error!(%metadata,
"Failed to access the .output.abi field of the solc metadata"); - anyhow::bail!("Failed to access the .output.abi field of the solc metadata {metadata}"); - }; - - let Ok(abi) = serde_json::from_value::(abi.clone()) else { - tracing::error!(%metadata, "Failed to deserialize ABI into a structured format"); - anyhow::bail!("Failed to deserialize ABI into a structured format {metadata}"); - }; - if let Some(calldata) = calldata { let calldata = calldata.calldata(self.deployed_contracts(case_idx), node)?; code.extend(calldata); diff --git a/crates/format/src/metadata.rs b/crates/format/src/metadata.rs index fa61dc6..b72376b 100644 --- a/crates/format/src/metadata.rs +++ b/crates/format/src/metadata.rs @@ -9,7 +9,7 @@ use std::{ use serde::{Deserialize, Serialize}; -use revive_dt_common::macros::define_wrapper_type; +use revive_dt_common::{iterators::FilesWithExtensionIterator, macros::define_wrapper_type}; use crate::{ case::Case, @@ -212,6 +212,29 @@ impl Metadata { } } } + + /// Returns an iterator over all of the solidity files that needs to be compiled for this + /// [`Metadata`] object + /// + /// Note: if the metadata is contained within a solidity file then this is the only file that + /// we wish to compile since this is a self-contained test. Otherwise, if it's a JSON file + /// then we need to compile all of the contracts that are in the directory since imports are + /// allowed in there. 
+ pub fn files_to_compile(&self) -> anyhow::Result>> { + let Some(ref metadata_file_path) = self.file_path else { + anyhow::bail!("The metadata file path is not defined"); + }; + if metadata_file_path + .extension() + .is_some_and(|extension| extension.eq_ignore_ascii_case("sol")) + { + Ok(Box::new(std::iter::once(metadata_file_path.clone()))) + } else { + Ok(Box::new( + FilesWithExtensionIterator::new(self.directory()?).with_allowed_extension("sol"), + )) + } + } } define_wrapper_type!( diff --git a/crates/format/src/mode.rs b/crates/format/src/mode.rs index a99a5e8..69b55b2 100644 --- a/crates/format/src/mode.rs +++ b/crates/format/src/mode.rs @@ -1,3 +1,4 @@ +use revive_dt_common::types::VersionOrRequirement; use semver::Version; use serde::de::Deserializer; use serde::{Deserialize, Serialize}; @@ -78,6 +79,15 @@ impl SolcMode { None } + + /// Resolves the [`SolcMode`]'s solidity version requirement into a [`VersionOrRequirement`] if + /// the requirement is present on the object. Otherwise, the passed default version is used. 
+ pub fn compiler_version_to_use(&self, default: Version) -> VersionOrRequirement { + match self.solc_version { + Some(ref requirement) => requirement.clone().into(), + None => default.into(), + } + } } impl<'de> Deserialize<'de> for Mode { diff --git a/crates/report/Cargo.toml b/crates/report/Cargo.toml index 84c294e..1ffb7b8 100644 --- a/crates/report/Cargo.toml +++ b/crates/report/Cargo.toml @@ -10,9 +10,9 @@ rust-version.workspace = true [dependencies] revive-dt-config = { workspace = true } revive-dt-format = { workspace = true } +revive-dt-compiler = { workspace = true } anyhow = { workspace = true } tracing = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } -revive-solc-json-interface = { workspace = true } diff --git a/crates/report/src/analyzer.rs b/crates/report/src/analyzer.rs index 81d2409..52fd360 100644 --- a/crates/report/src/analyzer.rs +++ b/crates/report/src/analyzer.rs @@ -1,5 +1,6 @@ //! The report analyzer enriches the raw report data. +use revive_dt_compiler::CompilerOutput; use serde::{Deserialize, Serialize}; use crate::reporter::CompilationTask; @@ -13,41 +14,27 @@ pub struct CompilerStatistics { pub mean_code_size: usize, /// The mean size of the optimized YUL IR. pub mean_yul_size: usize, - /// Is a proxy because the YUL also containes a lot of comments. + /// Is a proxy because the YUL also contains a lot of comments. pub yul_to_bytecode_size_ratio: f32, } impl CompilerStatistics { /// Cumulatively update the statistics with the next compiler task. 
pub fn sample(&mut self, compilation_task: &CompilationTask) { - let Some(output) = &compilation_task.json_output else { - return; - }; - - let Some(contracts) = &output.contracts else { + let Some(CompilerOutput { contracts }) = &compilation_task.json_output else { return; }; for (_solidity, contracts) in contracts.iter() { - for (_name, contract) in contracts.iter() { - let Some(evm) = &contract.evm else { - continue; - }; - let Some(deploy_code) = &evm.deployed_bytecode else { - continue; - }; - + for (_name, (bytecode, _)) in contracts.iter() { // The EVM bytecode can be unlinked and thus is not necessarily a decodable hex // string; for our statistics this is a good enough approximation. - let bytecode_size = deploy_code.object.len() / 2; + let bytecode_size = bytecode.len() / 2; - let yul_size = contract - .ir_optimized - .as_ref() - .expect("if the contract has a deploy code it should also have the opimized IR") - .len(); + // TODO: for the time being we set the yul_size to be zero. We need to change this + // when we overhaul the reporting. - self.update_sizes(bytecode_size, yul_size); + self.update_sizes(bytecode_size, 0); } } } diff --git a/crates/report/src/reporter.rs b/crates/report/src/reporter.rs index 1a5c0a6..9b9303d 100644 --- a/crates/report/src/reporter.rs +++ b/crates/report/src/reporter.rs @@ -12,11 +12,11 @@ use std::{ }; use anyhow::Context; +use revive_dt_compiler::{CompilerInput, CompilerOutput}; use serde::{Deserialize, Serialize}; use revive_dt_config::{Arguments, TestingPlatform}; use revive_dt_format::{corpus::Corpus, mode::SolcMode}; -use revive_solc_json_interface::{SolcStandardJsonInput, SolcStandardJsonOutput}; use crate::analyzer::CompilerStatistics; @@ -44,9 +44,9 @@ pub struct Report { #[derive(Clone, Debug, Serialize, Deserialize)] pub struct CompilationTask { /// The observed compiler input. - pub json_input: SolcStandardJsonInput, + pub json_input: CompilerInput, /// The observed compiler output. 
- pub json_output: Option, + pub json_output: Option, /// The observed compiler mode. pub mode: SolcMode, /// The observed compiler version. @@ -152,15 +152,7 @@ impl Report { for (platform, results) in self.compiler_results.iter() { for result in results { // ignore if there were no errors - if result.compilation_task.error.is_none() - && result - .compilation_task - .json_output - .as_ref() - .and_then(|output| output.errors.as_ref()) - .map(|errors| errors.is_empty()) - .unwrap_or(true) - { + if result.compilation_task.error.is_none() { continue; } diff --git a/crates/solc-binaries/Cargo.toml b/crates/solc-binaries/Cargo.toml index 22d29b0..9bb6090 100644 --- a/crates/solc-binaries/Cargo.toml +++ b/crates/solc-binaries/Cargo.toml @@ -9,6 +9,8 @@ repository.workspace = true rust-version.workspace = true [dependencies] +revive-dt-common = { workspace = true } + anyhow = { workspace = true } hex = { workspace = true } tracing = { workspace = true } diff --git a/crates/solc-binaries/src/download.rs b/crates/solc-binaries/src/download.rs index c893e3a..067102c 100644 --- a/crates/solc-binaries/src/download.rs +++ b/crates/solc-binaries/src/download.rs @@ -5,6 +5,8 @@ use std::{ sync::{LazyLock, Mutex}, }; +use revive_dt_common::types::VersionOrRequirement; + use semver::Version; use sha2::{Digest, Sha256}; @@ -52,27 +54,50 @@ impl GHDownloader { pub const WINDOWS_NAME: &str = "solc-windows.exe"; pub const WASM_NAME: &str = "soljson.js"; - fn new(version: Version, target: &'static str, list: &'static str) -> Self { - Self { - version, - target, - list, + fn new( + version: impl Into, + target: &'static str, + list: &'static str, + ) -> anyhow::Result { + let version_or_requirement = version.into(); + match version_or_requirement { + VersionOrRequirement::Version(version) => Ok(Self { + version, + target, + list, + }), + VersionOrRequirement::Requirement(requirement) => { + let Some(version) = List::download(list)? 
+ .builds + .into_iter() + .map(|build| build.version) + .filter(|version| requirement.matches(version)) + .max() + else { + anyhow::bail!("Failed to find a version that satisfies {requirement:?}"); + }; + Ok(Self { + version, + target, + list, + }) + } } } - pub fn linux(version: Version) -> Self { + pub fn linux(version: impl Into) -> anyhow::Result { Self::new(version, Self::LINUX_NAME, List::LINUX_URL) } - pub fn macosx(version: Version) -> Self { + pub fn macosx(version: impl Into) -> anyhow::Result { Self::new(version, Self::MACOSX_NAME, List::MACOSX_URL) } - pub fn windows(version: Version) -> Self { + pub fn windows(version: impl Into) -> anyhow::Result { Self::new(version, Self::WINDOWS_NAME, List::WINDOWS_URL) } - pub fn wasm(version: Version) -> Self { + pub fn wasm(version: impl Into) -> anyhow::Result { Self::new(version, Self::WASM_NAME, List::WASM_URL) } @@ -111,24 +136,24 @@ mod tests { #[test] fn try_get_windows() { let version = List::download(List::WINDOWS_URL).unwrap().latest_release; - GHDownloader::windows(version).download().unwrap(); + GHDownloader::windows(version).unwrap().download().unwrap(); } #[test] fn try_get_macosx() { let version = List::download(List::MACOSX_URL).unwrap().latest_release; - GHDownloader::macosx(version).download().unwrap(); + GHDownloader::macosx(version).unwrap().download().unwrap(); } #[test] fn try_get_linux() { let version = List::download(List::LINUX_URL).unwrap().latest_release; - GHDownloader::linux(version).download().unwrap(); + GHDownloader::linux(version).unwrap().download().unwrap(); } #[test] fn try_get_wasm() { let version = List::download(List::WASM_URL).unwrap().latest_release; - GHDownloader::wasm(version).download().unwrap(); + GHDownloader::wasm(version).unwrap().download().unwrap(); } } diff --git a/crates/solc-binaries/src/lib.rs b/crates/solc-binaries/src/lib.rs index aabc86e..5fefbd8 100644 --- a/crates/solc-binaries/src/lib.rs +++ b/crates/solc-binaries/src/lib.rs @@ -7,7 +7,8 @@ use 
std::path::{Path, PathBuf}; use cache::get_or_download; use download::GHDownloader; -use semver::Version; + +use revive_dt_common::types::VersionOrRequirement; pub mod cache; pub mod download; @@ -20,7 +21,7 @@ pub mod list; /// and not download it again. pub fn download_solc( cache_directory: &Path, - version: Version, + version: impl Into, wasm: bool, ) -> anyhow::Result { let downloader = if wasm { @@ -33,7 +34,7 @@ pub fn download_solc( GHDownloader::windows(version) } else { unimplemented!() - }; + }?; get_or_download(cache_directory, &downloader) }