resolc crate (#328)

- Factor the YUL crate out of `revive-solidity`.
- `revive-solidity` is in reality not a Solidity implementation but the
revive Solidity compiler driver (`resolc`). Renaming it not only gets
this straight but also yields a binary with the same name as the crate,
which should be less confusing.

---------

Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
This commit is contained in:
xermicus
2025-05-27 09:48:43 +02:00
committed by GitHub
parent 090e3ac13c
commit bd4e108bb0
99 changed files with 599 additions and 624 deletions
+155
View File
@@ -0,0 +1,155 @@
//! The Solidity contract build.
use std::collections::HashSet;
use std::fs::File;
use std::io::Write;
use std::path::Path;
use revive_solc_json_interface::CombinedJsonContract;
use revive_solc_json_interface::SolcStandardJsonOutputContract;
use serde::Deserialize;
use serde::Serialize;
/// The Solidity contract build.
///
/// Holds the finished LLVM build artifacts of one contract together with the
/// metadata needed to emit it to files, combined JSON, or standard JSON.
#[derive(Debug, Serialize, Deserialize)]
pub struct Contract {
    /// The contract path.
    pub path: String,
    /// The auxiliary identifier. Used to identify Yul objects.
    pub identifier: String,
    /// The LLVM module build.
    pub build: revive_llvm_context::PolkaVMBuild,
    /// The metadata JSON.
    pub metadata_json: serde_json::Value,
    /// The factory dependencies.
    pub factory_dependencies: HashSet<String>,
}
impl Contract {
/// A shortcut constructor.
pub fn new(
path: String,
identifier: String,
build: revive_llvm_context::PolkaVMBuild,
metadata_json: serde_json::Value,
factory_dependencies: HashSet<String>,
) -> Self {
Self {
path,
identifier,
build,
metadata_json,
factory_dependencies,
}
}
/// Writes the contract text assembly and bytecode to files.
pub fn write_to_directory(
self,
path: &Path,
output_assembly: bool,
output_binary: bool,
overwrite: bool,
) -> anyhow::Result<()> {
let file_name = Self::short_path(self.path.as_str());
if output_assembly {
let file_name = format!(
"{}.{}",
file_name,
revive_common::EXTENSION_POLKAVM_ASSEMBLY
);
let mut file_path = path.to_owned();
file_path.push(file_name);
if file_path.exists() && !overwrite {
anyhow::bail!(
"Refusing to overwrite an existing file {file_path:?} (use --overwrite to force)."
);
} else {
let assembly_text = self.build.assembly_text;
File::create(&file_path)
.map_err(|error| {
anyhow::anyhow!("File {:?} creating error: {}", file_path, error)
})?
.write_all(assembly_text.as_bytes())
.map_err(|error| {
anyhow::anyhow!("File {:?} writing error: {}", file_path, error)
})?;
}
}
if output_binary {
let file_name = format!("{}.{}", file_name, revive_common::EXTENSION_POLKAVM_BINARY);
let mut file_path = path.to_owned();
file_path.push(file_name);
if file_path.exists() && !overwrite {
anyhow::bail!(
"Refusing to overwrite an existing file {file_path:?} (use --overwrite to force)."
);
} else {
File::create(&file_path)
.map_err(|error| {
anyhow::anyhow!("File {:?} creating error: {}", file_path, error)
})?
.write_all(self.build.bytecode.as_slice())
.map_err(|error| {
anyhow::anyhow!("File {:?} writing error: {}", file_path, error)
})?;
}
}
Ok(())
}
/// Writes the contract text assembly and bytecode to the combined JSON.
pub fn write_to_combined_json(
self,
combined_json_contract: &mut CombinedJsonContract,
) -> anyhow::Result<()> {
if let Some(metadata) = combined_json_contract.metadata.as_mut() {
*metadata = self.metadata_json.to_string();
}
if let Some(asm) = combined_json_contract.asm.as_mut() {
*asm = serde_json::Value::String(self.build.assembly_text);
}
let hexadecimal_bytecode = hex::encode(self.build.bytecode);
combined_json_contract.bin = Some(hexadecimal_bytecode);
combined_json_contract
.bin_runtime
.clone_from(&combined_json_contract.bin);
combined_json_contract.factory_deps = Some(self.build.factory_dependencies);
Ok(())
}
/// Writes the contract text assembly and bytecode to the standard JSON.
pub fn write_to_standard_json(
self,
standard_json_contract: &mut SolcStandardJsonOutputContract,
) -> anyhow::Result<()> {
standard_json_contract.metadata = Some(self.metadata_json);
let assembly_text = self.build.assembly_text;
let bytecode = hex::encode(self.build.bytecode.as_slice());
if let Some(evm) = standard_json_contract.evm.as_mut() {
evm.modify(assembly_text, bytecode);
}
standard_json_contract.factory_dependencies = Some(self.build.factory_dependencies);
standard_json_contract.hash = Some(self.build.bytecode_hash);
Ok(())
}
/// Converts the full path to a short one.
pub fn short_path(path: &str) -> &str {
path.rfind('/')
.map(|last_slash| &path[last_slash + 1..])
.unwrap_or_else(|| path)
}
}
+94
View File
@@ -0,0 +1,94 @@
//! The Solidity project build.
pub mod contract;
use std::collections::BTreeMap;
use std::path::Path;
use revive_solc_json_interface::combined_json::CombinedJson;
use revive_solc_json_interface::SolcStandardJsonOutput;
use crate::solc::version::Version as SolcVersion;
use crate::ResolcVersion;
use self::contract::Contract;
/// The Solidity project build.
///
/// Maps each contract's full path to its finished build artifacts.
#[derive(Debug, Default)]
pub struct Build {
    /// The contract data, keyed by the contract path.
    pub contracts: BTreeMap<String, Contract>,
}
impl Build {
    /// Writes every contract of the build to the specified directory.
    ///
    /// Forwards the output flags and the `overwrite` switch to each contract.
    pub fn write_to_directory(
        self,
        output_directory: &Path,
        output_assembly: bool,
        output_binary: bool,
        overwrite: bool,
    ) -> anyhow::Result<()> {
        for contract in self.contracts.into_values() {
            contract.write_to_directory(
                output_directory,
                output_assembly,
                output_binary,
                overwrite,
            )?;
        }
        Ok(())
    }

    /// Writes the assembly and bytecode of every contract to the combined JSON.
    ///
    /// Each contract is matched against the combined JSON entries by path
    /// suffix; a contract without a matching entry is an error.
    pub fn write_to_combined_json(self, combined_json: &mut CombinedJson) -> anyhow::Result<()> {
        for (path, contract) in self.contracts {
            let target = combined_json
                .contracts
                .iter_mut()
                .find_map(|(json_path, contract)| {
                    path.ends_with(json_path).then_some(contract)
                })
                .ok_or_else(|| anyhow::anyhow!("Contract `{}` not found in the project", path))?;
            contract.write_to_combined_json(target)?;
        }
        combined_json.revive_version = Some(ResolcVersion::default().long);
        Ok(())
    }

    /// Writes the assembly and bytecode of every contract to the standard JSON.
    ///
    /// Contracts are addressed by their `path:name` identifier; versions are
    /// stamped into the output afterwards.
    pub fn write_to_standard_json(
        mut self,
        standard_json: &mut SolcStandardJsonOutput,
        solc_version: &SolcVersion,
    ) -> anyhow::Result<()> {
        let contracts = if let Some(contracts) = standard_json.contracts.as_mut() {
            contracts
        } else {
            return Ok(());
        };
        for (path, file_contracts) in contracts.iter_mut() {
            for (name, contract) in file_contracts.iter_mut() {
                let full_name = format!("{path}:{name}");
                if let Some(build_data) = self.contracts.remove(full_name.as_str()) {
                    build_data.write_to_standard_json(contract)?;
                }
            }
        }
        standard_json.version = Some(solc_version.default.to_string());
        standard_json.long_version = Some(solc_version.long.to_owned());
        standard_json.revive_version = Some(ResolcVersion::default().long);
        Ok(())
    }
}
+16
View File
@@ -0,0 +1,16 @@
//! Solidity to PolkaVM compiler constants.

/// The default executable name.
pub static DEFAULT_EXECUTABLE_NAME: &str = "resolc";

/// The `keccak256` scratch space offset.
pub const OFFSET_SCRATCH_SPACE: usize = 0;

/// The memory pointer offset, two words into the reserved area.
pub const OFFSET_MEMORY_POINTER: usize = 2 * revive_common::BYTE_LENGTH_WORD;

/// The empty slot offset, three words into the reserved area.
pub const OFFSET_EMPTY_SLOT: usize = 3 * revive_common::BYTE_LENGTH_WORD;

/// The non-reserved memory offset: usable memory starts after four reserved words.
pub const OFFSET_NON_RESERVED: usize = 4 * revive_common::BYTE_LENGTH_WORD;
+334
View File
@@ -0,0 +1,334 @@
//! Solidity to PolkaVM compiler library.
pub(crate) mod build;
pub(crate) mod r#const;
pub(crate) mod missing_libraries;
pub(crate) mod process;
pub(crate) mod project;
pub(crate) mod solc;
pub(crate) mod version;
pub use self::build::contract::Contract as ContractBuild;
pub use self::build::Build;
pub use self::missing_libraries::MissingLibraries;
pub use self::process::input::Input as ProcessInput;
#[cfg(not(target_os = "emscripten"))]
pub use self::process::native_process::NativeProcess;
pub use self::process::output::Output as ProcessOutput;
#[cfg(target_os = "emscripten")]
pub use self::process::worker_process::WorkerProcess;
pub use self::process::Process;
pub use self::project::contract::Contract as ProjectContract;
pub use self::project::Project;
pub use self::r#const::*;
#[cfg(not(target_os = "emscripten"))]
pub use self::solc::solc_compiler::SolcCompiler;
#[cfg(target_os = "emscripten")]
pub use self::solc::soljson_compiler::SoljsonCompiler;
pub use self::solc::version::Version as SolcVersion;
pub use self::solc::Compiler;
pub use self::solc::FIRST_SUPPORTED_VERSION as SolcFirstSupportedVersion;
pub use self::solc::LAST_SUPPORTED_VERSION as SolcLastSupportedVersion;
pub use self::version::Version as ResolcVersion;
#[cfg(not(target_os = "emscripten"))]
pub mod test_utils;
pub mod tests;
use std::collections::BTreeSet;
use std::io::Write;
use std::path::PathBuf;
use revive_solc_json_interface::standard_json::input::settings::metadata_hash::MetadataHash;
use revive_solc_json_interface::ResolcWarning;
use revive_solc_json_interface::SolcStandardJsonInput;
use revive_solc_json_interface::SolcStandardJsonInputLanguage;
use revive_solc_json_interface::SolcStandardJsonInputSettingsOptimizer;
use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVM;
use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory;
use revive_solc_json_interface::SolcStandardJsonInputSettingsSelection;
/// Runs the Yul mode.
///
/// Accepts exactly one input file and requires the most recent supported
/// `solc` version for validation; returns the finished project build.
pub fn yul<T: Compiler>(
    input_files: &[PathBuf],
    solc: &mut T,
    optimizer_settings: revive_llvm_context::OptimizerSettings,
    include_metadata_hash: bool,
    debug_config: revive_llvm_context::DebugConfig,
    llvm_arguments: &[String],
    memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> anyhow::Result<Build> {
    // Exactly one input file is accepted in this mode.
    let path = match input_files {
        [single] => single,
        [] => anyhow::bail!("The input file is missing"),
        _ => anyhow::bail!(
            "Only one input file is allowed in the Yul mode, but found {}",
            input_files.len(),
        ),
    };
    if solc.version()?.default != solc::LAST_SUPPORTED_VERSION {
        anyhow::bail!(
            "The Yul mode is only supported with the most recent version of the Solidity compiler: {}",
            solc::LAST_SUPPORTED_VERSION,
        );
    }
    // Reborrow immutably: the compiler only validates the Yul input here.
    let solc_validator = Some(&*solc);
    Project::try_from_yul_path(path, solc_validator)?.compile(
        optimizer_settings,
        include_metadata_hash,
        debug_config,
        llvm_arguments,
        memory_config,
    )
}
/// Runs the LLVM IR mode.
///
/// Accepts exactly one input file and returns the finished project build.
pub fn llvm_ir(
    input_files: &[PathBuf],
    optimizer_settings: revive_llvm_context::OptimizerSettings,
    include_metadata_hash: bool,
    debug_config: revive_llvm_context::DebugConfig,
    llvm_arguments: &[String],
    memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> anyhow::Result<Build> {
    // Exactly one input file is accepted in this mode.
    let path = match input_files {
        [single] => single,
        [] => anyhow::bail!("The input file is missing"),
        _ => anyhow::bail!(
            "Only one input file is allowed in the LLVM IR mode, but found {}",
            input_files.len(),
        ),
    };
    Project::try_from_llvm_ir_path(path)?.compile(
        optimizer_settings,
        include_metadata_hash,
        debug_config,
        llvm_arguments,
        memory_config,
    )
}
/// Runs the standard output mode.
///
/// Builds a `solc` standard JSON input from the CLI arguments, runs `solc`,
/// prints its diagnostics to stderr, and compiles the resulting project to a
/// PolkaVM build. Fails if `solc` reported at least one error.
#[allow(clippy::too_many_arguments)]
pub fn standard_output<T: Compiler>(
    input_files: &[PathBuf],
    libraries: Vec<String>,
    solc: &mut T,
    evm_version: Option<revive_common::EVMVersion>,
    solc_optimizer_enabled: bool,
    optimizer_settings: revive_llvm_context::OptimizerSettings,
    include_metadata_hash: bool,
    base_path: Option<String>,
    include_paths: Vec<String>,
    allow_paths: Option<String>,
    remappings: Option<BTreeSet<String>>,
    suppressed_warnings: Option<Vec<ResolcWarning>>,
    debug_config: revive_llvm_context::DebugConfig,
    llvm_arguments: &[String],
    memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> anyhow::Result<Build> {
    let solc_version = solc.version()?;
    // Assemble the solc standard JSON input from the CLI arguments.
    let solc_input = SolcStandardJsonInput::try_from_paths(
        SolcStandardJsonInputLanguage::Solidity,
        evm_version,
        input_files,
        libraries,
        remappings,
        SolcStandardJsonInputSettingsSelection::new_required(),
        SolcStandardJsonInputSettingsOptimizer::new(
            solc_optimizer_enabled,
            None,
            &solc_version.default,
            optimizer_settings.is_fallback_to_size_enabled(),
        ),
        None,
        suppressed_warnings,
        Some(SolcStandardJsonInputSettingsPolkaVM::new(
            Some(memory_config),
            debug_config.emit_debug_info,
        )),
    )?;
    // Keep a copy of the sources before `solc_input` is consumed below.
    let source_code_files = solc_input
        .sources
        .iter()
        .map(|(path, source)| (path.to_owned(), source.content.to_owned()))
        .collect();
    let libraries = solc_input.settings.libraries.clone().unwrap_or_default();
    let solc_output = solc.standard_json(solc_input, base_path, include_paths, allow_paths)?;
    // Print every solc diagnostic; abort only if at least one is an error.
    if let Some(errors) = solc_output.errors.as_deref() {
        let mut has_errors = false;
        for error in errors.iter() {
            if error.severity.as_str() == "error" {
                has_errors = true;
            }
            writeln!(std::io::stderr(), "{error}")?;
        }
        if has_errors {
            anyhow::bail!("Error(s) found. Compilation aborted");
        }
    }
    let project = Project::try_from_standard_json_output(
        &solc_output,
        source_code_files,
        libraries,
        &solc_version,
        &debug_config,
    )?;
    let build = project.compile(
        optimizer_settings,
        include_metadata_hash,
        debug_config,
        llvm_arguments,
        memory_config,
    )?;
    Ok(build)
}
/// Runs the standard JSON mode.
#[allow(clippy::too_many_arguments)]
pub fn standard_json<T: Compiler>(
solc: &mut T,
detect_missing_libraries: bool,
base_path: Option<String>,
include_paths: Vec<String>,
allow_paths: Option<String>,
mut debug_config: revive_llvm_context::DebugConfig,
llvm_arguments: &[String],
) -> anyhow::Result<()> {
let solc_version = solc.version()?;
let solc_input = SolcStandardJsonInput::try_from_stdin()?;
let source_code_files = solc_input
.sources
.iter()
.map(|(path, source)| (path.to_owned(), source.content.to_owned()))
.collect();
let optimizer_settings =
revive_llvm_context::OptimizerSettings::try_from(&solc_input.settings.optimizer)?;
let polkavm_settings = solc_input.settings.polkavm.unwrap_or_default();
debug_config.emit_debug_info = polkavm_settings.debug_information;
let include_metadata_hash = match solc_input.settings.metadata {
Some(ref metadata) => metadata.bytecode_hash != Some(MetadataHash::None),
None => true,
};
let libraries = solc_input.settings.libraries.clone().unwrap_or_default();
let mut solc_output = solc.standard_json(solc_input, base_path, include_paths, allow_paths)?;
if let Some(errors) = solc_output.errors.as_deref() {
for error in errors.iter() {
if error.severity.as_str() == "error" {
serde_json::to_writer(std::io::stdout(), &solc_output)?;
std::process::exit(0);
}
}
}
let project = Project::try_from_standard_json_output(
&solc_output,
source_code_files,
libraries,
&solc_version,
&debug_config,
)?;
if detect_missing_libraries {
let missing_libraries = project.get_missing_libraries();
missing_libraries.write_to_standard_json(&mut solc_output, &solc_version)?;
} else {
let build = project.compile(
optimizer_settings,
include_metadata_hash,
debug_config,
llvm_arguments,
polkavm_settings.memory_config,
)?;
build.write_to_standard_json(&mut solc_output, &solc_version)?;
}
serde_json::to_writer(std::io::stdout(), &solc_output)?;
std::process::exit(0);
}
/// Runs the combined JSON mode.
///
/// Compiles via [`standard_output`], merges the build into the `solc`
/// combined JSON, then either writes it to `output_directory` or prints it
/// to stdout, and terminates the process with exit code 0.
#[allow(clippy::too_many_arguments)]
pub fn combined_json<T: Compiler>(
    format: String,
    input_files: &[PathBuf],
    libraries: Vec<String>,
    solc: &mut T,
    evm_version: Option<revive_common::EVMVersion>,
    solc_optimizer_enabled: bool,
    optimizer_settings: revive_llvm_context::OptimizerSettings,
    include_metadata_hash: bool,
    base_path: Option<String>,
    include_paths: Vec<String>,
    allow_paths: Option<String>,
    remappings: Option<BTreeSet<String>>,
    suppressed_warnings: Option<Vec<ResolcWarning>>,
    debug_config: revive_llvm_context::DebugConfig,
    output_directory: Option<PathBuf>,
    overwrite: bool,
    llvm_arguments: &[String],
    memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> anyhow::Result<()> {
    // Compile first; the combined JSON is only a different output format.
    let build = standard_output(
        input_files,
        libraries,
        solc,
        evm_version,
        solc_optimizer_enabled,
        optimizer_settings,
        include_metadata_hash,
        base_path,
        include_paths,
        allow_paths,
        remappings,
        suppressed_warnings,
        debug_config,
        llvm_arguments,
        memory_config,
    )?;
    // `format` selects which fields solc emits into the combined JSON.
    let mut combined_json = solc.combined_json(input_files, format.as_str())?;
    build.write_to_combined_json(&mut combined_json)?;
    match output_directory {
        Some(output_directory) => {
            std::fs::create_dir_all(output_directory.as_path())?;
            combined_json.write_to_directory(output_directory.as_path(), overwrite)?;
        }
        None => {
            writeln!(
                std::io::stdout(),
                "{}",
                serde_json::to_string(&combined_json).expect("Always valid")
            )?;
        }
    }
    std::process::exit(0);
}
+51
View File
@@ -0,0 +1,51 @@
//! The missing Solidity libraries.
use std::collections::BTreeMap;
use std::collections::HashSet;
use revive_solc_json_interface::SolcStandardJsonOutput;
use crate::solc::version::Version as SolcVersion;
use crate::ResolcVersion;
/// The missing Solidity libraries.
///
/// Collected per contract when the compiler runs in library-detection mode.
pub struct MissingLibraries {
    /// The missing libraries, keyed by the contract's full name.
    pub contract_libraries: BTreeMap<String, HashSet<String>>,
}
impl MissingLibraries {
    /// A shortcut constructor.
    pub fn new(contract_libraries: BTreeMap<String, HashSet<String>>) -> Self {
        Self { contract_libraries }
    }

    /// Writes the missing libraries to the standard JSON.
    ///
    /// Contracts are addressed by their `path:name` identifier; the solc and
    /// revive version fields are stamped into the output afterwards.
    pub fn write_to_standard_json(
        mut self,
        standard_json: &mut SolcStandardJsonOutput,
        solc_version: &SolcVersion,
    ) -> anyhow::Result<()> {
        let contracts = if let Some(contracts) = standard_json.contracts.as_mut() {
            contracts
        } else {
            return Ok(());
        };
        for (path, file_contracts) in contracts.iter_mut() {
            for (name, contract) in file_contracts.iter_mut() {
                let full_name = format!("{path}:{name}");
                if let Some(missing) = self.contract_libraries.remove(full_name.as_str()) {
                    contract.missing_libraries = Some(missing);
                }
            }
        }
        standard_json.version = Some(solc_version.default.to_string());
        standard_json.long_version = Some(solc_version.long.to_owned());
        standard_json.revive_version = Some(ResolcVersion::default().long);
        Ok(())
    }
}
+51
View File
@@ -0,0 +1,51 @@
//! Process for compiling a single compilation unit.
//! The input data.
use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory;
use serde::Deserialize;
use serde::Serialize;
use crate::project::contract::Contract;
use crate::project::Project;
/// The input data for a single compilation unit process.
#[derive(Debug, Serialize, Deserialize)]
pub struct Input {
    /// The contract representation.
    pub contract: Contract,
    /// The project representation.
    pub project: Project,
    /// Whether to append the metadata hash.
    pub include_metadata_hash: bool,
    /// The optimizer settings.
    pub optimizer_settings: revive_llvm_context::OptimizerSettings,
    /// The debug output config.
    pub debug_config: revive_llvm_context::DebugConfig,
    /// The extra LLVM arguments used for manual control.
    pub llvm_arguments: Vec<String>,
    /// The PVM memory configuration.
    pub memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
}
impl Input {
/// A shortcut constructor.
pub fn new(
contract: Contract,
project: Project,
include_metadata_hash: bool,
optimizer_settings: revive_llvm_context::OptimizerSettings,
debug_config: revive_llvm_context::DebugConfig,
llvm_arguments: Vec<String>,
memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> Self {
Self {
contract,
project,
include_metadata_hash,
optimizer_settings,
debug_config,
llvm_arguments,
memory_config,
}
}
}
+77
View File
@@ -0,0 +1,77 @@
//! Process for compiling a single compilation unit.
pub mod input;
#[cfg(not(target_os = "emscripten"))]
pub mod native_process;
pub mod output;
#[cfg(target_os = "emscripten")]
pub mod worker_process;
use std::io::{Read, Write};
use self::input::Input;
use self::output::Output;
/// A driver for compiling one compilation unit in a separate process.
pub trait Process {
    /// Read input from `stdin` (or from `input_file` when given), compile a
    /// contract, and write the output to `stdout`.
    ///
    /// On success the build is serialized as JSON to `stdout`; on failure the
    /// error message is written to `stderr` and the error is returned.
    fn run(input_file: Option<&mut std::fs::File>) -> anyhow::Result<()> {
        let mut stdin = std::io::stdin();
        let mut stdout = std::io::stdout();
        let mut stderr = std::io::stderr();
        // Pre-sized buffer for the serialized process input.
        let mut buffer = Vec::with_capacity(16384);
        match input_file {
            Some(ins) => {
                if let Err(error) = ins.read_to_end(&mut buffer) {
                    anyhow::bail!("Failed to read recursive process input file: {:?}", error);
                }
            }
            None => {
                if let Err(error) = stdin.read_to_end(&mut buffer) {
                    anyhow::bail!(
                        "Failed to read recursive process input from stdin: {:?}",
                        error
                    )
                }
            }
        }
        let input: Input = revive_common::deserialize_from_slice(buffer.as_slice())?;
        // Initialize LLVM for the PVM target before compiling.
        revive_llvm_context::initialize_llvm(
            revive_llvm_context::Target::PVM,
            crate::DEFAULT_EXECUTABLE_NAME,
            &input.llvm_arguments,
        );
        let result = input.contract.compile(
            input.project,
            input.optimizer_settings,
            input.include_metadata_hash,
            input.debug_config,
            &input.llvm_arguments,
            input.memory_config,
        );
        match result {
            Ok(build) => {
                // Serialize the build to stdout for the parent process.
                let output = Output::new(build);
                let json = serde_json::to_vec(&output).expect("Always valid");
                stdout
                    .write_all(json.as_slice())
                    .expect("Stdout writing error");
                Ok(())
            }
            Err(error) => {
                // Report the failure on stderr so the parent process can relay it.
                let message = error.to_string();
                stderr
                    .write_all(message.as_bytes())
                    .expect("Stderr writing error");
                Err(error)
            }
        }
    }

    /// Runs this process recursively to compile a single contract.
    fn call(input: Input) -> anyhow::Result<Output>;
}
@@ -0,0 +1,76 @@
//! Process for compiling a single compilation unit.
use std::io::Write;
use std::path::PathBuf;
use std::process::Command;
use once_cell::sync::OnceCell;
use super::Input;
use super::Output;
use super::Process;
/// The overridden executable path used when the compiler is run as a library.
pub static EXECUTABLE: OnceCell<PathBuf> = OnceCell::new();

/// Runs compilation by spawning the compiler executable as a subprocess.
pub struct NativeProcess;
impl Process for NativeProcess {
    /// Spawns the current executable (or the [`EXECUTABLE`] override) with
    /// `--recursive-process`, feeds it the serialized input via stdin, and
    /// parses the compiled contract build from its stdout.
    fn call(input: Input) -> anyhow::Result<Output> {
        let input_json = serde_json::to_vec(&input).expect("Always valid");
        // Prefer the library override; fall back to the running binary.
        let executable = match EXECUTABLE.get() {
            Some(executable) => executable.to_owned(),
            None => std::env::current_exe()?,
        };
        let mut command = Command::new(executable.as_path());
        command.stdin(std::process::Stdio::piped());
        command.stdout(std::process::Stdio::piped());
        command.stderr(std::process::Stdio::piped());
        command.arg("--recursive-process");
        let process = command.spawn().map_err(|error| {
            anyhow::anyhow!("{:?} subprocess spawning error: {:?}", executable, error)
        })?;
        // Debug builds dump the serialized input for troubleshooting.
        // NOTE(review): the error message says "output" but this dumps the input.
        #[cfg(debug_assertions)]
        input
            .debug_config
            .dump_stage_output(&input.contract.path, Some("stage"), &input_json)
            .map_err(|error| {
                anyhow::anyhow!(
                    "{:?} failed to log the recursive process output: {:?}",
                    executable,
                    error,
                )
            })?;
        process
            .stdin
            .as_ref()
            .ok_or_else(|| anyhow::anyhow!("{:?} stdin getting error", executable))?
            .write_all(input_json.as_slice())
            .map_err(|error| {
                anyhow::anyhow!("{:?} stdin writing error: {:?}", executable, error)
            })?;
        // Waits for termination, collecting the child's stdout and stderr.
        let output = process.wait_with_output().map_err(|error| {
            anyhow::anyhow!("{:?} subprocess output error: {:?}", executable, error)
        })?;
        if !output.status.success() {
            // The child wrote its error message to stderr; forward it verbatim.
            anyhow::bail!(
                "{}",
                String::from_utf8_lossy(output.stderr.as_slice()).to_string(),
            );
        }
        let output: Output = revive_common::deserialize_from_slice(output.stdout.as_slice())
            .map_err(|error| {
                anyhow::anyhow!(
                    "{:?} subprocess output parsing error: {}",
                    executable,
                    error,
                )
            })?;
        Ok(output)
    }
}
+21
View File
@@ -0,0 +1,21 @@
//! Process for compiling a single compilation unit.
//! The output data.
use serde::Deserialize;
use serde::Serialize;
use crate::build::contract::Contract as ContractBuild;
/// The output data of a single compilation unit process.
#[derive(Debug, Serialize, Deserialize)]
pub struct Output {
    /// The contract build.
    pub build: ContractBuild,
}
impl Output {
/// A shortcut constructor.
pub fn new(build: ContractBuild) -> Self {
Self { build }
}
}
@@ -0,0 +1,69 @@
//! Process for compiling a single compilation unit using Web Workers.
use std::ffi::{c_char, c_void, CStr, CString};
use super::Input;
use super::Output;
use super::Process;
use anyhow::Context;
use serde::Deserialize;
/// An error response from the worker.
#[derive(Deserialize)]
struct Error {
    // The human-readable error message reported by the worker.
    message: String,
}

/// A success response from the worker.
#[derive(Deserialize)]
struct Success {
    // The serialized process output payload.
    data: String,
}

/// The tagged worker response envelope, discriminated by the `type` field.
#[derive(Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
enum Response {
    /// Compilation succeeded; carries the output payload.
    Success(Success),
    /// Compilation failed; carries the error message.
    Error(Error),
}

/// Runs compilation through an Emscripten Web Worker.
pub struct WorkerProcess;
impl Process for WorkerProcess {
    /// Serializes the input, hands it to the Emscripten `resolc_compile`
    /// entry point, and parses the worker's JSON response into the output.
    fn call(input: Input) -> anyhow::Result<Output> {
        let input_json = serde_json::to_vec(&input).expect("Always valid");
        let input_str = String::from_utf8(input_json).expect("Input shall be valid");
        // Prepare the input string for the Emscripten function
        let input_cstring = CString::new(input_str).expect("CString allocation failed");
        // Call the Emscripten function
        let output_ptr =
            unsafe { resolc_compile(input_cstring.as_ptr(), input_cstring.as_bytes().len()) };
        // Guard against a null pointer: `CStr::from_ptr` on null is undefined
        // behavior, and `free(NULL)` would silently hide the failure.
        if output_ptr.is_null() {
            anyhow::bail!("resolc_compile returned a null pointer");
        }
        // Convert the output pointer back to a Rust string
        let output_str = unsafe {
            CStr::from_ptr(output_ptr)
                .to_str()
                .with_context(|| "Failed to convert C string to Rust string")
                .map(str::to_owned)
        };
        // SAFETY: assumes the worker allocated the buffer with `malloc`
        // (TODO confirm against the Emscripten side); freed exactly once here.
        unsafe { libc::free(output_ptr as *mut c_void) };
        let output_str = output_str?;
        let response: Response = serde_json::from_str(&output_str)
            .map_err(|error| anyhow::anyhow!("Worker output parsing error: {}", error,))?;
        match response {
            Response::Success(out) => {
                let output: Output = revive_common::deserialize_from_slice(out.data.as_bytes())
                    .map_err(|error| {
                        anyhow::anyhow!("resolc.js subprocess output parsing error: {}", error,)
                    })?;
                Ok(output)
            }
            Response::Error(err) => anyhow::bail!("Worker error: {}", err.message,),
        }
    }
}
extern "C" {
    // The Emscripten-provided entry point: compiles `input_len` bytes at
    // `input_ptr` and returns a pointer to a NUL-terminated JSON response
    // (consumed via `CStr::from_ptr` and `serde_json` above).
    fn resolc_compile(input_ptr: *const c_char, input_len: usize) -> *const c_char;
}
@@ -0,0 +1,21 @@
//! The contract LLVM IR source code.
use serde::Deserialize;
use serde::Serialize;
/// The contract LLVM IR source code.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[allow(clippy::upper_case_acronyms)]
pub struct LLVMIR {
    /// The LLVM IR file path.
    pub path: String,
    /// The LLVM IR source code.
    pub source: String,
}
impl LLVMIR {
/// A shortcut constructor.
pub fn new(path: String, source: String) -> Self {
Self { path, source }
}
}
@@ -0,0 +1,66 @@
//! The contract source code.
pub mod llvm_ir;
pub mod yul;
use std::collections::HashSet;
use serde::Deserialize;
use serde::Serialize;
use revive_yul::parser::statement::object::Object;
use self::llvm_ir::LLVMIR;
use self::yul::Yul;
/// The contract source code, in one of the supported intermediate forms.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[allow(clippy::upper_case_acronyms)]
pub enum IR {
    /// The Yul source code.
    Yul(Yul),
    /// The LLVM IR source code.
    LLVMIR(LLVMIR),
}
impl IR {
    /// A shortcut constructor for the Yul variant.
    pub fn new_yul(source_code: String, object: Object) -> Self {
        Self::Yul(Yul::new(source_code, object))
    }

    /// A shortcut constructor for the LLVM IR variant.
    pub fn new_llvm_ir(path: String, source: String) -> Self {
        Self::LLVMIR(LLVMIR::new(path, source))
    }

    /// Get the list of missing deployable libraries.
    ///
    /// LLVM IR sources yield an empty set; only Yul sources are inspected.
    pub fn get_missing_libraries(&self) -> HashSet<String> {
        if let Self::Yul(inner) = self {
            inner.get_missing_libraries()
        } else {
            HashSet::new()
        }
    }
}
impl<D> revive_llvm_context::PolkaVMWriteLLVM<D> for IR
where
    D: revive_llvm_context::PolkaVMDependency + Clone,
{
    /// Delegates declaration to the Yul source; LLVM IR needs no declaration pass.
    fn declare(
        &mut self,
        context: &mut revive_llvm_context::PolkaVMContext<D>,
    ) -> anyhow::Result<()> {
        match self {
            Self::Yul(inner) => inner.declare(context),
            Self::LLVMIR(_inner) => Ok(()),
        }
    }

    /// Delegates lowering to the Yul source; LLVM IR is loaded elsewhere as a module.
    fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext<D>) -> anyhow::Result<()> {
        match self {
            Self::Yul(inner) => inner.into_llvm(context),
            Self::LLVMIR(_inner) => Ok(()),
        }
    }
}
@@ -0,0 +1,48 @@
//! The contract Yul source code.
use std::collections::HashSet;
use serde::Deserialize;
use serde::Serialize;
use revive_yul::parser::statement::object::Object;
/// The contract Yul source code together with its parsed AST.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Yul {
    /// The Yul source code.
    pub source_code: String,
    /// The Yul AST object.
    pub object: Object,
}
impl Yul {
/// A shortcut constructor.
pub fn new(source_code: String, object: Object) -> Self {
Self {
source_code,
object,
}
}
/// Get the list of missing deployable libraries.
pub fn get_missing_libraries(&self) -> HashSet<String> {
self.object.get_missing_libraries()
}
}
impl<D> revive_llvm_context::PolkaVMWriteLLVM<D> for Yul
where
    D: revive_llvm_context::PolkaVMDependency + Clone,
{
    /// Delegates the declaration pass to the root Yul object.
    fn declare(
        &mut self,
        context: &mut revive_llvm_context::PolkaVMContext<D>,
    ) -> anyhow::Result<()> {
        self.object.declare(context)
    }

    /// Delegates the lowering pass to the root Yul object.
    fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext<D>) -> anyhow::Result<()> {
        self.object.into_llvm(context)
    }
}
@@ -0,0 +1,43 @@
//! The Solidity contract metadata.
use serde::Serialize;
use crate::ResolcVersion;
/// The Solidity contract metadata.
/// Is used to append the metadata hash to the contract bytecode.
#[derive(Debug, Serialize)]
pub struct Metadata {
    /// The `solc` metadata.
    pub solc_metadata: serde_json::Value,
    /// The `solc` version.
    pub solc_version: String,
    /// The pallet revive edition.
    pub revive_pallet_version: Option<semver::Version>,
    /// The PolkaVM compiler version.
    pub revive_version: String,
    /// The PolkaVM compiler optimizer settings.
    pub optimizer_settings: revive_llvm_context::OptimizerSettings,
    /// The extra LLVM arguments used for manual control.
    pub llvm_arguments: Vec<String>,
}
impl Metadata {
/// A shortcut constructor.
pub fn new(
solc_metadata: serde_json::Value,
solc_version: String,
revive_pallet_version: Option<semver::Version>,
optimizer_settings: revive_llvm_context::OptimizerSettings,
llvm_arguments: Vec<String>,
) -> Self {
Self {
solc_metadata,
solc_version,
revive_pallet_version,
revive_version: ResolcVersion::default().long,
optimizer_settings,
llvm_arguments,
}
}
}
+190
View File
@@ -0,0 +1,190 @@
//! The contract data.
pub mod ir;
pub mod metadata;
use std::collections::HashSet;
use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory;
use serde::Deserialize;
use serde::Serialize;
use sha3::Digest;
use revive_llvm_context::PolkaVMWriteLLVM;
use crate::build::contract::Contract as ContractBuild;
use crate::project::Project;
use crate::solc::version::Version as SolcVersion;
use self::ir::IR;
use self::metadata::Metadata;
/// The contract data: one compilation unit of the project before building.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Contract {
    /// The absolute file path.
    pub path: String,
    /// The IR source code data.
    pub ir: IR,
    /// The metadata JSON.
    pub metadata_json: serde_json::Value,
}
impl Contract {
    /// A shortcut constructor.
    ///
    /// When `metadata_json` is not provided, a minimal one is synthesized
    /// from the source hash and source version (in which case `source_hash`
    /// and `source_version` are the only uses of those parameters).
    pub fn new(
        path: String,
        source_hash: [u8; revive_common::BYTE_LENGTH_WORD],
        source_version: SolcVersion,
        ir: IR,
        metadata_json: Option<serde_json::Value>,
    ) -> Self {
        let metadata_json = metadata_json.unwrap_or_else(|| {
            serde_json::json!({
                "source_hash": hex::encode(source_hash.as_slice()),
                "source_version": serde_json::to_value(&source_version).expect("Always valid"),
            })
        });
        Self {
            path,
            ir,
            metadata_json,
        }
    }

    /// Returns the contract identifier, which is:
    /// - the Yul object identifier for Yul
    /// - the module name for LLVM IR
    pub fn identifier(&self) -> &str {
        match self.ir {
            IR::Yul(ref yul) => yul.object.identifier.as_str(),
            IR::LLVMIR(ref llvm_ir) => llvm_ir.path.as_str(),
        }
    }

    /// Extract factory dependencies.
    ///
    /// Drains them from the Yul object; LLVM IR sources have none.
    pub fn drain_factory_dependencies(&mut self) -> HashSet<String> {
        match self.ir {
            IR::Yul(ref mut yul) => yul.object.factory_dependencies.drain().collect(),
            IR::LLVMIR(_) => HashSet::new(),
        }
    }

    /// Compiles the specified contract, setting its build artifacts.
    ///
    /// Builds the contract metadata (and optionally its keccak256 hash),
    /// creates the LLVM module — parsed from source for LLVM IR input,
    /// empty for Yul — runs the declaration and lowering passes, and
    /// returns the finished [`ContractBuild`].
    pub fn compile(
        mut self,
        project: Project,
        optimizer_settings: revive_llvm_context::OptimizerSettings,
        include_metadata_hash: bool,
        debug_config: revive_llvm_context::DebugConfig,
        llvm_arguments: &[String],
        memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
    ) -> anyhow::Result<ContractBuild> {
        let llvm = inkwell::context::Context::create();
        let optimizer = revive_llvm_context::Optimizer::new(optimizer_settings);
        let version = project.version.clone();
        let identifier = self.identifier().to_owned();
        // `take` moves the metadata JSON out, leaving `Null` behind.
        let metadata = Metadata::new(
            self.metadata_json.take(),
            version.long.clone(),
            version.l2_revision.clone(),
            optimizer.settings().to_owned(),
            llvm_arguments.to_vec(),
        );
        let metadata_json = serde_json::to_value(&metadata).expect("Always valid");
        // The metadata hash is the keccak256 of the serialized metadata.
        let metadata_hash: Option<[u8; revive_common::BYTE_LENGTH_WORD]> = if include_metadata_hash
        {
            let metadata_string = serde_json::to_string(&metadata).expect("Always valid");
            Some(sha3::Keccak256::digest(metadata_string.as_bytes()).into())
        } else {
            None
        };
        let module = match self.ir {
            IR::LLVMIR(ref llvm_ir) => {
                // Create the output module
                let memory_buffer =
                    inkwell::memory_buffer::MemoryBuffer::create_from_memory_range_copy(
                        llvm_ir.source.as_bytes(),
                        self.path.as_str(),
                    );
                llvm.create_module_from_ir(memory_buffer)
                    .map_err(|error| anyhow::anyhow!(error.to_string()))?
            }
            _ => llvm.create_module(self.path.as_str()),
        };
        let mut context = revive_llvm_context::PolkaVMContext::new(
            &llvm,
            module,
            optimizer,
            Some(project),
            include_metadata_hash,
            debug_config,
            llvm_arguments,
            memory_config,
        );
        context.set_solidity_data(revive_llvm_context::PolkaVMContextSolidityData::default());
        // Yul lowering additionally needs its own per-context data.
        match self.ir {
            IR::Yul(_) => {
                context.set_yul_data(Default::default());
            }
            IR::LLVMIR(_) => {}
        }
        // Drain before lowering: the dependencies go into the final build.
        let factory_dependencies = self.drain_factory_dependencies();
        self.ir.declare(&mut context).map_err(|error| {
            anyhow::anyhow!(
                "The contract `{}` LLVM IR generator declaration pass error: {}",
                self.path,
                error
            )
        })?;
        self.ir.into_llvm(&mut context).map_err(|error| {
            anyhow::anyhow!(
                "The contract `{}` LLVM IR generator definition pass error: {}",
                self.path,
                error
            )
        })?;
        if let Some(debug_info) = context.debug_info() {
            debug_info.finalize_module()
        }
        let build = context.build(self.path.as_str(), metadata_hash)?;
        Ok(ContractBuild::new(
            self.path,
            identifier,
            build,
            metadata_json,
            factory_dependencies,
        ))
    }

    /// Get the list of missing deployable libraries.
    pub fn get_missing_libraries(&self) -> HashSet<String> {
        self.ir.get_missing_libraries()
    }
}
impl<D> PolkaVMWriteLLVM<D> for Contract
where
    D: revive_llvm_context::PolkaVMDependency + Clone,
{
    /// Forwards the declaration pass to the contract's underlying IR.
    fn declare(
        &mut self,
        context: &mut revive_llvm_context::PolkaVMContext<D>,
    ) -> anyhow::Result<()> {
        let ir = &mut self.ir;
        ir.declare(context)
    }
    /// Forwards the definition pass to the contract's underlying IR,
    /// consuming the contract in the process.
    fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext<D>) -> anyhow::Result<()> {
        let ir = self.ir;
        ir.into_llvm(context)
    }
}
+382
View File
@@ -0,0 +1,382 @@
//! The processed input data.
pub mod contract;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
#[cfg(feature = "parallel")]
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use serde::Deserialize;
use serde::Serialize;
use sha3::Digest;
use revive_solc_json_interface::SolcStandardJsonOutput;
use revive_yul::lexer::Lexer;
use revive_yul::parser::statement::object::Object;
use crate::build::contract::Contract as ContractBuild;
use crate::build::Build;
use crate::missing_libraries::MissingLibraries;
use crate::process::input::Input as ProcessInput;
use crate::process::Process;
use crate::project::contract::ir::IR;
use crate::solc::version::Version as SolcVersion;
use crate::solc::Compiler;
use self::contract::Contract;
/// The processed input data.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Project {
    /// The source code version.
    pub version: SolcVersion,
    /// The project contracts, keyed by full contract path.
    pub contracts: BTreeMap<String, Contract>,
    /// The mapping of auxiliary identifiers, e.g. Yul object names, to full contract paths.
    pub identifier_paths: BTreeMap<String, String>,
    /// The library addresses: source file path -> library name -> `0x`-prefixed address.
    pub libraries: BTreeMap<String, BTreeMap<String, String>>,
}
impl Project {
    /// A shortcut constructor.
    ///
    /// Builds the auxiliary identifier-to-path index from the supplied contracts.
    pub fn new(
        version: SolcVersion,
        contracts: BTreeMap<String, Contract>,
        libraries: BTreeMap<String, BTreeMap<String, String>>,
    ) -> Self {
        let mut identifier_paths = BTreeMap::new();
        for (path, contract) in contracts.iter() {
            identifier_paths.insert(contract.identifier().to_owned(), path.to_owned());
        }
        Self {
            version,
            contracts,
            identifier_paths,
            libraries,
        }
    }
    /// Compiles all contracts, returning their build artifacts.
    ///
    /// Each contract is compiled in a child process (native, or a worker on
    /// emscripten), receiving a clone of the whole project for dependency
    /// resolution. Afterwards a first pass collects every bytecode hash (and
    /// fails fast on any compile error), and a second pass links each
    /// contract's factory dependencies to those hashes.
    pub fn compile(
        self,
        optimizer_settings: revive_llvm_context::OptimizerSettings,
        include_metadata_hash: bool,
        debug_config: revive_llvm_context::DebugConfig,
        llvm_arguments: &[String],
        memory_config: revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory,
    ) -> anyhow::Result<Build> {
        // A full clone is kept so each per-contract process input can carry the project.
        let project = self.clone();
        #[cfg(feature = "parallel")]
        let iter = self.contracts.into_par_iter();
        #[cfg(not(feature = "parallel"))]
        let iter = self.contracts.into_iter();
        let results: BTreeMap<String, anyhow::Result<ContractBuild>> = iter
            .map(|(full_path, contract)| {
                let process_input = ProcessInput::new(
                    contract,
                    project.clone(),
                    include_metadata_hash,
                    optimizer_settings.clone(),
                    debug_config.clone(),
                    llvm_arguments.to_vec(),
                    memory_config,
                );
                let process_output = {
                    #[cfg(target_os = "emscripten")]
                    {
                        crate::WorkerProcess::call(process_input)
                    }
                    #[cfg(not(target_os = "emscripten"))]
                    {
                        crate::NativeProcess::call(process_input)
                    }
                };
                (full_path, process_output.map(|output| output.build))
            })
            .collect();
        let mut build = Build::default();
        // First pass: collect all bytecode hashes, bailing on the first failure.
        let mut hashes = HashMap::with_capacity(results.len());
        for (path, result) in results.iter() {
            match result {
                Ok(contract) => {
                    hashes.insert(path.to_owned(), contract.build.bytecode_hash.to_owned());
                }
                Err(error) => {
                    anyhow::bail!("Contract `{}` compiling error: {:?}", path, error);
                }
            }
        }
        // Second pass: resolve each factory dependency identifier to its path,
        // then to the bytecode hash collected above.
        for (path, result) in results.into_iter() {
            match result {
                Ok(mut contract) => {
                    for dependency in contract.factory_dependencies.drain() {
                        let dependency_path = project
                            .identifier_paths
                            .get(dependency.as_str())
                            .cloned()
                            .unwrap_or_else(|| {
                                panic!("Dependency `{dependency}` full path not found")
                            });
                        let hash = match hashes.get(dependency_path.as_str()) {
                            Some(hash) => hash.to_owned(),
                            None => anyhow::bail!(
                                "Dependency contract `{}` not found in the project",
                                dependency_path
                            ),
                        };
                        contract
                            .build
                            .factory_dependencies
                            .insert(hash, dependency_path);
                    }
                    build.contracts.insert(path, contract);
                }
                Err(error) => {
                    anyhow::bail!("Contract `{}` compiling error: {:?}", path, error);
                }
            }
        }
        Ok(build)
    }
    /// Get the list of missing deployable libraries.
    ///
    /// A library counts as missing when a contract references it and no address
    /// for it was configured in `self.libraries`.
    pub fn get_missing_libraries(&self) -> MissingLibraries {
        let deployed_libraries = self
            .libraries
            .iter()
            .flat_map(|(file, names)| {
                names
                    .iter()
                    .map(|(name, _address)| format!("{file}:{name}"))
                    .collect::<HashSet<String>>()
            })
            .collect::<HashSet<String>>();
        let mut missing_deployable_libraries = BTreeMap::new();
        for (contract_path, contract) in self.contracts.iter() {
            let missing_libraries = contract
                .get_missing_libraries()
                .into_iter()
                .filter(|library| !deployed_libraries.contains(library))
                .collect::<HashSet<String>>();
            missing_deployable_libraries.insert(contract_path.to_owned(), missing_libraries);
        }
        MissingLibraries::new(missing_deployable_libraries)
    }
    /// Parses the Yul source code file and returns the source data.
    pub fn try_from_yul_path<T: Compiler>(
        path: &Path,
        solc_validator: Option<&T>,
    ) -> anyhow::Result<Self> {
        let source_code = std::fs::read_to_string(path)
            .map_err(|error| anyhow::anyhow!("Yul file {:?} reading error: {}", path, error))?;
        Self::try_from_yul_string(path, source_code.as_str(), solc_validator)
    }
    /// Parses the test Yul source code string and returns the source data.
    /// Only for integration testing purposes.
    ///
    /// If a `solc_validator` is provided, the Yul source is first validated by `solc`.
    pub fn try_from_yul_string<T: Compiler>(
        path: &Path,
        source_code: &str,
        solc_validator: Option<&T>,
    ) -> anyhow::Result<Self> {
        if let Some(solc) = solc_validator {
            solc.validate_yul(path)?;
        }
        let source_version = SolcVersion::new_simple(crate::solc::LAST_SUPPORTED_VERSION);
        let path = path.to_string_lossy().to_string();
        // The source hash feeds the synthesized contract metadata.
        let source_hash = sha3::Keccak256::digest(source_code.as_bytes()).into();
        let mut lexer = Lexer::new(source_code.to_owned());
        let object = Object::parse(&mut lexer, None)
            .map_err(|error| anyhow::anyhow!("Yul object `{}` parsing error: {}", path, error))?;
        let mut project_contracts = BTreeMap::new();
        project_contracts.insert(
            path.to_owned(),
            Contract::new(
                path,
                source_hash,
                source_version.clone(),
                IR::new_yul(source_code.to_owned(), object),
                None,
            ),
        );
        Ok(Self::new(
            source_version,
            project_contracts,
            BTreeMap::new(),
        ))
    }
    /// Parses the LLVM IR source code file and returns the source data.
    ///
    /// The IR is not validated here; the LLVM parser reports errors later,
    /// during module creation.
    pub fn try_from_llvm_ir_path(path: &Path) -> anyhow::Result<Self> {
        let source_code = std::fs::read_to_string(path)
            .map_err(|error| anyhow::anyhow!("LLVM IR file {:?} reading error: {}", path, error))?;
        let source_hash = sha3::Keccak256::digest(source_code.as_bytes()).into();
        let source_version =
            SolcVersion::new_simple(revive_llvm_context::polkavm_const::LLVM_VERSION);
        let path = path.to_string_lossy().to_string();
        let mut project_contracts = BTreeMap::new();
        project_contracts.insert(
            path.clone(),
            Contract::new(
                path.clone(),
                source_hash,
                source_version.clone(),
                IR::new_llvm_ir(path, source_code),
                None,
            ),
        );
        Ok(Self::new(
            source_version,
            project_contracts,
            BTreeMap::new(),
        ))
    }
    /// Converts the `solc` JSON output into a convenient project.
    ///
    /// Contracts without optimized Yul IR are skipped; if `solc` produced no
    /// contracts and reported errors of `error` severity, those are returned verbatim.
    pub fn try_from_standard_json_output(
        output: &SolcStandardJsonOutput,
        source_code_files: BTreeMap<String, String>,
        libraries: BTreeMap<String, BTreeMap<String, String>>,
        solc_version: &SolcVersion,
        debug_config: &revive_llvm_context::DebugConfig,
    ) -> anyhow::Result<Self> {
        let files = match output.contracts.as_ref() {
            Some(files) => files,
            None => match &output.errors {
                Some(errors) if errors.iter().any(|e| e.severity == "error") => {
                    anyhow::bail!(serde_json::to_string_pretty(errors).expect("Always valid"));
                }
                _ => &BTreeMap::new(),
            },
        };
        let mut project_contracts = BTreeMap::new();
        for (path, contracts) in files.iter() {
            for (name, contract) in contracts.iter() {
                // Contracts are keyed `<file path>:<contract name>` throughout the project.
                let full_path = format!("{path}:{name}");
                let ir_optimized = match contract.ir_optimized.to_owned() {
                    Some(ir_optimized) => ir_optimized,
                    None => continue,
                };
                if ir_optimized.is_empty() {
                    continue;
                }
                debug_config.dump_yul(full_path.as_str(), ir_optimized.as_str())?;
                let mut lexer = Lexer::new(ir_optimized.to_owned());
                let object = Object::parse(&mut lexer, None).map_err(|error| {
                    anyhow::anyhow!("Contract `{}` parsing error: {:?}", full_path, error)
                })?;
                let source = IR::new_yul(ir_optimized.to_owned(), object);
                // The hash is taken over the original Solidity source, not the Yul IR.
                let source_code = source_code_files
                    .get(path.as_str())
                    .ok_or_else(|| anyhow::anyhow!("Source code for path `{}` not found", path))?;
                let source_hash = sha3::Keccak256::digest(source_code.as_bytes()).into();
                let project_contract = Contract::new(
                    full_path.clone(),
                    source_hash,
                    solc_version.to_owned(),
                    source,
                    contract.metadata.to_owned(),
                );
                project_contracts.insert(full_path, project_contract);
            }
        }
        Ok(Project::new(
            solc_version.to_owned(),
            project_contracts,
            libraries,
        ))
    }
}
impl revive_llvm_context::PolkaVMDependency for Project {
    /// Compiles the dependency contract referenced by `identifier` and returns
    /// its bytecode hash.
    fn compile(
        project: Self,
        identifier: &str,
        optimizer_settings: revive_llvm_context::OptimizerSettings,
        include_metadata_hash: bool,
        debug_config: revive_llvm_context::DebugConfig,
        llvm_arguments: &[String],
        memory_config: revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory,
    ) -> anyhow::Result<String> {
        let contract_path = project.resolve_path(identifier)?;
        // The contract is cloned out of the map because compiling consumes it.
        let contract = project
            .contracts
            .get(contract_path.as_str())
            .cloned()
            .ok_or_else(|| {
                anyhow::anyhow!(
                    "Dependency contract `{}` not found in the project",
                    contract_path
                )
            })?;
        contract
            .compile(
                project,
                optimizer_settings,
                include_metadata_hash,
                debug_config,
                llvm_arguments,
                memory_config,
            )
            .map_err(|error| {
                anyhow::anyhow!(
                    "Dependency contract `{}` compiling error: {}",
                    identifier,
                    error
                )
            })
            .map(|contract| contract.build.bytecode_hash)
    }
    /// Resolves a Yul object identifier to the full contract path.
    ///
    /// A trailing `_deployed` suffix (the runtime-code object variant) is
    /// stripped before the lookup, so both object flavors map to one contract.
    fn resolve_path(&self, identifier: &str) -> anyhow::Result<String> {
        self.identifier_paths
            .get(identifier.strip_suffix("_deployed").unwrap_or(identifier))
            .cloned()
            .ok_or_else(|| {
                anyhow::anyhow!(
                    "Contract with identifier `{}` not found in the project",
                    identifier
                )
            })
    }
    /// Resolves a `<file>:<name>` library path to its configured address,
    /// returned without the leading `0x`.
    ///
    /// NOTE(review): the slice below assumes every configured address is
    /// `0x`-prefixed and at least two characters long; a shorter or
    /// unprefixed value would panic or be silently truncated — confirm
    /// addresses are validated upstream.
    fn resolve_library(&self, path: &str) -> anyhow::Result<String> {
        for (file_path, contracts) in self.libraries.iter() {
            for (contract_name, address) in contracts.iter() {
                let key = format!("{file_path}:{contract_name}");
                if key.as_str() == path {
                    return Ok(address["0x".len()..].to_owned());
                }
            }
        }
        anyhow::bail!("Library `{}` not found in the project", path);
    }
}
+404
View File
@@ -0,0 +1,404 @@
//! Solidity to PolkaVM compiler arguments.
use std::collections::BTreeSet;
use std::path::Path;
use std::path::PathBuf;
use clap::Parser;
use path_slash::PathExt;
/// Compiles the provided Solidity input files (or use the standard input if no files
/// are given or "-" is specified as a file name). Outputs the components based on the
/// chosen options, either to the standard output or to files within the designated
/// output directory.
/// Example: resolc ERC20.sol -O3 --bin --output-dir './build/'
//
// NOTE: the doc comments on the fields below are emitted verbatim as `--help`
// text by clap, so they are user-facing strings.
#[derive(Debug, Parser)]
#[command(name = "The PolkaVM Solidity compiler", arg_required_else_help = true)]
pub struct Arguments {
    /// Print the version and exit.
    #[arg(long = "version")]
    pub version: bool,
    /// Print supported `solc` versions and exit.
    #[arg(long = "supported-solc-versions")]
    pub supported_solc_versions: bool,
    /// Print the license and exit.
    #[arg(long = "license")]
    pub license: bool,
    /// Specify the input paths and remappings.
    /// If an argument contains a '=', it is considered a remapping.
    /// Multiple Solidity files can be passed in the default Solidity mode.
    /// Yul, LLVM IR, and PolkaVM Assembly modes currently support only a single file.
    pub inputs: Vec<String>,
    /// Set the given path as the root of the source tree instead of the root of the filesystem.
    /// Passed to `solc` without changes.
    #[arg(long = "base-path")]
    pub base_path: Option<String>,
    /// Make an additional source directory available to the default import callback.
    /// Can be used multiple times. Can only be used if the base path has a non-empty value.
    /// Passed to `solc` without changes.
    #[arg(long = "include-path")]
    pub include_paths: Vec<String>,
    /// Allow a given path for imports. A list of paths can be supplied by separating them with a comma.
    /// Passed to `solc` without changes.
    #[arg(long = "allow-paths")]
    pub allow_paths: Option<String>,
    /// Create one file per component and contract/file at the specified directory, if given.
    #[arg(short = 'o', long = "output-dir")]
    pub output_directory: Option<PathBuf>,
    /// Overwrite existing files (used together with -o).
    #[arg(long = "overwrite")]
    pub overwrite: bool,
    /// Set the optimization parameter -O[0 | 1 | 2 | 3 | s | z].
    /// Use `3` for best performance and `z` for minimal size.
    #[arg(short = 'O', long = "optimization")]
    pub optimization: Option<char>,
    /// Try to recompile with -Oz if the bytecode is too large.
    #[arg(long = "fallback-Oz")]
    pub fallback_to_optimizing_for_size: bool,
    /// Disable the `solc` optimizer.
    /// Use it if your project uses the `MSIZE` instruction, or in other cases.
    /// Beware that it will prevent libraries from being inlined.
    #[arg(long = "disable-solc-optimizer")]
    pub disable_solc_optimizer: bool,
    /// Specify the path to the `solc` executable. By default, the one in `${PATH}` is used.
    /// Yul mode: `solc` is used for source code validation, as `resolc` itself assumes that the input Yul is valid.
    /// LLVM IR mode: `solc` is unused.
    #[arg(long = "solc")]
    pub solc: Option<String>,
    /// The EVM target version to generate IR for.
    /// See https://github.com/paritytech/revive/blob/main/crates/common/src/evm_version.rs for reference.
    #[arg(long = "evm-version")]
    pub evm_version: Option<String>,
    /// Specify addresses of deployable libraries. Syntax: `<libraryName>=<address> [, or whitespace] ...`.
    /// Addresses are interpreted as hexadecimal strings prefixed with `0x`.
    #[arg(short = 'l', long = "libraries")]
    pub libraries: Vec<String>,
    /// Output a single JSON document containing the specified information.
    /// Available arguments: `abi`, `hashes`, `metadata`, `devdoc`, `userdoc`, `storage-layout`, `ast`, `asm`, `bin`, `bin-runtime`.
    #[arg(long = "combined-json")]
    pub combined_json: Option<String>,
    /// Switch to standard JSON input/output mode. Read from stdin, write the result to stdout.
    /// This is the default used by the Hardhat plugin.
    #[arg(long = "standard-json")]
    pub standard_json: bool,
    /// Switch to missing deployable libraries detection mode.
    /// Only available for standard JSON input/output mode.
    /// Contracts are not compiled in this mode, and all compilation artifacts are not included.
    #[arg(long = "detect-missing-libraries")]
    pub detect_missing_libraries: bool,
    /// Switch to Yul mode.
    /// Only one input Yul file is allowed.
    /// Cannot be used with combined and standard JSON modes.
    #[arg(long = "yul")]
    pub yul: bool,
    /// Switch to LLVM IR mode.
    /// Only one input LLVM IR file is allowed.
    /// Cannot be used with combined and standard JSON modes.
    /// Use this mode at your own risk, as LLVM IR input validation is not implemented.
    #[arg(long = "llvm-ir")]
    pub llvm_ir: bool,
    /// Set metadata hash mode.
    /// The only supported value is `none` that disables appending the metadata hash.
    /// Is enabled by default.
    #[arg(long = "metadata-hash")]
    pub metadata_hash: Option<String>,
    /// Output PolkaVM assembly of the contracts.
    #[arg(long = "asm")]
    pub output_assembly: bool,
    /// Output PolkaVM bytecode of the contracts.
    #[arg(long = "bin")]
    pub output_binary: bool,
    /// Suppress specified warnings.
    /// Available arguments: `ecrecover`, `sendtransfer`, `extcodesize`, `txorigin`, `blocktimestamp`, `blocknumber`, `blockhash`.
    #[arg(long = "suppress-warnings")]
    pub suppress_warnings: Option<Vec<String>>,
    /// Generate source based debug information in the output code file. This only has an effect
    /// with the LLVM-IR code generator and is ignored otherwise.
    #[arg(short = 'g')]
    pub emit_source_debug_info: bool,
    /// Dump all IRs to files in the specified directory.
    /// Only for testing and debugging.
    #[arg(long = "debug-output-dir")]
    pub debug_output_directory: Option<PathBuf>,
    /// Set the verify-each option in LLVM.
    /// Only for testing and debugging.
    #[arg(long = "llvm-verify-each")]
    pub llvm_verify_each: bool,
    /// Set the debug-logging option in LLVM.
    /// Only for testing and debugging.
    #[arg(long = "llvm-debug-logging")]
    pub llvm_debug_logging: bool,
    /// Run this process recursively and provide JSON input to compile a single contract.
    /// Only for usage from within the compiler.
    #[arg(long = "recursive-process")]
    pub recursive_process: bool,
    /// Specify the input file to use instead of stdin when --recursive-process is given.
    /// This is only intended for use when developing the compiler.
    #[cfg(debug_assertions)]
    #[arg(long = "recursive-process-input")]
    pub recursive_process_input: Option<String>,
    /// These are passed to LLVM as the command line to allow manual control.
    #[arg(long = "llvm-arg")]
    pub llvm_arguments: Vec<String>,
    /// The emulated EVM linear heap memory static buffer size in bytes.
    ///
    /// Unlike the EVM, due to the lack of dynamic memory metering, PVM contracts emulate
    /// the EVM heap memory with a static buffer. Consequentially, instead of infinite
    /// memory with exponentially growing gas costs, PVM contracts have a finite amount
    /// of memory with constant gas costs available.
    ///
    /// If the contract uses more heap memory than configured, it will compile fine but
    /// eventually revert execution at runtime!
    ///
    /// You are incentivized to keep this value as small as possible:
    /// 1. Increasing the heap size will increase startup costs.
    /// 2. The heap size contributes to the total memory size a contract can use,
    ///    which includes the contract's code size.
    #[arg(long = "heap-size")]
    pub heap_size: Option<u32>,
    /// The contracts total stack size in bytes.
    ///
    /// PVM is a register machine with a traditional stack memory space for local
    /// variables. This controls the total amount of stack space the contract can use.
    ///
    /// If the contract uses more stack memory than configured, it will compile fine but
    /// eventually revert execution at runtime!
    ///
    /// You are incentivized to keep this value as small as possible:
    /// 1. Increasing the stack size will increase startup costs.
    /// 2. The stack size contributes to the total memory size a contract can use,
    ///    which includes the contract's code size.
    #[arg(long = "stack-size")]
    pub stack_size: Option<u32>,
}
impl Arguments {
    /// Validates the arguments.
    ///
    /// Checks mutually exclusive modes (Yul, LLVM IR, combined JSON, standard
    /// JSON) and rejects flags that are meaningless or forbidden in the
    /// selected mode.
    ///
    /// # Errors
    /// Returns a descriptive error for the first violated constraint.
    pub fn validate(&self) -> anyhow::Result<()> {
        if self.version && std::env::args().count() > 2 {
            anyhow::bail!("No other options are allowed while getting the compiler version.");
        }
        if self.supported_solc_versions && std::env::args().count() > 2 {
            anyhow::bail!(
                "No other options are allowed while getting the supported `solc` versions."
            );
        }
        #[cfg(debug_assertions)]
        if self.recursive_process_input.is_some() && !self.recursive_process {
            // Fixed: the message previously referred to a nonexistent `--process-input` flag.
            anyhow::bail!(
                "--recursive-process-input can only be used when --recursive-process is given"
            );
        }
        // In debug builds the optional `--recursive-process-input` flag and its
        // value account for up to two extra arguments.
        #[cfg(debug_assertions)]
        if self.recursive_process
            && ((self.recursive_process_input.is_none() && std::env::args().count() > 2)
                || (self.recursive_process_input.is_some() && std::env::args().count() > 4))
        {
            anyhow::bail!("No other options are allowed in recursive mode.");
        }
        #[cfg(not(debug_assertions))]
        if self.recursive_process && std::env::args().count() > 2 {
            anyhow::bail!("No other options are allowed in recursive mode.");
        }
        let modes_count = [
            self.yul,
            self.llvm_ir,
            self.combined_json.is_some(),
            self.standard_json,
        ]
        .iter()
        .filter(|&&x| x)
        .count();
        if modes_count > 1 {
            anyhow::bail!("Only one mode is allowed at the same time: Yul, LLVM IR, PolkaVM assembly, combined JSON, standard JSON.");
        }
        if self.yul || self.llvm_ir {
            if self.base_path.is_some() {
                anyhow::bail!(
                    "`base-path` is not used in Yul, LLVM IR and PolkaVM assembly modes."
                );
            }
            if !self.include_paths.is_empty() {
                anyhow::bail!(
                    "`include-paths` is not used in Yul, LLVM IR and PolkaVM assembly modes."
                );
            }
            if self.allow_paths.is_some() {
                anyhow::bail!(
                    "`allow-paths` is not used in Yul, LLVM IR and PolkaVM assembly modes."
                );
            }
            if !self.libraries.is_empty() {
                anyhow::bail!(
                    "Libraries are not supported in Yul, LLVM IR and PolkaVM assembly modes."
                );
            }
            if self.evm_version.is_some() {
                anyhow::bail!(
                    "`evm-version` is not used in Yul, LLVM IR and PolkaVM assembly modes."
                );
            }
            if self.disable_solc_optimizer {
                anyhow::bail!("Disabling the solc optimizer is not supported in Yul, LLVM IR and PolkaVM assembly modes.");
            }
        }
        if self.llvm_ir && self.solc.is_some() {
            anyhow::bail!("`solc` is not used in LLVM IR and PolkaVM assembly modes.");
        }
        if self.combined_json.is_some() && (self.output_assembly || self.output_binary) {
            anyhow::bail!(
                "Cannot output assembly or binary outside of JSON in combined JSON mode."
            );
        }
        if self.standard_json {
            if self.output_assembly || self.output_binary {
                anyhow::bail!(
                    "Cannot output assembly or binary outside of JSON in standard JSON mode."
                );
            }
            if !self.inputs.is_empty() {
                anyhow::bail!("Input files must be passed via standard JSON input.");
            }
            if !self.libraries.is_empty() {
                anyhow::bail!("Libraries must be passed via standard JSON input.");
            }
            if self.evm_version.is_some() {
                anyhow::bail!("EVM version must be passed via standard JSON input.");
            }
            if self.output_directory.is_some() {
                anyhow::bail!("Output directory cannot be used in standard JSON mode.");
            }
            if self.overwrite {
                anyhow::bail!("Overwriting flag cannot be used in standard JSON mode.");
            }
            if self.disable_solc_optimizer {
                anyhow::bail!(
                    "Disabling the solc optimizer must be specified in standard JSON input settings."
                );
            }
            if self.optimization.is_some() {
                anyhow::bail!(
                    "LLVM optimizations must be specified in standard JSON input settings."
                );
            }
            if self.fallback_to_optimizing_for_size {
                anyhow::bail!(
                    "Falling back to -Oz must be specified in standard JSON input settings."
                );
            }
            if self.metadata_hash.is_some() {
                anyhow::bail!(
                    "Metadata hash mode must be specified in standard JSON input settings."
                );
            }
            if self.heap_size.is_some() {
                anyhow::bail!(
                    "Heap size must be specified in standard JSON input polkavm memory settings."
                );
            }
            if self.stack_size.is_some() {
                anyhow::bail!(
                    "Stack size must be specified in standard JSON input polkavm memory settings."
                );
            }
            if self.emit_source_debug_info {
                anyhow::bail!(
                    "Debug info must be requested in standard JSON input polkavm settings."
                );
            }
        }
        Ok(())
    }
    /// Splits `self.inputs` into plain input file paths and import remappings.
    ///
    /// An input containing `=` is treated as a `from=to` remapping; both sides
    /// are normalized to POSIX form. All other inputs become file paths.
    ///
    /// # Errors
    /// Fails on malformed remappings (not exactly two `=`-separated parts) or
    /// on paths that cannot be converted to POSIX form.
    pub fn split_input_files_and_remappings(
        &self,
    ) -> anyhow::Result<(Vec<PathBuf>, Option<BTreeSet<String>>)> {
        let mut input_files = Vec::with_capacity(self.inputs.len());
        let mut remappings = BTreeSet::new();
        for input in self.inputs.iter() {
            if input.contains('=') {
                let mut parts = Vec::with_capacity(2);
                for path in input.trim().split('=') {
                    let path = PathBuf::from(path);
                    parts.push(
                        Self::path_to_posix(path.as_path())?
                            .to_string_lossy()
                            .to_string(),
                    );
                }
                if parts.len() != 2 {
                    anyhow::bail!(
                        "Invalid remapping `{}`: expected two parts separated by '='",
                        input
                    );
                }
                remappings.insert(parts.join("="));
            } else {
                let path = PathBuf::from(input.trim());
                let path = Self::path_to_posix(path.as_path())?;
                input_files.push(path);
            }
        }
        let remappings = if remappings.is_empty() {
            None
        } else {
            Some(remappings)
        };
        Ok((input_files, remappings))
    }
    /// Normalizes an input path by converting it to POSIX format.
    fn path_to_posix(path: &Path) -> anyhow::Result<PathBuf> {
        let path = path
            .to_slash()
            .ok_or_else(|| anyhow::anyhow!("Input path {:?} POSIX conversion error", path))?
            .to_string();
        let path = PathBuf::from(path.as_str());
        Ok(path)
    }
}
+277
View File
@@ -0,0 +1,277 @@
//! Solidity to PolkaVM compiler binary.
pub mod arguments;
use std::io::Write;
use std::str::FromStr;
use resolc::Process;
use self::arguments::Arguments;
#[cfg(feature = "parallel")]
/// The rayon worker stack size.
const RAYON_WORKER_STACK_SIZE: usize = 16 * 1024 * 1024;
// NOTE(review): mimalloc is swapped in on musl targets only — presumably to
// avoid musl's default allocator under multi-threaded load; confirm intent.
#[cfg(target_env = "musl")]
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
fn main() -> anyhow::Result<()> {
std::process::exit(match main_inner() {
Ok(()) => revive_common::EXIT_CODE_SUCCESS,
Err(error) => {
writeln!(std::io::stderr(), "{error}")?;
revive_common::EXIT_CODE_FAILURE
}
})
}
/// The compiler driver: parses and validates CLI arguments, handles the
/// informational flags (version/license), dispatches to the selected
/// compilation mode, and writes or prints the resulting artifacts.
fn main_inner() -> anyhow::Result<()> {
    let arguments = <Arguments as clap::Parser>::try_parse()?;
    arguments.validate()?;
    // Informational flags short-circuit before any compilation setup.
    if arguments.version {
        writeln!(
            std::io::stdout(),
            "{} version {}",
            env!("CARGO_PKG_DESCRIPTION"),
            resolc::ResolcVersion::default().long
        )?;
        return Ok(());
    }
    if arguments.supported_solc_versions {
        writeln!(
            std::io::stdout(),
            ">={},<={}",
            resolc::SolcFirstSupportedVersion,
            resolc::SolcLastSupportedVersion,
        )?;
        return Ok(());
    }
    if arguments.license {
        let license_mit = include_str!("../../../../LICENSE-MIT");
        let license_apache = include_str!("../../../../LICENSE-APACHE");
        writeln!(std::io::stdout(), "{}\n{}\n", license_mit, license_apache)?;
        return Ok(());
    }
    #[cfg(feature = "parallel")]
    rayon::ThreadPoolBuilder::new()
        .stack_size(RAYON_WORKER_STACK_SIZE)
        .build_global()
        .expect("Thread pool configuration failure");
    // Recursive mode: this process was spawned by a parent resolc to compile a
    // single contract; input comes from stdin, or from a file in debug builds.
    if arguments.recursive_process {
        #[cfg(debug_assertions)]
        if let Some(fname) = arguments.recursive_process_input {
            let mut infile = std::fs::File::open(fname)?;
            #[cfg(target_os = "emscripten")]
            {
                return resolc::WorkerProcess::run(Some(&mut infile));
            }
            #[cfg(not(target_os = "emscripten"))]
            {
                return resolc::NativeProcess::run(Some(&mut infile));
            }
        }
        #[cfg(target_os = "emscripten")]
        {
            return resolc::WorkerProcess::run(None);
        }
        #[cfg(not(target_os = "emscripten"))]
        {
            return resolc::NativeProcess::run(None);
        }
    }
    let debug_config = match arguments.debug_output_directory {
        Some(ref debug_output_directory) => {
            std::fs::create_dir_all(debug_output_directory.as_path())?;
            revive_llvm_context::DebugConfig::new(
                Some(debug_output_directory.to_owned()),
                arguments.emit_source_debug_info,
            )
        }
        None => revive_llvm_context::DebugConfig::new(None, arguments.emit_source_debug_info),
    };
    let (input_files, remappings) = arguments.split_input_files_and_remappings()?;
    let suppressed_warnings = match arguments.suppress_warnings {
        Some(warnings) => Some(revive_solc_json_interface::ResolcWarning::try_from_strings(
            warnings.as_slice(),
        )?),
        None => None,
    };
    // The `solc` frontend: the soljson WASM build on emscripten, otherwise the
    // native executable (defaulting to the one found in ${PATH}).
    let mut solc = {
        #[cfg(target_os = "emscripten")]
        {
            resolc::SoljsonCompiler
        }
        #[cfg(not(target_os = "emscripten"))]
        {
            resolc::SolcCompiler::new(
                arguments
                    .solc
                    .unwrap_or_else(|| resolc::SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned()),
            )?
        }
    };
    let evm_version = match arguments.evm_version {
        Some(evm_version) => Some(revive_common::EVMVersion::try_from(evm_version.as_str())?),
        None => None,
    };
    // Optimizing for size is the default when no -O level is given.
    let mut optimizer_settings = match arguments.optimization {
        Some(mode) => revive_llvm_context::OptimizerSettings::try_from_cli(mode)?,
        None => revive_llvm_context::OptimizerSettings::size(),
    };
    if arguments.fallback_to_optimizing_for_size {
        optimizer_settings.enable_fallback_to_size();
    }
    optimizer_settings.is_verify_each_enabled = arguments.llvm_verify_each;
    optimizer_settings.is_debug_logging_enabled = arguments.llvm_debug_logging;
    // The metadata hash is appended unless `--metadata-hash none` was passed.
    let include_metadata_hash = match arguments.metadata_hash {
        Some(metadata_hash) => {
            let metadata =
                revive_solc_json_interface::SolcStandardJsonInputSettingsMetadataHash::from_str(
                    metadata_hash.as_str(),
                )?;
            metadata != revive_solc_json_interface::SolcStandardJsonInputSettingsMetadataHash::None
        }
        None => true,
    };
    let mut memory_config =
        revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory::default();
    if let Some(heap_size) = arguments.heap_size {
        memory_config.heap_size = heap_size
    }
    if let Some(stack_size) = arguments.stack_size {
        memory_config.stack_size = stack_size
    }
    // Mode dispatch. Standard and combined JSON modes handle their own output
    // and return early; Yul, LLVM IR, and the default Solidity mode produce a
    // `build` that is written out below.
    let build = if arguments.yul {
        resolc::yul(
            input_files.as_slice(),
            &mut solc,
            optimizer_settings,
            include_metadata_hash,
            debug_config,
            &arguments.llvm_arguments,
            memory_config,
        )
    } else if arguments.llvm_ir {
        resolc::llvm_ir(
            input_files.as_slice(),
            optimizer_settings,
            include_metadata_hash,
            debug_config,
            &arguments.llvm_arguments,
            memory_config,
        )
    } else if arguments.standard_json {
        resolc::standard_json(
            &mut solc,
            arguments.detect_missing_libraries,
            arguments.base_path,
            arguments.include_paths,
            arguments.allow_paths,
            debug_config,
            &arguments.llvm_arguments,
        )?;
        return Ok(());
    } else if let Some(format) = arguments.combined_json {
        resolc::combined_json(
            format,
            input_files.as_slice(),
            arguments.libraries,
            &mut solc,
            evm_version,
            !arguments.disable_solc_optimizer,
            optimizer_settings,
            include_metadata_hash,
            arguments.base_path,
            arguments.include_paths,
            arguments.allow_paths,
            remappings,
            suppressed_warnings,
            debug_config,
            arguments.output_directory,
            arguments.overwrite,
            &arguments.llvm_arguments,
            memory_config,
        )?;
        return Ok(());
    } else {
        resolc::standard_output(
            input_files.as_slice(),
            arguments.libraries,
            &mut solc,
            evm_version,
            !arguments.disable_solc_optimizer,
            optimizer_settings,
            include_metadata_hash,
            arguments.base_path,
            arguments.include_paths,
            arguments.allow_paths,
            remappings,
            suppressed_warnings,
            debug_config,
            &arguments.llvm_arguments,
            memory_config,
        )
    }?;
    // Artifact output: to the output directory if given, otherwise to stdout.
    if let Some(output_directory) = arguments.output_directory {
        std::fs::create_dir_all(&output_directory)?;
        build.write_to_directory(
            &output_directory,
            arguments.output_assembly,
            arguments.output_binary,
            arguments.overwrite,
        )?;
        writeln!(
            std::io::stderr(),
            "Compiler run successful. Artifact(s) can be found in directory {output_directory:?}."
        )?;
    } else if arguments.output_assembly || arguments.output_binary {
        for (path, contract) in build.contracts.into_iter() {
            if arguments.output_assembly {
                let assembly_text = contract.build.assembly_text;
                writeln!(
                    std::io::stdout(),
                    "Contract `{}` assembly:\n\n{}",
                    path,
                    assembly_text
                )?;
            }
            if arguments.output_binary {
                writeln!(
                    std::io::stdout(),
                    "Contract `{}` bytecode: 0x{}",
                    path,
                    hex::encode(contract.build.bytecode)
                )?;
            }
        }
    } else {
        writeln!(
            std::io::stderr(),
            "Compiler run successful. No output requested. Use --asm and --bin flags."
        )?;
    }
    Ok(())
}
+50
View File
@@ -0,0 +1,50 @@
//! The Solidity compiler.
#[cfg(not(target_os = "emscripten"))]
pub mod solc_compiler;
#[cfg(target_os = "emscripten")]
pub mod soljson_compiler;
pub mod version;
use std::path::Path;
use std::path::PathBuf;
use revive_solc_json_interface::combined_json::CombinedJson;
use revive_solc_json_interface::SolcStandardJsonInput;
use revive_solc_json_interface::SolcStandardJsonOutput;
use self::version::Version;
/// The first version of `solc` with the support of standard JSON interface.
pub const FIRST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 8, 0);
/// The last supported version of `solc`.
/// Bump this once a newer upstream `solc` release has been validated.
pub const LAST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 8, 30);
/// `--include-path` was introduced in solc `0.8.8` <https://github.com/ethereum/solidity/releases/tag/v0.8.8>
pub const FIRST_INCLUDE_PATH_VERSION: semver::Version = semver::Version::new(0, 8, 8);
/// The common interface over the Solidity compiler backends: the native
/// `solc` executable and the Emscripten `soljson` library.
pub trait Compiler {
    /// Compiles the Solidity `--standard-json` `input` into Yul IR.
    ///
    /// `base_path`, `include_paths` and `allow_paths` mirror the corresponding
    /// `solc` CLI flags; backends that cannot honor them return an error.
    fn standard_json(
        &mut self,
        input: SolcStandardJsonInput,
        base_path: Option<String>,
        include_paths: Vec<String>,
        allow_paths: Option<String>,
    ) -> anyhow::Result<SolcStandardJsonOutput>;
    /// The `solc --combined-json abi,hashes...` mirror: compiles `paths` and
    /// returns the combined JSON restricted to the requested selectors.
    fn combined_json(
        &self,
        paths: &[PathBuf],
        combined_json_argument: &str,
    ) -> anyhow::Result<CombinedJson>;
    /// The `solc` Yul validator: checks that the file at `path` is accepted
    /// by `solc --strict-assembly`.
    fn validate_yul(&self, path: &Path) -> anyhow::Result<()>;
    /// The `solc --version` mini-parser.
    fn version(&mut self) -> anyhow::Result<Version>;
}
+266
View File
@@ -0,0 +1,266 @@
//! The Solidity compiler solc interface.
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use revive_solc_json_interface::combined_json::CombinedJson;
use revive_solc_json_interface::SolcStandardJsonInput;
use revive_solc_json_interface::SolcStandardJsonOutput;
use crate::solc::version::Version;
use super::Compiler;
/// The native `solc` executable interface.
pub struct SolcCompiler {
    /// The binary executable name, resolved through `${PATH}`.
    pub executable: String,
}
impl SolcCompiler {
/// The default executable name.
pub const DEFAULT_EXECUTABLE_NAME: &'static str = "solc";
/// A shortcut constructor.
/// Different tools may use different `executable` names. For example, the integration tester
/// uses `solc-<version>` format.
pub fn new(executable: String) -> anyhow::Result<Self> {
if let Err(error) = which::which(executable.as_str()) {
anyhow::bail!(
"The `{executable}` executable not found in ${{PATH}}: {}",
error
);
}
Ok(Self { executable })
}
}
impl Compiler for SolcCompiler {
    /// Compiles the Solidity `--standard-json` input into Yul IR.
    ///
    /// Spawns the `solc` subprocess, pipes the serialized standard JSON to its
    /// stdin and parses the standard JSON it prints on stdout.
    fn standard_json(
        &mut self,
        mut input: SolcStandardJsonInput,
        base_path: Option<String>,
        include_paths: Vec<String>,
        allow_paths: Option<String>,
    ) -> anyhow::Result<SolcStandardJsonOutput> {
        // Validates the solc version against the supported range and the
        // requested include paths before doing any work.
        let version = self.version()?.validate(&include_paths)?.default;
        let mut command = std::process::Command::new(self.executable.as_str());
        // Both stdin and stdout are piped: the input JSON is written to stdin
        // and the output JSON is read back from stdout.
        command.stdin(std::process::Stdio::piped());
        command.stdout(std::process::Stdio::piped());
        command.arg("--standard-json");
        for include_path in include_paths.into_iter() {
            command.arg("--include-path");
            command.arg(include_path);
        }
        if let Some(base_path) = base_path {
            command.arg("--base-path");
            command.arg(base_path);
        }
        if let Some(allow_paths) = allow_paths {
            command.arg("--allow-paths");
            command.arg(allow_paths);
        }
        // Adjusts the input to the capabilities of the detected solc version.
        input.normalize(&version);
        // The suppression list is consumed here and applied to the AST below;
        // solc itself does not understand this extension field.
        let suppressed_warnings = input.suppressed_warnings.take().unwrap_or_default();
        let input_json = serde_json::to_vec(&input).expect("Always valid");
        let process = command.spawn().map_err(|error| {
            anyhow::anyhow!("{} subprocess spawning error: {:?}", self.executable, error)
        })?;
        process
            .stdin
            .as_ref()
            .ok_or_else(|| anyhow::anyhow!("{} stdin getting error", self.executable))?
            .write_all(input_json.as_slice())
            .map_err(|error| {
                anyhow::anyhow!("{} stdin writing error: {:?}", self.executable, error)
            })?;
        // `wait_with_output` closes the child's stdin before waiting, so the
        // subprocess sees EOF and can terminate.
        let output = process.wait_with_output().map_err(|error| {
            anyhow::anyhow!("{} subprocess output error: {:?}", self.executable, error)
        })?;
        if !output.status.success() {
            anyhow::bail!(
                "{} error: {}",
                self.executable,
                String::from_utf8_lossy(output.stderr.as_slice()).to_string()
            );
        }
        // On a parse failure the raw stdout is pretty-printed (or dumped
        // verbatim) into the error message to ease debugging.
        let mut output: SolcStandardJsonOutput =
            revive_common::deserialize_from_slice(output.stdout.as_slice()).map_err(|error| {
                anyhow::anyhow!(
                    "{} subprocess output parsing error: {}\n{}",
                    self.executable,
                    error,
                    revive_common::deserialize_from_slice::<serde_json::Value>(
                        output.stdout.as_slice()
                    )
                    .map(|json| serde_json::to_string_pretty(&json).expect("Always valid"))
                    .unwrap_or_else(
                        |_| String::from_utf8_lossy(output.stdout.as_slice()).to_string()
                    ),
                )
            })?;
        output.preprocess_ast(suppressed_warnings.as_slice())?;
        Ok(output)
    }
    /// The `solc --combined-json abi,hashes...` mirror.
    ///
    /// The `asm`, `bin` and `bin-runtime` selectors are filtered out before
    /// invoking `solc` (their PolkaVM values are produced by revive, not solc)
    /// and stubbed with empty placeholders in the result.
    fn combined_json(
        &self,
        paths: &[PathBuf],
        combined_json_argument: &str,
    ) -> anyhow::Result<CombinedJson> {
        let mut command = std::process::Command::new(self.executable.as_str());
        command.args(paths);
        let mut combined_json_flags = Vec::new();
        let mut combined_json_fake_flag_pushed = false;
        let mut filtered_flags = Vec::with_capacity(3);
        for flag in combined_json_argument.split(',') {
            match flag {
                flag @ "asm" | flag @ "bin" | flag @ "bin-runtime" => filtered_flags.push(flag),
                flag => combined_json_flags.push(flag),
            }
        }
        // `solc --combined-json` requires at least one selector; push `ast`
        // as a placeholder and remember to strip its by-products afterwards.
        if combined_json_flags.is_empty() {
            combined_json_flags.push("ast");
            combined_json_fake_flag_pushed = true;
        }
        command.arg("--combined-json");
        command.arg(combined_json_flags.join(","));
        let output = command.output().map_err(|error| {
            anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
        })?;
        if !output.status.success() {
            // Forward both streams to the user before bailing, since solc
            // reports diagnostics on either.
            writeln!(
                std::io::stdout(),
                "{}",
                String::from_utf8_lossy(output.stdout.as_slice())
            )?;
            writeln!(
                std::io::stdout(),
                "{}",
                String::from_utf8_lossy(output.stderr.as_slice())
            )?;
            anyhow::bail!(
                "{} error: {}",
                self.executable,
                String::from_utf8_lossy(output.stdout.as_slice()).to_string()
            );
        }
        let mut combined_json: CombinedJson =
            revive_common::deserialize_from_slice(output.stdout.as_slice()).map_err(|error| {
                anyhow::anyhow!(
                    "{} subprocess output parsing error: {}\n{}",
                    self.executable,
                    error,
                    revive_common::deserialize_from_slice::<serde_json::Value>(
                        output.stdout.as_slice()
                    )
                    .map(|json| serde_json::to_string_pretty(&json).expect("Always valid"))
                    .unwrap_or_else(
                        |_| String::from_utf8_lossy(output.stdout.as_slice()).to_string()
                    ),
                )
            })?;
        // Stub out the filtered selectors so downstream consumers find the
        // fields present (to be filled in by the revive code generator).
        for filtered_flag in filtered_flags.into_iter() {
            for (_path, contract) in combined_json.contracts.iter_mut() {
                match filtered_flag {
                    "asm" => contract.asm = Some(serde_json::Value::Null),
                    "bin" => contract.bin = Some("".to_owned()),
                    "bin-runtime" => contract.bin_runtime = Some("".to_owned()),
                    _ => continue,
                }
            }
        }
        // Drop the artifacts produced solely by the placeholder `ast` flag.
        if combined_json_fake_flag_pushed {
            combined_json.source_list = None;
            combined_json.sources = None;
        }
        combined_json.remove_evm();
        Ok(combined_json)
    }
    /// The `solc` Yul validator: runs `solc --strict-assembly` on `path`.
    fn validate_yul(&self, path: &Path) -> anyhow::Result<()> {
        let mut command = std::process::Command::new(self.executable.as_str());
        command.arg("--strict-assembly");
        command.arg(path);
        let output = command.output().map_err(|error| {
            anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
        })?;
        if !output.status.success() {
            anyhow::bail!(
                "{} error: {}",
                self.executable,
                String::from_utf8_lossy(output.stderr.as_slice()).to_string()
            );
        }
        Ok(())
    }
    /// The `solc --version` mini-parser.
    ///
    /// Expects the version on the second stdout line, second word, in the
    /// `<semver>+<metadata>` format; an optional revision is read from a
    /// `-`-separated component on the third line.
    fn version(&mut self) -> anyhow::Result<Version> {
        let mut command = std::process::Command::new(self.executable.as_str());
        command.arg("--version");
        let output = command.output().map_err(|error| {
            anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
        })?;
        if !output.status.success() {
            anyhow::bail!(
                "{} error: {}",
                self.executable,
                String::from_utf8_lossy(output.stderr.as_slice()).to_string()
            );
        }
        let stdout = String::from_utf8_lossy(output.stdout.as_slice());
        let long = stdout
            .lines()
            .nth(1)
            .ok_or_else(|| {
                anyhow::anyhow!("{} version parsing: not enough lines", self.executable)
            })?
            .split(' ')
            .nth(1)
            .ok_or_else(|| {
                anyhow::anyhow!(
                    "{} version parsing: not enough words in the 2nd line",
                    self.executable
                )
            })?
            .to_owned();
        // Strip the `+commit...` build metadata to obtain the plain semver.
        let default: semver::Version = long
            .split('+')
            .next()
            .ok_or_else(|| {
                anyhow::anyhow!("{} version parsing: metadata dropping", self.executable)
            })?
            .parse()
            .map_err(|error| anyhow::anyhow!("{} version parsing: {}", self.executable, error))?;
        // Best-effort: an optional extra revision on the 3rd line; absence is fine.
        let l2_revision: Option<semver::Version> = stdout
            .lines()
            .nth(2)
            .and_then(|line| line.split(' ').nth(1))
            .and_then(|line| line.split('-').nth(1))
            .and_then(|version| version.parse().ok());
        Ok(Version::new(long, default, l2_revision))
    }
}
+121
View File
@@ -0,0 +1,121 @@
//! The Solidity compiler solJson interface.
use std::path::Path;
use std::path::PathBuf;
use revive_solc_json_interface::combined_json::CombinedJson;
use revive_solc_json_interface::SolcStandardJsonInput;
use revive_solc_json_interface::SolcStandardJsonOutput;
use crate::solc::version::Version;
use anyhow::Context;
use std::ffi::{c_char, c_void, CStr, CString};
use super::Compiler;
extern "C" {
fn soljson_version() -> *const c_char;
fn soljson_compile(inputPtr: *const c_char, inputLen: usize) -> *const c_char;
}
/// The Solidity compiler backed by the Emscripten `soljson` library (no state needed).
pub struct SoljsonCompiler;
impl Compiler for SoljsonCompiler {
    /// Compiles the Solidity `--standard-json` input into Yul IR.
    ///
    /// Filesystem-related options (`base_path`, `include_paths`, `allow_paths`)
    /// are rejected: soljson compiles purely in memory.
    fn standard_json(
        &mut self,
        mut input: SolcStandardJsonInput,
        base_path: Option<String>,
        include_paths: Vec<String>,
        allow_paths: Option<String>,
    ) -> anyhow::Result<SolcStandardJsonOutput> {
        if !include_paths.is_empty() {
            anyhow::bail!("configuring include paths is not supported with solJson")
        }
        if base_path.is_some() {
            anyhow::bail!("configuring the base path is not supported with solJson")
        }
        if allow_paths.is_some() {
            anyhow::bail!("configuring allow paths is not supported with solJson")
        }
        // `include_paths` is guaranteed empty here, so `validate` only checks
        // the supported version range.
        let version = self.version()?.validate(&include_paths)?.default;
        input.normalize(&version);
        // Consumed here and applied to the AST below; solc itself does not
        // understand this extension field.
        let suppressed_warnings = input.suppressed_warnings.take().unwrap_or_default();
        let input_json = serde_json::to_string(&input).expect("Always valid");
        let out = Self::compile_standard_json(input_json)?;
        // On a parse failure the raw output is pretty-printed (or dumped
        // verbatim) into the error message to ease debugging.
        let mut output: SolcStandardJsonOutput =
            revive_common::deserialize_from_slice(out.as_bytes()).map_err(|error| {
                anyhow::anyhow!(
                    "Soljson output parsing error: {}\n{}",
                    error,
                    revive_common::deserialize_from_slice::<serde_json::Value>(out.as_bytes())
                        .map(|json| serde_json::to_string_pretty(&json).expect("Always valid"))
                        .unwrap_or_else(|_| String::from_utf8_lossy(out.as_bytes()).to_string()),
                )
            })?;
        output.preprocess_ast(suppressed_warnings.as_slice())?;
        Ok(output)
    }
    /// Not available for the soljson backend.
    fn combined_json(
        &self,
        _paths: &[PathBuf],
        _combined_json_argument: &str,
    ) -> anyhow::Result<CombinedJson> {
        unimplemented!();
    }
    /// Not available for the soljson backend.
    fn validate_yul(&self, _path: &Path) -> anyhow::Result<()> {
        unimplemented!();
    }
    /// Parses the soljson version string (`<semver>+<metadata>`, with an
    /// optional `-`-separated revision).
    fn version(&mut self) -> anyhow::Result<Version> {
        let version = Self::get_soljson_version()?;
        let long = version.clone();
        let default: semver::Version = version
            .split('+')
            .next()
            .ok_or_else(|| anyhow::anyhow!("Soljson version parsing: metadata dropping"))?
            .parse()
            .map_err(|error| anyhow::anyhow!("Soljson version parsing: {}", error))?;
        // Best-effort: absence of a revision component yields `None`.
        let l2_revision: Option<semver::Version> = version
            .split('-')
            .nth(1)
            .and_then(|version| version.parse().ok());
        Ok(Version::new(long, default, l2_revision))
    }
}
impl SoljsonCompiler {
    /// Queries the soljson library for its version string.
    fn get_soljson_version() -> anyhow::Result<String> {
        // SAFETY: assumes `soljson_version` returns a malloc-allocated,
        // NUL-terminated string that the caller must release with `free`
        // — TODO confirm against the soljson C API contract.
        unsafe {
            let version_ptr = soljson_version();
            let version = CStr::from_ptr(version_ptr)
                .to_str()
                .with_context(|| "Failed to convert C string to Rust string")
                .map(str::to_owned);
            // Free the C buffer before propagating any conversion error,
            // so the pointer is released on both paths.
            libc::free(version_ptr as *mut c_void);
            Ok(version?)
        }
    }
    /// Runs one standard JSON compilation through `soljson_compile`.
    fn compile_standard_json(input: String) -> anyhow::Result<String> {
        // `CString::new` fails only if `input` contains interior NULs, which
        // serialized JSON never does.
        let c_input = CString::new(input).unwrap();
        let c_input_len = c_input.as_bytes().len();
        // SAFETY: `c_input` outlives the call; assumes the returned pointer is
        // a malloc-allocated, NUL-terminated string owned by us — TODO confirm
        // against the soljson C API contract.
        unsafe {
            let output_ptr = soljson_compile(c_input.as_ptr(), c_input_len);
            let output_json = CStr::from_ptr(output_ptr)
                .to_str()
                .with_context(|| "Failed to convert C string to Rust string")
                .map(str::to_owned);
            libc::free(output_ptr as *mut c_void);
            Ok(output_json?)
        }
    }
}
+61
View File
@@ -0,0 +1,61 @@
//! The Solidity compiler version.
use serde::Deserialize;
use serde::Serialize;
/// The Solidity compiler version, as reported by `solc --version`.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Version {
    /// The long version string, including build metadata.
    pub long: String,
    /// The short `semver`, with the build metadata stripped.
    pub default: semver::Version,
    /// The L2 revision additional versioning, when present.
    pub l2_revision: Option<semver::Version>,
}
impl Version {
    /// A shortcut constructor.
    pub fn new(
        long: String,
        default: semver::Version,
        l2_revision: Option<semver::Version>,
    ) -> Self {
        Self {
            long,
            default,
            l2_revision,
        }
    }
    /// A shortcut constructor for a simple version, where the long form is
    /// just the rendered `semver` and no L2 revision is set.
    pub fn new_simple(version: semver::Version) -> Self {
        Self::new(version.to_string(), version, None)
    }
    /// Checks the version against the supported range and the requested
    /// `--include-path` usage, returning `self` unchanged on success.
    pub fn validate(self, include_paths: &[String]) -> anyhow::Result<Self> {
        anyhow::ensure!(
            self.default >= super::FIRST_SUPPORTED_VERSION,
            "`solc` versions <{} are not supported, found {}",
            super::FIRST_SUPPORTED_VERSION,
            self.default
        );
        anyhow::ensure!(
            self.default <= super::LAST_SUPPORTED_VERSION,
            "`solc` versions >{} are not supported, found {}",
            super::LAST_SUPPORTED_VERSION,
            self.default
        );
        anyhow::ensure!(
            include_paths.is_empty() || self.default >= super::FIRST_INCLUDE_PATH_VERSION,
            "--include-path is not supported in solc {}",
            self.default
        );
        Ok(self)
    }
}
+421
View File
@@ -0,0 +1,421 @@
//! Common utility used for in frontend and integration tests.
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Mutex;
use once_cell::sync::Lazy;
use revive_llvm_context::OptimizerSettings;
use revive_solc_json_interface::standard_json::output::contract::evm::bytecode::Bytecode;
use revive_solc_json_interface::standard_json::output::contract::evm::bytecode::DeployedBytecode;
use revive_solc_json_interface::warning::Warning;
use revive_solc_json_interface::SolcStandardJsonInput;
use revive_solc_json_interface::SolcStandardJsonInputSettingsOptimizer;
use revive_solc_json_interface::SolcStandardJsonInputSettingsSelection;
use revive_solc_json_interface::SolcStandardJsonOutput;
use crate::project::Project;
use crate::solc::solc_compiler::SolcCompiler;
use crate::solc::Compiler;
/// Process-wide cache of compiled PolkaVM blobs, keyed by source and options.
static PVM_BLOB_CACHE: Lazy<Mutex<HashMap<CachedBlob, Vec<u8>>>> = Lazy::new(Default::default);
/// Process-wide cache of EVM deploy (creation) bytecode.
static EVM_BLOB_CACHE: Lazy<Mutex<HashMap<CachedBlob, Vec<u8>>>> = Lazy::new(Default::default);
/// Process-wide cache of EVM deployed (runtime) bytecode.
static EVM_RUNTIME_BLOB_CACHE: Lazy<Mutex<HashMap<CachedBlob, Vec<u8>>>> =
    Lazy::new(Default::default);
/// The debug configuration shared by the test helpers below.
const DEBUG_CONFIG: revive_llvm_context::DebugConfig =
    revive_llvm_context::DebugConfig::new(None, true);
/// The cache key for compiled blobs: the full source text plus every option
/// that influences code generation.
#[derive(Hash, PartialEq, Eq)]
struct CachedBlob {
    // The contract name within the source file.
    contract_name: String,
    // The complete Solidity source text.
    solidity: String,
    // Whether the `solc` optimizer was enabled.
    solc_optimizer_enabled: bool,
    // The middle-end optimization level; empty for EVM compilations.
    opt: String,
}
/// Checks if the required executables are present in `${PATH}`.
fn check_dependencies() {
    let executables = [
        crate::r#const::DEFAULT_EXECUTABLE_NAME,
        SolcCompiler::DEFAULT_EXECUTABLE_NAME,
    ];
    for executable in executables {
        assert!(
            which::which(executable).is_ok(),
            "The `{executable}` executable not found in ${{PATH}}"
        );
    }
}
/// Builds the Solidity project and returns the standard JSON output.
///
/// Convenience wrapper around [`build_solidity_with_options`] with the `solc`
/// optimizer enabled.
pub fn build_solidity(
    sources: BTreeMap<String, String>,
    libraries: BTreeMap<String, BTreeMap<String, String>>,
    remappings: Option<BTreeSet<String>>,
    optimizer_settings: revive_llvm_context::OptimizerSettings,
) -> anyhow::Result<SolcStandardJsonOutput> {
    build_solidity_with_options(sources, libraries, remappings, optimizer_settings, true)
}
/// Builds the Solidity project and returns the standard JSON output.
/// Gives control over additional options:
/// - `solc_optimizer_enabled`: Whether to use the `solc` optimizer
pub fn build_solidity_with_options(
    sources: BTreeMap<String, String>,
    libraries: BTreeMap<String, BTreeMap<String, String>>,
    remappings: Option<BTreeSet<String>>,
    optimizer_settings: revive_llvm_context::OptimizerSettings,
    solc_optimizer_enabled: bool,
) -> anyhow::Result<SolcStandardJsonOutput> {
    check_dependencies();
    inkwell::support::enable_llvm_pretty_stack_trace();
    revive_llvm_context::initialize_llvm(
        revive_llvm_context::Target::PVM,
        crate::DEFAULT_EXECUTABLE_NAME,
        &[],
    );
    // Best-effort: the executable path may already be set by a previous call.
    let _ = crate::process::native_process::EXECUTABLE
        .set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
    let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
    let solc_version = solc.version()?;
    let input = SolcStandardJsonInput::try_from_sources(
        None,
        sources.clone(),
        libraries.clone(),
        remappings,
        SolcStandardJsonInputSettingsSelection::new_required(),
        SolcStandardJsonInputSettingsOptimizer::new(
            solc_optimizer_enabled,
            // The last character of the middle-end level selects the solc mode.
            optimizer_settings.middle_end_as_string().chars().last(),
            &solc_version.default,
            false,
        ),
        None,
        None,
        None,
    )?;
    let mut output = solc.standard_json(input, None, vec![], None)?;
    let debug_config = revive_llvm_context::DebugConfig::new(
        None,
        optimizer_settings.middle_end_as_string() != "z",
    );
    let project = Project::try_from_standard_json_output(
        &output,
        sources,
        libraries,
        &solc_version,
        &debug_config,
    )?;
    let build: crate::Build = project.compile(
        optimizer_settings,
        false,
        debug_config,
        Default::default(),
        Default::default(),
    )?;
    // Merge the PolkaVM build artifacts back into the solc standard JSON.
    build.write_to_standard_json(&mut output, &solc_version)?;
    Ok(output)
}
/// Build a Solidity contract and get the EVM code
///
/// Returns a map from contract name to its (deploy, runtime) bytecode pair.
/// Contracts without both bytecode artifacts are silently skipped.
pub fn build_solidity_with_options_evm(
    sources: BTreeMap<String, String>,
    libraries: BTreeMap<String, BTreeMap<String, String>>,
    remappings: Option<BTreeSet<String>>,
    solc_optimizer_enabled: bool,
) -> anyhow::Result<BTreeMap<String, (Bytecode, DeployedBytecode)>> {
    check_dependencies();
    inkwell::support::enable_llvm_pretty_stack_trace();
    revive_llvm_context::initialize_llvm(
        revive_llvm_context::Target::PVM,
        crate::DEFAULT_EXECUTABLE_NAME,
        &[],
    );
    // Best-effort: the executable path may already be set by a previous call.
    let _ = crate::process::native_process::EXECUTABLE
        .set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
    let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
    let solc_version = solc.version()?;
    let input = SolcStandardJsonInput::try_from_sources(
        None,
        sources.clone(),
        libraries.clone(),
        remappings,
        SolcStandardJsonInputSettingsSelection::new_required(),
        SolcStandardJsonInputSettingsOptimizer::new(
            solc_optimizer_enabled,
            None,
            &solc_version.default,
            false,
        ),
        None,
        None,
        None,
    )?;
    let mut output = solc.standard_json(input, None, vec![], None)?;
    let mut contracts = BTreeMap::new();
    if let Some(files) = output.contracts.as_mut() {
        for (_, file) in files.iter_mut() {
            // NOTE(review): contracts are keyed by name only, so identically
            // named contracts from different files overwrite each other.
            for (name, contract) in file.iter_mut() {
                if let Some(evm) = contract.evm.as_mut() {
                    let (Some(bytecode), Some(deployed_bytecode)) =
                        (evm.bytecode.as_ref(), evm.deployed_bytecode.as_ref())
                    else {
                        continue;
                    };
                    contracts.insert(name.clone(), (bytecode.clone(), deployed_bytecode.clone()));
                }
            }
        }
    }
    Ok(contracts)
}
/// Builds the Solidity project and returns the standard JSON output,
/// annotated with the libraries that are referenced but not supplied.
pub fn build_solidity_and_detect_missing_libraries(
    sources: BTreeMap<String, String>,
    libraries: BTreeMap<String, BTreeMap<String, String>>,
) -> anyhow::Result<SolcStandardJsonOutput> {
    check_dependencies();
    inkwell::support::enable_llvm_pretty_stack_trace();
    revive_llvm_context::initialize_llvm(
        revive_llvm_context::Target::PVM,
        crate::DEFAULT_EXECUTABLE_NAME,
        &[],
    );
    // Best-effort: the executable path may already be set by a previous call.
    let _ = crate::process::native_process::EXECUTABLE
        .set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
    let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
    let solc_version = solc.version()?;
    let input = SolcStandardJsonInput::try_from_sources(
        None,
        sources.clone(),
        libraries.clone(),
        None,
        SolcStandardJsonInputSettingsSelection::new_required(),
        SolcStandardJsonInputSettingsOptimizer::new(true, None, &solc_version.default, false),
        None,
        None,
        None,
    )?;
    let mut output = solc.standard_json(input, None, vec![], None)?;
    let project = Project::try_from_standard_json_output(
        &output,
        sources,
        libraries,
        &solc_version,
        &DEBUG_CONFIG,
    )?;
    let missing_libraries = project.get_missing_libraries();
    // Reuse the version queried above instead of calling `solc.version()?`
    // again, which would spawn the `solc` subprocess a second time
    // (consistent with `build_solidity_with_options`).
    missing_libraries.write_to_standard_json(&mut output, &solc_version)?;
    Ok(output)
}
/// Checks if the Yul project can be built without errors.
pub fn build_yul(source_code: &str) -> anyhow::Result<()> {
    check_dependencies();
    inkwell::support::enable_llvm_pretty_stack_trace();
    revive_llvm_context::initialize_llvm(
        revive_llvm_context::Target::PVM,
        crate::DEFAULT_EXECUTABLE_NAME,
        &[],
    );
    // The Yul source is parsed from a string under a synthetic file name.
    let project = Project::try_from_yul_string::<SolcCompiler>(
        PathBuf::from("test.yul").as_path(),
        source_code,
        None,
    )?;
    // Compile without optimizations; the build artifact itself is discarded.
    let _build = project.compile(
        revive_llvm_context::OptimizerSettings::none(),
        false,
        DEBUG_CONFIG,
        Default::default(),
        Default::default(),
    )?;
    Ok(())
}
/// Checks if the built Solidity project contains the given warning.
///
/// Returns `true` when any compiler message contains `warning_substring`.
/// When `skip_for_revive_edition` is set and the detected `solc` carries an
/// L2 revision, the check is skipped and `true` is returned unconditionally.
pub fn check_solidity_warning(
    source_code: &str,
    warning_substring: &str,
    libraries: BTreeMap<String, BTreeMap<String, String>>,
    skip_for_revive_edition: bool,
    suppressed_warnings: Option<Vec<Warning>>,
) -> anyhow::Result<bool> {
    check_dependencies();
    let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
    let solc_version = solc.version()?;
    if skip_for_revive_edition && solc_version.l2_revision.is_some() {
        return Ok(true);
    }
    let mut sources = BTreeMap::new();
    sources.insert("test.sol".to_string(), source_code.to_string());
    let input = SolcStandardJsonInput::try_from_sources(
        None,
        sources.clone(),
        libraries,
        None,
        SolcStandardJsonInputSettingsSelection::new_required(),
        SolcStandardJsonInputSettingsOptimizer::new(true, None, &solc_version.default, false),
        None,
        suppressed_warnings,
        None,
    )?;
    // Only the solc front-end run is needed; no PolkaVM compilation happens.
    let output = solc.standard_json(input, None, vec![], None)?;
    let contains_warning = output
        .errors
        .ok_or_else(|| anyhow::anyhow!("Solidity compiler messages not found"))?
        .iter()
        .any(|error| error.formatted_message.contains(warning_substring));
    Ok(contains_warning)
}
/// Compile the blob of `contract_name` found in given `source_code`.
/// The `solc` optimizer will be enabled and the `cycles` optimization
/// profile is used.
pub fn compile_blob(contract_name: &str, source_code: &str) -> Vec<u8> {
    let optimizer_settings = OptimizerSettings::cycles();
    compile_blob_with_options(contract_name, source_code, true, optimizer_settings)
}
/// Compile the EVM bin-runtime of `contract_name` found in given `source_code`.
/// The `solc` optimizer will be enabled.
pub fn compile_evm_bin_runtime(contract_name: &str, source_code: &str) -> Vec<u8> {
    let runtime = true;
    compile_evm(contract_name, source_code, true, runtime)
}
/// Compile the EVM bin (deploy code) of `contract_name` found in given
/// `source_code`.
pub fn compile_evm_deploy_code(
    contract_name: &str,
    source_code: &str,
    solc_optimizer_enabled: bool,
) -> Vec<u8> {
    let runtime = false;
    compile_evm(contract_name, source_code, solc_optimizer_enabled, runtime)
}
/// Compiles `source_code` with `solc` and returns the EVM bytecode of
/// `contract_name`.
///
/// `runtime` selects the deployed (runtime) bytecode over the deploy
/// (creation) bytecode. Results are memoized per process in the EVM blob
/// caches, keyed by the source, contract name and optimizer flag.
fn compile_evm(
    contract_name: &str,
    source_code: &str,
    solc_optimizer_enabled: bool,
    runtime: bool,
) -> Vec<u8> {
    let id = CachedBlob {
        contract_name: contract_name.to_owned(),
        solidity: source_code.to_owned(),
        solc_optimizer_enabled,
        // EVM compilations carry no middle-end level.
        opt: String::new(),
    };
    let cache = if runtime {
        &EVM_RUNTIME_BLOB_CACHE
    } else {
        &EVM_BLOB_CACHE
    };
    if let Some(blob) = cache.lock().unwrap().get(&id) {
        return blob.clone();
    }
    let file_name = "contract.sol";
    let contracts = build_solidity_with_options_evm(
        [(file_name.into(), source_code.into())].into(),
        Default::default(),
        None,
        solc_optimizer_enabled,
    )
    .expect("source should compile");
    // `get` already returns a reference; the original took an extra `&`,
    // yielding a needless double reference.
    let object = contracts
        .get(contract_name)
        .unwrap_or_else(|| panic!("contract '{}' didn't produce bin-runtime", contract_name));
    let code = if runtime {
        object.1.object.as_str()
    } else {
        object.0.object.as_str()
    };
    // Typo fixed in the panic message: "shold" -> "should".
    let blob = hex::decode(code).expect("code should be hex encoded");
    cache.lock().unwrap().insert(id, blob.clone());
    blob
}
/// Compile the blob of `contract_name` found in given `source_code`.
///
/// Results are memoized per process in `PVM_BLOB_CACHE`, keyed by the source,
/// contract name, optimizer flag and middle-end level. Panics when the source
/// does not compile or does not contain the contract.
pub fn compile_blob_with_options(
    contract_name: &str,
    source_code: &str,
    solc_optimizer_enabled: bool,
    optimizer_settings: revive_llvm_context::OptimizerSettings,
) -> Vec<u8> {
    let id = CachedBlob {
        contract_name: contract_name.to_owned(),
        solidity: source_code.to_owned(),
        solc_optimizer_enabled,
        opt: optimizer_settings.middle_end_as_string(),
    };
    if let Some(blob) = PVM_BLOB_CACHE.lock().unwrap().get(&id) {
        return blob.clone();
    }
    let file_name = "contract.sol";
    let contracts = build_solidity_with_options(
        [(file_name.into(), source_code.into())].into(),
        Default::default(),
        None,
        optimizer_settings,
        solc_optimizer_enabled,
    )
    .expect("source should compile")
    .contracts
    .expect("source should contain at least one contract");
    let bytecode = contracts[file_name][contract_name]
        .evm
        .as_ref()
        .expect("source should produce EVM output")
        .bytecode
        .as_ref()
        .expect("source should produce assembly text")
        .object
        .as_str();
    let blob = hex::decode(bytecode).expect("hex encoding should always be valid");
    PVM_BLOB_CACHE.lock().unwrap().insert(id, blob.clone());
    blob
}
@@ -0,0 +1,4 @@
// Jest configuration for the resolc CLI test-suite:
// compile TypeScript specs with ts-jest and run them in a Node environment.
module.exports = {
  preset: "ts-jest",
  testEnvironment: "node",
};
@@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="jest tests" tests="27" failures="0" errors="0" time="2.146">
<testsuite name="Run with --yul by default" errors="0" failures="0" skipped="1" timestamp="2024-10-24T17:08:50" time="1.508" tests="6">
<testcase classname="Run with --yul by default Valid command exit code = 0" name="Run with --yul by default Valid command exit code = 0" time="0.003">
</testcase>
<testcase classname="Run with --yul by default --yul output is presented" name="Run with --yul by default --yul output is presented" time="0">
</testcase>
<testcase classname="Run with --yul by default solc exit code == resolc exit code" name="Run with --yul by default solc exit code == resolc exit code" time="0">
<skipped/>
</testcase>
<testcase classname="Run with --yul by default run invalid: resolc --yul" name="Run with --yul by default run invalid: resolc --yul" time="0.001">
</testcase>
<testcase classname="Run with --yul by default Invalid command exit code = 1" name="Run with --yul by default Invalid command exit code = 1" time="0">
</testcase>
<testcase classname="Run with --yul by default Invalid solc exit code == Invalid resolc exit code" name="Run with --yul by default Invalid solc exit code == Invalid resolc exit code" time="0.041">
</testcase>
</testsuite>
<testsuite name="Run with --asm by default" errors="0" failures="0" skipped="0" timestamp="2024-10-24T17:08:50" time="1.512" tests="6">
<testcase classname="Run with --asm by default Valid command exit code = 0" name="Run with --asm by default Valid command exit code = 0" time="0.002">
</testcase>
<testcase classname="Run with --asm by default --asm output is presented" name="Run with --asm by default --asm output is presented" time="0.001">
</testcase>
<testcase classname="Run with --asm by default solc exit code == resolc exit code" name="Run with --asm by default solc exit code == resolc exit code" time="0.044">
</testcase>
<testcase classname="Run with --asm by default run invalid: resolc --asm" name="Run with --asm by default run invalid: resolc --asm" time="0">
</testcase>
<testcase classname="Run with --asm by default Invalid command exit code = 1" name="Run with --asm by default Invalid command exit code = 1" time="0.001">
</testcase>
<testcase classname="Run with --asm by default Invalid solc exit code == Invalid resolc exit code" name="Run with --asm by default Invalid solc exit code == Invalid resolc exit code" time="0.04">
</testcase>
</testsuite>
<testsuite name="Run resolc without any options" errors="0" failures="0" skipped="2" timestamp="2024-10-24T17:08:50" time="2.016" tests="15">
<testcase classname="Run resolc without any options Info with help is presented" name="Run resolc without any options Info with help is presented" time="0.002">
</testcase>
<testcase classname="Run resolc without any options Exit code = 1" name="Run resolc without any options Exit code = 1" time="0">
</testcase>
<testcase classname="Run resolc without any options solc exit code == resolc exit code" name="Run resolc without any options solc exit code == resolc exit code" time="0.044">
</testcase>
<testcase classname="Default run a command from the help Compiler run successful" name="Default run a command from the help Compiler run successful" time="0">
</testcase>
<testcase classname="Default run a command from the help Exit code = 0" name="Default run a command from the help Exit code = 0" time="0.001">
</testcase>
<testcase classname="Default run a command from the help Output dir is created" name="Default run a command from the help Output dir is created" time="0">
</testcase>
<testcase classname="Default run a command from the help Output file is created" name="Default run a command from the help Output file is created" time="0">
<skipped/>
</testcase>
<testcase classname="Default run a command from the help the output file is not empty" name="Default run a command from the help the output file is not empty" time="0">
</testcase>
<testcase classname="Default run a command from the help No &apos;Error&apos;/&apos;Warning&apos;/&apos;Fail&apos; in the output" name="Default run a command from the help No &apos;Error&apos;/&apos;Warning&apos;/&apos;Fail&apos; in the output" time="0">
</testcase>
<testcase classname="Default run a command from the help Compiler run successful" name="Default run a command from the help Compiler run successful" time="0.001">
</testcase>
<testcase classname="Default run a command from the help Exit code = 0" name="Default run a command from the help Exit code = 0" time="0">
</testcase>
<testcase classname="Default run a command from the help Output dir is created" name="Default run a command from the help Output dir is created" time="0">
</testcase>
<testcase classname="Default run a command from the help Output files are created" name="Default run a command from the help Output files are created" time="0">
<skipped/>
</testcase>
<testcase classname="Default run a command from the help the output files are not empty" name="Default run a command from the help the output files are not empty" time="0.003">
</testcase>
<testcase classname="Default run a command from the help No &apos;Error&apos;/&apos;Warning&apos;/&apos;Fail&apos; in the output" name="Default run a command from the help No &apos;Error&apos;/&apos;Warning&apos;/&apos;Fail&apos; in the output" time="0">
</testcase>
</testsuite>
</testsuites>
File diff suppressed because it is too large Load Diff
@@ -0,0 +1,26 @@
{
"name": "cli-tests",
"version": "1.0.0",
"title": "resolc CLI Tests",
"description": "Auto tests for verifying resolc CLI",
"repository": "https://github.com/paritytech/revive",
"main": "index.js",
"private": true,
"scripts": {
"test": "npx jest --verbose --testPathPattern="
},
"keywords": [],
"author": "Matter Labs",
"contributors": [
"cyrill@parity.io"
],
"license": "MIT",
"devDependencies": {
"@types/jest": "^29.5.14",
"@types/shelljs": "^0.8.15",
"jest": "^29.7.0",
"shelljs": "^0.8.5",
"ts-jest": "^29.2.5",
"typescript": "^5.7.3"
}
}
File diff suppressed because one or more lines are too long
@@ -0,0 +1,3 @@
// SPDX-License-Identifier: GPL-3.0
pragma solidity >=0.0;
// Minimal empty contract fixture used by the CLI tests.
contract C {}
@@ -0,0 +1,54 @@
// Yul fixture used by the CLI tests: a contract with two value-returning
// external functions.
object "Test" {
    code {
        {
            // Deploy code: reject value transfers, then copy the runtime
            // object into memory and return it.
            mstore(64, 128)
            if callvalue() { revert(0, 0) }
            let _1 := datasize("Test_deployed")
            codecopy(0, dataoffset("Test_deployed"), _1)
            return(0, _1)
        }
    }
    object "Test_deployed" {
        code {
            {
                // Runtime code: dispatch on the 4-byte function selector.
                mstore(64, 128)
                if iszero(lt(calldatasize(), 4))
                {
                    let _1 := 0
                    switch shr(224, calldataload(_1))
                    // Returns the constant 42 (0x2a).
                    case 0x3df4ddf4 {
                        if callvalue() { revert(_1, _1) }
                        if slt(add(calldatasize(), not(3)), _1) { revert(_1, _1) }
                        let memPos := allocate_memory(_1)
                        mstore(memPos, 0x2a)
                        return(memPos, 32)
                    }
                    // Returns the constant 99 (0x63), ABI-encoded.
                    case 0x5a8ac02d {
                        if callvalue() { revert(_1, _1) }
                        if slt(add(calldatasize(), not(3)), _1) { revert(_1, _1) }
                        let memPos_1 := allocate_memory(_1)
                        return(memPos_1, sub(abi_encode_uint256(memPos_1, 0x63), memPos_1))
                    }
                }
                // Unknown selector or short calldata.
                revert(0, 0)
            }
            // Writes `value0` at `headStart` and returns the new tail pointer.
            function abi_encode_uint256(headStart, value0) -> tail
            {
                tail := add(headStart, 32)
                mstore(headStart, value0)
            }
            // Bumps the free-memory pointer by `size` rounded up to 32 bytes,
            // reverting with a panic (0x41) on overflow.
            function allocate_memory(size) -> memPtr
            {
                memPtr := mload(64)
                let newFreePtr := add(memPtr, and(add(size, 31), not(31)))
                if or(gt(newFreePtr, 0xffffffffffffffff), lt(newFreePtr, memPtr))
                {
                    mstore(0, shl(224, 0x4e487b71))
                    mstore(4, 0x41)
                    revert(0, 0x24)
                }
                mstore(64, newFreePtr)
            }
        }
    }
}
@@ -0,0 +1,47 @@
import * as path from 'path'
// Artifact locations and file-name fragments shared by the CLI tests.
const outputDir = 'artifacts'
const binExtension = ':C.pvm'
const asmExtension = ':C.pvmasm'
const llvmExtension = '.ll'
const contractSolFilename = 'contract.sol'
const contractYulFilename = 'contract.yul'
const contractOptimizedLLVMFilename = `${contractSolFilename}.C.optimized`
const contractUnoptimizedLLVMFilename = `${contractSolFilename}.C.unoptimized`
const pathToOutputDir = path.join(__dirname, '..', outputDir)
const pathToContracts = path.join(__dirname, '..', 'src', 'contracts')
const pathToBasicYulContract = path.join(pathToContracts, 'yul', contractYulFilename)
const pathToBasicSolContract = path.join(pathToContracts, 'solidity', contractSolFilename)
const pathToSolBinOutputFile = path.join(pathToOutputDir, contractSolFilename + binExtension)
const pathToSolAsmOutputFile = path.join(pathToOutputDir, contractSolFilename + asmExtension)
export const paths = {
  outputDir,
  binExtension,
  asmExtension,
  llvmExtension,
  contractSolFilename,
  contractYulFilename,
  contractOptimizedLLVMFilename,
  contractUnoptimizedLLVMFilename,
  pathToOutputDir,
  pathToContracts,
  pathToBasicSolContract,
  pathToBasicYulContract,
  pathToSolBinOutputFile,
  pathToSolAsmOutputFile,
}
@@ -0,0 +1,51 @@
import * as shell from 'shelljs'
import * as fs from 'fs'
import { spawnSync } from 'child_process'
interface CommandResult {
output: string
exitCode: number
}
// Runs `command` synchronously and captures its exit code and combined
// stdout/stderr text.
//
// When `stdin` is provided the command is run through `spawnSync` so the
// payload can be piped to the child; shelljs' `exec` has no stdin support.
export const executeCommand = (
  command: string,
  stdin?: string
): CommandResult => {
  if (stdin) {
    const process = spawnSync(command, [], {
      input: stdin,
      shell: true,
      encoding: 'utf8',
      // Standard-JSON outputs can be large; raise the capture limit to 30 MiB.
      maxBuffer: 30 * 1024 * 1024,
    })
    return {
      // NOTE(review): `status || 0` maps a null status (child killed by a
      // signal) to success — confirm that is intended.
      exitCode: process.status || 0,
      // Prefer stdout; fall back to stderr so error text is still visible.
      output: (process.stdout || process.stderr || '').toString(),
    }
  }
  // No stdin payload: run via shelljs, silently (no echo to the console).
  const result = shell.exec(command, { silent: true, async: false })
  return {
    exitCode: result.code,
    output: result.stdout || result.stderr || '',
  }
}
// True when `folder` exists and is a directory (shelljs `test -d`).
export const isFolderExist = (folder: string): boolean =>
  shell.test('-d', folder)
/**
 * True when a file named `fileName + fileExtension` exists in `pathToFileDir`.
 *
 * Compares against each directory entry exactly. The previous implementation
 * ran `includes` over the concatenated `ls` stdout, which could false-positive
 * when the expected name was a substring of another entry (or spanned a line
 * break in the joined listing).
 */
export const isFileExist = (
  pathToFileDir: string,
  fileName: string,
  fileExtension: string
): boolean => {
  const expected = fileName + fileExtension
  // shell.ls returns an array-like of entry names; match whole names only.
  return shell.ls(pathToFileDir).some((entry) => entry === expected)
}
/**
 * True when `file` exists and has zero length.
 *
 * Fixes a missing return path: the original returned `undefined` for a
 * nonexistent file, violating the declared `boolean` return type. A missing
 * file now reports `false` (it is not an existing empty file), which keeps
 * the falsy result callers already rely on.
 */
export const isFileEmpty = (file: string): boolean => {
  if (fs.existsSync(file)) {
    return fs.readFileSync(file).length === 0
  }
  return false
}
@@ -0,0 +1,44 @@
import { executeCommand } from '../src/helper'
import { paths } from '../src/entities'
//id1746
describe('Run with --asm by default', () => {
  // Valid run: compile the basic Solidity contract with assembly output.
  const command = `resolc ${paths.pathToBasicSolContract} --asm`
  const result = executeCommand(command)
  // Invalid run: --asm with no input source at all.
  const commandInvalid = 'resolc --asm'
  const resultInvalid = executeCommand(commandInvalid)
  it('Valid command exit code = 0', () => {
    expect(result.exitCode).toBe(0)
  })
  it('--asm output is presented', () => {
    // The PolkaVM assembly is expected to mention these entry points.
    const expectedPatterns = [/(deploy)/i, /(call)/i, /(seal_return)/i]
    for (const pattern of expectedPatterns) {
      expect(result.output).toMatch(pattern)
    }
  })
  it('solc exit code == resolc exit code', () => {
    // resolc is a solc driver; its exit status should mirror solc's.
    const command = `solc ${paths.pathToBasicSolContract} --asm`
    const solcResult = executeCommand(command)
    expect(solcResult.exitCode).toBe(result.exitCode)
  })
  it('run invalid: resolc --asm', () => {
    expect(resultInvalid.output).toMatch(
      /(No input sources specified|Compilation aborted)/i
    )
  })
  it('Invalid command exit code = 1', () => {
    expect(resultInvalid.exitCode).toBe(1)
  })
  it('Invalid solc exit code == Invalid resolc exit code', () => {
    const command = 'solc --asm'
    const solcResult = executeCommand(command)
    expect(solcResult.exitCode).toBe(resultInvalid.exitCode)
  })
})
@@ -0,0 +1,241 @@
import {
executeCommand,
isFolderExist,
isFileExist,
isFileEmpty,
} from '../src/helper'
import { paths } from '../src/entities'
import * as shell from 'shelljs'
import * as path from 'path'
//id1762
describe('Run resolc without any options', () => {
  // With no arguments the compiler prints usage help and exits non-zero.
  const command = 'resolc'
  const result = executeCommand(command)
  it('Info with help is presented', () => {
    expect(result.output).toMatch(/(Usage: resolc)/i)
  })
  it('Exit code = 1', () => {
    expect(result.exitCode).toBe(1)
  })
  it('solc exit code == resolc exit code', () => {
    const command = 'solc'
    const solcResult = executeCommand(command)
    expect(solcResult.exitCode).toBe(result.exitCode)
  })
})
//#1713
describe('Default run a command from the help', () => {
  // --bin only run; asserts the artifact directory and bin file are produced.
  const command = `resolc ${paths.pathToBasicSolContract} --overwrite -O3 --bin --output-dir "${paths.pathToOutputDir}"` // potential issue on resolc with full path on Windows cmd
  const result = executeCommand(command)
  it('Compiler run successful', () => {
    expect(result.output).toMatch(/(Compiler run successful.)/i)
  })
  it('Exit code = 0', () => {
    expect(result.exitCode).toBe(0)
  })
  it('Output dir is created', () => {
    expect(isFolderExist(paths.pathToOutputDir)).toBe(true)
  })
  // xit: skipped due to a known issue on Windows.
  xit('Output file is created', () => {
    // a bug on windows
    expect(
      isFileExist(
        paths.pathToOutputDir,
        paths.contractSolFilename,
        paths.binExtension
      )
    ).toBe(true)
  })
  it('the output file is not empty', () => {
    expect(isFileEmpty(paths.pathToSolBinOutputFile)).toBe(false)
  })
  it("No 'Error'/'Warning'/'Fail' in the output", () => {
    expect(result.output).not.toMatch(/([Ee]rror|[Ww]arning|[Ff]ail)/i)
  })
})
//#1818
// NOTE(review): same describe title as the --bin-only suite above — consider
// distinguishing the names so jest reports are unambiguous.
describe('Default run a command from the help', () => {
  // --bin --asm run; asserts both artifacts are produced.
  const command = `resolc ${paths.pathToBasicSolContract} --overwrite -O3 --bin --asm --output-dir "${paths.pathToOutputDir}"` // potential issue on resolc with full path on Windows cmd
  const result = executeCommand(command)
  it('Compiler run successful', () => {
    expect(result.output).toMatch(/(Compiler run successful.)/i)
  })
  it('Exit code = 0', () => {
    expect(result.exitCode).toBe(0)
  })
  it('Output dir is created', () => {
    expect(isFolderExist(paths.pathToOutputDir)).toBe(true)
  })
  // xit: skipped due to a known issue on Windows.
  xit('Output files are created', () => {
    // a bug on windows
    expect(
      isFileExist(
        paths.pathToOutputDir,
        paths.contractSolFilename,
        paths.binExtension
      )
    ).toBe(true)
    expect(
      isFileExist(
        paths.pathToOutputDir,
        paths.contractSolFilename,
        paths.asmExtension
      )
    ).toBe(true)
  })
  it('the output files are not empty', () => {
    expect(isFileEmpty(paths.pathToSolBinOutputFile)).toBe(false)
    expect(isFileEmpty(paths.pathToSolAsmOutputFile)).toBe(false)
  })
  it("No 'Error'/'Warning'/'Fail' in the output", () => {
    expect(result.output).not.toMatch(/([Ee]rror|[Ww]arning|[Ff]ail)/i)
  })
})
describe('Run resolc with source debug information', () => {
  // Exercised with and without the solc optimizer; `-g` adds debug info.
  const commands = [
    `resolc -g ${paths.pathToBasicSolContract} --overwrite --bin --asm --output-dir "${paths.pathToOutputDir}"`,
    `resolc --disable-solc-optimizer -g ${paths.pathToBasicSolContract} --overwrite --bin --asm --output-dir "${paths.pathToOutputDir}"`,
  ] // potential issue on resolc with full path on Windows cmd`;
  // `for...of` instead of `for (var idx in ...)`: for-in iterates string
  // keys and the function-scoped `var` leaked `idx` out of the loop.
  for (const command of commands) {
    const result = executeCommand(command)
    it('Compiler run successful', () => {
      expect(result.output).toMatch(/(Compiler run successful.)/i)
    })
    it('Exit code = 0', () => {
      expect(result.exitCode).toBe(0)
    })
    it('Output dir is created', () => {
      expect(isFolderExist(paths.pathToOutputDir)).toBe(true)
    })
    it('Output files are created', () => {
      // a bug on windows
      expect(
        isFileExist(
          paths.pathToOutputDir,
          paths.contractSolFilename,
          paths.binExtension
        )
      ).toBe(true)
      expect(
        isFileExist(
          paths.pathToOutputDir,
          paths.contractSolFilename,
          paths.asmExtension
        )
      ).toBe(true)
    })
    it('the output files are not empty', () => {
      expect(isFileEmpty(paths.pathToSolBinOutputFile)).toBe(false)
      expect(isFileEmpty(paths.pathToSolAsmOutputFile)).toBe(false)
    })
    it("No 'Error'/'Fail' in the output", () => {
      expect(result.output).not.toMatch(/([Ee]rror|[Ff]ail)/i)
    })
  }
})
describe('Run resolc with source debug information, check LLVM debug-info', () => {
  // `--debug-output-dir` dumps the optimized/unoptimized LLVM IR artifacts.
  const commands = [
    `resolc -g ${paths.pathToBasicSolContract} --overwrite --debug-output-dir="${paths.pathToOutputDir}"`,
    `resolc -g --disable-solc-optimizer ${paths.pathToBasicSolContract} --overwrite --debug-output-dir="${paths.pathToOutputDir}"`,
  ] // potential issue on resolc with full path on Windows cmd`;
  // `for...of` instead of `for (var idx in ...)`: for-in iterates string
  // keys and the function-scoped `var` leaked `idx` out of the loop.
  for (const command of commands) {
    const result = executeCommand(command)
    it('Compiler run successful', () => {
      expect(result.output).toMatch(/(Compiler run successful.)/i)
    })
    it('Exit code = 0', () => {
      expect(result.exitCode).toBe(0)
    })
    it('Output dir is created', () => {
      expect(isFolderExist(paths.pathToOutputDir)).toBe(true)
    })
    it('Output files are created', () => {
      // a bug on windows
      expect(
        isFileExist(
          paths.pathToOutputDir,
          paths.contractOptimizedLLVMFilename,
          paths.llvmExtension
        )
      ).toBe(true)
      expect(
        isFileExist(
          paths.pathToOutputDir,
          paths.contractUnoptimizedLLVMFilename,
          paths.llvmExtension
        )
      ).toBe(true)
    })
    it('the output files are not empty', () => {
      // NOTE(review): these check the bin/asm files, which this run (LLVM
      // debug output only) does not produce — they pass only because earlier
      // suites created them. Confirm whether the LLVM files were intended.
      expect(isFileEmpty(paths.pathToSolBinOutputFile)).toBe(false)
      expect(isFileEmpty(paths.pathToSolAsmOutputFile)).toBe(false)
    })
    it("No 'Error'/'Fail' in the output", () => {
      expect(result.output).not.toMatch(/([Ee]rror|[Ff]ail)/i)
    })
  }
})
describe('Standard JSON compilation with path options', () => {
  // Materialize the standard-JSON input's sources as real files in a temp
  // directory so --base-path/--include-path/--allow-paths can resolve them.
  const contractsDir = path.join(shell.tempdir(), 'contracts-test')
  const inputFile = path.join(__dirname, '..', 'src/contracts/compiled/1.json')
  beforeAll(() => {
    shell.mkdir('-p', contractsDir)
    const input = JSON.parse(shell.cat(inputFile).toString())
    Object.entries(input.sources).forEach(
      ([sourcePath, source]: [string, any]) => {
        const filePath = path.join(contractsDir, sourcePath)
        shell.mkdir('-p', path.dirname(filePath))
        shell.ShellString(source.content).to(filePath)
      }
    )
  })
  afterAll(() => {
    shell.rm('-rf', contractsDir)
  })
  describe('Output with all path options', () => {
    let result: { exitCode: number; output: string }
    beforeAll(() => {
      const tempInputFile = path.join(contractsDir, 'temp-input.json')
      shell.cp(inputFile, tempInputFile)
      const inputContent = shell.cat(inputFile).toString()
      // The standard-JSON input is piped to resolc via stdin.
      const command = `resolc --standard-json --base-path "${contractsDir}" --include-path "${contractsDir}" --allow-paths "${contractsDir}"`
      result = executeCommand(command, inputContent)
      shell.rm(tempInputFile)
    })
    it('Compiler run successful without emiting warnings', () => {
      // Warnings are tolerated; anything of another error type fails.
      const parsedResults = JSON.parse(result.output)
      expect(
        parsedResults.errors.filter(
          (error: { type: string }) => error.type != 'Warning'
        )
      ).toEqual([])
    })
  })
})
@@ -0,0 +1,39 @@
import { executeCommand } from '../src/helper'
import { paths } from '../src/entities'
//id1743
describe('Run with --yul by default', () => {
  // Valid run: compile the basic Yul contract in Yul mode.
  const command = `resolc ${paths.pathToBasicYulContract} --yul`
  const result = executeCommand(command)
  // Invalid run: --yul without an input file.
  const commandInvalid = 'resolc --yul'
  const resultInvalid = executeCommand(commandInvalid)
  it('Valid command exit code = 0', () => {
    expect(result.exitCode).toBe(0)
  })
  it('--yul output is presented', () => {
    // No output flags given, so only the status lines are expected.
    expect(result.output).toMatch(/(Compiler run successful)/i)
    expect(result.output).toMatch(/(No output requested)/i)
  })
  // xit: skipped; solc behaves differently on this input.
  xit('solc exit code == resolc exit code', () => {
    // unknown solc issue for datatype of the contract
    const command = `solc ${paths.pathToBasicSolContract} --yul`
    const solcResult = executeCommand(command)
    expect(solcResult.exitCode).toBe(result.exitCode)
  })
  it('run invalid: resolc --yul', () => {
    expect(resultInvalid.output).toMatch(/(The input file is missing)/i)
  })
  it('Invalid command exit code = 1', () => {
    expect(resultInvalid.exitCode).toBe(1)
  })
  it('Invalid solc exit code == Invalid resolc exit code', () => {
    const command = 'solc --yul'
    const solcResult = executeCommand(command)
    expect(solcResult.exitCode).toBe(resultInvalid.exitCode)
  })
})
@@ -0,0 +1,7 @@
{
"compilerOptions": {
"target": "ES6",
"module": "CommonJS",
    "outDir": "./dist"
  }
}
@@ -0,0 +1,188 @@
import { executeCommand } from '../src/helper'
import { paths } from '../src/entities'
describe('Set of --combined-json tests', () => {
const zksolcCommand = 'zksolc'
const solcCommand = 'solc'
const json_args: string[] = [
`abi`,
`hashes`,
`metadata`,
`devdoc`,
`userdoc`,
`storage-layout`,
`ast`,
`asm`,
`bin`,
`bin-runtime`,
]
//id1742:I
describe(`Run ${zksolcCommand} with just --combined-json`, () => {
const args = [`--combined-json`]
const result = executeCommand(zksolcCommand, args)
it('Valid command exit code = 1', () => {
expect(result.exitCode).toBe(1)
})
it('--combined-json error is presented', () => {
expect(result.output).toMatch(/(requires a value but none was supplied)/i)
})
it('solc exit code == zksolc exit code', () => {
const solcResult = executeCommand(solcCommand, args)
expect(solcResult.exitCode).toBe(result.exitCode)
})
})
//id1742:II
describe(`Run ${zksolcCommand} with Sol contract and --combined-json`, () => {
const args = [`${paths.pathToBasicSolContract}`, `--combined-json`]
const result = executeCommand(zksolcCommand, args)
it('Valid command exit code = 1', () => {
expect(result.exitCode).toBe(1)
})
it('--combined-json error is presented', () => {
expect(result.output).toMatch(/(requires a value but none was supplied)/i)
})
it('solc exit code == zksolc exit code', () => {
const solcResult = executeCommand(solcCommand, args)
expect(solcResult.exitCode).toBe(result.exitCode)
})
})
//id1742:III
for (let i = 0; i < json_args.length; i++) {
describe(`Run ${zksolcCommand} with Sol, --combined-json and ARG: ${json_args[i]}`, () => {
const args = [
`${paths.pathToBasicSolContract}`,
`--combined-json`,
`${json_args[i]}`,
]
const result = executeCommand(zksolcCommand, args)
it('Valid command exit code = 0', () => {
expect(result.exitCode).toBe(0)
})
it('--combined-json error is presented', () => {
expect(result.output).toMatch(/(contracts)/i)
})
it('solc exit code == zksolc exit code', () => {
const solcResult = executeCommand(solcCommand, args)
expect(solcResult.exitCode).toBe(result.exitCode)
})
})
}
//id1829:I
for (let i = 0; i < json_args.length; i++) {
describe(`Run ${zksolcCommand} with Sol, --combined-json and wrong ARG: --${json_args[i]}`, () => {
const args = [
`${paths.pathToBasicSolContract}`,
`--combined-json`,
`--${json_args[i]}`,
]
const result = executeCommand(zksolcCommand, args)
it('Valid command exit code = 1', () => {
expect(result.exitCode).toBe(1)
})
it('--combined-json error is presented', () => {
expect(result.output).toMatch(/(Invalid option|error)/i)
})
it('solc exit code == zksolc exit code', () => {
const solcResult = executeCommand(solcCommand, args)
expect(solcResult.exitCode).toBe(result.exitCode)
})
})
}
//id1829:II
for (let i = 0; i < json_args.length; i++) {
describe(`Run ${zksolcCommand} with Sol, --combined-json and multiple ARG: ${json_args[i]} ${json_args[i]}`, () => {
const args = [
`${paths.pathToBasicSolContract}`,
`--combined-json`,
`${json_args[i]}`,
`${json_args[i]}`,
]
const result = executeCommand(zksolcCommand, args)
xit('Valid command exit code = 1', () => {
expect(result.exitCode).toBe(1)
})
it('--combined-json error is presented', () => {
expect(result.output).toMatch(
/(No such file or directory|cannot find the file specified)/i
) // Hopefully we should have more precise message here!
})
xit('solc exit code == zksolc exit code', () => {
const solcResult = executeCommand(solcCommand, args)
expect(solcResult.exitCode).toBe(result.exitCode)
})
})
}
//id1829:III
for (let i = 0; i < json_args.length; i++) {
describe(`Run ${zksolcCommand} with Sol, and multiple (--combined-json ${json_args[i]})`, () => {
const args = [
`${paths.pathToBasicSolContract}`,
`--combined-json`,
`${json_args[i]}`,
`--combined-json`,
`${json_args[i]}`,
]
const result = executeCommand(zksolcCommand, args)
it('Valid command exit code = 1', () => {
expect(result.exitCode).toBe(1)
})
it('--combined-json error is presented', () => {
expect(result.output).toMatch(/(cannot be used multiple times)/i)
})
it('solc exit code == zksolc exit code', () => {
const solcResult = executeCommand(solcCommand, args)
expect(solcResult.exitCode).toBe(result.exitCode)
})
})
}
//id1830
for (let i = 0; i < json_args.length; i++) {
describe(`Run ${zksolcCommand} with Yul, and --combined-json ${json_args[i]}`, () => {
const args = [
`${paths.pathToBasicYulContract}`,
`--combined-json`,
`${json_args[i]}`,
]
const result = executeCommand(zksolcCommand, args)
it('Valid command exit code = 1', () => {
expect(result.exitCode).toBe(1)
})
it('--combined-json error is presented', () => {
expect(result.output).toMatch(/(ParserError: Expected identifier)/i)
})
asd
it('solc exit code == zksolc exit code', () => {
const solcResult = executeCommand(solcCommand, args)
expect(solcResult.exitCode).toBe(result.exitCode)
})
})
}
})
@@ -0,0 +1,88 @@
//! The Solidity compiler unit tests for factory dependencies.
#![cfg(test)]
use std::collections::BTreeMap;
/// Deployer fixture: `Main` instantiates `Callable` with `new`, which makes
/// `Callable` a factory dependency of `Main`.
pub const MAIN_CODE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity >=0.4.16;
import "./callable.sol";
contract Main {
    function main() external returns(uint256) {
        Callable callable = new Callable();
        callable.set(10);
        return callable.get();
    }
}
"#;

/// Callee fixture with no factory dependencies of its own.
pub const CALLABLE_CODE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity >=0.4.16;
contract Callable {
    uint256 value;
    function set(uint256 x) external {
        value = x;
    }
    function get() external view returns(uint256) {
        return value;
    }
}
"#;

/// `main.sol:Main` must report exactly one factory dependency (the
/// `Callable` it deploys), while `callable.sol:Callable` reports none.
#[test]
fn default() {
    let mut sources = BTreeMap::new();
    sources.insert("main.sol".to_owned(), MAIN_CODE.to_owned());
    sources.insert("callable.sol".to_owned(), CALLABLE_CODE.to_owned());
    let output = super::build_solidity(
        sources,
        BTreeMap::new(),
        None,
        revive_llvm_context::OptimizerSettings::cycles(),
    )
    .expect("Build failure");
    assert_eq!(
        output
            .contracts
            .as_ref()
            .expect("Missing field `contracts`")
            .get("main.sol")
            .expect("Missing file `main.sol`")
            .get("Main")
            .expect("Missing contract `main.sol:Main`")
            .factory_dependencies
            .as_ref()
            .expect("Missing field `factory_dependencies`")
            .len(),
        1,
        "Expected 1 factory dependency in `main.sol:Main`"
    );
    assert_eq!(
        output
            .contracts
            .as_ref()
            .expect("Missing field `contracts`")
            .get("callable.sol")
            .expect("Missing file `callable.sol`")
            .get("Callable")
            .expect("Missing contract `callable.sol:Callable`")
            .factory_dependencies
            .as_ref()
            .expect("Missing field `factory_dependencies`")
            .len(),
        0,
        "Expected 0 factory dependencies in `callable.sol:Callable`"
    );
}
+45
View File
@@ -0,0 +1,45 @@
//! The Solidity compiler unit tests for IR artifacts.
//! The tests check if the IR artifacts are kept in the final output.
#![cfg(test)]
use std::collections::BTreeMap;
/// The optimized IR artifact must be kept in the final compiler output.
/// NOTE(review): the test is named `yul` but compiles Solidity and checks
/// the `ir_optimized` field — presumably the optimized Yul IR; confirm.
#[test]
fn yul() {
    let source_code = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract Test {
    function main() public view returns (uint) {
        return 42;
    }
}
    "#;
    let mut sources = BTreeMap::new();
    sources.insert("test.sol".to_owned(), source_code.to_owned());
    let build = super::build_solidity(
        sources,
        BTreeMap::new(),
        None,
        revive_llvm_context::OptimizerSettings::cycles(),
    )
    .expect("Test failure");
    assert!(
        build
            .contracts
            .as_ref()
            .expect("Always exists")
            .get("test.sol")
            .expect("Always exists")
            .get("Test")
            .expect("Always exists")
            .ir_optimized
            .is_some(),
        "Yul IR is missing"
    );
}
+90
View File
@@ -0,0 +1,90 @@
//! The Solidity compiler unit tests for libraries.
#![cfg(test)]
use std::collections::BTreeMap;
/// Fixture: a library with an external method plus a contract that calls it;
/// the external call is what makes `SimpleLibrary` a link-time dependency.
pub const LIBRARY_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
// A simple library with at least one external method
library SimpleLibrary {
    function add(uint256 a, uint256 b) external pure returns (uint256) {
        return a + b;
    }
}
// A contract calling that library
contract SimpleContract {
    using SimpleLibrary for uint256;
    function performAlgorithm(uint256 a, uint256 b) public pure returns (uint256) {
        uint sum = 0;
        if (a > b) {
            while (true) {
                sum += a.add(b);
            }
        }
        return sum;
    }
}
"#;

/// Without a provided library address, `test.sol:SimpleLibrary` must be
/// reported among the missing libraries of `SimpleContract`.
#[test]
fn not_specified() {
    let mut sources = BTreeMap::new();
    sources.insert("test.sol".to_owned(), LIBRARY_TEST_SOURCE.to_owned());
    let output =
        super::build_solidity_and_detect_missing_libraries(sources.clone(), BTreeMap::new())
            .expect("Test failure");
    assert!(
        output
            .contracts
            .as_ref()
            .expect("Always exists")
            .get("test.sol")
            .expect("Always exists")
            .get("SimpleContract")
            .expect("Always exists")
            .missing_libraries
            .as_ref()
            .expect("Always exists")
            .contains("test.sol:SimpleLibrary"),
        "Missing library not detected"
    );
}

/// With the library address supplied, no missing libraries remain.
#[test]
fn specified() {
    let mut sources = BTreeMap::new();
    sources.insert("test.sol".to_owned(), LIBRARY_TEST_SOURCE.to_owned());
    // Map `test.sol` -> `SimpleLibrary` -> a placeholder address.
    let mut libraries = BTreeMap::new();
    libraries
        .entry("test.sol".to_string())
        .or_insert_with(BTreeMap::new)
        .entry("SimpleLibrary".to_string())
        .or_insert("0x00000000000000000000000000000000DEADBEEF".to_string());
    let output =
        super::build_solidity_and_detect_missing_libraries(sources.clone(), libraries.clone())
            .expect("Test failure");
    assert!(
        output
            .contracts
            .as_ref()
            .expect("Always exists")
            .get("test.sol")
            .expect("Always exists")
            .get("SimpleContract")
            .expect("Always exists")
            .missing_libraries
            .as_ref()
            .cloned()
            .unwrap_or_default()
            .is_empty(),
        "The list of missing libraries must be empty"
    );
}
+251
View File
@@ -0,0 +1,251 @@
//! The Solidity compiler unit tests for messages.
#![cfg(test)]
use std::collections::BTreeMap;
use revive_solc_json_interface::warning::Warning;
/// Fixture that calls `ecrecover`, which should trigger a compiler warning.
pub const ECRECOVER_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;

contract ECRecoverExample {
    function recoverAddress(
        bytes32 messageHash,
        uint8 v,
        bytes32 r,
        bytes32 s
    ) public pure returns (address) {
        return ecrecover(messageHash, v, r, s);
    }
}
"#;

/// The `ecrecover` warning is emitted by default.
#[test]
fn ecrecover() {
    assert!(
        super::check_solidity_warning(
            ECRECOVER_TEST_SOURCE,
            "Warning: It looks like you are using 'ecrecover' to validate a signature of a user account.",
            BTreeMap::new(),
            false,
            None,
        ).expect("Test failure")
    );
}

/// The `ecrecover` warning is not emitted when explicitly suppressed.
#[test]
fn ecrecover_suppressed() {
    assert!(
        !super::check_solidity_warning(
            ECRECOVER_TEST_SOURCE,
            "Warning: It looks like you are using 'ecrecover' to validate a signature of a user account.",
            BTreeMap::new(),
            false,
            Some(vec![Warning::EcRecover]),
        ).expect("Test failure")
    );
}

/// Fixture using `<address payable>.send`, which should trigger a warning.
pub const SEND_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;

contract SendExample {
    address payable public recipient;
    constructor(address payable _recipient) {
        recipient = _recipient;
    }
    function forwardEther() external payable {
        bool success = recipient.send(msg.value);
        require(success, "Failed to send Ether");
    }
}
"#;

/// Shared prefix of the send/transfer warning message.
pub const BALANCE_CALLS_MESSAGE: &str =
    "Warning: It looks like you are using '<address payable>.send/transfer(<X>)'";

/// The send/transfer warning is emitted for `.send` by default.
#[test]
fn send() {
    assert!(super::check_solidity_warning(
        SEND_TEST_SOURCE,
        BALANCE_CALLS_MESSAGE,
        BTreeMap::new(),
        false,
        None,
    )
    .expect("Test failure"));
}

/// The send/transfer warning for `.send` can be suppressed.
#[test]
fn send_suppressed() {
    assert!(!super::check_solidity_warning(
        SEND_TEST_SOURCE,
        BALANCE_CALLS_MESSAGE,
        BTreeMap::new(),
        false,
        Some(vec![Warning::SendTransfer]),
    )
    .expect("Test failure"));
}

/// Fixture using `<address payable>.transfer`, which should trigger a warning.
pub const TRANSFER_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;

contract TransferExample {
    address payable public recipient;
    constructor(address payable _recipient) {
        recipient = _recipient;
    }
    function forwardEther() external payable {
        recipient.transfer(msg.value);
    }
}
"#;

/// The send/transfer warning is emitted for `.transfer` by default.
#[test]
fn transfer() {
    assert!(super::check_solidity_warning(
        TRANSFER_TEST_SOURCE,
        BALANCE_CALLS_MESSAGE,
        BTreeMap::new(),
        false,
        None,
    )
    .expect("Test failure"));
}

/// The send/transfer warning for `.transfer` can be suppressed.
#[test]
fn transfer_suppressed() {
    assert!(!super::check_solidity_warning(
        TRANSFER_TEST_SOURCE,
        BALANCE_CALLS_MESSAGE,
        BTreeMap::new(),
        false,
        Some(vec![Warning::SendTransfer]),
    )
    .expect("Test failure"));
}

/// Fixture using `extcodesize` in inline assembly.
pub const EXTCODESIZE_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;

contract ExternalCodeSize {
    function getExternalCodeSize(address target) public view returns (uint256) {
        uint256 codeSize;
        assembly {
            codeSize := extcodesize(target)
        }
        return codeSize;
    }
}
"#;

/// The `extcodesize` warning is emitted by default.
#[test]
fn extcodesize() {
    assert!(super::check_solidity_warning(
        EXTCODESIZE_TEST_SOURCE,
        "Warning: Your code or one of its dependencies uses the 'extcodesize' instruction,",
        BTreeMap::new(),
        false,
        None,
    )
    .expect("Test failure"));
}

/// The `extcodesize` warning can be suppressed.
#[test]
fn extcodesize_suppressed() {
    assert!(!super::check_solidity_warning(
        EXTCODESIZE_TEST_SOURCE,
        "Warning: Your code or one of its dependencies uses the 'extcodesize' instruction,",
        BTreeMap::new(),
        false,
        Some(vec![Warning::ExtCodeSize]),
    )
    .expect("Test failure"));
}

/// Fixture comparing `tx.origin` against `msg.sender` in Solidity.
pub const TX_ORIGIN_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;

contract TxOriginExample {
    function isOriginSender() public view returns (bool) {
        return tx.origin == msg.sender;
    }
}
"#;

/// The `tx.origin` warning is emitted by default.
#[test]
fn tx_origin() {
    assert!(super::check_solidity_warning(
        TX_ORIGIN_TEST_SOURCE,
        "Warning: You are checking for 'tx.origin' in your code, which might lead to",
        BTreeMap::new(),
        false,
        None,
    )
    .expect("Test failure"));
}

/// The `tx.origin` warning can be suppressed.
#[test]
fn tx_origin_suppressed() {
    assert!(!super::check_solidity_warning(
        TX_ORIGIN_TEST_SOURCE,
        "Warning: You are checking for 'tx.origin' in your code, which might lead to",
        BTreeMap::new(),
        false,
        Some(vec![Warning::TxOrigin]),
    )
    .expect("Test failure"));
}

/// Fixture reading the transaction origin via the `origin()` assembly opcode.
pub const TX_ORIGIN_ASSEMBLY_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;

contract TxOriginExample {
    function isOriginSender() public view returns (bool) {
        address txOrigin;
        address sender = msg.sender;
        assembly {
            txOrigin := origin() // Get the transaction origin using the 'origin' instruction
        }
        return txOrigin == sender;
    }
}
"#;

/// The `tx.origin` warning also fires for the assembly `origin()` form.
#[test]
fn tx_origin_assembly() {
    assert!(super::check_solidity_warning(
        TX_ORIGIN_ASSEMBLY_TEST_SOURCE,
        "Warning: You are checking for 'tx.origin' in your code, which might lead to",
        BTreeMap::new(),
        false,
        None,
    )
    .expect("Test failure"));
}

/// The assembly-form `tx.origin` warning can be suppressed.
#[test]
fn tx_origin_assembly_suppressed() {
    assert!(!super::check_solidity_warning(
        TX_ORIGIN_ASSEMBLY_TEST_SOURCE,
        "Warning: You are checking for 'tx.origin' in your code, which might lead to",
        BTreeMap::new(),
        false,
        Some(vec![Warning::TxOrigin]),
    )
    .expect("Test failure"));
}
+14
View File
@@ -0,0 +1,14 @@
//! The Solidity compiler unit tests.
#![cfg(test)]
mod factory_dependency;
mod ir_artifacts;
mod libraries;
mod messages;
mod optimizer;
mod remappings;
mod runtime_code;
mod unsupported_opcodes;
pub(crate) use super::test_utils::*;
+130
View File
@@ -0,0 +1,130 @@
//! The Solidity compiler unit tests for the optimizer.
#![cfg(test)]
use std::collections::BTreeMap;
/// Fixture with enough loops and arithmetic for the optimizer presets to
/// produce measurably different bytecode sizes.
pub const SOURCE_CODE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity >=0.5.0;

contract Test {
    uint8 constant ARRAY_SIZE = 40;
    uint128 constant P = 257;
    uint128 constant MODULO = 1000000007;

    function complex() public pure returns(uint64) {
        uint8[ARRAY_SIZE] memory array;
        // generate array where first half equals second
        for(uint8 i = 0; i < ARRAY_SIZE; i++) {
            array[i] = (i % (ARRAY_SIZE / 2)) * (255 / (ARRAY_SIZE / 2 - 1));
        }
        bool result = true;
        for(uint8 i = 0; i < ARRAY_SIZE/2; i++) {
            result = result && hash(array, 0, i + 1) == hash(array, ARRAY_SIZE/2, ARRAY_SIZE/2 + i + 1)
                && hash(array, i, ARRAY_SIZE/2) == hash(array, i + ARRAY_SIZE/2, ARRAY_SIZE);
        }
        if (result) {
            return 1;
        } else {
            return 0;
        }
    }

    function hash(uint8[ARRAY_SIZE] memory array, uint8 begin, uint8 end) private pure returns(uint128) {
        uint128 h = 0;
        for(uint8 i = begin; i < end; i++) {
            h = (h * P + array[i]) % MODULO;
        }
        return h;
    }
}
"#;
/// Builds the same source unoptimized, cycles-optimized, and size-optimized,
/// then asserts both optimized bytecodes are smaller than the unoptimized
/// baseline. The 16-line bytecode-size extraction chain, previously copied
/// three times, is factored into a local closure.
#[test]
fn optimizer() {
    let mut sources = BTreeMap::new();
    sources.insert("test.sol".to_owned(), SOURCE_CODE.to_owned());

    // One build per optimizer preset; the sources map is cloned per call.
    let build = |settings| {
        super::build_solidity(sources.clone(), BTreeMap::new(), None, settings)
            .expect("Build failure")
    };
    let build_unoptimized = build(revive_llvm_context::OptimizerSettings::none());
    let build_optimized_for_cycles = build(revive_llvm_context::OptimizerSettings::cycles());
    let build_optimized_for_size = build(revive_llvm_context::OptimizerSettings::size());

    // Extracts the bytecode object length of `test.sol:Test` from an output.
    let bytecode_size = |output: &_| {
        output
            .contracts
            .as_ref()
            .expect("Missing field `contracts`")
            .get("test.sol")
            .expect("Missing file `test.sol`")
            .get("Test")
            .expect("Missing contract `test.sol:Test`")
            .evm
            .as_ref()
            .expect("Missing EVM data")
            .bytecode
            .as_ref()
            .expect("Missing bytecode")
            .object
            .len()
    };
    let size_when_unoptimized = bytecode_size(&build_unoptimized);
    let size_when_optimized_for_cycles = bytecode_size(&build_optimized_for_cycles);
    let size_when_optimized_for_size = bytecode_size(&build_optimized_for_size);

    assert!(
        size_when_optimized_for_cycles < size_when_unoptimized,
        "Expected the cycles-optimized bytecode to be smaller than the unoptimized. Optimized: {}B, Unoptimized: {}B", size_when_optimized_for_cycles, size_when_unoptimized,
    );
    assert!(
        size_when_optimized_for_size < size_when_unoptimized,
        "Expected the size-optimized bytecode to be smaller than the unoptimized. Optimized: {}B, Unoptimized: {}B", size_when_optimized_for_size, size_when_unoptimized,
    );
}
+50
View File
@@ -0,0 +1,50 @@
//! The Solidity compiler unit tests for remappings.
#![cfg(test)]
use std::collections::BTreeMap;
use std::collections::BTreeSet;
/// Callee fixture, imported by the caller through a remapped path.
pub const CALLEE_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity >=0.4.16;
contract Callable {
    function f(uint a) public pure returns(uint) {
        return a * 2;
    }
}
"#;

/// Caller fixture importing via the `libraries/default/` prefix, which only
/// resolves when the remapping below is applied.
pub const CALLER_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity >=0.4.16;
import "libraries/default/callable.sol";
contract Main {
    function main(Callable callable) public returns(uint) {
        return callable.f(5);
    }
}
"#;

/// The build succeeds when `libraries/default/` is remapped to `./`.
#[test]
fn default() {
    let mut sources = BTreeMap::new();
    sources.insert("./test.sol".to_owned(), CALLER_TEST_SOURCE.to_owned());
    sources.insert("./callable.sol".to_owned(), CALLEE_TEST_SOURCE.to_owned());
    let mut remappings = BTreeSet::new();
    remappings.insert("libraries/default/=./".to_owned());
    // Only the successful build matters; no output fields are asserted.
    super::build_solidity(
        sources,
        BTreeMap::new(),
        Some(remappings),
        revive_llvm_context::OptimizerSettings::cycles(),
    )
    .expect("Test failure");
}
+33
View File
@@ -0,0 +1,33 @@
//! The Solidity compiler unit tests for runtime code.
#![cfg(test)]
use std::collections::BTreeMap;
/// Accessing `type(A).runtimeCode` must abort compilation with a panic
/// carrying the "not supported" message.
#[test]
#[should_panic(expected = "runtimeCode is not supported")]
fn default() {
    let source_code = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract A {}
contract Test {
    function main() public pure returns(bytes memory) {
        return type(A).runtimeCode;
    }
}
    "#;
    let mut sources = BTreeMap::new();
    sources.insert("test.sol".to_owned(), source_code.to_owned());
    super::build_solidity(
        sources,
        BTreeMap::new(),
        None,
        revive_llvm_context::OptimizerSettings::cycles(),
    )
    .expect("Test failure");
}
@@ -0,0 +1,166 @@
//! The Solidity compiler unit tests for unsupported opcodes.
#![cfg(test)]
use std::collections::BTreeMap;
#[test]
#[should_panic(expected = "The `CODECOPY` instruction is not supported")]
fn codecopy_yul_runtime() {
let source_code = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract FixedCodeCopy {
function copyCode() public view returns (bytes memory) {
uint256 fixedCodeSize = 64;
bytes memory code = new bytes(fixedCodeSize);
assembly {
codecopy(add(code, 0x20), 0, fixedCodeSize)
}
return code;
}
}
"#;
let mut sources = BTreeMap::new();
sources.insert("test.sol".to_owned(), source_code.to_owned());
super::build_solidity(
sources,
BTreeMap::new(),
None,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Test failure");
}
/// Solidity source invoking the EVM `CALLCODE` opcode through inline assembly.
/// Shared by tests asserting that the compiler rejects `CALLCODE`.
pub const CALLCODE_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract CallcodeTest {
function testCallcode(address target, bytes4 signature, uint256 inputValue) public returns (bool) {
bool success;
assembly {
let input := mload(0x40)
mstore(input, signature)
mstore(add(input, 0x04), inputValue)
let callResult := callcode(gas(), target, 0, input, 0x24, 0, 0)
success := and(callResult, 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF)
}
return success;
}
}
"#;
/// `CALLCODE` in inline assembly is unsupported: compilation must abort
/// with the expected panic message.
#[test]
#[should_panic(expected = "The `CALLCODE` instruction is not supported")]
fn callcode_yul() {
    let sources = BTreeMap::from([("test.sol".to_owned(), CALLCODE_TEST_SOURCE.to_owned())]);
    super::build_solidity(
        sources,
        BTreeMap::new(),
        None,
        revive_llvm_context::OptimizerSettings::cycles(),
    )
    .expect("Test failure");
}
/// The Yul `pc()` builtin is unsupported: compilation must abort with the
/// expected panic message.
#[test]
#[should_panic(expected = "The `PC` instruction is not supported")]
fn pc_yul() {
    super::build_yul(
        r#"
object "ProgramCounter" {
code {
datacopy(0, dataoffset("ProgramCounter_deployed"), datasize("ProgramCounter_deployed"))
return(0, datasize("ProgramCounter_deployed"))
}
object "ProgramCounter_deployed" {
code {
function getPC() -> programCounter {
programCounter := pc()
}
let pcValue := getPC()
sstore(0, pcValue)
}
}
}
"#,
    )
    .expect("Test failure");
}
/// Solidity source invoking the EVM `EXTCODECOPY` opcode through inline
/// assembly. Shared by tests asserting that the compiler rejects `EXTCODECOPY`.
pub const EXTCODECOPY_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract ExternalCodeCopy {
function copyExternalCode(address target, uint256 codeSize) public view returns (bytes memory) {
bytes memory code = new bytes(codeSize);
assembly {
extcodecopy(target, add(code, 0x20), 0, codeSize)
}
return code;
}
}
"#;
/// `EXTCODECOPY` in inline assembly is unsupported: compilation must abort
/// with the expected panic message.
#[test]
#[should_panic(expected = "The `EXTCODECOPY` instruction is not supported")]
fn extcodecopy_yul() {
    let sources = BTreeMap::from([("test.sol".to_owned(), EXTCODECOPY_TEST_SOURCE.to_owned())]);
    super::build_solidity(
        sources,
        BTreeMap::new(),
        None,
        revive_llvm_context::OptimizerSettings::cycles(),
    )
    .expect("Test failure");
}
/// Solidity source calling `selfdestruct`. Shared by tests asserting that
/// the compiler rejects the `SELFDESTRUCT` instruction.
pub const SELFDESTRUCT_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract MinimalDestructible {
address payable public owner;
constructor() {
owner = payable(msg.sender);
}
function destroy() public {
require(msg.sender == owner, "Only the owner can call this function.");
selfdestruct(owner);
}
}
"#;
/// `selfdestruct` is unsupported: compilation must abort with the expected
/// panic message.
#[test]
#[should_panic(expected = "The `SELFDESTRUCT` instruction is not supported")]
fn selfdestruct_yul() {
    let sources = BTreeMap::from([("test.sol".to_owned(), SELFDESTRUCT_TEST_SOURCE.to_owned())]);
    super::build_solidity(
        sources,
        BTreeMap::new(),
        None,
        revive_llvm_context::OptimizerSettings::cycles(),
    )
    .expect("Test failure");
}
+30
View File
@@ -0,0 +1,30 @@
//! The resolc compiler version.
use serde::Deserialize;
use serde::Serialize;
/// The resolc compiler version.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Version {
    /// The long version string, combining the crate version, the git commit
    /// hash, and the LLVM version (see the `Default` impl).
    pub long: String,
    /// The short `semver`, taken from the crate's `CARGO_PKG_VERSION`.
    pub default: semver::Version,
    /// The LLVM version string, as reported by the linked LLVM library.
    pub llvm: semver::Version,
}
impl Default for Version {
    /// Assembles the version from compile-time metadata: the crate version
    /// (`CARGO_PKG_VERSION`), the git commit hash (`GIT_COMMIT_HASH`), and
    /// the LLVM version reported by `inkwell`.
    fn default() -> Self {
        let (major, minor, patch) = inkwell::support::get_llvm_version();
        let llvm = semver::Version::new(major as u64, minor as u64, patch as u64);
        let default = semver::Version::parse(env!("CARGO_PKG_VERSION")).expect("Always valid");
        let long = format!(
            "{default}+commit.{commit}.llvm-{llvm}",
            commit = env!("GIT_COMMIT_HASH")
        );
        Self {
            long,
            default,
            llvm,
        }
    }
}