Emerge Yul recompiler (#1)

Provide a modified (and incomplete) version of ZKSync zksolc that can compile the most basic contracts
This commit is contained in:
Cyrill Leutwiler
2024-03-12 12:06:02 +01:00
committed by GitHub
parent d238d8f39e
commit cffa14a4d2
247 changed files with 35357 additions and 4905 deletions
@@ -0,0 +1,79 @@
//!
//! The `solc --combined-json` contract.
//!
use std::collections::BTreeMap;
use std::collections::HashSet;
use serde::Deserialize;
use serde::Serialize;
///
/// The contract.
///
/// One entry of the `solc --combined-json` output, keyed by the full contract path.
/// Every field is optional and skipped during serialization when absent;
/// NOTE(review): each field is presumably only populated when the corresponding
/// `--combined-json` flag was requested — confirm against `solc` behavior.
///
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Contract {
    /// The `solc` hashes output.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hashes: Option<BTreeMap<String, String>>,
    /// The `solc` ABI output.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub abi: Option<serde_json::Value>,
    /// The `solc` metadata output.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<String>,
    /// The `solc` developer documentation output.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub devdoc: Option<serde_json::Value>,
    /// The `solc` user documentation output.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub userdoc: Option<serde_json::Value>,
    /// The `solc` storage layout output.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub storage_layout: Option<serde_json::Value>,
    /// The `solc` AST output.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ast: Option<serde_json::Value>,
    /// The `solc` assembly output.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub asm: Option<serde_json::Value>,
    /// The `solc` hexadecimal binary output.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub bin: Option<String>,
    /// The `solc` hexadecimal binary runtime part output.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub bin_runtime: Option<String>,
    /// The factory dependencies.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub factory_deps: Option<BTreeMap<String, String>>,
    /// The missing libraries.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub missing_libraries: Option<HashSet<String>>,
}
impl Contract {
    ///
    /// Returns the signature hash of the specified contract entry.
    ///
    /// The entry is matched by prefix against the keys of the `hashes` map,
    /// and the first matching hash is parsed as a hexadecimal `u32` selector.
    ///
    /// # Panics
    /// If the hashes have not been requested in the `solc` call, or no entry matches.
    ///
    pub fn entry(&self, entry: &str) -> u32 {
        let hashes = self.hashes.as_ref().expect("Always exists");
        for (contract_entry, hash) in hashes.iter() {
            if contract_entry.starts_with(entry) {
                return u32::from_str_radix(hash.as_str(), era_compiler_common::BASE_HEXADECIMAL)
                    .expect("Test hash is always valid");
            }
        }
        panic!("Entry `{entry}` not found")
    }
}
@@ -0,0 +1,108 @@
//!
//! The `solc --combined-json` output.
//!
pub mod contract;
use std::collections::BTreeMap;
use std::fs::File;
use std::io::Write;
use std::path::Path;
use serde::Deserialize;
use serde::Serialize;
use self::contract::Contract;
///
/// The `solc --combined-json` output.
///
/// The top-level document: contracts keyed by full path, plus source metadata
/// and the compiler version strings.
///
#[derive(Debug, Serialize, Deserialize)]
pub struct CombinedJson {
    /// The contract entries.
    pub contracts: BTreeMap<String, Contract>,
    /// The list of source files.
    #[serde(rename = "sourceList")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub source_list: Option<Vec<String>>,
    /// The source code extra data, including the AST.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub sources: Option<serde_json::Value>,
    /// The `solc` compiler version.
    pub version: String,
    /// The `zksolc` compiler version.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub zk_version: Option<String>,
}
impl CombinedJson {
    ///
    /// Returns the signature hash of the specified contract and entry.
    ///
    /// The contract is looked up by path prefix; the entry hash resolution is
    /// delegated to [`Contract::entry`].
    ///
    pub fn entry(&self, path: &str, entry: &str) -> u32 {
        let contract = self
            .contracts
            .iter()
            .find(|(name, _contract)| name.starts_with(path))
            .map(|(_name, contract)| contract)
            .expect("Always exists");
        contract.entry(entry)
    }

    ///
    /// Returns the full contract path which can be found in `combined-json` output.
    ///
    /// Matches `name` against the `<file>/<basename>:<contract>` shape of the keys.
    ///
    pub fn get_full_path(&self, name: &str) -> Option<String> {
        self.contracts.keys().find_map(|path| {
            let last_slash_position = path.rfind('/')?;
            let colon_position = path.rfind(':')?;
            if &path[last_slash_position + 1..colon_position] == name {
                Some(path.to_owned())
            } else {
                None
            }
        })
    }

    ///
    /// Removes EVM artifacts to prevent their accidental usage.
    ///
    pub fn remove_evm(&mut self) {
        for contract in self.contracts.values_mut() {
            contract.bin = None;
            contract.bin_runtime = None;
        }
    }

    ///
    /// Writes the JSON to the specified directory.
    ///
    /// Refuses to clobber an existing `combined.json` unless `overwrite` is set,
    /// in which case a message is printed and `Ok(())` is returned.
    ///
    pub fn write_to_directory(
        self,
        output_directory: &Path,
        overwrite: bool,
    ) -> anyhow::Result<()> {
        let file_path =
            output_directory.join(format!("combined.{}", era_compiler_common::EXTENSION_JSON));
        if file_path.exists() && !overwrite {
            eprintln!(
                "Refusing to overwrite an existing file {file_path:?} (use --overwrite to force)."
            );
            return Ok(());
        }

        let json = serde_json::to_vec(&self).expect("Always valid");
        File::create(&file_path)
            .map_err(|error| anyhow::anyhow!("File {:?} creating error: {}", file_path, error))?
            .write_all(json.as_slice())
            .map_err(|error| anyhow::anyhow!("File {:?} writing error: {}", file_path, error))?;
        Ok(())
    }
}
+315
View File
@@ -0,0 +1,315 @@
//!
//! The Solidity compiler.
//!
pub mod combined_json;
pub mod pipeline;
pub mod standard_json;
pub mod version;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use self::combined_json::CombinedJson;
use self::pipeline::Pipeline;
use self::standard_json::input::Input as StandardJsonInput;
use self::standard_json::output::Output as StandardJsonOutput;
use self::version::Version;
///
/// The Solidity compiler.
///
/// A thin wrapper around a `solc` executable resolved from `PATH`.
///
pub struct Compiler {
    /// The binary executable name.
    pub executable: String,
    /// The lazily-initialized compiler version.
    /// Populated on the first call to `version` and reused afterwards.
    pub version: Option<Version>,
}
impl Compiler {
    /// The default executable name.
    pub const DEFAULT_EXECUTABLE_NAME: &'static str = "solc";

    /// The first version of `solc` with the support of standard JSON interface.
    pub const FIRST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 4, 12);

    /// The first version of `solc`, where Yul codegen is considered robust enough.
    pub const FIRST_YUL_VERSION: semver::Version = semver::Version::new(0, 8, 0);

    /// The first version of `solc`, where `--via-ir` codegen mode is supported.
    pub const FIRST_VIA_IR_VERSION: semver::Version = semver::Version::new(0, 8, 13);

    /// The last supported version of `solc`.
    pub const LAST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 8, 24);

    ///
    /// A shortcut constructor.
    ///
    /// Different tools may use different `executable` names. For example, the integration tester
    /// uses `solc-<version>` format.
    ///
    /// # Errors
    /// If the executable cannot be resolved via `PATH`.
    ///
    pub fn new(executable: String) -> anyhow::Result<Self> {
        // Fail early if the executable is not installed; the version is resolved lazily later.
        if let Err(error) = which::which(executable.as_str()) {
            anyhow::bail!(
                "The `{executable}` executable not found in ${{PATH}}: {}",
                error
            );
        }
        Ok(Self {
            executable,
            version: None,
        })
    }

    ///
    /// Compiles the Solidity `--standard-json` input into Yul IR.
    ///
    /// Spawns `solc --standard-json`, pipes the serialized input to its stdin,
    /// and parses its stdout back into a standard JSON output.
    ///
    pub fn standard_json(
        &mut self,
        mut input: StandardJsonInput,
        pipeline: Pipeline,
        base_path: Option<String>,
        include_paths: Vec<String>,
        allow_paths: Option<String>,
    ) -> anyhow::Result<StandardJsonOutput> {
        // Resolving the version also validates that the executable is a supported `solc`.
        let version = self.version()?;

        let mut command = std::process::Command::new(self.executable.as_str());
        command.stdin(std::process::Stdio::piped());
        command.stdout(std::process::Stdio::piped());
        command.arg("--standard-json");

        if let Some(base_path) = base_path {
            command.arg("--base-path");
            command.arg(base_path);
        }
        for include_path in include_paths.into_iter() {
            command.arg("--include-path");
            command.arg(include_path);
        }
        if let Some(allow_paths) = allow_paths {
            command.arg("--allow-paths");
            command.arg(allow_paths);
        }

        input.normalize(&version.default);

        // The suppressed warnings are extracted before serialization; the field is marked
        // `skip_serializing` and is consumed here rather than forwarded to `solc`.
        let suppressed_warnings = input.suppressed_warnings.take().unwrap_or_default();

        let input_json = serde_json::to_vec(&input).expect("Always valid");

        let process = command.spawn().map_err(|error| {
            anyhow::anyhow!("{} subprocess spawning error: {:?}", self.executable, error)
        })?;
        process
            .stdin
            .as_ref()
            .ok_or_else(|| anyhow::anyhow!("{} stdin getting error", self.executable))?
            .write_all(input_json.as_slice())
            .map_err(|error| {
                anyhow::anyhow!("{} stdin writing error: {:?}", self.executable, error)
            })?;

        let output = process.wait_with_output().map_err(|error| {
            anyhow::anyhow!("{} subprocess output error: {:?}", self.executable, error)
        })?;
        if !output.status.success() {
            anyhow::bail!(
                "{} error: {}",
                self.executable,
                String::from_utf8_lossy(output.stderr.as_slice()).to_string()
            );
        }

        // On a parsing failure, the raw stdout is attached to the error message
        // (pretty-printed when it is at least valid JSON) to aid debugging.
        let mut output: StandardJsonOutput = era_compiler_common::deserialize_from_slice(
            output.stdout.as_slice(),
        )
        .map_err(|error| {
            anyhow::anyhow!(
                "{} subprocess output parsing error: {}\n{}",
                self.executable,
                error,
                era_compiler_common::deserialize_from_slice::<serde_json::Value>(
                    output.stdout.as_slice()
                )
                .map(|json| serde_json::to_string_pretty(&json).expect("Always valid"))
                .unwrap_or_else(|_| String::from_utf8_lossy(output.stdout.as_slice()).to_string()),
            )
        })?;
        output.preprocess_ast(&version, pipeline, suppressed_warnings.as_slice())?;
        // The EVM artifacts are removed to prevent their accidental usage downstream.
        output.remove_evm();

        Ok(output)
    }

    ///
    /// The `solc --combined-json abi,hashes...` mirror.
    ///
    /// The `asm`, `bin`, and `bin-runtime` flags are filtered out and not passed to `solc`;
    /// placeholder values are inserted for them afterwards instead.
    ///
    pub fn combined_json(
        &self,
        paths: &[PathBuf],
        combined_json_argument: &str,
    ) -> anyhow::Result<CombinedJson> {
        let mut command = std::process::Command::new(self.executable.as_str());
        command.args(paths);

        let mut combined_json_flags = Vec::new();
        let mut combined_json_fake_flag_pushed = false;
        let mut filtered_flags = Vec::with_capacity(3);
        for flag in combined_json_argument.split(',') {
            match flag {
                flag @ "asm" | flag @ "bin" | flag @ "bin-runtime" => filtered_flags.push(flag),
                flag => combined_json_flags.push(flag),
            }
        }
        // If every requested flag was filtered out, `ast` is requested as a stand-in
        // (presumably `solc` rejects an empty flag list — TODO confirm); its side
        // products are stripped from the result below.
        if combined_json_flags.is_empty() {
            combined_json_flags.push("ast");
            combined_json_fake_flag_pushed = true;
        }
        command.arg("--combined-json");
        command.arg(combined_json_flags.join(","));

        let output = command.output().map_err(|error| {
            anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
        })?;
        if !output.status.success() {
            println!("{}", String::from_utf8_lossy(output.stdout.as_slice()));
            println!("{}", String::from_utf8_lossy(output.stderr.as_slice()));
            anyhow::bail!(
                "{} error: {}",
                self.executable,
                String::from_utf8_lossy(output.stdout.as_slice()).to_string()
            );
        }

        let mut combined_json: CombinedJson = era_compiler_common::deserialize_from_slice(
            output.stdout.as_slice(),
        )
        .map_err(|error| {
            anyhow::anyhow!(
                "{} subprocess output parsing error: {}\n{}",
                self.executable,
                error,
                era_compiler_common::deserialize_from_slice::<serde_json::Value>(
                    output.stdout.as_slice()
                )
                .map(|json| serde_json::to_string_pretty(&json).expect("Always valid"))
                .unwrap_or_else(|_| String::from_utf8_lossy(output.stdout.as_slice()).to_string()),
            )
        })?;
        // Insert placeholders for the flags that were withheld from `solc`,
        // so the output shape matches what the caller requested.
        for filtered_flag in filtered_flags.into_iter() {
            for (_path, contract) in combined_json.contracts.iter_mut() {
                match filtered_flag {
                    "asm" => contract.asm = Some(serde_json::Value::Null),
                    "bin" => contract.bin = Some("".to_owned()),
                    "bin-runtime" => contract.bin_runtime = Some("".to_owned()),
                    _ => continue,
                }
            }
        }
        // Drop the by-products of the stand-in `ast` flag.
        if combined_json_fake_flag_pushed {
            combined_json.source_list = None;
            combined_json.sources = None;
        }
        combined_json.remove_evm();

        Ok(combined_json)
    }

    ///
    /// The `solc` Yul validator.
    ///
    /// Runs `solc --strict-assembly <path>` and reports failure via its stderr.
    ///
    pub fn validate_yul(&self, path: &Path) -> anyhow::Result<()> {
        let mut command = std::process::Command::new(self.executable.as_str());
        command.arg("--strict-assembly");
        command.arg(path);

        let output = command.output().map_err(|error| {
            anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
        })?;
        if !output.status.success() {
            anyhow::bail!(
                "{} error: {}",
                self.executable,
                String::from_utf8_lossy(output.stderr.as_slice()).to_string()
            );
        }
        Ok(())
    }

    ///
    /// The `solc --version` mini-parser.
    ///
    /// The result is cached in `self.version`, so the subprocess is only spawned once.
    ///
    /// # Errors
    /// If the output cannot be parsed, or the version is outside the supported range.
    ///
    pub fn version(&mut self) -> anyhow::Result<Version> {
        if let Some(version) = self.version.as_ref() {
            return Ok(version.to_owned());
        }

        let mut command = std::process::Command::new(self.executable.as_str());
        command.arg("--version");
        let output = command.output().map_err(|error| {
            anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
        })?;
        if !output.status.success() {
            anyhow::bail!(
                "{} error: {}",
                self.executable,
                String::from_utf8_lossy(output.stderr.as_slice()).to_string()
            );
        }

        let stdout = String::from_utf8_lossy(output.stdout.as_slice());
        // The long version string is the 2nd word of the 2nd line
        // (assumes the usual `solc --version` banner layout — TODO confirm for all builds).
        let long = stdout
            .lines()
            .nth(1)
            .ok_or_else(|| {
                anyhow::anyhow!("{} version parsing: not enough lines", self.executable)
            })?
            .split(' ')
            .nth(1)
            .ok_or_else(|| {
                anyhow::anyhow!(
                    "{} version parsing: not enough words in the 2nd line",
                    self.executable
                )
            })?
            .to_owned();
        // The semver part precedes the `+`-separated build metadata.
        let default: semver::Version = long
            .split('+')
            .next()
            .ok_or_else(|| {
                anyhow::anyhow!("{} version parsing: metadata dropping", self.executable)
            })?
            .parse()
            .map_err(|error| anyhow::anyhow!("{} version parsing: {}", self.executable, error))?;
        // An optional L2 revision may appear on the 3rd line; best-effort, errors are ignored.
        let l2_revision: Option<semver::Version> = stdout
            .lines()
            .nth(2)
            .and_then(|line| line.split(' ').nth(1))
            .and_then(|line| line.split('-').nth(1))
            .and_then(|version| version.parse().ok());

        let version = Version::new(long, default, l2_revision);
        if version.default < Self::FIRST_SUPPORTED_VERSION {
            anyhow::bail!(
                "`solc` versions <{} are not supported, found {}",
                Self::FIRST_SUPPORTED_VERSION,
                version.default
            );
        }
        if version.default > Self::LAST_SUPPORTED_VERSION {
            anyhow::bail!(
                "`solc` versions >{} are not supported, found {}",
                Self::LAST_SUPPORTED_VERSION,
                version.default
            );
        }

        self.version = Some(version.clone());
        Ok(version)
    }
}
+32
View File
@@ -0,0 +1,32 @@
//!
//! The Solidity compiler pipeline type.
//!
use crate::solc::version::Version as SolcVersion;
use crate::solc::Compiler as SolcCompiler;
///
/// The Solidity compiler pipeline type.
///
/// Selected by [`Pipeline::new`] from the `solc` version and the user's preference.
///
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[allow(non_camel_case_types)]
#[allow(clippy::upper_case_acronyms)]
pub enum Pipeline {
    /// The Yul IR.
    Yul,
    /// The EVM legacy assembly IR.
    EVMLA,
}
impl Pipeline {
    ///
    /// We always use EVMLA for Solidity <=0.7, or if the user does not want to compile via Yul.
    ///
    pub fn new(solc_version: &SolcVersion, force_evmla: bool) -> Self {
        let use_evmla = force_evmla || solc_version.default < SolcCompiler::FIRST_YUL_VERSION;
        match use_evmla {
            true => Self::EVMLA,
            false => Self::Yul,
        }
    }
}
@@ -0,0 +1,26 @@
//!
//! The `solc --standard-json` input language.
//!
use serde::Deserialize;
use serde::Serialize;
///
/// The `solc --standard-json` input language.
///
/// Serialized with the variant names as-is (`"Solidity"` / `"Yul"`).
///
#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Language {
    /// The Solidity language.
    Solidity,
    /// The Yul IR.
    Yul,
}
impl std::fmt::Display for Language {
    /// Formats the language name exactly as it appears in the JSON schema.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let name = match self {
            Self::Solidity => "Solidity",
            Self::Yul => "Yul",
        };
        f.write_str(name)
    }
}
@@ -0,0 +1,147 @@
//!
//! The `solc --standard-json` input.
//!
pub mod language;
pub mod settings;
pub mod source;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::path::PathBuf;
use rayon::iter::IntoParallelIterator;
use rayon::iter::ParallelIterator;
use serde::Deserialize;
use serde::Serialize;
use crate::solc::pipeline::Pipeline as SolcPipeline;
use crate::solc::standard_json::input::settings::metadata::Metadata as SolcStandardJsonInputSettingsMetadata;
use crate::solc::standard_json::input::settings::optimizer::Optimizer as SolcStandardJsonInputSettingsOptimizer;
use crate::solc::standard_json::input::settings::selection::Selection as SolcStandardJsonInputSettingsSelection;
use crate::warning::Warning;
use self::language::Language;
use self::settings::Settings;
use self::source::Source;
///
/// The `solc --standard-json` input.
///
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Input {
    /// The input language.
    pub language: Language,
    /// The input source code files hashmap.
    pub sources: BTreeMap<String, Source>,
    /// The compiler settings.
    pub settings: Settings,
    /// The suppressed warnings.
    /// Never serialized, so it is not forwarded to `solc`;
    /// NOTE(review): presumably consumed by the caller before the input is sent — confirm.
    #[serde(skip_serializing)]
    pub suppressed_warnings: Option<Vec<Warning>>,
}
impl Input {
///
/// A shortcut constructor from stdin.
///
pub fn try_from_stdin(solc_pipeline: SolcPipeline) -> anyhow::Result<Self> {
let mut input: Self = serde_json::from_reader(std::io::BufReader::new(std::io::stdin()))?;
input
.settings
.output_selection
.get_or_insert_with(SolcStandardJsonInputSettingsSelection::default)
.extend_with_required(solc_pipeline);
Ok(input)
}
///
/// A shortcut constructor from paths.
///
#[allow(clippy::too_many_arguments)]
pub fn try_from_paths(
language: Language,
evm_version: Option<era_compiler_common::EVMVersion>,
paths: &[PathBuf],
library_map: Vec<String>,
remappings: Option<BTreeSet<String>>,
output_selection: SolcStandardJsonInputSettingsSelection,
optimizer: SolcStandardJsonInputSettingsOptimizer,
metadata: Option<SolcStandardJsonInputSettingsMetadata>,
via_ir: bool,
suppressed_warnings: Option<Vec<Warning>>,
) -> anyhow::Result<Self> {
let sources = paths
.into_par_iter()
.map(|path| {
let source = Source::try_from(path.as_path()).unwrap_or_else(|error| {
panic!("Source code file {path:?} reading error: {error}")
});
(path.to_string_lossy().to_string(), source)
})
.collect();
let libraries = Settings::parse_libraries(library_map)?;
Ok(Self {
language,
sources,
settings: Settings::new(
evm_version,
libraries,
remappings,
output_selection,
via_ir,
optimizer,
metadata,
),
suppressed_warnings,
})
}
///
/// A shortcut constructor from source code.
///
/// Only for the integration test purposes.
///
#[allow(clippy::too_many_arguments)]
pub fn try_from_sources(
evm_version: Option<era_compiler_common::EVMVersion>,
sources: BTreeMap<String, String>,
libraries: BTreeMap<String, BTreeMap<String, String>>,
remappings: Option<BTreeSet<String>>,
output_selection: SolcStandardJsonInputSettingsSelection,
optimizer: SolcStandardJsonInputSettingsOptimizer,
metadata: Option<SolcStandardJsonInputSettingsMetadata>,
via_ir: bool,
suppressed_warnings: Option<Vec<Warning>>,
) -> anyhow::Result<Self> {
let sources = sources
.into_par_iter()
.map(|(path, content)| (path, Source::from(content)))
.collect();
Ok(Self {
language: Language::Solidity,
sources,
settings: Settings::new(
evm_version,
libraries,
remappings,
output_selection,
via_ir,
optimizer,
metadata,
),
suppressed_warnings,
})
}
///
/// Sets the necessary defaults.
///
pub fn normalize(&mut self, version: &semver::Version) {
self.settings.normalize(version);
}
}
@@ -0,0 +1,28 @@
//!
//! The `solc --standard-json` input settings metadata.
//!
use serde::Deserialize;
use serde::Serialize;
///
/// The `solc --standard-json` input settings metadata.
///
#[derive(Debug, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Metadata {
    /// The bytecode hash mode.
    /// Skipped during serialization when unset, so the `solc` default applies.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub bytecode_hash: Option<era_compiler_llvm_context::EraVMMetadataHash>,
}
impl Metadata {
    ///
    /// A shortcut constructor with an explicit bytecode hash mode.
    ///
    pub fn new(bytecode_hash: era_compiler_llvm_context::EraVMMetadataHash) -> Self {
        let bytecode_hash = Some(bytecode_hash);
        Self { bytecode_hash }
    }
}
@@ -0,0 +1,111 @@
//!
//! The `solc --standard-json` input settings.
//!
pub mod metadata;
pub mod optimizer;
pub mod selection;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use serde::Deserialize;
use serde::Serialize;
use self::metadata::Metadata;
use self::optimizer::Optimizer;
use self::selection::Selection;
///
/// The `solc --standard-json` input settings.
///
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Settings {
    /// The target EVM version.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub evm_version: Option<era_compiler_common::EVMVersion>,
    /// The linker library addresses.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub libraries: Option<BTreeMap<String, BTreeMap<String, String>>>,
    /// The sorted list of remappings.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub remappings: Option<BTreeSet<String>>,
    /// The output selection filters.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub output_selection: Option<Selection>,
    /// Whether to compile via IR. Only for testing with solc >=0.8.13.
    /// Never deserialized, so it cannot be enabled from the incoming JSON.
    #[serde(
        rename = "viaIR",
        skip_serializing_if = "Option::is_none",
        skip_deserializing
    )]
    pub via_ir: Option<bool>,
    /// The optimizer settings.
    pub optimizer: Optimizer,
    /// The metadata settings.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub metadata: Option<Metadata>,
}
impl Settings {
    ///
    /// A shortcut constructor.
    ///
    pub fn new(
        evm_version: Option<era_compiler_common::EVMVersion>,
        libraries: BTreeMap<String, BTreeMap<String, String>>,
        remappings: Option<BTreeSet<String>>,
        output_selection: Selection,
        via_ir: bool,
        optimizer: Optimizer,
        metadata: Option<Metadata>,
    ) -> Self {
        Self {
            evm_version,
            libraries: Some(libraries),
            remappings,
            output_selection: Some(output_selection),
            // `viaIR` is only ever serialized as `true`; `false` is expressed by omission.
            via_ir: via_ir.then_some(true),
            optimizer,
            metadata,
        }
    }

    ///
    /// Sets the necessary defaults.
    ///
    pub fn normalize(&mut self, version: &semver::Version) {
        self.optimizer.normalize(version);
    }

    ///
    /// Parses the library list and returns their double hashmap with path and name as keys.
    ///
    /// Each element has the form `<file>:<contract>=<address>`.
    ///
    pub fn parse_libraries(
        input: Vec<String>,
    ) -> anyhow::Result<BTreeMap<String, BTreeMap<String, String>>> {
        let mut libraries = BTreeMap::new();
        for (index, library) in input.into_iter().enumerate() {
            let mut assignment = library.split('=');
            let path = assignment
                .next()
                .ok_or_else(|| anyhow::anyhow!("The library #{} path is missing", index))?;
            let mut locator = path.split(':');
            let file = locator
                .next()
                .ok_or_else(|| anyhow::anyhow!("The library `{}` file name is missing", path))?;
            let contract = locator
                .next()
                .ok_or_else(|| anyhow::anyhow!("The library `{}` contract name is missing", path))?;
            let address = assignment
                .next()
                .ok_or_else(|| anyhow::anyhow!("The library `{}` address is missing", path))?;
            libraries
                .entry(file.to_owned())
                .or_insert_with(BTreeMap::new)
                .insert(contract.to_owned(), address.to_owned());
        }
        Ok(libraries)
    }
}
@@ -0,0 +1,67 @@
//!
//! The `solc --standard-json` input settings optimizer details.
//!
use serde::Deserialize;
use serde::Serialize;
///
/// The `solc --standard-json` input settings optimizer details.
///
/// Each flag toggles one `solc` optimizer pass.
///
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Details {
    /// Whether the peephole pass is enabled.
    pub peephole: bool,
    /// Whether the inliner pass is enabled.
    /// Optional, since older `solc` versions do not know this field.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub inliner: Option<bool>,
    /// Whether the jumpdest remover pass is enabled.
    pub jumpdest_remover: bool,
    /// Whether the literal ordering pass is enabled.
    pub order_literals: bool,
    /// Whether the deduplication pass is enabled.
    pub deduplicate: bool,
    /// Whether the common subexpression elimination pass is enabled.
    pub cse: bool,
    /// Whether the constant optimizer pass is enabled.
    pub constant_optimizer: bool,
}
impl Details {
    ///
    /// A shortcut constructor.
    ///
    pub fn new(
        peephole: bool,
        inliner: Option<bool>,
        jumpdest_remover: bool,
        order_literals: bool,
        deduplicate: bool,
        cse: bool,
        constant_optimizer: bool,
    ) -> Self {
        Self {
            peephole,
            inliner,
            jumpdest_remover,
            order_literals,
            deduplicate,
            cse,
            constant_optimizer,
        }
    }

    ///
    /// Creates a set of disabled optimizations.
    ///
    /// The `inliner` field is only emitted for `solc` >=0.8.5, where the pass exists.
    ///
    pub fn disabled(version: &semver::Version) -> Self {
        let has_inliner = version >= &semver::Version::new(0, 8, 5);
        Self::new(false, has_inliner.then_some(false), false, false, false, false, false)
    }
}
@@ -0,0 +1,82 @@
//!
//! The `solc --standard-json` input settings optimizer.
//!
pub mod details;
use serde::Deserialize;
use serde::Serialize;
use self::details::Details;
///
/// The `solc --standard-json` input settings optimizer.
///
/// The `skip_serializing` fields are extensions not forwarded to `solc`.
///
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Optimizer {
    /// Whether the optimizer is enabled.
    pub enabled: bool,
    /// The optimization mode string.
    #[serde(skip_serializing)]
    pub mode: Option<char>,
    /// The `solc` optimizer details.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub details: Option<Details>,
    /// Whether to try to recompile with -Oz if the bytecode is too large.
    #[serde(skip_serializing)]
    pub fallback_to_optimizing_for_size: Option<bool>,
    /// Whether to disable the system request memoization.
    #[serde(skip_serializing)]
    pub disable_system_request_memoization: Option<bool>,
}
impl Optimizer {
    ///
    /// A shortcut constructor.
    ///
    /// The `solc` optimizer details are always set to fully disabled.
    ///
    pub fn new(
        enabled: bool,
        mode: Option<char>,
        version: &semver::Version,
        fallback_to_optimizing_for_size: bool,
        disable_system_request_memoization: bool,
    ) -> Self {
        let details = Details::disabled(version);
        Self {
            enabled,
            mode,
            details: Some(details),
            fallback_to_optimizing_for_size: Some(fallback_to_optimizing_for_size),
            disable_system_request_memoization: Some(disable_system_request_memoization),
        }
    }

    ///
    /// Sets the necessary defaults.
    ///
    /// The `details` field is only supported by `solc` >=0.5.5 and is dropped otherwise.
    ///
    pub fn normalize(&mut self, version: &semver::Version) {
        let details_supported = version >= &semver::Version::new(0, 5, 5);
        self.details = details_supported.then(|| Details::disabled(version));
    }
}
impl TryFrom<&Optimizer> for era_compiler_llvm_context::OptimizerSettings {
    type Error = anyhow::Error;

    /// Translates the JSON optimizer settings into LLVM optimizer settings,
    /// falling back to the cycles preset when no mode is specified.
    fn try_from(value: &Optimizer) -> Result<Self, Self::Error> {
        let mut result = value
            .mode
            .map(Self::try_from_cli)
            .transpose()?
            .unwrap_or_else(Self::cycles);
        if value.fallback_to_optimizing_for_size.unwrap_or_default() {
            result.enable_fallback_to_size();
        }
        if value.disable_system_request_memoization.unwrap_or_default() {
            result.disable_system_request_memoization();
        }
        Ok(result)
    }
}
@@ -0,0 +1,69 @@
//!
//! The `solc --standard-json` expected output selection flag.
//!
use serde::Deserialize;
use serde::Serialize;
use crate::solc::pipeline::Pipeline as SolcPipeline;
///
/// The `solc --standard-json` expected output selection flag.
///
/// The serde renames match the selector strings of the `solc` output selection schema.
///
#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Hash)]
#[allow(non_camel_case_types)]
#[allow(clippy::upper_case_acronyms)]
pub enum Flag {
    /// The ABI JSON.
    #[serde(rename = "abi")]
    ABI,
    /// The metadata.
    #[serde(rename = "metadata")]
    Metadata,
    /// The developer documentation.
    #[serde(rename = "devdoc")]
    Devdoc,
    /// The user documentation.
    #[serde(rename = "userdoc")]
    Userdoc,
    /// The function signature hashes JSON.
    #[serde(rename = "evm.methodIdentifiers")]
    MethodIdentifiers,
    /// The storage layout.
    #[serde(rename = "storageLayout")]
    StorageLayout,
    /// The AST JSON.
    #[serde(rename = "ast")]
    AST,
    /// The Yul IR.
    #[serde(rename = "irOptimized")]
    Yul,
    /// The EVM legacy assembly JSON.
    #[serde(rename = "evm.legacyAssembly")]
    EVMLA,
}
impl From<SolcPipeline> for Flag {
    /// Maps a compilation pipeline to the IR output flag it requires.
    fn from(pipeline: SolcPipeline) -> Self {
        match pipeline {
            SolcPipeline::EVMLA => Self::EVMLA,
            SolcPipeline::Yul => Self::Yul,
        }
    }
}
impl std::fmt::Display for Flag {
    /// Formats the flag exactly as its `solc` output selection selector string.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let selector = match self {
            Self::ABI => "abi",
            Self::Metadata => "metadata",
            Self::Devdoc => "devdoc",
            Self::Userdoc => "userdoc",
            Self::MethodIdentifiers => "evm.methodIdentifiers",
            Self::StorageLayout => "storageLayout",
            Self::AST => "ast",
            Self::Yul => "irOptimized",
            Self::EVMLA => "evm.legacyAssembly",
        };
        f.write_str(selector)
    }
}
@@ -0,0 +1,58 @@
//!
//! The `solc --standard-json` output file selection.
//!
pub mod flag;
use std::collections::HashSet;
use serde::Deserialize;
use serde::Serialize;
use crate::solc::pipeline::Pipeline as SolcPipeline;
use self::flag::Flag as SelectionFlag;
///
/// The `solc --standard-json` output file selection.
///
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct File {
    /// The per-file output selections.
    /// NOTE(review): serialized under the empty-string key — presumably the slot
    /// `solc` uses for file-level outputs such as the AST; confirm against the schema.
    #[serde(rename = "", skip_serializing_if = "Option::is_none")]
    pub per_file: Option<HashSet<SelectionFlag>>,
    /// The per-contract output selections, under the `*` wildcard key.
    #[serde(rename = "*", skip_serializing_if = "Option::is_none")]
    pub per_contract: Option<HashSet<SelectionFlag>>,
}
impl File {
    ///
    /// Creates the selection required by our compilation process.
    ///
    pub fn new_required(pipeline: SolcPipeline) -> Self {
        let per_file = HashSet::from_iter([SelectionFlag::AST]);
        let per_contract = HashSet::from_iter([
            SelectionFlag::MethodIdentifiers,
            SelectionFlag::Metadata,
            SelectionFlag::from(pipeline),
        ]);
        Self {
            per_file: Some(per_file),
            per_contract: Some(per_contract),
        }
    }

    ///
    /// Extends the user's output selection with flag required by our compilation process.
    ///
    pub fn extend_with_required(&mut self, pipeline: SolcPipeline) -> &mut Self {
        let Self {
            per_file,
            per_contract,
        } = Self::new_required(pipeline);
        self.per_file
            .get_or_insert_with(HashSet::default)
            .extend(per_file.unwrap_or_default());
        self.per_contract
            .get_or_insert_with(HashSet::default)
            .extend(per_contract.unwrap_or_default());
        self
    }
}
@@ -0,0 +1,43 @@
//!
//! The `solc --standard-json` output selection.
//!
pub mod file;
use serde::Deserialize;
use serde::Serialize;
use crate::solc::pipeline::Pipeline as SolcPipeline;
use self::file::File as FileSelection;
///
/// The `solc --standard-json` output selection.
///
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct Selection {
    /// Only the 'all' wildcard is available for robustness reasons.
    /// Serialized under the `*` key of the `outputSelection` object.
    #[serde(rename = "*", skip_serializing_if = "Option::is_none")]
    pub all: Option<FileSelection>,
}
impl Selection {
    ///
    /// Creates the selection required by our compilation process.
    ///
    pub fn new_required(pipeline: SolcPipeline) -> Self {
        let all = FileSelection::new_required(pipeline);
        Self { all: Some(all) }
    }

    ///
    /// Extends the user's output selection with flag required by our compilation process.
    ///
    pub fn extend_with_required(&mut self, pipeline: SolcPipeline) -> &mut Self {
        let file_selection = self
            .all
            .get_or_insert_with(|| FileSelection::new_required(pipeline));
        file_selection.extend_with_required(pipeline);
        self
    }
}
@@ -0,0 +1,44 @@
//!
//! The `solc --standard-json` input source.
//!
use std::io::Read;
use std::path::Path;
use serde::Deserialize;
use serde::Serialize;
///
/// The `solc --standard-json` input source.
///
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Source {
    /// The source code file content.
    pub content: String,
}
impl From<String> for Source {
fn from(content: String) -> Self {
Self { content }
}
}
impl TryFrom<&Path> for Source {
type Error = anyhow::Error;
fn try_from(path: &Path) -> Result<Self, Self::Error> {
let content = if path.to_string_lossy() == "-" {
let mut solidity_code = String::with_capacity(16384);
std::io::stdin()
.read_to_string(&mut solidity_code)
.map_err(|error| anyhow::anyhow!("<stdin> reading error: {}", error))?;
solidity_code
} else {
std::fs::read_to_string(path)
.map_err(|error| anyhow::anyhow!("File {:?} reading error: {}", path, error))?
};
Ok(Self { content })
}
}
@@ -0,0 +1,6 @@
//!
//! The `solc <input>.sol --standard-json`.
//!
pub mod input;
pub mod output;
@@ -0,0 +1,25 @@
//!
//! The `solc --standard-json` output contract EVM bytecode.
//!
use serde::Deserialize;
use serde::Serialize;
///
/// The `solc --standard-json` output contract EVM bytecode.
///
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Bytecode {
    /// The bytecode object.
    /// Hexadecimal string, as emitted by `solc` — TODO confirm it is never 0x-prefixed.
    pub object: String,
}
impl Bytecode {
///
/// A shortcut constructor.
///
pub fn new(object: String) -> Self {
Self { object }
}
}
@@ -0,0 +1,52 @@
//!
//! The `solc --standard-json` output contract EVM extra metadata.
//!
pub mod recursive_function;
use serde::Deserialize;
use serde::Serialize;
use self::recursive_function::RecursiveFunction;
///
/// The `solc --standard-json` output contract EVM extra metadata.
///
#[derive(Debug, Default, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ExtraMetadata {
    /// The list of recursive functions.
    /// Defaults to an empty list when the field is absent from the JSON.
    #[serde(default = "Vec::new")]
    pub recursive_functions: Vec<RecursiveFunction>,
}
impl ExtraMetadata {
    ///
    /// Returns the recursive function reference for the specified tag.
    ///
    /// The tag is compared against `creation_tag` or `runtime_tag` depending on
    /// the code type of the block key; the first match wins.
    ///
    pub fn get(
        &self,
        block_key: &era_compiler_llvm_context::EraVMFunctionBlockKey,
    ) -> Option<&RecursiveFunction> {
        self.recursive_functions.iter().find(|function| {
            let tag = match block_key.code_type {
                era_compiler_llvm_context::EraVMCodeType::Deploy => function.creation_tag,
                era_compiler_llvm_context::EraVMCodeType::Runtime => function.runtime_tag,
            };
            tag.map_or(false, |tag| num::BigUint::from(tag) == block_key.tag)
        })
    }
}
@@ -0,0 +1,26 @@
//!
//! The `solc --standard-json` output contract EVM recursive function.
//!
use serde::Deserialize;
use serde::Serialize;
///
/// The `solc --standard-json` output contract EVM recursive function.
///
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct RecursiveFunction {
    /// The function name.
    pub name: String,
    /// The creation code function block tag.
    /// `None` when the function does not appear in the deploy code.
    pub creation_tag: Option<usize>,
    /// The runtime code function block tag.
    /// `None` when the function does not appear in the runtime code.
    pub runtime_tag: Option<usize>,
    /// The number of input arguments.
    #[serde(rename = "totalParamSize")]
    pub input_size: usize,
    /// The number of output arguments.
    #[serde(rename = "totalRetParamSize")]
    pub output_size: usize,
}
@@ -0,0 +1,51 @@
//!
//! The `solc --standard-json` output contract EVM data.
//!
pub mod bytecode;
pub mod extra_metadata;
use std::collections::BTreeMap;
use serde::Deserialize;
use serde::Serialize;
use crate::evmla::assembly::Assembly;
use self::bytecode::Bytecode;
use self::extra_metadata::ExtraMetadata;
///
/// The `solc --standard-json` output contract EVM data.
///
/// It is replaced by EraVM data after compiling.
///
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct EVM {
    /// The contract EVM legacy assembly code.
    /// NB: serialized as `legacyAssembly`, since the `assembly` key is taken
    /// by the EraVM assembly text below.
    #[serde(rename = "legacyAssembly")]
    pub assembly: Option<Assembly>,
    /// The contract EraVM assembly code.
    #[serde(rename = "assembly")]
    pub assembly_text: Option<String>,
    /// The contract bytecode.
    /// Is reset by that of EraVM before yielding the compiled project artifacts.
    pub bytecode: Option<Bytecode>,
    /// The contract function signatures.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub method_identifiers: Option<BTreeMap<String, String>>,
    /// The extra EVMLA metadata.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub extra_metadata: Option<ExtraMetadata>,
}
impl EVM {
    ///
    /// Sets the EraVM assembly and bytecode.
    ///
    pub fn modify(&mut self, assembly_text: String, bytecode: String) {
        self.bytecode = Some(Bytecode::new(bytecode));
        self.assembly_text = Some(assembly_text);
    }
}
@@ -0,0 +1,51 @@
//!
//! The `solc --standard-json` output contract.
//!
pub mod evm;
use std::collections::BTreeMap;
use std::collections::HashSet;
use serde::Deserialize;
use serde::Serialize;
use self::evm::EVM;
///
/// The `solc --standard-json` output contract.
///
/// Every field is optional and is omitted from serialization when absent.
///
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Contract {
    /// The contract ABI.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub abi: Option<serde_json::Value>,
    /// The contract metadata.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub metadata: Option<serde_json::Value>,
    /// The contract developer documentation.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub devdoc: Option<serde_json::Value>,
    /// The contract user documentation.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub userdoc: Option<serde_json::Value>,
    /// The contract storage layout.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub storage_layout: Option<serde_json::Value>,
    /// Contract's bytecode and related objects
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub evm: Option<EVM>,
    /// The contract optimized IR code.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub ir_optimized: Option<String>,
    /// The contract EraVM bytecode hash.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub hash: Option<String>,
    /// The contract factory dependencies.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub factory_dependencies: Option<BTreeMap<String, String>>,
    /// The contract missing libraries.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub missing_libraries: Option<HashSet<String>>,
}
@@ -0,0 +1,177 @@
//!
//! The `solc --standard-json` output error.
//!
pub mod source_location;
use std::str::FromStr;
use serde::Deserialize;
use serde::Serialize;
use self::source_location::SourceLocation;
///
/// The `solc --standard-json` output error.
///
/// Also used for the zkSync-specific warnings and errors produced by this
/// compiler itself (see the `message_*` constructors).
///
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Error {
    /// The component type.
    pub component: String,
    /// The error code.
    pub error_code: Option<String>,
    /// The formatted error message.
    pub formatted_message: String,
    /// The non-formatted error message.
    pub message: String,
    /// The error severity, e.g. `warning` or `error`.
    pub severity: String,
    /// The error location data.
    pub source_location: Option<SourceLocation>,
    /// The error type, e.g. `Warning` or `Error`.
    pub r#type: String,
}
impl Error {
    ///
    /// A shortcut constructor for a message in the `general` component.
    ///
    /// The same text is used for both the formatted and the non-formatted
    /// message; a malformed `src` location string is silently dropped.
    ///
    fn new_message(severity: &str, r#type: &str, message: String, src: Option<&str>) -> Self {
        Self {
            component: "general".to_owned(),
            error_code: None,
            formatted_message: message.clone(),
            message,
            severity: severity.to_owned(),
            source_location: src.map(SourceLocation::from_str).and_then(Result::ok),
            r#type: r#type.to_owned(),
        }
    }

    ///
    /// Returns the `ecrecover` function usage warning.
    ///
    pub fn message_ecrecover(src: Option<&str>) -> Self {
        let message = r#"
┌──────────────────────────────────────────────────────────────────────────────────────────────────┐
│ Warning: It looks like you are using 'ecrecover' to validate a signature of a user account.      │
│ zkSync Era comes with native account abstraction support, therefore it is highly recommended NOT │
│ to rely on the fact that the account has an ECDSA private key attached to it since accounts might│
│ implement other signature schemes.                                                               │
│ Read more about Account Abstraction at https://v2-docs.zksync.io/dev/developer-guides/aa.html    │
└──────────────────────────────────────────────────────────────────────────────────────────────────┘"#
            .to_owned();
        Self::new_message("warning", "Warning", message, src)
    }

    ///
    /// Returns the `<address payable>`'s `send` and `transfer` methods usage warning.
    ///
    pub fn message_send_and_transfer(src: Option<&str>) -> Self {
        let message = r#"
┌──────────────────────────────────────────────────────────────────────────────────────────────────┐
│ Warning: It looks like you are using '<address payable>.send/transfer(<X>)' without providing    │
│ the gas amount. Such calls will fail depending on the pubdata costs.                             │
│ This might be a false positive if you are using an interface (like IERC20) instead of the        │
│ native Solidity `send/transfer`.                                                                 │
│ Please use 'payable(<address>).call{value: <X>}("")' instead, but be careful with the reentrancy │
│ attack. `send` and `transfer` send limited amount of gas that prevents reentrancy, whereas       │
│ `<address>.call{value: <X>}` sends all gas to the callee. Learn more on                          │
│ https://docs.soliditylang.org/en/latest/security-considerations.html#reentrancy                  │
└──────────────────────────────────────────────────────────────────────────────────────────────────┘"#
            .to_owned();
        Self::new_message("warning", "Warning", message, src)
    }

    ///
    /// Returns the `extcodesize` instruction usage warning.
    ///
    pub fn message_extcodesize(src: Option<&str>) -> Self {
        let message = r#"
┌──────────────────────────────────────────────────────────────────────────────────────────────────┐
│ Warning: Your code or one of its dependencies uses the 'extcodesize' instruction, which is       │
│ usually needed in the following cases:                                                           │
│   1. To detect whether an address belongs to a smart contract.                                   │
│   2. To detect whether the deploy code execution has finished.                                   │
│ zkSync Era comes with native account abstraction support (so accounts are smart contracts,       │
│ including private-key controlled EOAs), and you should avoid differentiating between contracts   │
│ and non-contract addresses.                                                                      │
└──────────────────────────────────────────────────────────────────────────────────────────────────┘"#
            .to_owned();
        Self::new_message("warning", "Warning", message, src)
    }

    ///
    /// Returns the `origin` instruction usage warning.
    ///
    pub fn message_tx_origin(src: Option<&str>) -> Self {
        let message = r#"
┌──────────────────────────────────────────────────────────────────────────────────────────────────┐
│ Warning: You are checking for 'tx.origin' in your code, which might lead to unexpected behavior. │
│ zkSync Era comes with native account abstraction support, and therefore the initiator of a       │
│ transaction might be different from the contract calling your code. It is highly recommended NOT │
│ to rely on tx.origin, but use msg.sender instead.                                                │
│ Read more about Account Abstraction at https://v2-docs.zksync.io/dev/developer-guides/aa.html    │
└──────────────────────────────────────────────────────────────────────────────────────────────────┘"#
            .to_owned();
        Self::new_message("warning", "Warning", message, src)
    }

    ///
    /// Returns the internal function pointer usage error.
    ///
    pub fn message_internal_function_pointer(src: Option<&str>) -> Self {
        let message = r#"
┌──────────────────────────────────────────────────────────────────────────────────────────────────┐
│ Error: Internal function pointers are not supported in EVM legacy assembly pipeline.             │
│ Please use the Yul IR codegen instead.                                                           │
└──────────────────────────────────────────────────────────────────────────────────────────────────┘"#
            .to_owned();
        Self::new_message("error", "Error", message, src)
    }

    ///
    /// Appends the contract path to the formatted message.
    ///
    pub fn push_contract_path(&mut self, path: &str) {
        self.formatted_message
            .push_str(format!("\n--> {path}\n").as_str());
    }
}
impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // The formatted message is already fully rendered.
        f.write_str(self.formatted_message.as_str())
    }
}
@@ -0,0 +1,47 @@
//!
//! The `solc --standard-json` output error source location.
//!
use std::str::FromStr;
use serde::Deserialize;
use serde::Serialize;
///
/// The `solc --standard-json` output error source location.
///
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct SourceLocation {
    /// The source file path.
    pub file: String,
    /// The start location.
    pub start: isize,
    /// The end location.
    /// Computed as `start + length` when parsed from a `start:length:file` string.
    pub end: isize,
}
impl FromStr for SourceLocation {
    type Err = anyhow::Error;

    ///
    /// Parses a `<start>:<length>:<file>` location string.
    ///
    /// Missing or non-numeric `start`/`length` parts default to zero, so the
    /// conversion itself never fails.
    ///
    fn from_str(string: &str) -> Result<Self, Self::Err> {
        let mut parts = string.split(':');
        let start = parts
            .next()
            .and_then(|part| part.parse::<isize>().ok())
            .unwrap_or_default();
        let length = parts
            .next()
            .and_then(|part| part.parse::<isize>().ok())
            .unwrap_or_default();
        let file = parts.next().unwrap_or_default().to_owned();
        Ok(Self {
            file,
            start,
            end: start + length,
        })
    }
}
@@ -0,0 +1,281 @@
//!
//! The `solc --standard-json` output.
//!
pub mod contract;
pub mod error;
pub mod source;
use std::collections::BTreeMap;
use serde::Deserialize;
use serde::Serialize;
use sha3::Digest;
use crate::evmla::assembly::instruction::Instruction;
use crate::evmla::assembly::Assembly;
use crate::project::contract::ir::IR as ProjectContractIR;
use crate::project::contract::Contract as ProjectContract;
use crate::project::Project;
use crate::solc::pipeline::Pipeline as SolcPipeline;
use crate::solc::version::Version as SolcVersion;
use crate::warning::Warning;
use crate::yul::lexer::Lexer;
use crate::yul::parser::statement::object::Object;
use self::contract::Contract;
use self::error::Error as SolcStandardJsonOutputError;
use self::source::Source;
///
/// The `solc --standard-json` output.
///
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Output {
    /// The file-contract hashmap.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub contracts: Option<BTreeMap<String, BTreeMap<String, Contract>>>,
    /// The source code mapping data.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub sources: Option<BTreeMap<String, Source>>,
    /// The compilation errors and warnings.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub errors: Option<Vec<SolcStandardJsonOutputError>>,
    /// The `solc` compiler version.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub version: Option<String>,
    /// The `solc` compiler long version.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub long_version: Option<String>,
    /// The `zksolc` compiler version.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub zk_version: Option<String>,
}
impl Output {
    ///
    /// Converts the `solc` JSON output into a convenient project.
    ///
    /// For the EVM legacy assembly pipeline, the dependency indexes are first
    /// replaced with full contract paths. Contracts missing the requested IR
    /// (optimized Yul or legacy assembly) are silently skipped.
    ///
    /// # Errors
    /// - when there are no contracts, in which case the `solc` errors are reported;
    /// - when the optimized Yul IR cannot be parsed;
    /// - when the source code for a contract path is missing.
    ///
    pub fn try_to_project(
        &mut self,
        source_code_files: BTreeMap<String, String>,
        libraries: BTreeMap<String, BTreeMap<String, String>>,
        pipeline: SolcPipeline,
        solc_version: &SolcVersion,
        debug_config: Option<&era_compiler_llvm_context::DebugConfig>,
    ) -> anyhow::Result<Project> {
        if let SolcPipeline::EVMLA = pipeline {
            self.preprocess_dependencies()?;
        }
        let files = match self.contracts.as_ref() {
            Some(files) => files,
            None => {
                // No contracts at all: surface the `solc` errors, if any.
                anyhow::bail!(
                    "{}",
                    self.errors
                        .as_ref()
                        .map(|errors| serde_json::to_string_pretty(errors).expect("Always valid"))
                        .unwrap_or_else(|| "Unknown project assembling error".to_owned())
                );
            }
        };
        let mut project_contracts = BTreeMap::new();
        for (path, contracts) in files.iter() {
            for (name, contract) in contracts.iter() {
                // Contracts are identified by their `<file path>:<name>` pair.
                let full_path = format!("{path}:{name}");
                let source = match pipeline {
                    SolcPipeline::Yul => {
                        let ir_optimized = match contract.ir_optimized.to_owned() {
                            Some(ir_optimized) => ir_optimized,
                            None => continue,
                        };
                        if ir_optimized.is_empty() {
                            continue;
                        }
                        if let Some(debug_config) = debug_config {
                            debug_config.dump_yul(full_path.as_str(), ir_optimized.as_str())?;
                        }
                        // Parse the optimized Yul IR text into an object tree.
                        let mut lexer = Lexer::new(ir_optimized.to_owned());
                        let object = Object::parse(&mut lexer, None).map_err(|error| {
                            anyhow::anyhow!("Contract `{}` parsing error: {:?}", full_path, error)
                        })?;
                        ProjectContractIR::new_yul(ir_optimized.to_owned(), object)
                    }
                    SolcPipeline::EVMLA => {
                        let evm = contract.evm.as_ref();
                        let assembly = match evm.and_then(|evm| evm.assembly.to_owned()) {
                            Some(assembly) => assembly.to_owned(),
                            None => continue,
                        };
                        let extra_metadata = evm
                            .and_then(|evm| evm.extra_metadata.to_owned())
                            .unwrap_or_default();
                        ProjectContractIR::new_evmla(assembly, extra_metadata)
                    }
                };
                let source_code = source_code_files
                    .get(path.as_str())
                    .ok_or_else(|| anyhow::anyhow!("Source code for path `{}` not found", path))?;
                // Keccak-256 hash of the source text; consumed by `ProjectContract::new`.
                let source_hash = sha3::Keccak256::digest(source_code.as_bytes()).into();
                let project_contract = ProjectContract::new(
                    full_path.clone(),
                    source_hash,
                    solc_version.to_owned(),
                    source,
                    contract.metadata.to_owned(),
                );
                project_contracts.insert(full_path, project_contract);
            }
        }
        Ok(Project::new(
            solc_version.to_owned(),
            project_contracts,
            libraries,
        ))
    }
///
/// Removes EVM artifacts to prevent their accidental usage.
///
pub fn remove_evm(&mut self) {
if let Some(files) = self.contracts.as_mut() {
for (_, file) in files.iter_mut() {
for (_, contract) in file.iter_mut() {
if let Some(evm) = contract.evm.as_mut() {
evm.bytecode = None;
}
}
}
}
}
///
/// Traverses the AST and returns the list of additional errors and warnings.
///
pub fn preprocess_ast(
&mut self,
version: &SolcVersion,
pipeline: SolcPipeline,
suppressed_warnings: &[Warning],
) -> anyhow::Result<()> {
let sources = match self.sources.as_ref() {
Some(sources) => sources,
None => return Ok(()),
};
let mut messages = Vec::new();
for (path, source) in sources.iter() {
if let Some(ast) = source.ast.as_ref() {
let mut eravm_messages =
Source::get_messages(ast, version, pipeline, suppressed_warnings);
for message in eravm_messages.iter_mut() {
message.push_contract_path(path.as_str());
}
messages.extend(eravm_messages);
}
}
self.errors = match self.errors.take() {
Some(mut errors) => {
errors.extend(messages);
Some(errors)
}
None => Some(messages),
};
Ok(())
}
///
/// The pass, which replaces with dependency indexes with actual data.
///
fn preprocess_dependencies(&mut self) -> anyhow::Result<()> {
let files = match self.contracts.as_mut() {
Some(files) => files,
None => return Ok(()),
};
let mut hash_path_mapping = BTreeMap::new();
for (path, contracts) in files.iter() {
for (name, contract) in contracts.iter() {
let full_path = format!("{path}:{name}");
let hash = match contract
.evm
.as_ref()
.and_then(|evm| evm.assembly.as_ref())
.map(|assembly| assembly.keccak256())
{
Some(hash) => hash,
None => continue,
};
hash_path_mapping.insert(hash, full_path);
}
}
for (path, contracts) in files.iter_mut() {
for (name, contract) in contracts.iter_mut() {
let assembly = match contract.evm.as_mut().and_then(|evm| evm.assembly.as_mut()) {
Some(assembly) => assembly,
None => continue,
};
let full_path = format!("{path}:{name}");
Self::preprocess_dependency_level(
full_path.as_str(),
assembly,
&hash_path_mapping,
)?;
}
}
Ok(())
}
    ///
    /// Preprocesses an assembly JSON structure dependency data map.
    ///
    /// Replaces the hash-based dependency aliases with full contract paths in
    /// both the deploy code and the runtime code of `assembly`, using the
    /// `hash_path_mapping` collected beforehand.
    ///
    fn preprocess_dependency_level(
        full_path: &str,
        assembly: &mut Assembly,
        hash_path_mapping: &BTreeMap<String, String>,
    ) -> anyhow::Result<()> {
        assembly.set_full_path(full_path.to_owned());
        // Resolve the deploy code dependencies.
        let deploy_code_index_path_mapping =
            assembly.deploy_dependencies_pass(full_path, hash_path_mapping)?;
        if let Some(deploy_code_instructions) = assembly.code.as_deref_mut() {
            Instruction::replace_data_aliases(
                deploy_code_instructions,
                &deploy_code_index_path_mapping,
            )?;
        };
        // Resolve the runtime code dependencies; the runtime assembly is
        // stored under the "0" key of the data map.
        let runtime_code_index_path_mapping =
            assembly.runtime_dependencies_pass(full_path, hash_path_mapping)?;
        if let Some(runtime_code_instructions) = assembly
            .data
            .as_mut()
            .and_then(|data_map| data_map.get_mut("0"))
            .and_then(|data| data.get_assembly_mut())
            .and_then(|assembly| assembly.code.as_deref_mut())
        {
            Instruction::replace_data_aliases(
                runtime_code_instructions,
                &runtime_code_index_path_mapping,
            )?;
        }
        Ok(())
    }
}
@@ -0,0 +1,265 @@
//!
//! The `solc --standard-json` output source.
//!
use serde::Deserialize;
use serde::Serialize;
use crate::solc::pipeline::Pipeline as SolcPipeline;
use crate::solc::standard_json::output::error::Error as SolcStandardJsonOutputError;
use crate::solc::version::Version as SolcVersion;
use crate::warning::Warning;
///
/// The `solc --standard-json` output source.
///
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Source {
    /// The source code ID.
    pub id: usize,
    /// The source code AST, if present in the `solc` output.
    pub ast: Option<serde_json::Value>,
}
impl Source {
///
/// Checks the AST node for the `ecrecover` function usage.
///
pub fn check_ecrecover(ast: &serde_json::Value) -> Option<SolcStandardJsonOutputError> {
let ast = ast.as_object()?;
if ast.get("nodeType")?.as_str()? != "FunctionCall" {
return None;
}
let expression = ast.get("expression")?.as_object()?;
if expression.get("nodeType")?.as_str()? != "Identifier" {
return None;
}
if expression.get("name")?.as_str()? != "ecrecover" {
return None;
}
Some(SolcStandardJsonOutputError::message_ecrecover(
ast.get("src")?.as_str(),
))
}
///
/// Checks the AST node for the `<address payable>`'s `send` and `transfer` methods usage.
///
pub fn check_send_and_transfer(ast: &serde_json::Value) -> Option<SolcStandardJsonOutputError> {
let ast = ast.as_object()?;
if ast.get("nodeType")?.as_str()? != "FunctionCall" {
return None;
}
let expression = ast.get("expression")?.as_object()?;
if expression.get("nodeType")?.as_str()? != "MemberAccess" {
return None;
}
let member_name = expression.get("memberName")?.as_str()?;
if member_name != "send" && member_name != "transfer" {
return None;
}
Some(SolcStandardJsonOutputError::message_send_and_transfer(
ast.get("src")?.as_str(),
))
}
///
/// Checks the AST node for the `extcodesize` assembly instruction usage.
///
pub fn check_assembly_extcodesize(
ast: &serde_json::Value,
) -> Option<SolcStandardJsonOutputError> {
let ast = ast.as_object()?;
if ast.get("nodeType")?.as_str()? != "YulFunctionCall" {
return None;
}
if ast
.get("functionName")?
.as_object()?
.get("name")?
.as_str()?
!= "extcodesize"
{
return None;
}
Some(SolcStandardJsonOutputError::message_extcodesize(
ast.get("src")?.as_str(),
))
}
///
/// Checks the AST node for the `origin` assembly instruction usage.
///
pub fn check_assembly_origin(ast: &serde_json::Value) -> Option<SolcStandardJsonOutputError> {
let ast = ast.as_object()?;
if ast.get("nodeType")?.as_str()? != "YulFunctionCall" {
return None;
}
if ast
.get("functionName")?
.as_object()?
.get("name")?
.as_str()?
!= "origin"
{
return None;
}
Some(SolcStandardJsonOutputError::message_tx_origin(
ast.get("src")?.as_str(),
))
}
///
/// Checks the AST node for the `tx.origin` value usage.
///
pub fn check_tx_origin(ast: &serde_json::Value) -> Option<SolcStandardJsonOutputError> {
let ast = ast.as_object()?;
if ast.get("nodeType")?.as_str()? != "MemberAccess" {
return None;
}
if ast.get("memberName")?.as_str()? != "origin" {
return None;
}
let expression = ast.get("expression")?.as_object()?;
if expression.get("nodeType")?.as_str()? != "Identifier" {
return None;
}
if expression.get("name")?.as_str()? != "tx" {
return None;
}
Some(SolcStandardJsonOutputError::message_tx_origin(
ast.get("src")?.as_str(),
))
}
///
/// Checks the AST node for the internal function pointers value usage.
///
pub fn check_internal_function_pointer(
ast: &serde_json::Value,
) -> Option<SolcStandardJsonOutputError> {
let ast = ast.as_object()?;
if ast.get("nodeType")?.as_str()? != "VariableDeclaration" {
return None;
}
let type_descriptions = ast.get("typeDescriptions")?.as_object()?;
if !type_descriptions
.get("typeIdentifier")?
.as_str()?
.contains("function_internal")
{
return None;
}
Some(
SolcStandardJsonOutputError::message_internal_function_pointer(
ast.get("src")?.as_str(),
),
)
}
///
/// Returns the list of messages for some specific parts of the AST.
///
pub fn get_messages(
ast: &serde_json::Value,
version: &SolcVersion,
pipeline: SolcPipeline,
suppressed_warnings: &[Warning],
) -> Vec<SolcStandardJsonOutputError> {
let mut messages = Vec::new();
if !suppressed_warnings.contains(&Warning::EcRecover) {
if let Some(message) = Self::check_ecrecover(ast) {
messages.push(message);
}
}
if !suppressed_warnings.contains(&Warning::SendTransfer) {
if let Some(message) = Self::check_send_and_transfer(ast) {
messages.push(message);
}
}
if !suppressed_warnings.contains(&Warning::ExtCodeSize) {
if let Some(message) = Self::check_assembly_extcodesize(ast) {
messages.push(message);
}
}
if !suppressed_warnings.contains(&Warning::TxOrigin) {
if let Some(message) = Self::check_assembly_origin(ast) {
messages.push(message);
}
if let Some(message) = Self::check_tx_origin(ast) {
messages.push(message);
}
}
if SolcPipeline::EVMLA == pipeline && version.l2_revision.is_none() {
if let Some(message) = Self::check_internal_function_pointer(ast) {
messages.push(message);
}
}
match ast {
serde_json::Value::Array(array) => {
for element in array.iter() {
messages.extend(Self::get_messages(
element,
version,
pipeline,
suppressed_warnings,
));
}
}
serde_json::Value::Object(object) => {
for (_key, value) in object.iter() {
messages.extend(Self::get_messages(
value,
version,
pipeline,
suppressed_warnings,
));
}
}
_ => {}
}
messages
}
///
/// Returns the name of the last contract.
///
pub fn last_contract_name(&self) -> anyhow::Result<String> {
self.ast
.as_ref()
.ok_or_else(|| anyhow::anyhow!("The AST is empty"))?
.get("nodes")
.and_then(|value| value.as_array())
.ok_or_else(|| {
anyhow::anyhow!("The last contract cannot be found in an empty list of nodes")
})?
.iter()
.filter_map(
|node| match node.get("nodeType").and_then(|node| node.as_str()) {
Some("ContractDefinition") => Some(node.get("name")?.as_str()?.to_owned()),
_ => None,
},
)
.last()
.ok_or_else(|| anyhow::anyhow!("The last contract not found in the AST"))
}
}
+47
View File
@@ -0,0 +1,47 @@
//!
//! The Solidity compiler version.
//!
use serde::Deserialize;
use serde::Serialize;
///
/// The Solidity compiler version.
///
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Version {
    /// The long version string.
    pub long: String,
    /// The short `semver`.
    pub default: semver::Version,
    /// The L2 revision additional versioning.
    // NOTE(review): presumably `None` for upstream `solc` builds — confirm.
    pub l2_revision: Option<semver::Version>,
}
impl Version {
    ///
    /// A shortcut constructor.
    ///
    pub fn new(
        long: String,
        default: semver::Version,
        l2_revision: Option<semver::Version>,
    ) -> Self {
        Self {
            long,
            default,
            l2_revision,
        }
    }

    ///
    /// A shortcut constructor for a simple version without an L2 revision.
    ///
    pub fn new_simple(version: semver::Version) -> Self {
        Self::new(version.to_string(), version, None)
    }
}