remove support for legacy evm assembly (#186)

This commit is contained in:
xermicus
2025-02-03 14:13:43 +01:00
committed by GitHub
parent ab90af49df
commit bfda465c32
70 changed files with 325 additions and 5927 deletions
+1
View File
@@ -8,6 +8,7 @@ This is a development pre-release.
- Support for the `coinbase` opcode.
### Changed
- Removed support for legacy EVM assembly (EVMLA) translation.
### Fixed
- Solidity: Add the solc `--libraries` files to sources.
Generated
-8
View File
@@ -4732,12 +4732,6 @@ dependencies = [
"rawpointer",
]
[[package]]
name = "md5"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
[[package]]
name = "memchr"
version = "2.7.4"
@@ -8350,7 +8344,6 @@ dependencies = [
"hex",
"inkwell",
"itertools 0.14.0",
"md5",
"num",
"polkavm-common 0.19.0",
"polkavm-disassembler",
@@ -8398,7 +8391,6 @@ dependencies = [
"hex",
"inkwell",
"libc",
"md5",
"mimalloc",
"num",
"once_cell",
-1
View File
@@ -42,7 +42,6 @@ once_cell = "1.19"
num = "0.4.3"
sha1 = "0.10"
sha3 = "0.10"
md5 = "0.7.0"
thiserror = "2.0"
which = "7.0"
path-slash = "0.2"
+1 -1
View File
@@ -3,7 +3,7 @@
# revive
YUL and EVM assembly recompiler to LLVM, targetting RISC-V on [PolkaVM](https://github.com/koute/polkavm).
YUL recompiler to LLVM, targetting RISC-V on [PolkaVM](https://github.com/koute/polkavm).
Visit [contracts.polkadot.io](https://contracts.polkadot.io) to learn more about contracts on Polkadot!
-6
View File
@@ -12,12 +12,6 @@ pub static EXTENSION_ABI: &str = "abi";
/// The Yul IR file extension.
pub static EXTENSION_YUL: &str = "yul";
/// The EVM legacy assembly IR file extension.
pub static EXTENSION_EVMLA: &str = "evmla";
/// The Ethereal IR file extension.
pub static EXTENSION_ETHIR: &str = "ethir";
/// The EVM file extension.
pub static EXTENSION_EVM: &str = "evm";
-2
View File
@@ -62,7 +62,6 @@ fn instantiate(path: &str, contract: &str) -> Vec<SpecsAction> {
path: Some(path.into()),
contract: contract.to_string(),
solc_optimizer: None,
pipeline: None,
},
data: vec![],
salt: OptionalHex::default(),
@@ -355,7 +354,6 @@ fn ext_code_size() {
path: Some("contracts/Baseline.sol".into()),
contract: "Baseline".to_string(),
solc_optimizer: None,
pipeline: None,
},
data: vec![],
salt: OptionalHex::from([0; 32]),
-1
View File
@@ -21,7 +21,6 @@ serde = { workspace = true, features = ["derive"] }
num = { workspace = true }
hex = { workspace = true }
sha3 = { workspace = true }
md5 = { workspace = true }
inkwell = { workspace = true }
polkavm-disassembler = { workspace = true }
polkavm-common = { workspace = true }
@@ -7,10 +7,6 @@
pub enum IRType {
/// Whether to dump the Yul code.
Yul,
/// Whether to dump the EVM legacy assembly code.
EVMLA,
/// Whether to dump the Ethereal IR code.
EthIR,
/// Whether to dump the LLVM IR code.
LLVM,
/// Whether to dump the assembly code.
@@ -27,8 +23,6 @@ impl IRType {
pub fn file_extension(&self) -> &'static str {
match self {
Self::Yul => revive_common::EXTENSION_YUL,
Self::EthIR => revive_common::EXTENSION_ETHIR,
Self::EVMLA => revive_common::EXTENSION_EVMLA,
Self::LLVM => revive_common::EXTENSION_LLVM_SOURCE,
Self::Assembly => revive_common::EXTENSION_POLKAVM_ASSEMBLY,
#[cfg(debug_assertions)]
@@ -39,30 +39,6 @@ impl DebugConfig {
Ok(())
}
/// Dumps the EVM legacy assembly IR.
pub fn dump_evmla(&self, contract_path: &str, code: &str) -> anyhow::Result<()> {
if let Some(output_directory) = self.output_directory.as_ref() {
let mut file_path = output_directory.to_owned();
let full_file_name = Self::full_file_name(contract_path, None, IRType::EVMLA);
file_path.push(full_file_name);
std::fs::write(file_path, code)?;
}
Ok(())
}
/// Dumps the Ethereal IR.
pub fn dump_ethir(&self, contract_path: &str, code: &str) -> anyhow::Result<()> {
if let Some(output_directory) = self.output_directory.as_ref() {
let mut file_path = output_directory.to_owned();
let full_file_name = Self::full_file_name(contract_path, None, IRType::EthIR);
file_path.push(full_file_name);
std::fs::write(file_path, code)?;
}
Ok(())
}
/// Dumps the unoptimized LLVM IR.
pub fn dump_llvm_ir_unoptimized(
&self,
-5
View File
@@ -17,12 +17,7 @@ pub use self::polkavm::context::attribute::Attribute as PolkaVMAttribute;
pub use self::polkavm::context::build::Build as PolkaVMBuild;
pub use self::polkavm::context::code_type::CodeType as PolkaVMCodeType;
pub use self::polkavm::context::debug_info::DebugInfo;
pub use self::polkavm::context::evmla_data::EVMLAData as PolkaVMContextEVMLAData;
pub use self::polkavm::context::function::block::evmla_data::key::Key as PolkaVMFunctionBlockKey;
pub use self::polkavm::context::function::block::evmla_data::EVMLAData as PolkaVMFunctionBlockEVMLAData;
pub use self::polkavm::context::function::block::Block as PolkaVMFunctionBlock;
pub use self::polkavm::context::function::declaration::Declaration as PolkaVMFunctionDeclaration;
pub use self::polkavm::context::function::evmla_data::EVMLAData as PolkaVMFunctionEVMLAData;
pub use self::polkavm::context::function::intrinsics::Intrinsics as PolkaVMIntrinsicFunction;
pub use self::polkavm::context::function::llvm_runtime::LLVMRuntime as PolkaVMLLVMRuntime;
pub use self::polkavm::context::function::r#return::Return as PolkaVMFunctionReturn;
@@ -1,27 +0,0 @@
//! The LLVM IR generator EVM legacy assembly data.
use crate::polkavm::context::argument::Argument;
/// The LLVM IR generator EVM legacy assembly data.
/// Describes some data that is only relevant to the EVM legacy assembly.
#[derive(Debug, Clone)]
pub struct EVMLAData<'ctx> {
/// The Solidity compiler version.
/// Some instruction behave differenly depending on the version.
pub version: semver::Version,
/// The static stack allocated for the current function.
pub stack: Vec<Argument<'ctx>>,
}
impl EVMLAData<'_> {
/// The default stack size.
pub const DEFAULT_STACK_SIZE: usize = 64;
/// A shortcut constructor.
pub fn new(version: semver::Version) -> Self {
Self {
version,
stack: Vec::with_capacity(Self::DEFAULT_STACK_SIZE),
}
}
}
@@ -1,34 +0,0 @@
//! The LLVM IR generator function block key.
use crate::polkavm::context::code_type::CodeType;
/// The LLVM IR generator function block key.
/// Is only relevant to the EVM legacy assembly.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Key {
/// The block code type.
pub code_type: CodeType,
/// The block tag.
pub tag: num::BigUint,
}
impl Key {
/// A shortcut constructor.
pub fn new(code_type: CodeType, tag: num::BigUint) -> Self {
Self { code_type, tag }
}
}
impl std::fmt::Display for Key {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}_{}",
match self.code_type {
CodeType::Deploy => "dt",
CodeType::Runtime => "rt",
},
self.tag
)
}
}
@@ -1,18 +0,0 @@
//! The LLVM function block EVM legacy assembly data.
pub mod key;
/// The LLVM function block EVM legacy assembly data.
/// Describes some data that is only relevant to the EVM legacy assembly.
#[derive(Debug, Clone)]
pub struct EVMLAData {
/// The initial hashes of the allowed stack states.
pub stack_hashes: Vec<md5::Digest>,
}
impl EVMLAData {
/// A shortcut constructor.
pub fn new(stack_hashes: Vec<md5::Digest>) -> Self {
Self { stack_hashes }
}
}
@@ -1,52 +0,0 @@
//! The LLVM IR generator function block.
pub mod evmla_data;
use self::evmla_data::EVMLAData;
/// The LLVM IR generator function block.
#[derive(Debug, Clone)]
pub struct Block<'ctx> {
/// The inner block.
inner: inkwell::basic_block::BasicBlock<'ctx>,
/// The EVM legacy assembly compiler data.
evmla_data: Option<EVMLAData>,
}
impl<'ctx> Block<'ctx> {
/// A shortcut constructor.
pub fn new(inner: inkwell::basic_block::BasicBlock<'ctx>) -> Self {
Self {
inner,
evmla_data: None,
}
}
/// Sets the EVM legacy assembly data.
pub fn set_evmla_data(&mut self, data: EVMLAData) {
self.evmla_data = Some(data);
}
/// The LLVM object reference.
pub fn inner(&self) -> inkwell::basic_block::BasicBlock<'ctx> {
self.inner
}
/// Returns the EVM data reference.
/// # Panics
/// If the EVM data has not been initialized.
pub fn evm(&self) -> &EVMLAData {
self.evmla_data
.as_ref()
.expect("The EVM data must have been initialized")
}
/// Returns the EVM data mutable reference.
/// # Panics
/// If the EVM data has not been initialized.
pub fn evm_mut(&mut self) -> &mut EVMLAData {
self.evmla_data
.as_mut()
.expect("The EVM data must have been initialized")
}
}
@@ -1,74 +0,0 @@
//! The LLVM function EVM legacy assembly data.
use std::collections::BTreeMap;
use crate::polkavm::context::function::block::evmla_data::key::Key as BlockKey;
use crate::polkavm::context::function::block::Block;
/// The LLVM function EVM legacy assembly data.
/// Describes some data that is only relevant to the EVM legacy assembly.
#[derive(Debug)]
pub struct EVMLAData<'ctx> {
/// The ordinary blocks with numeric tags.
/// Is only used by the Solidity EVM compiler.
pub blocks: BTreeMap<BlockKey, Vec<Block<'ctx>>>,
/// The function stack size.
pub stack_size: usize,
}
impl<'ctx> EVMLAData<'ctx> {
/// A shortcut constructor.
pub fn new(stack_size: usize) -> Self {
Self {
blocks: BTreeMap::new(),
stack_size,
}
}
/// Inserts a function block.
pub fn insert_block(&mut self, key: BlockKey, block: Block<'ctx>) {
if let Some(blocks) = self.blocks.get_mut(&key) {
blocks.push(block);
} else {
self.blocks.insert(key, vec![block]);
}
}
/// Returns the block with the specified tag and initial stack pattern.
/// If there is only one block, it is returned unconditionally.
pub fn find_block(
&self,
key: &BlockKey,
stack_hash: &md5::Digest,
) -> anyhow::Result<Block<'ctx>> {
if self
.blocks
.get(key)
.ok_or_else(|| anyhow::anyhow!("Undeclared function block {}", key))?
.len()
== 1
{
return self
.blocks
.get(key)
.ok_or_else(|| anyhow::anyhow!("Undeclared function block {}", key))?
.first()
.cloned()
.ok_or_else(|| anyhow::anyhow!("Undeclared function block {}", key));
}
self.blocks
.get(key)
.ok_or_else(|| anyhow::anyhow!("Undeclared function block {}", key))?
.iter()
.find(|block| {
block
.evm()
.stack_hashes
.iter()
.any(|hash| hash == stack_hash)
})
.cloned()
.ok_or_else(|| anyhow::anyhow!("Undeclared function block {}", key))
}
}
@@ -1,8 +1,6 @@
//! The LLVM IR generator function.
pub mod block;
pub mod declaration;
pub mod evmla_data;
pub mod intrinsics;
pub mod llvm_runtime;
pub mod r#return;
@@ -19,7 +17,6 @@ use crate::polkavm::context::attribute::Attribute;
use crate::polkavm::context::pointer::Pointer;
use self::declaration::Declaration;
use self::evmla_data::EVMLAData;
use self::r#return::Return;
use self::yul_data::YulData;
@@ -45,8 +42,6 @@ pub struct Function<'ctx> {
/// The Yul compiler data.
yul_data: Option<YulData>,
/// The EVM legacy assembly compiler data.
evmla_data: Option<EVMLAData<'ctx>>,
}
impl<'ctx> Function<'ctx> {
@@ -72,7 +67,6 @@ impl<'ctx> Function<'ctx> {
return_block,
yul_data: None,
evmla_data: None,
}
}
@@ -300,29 +294,6 @@ impl<'ctx> Function<'ctx> {
self.return_block
}
/// Sets the EVM legacy assembly data.
pub fn set_evmla_data(&mut self, data: EVMLAData<'ctx>) {
self.evmla_data = Some(data);
}
/// Returns the EVM legacy assembly data reference.
/// # Panics
/// If the EVM data has not been initialized.
pub fn evmla(&self) -> &EVMLAData<'ctx> {
self.evmla_data
.as_ref()
.expect("The EVM data must have been initialized")
}
/// Returns the EVM legacy assembly data mutable reference.
/// # Panics
/// If the EVM data has not been initialized.
pub fn evmla_mut(&mut self) -> &mut EVMLAData<'ctx> {
self.evmla_data
.as_mut()
.expect("The EVM data must have been initialized")
}
/// Sets the Yul data.
pub fn set_yul_data(&mut self, data: YulData) {
self.yul_data = Some(data);
@@ -6,7 +6,6 @@ pub mod attribute;
pub mod build;
pub mod code_type;
pub mod debug_info;
pub mod evmla_data;
pub mod function;
pub mod global;
pub mod r#loop;
@@ -38,7 +37,6 @@ use self::attribute::Attribute;
use self::build::Build;
use self::code_type::CodeType;
use self::debug_info::DebugInfo;
use self::evmla_data::EVMLAData;
use self::function::declaration::Declaration as FunctionDeclaration;
use self::function::intrinsics::Intrinsics;
use self::function::llvm_runtime::LLVMRuntime;
@@ -95,8 +93,6 @@ where
solidity_data: Option<SolidityData>,
/// The Yul data.
yul_data: Option<YulData>,
/// The EVM legacy assembly data.
evmla_data: Option<EVMLAData<'ctx>>,
}
impl<'ctx, D> Context<'ctx, D>
@@ -257,7 +253,6 @@ where
solidity_data: None,
yul_data: None,
evmla_data: None,
}
}
@@ -1574,29 +1569,6 @@ where
.expect("The Yul data must have been initialized")
}
/// Sets the EVM legacy assembly data.
pub fn set_evmla_data(&mut self, data: EVMLAData<'ctx>) {
self.evmla_data = Some(data);
}
/// Returns the EVM legacy assembly data reference.
/// # Panics
/// If the EVM data has not been initialized.
pub fn evmla(&self) -> &EVMLAData<'ctx> {
self.evmla_data
.as_ref()
.expect("The EVMLA data must have been initialized")
}
/// Returns the EVM legacy assembly data mutable reference.
/// # Panics
/// If the EVM data has not been initialized.
pub fn evmla_mut(&mut self) -> &mut EVMLAData<'ctx> {
self.evmla_data
.as_mut()
.expect("The EVMLA data must have been initialized")
}
/// Returns the current number of immutables values in the contract.
/// If the size is set manually, then it is returned. Otherwise, the number of elements in
/// the identifier-to-offset mapping tree is returned.
-3
View File
@@ -239,7 +239,6 @@ pub enum Code {
Solidity {
path: Option<std::path::PathBuf>,
solc_optimizer: Option<bool>,
pipeline: Option<revive_solidity::SolcPipeline>,
contract: String,
},
/// Read the contract blob from disk
@@ -264,7 +263,6 @@ impl From<Code> for pallet_revive::Code {
path,
contract,
solc_optimizer,
pipeline,
} => {
let Some(path) = path else {
panic!("Solidity source of contract '{contract}' missing path");
@@ -276,7 +274,6 @@ impl From<Code> for pallet_revive::Code {
&contract,
&source_code,
solc_optimizer.unwrap_or(true),
pipeline.unwrap_or(revive_solidity::SolcPipeline::Yul),
))
}
Code::Path(path) => pallet_revive::Code::Upload(std::fs::read(path).unwrap()),
-6
View File
@@ -282,17 +282,11 @@ impl Specs {
let Code::Solidity {
path: Some(path),
solc_optimizer,
pipeline,
contract,
} = code
else {
panic!("the differential runner requires Code::Solidity source");
};
assert_ne!(
pipeline,
Some(revive_solidity::SolcPipeline::EVMLA),
"yul pipeline must be enabled in differential mode"
);
assert!(
salt.0.is_none(),
"salt is not supported in differential mode"
-1
View File
@@ -33,7 +33,6 @@ regex = { workspace = true }
hex = { workspace = true }
num = { workspace = true }
sha3 = { workspace = true }
md5 = { workspace = true }
inkwell = { workspace = true }
revive-common = { workspace = true }
@@ -1,67 +0,0 @@
//! The inner JSON legacy assembly code element.
use std::collections::HashSet;
use serde::Deserialize;
use serde::Serialize;
use crate::evmla::assembly::Assembly;
/// The inner JSON legacy assembly code element.
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(untagged)]
pub enum Data {
/// The assembly code wrapper.
Assembly(Assembly),
/// The hash.
Hash(String),
/// The full contract path after the factory dependencies replacing pass.
Path(String),
}
impl Data {
/// Returns the inner assembly reference if it is present.
pub fn get_assembly(&self) -> Option<&Assembly> {
match self {
Self::Assembly(ref assembly) => Some(assembly),
Self::Hash(_) => None,
Self::Path(_) => None,
}
}
/// Returns the inner assembly mutable reference if it is present.
pub fn get_assembly_mut(&mut self) -> Option<&mut Assembly> {
match self {
Self::Assembly(ref mut assembly) => Some(assembly),
Self::Hash(_) => None,
Self::Path(_) => None,
}
}
/// Get the list of missing deployable libraries.
pub fn get_missing_libraries(&self) -> HashSet<String> {
match self {
Self::Assembly(assembly) => assembly.get_missing_libraries(),
Self::Hash(_) => HashSet::new(),
Self::Path(_) => HashSet::new(),
}
}
/// Gets the contract `keccak256` hash.
pub fn keccak256(&self) -> String {
match self {
Self::Assembly(assembly) => assembly.keccak256(),
Self::Hash(hash) => panic!("Expected assembly, found hash `{hash}`"),
Self::Path(path) => panic!("Expected assembly, found path `{path}`"),
}
}
}
impl std::fmt::Display for Data {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Assembly(inner) => writeln!(f, "{inner}"),
Self::Hash(inner) => writeln!(f, "Hash `{inner}`"),
Self::Path(inner) => writeln!(f, "Path `{inner}`"),
}
}
}
@@ -1,79 +0,0 @@
//! Translates the CODECOPY use cases.
/// Translates the contract hash copying.
pub fn contract_hash<'ctx, D>(
context: &mut revive_llvm_context::PolkaVMContext<'ctx, D>,
offset: inkwell::values::IntValue<'ctx>,
value: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<()>
where
D: revive_llvm_context::PolkaVMDependency + Clone,
{
let offset = context.builder().build_int_add(
offset,
context
.word_const((revive_common::BYTE_LENGTH_X32 + revive_common::BYTE_LENGTH_WORD) as u64),
"datacopy_contract_hash_offset",
)?;
revive_llvm_context::polkavm_evm_memory::store(context, offset, value)?;
Ok(())
}
/// Translates the library marker copying.
pub fn library_marker<D>(
context: &mut revive_llvm_context::PolkaVMContext<D>,
offset: u64,
value: u64,
) -> anyhow::Result<()>
where
D: revive_llvm_context::PolkaVMDependency + Clone,
{
revive_llvm_context::polkavm_evm_memory::store_byte(
context,
context.word_const(offset),
context.word_const(value),
)?;
Ok(())
}
/// Translates the static data copying.
pub fn static_data<'ctx, D>(
context: &mut revive_llvm_context::PolkaVMContext<'ctx, D>,
destination: inkwell::values::IntValue<'ctx>,
source: &str,
) -> anyhow::Result<()>
where
D: revive_llvm_context::PolkaVMDependency + Clone,
{
let mut offset = 0;
for (index, chunk) in source
.chars()
.collect::<Vec<char>>()
.chunks(revive_common::BYTE_LENGTH_WORD * 2)
.enumerate()
{
let mut value_string = chunk.iter().collect::<String>();
value_string.push_str(
"0".repeat((revive_common::BYTE_LENGTH_WORD * 2) - chunk.len())
.as_str(),
);
let datacopy_destination = context.builder().build_int_add(
destination,
context.word_const(offset as u64),
format!("datacopy_destination_index_{index}").as_str(),
)?;
let datacopy_value = context.word_const_str_hex(value_string.as_str());
revive_llvm_context::polkavm_evm_memory::store(
context,
datacopy_destination,
datacopy_value,
)?;
offset += chunk.len() / 2;
}
Ok(())
}
@@ -1,69 +0,0 @@
//! Translates the jump operations.
/// Translates the unconditional jump.
pub fn unconditional<D>(
context: &mut revive_llvm_context::PolkaVMContext<D>,
destination: num::BigUint,
stack_hash: md5::Digest,
) -> anyhow::Result<()>
where
D: revive_llvm_context::PolkaVMDependency + Clone,
{
let code_type = context
.code_type()
.ok_or_else(|| anyhow::anyhow!("The contract code part type is undefined"))?;
let block_key = revive_llvm_context::PolkaVMFunctionBlockKey::new(code_type, destination);
let block = context
.current_function()
.borrow()
.evmla()
.find_block(&block_key, &stack_hash)?;
context.build_unconditional_branch(block.inner());
Ok(())
}
/// Translates the conditional jump.
pub fn conditional<D>(
context: &mut revive_llvm_context::PolkaVMContext<D>,
destination: num::BigUint,
stack_hash: md5::Digest,
stack_height: usize,
) -> anyhow::Result<()>
where
D: revive_llvm_context::PolkaVMDependency + Clone,
{
let code_type = context
.code_type()
.ok_or_else(|| anyhow::anyhow!("The contract code part type is undefined"))?;
let block_key = revive_llvm_context::PolkaVMFunctionBlockKey::new(code_type, destination);
let condition_pointer = context.evmla().stack[stack_height]
.to_llvm()
.into_pointer_value();
let condition = context.build_load(
revive_llvm_context::PolkaVMPointer::new_stack_field(context, condition_pointer),
format!("conditional_{block_key}_condition").as_str(),
)?;
let condition = context.builder().build_int_compare(
inkwell::IntPredicate::NE,
condition.into_int_value(),
context.word_const(0),
format!("conditional_{block_key}_condition_compared").as_str(),
)?;
let then_block = context
.current_function()
.borrow()
.evmla()
.find_block(&block_key, &stack_hash)?;
let join_block =
context.append_basic_block(format!("conditional_{block_key}_join_block").as_str());
context.build_conditional_branch(condition, then_block.inner(), join_block)?;
context.set_basic_block(join_block);
Ok(())
}
@@ -1,382 +0,0 @@
//! The EVM instruction.
pub mod codecopy;
pub mod jump;
pub mod name;
pub mod stack;
use std::collections::BTreeMap;
use serde::Deserialize;
use serde::Serialize;
use self::name::Name;
/// The EVM instruction.
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct Instruction {
/// The opcode or tag identifier.
pub name: Name,
/// The optional value argument.
pub value: Option<String>,
/// The source code identifier.
#[serde(skip_serializing_if = "Option::is_none")]
pub source: Option<isize>,
/// The source code location begin.
pub begin: isize,
/// The source code location end.
pub end: isize,
}
impl Instruction {
/// Returns the number of input stack arguments.
pub const fn input_size(&self, version: &semver::Version) -> usize {
match self.name {
Name::POP => 1,
Name::JUMP => 1,
Name::JUMPI => 2,
Name::ADD => 2,
Name::SUB => 2,
Name::MUL => 2,
Name::DIV => 2,
Name::MOD => 2,
Name::SDIV => 2,
Name::SMOD => 2,
Name::LT => 2,
Name::GT => 2,
Name::EQ => 2,
Name::ISZERO => 1,
Name::SLT => 2,
Name::SGT => 2,
Name::OR => 2,
Name::XOR => 2,
Name::NOT => 1,
Name::AND => 2,
Name::SHL => 2,
Name::SHR => 2,
Name::SAR => 2,
Name::BYTE => 2,
Name::ADDMOD => 3,
Name::MULMOD => 3,
Name::EXP => 2,
Name::SIGNEXTEND => 2,
Name::SHA3 => 2,
Name::KECCAK256 => 2,
Name::MLOAD => 1,
Name::MSTORE => 2,
Name::MSTORE8 => 2,
Name::MCOPY => 3,
Name::SLOAD => 1,
Name::SSTORE => 2,
Name::TLOAD => 1,
Name::TSTORE => 2,
Name::PUSHIMMUTABLE => 0,
Name::ASSIGNIMMUTABLE => {
if version.minor >= 8 {
2
} else {
1
}
}
Name::CALLDATALOAD => 1,
Name::CALLDATACOPY => 3,
Name::CODECOPY => 3,
Name::RETURNDATACOPY => 3,
Name::EXTCODESIZE => 1,
Name::EXTCODEHASH => 1,
Name::CALL => 7,
Name::CALLCODE => 7,
Name::STATICCALL => 6,
Name::DELEGATECALL => 6,
Name::RETURN => 2,
Name::REVERT => 2,
Name::SELFDESTRUCT => 1,
Name::LOG0 => 2,
Name::LOG1 => 3,
Name::LOG2 => 4,
Name::LOG3 => 5,
Name::LOG4 => 6,
Name::CREATE => 3,
Name::CREATE2 => 4,
Name::ZK_CREATE => 3,
Name::ZK_CREATE2 => 4,
Name::BALANCE => 1,
Name::BLOCKHASH => 1,
Name::BLOBHASH => 1,
Name::EXTCODECOPY => 4,
Name::RecursiveCall { input_size, .. } => input_size,
Name::RecursiveReturn { input_size } => input_size,
_ => 0,
}
}
/// Returns the number of output stack arguments.
pub const fn output_size(&self) -> usize {
match self.name {
Name::PUSH => 1,
Name::PUSH_Data => 1,
Name::PUSH_Tag => 1,
Name::PUSH_ContractHash => 1,
Name::PUSH_ContractHashSize => 1,
Name::PUSHLIB => 1,
Name::PUSHDEPLOYADDRESS => 1,
Name::PUSH1 => 1,
Name::PUSH2 => 1,
Name::PUSH3 => 1,
Name::PUSH4 => 1,
Name::PUSH5 => 1,
Name::PUSH6 => 1,
Name::PUSH7 => 1,
Name::PUSH8 => 1,
Name::PUSH9 => 1,
Name::PUSH10 => 1,
Name::PUSH11 => 1,
Name::PUSH12 => 1,
Name::PUSH13 => 1,
Name::PUSH14 => 1,
Name::PUSH15 => 1,
Name::PUSH16 => 1,
Name::PUSH17 => 1,
Name::PUSH18 => 1,
Name::PUSH19 => 1,
Name::PUSH20 => 1,
Name::PUSH21 => 1,
Name::PUSH22 => 1,
Name::PUSH23 => 1,
Name::PUSH24 => 1,
Name::PUSH25 => 1,
Name::PUSH26 => 1,
Name::PUSH27 => 1,
Name::PUSH28 => 1,
Name::PUSH29 => 1,
Name::PUSH30 => 1,
Name::PUSH31 => 1,
Name::PUSH32 => 1,
Name::DUP1 => 1,
Name::DUP2 => 1,
Name::DUP3 => 1,
Name::DUP4 => 1,
Name::DUP5 => 1,
Name::DUP6 => 1,
Name::DUP7 => 1,
Name::DUP8 => 1,
Name::DUP9 => 1,
Name::DUP10 => 1,
Name::DUP11 => 1,
Name::DUP12 => 1,
Name::DUP13 => 1,
Name::DUP14 => 1,
Name::DUP15 => 1,
Name::DUP16 => 1,
Name::ADD => 1,
Name::SUB => 1,
Name::MUL => 1,
Name::DIV => 1,
Name::MOD => 1,
Name::SDIV => 1,
Name::SMOD => 1,
Name::LT => 1,
Name::GT => 1,
Name::EQ => 1,
Name::ISZERO => 1,
Name::SLT => 1,
Name::SGT => 1,
Name::OR => 1,
Name::XOR => 1,
Name::NOT => 1,
Name::AND => 1,
Name::SHL => 1,
Name::SHR => 1,
Name::SAR => 1,
Name::BYTE => 1,
Name::ADDMOD => 1,
Name::MULMOD => 1,
Name::EXP => 1,
Name::SIGNEXTEND => 1,
Name::SHA3 => 1,
Name::KECCAK256 => 1,
Name::MLOAD => 1,
Name::SLOAD => 1,
Name::TLOAD => 1,
Name::PUSHIMMUTABLE => 1,
Name::CALLDATALOAD => 1,
Name::CALLDATASIZE => 1,
Name::CODESIZE => 1,
Name::PUSHSIZE => 1,
Name::RETURNDATASIZE => 1,
Name::EXTCODESIZE => 1,
Name::EXTCODEHASH => 1,
Name::CALL => 1,
Name::CALLCODE => 1,
Name::STATICCALL => 1,
Name::DELEGATECALL => 1,
Name::CREATE => 1,
Name::CREATE2 => 1,
Name::ZK_CREATE => 1,
Name::ZK_CREATE2 => 1,
Name::ADDRESS => 1,
Name::CALLER => 1,
Name::TIMESTAMP => 1,
Name::NUMBER => 1,
Name::CALLVALUE => 1,
Name::GAS => 1,
Name::BALANCE => 1,
Name::SELFBALANCE => 1,
Name::GASLIMIT => 1,
Name::GASPRICE => 1,
Name::ORIGIN => 1,
Name::CHAINID => 1,
Name::BLOCKHASH => 1,
Name::BLOBHASH => 1,
Name::DIFFICULTY => 1,
Name::PREVRANDAO => 1,
Name::COINBASE => 1,
Name::MSIZE => 1,
Name::BASEFEE => 1,
Name::BLOBBASEFEE => 1,
Name::PC => 1,
Name::RecursiveCall { output_size, .. } => output_size,
_ => 0,
}
}
/// Replaces the instruction data aliases with the actual data.
pub fn replace_data_aliases(
instructions: &mut [Self],
mapping: &BTreeMap<String, String>,
) -> anyhow::Result<()> {
for instruction in instructions.iter_mut() {
match instruction {
Instruction {
name: Name::PUSH_ContractHash | Name::PUSH_ContractHashSize,
value: Some(value),
..
} => {
*value = mapping.get(value.as_str()).cloned().ok_or_else(|| {
anyhow::anyhow!("Contract alias `{}` data not found", value)
})?;
}
Instruction {
name: Name::PUSH_Data,
value: Some(value),
..
} => {
let mut key_extended =
"0".repeat(revive_common::BYTE_LENGTH_WORD * 2 - value.len());
key_extended.push_str(value.as_str());
*value = mapping.get(key_extended.as_str()).cloned().ok_or_else(|| {
anyhow::anyhow!("Data chunk alias `{}` data not found", key_extended)
})?;
}
_ => {}
}
}
Ok(())
}
/// Initializes an `INVALID` instruction to terminate an invalid unreachable block part.
pub fn invalid(previous: &Self) -> Self {
Self {
name: Name::INVALID,
value: None,
source: previous.source,
begin: previous.begin,
end: previous.end,
}
}
/// Initializes a recursive function `Call` instruction.
pub fn recursive_call(
name: String,
entry_key: revive_llvm_context::PolkaVMFunctionBlockKey,
stack_hash: md5::Digest,
input_size: usize,
output_size: usize,
return_address: revive_llvm_context::PolkaVMFunctionBlockKey,
previous: &Self,
) -> Self {
Self {
name: Name::RecursiveCall {
name,
entry_key,
stack_hash,
input_size,
output_size,
return_address,
},
value: None,
source: previous.source,
begin: previous.begin,
end: previous.end,
}
}
/// Initializes a recursive function `Return` instruction.
pub fn recursive_return(input_size: usize, previous: &Self) -> Self {
Self {
name: Name::RecursiveReturn { input_size },
value: None,
source: previous.source,
begin: previous.begin,
end: previous.end,
}
}
}
impl std::fmt::Display for Instruction {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let name = self.name.to_string();
match self.name {
Name::Tag => write!(f, "{:4}", name),
_ => write!(f, "{:15}", name),
}?;
match self.value {
Some(ref value) if value.len() <= 64 => write!(f, "{}", value)?,
Some(ref value) => write!(f, "... {}", &value[value.len() - 60..])?,
None => {}
}
Ok(())
}
}
@@ -1,417 +0,0 @@
//! The EVM instruction name.
use serde::Deserialize;
use serde::Serialize;
/// The EVM instruction name.
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)]
#[allow(non_camel_case_types)]
#[allow(clippy::upper_case_acronyms)]
pub enum Name {
/// The eponymous EVM instruction.
PUSH,
/// Pushes a constant tag index.
#[serde(rename = "PUSH [tag]")]
PUSH_Tag,
/// Pushes an unknown `data` value.
#[serde(rename = "PUSH data")]
PUSH_Data,
/// Pushes a contract hash size.
#[serde(rename = "PUSH #[$]")]
PUSH_ContractHashSize,
/// Pushes a contract hash.
#[serde(rename = "PUSH [$]")]
PUSH_ContractHash,
/// The eponymous EVM instruction.
PUSH1,
/// The eponymous EVM instruction.
PUSH2,
/// The eponymous EVM instruction.
PUSH3,
/// The eponymous EVM instruction.
PUSH4,
/// The eponymous EVM instruction.
PUSH5,
/// The eponymous EVM instruction.
PUSH6,
/// The eponymous EVM instruction.
PUSH7,
/// The eponymous EVM instruction.
PUSH8,
/// The eponymous EVM instruction.
PUSH9,
/// The eponymous EVM instruction.
PUSH10,
/// The eponymous EVM instruction.
PUSH11,
/// The eponymous EVM instruction.
PUSH12,
/// The eponymous EVM instruction.
PUSH13,
/// The eponymous EVM instruction.
PUSH14,
/// The eponymous EVM instruction.
PUSH15,
/// The eponymous EVM instruction.
PUSH16,
/// The eponymous EVM instruction.
PUSH17,
/// The eponymous EVM instruction.
PUSH18,
/// The eponymous EVM instruction.
PUSH19,
/// The eponymous EVM instruction.
PUSH20,
/// The eponymous EVM instruction.
PUSH21,
/// The eponymous EVM instruction.
PUSH22,
/// The eponymous EVM instruction.
PUSH23,
/// The eponymous EVM instruction.
PUSH24,
/// The eponymous EVM instruction.
PUSH25,
/// The eponymous EVM instruction.
PUSH26,
/// The eponymous EVM instruction.
PUSH27,
/// The eponymous EVM instruction.
PUSH28,
/// The eponymous EVM instruction.
PUSH29,
/// The eponymous EVM instruction.
PUSH30,
/// The eponymous EVM instruction.
PUSH31,
/// The eponymous EVM instruction.
PUSH32,
/// The eponymous EVM instruction.
DUP1,
/// The eponymous EVM instruction.
DUP2,
/// The eponymous EVM instruction.
DUP3,
/// The eponymous EVM instruction.
DUP4,
/// The eponymous EVM instruction.
DUP5,
/// The eponymous EVM instruction.
DUP6,
/// The eponymous EVM instruction.
DUP7,
/// The eponymous EVM instruction.
DUP8,
/// The eponymous EVM instruction.
DUP9,
/// The eponymous EVM instruction.
DUP10,
/// The eponymous EVM instruction.
DUP11,
/// The eponymous EVM instruction.
DUP12,
/// The eponymous EVM instruction.
DUP13,
/// The eponymous EVM instruction.
DUP14,
/// The eponymous EVM instruction.
DUP15,
/// The eponymous EVM instruction.
DUP16,
/// The eponymous EVM instruction.
SWAP1,
/// The eponymous EVM instruction.
SWAP2,
/// The eponymous EVM instruction.
SWAP3,
/// The eponymous EVM instruction.
SWAP4,
/// The eponymous EVM instruction.
SWAP5,
/// The eponymous EVM instruction.
SWAP6,
/// The eponymous EVM instruction.
SWAP7,
/// The eponymous EVM instruction.
SWAP8,
/// The eponymous EVM instruction.
SWAP9,
/// The eponymous EVM instruction.
SWAP10,
/// The eponymous EVM instruction.
SWAP11,
/// The eponymous EVM instruction.
SWAP12,
/// The eponymous EVM instruction.
SWAP13,
/// The eponymous EVM instruction.
SWAP14,
/// The eponymous EVM instruction.
SWAP15,
/// The eponymous EVM instruction.
SWAP16,
/// The eponymous EVM instruction.
POP,
/// Sets the current basic code block.
#[serde(rename = "tag")]
Tag,
/// The eponymous EVM instruction.
JUMP,
/// The eponymous EVM instruction.
JUMPI,
/// The eponymous EVM instruction.
JUMPDEST,
/// The eponymous EVM instruction.
ADD,
/// The eponymous EVM instruction.
SUB,
/// The eponymous EVM instruction.
MUL,
/// The eponymous EVM instruction.
DIV,
/// The eponymous EVM instruction.
MOD,
/// The eponymous EVM instruction.
SDIV,
/// The eponymous EVM instruction.
SMOD,
/// The eponymous EVM instruction.
LT,
/// The eponymous EVM instruction.
GT,
/// The eponymous EVM instruction.
EQ,
/// The eponymous EVM instruction.
ISZERO,
/// The eponymous EVM instruction.
SLT,
/// The eponymous EVM instruction.
SGT,
/// The eponymous EVM instruction.
OR,
/// The eponymous EVM instruction.
XOR,
/// The eponymous EVM instruction.
NOT,
/// The eponymous EVM instruction.
AND,
/// The eponymous EVM instruction.
SHL,
/// The eponymous EVM instruction.
SHR,
/// The eponymous EVM instruction.
SAR,
/// The eponymous EVM instruction.
BYTE,
/// The eponymous EVM instruction.
ADDMOD,
/// The eponymous EVM instruction.
MULMOD,
/// The eponymous EVM instruction.
EXP,
/// The eponymous EVM instruction.
SIGNEXTEND,
/// The eponymous EVM instruction.
SHA3,
/// The eponymous EVM instruction.
KECCAK256,
/// The eponymous EVM instruction.
MLOAD,
/// The eponymous EVM instruction.
MSTORE,
/// The eponymous EVM instruction.
MSTORE8,
/// The eponymous EVM instruction.
MCOPY,
/// The eponymous EVM instruction.
SLOAD,
/// The eponymous EVM instruction.
SSTORE,
/// The eponymous EVM instruction.
TLOAD,
/// The eponymous EVM instruction.
TSTORE,
/// The eponymous EVM instruction.
PUSHIMMUTABLE,
/// The eponymous EVM instruction.
ASSIGNIMMUTABLE,
/// The eponymous EVM instruction.
CALLDATALOAD,
/// The eponymous EVM instruction.
CALLDATASIZE,
/// The eponymous EVM instruction.
CALLDATACOPY,
/// The eponymous EVM instruction.
CODESIZE,
/// The eponymous EVM instruction.
CODECOPY,
/// The eponymous EVM instruction.
PUSHSIZE,
/// The eponymous EVM instruction.
EXTCODESIZE,
/// The eponymous EVM instruction.
EXTCODEHASH,
/// The eponymous EVM instruction.
RETURNDATASIZE,
/// The eponymous EVM instruction.
RETURNDATACOPY,
/// The eponymous EVM instruction.
RETURN,
/// The eponymous EVM instruction.
REVERT,
/// The eponymous EVM instruction.
STOP,
/// The eponymous EVM instruction.
INVALID,
/// The eponymous EVM instruction.
LOG0,
/// The eponymous EVM instruction.
LOG1,
/// The eponymous EVM instruction.
LOG2,
/// The eponymous EVM instruction.
LOG3,
/// The eponymous EVM instruction.
LOG4,
/// The eponymous EVM instruction.
CALL,
/// The eponymous EVM instruction.
STATICCALL,
/// The eponymous EVM instruction.
DELEGATECALL,
/// The eponymous EVM instruction.
CREATE,
/// The eponymous EVM instruction.
CREATE2,
/// The eponymous PolkaVM instruction.
#[serde(rename = "$ZK_CREATE")]
ZK_CREATE,
/// The eponymous PolkaVM instruction.
#[serde(rename = "$ZK_CREATE2")]
ZK_CREATE2,
/// The eponymous EVM instruction.
ADDRESS,
/// The eponymous EVM instruction.
CALLER,
/// The eponymous EVM instruction.
CALLVALUE,
/// The eponymous EVM instruction.
GAS,
/// The eponymous EVM instruction.
BALANCE,
/// The eponymous EVM instruction.
SELFBALANCE,
/// The eponymous EVM instruction.
PUSHLIB,
/// The eponymous EVM instruction.
PUSHDEPLOYADDRESS,
/// The eponymous EVM instruction.
GASLIMIT,
/// The eponymous EVM instruction.
GASPRICE,
/// The eponymous EVM instruction.
ORIGIN,
/// The eponymous EVM instruction.
CHAINID,
/// The eponymous EVM instruction.
TIMESTAMP,
/// The eponymous EVM instruction.
NUMBER,
/// The eponymous EVM instruction.
BLOCKHASH,
/// The eponymous EVM instruction.
BLOBHASH,
/// The eponymous EVM instruction.
DIFFICULTY,
/// The eponymous EVM instruction.
PREVRANDAO,
/// The eponymous EVM instruction.
COINBASE,
/// The eponymous EVM instruction.
BASEFEE,
/// The eponymous EVM instruction.
BLOBBASEFEE,
/// The eponymous EVM instruction.
MSIZE,
/// The eponymous EVM instruction.
CALLCODE,
/// The eponymous EVM instruction.
PC,
/// The eponymous EVM instruction.
EXTCODECOPY,
/// The eponymous EVM instruction.
SELFDESTRUCT,
/// The recursive function call instruction.
#[serde(skip)]
RecursiveCall {
/// The called function name.
name: String,
/// The called function key.
entry_key: revive_llvm_context::PolkaVMFunctionBlockKey,
/// The stack state hash after return.
stack_hash: md5::Digest,
/// The input size.
input_size: usize,
/// The output size.
output_size: usize,
/// The return address.
return_address: revive_llvm_context::PolkaVMFunctionBlockKey,
},
/// The recursive function return instruction.
#[serde(skip)]
RecursiveReturn {
/// The output size.
input_size: usize,
},
}
impl std::fmt::Display for Name {
    /// Formats the instruction name for IR dumps.
    ///
    /// The synthetic instructions (`Tag` and the recursive call/return pair) use
    /// a hand-written notation; every other variant reuses its serde identifier
    /// with the surrounding quotes stripped.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Tag => f.write_str("Tag"),
            Self::RecursiveCall {
                name,
                entry_key,
                input_size,
                output_size,
                return_address,
                ..
            } => write!(
                f,
                "RECURSIVE_CALL({}_{}, {}, {}, {})",
                name, entry_key, input_size, output_size, return_address
            ),
            Self::RecursiveReturn { input_size } => write!(f, "RECURSIVE_RETURN({})", input_size),
            other => {
                // Serialization of a unit variant yields `"NAME"`; strip the quotes.
                let serialized = serde_json::to_string(other).expect("Always valid");
                f.write_str(serialized.trim_matches('\"'))
            }
        }
    }
}
@@ -1,106 +0,0 @@
//! Translates the stack memory operations.
use inkwell::values::BasicValue;
/// Translates the ordinary value push.
///
/// The assembly emits hexadecimal immediates; the constant is materialized as a
/// word-sized LLVM integer.
pub fn push<'ctx, D>(
    context: &mut revive_llvm_context::PolkaVMContext<'ctx, D>,
    value: String,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
    D: revive_llvm_context::PolkaVMDependency + Clone,
{
    // Normalize the casing before handing the literal to the hexadecimal parser.
    let hexadecimal = value.to_ascii_uppercase();
    let constant = context
        .word_type()
        .const_int_from_string(
            hexadecimal.as_str(),
            inkwell::types::StringRadix::Hexadecimal,
        )
        .expect("Always valid");
    Ok(constant.as_basic_value_enum())
}
/// Translates the block tag label push.
///
/// Tags are decimal block identifiers; the value becomes a word-sized constant.
pub fn push_tag<'ctx, D>(
    context: &mut revive_llvm_context::PolkaVMContext<'ctx, D>,
    value: String,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
    D: revive_llvm_context::PolkaVMDependency + Clone,
{
    let tag = context
        .word_type()
        .const_int_from_string(value.as_str(), inkwell::types::StringRadix::Decimal)
        .expect("Always valid")
        .as_basic_value_enum();
    Ok(tag)
}
/// Translates the stack memory duplicate.
///
/// Loads the element `offset` positions below the top of the stack of height
/// `height`, and reports that element's `original` annotation back through
/// `original`.
pub fn dup<'ctx, D>(
    context: &mut revive_llvm_context::PolkaVMContext<'ctx, D>,
    offset: usize,
    height: usize,
    original: &mut Option<String>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
    D: revive_llvm_context::PolkaVMDependency + Clone,
{
    let source = &context.evmla().stack[height - offset - 1];
    let pointer = revive_llvm_context::PolkaVMPointer::new_stack_field(
        context,
        source.to_llvm().into_pointer_value(),
    );
    let value = context.build_load(pointer, format!("dup{offset}").as_str())?;
    source.original.clone_into(original);
    Ok(value)
}
/// Translates the stack memory swap.
///
/// Exchanges the runtime values stored in the top stack slot and the slot
/// `offset` positions below it, and swaps the compile-time `original`
/// annotations of the two elements to keep the stack model consistent.
pub fn swap<D>(
    context: &mut revive_llvm_context::PolkaVMContext<D>,
    offset: usize,
    height: usize,
) -> anyhow::Result<()>
where
    D: revive_llvm_context::PolkaVMDependency + Clone,
{
    // Load the current top-of-stack value.
    let top_element = context.evmla().stack[height - 1].to_owned();
    let top_pointer = revive_llvm_context::PolkaVMPointer::new_stack_field(
        context,
        top_element.to_llvm().into_pointer_value(),
    );
    let top_value = context.build_load(top_pointer, format!("swap{offset}_top_value").as_str())?;
    // Load the value `offset` slots below the top.
    let swap_element = context.evmla().stack[height - offset - 1].to_owned();
    let swap_pointer = revive_llvm_context::PolkaVMPointer::new_stack_field(
        context,
        swap_element.to_llvm().into_pointer_value(),
    );
    let swap_value =
        context.build_load(swap_pointer, format!("swap{offset}_swap_value").as_str())?;
    // Swap the `original` annotations before emitting the stores, so the
    // compile-time stack model matches the runtime state.
    swap_element
        .original
        .clone_into(&mut context.evmla_mut().stack[height - 1].original);
    top_element
        .original
        .clone_into(&mut context.evmla_mut().stack[height - offset - 1].original);
    // Store each loaded value into the other slot.
    context.build_store(top_pointer, swap_value)?;
    context.build_store(swap_pointer, top_value)?;
    Ok(())
}
/// Translates the stack memory pop.
///
/// No IR is emitted: the compile-time stack bookkeeping is maintained by the
/// caller, so removing the top element requires no runtime work.
pub fn pop<D>(_context: &mut revive_llvm_context::PolkaVMContext<D>) -> anyhow::Result<()>
where
    D: revive_llvm_context::PolkaVMDependency + Clone,
{
    Ok(())
}
-295
View File
@@ -1,295 +0,0 @@
//! The `solc --asm-json` output.
pub mod data;
pub mod instruction;
use std::collections::BTreeMap;
use std::collections::HashSet;
use serde::Deserialize;
use serde::Serialize;
use sha3::Digest;
use crate::evmla::ethereal_ir::entry_link::EntryLink;
use crate::evmla::ethereal_ir::EtherealIR;
use crate::solc::standard_json::output::contract::evm::extra_metadata::ExtraMetadata;
use self::data::Data;
use self::instruction::name::Name as InstructionName;
use self::instruction::Instruction;
/// The JSON assembly.
///
/// Deserialized from the `solc --asm-json` output. The deploy code lives in
/// `.code`; the runtime code and further dependencies live under `.data`.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Assembly {
    /// The metadata string.
    #[serde(rename = ".auxdata")]
    pub auxdata: Option<String>,
    /// The deploy code instructions.
    #[serde(rename = ".code")]
    pub code: Option<Vec<Instruction>>,
    /// The runtime code.
    /// Entry `"0"` holds the runtime code assembly; other entries are
    /// dependencies referenced by hash or path.
    #[serde(rename = ".data")]
    pub data: Option<BTreeMap<String, Data>>,
    /// The full contract path.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub full_path: Option<String>,
    /// The factory dependency paths.
    #[serde(default = "HashSet::new")]
    pub factory_dependencies: HashSet<String>,
    /// The EVMLA extra metadata.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub extra_metadata: Option<ExtraMetadata>,
}
impl Assembly {
/// Gets the contract `keccak256` hash.
pub fn keccak256(&self) -> String {
let json = serde_json::to_vec(self).expect("Always valid");
hex::encode(sha3::Keccak256::digest(json.as_slice()))
}
/// Sets the full contract path.
pub fn set_full_path(&mut self, full_path: String) {
self.full_path = Some(full_path);
}
/// Returns the full contract path if it is set, or `<undefined>` otherwise.
/// # Panics
/// If the `full_path` has not been set.
pub fn full_path(&self) -> &str {
self.full_path
.as_deref()
.unwrap_or_else(|| panic!("The full path of some contracts is unset"))
}
/// Get the list of missing deployable libraries.
pub fn get_missing_libraries(&self) -> HashSet<String> {
let mut missing_libraries = HashSet::new();
if let Some(code) = self.code.as_ref() {
for instruction in code.iter() {
if let InstructionName::PUSHLIB = instruction.name {
let library_path = instruction.value.to_owned().expect("Always exists");
missing_libraries.insert(library_path);
}
}
}
if let Some(data) = self.data.as_ref() {
for (_, data) in data.iter() {
missing_libraries.extend(data.get_missing_libraries());
}
}
missing_libraries
}
/// Replaces the deploy code dependencies with full contract path and returns the list.
pub fn deploy_dependencies_pass(
&mut self,
full_path: &str,
hash_data_mapping: &BTreeMap<String, String>,
) -> anyhow::Result<BTreeMap<String, String>> {
let mut index_path_mapping = BTreeMap::new();
let index = "0".repeat(revive_common::BYTE_LENGTH_WORD * 2);
index_path_mapping.insert(index, full_path.to_owned());
let dependencies = match self.data.as_mut() {
Some(dependencies) => dependencies,
None => return Ok(index_path_mapping),
};
for (index, data) in dependencies.iter_mut() {
if index == "0" {
continue;
}
let mut index_extended = "0".repeat(revive_common::BYTE_LENGTH_WORD * 2 - index.len());
index_extended.push_str(index.as_str());
*data = match data {
Data::Assembly(assembly) => {
let hash = assembly.keccak256();
let full_path =
hash_data_mapping
.get(hash.as_str())
.cloned()
.ok_or_else(|| {
anyhow::anyhow!("Contract path not found for hash `{}`", hash)
})?;
self.factory_dependencies.insert(full_path.to_owned());
index_path_mapping.insert(index_extended, full_path.clone());
Data::Path(full_path)
}
Data::Hash(hash) => {
index_path_mapping.insert(index_extended, hash.to_owned());
continue;
}
_ => continue,
};
}
Ok(index_path_mapping)
}
/// Replaces the runtime code dependencies with full contract path and returns the list.
pub fn runtime_dependencies_pass(
&mut self,
full_path: &str,
hash_data_mapping: &BTreeMap<String, String>,
) -> anyhow::Result<BTreeMap<String, String>> {
let mut index_path_mapping = BTreeMap::new();
let index = "0".repeat(revive_common::BYTE_LENGTH_WORD * 2);
index_path_mapping.insert(index, full_path.to_owned());
let dependencies = match self
.data
.as_mut()
.and_then(|data| data.get_mut("0"))
.and_then(|data| data.get_assembly_mut())
.and_then(|assembly| assembly.data.as_mut())
{
Some(dependencies) => dependencies,
None => return Ok(index_path_mapping),
};
for (index, data) in dependencies.iter_mut() {
let mut index_extended = "0".repeat(revive_common::BYTE_LENGTH_WORD * 2 - index.len());
index_extended.push_str(index.as_str());
*data = match data {
Data::Assembly(assembly) => {
let hash = assembly.keccak256();
let full_path =
hash_data_mapping
.get(hash.as_str())
.cloned()
.ok_or_else(|| {
anyhow::anyhow!("Contract path not found for hash `{}`", hash)
})?;
self.factory_dependencies.insert(full_path.to_owned());
index_path_mapping.insert(index_extended, full_path.clone());
Data::Path(full_path)
}
Data::Hash(hash) => {
index_path_mapping.insert(index_extended, hash.to_owned());
continue;
}
_ => continue,
};
}
Ok(index_path_mapping)
}
}
impl<D> revive_llvm_context::PolkaVMWriteLLVM<D> for Assembly
where
    D: revive_llvm_context::PolkaVMDependency + Clone,
{
    /// Declares the entry, deploy code, runtime code, and immutable data load
    /// functions; the entry function body is lowered immediately afterwards.
    fn declare(
        &mut self,
        context: &mut revive_llvm_context::PolkaVMContext<D>,
    ) -> anyhow::Result<()> {
        let mut entry = revive_llvm_context::PolkaVMEntryFunction::default();
        entry.declare(context)?;
        // The deploy and runtime wrappers are declared with dummy writable
        // bodies here; their real bodies are emitted in `into_llvm`.
        revive_llvm_context::PolkaVMDeployCodeFunction::new(
            revive_llvm_context::PolkaVMDummyLLVMWritable::default(),
        )
        .declare(context)?;
        revive_llvm_context::PolkaVMRuntimeCodeFunction::new(
            revive_llvm_context::PolkaVMDummyLLVMWritable::default(),
        )
        .declare(context)?;
        revive_llvm_context::PolkaVMImmutableDataLoadFunction.declare(context)?;
        entry.into_llvm(context)?;
        Ok(())
    }
    /// Translates the whole assembly: both code parts are flattened into a
    /// single Ethereal IR, lowered, and wired to the deploy/runtime entry links.
    fn into_llvm(
        mut self,
        context: &mut revive_llvm_context::PolkaVMContext<D>,
    ) -> anyhow::Result<()> {
        let full_path = self.full_path().to_owned();
        // Dump the deploy code assembly for debugging, if requested.
        context
            .debug_config()
            .dump_evmla(full_path.as_str(), self.to_string().as_str())?;
        let deploy_code_blocks = EtherealIR::get_blocks(
            context.evmla().version.to_owned(),
            revive_llvm_context::PolkaVMCodeType::Deploy,
            self.code
                .as_deref()
                .ok_or_else(|| anyhow::anyhow!("Deploy code instructions not found"))?,
        )?;
        // The runtime code lives in the `"0"` data entry.
        let data = self
            .data
            .ok_or_else(|| anyhow::anyhow!("Runtime code data not found"))?
            .remove("0")
            .expect("Always exists");
        context
            .debug_config()
            .dump_evmla(full_path.as_str(), data.to_string().as_str())?;
        let runtime_code_instructions = match data {
            Data::Assembly(assembly) => assembly
                .code
                .ok_or_else(|| anyhow::anyhow!("Runtime code instructions not found"))?,
            Data::Hash(hash) => {
                anyhow::bail!("Expected runtime code instructions, found hash `{}`", hash)
            }
            Data::Path(path) => {
                anyhow::bail!("Expected runtime code instructions, found path `{}`", path)
            }
        };
        let runtime_code_blocks = EtherealIR::get_blocks(
            context.evmla().version.to_owned(),
            revive_llvm_context::PolkaVMCodeType::Runtime,
            runtime_code_instructions.as_slice(),
        )?;
        let extra_metadata = self.extra_metadata.take().unwrap_or_default();
        // Both code parts share one Ethereal IR instance.
        let mut blocks = deploy_code_blocks;
        blocks.extend(runtime_code_blocks);
        let mut ethereal_ir =
            EtherealIR::new(context.evmla().version.to_owned(), extra_metadata, blocks)?;
        context
            .debug_config()
            .dump_ethir(full_path.as_str(), ethereal_ir.to_string().as_str())?;
        ethereal_ir.declare(context)?;
        ethereal_ir.into_llvm(context)?;
        // Emit the deploy/runtime wrappers as links into the shared entry function.
        revive_llvm_context::PolkaVMDeployCodeFunction::new(EntryLink::new(
            revive_llvm_context::PolkaVMCodeType::Deploy,
        ))
        .into_llvm(context)?;
        revive_llvm_context::PolkaVMRuntimeCodeFunction::new(EntryLink::new(
            revive_llvm_context::PolkaVMCodeType::Runtime,
        ))
        .into_llvm(context)?;
        revive_llvm_context::PolkaVMImmutableDataLoadFunction.into_llvm(context)?;
        Ok(())
    }
}
impl std::fmt::Display for Assembly {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
if let Some(instructions) = self.code.as_ref() {
for (index, instruction) in instructions.iter().enumerate() {
match instruction.name {
InstructionName::Tag => writeln!(f, "{index:03} {instruction}")?,
_ => writeln!(f, "{index:03} {instruction}")?,
}
}
}
Ok(())
}
}
@@ -1,48 +0,0 @@
//! The Ethereal IR entry function link.
use inkwell::values::BasicValue;
use crate::evmla::ethereal_ir::EtherealIR;
/// The Ethereal IR entry function link.
/// The link represents branching between the deploy and runtime code.
/// It is lowered as a call into the shared entry function with a boolean flag
/// selecting the code part.
#[derive(Debug, Clone)]
pub struct EntryLink {
    /// The code part type.
    pub code_type: revive_llvm_context::PolkaVMCodeType,
}
impl EntryLink {
    /// A shortcut constructor.
    /// Creates a link for the given code part.
    pub fn new(code_type: revive_llvm_context::PolkaVMCodeType) -> Self {
        Self { code_type }
    }
}
impl<D> revive_llvm_context::PolkaVMWriteLLVM<D> for EntryLink
where
    D: revive_llvm_context::PolkaVMDependency + Clone,
{
    /// Emits a call into the shared entry function, passing a boolean flag that
    /// selects between the deploy and the runtime code paths.
    fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext<D>) -> anyhow::Result<()> {
        let target = context
            .get_function(EtherealIR::DEFAULT_ENTRY_FUNCTION_NAME)
            .expect("Always exists")
            .borrow()
            .declaration();
        // Encode the code part as a boolean: 1 selects deploy code, 0 runtime code.
        let flag = match self.code_type {
            revive_llvm_context::PolkaVMCodeType::Deploy => 1,
            revive_llvm_context::PolkaVMCodeType::Runtime => 0,
        };
        let is_deploy_code = context
            .integer_type(revive_common::BIT_LENGTH_BOOLEAN)
            .const_int(flag, false);
        context.build_call(
            target,
            &[is_deploy_code.as_basic_value_enum()],
            format!("call_link_{}", EtherealIR::DEFAULT_ENTRY_FUNCTION_NAME).as_str(),
        );
        Ok(())
    }
}
File diff suppressed because it is too large Load Diff
@@ -1,37 +0,0 @@
//! The Ethereal IR block element stack element.
/// The Ethereal IR block element stack element.
///
/// Distinguishes values only known at runtime from data the compiler can
/// resolve statically (constants, tags, paths, data chunks, return addresses).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Element {
    /// The runtime value.
    Value(String),
    /// The compile-time value.
    Constant(num::BigUint),
    /// The compile-time destination tag.
    Tag(num::BigUint),
    /// The compile-time path.
    Path(String),
    /// The compile-time hexadecimal data chunk.
    Data(String),
    /// The recursive function return address.
    ReturnAddress(usize),
}
impl Element {
    /// A shortcut constructor for a runtime value element.
    pub fn value(identifier: String) -> Self {
        Self::Value(identifier)
    }
}
impl std::fmt::Display for Element {
    /// Renders the element in the short notation used by the IR dumps:
    /// `V_` prefixes runtime values, `T_` prefixes tags, constants print as
    /// hexadecimal, and return addresses print as an opaque marker.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Value(identifier) => write!(f, "V_{identifier}"),
            Self::Tag(tag) => write!(f, "T_{tag}"),
            Self::Constant(value) => write!(f, "{value:X}"),
            Self::Path(path) => write!(f, "{path}"),
            Self::Data(data) => write!(f, "{data}"),
            Self::ReturnAddress(_) => f.write_str("RETURN_ADDRESS"),
        }
    }
}
@@ -1,120 +0,0 @@
//! The Ethereal IR block element stack.
pub mod element;
use self::element::Element;
/// The Ethereal IR block element stack.
///
/// Models the EVM data stack at compile time; the bottom of the stack is the
/// front of `elements` and the top is the back.
#[derive(Debug, Default, Clone)]
pub struct Stack {
    /// The stack elements.
    pub elements: Vec<Element>,
}
impl Stack {
    /// The default stack size.
    pub const DEFAULT_STACK_SIZE: usize = 16;
    /// A shortcut constructor.
    pub fn new() -> Self {
        Self {
            elements: Vec::with_capacity(Self::DEFAULT_STACK_SIZE),
        }
    }
    /// A shortcut constructor.
    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            elements: Vec::with_capacity(capacity),
        }
    }
    /// A shortcut constructor.
    pub fn new_with_elements(elements: Vec<Element>) -> Self {
        Self { elements }
    }
    /// The stack state hash, which acts as a block identifier.
    /// Each block clone has its own initial stack state, which uniquely identifies the block.
    ///
    /// Only tags contribute their value; every other element is hashed as a
    /// single placeholder byte, since only the tag positions affect control flow.
    pub fn hash(&self) -> md5::Digest {
        let mut hash_context = md5::Context::new();
        for element in self.elements.iter() {
            match element {
                Element::Tag(tag) => hash_context.consume(tag.to_bytes_be()),
                _ => hash_context.consume([0]),
            }
        }
        hash_context.compute()
    }
    /// Pushes an element onto the stack.
    pub fn push(&mut self, element: Element) {
        self.elements.push(element);
    }
    /// Appends another stack on top of this one.
    pub fn append(&mut self, other: &mut Self) {
        self.elements.append(&mut other.elements);
    }
    /// Pops a stack element.
    ///
    /// # Errors
    /// Returns `Stack underflow` if the stack is empty.
    pub fn pop(&mut self) -> anyhow::Result<Element> {
        self.elements
            .pop()
            .ok_or_else(|| anyhow::anyhow!("Stack underflow"))
    }
    /// Pops the tag from the top.
    ///
    /// # Errors
    /// Returns an error if the stack is empty or the top element is not a tag.
    pub fn pop_tag(&mut self) -> anyhow::Result<num::BigUint> {
        match self.elements.pop() {
            Some(Element::Tag(tag)) => Ok(tag),
            Some(element) => anyhow::bail!("Expected tag, found {}", element),
            None => anyhow::bail!("Stack underflow"),
        }
    }
    /// Swaps the top stack element with the one `index` positions below it.
    ///
    /// # Errors
    /// Returns `Stack underflow` if the stack is too shallow.
    pub fn swap(&mut self, index: usize) -> anyhow::Result<()> {
        let length = self.elements.len();
        // `length <= index` is equivalent to the old `length < index + 1` check
        // without the (theoretical) `index + 1` overflow.
        if length <= index {
            anyhow::bail!("Stack underflow");
        }
        self.elements.swap(length - 1, length - 1 - index);
        Ok(())
    }
    /// Duplicates the stack element `index` positions from the top (1-based,
    /// i.e. `DUP1` duplicates the top element).
    ///
    /// # Errors
    /// Returns `Stack underflow` if `index` is zero or exceeds the stack depth
    /// (previously an index of zero caused an out-of-bounds panic).
    pub fn dup(&mut self, index: usize) -> anyhow::Result<Element> {
        let position = self
            .elements
            .len()
            .checked_sub(index)
            .ok_or_else(|| anyhow::anyhow!("Stack underflow"))?;
        self.elements
            .get(position)
            .cloned()
            .ok_or_else(|| anyhow::anyhow!("Stack underflow"))
    }
    /// Returns the stack length.
    pub fn len(&self) -> usize {
        self.elements.len()
    }
    /// Returns an emptiness flag.
    pub fn is_empty(&self) -> bool {
        self.elements.is_empty()
    }
}
impl std::fmt::Display for Stack {
    /// Renders the stack as a `[ a | b | c ]` listing, bottom to top.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let rendered: Vec<String> = self.elements.iter().map(Element::to_string).collect();
        write!(f, "[ {} ]", rendered.join(" | "))
    }
}
@@ -1,156 +0,0 @@
//! The Ethereal IR block.
pub mod element;
use std::collections::HashSet;
use num::Zero;
use crate::evmla::assembly::instruction::name::Name as InstructionName;
use crate::evmla::assembly::instruction::Instruction;
use self::element::stack::Stack as ElementStack;
use self::element::Element;
/// The Ethereal IR block.
///
/// A basic block of the flattened EVM legacy assembly: a (possibly tagged)
/// run of instructions ending at a terminator or at the next tag.
#[derive(Debug, Clone)]
pub struct Block {
    /// The Solidity compiler version.
    pub solc_version: semver::Version,
    /// The block key.
    pub key: revive_llvm_context::PolkaVMFunctionBlockKey,
    /// The block instance.
    pub instance: Option<usize>,
    /// The block elements relevant to the stack consistency.
    pub elements: Vec<Element>,
    /// The block predecessors.
    pub predecessors: HashSet<(revive_llvm_context::PolkaVMFunctionBlockKey, usize)>,
    /// The initial stack state.
    pub initial_stack: ElementStack,
    /// The stack.
    pub stack: ElementStack,
    /// The extra block hashes for alternative routes.
    pub extra_hashes: Vec<md5::Digest>,
}
impl Block {
    /// The elements vector initial capacity.
    pub const ELEMENTS_VECTOR_DEFAULT_CAPACITY: usize = 64;
    /// The predecessors hashset initial capacity.
    pub const PREDECESSORS_HASHSET_DEFAULT_CAPACITY: usize = 4;
    /// Assembles a block from the sequence of instructions.
    ///
    /// Returns the assembled block together with the number of instructions
    /// consumed from `slice`.
    ///
    /// # Panics
    /// If `slice` is empty; callers iterate while instructions remain, so a
    /// non-empty slice is always passed.
    pub fn try_from_instructions(
        solc_version: semver::Version,
        code_type: revive_llvm_context::PolkaVMCodeType,
        slice: &[Instruction],
    ) -> anyhow::Result<(Self, usize)> {
        let mut cursor = 0;
        // A block may start with a tag that names it; untagged blocks get tag 0.
        let tag: num::BigUint = match slice[cursor].name {
            InstructionName::Tag => {
                let tag = slice[cursor]
                    .value
                    .as_deref()
                    .expect("Always exists")
                    .parse()
                    .expect("Always valid");
                cursor += 1;
                tag
            }
            _ => num::BigUint::zero(),
        };
        let mut block = Self {
            solc_version: solc_version.clone(),
            key: revive_llvm_context::PolkaVMFunctionBlockKey::new(code_type, tag),
            instance: None,
            elements: Vec::with_capacity(Self::ELEMENTS_VECTOR_DEFAULT_CAPACITY),
            predecessors: HashSet::with_capacity(Self::PREDECESSORS_HASHSET_DEFAULT_CAPACITY),
            initial_stack: ElementStack::new(),
            stack: ElementStack::new(),
            extra_hashes: vec![],
        };
        // Instructions after an unconditional terminator are unreachable; they
        // are still consumed (to find the block end) but not added to the block.
        let mut dead_code = false;
        while cursor < slice.len() {
            if !dead_code {
                let element: Element = Element::new(solc_version.clone(), slice[cursor].to_owned());
                block.elements.push(element);
            }
            match slice[cursor].name {
                // The next tag starts a new block; do not consume it.
                InstructionName::Tag => break,
                // Unconditional terminators: everything up to the next tag is
                // dead code. (These arms previously had duplicated bodies.)
                InstructionName::RETURN
                | InstructionName::REVERT
                | InstructionName::STOP
                | InstructionName::INVALID
                | InstructionName::JUMP => {
                    cursor += 1;
                    dead_code = true;
                }
                _ => {
                    cursor += 1;
                }
            }
        }
        Ok((block, cursor))
    }
    /// Inserts a predecessor tag.
    pub fn insert_predecessor(
        &mut self,
        key: revive_llvm_context::PolkaVMFunctionBlockKey,
        instance: usize,
    ) {
        self.predecessors.insert((key, instance));
    }
}
impl<D> revive_llvm_context::PolkaVMWriteLLVM<D> for Block
where
    D: revive_llvm_context::PolkaVMDependency + Clone,
{
    /// Lowers every element of the block into LLVM IR under the block's code type.
    fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext<D>) -> anyhow::Result<()> {
        context.set_code_type(self.key.code_type);
        self.elements
            .into_iter()
            .try_for_each(|element| element.into_llvm(context))
    }
}
impl std::fmt::Display for Block {
    /// Prints the block header (key, instance, predecessors) followed by one
    /// indented line per element.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let predecessors = if self.predecessors.is_empty() {
            String::new()
        } else {
            let list = self
                .predecessors
                .iter()
                .map(|(key, instance)| format!("{}/{}", key, instance))
                .collect::<Vec<String>>()
                .join(", ");
            format!("(predecessors: {})", list)
        };
        writeln!(
            f,
            "block_{}/{}: {}",
            self.key,
            self.instance.unwrap_or_default(),
            predecessors,
        )?;
        for element in self.elements.iter() {
            writeln!(f, " {element}")?;
        }
        Ok(())
    }
}
File diff suppressed because it is too large Load Diff
@@ -1,29 +0,0 @@
//! The Ethereal IR block queue element.
use crate::evmla::ethereal_ir::function::block::element::stack::Stack;
/// The Ethereal IR block queue element.
///
/// A work-queue item used while traversing the control flow graph: identifies
/// the block to visit, where it was reached from, and the stack state at entry.
#[derive(Debug, Clone)]
pub struct QueueElement {
    /// The block key.
    pub block_key: revive_llvm_context::PolkaVMFunctionBlockKey,
    /// The block predecessor.
    pub predecessor: Option<(revive_llvm_context::PolkaVMFunctionBlockKey, usize)>,
    /// The predecessor's last stack state.
    pub stack: Stack,
}
impl QueueElement {
    /// A shortcut constructor.
    /// `predecessor` is `None` for entry blocks that have no known origin.
    pub fn new(
        block_key: revive_llvm_context::PolkaVMFunctionBlockKey,
        predecessor: Option<(revive_llvm_context::PolkaVMFunctionBlockKey, usize)>,
        stack: Stack,
    ) -> Self {
        Self {
            block_key,
            predecessor,
            stack,
        }
    }
}
@@ -1,41 +0,0 @@
//! The Ethereal IR function type.
/// The Ethereal IR function type.
#[derive(Debug, Clone)]
pub enum Type {
    /// The initial function, combining deploy and runtime code.
    Initial,
    /// The recursive function with a specific block starting its recursive context.
    Recursive {
        /// The function name.
        name: String,
        /// The function initial block key.
        block_key: revive_llvm_context::PolkaVMFunctionBlockKey,
        /// The size of stack input (in cells or 256-bit words).
        input_size: usize,
        /// The size of stack output (in cells or 256-bit words).
        output_size: usize,
    },
}
impl Type {
    /// A shortcut constructor.
    /// Creates the initial (all-inlined) function type.
    pub fn new_initial() -> Self {
        Self::Initial
    }
    /// A shortcut constructor.
    /// Creates a recursive function type starting at `block_key` with the given
    /// stack input/output sizes.
    pub fn new_recursive(
        name: String,
        block_key: revive_llvm_context::PolkaVMFunctionBlockKey,
        input_size: usize,
        output_size: usize,
    ) -> Self {
        Self::Recursive {
            name,
            block_key,
            input_size,
            output_size,
        }
    }
}
@@ -1,65 +0,0 @@
//! The Ethereal IR block visited element.
use std::cmp::Ordering;
/// The Ethereal IR block visited element.
///
/// Identifies a block clone: the block key plus the hash of the initial stack
/// state it was entered with.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct VisitedElement {
    /// The block key.
    pub block_key: revive_llvm_context::PolkaVMFunctionBlockKey,
    /// The initial stack state hash.
    pub stack_hash: md5::Digest,
}
impl VisitedElement {
    /// A shortcut constructor.
    pub fn new(
        block_key: revive_llvm_context::PolkaVMFunctionBlockKey,
        stack_hash: md5::Digest,
    ) -> Self {
        Self {
            block_key,
            stack_hash,
        }
    }
}
impl PartialOrd for VisitedElement {
    /// Delegates to the total order implemented by [`Ord`].
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for VisitedElement {
    /// Orders visited elements: all deploy-code blocks sort before runtime-code
    /// blocks; within one code type, elements are ordered by tag, then by the
    /// initial stack state hash bytes.
    ///
    /// Comparing the hash bytes (instead of returning `Ordering::Less` for ANY
    /// pair of unequal hashes, as before) makes the ordering total and
    /// antisymmetric, which the `BTreeSet`/`BTreeMap` containers storing these
    /// elements rely on for correct lookups.
    fn cmp(&self, other: &Self) -> Ordering {
        match (self.block_key.code_type, other.block_key.code_type) {
            (
                revive_llvm_context::PolkaVMCodeType::Deploy,
                revive_llvm_context::PolkaVMCodeType::Runtime,
            ) => Ordering::Less,
            (
                revive_llvm_context::PolkaVMCodeType::Runtime,
                revive_llvm_context::PolkaVMCodeType::Deploy,
            ) => Ordering::Greater,
            // Both deploy or both runtime: compare tags, then hashes.
            _ => self
                .block_key
                .tag
                .cmp(&other.block_key.tag)
                .then_with(|| self.stack_hash.0.cmp(&other.stack_hash.0)),
        }
    }
}
@@ -1,134 +0,0 @@
//! The Ethereal IR of the EVM bytecode.
pub mod entry_link;
pub mod function;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use crate::evmla::assembly::instruction::Instruction;
use crate::solc::standard_json::output::contract::evm::extra_metadata::ExtraMetadata;
use self::function::block::Block;
use self::function::r#type::Type as FunctionType;
use self::function::Function;
/// The Ethereal IR of the EVM bytecode.
/// The Ethereal IR (EthIR) is a special IR between the EVM legacy assembly and LLVM IR. It is
/// created to facilitate the translation and provide an additional environment for applying some
/// transformations, duplicating parts of the call and control flow graphs, tracking the
/// data flow, and a few more algorithms of static analysis.
/// The most important feature of EthIR is flattening the block tags and duplicating blocks for
/// each of initial states of the stack. The LLVM IR supports only static control flow, so the
/// stack state must be known all the way throughout the program.
#[derive(Debug)]
pub struct EtherealIR {
    /// The Solidity compiler version.
    pub solc_version: semver::Version,
    /// The EVMLA extra metadata.
    pub extra_metadata: ExtraMetadata,
    /// The all-inlined function.
    pub entry_function: Function,
    /// The recursive functions.
    /// Keyed by the block that starts each recursive context.
    pub recursive_functions: BTreeMap<revive_llvm_context::PolkaVMFunctionBlockKey, Function>,
}
impl EtherealIR {
    /// The default entry function name.
    pub const DEFAULT_ENTRY_FUNCTION_NAME: &'static str = "main";
    /// The blocks hashmap initial capacity.
    pub const BLOCKS_HASHMAP_DEFAULT_CAPACITY: usize = 64;
    /// Assembles a sequence of functions from the sequence of instructions.
    ///
    /// Traverses the control flow graph starting from the entry function,
    /// collecting the recursive functions discovered on the way.
    pub fn new(
        solc_version: semver::Version,
        extra_metadata: ExtraMetadata,
        blocks: HashMap<revive_llvm_context::PolkaVMFunctionBlockKey, Block>,
    ) -> anyhow::Result<Self> {
        let mut entry_function = Function::new(solc_version.clone(), FunctionType::new_initial());
        let mut recursive_functions = BTreeMap::new();
        let mut visited_functions = BTreeSet::new();
        entry_function.traverse(
            &blocks,
            &mut recursive_functions,
            &extra_metadata,
            &mut visited_functions,
        )?;
        Ok(Self {
            solc_version,
            extra_metadata,
            entry_function,
            recursive_functions,
        })
    }
    /// Gets blocks for the specified type of the contract code.
    ///
    /// Splits the flat instruction sequence into basic blocks and keys each by
    /// its `(code_type, tag)` pair.
    pub fn get_blocks(
        solc_version: semver::Version,
        code_type: revive_llvm_context::PolkaVMCodeType,
        instructions: &[Instruction],
    ) -> anyhow::Result<HashMap<revive_llvm_context::PolkaVMFunctionBlockKey, Block>> {
        let mut blocks = HashMap::with_capacity(Self::BLOCKS_HASHMAP_DEFAULT_CAPACITY);
        let mut offset = 0;
        while offset < instructions.len() {
            // Each call consumes one block's worth of instructions and reports
            // how many were consumed, advancing the offset.
            let (block, size) = Block::try_from_instructions(
                solc_version.clone(),
                code_type,
                &instructions[offset..],
            )?;
            blocks.insert(
                revive_llvm_context::PolkaVMFunctionBlockKey::new(code_type, block.key.tag.clone()),
                block,
            );
            offset += size;
        }
        Ok(blocks)
    }
}
impl<D> revive_llvm_context::PolkaVMWriteLLVM<D> for EtherealIR
where
    D: revive_llvm_context::PolkaVMDependency + Clone,
{
    /// Declares the entry function and every recursive function ahead of lowering.
    fn declare(
        &mut self,
        context: &mut revive_llvm_context::PolkaVMContext<D>,
    ) -> anyhow::Result<()> {
        self.entry_function.declare(context)?;
        for function in self.recursive_functions.values_mut() {
            function.declare(context)?;
        }
        Ok(())
    }
    /// Lowers the entry function and then each recursive function into LLVM IR,
    /// starting from an empty EVM legacy assembly stack model.
    fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext<D>) -> anyhow::Result<()> {
        context.evmla_mut().stack = vec![];
        self.entry_function.into_llvm(context)?;
        for function in self.recursive_functions.into_values() {
            function.into_llvm(context)?;
        }
        Ok(())
    }
}
impl std::fmt::Display for EtherealIR {
    /// Prints the entry function followed by each recursive function.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        writeln!(f, "{}", self.entry_function)?;
        for function in self.recursive_functions.values() {
            writeln!(f, "{}", function)?;
        }
        Ok(())
    }
}
-4
View File
@@ -1,4 +0,0 @@
//! The EVM legacy assembly compiling tools.
pub mod assembly;
pub mod ethereal_ir;
+8 -39
View File
@@ -2,7 +2,6 @@
pub(crate) mod build;
pub(crate) mod r#const;
pub(crate) mod evmla;
pub(crate) mod missing_libraries;
pub(crate) mod process;
pub(crate) mod project;
@@ -26,7 +25,6 @@ pub use self::project::Project;
pub use self::r#const::*;
pub use self::solc::combined_json::contract::Contract as SolcCombinedJsonContract;
pub use self::solc::combined_json::CombinedJson as SolcCombinedJson;
pub use self::solc::pipeline::Pipeline as SolcPipeline;
#[cfg(not(target_os = "emscripten"))]
pub use self::solc::solc_compiler::SolcCompiler;
#[cfg(target_os = "emscripten")]
@@ -119,7 +117,6 @@ pub fn standard_output<T: Compiler>(
evm_version: Option<revive_common::EVMVersion>,
solc_optimizer_enabled: bool,
optimizer_settings: revive_llvm_context::OptimizerSettings,
force_evmla: bool,
include_metadata_hash: bool,
base_path: Option<String>,
include_paths: Vec<String>,
@@ -129,7 +126,6 @@ pub fn standard_output<T: Compiler>(
debug_config: revive_llvm_context::DebugConfig,
) -> anyhow::Result<Build> {
let solc_version = solc.version()?;
let solc_pipeline = SolcPipeline::new(&solc_version, force_evmla);
let solc_input = SolcStandardJsonInput::try_from_paths(
SolcStandardJsonInputLanguage::Solidity,
@@ -137,7 +133,7 @@ pub fn standard_output<T: Compiler>(
input_files,
libraries,
remappings,
SolcStandardJsonInputSettingsSelection::new_required(solc_pipeline),
SolcStandardJsonInputSettingsSelection::new_required(),
SolcStandardJsonInputSettingsOptimizer::new(
solc_optimizer_enabled,
None,
@@ -145,7 +141,6 @@ pub fn standard_output<T: Compiler>(
optimizer_settings.is_fallback_to_size_enabled(),
),
None,
solc_pipeline == SolcPipeline::Yul,
suppressed_warnings,
)?;
@@ -156,13 +151,7 @@ pub fn standard_output<T: Compiler>(
.collect();
let libraries = solc_input.settings.libraries.clone().unwrap_or_default();
let mut solc_output = solc.standard_json(
solc_input,
solc_pipeline,
base_path,
include_paths,
allow_paths,
)?;
let mut solc_output = solc.standard_json(solc_input, base_path, include_paths, allow_paths)?;
if let Some(errors) = solc_output.errors.as_deref() {
let mut has_errors = false;
@@ -180,13 +169,8 @@ pub fn standard_output<T: Compiler>(
}
}
let project = solc_output.try_to_project(
source_code_files,
libraries,
solc_pipeline,
&solc_version,
&debug_config,
)?;
let project =
solc_output.try_to_project(source_code_files, libraries, &solc_version, &debug_config)?;
let build = project.compile(optimizer_settings, include_metadata_hash, debug_config)?;
@@ -198,16 +182,14 @@ pub fn standard_output<T: Compiler>(
pub fn standard_json<T: Compiler>(
solc: &mut T,
detect_missing_libraries: bool,
force_evmla: bool,
base_path: Option<String>,
include_paths: Vec<String>,
allow_paths: Option<String>,
debug_config: revive_llvm_context::DebugConfig,
) -> anyhow::Result<()> {
let solc_version = solc.version()?;
let solc_pipeline = SolcPipeline::new(&solc_version, force_evmla);
let solc_input = SolcStandardJsonInput::try_from_stdin(solc_pipeline)?;
let solc_input = SolcStandardJsonInput::try_from_stdin()?;
let source_code_files = solc_input
.sources
.iter()
@@ -225,13 +207,7 @@ pub fn standard_json<T: Compiler>(
};
let libraries = solc_input.settings.libraries.clone().unwrap_or_default();
let mut solc_output = solc.standard_json(
solc_input,
solc_pipeline,
base_path,
include_paths,
allow_paths,
)?;
let mut solc_output = solc.standard_json(solc_input, base_path, include_paths, allow_paths)?;
if let Some(errors) = solc_output.errors.as_deref() {
for error in errors.iter() {
@@ -242,13 +218,8 @@ pub fn standard_json<T: Compiler>(
}
}
let project = solc_output.try_to_project(
source_code_files,
libraries,
solc_pipeline,
&solc_version,
&debug_config,
)?;
let project =
solc_output.try_to_project(source_code_files, libraries, &solc_version, &debug_config)?;
if detect_missing_libraries {
let missing_libraries = project.get_missing_libraries();
@@ -271,7 +242,6 @@ pub fn combined_json<T: Compiler>(
evm_version: Option<revive_common::EVMVersion>,
solc_optimizer_enabled: bool,
optimizer_settings: revive_llvm_context::OptimizerSettings,
force_evmla: bool,
include_metadata_hash: bool,
base_path: Option<String>,
include_paths: Vec<String>,
@@ -289,7 +259,6 @@ pub fn combined_json<T: Compiler>(
evm_version,
solc_optimizer_enabled,
optimizer_settings,
force_evmla,
include_metadata_hash,
base_path,
include_paths,
@@ -1,47 +0,0 @@
//! The contract EVM legacy assembly source code.
use std::collections::HashSet;
use serde::Deserialize;
use serde::Serialize;
use crate::evmla::assembly::Assembly;
use crate::solc::standard_json::output::contract::evm::extra_metadata::ExtraMetadata;
/// The contract EVM legacy assembly source code.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[allow(non_camel_case_types)]
#[allow(clippy::upper_case_acronyms)]
pub struct EVMLA {
/// The EVM legacy assembly source code.
pub assembly: Assembly,
}
impl EVMLA {
/// A shortcut constructor.
pub fn new(mut assembly: Assembly, extra_metadata: ExtraMetadata) -> Self {
assembly.extra_metadata = Some(extra_metadata);
Self { assembly }
}
/// Get the list of missing deployable libraries.
pub fn get_missing_libraries(&self) -> HashSet<String> {
self.assembly.get_missing_libraries()
}
}
impl<D> revive_llvm_context::PolkaVMWriteLLVM<D> for EVMLA
where
D: revive_llvm_context::PolkaVMDependency + Clone,
{
fn declare(
&mut self,
context: &mut revive_llvm_context::PolkaVMContext<D>,
) -> anyhow::Result<()> {
self.assembly.declare(context)
}
fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext<D>) -> anyhow::Result<()> {
self.assembly.into_llvm(context)
}
}
@@ -1,6 +1,5 @@
//! The contract source code.
pub mod evmla;
pub mod llvm_ir;
pub mod yul;
@@ -9,11 +8,8 @@ use std::collections::HashSet;
use serde::Deserialize;
use serde::Serialize;
use crate::evmla::assembly::Assembly;
use crate::solc::standard_json::output::contract::evm::extra_metadata::ExtraMetadata;
use crate::yul::parser::statement::object::Object;
use self::evmla::EVMLA;
use self::llvm_ir::LLVMIR;
use self::yul::Yul;
@@ -25,8 +21,6 @@ use self::yul::Yul;
pub enum IR {
/// The Yul source code.
Yul(Yul),
/// The EVM legacy assembly source code.
EVMLA(EVMLA),
/// The LLVM IR source code.
LLVMIR(LLVMIR),
}
@@ -37,11 +31,6 @@ impl IR {
Self::Yul(Yul::new(source_code, object))
}
/// A shortcut constructor.
pub fn new_evmla(assembly: Assembly, extra_metadata: ExtraMetadata) -> Self {
Self::EVMLA(EVMLA::new(assembly, extra_metadata))
}
/// A shortcut constructor.
pub fn new_llvm_ir(path: String, source: String) -> Self {
Self::LLVMIR(LLVMIR::new(path, source))
@@ -51,7 +40,6 @@ impl IR {
pub fn get_missing_libraries(&self) -> HashSet<String> {
match self {
Self::Yul(inner) => inner.get_missing_libraries(),
Self::EVMLA(inner) => inner.get_missing_libraries(),
Self::LLVMIR(_inner) => HashSet::new(),
}
}
@@ -67,7 +55,6 @@ where
) -> anyhow::Result<()> {
match self {
Self::Yul(inner) => inner.declare(context),
Self::EVMLA(inner) => inner.declare(context),
Self::LLVMIR(_inner) => Ok(()),
}
}
@@ -75,7 +62,6 @@ where
fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext<D>) -> anyhow::Result<()> {
match self {
Self::Yul(inner) => inner.into_llvm(context),
Self::EVMLA(inner) => inner.into_llvm(context),
Self::LLVMIR(_inner) => Ok(()),
}
}
@@ -54,12 +54,10 @@ impl Contract {
/// Returns the contract identifier, which is:
/// - the Yul object identifier for Yul
/// - the full contract path for EVM legacy assembly
/// - the module name for LLVM IR
pub fn identifier(&self) -> &str {
match self.ir {
IR::Yul(ref yul) => yul.object.identifier.as_str(),
IR::EVMLA(ref evm) => evm.assembly.full_path(),
IR::LLVMIR(ref llvm_ir) => llvm_ir.path.as_str(),
}
}
@@ -68,7 +66,6 @@ impl Contract {
pub fn drain_factory_dependencies(&mut self) -> HashSet<String> {
match self.ir {
IR::Yul(ref mut yul) => yul.object.factory_dependencies.drain().collect(),
IR::EVMLA(ref mut evm) => evm.assembly.factory_dependencies.drain().collect(),
IR::LLVMIR(_) => HashSet::new(),
}
}
@@ -129,10 +126,6 @@ impl Contract {
IR::Yul(_) => {
context.set_yul_data(Default::default());
}
IR::EVMLA(_) => {
let evmla_data = revive_llvm_context::PolkaVMContextEVMLAData::new(version.default);
context.set_evmla_data(evmla_data);
}
IR::LLVMIR(_) => {}
}
-10
View File
@@ -113,12 +113,6 @@ pub struct Arguments {
#[structopt(long = "llvm-ir")]
pub llvm_ir: bool,
/// Forcibly switch to EVM legacy assembly pipeline.
/// It is useful for older revisions of `solc` 0.8, where Yul was considered highly experimental
/// and contained more bugs than today.
#[structopt(long = "force-evmla")]
pub force_evmla: bool,
/// Set metadata hash mode.
/// The only supported value is `none` that disables appending the metadata hash.
/// Is enabled by default.
@@ -248,10 +242,6 @@ impl Arguments {
);
}
if self.force_evmla {
anyhow::bail!("EVM legacy assembly mode is not supported in Yul, LLVM IR and PolkaVM assembly modes.");
}
if self.disable_solc_optimizer {
anyhow::bail!("Disabling the solc optimizer is not supported in Yul, LLVM IR and PolkaVM assembly modes.");
}
-3
View File
@@ -157,7 +157,6 @@ fn main_inner() -> anyhow::Result<()> {
revive_solidity::standard_json(
&mut solc,
arguments.detect_missing_libraries,
arguments.force_evmla,
arguments.base_path,
arguments.include_paths,
arguments.allow_paths,
@@ -173,7 +172,6 @@ fn main_inner() -> anyhow::Result<()> {
evm_version,
!arguments.disable_solc_optimizer,
optimizer_settings,
arguments.force_evmla,
include_metadata_hash,
arguments.base_path,
arguments.include_paths,
@@ -193,7 +191,6 @@ fn main_inner() -> anyhow::Result<()> {
evm_version,
!arguments.disable_solc_optimizer,
optimizer_settings,
arguments.force_evmla,
include_metadata_hash,
arguments.base_path,
arguments.include_paths,
+3 -15
View File
@@ -1,7 +1,6 @@
//! The Solidity compiler.
pub mod combined_json;
pub mod pipeline;
#[cfg(not(target_os = "emscripten"))]
pub mod solc_compiler;
#[cfg(target_os = "emscripten")]
@@ -9,35 +8,25 @@ pub mod soljson_compiler;
pub mod standard_json;
pub mod version;
use once_cell::sync::Lazy;
use semver::VersionReq;
use std::path::Path;
use std::path::PathBuf;
use self::combined_json::CombinedJson;
use self::pipeline::Pipeline;
use self::standard_json::input::Input as StandardJsonInput;
use self::standard_json::output::Output as StandardJsonOutput;
use self::version::Version;
/// The first version of `solc` with the support of standard JSON interface.
pub const FIRST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 4, 12);
/// The first version of `solc`, where Yul codegen is considered robust enough.
pub const FIRST_YUL_VERSION: semver::Version = semver::Version::new(0, 8, 0);
pub const FIRST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 8, 0);
/// The first version of `solc`, where `--via-ir` codegen mode is supported.
pub const FIRST_VIA_IR_VERSION: semver::Version = semver::Version::new(0, 8, 13);
/// The last supported version of `solc`.
pub const LAST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 8, 28);
/// `--base-path` was introduced in 0.6.9 <https://github.com/ethereum/solidity/releases/tag/v0.6.9>
pub static FIRST_SUPPORTS_BASE_PATH: Lazy<VersionReq> =
Lazy::new(|| VersionReq::parse(">=0.6.9").unwrap());
/// `--include-path` was introduced in 0.8.8 <https://github.com/ethereum/solidity/releases/tag/v0.8.8>
pub static FIRST_SUPPORTS_INCLUDE_PATH: Lazy<VersionReq> =
Lazy::new(|| VersionReq::parse(">=0.8.8").unwrap());
/// `--include-path` was introduced in solc `0.8.8` <https://github.com/ethereum/solidity/releases/tag/v0.8.8>
pub const FIRST_INCLUDE_PATH_VERSION: semver::Version = semver::Version::new(0, 8, 8);
/// The Solidity compiler.
pub trait Compiler {
@@ -45,7 +34,6 @@ pub trait Compiler {
fn standard_json(
&mut self,
input: StandardJsonInput,
pipeline: Pipeline,
base_path: Option<String>,
include_paths: Vec<String>,
allow_paths: Option<String>,
-27
View File
@@ -1,27 +0,0 @@
//! The Solidity compiler pipeline type.
use serde::{Deserialize, Serialize};
use crate::solc::version::Version as SolcVersion;
/// The Solidity compiler pipeline type.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[allow(non_camel_case_types)]
#[allow(clippy::upper_case_acronyms)]
pub enum Pipeline {
/// The Yul IR.
Yul,
/// The EVM legacy assembly IR.
EVMLA,
}
impl Pipeline {
/// We always use EVMLA for Solidity <=0.7, or if the user does not want to compile via Yul.
pub fn new(solc_version: &SolcVersion, force_evmla: bool) -> Self {
if solc_version.default < crate::solc::FIRST_YUL_VERSION || force_evmla {
Self::EVMLA
} else {
Self::Yul
}
}
}
+12 -23
View File
@@ -5,13 +5,12 @@ use std::path::Path;
use std::path::PathBuf;
use crate::solc::combined_json::CombinedJson;
use crate::solc::pipeline::Pipeline;
use crate::solc::standard_json::input::Input as StandardJsonInput;
use crate::solc::standard_json::output::Output as StandardJsonOutput;
use crate::solc::version::Version;
use crate::solc::FIRST_INCLUDE_PATH_VERSION;
use super::Compiler;
use crate::solc::{FIRST_SUPPORTS_BASE_PATH, FIRST_SUPPORTS_INCLUDE_PATH};
/// The Solidity compiler.
pub struct SolcCompiler {
@@ -47,45 +46,35 @@ impl Compiler for SolcCompiler {
fn standard_json(
&mut self,
mut input: StandardJsonInput,
pipeline: Pipeline,
base_path: Option<String>,
include_paths: Vec<String>,
allow_paths: Option<String>,
) -> anyhow::Result<StandardJsonOutput> {
let version = self.version()?;
let version = self.version()?.default;
if !include_paths.is_empty() && version < FIRST_INCLUDE_PATH_VERSION {
anyhow::bail!("--include-path is not supported in solc {version}");
}
let mut command = std::process::Command::new(self.executable.as_str());
command.stdin(std::process::Stdio::piped());
command.stdout(std::process::Stdio::piped());
command.arg("--standard-json");
if let Some(base_path) = &base_path {
if !FIRST_SUPPORTS_BASE_PATH.matches(&version.default) {
anyhow::bail!(
"--base-path not supported this version {} of solc",
&version.default
);
}
command.arg("--base-path").arg(base_path);
}
if !include_paths.is_empty() && !FIRST_SUPPORTS_INCLUDE_PATH.matches(&version.default) {
anyhow::bail!(
"--include-path not supported this version {} of solc",
&version.default
);
}
for include_path in include_paths.into_iter() {
command.arg("--include-path");
command.arg(include_path);
}
if let Some(base_path) = base_path {
command.arg("--base-path");
command.arg(base_path);
}
if let Some(allow_paths) = allow_paths {
command.arg("--allow-paths");
command.arg(allow_paths);
}
input.normalize(&version.default);
input.normalize(&version);
let suppressed_warnings = input.suppressed_warnings.take().unwrap_or_default();
@@ -129,7 +118,7 @@ impl Compiler for SolcCompiler {
),
)
})?;
output.preprocess_ast(&version, pipeline, suppressed_warnings.as_slice())?;
output.preprocess_ast(suppressed_warnings.as_slice())?;
Ok(output)
}
+1 -3
View File
@@ -4,7 +4,6 @@ use std::path::Path;
use std::path::PathBuf;
use crate::solc::combined_json::CombinedJson;
use crate::solc::pipeline::Pipeline;
use crate::solc::standard_json::input::Input as StandardJsonInput;
use crate::solc::standard_json::output::Output as StandardJsonOutput;
use crate::solc::version::Version;
@@ -29,7 +28,6 @@ impl Compiler for SoljsonCompiler {
fn standard_json(
&mut self,
mut input: StandardJsonInput,
pipeline: Pipeline,
_base_path: Option<String>,
_include_paths: Vec<String>,
_allow_paths: Option<String>,
@@ -50,7 +48,7 @@ impl Compiler for SoljsonCompiler {
.unwrap_or_else(|_| String::from_utf8_lossy(out.as_bytes()).to_string()),
)
})?;
output.preprocess_ast(&version, pipeline, suppressed_warnings.as_slice())?;
output.preprocess_ast(suppressed_warnings.as_slice())?;
Ok(output)
}
@@ -13,7 +13,6 @@ use rayon::iter::{IntoParallelIterator, ParallelIterator};
use serde::Deserialize;
use serde::Serialize;
use crate::solc::pipeline::Pipeline as SolcPipeline;
use crate::solc::standard_json::input::settings::metadata::Metadata as SolcStandardJsonInputSettingsMetadata;
use crate::solc::standard_json::input::settings::optimizer::Optimizer as SolcStandardJsonInputSettingsOptimizer;
use crate::solc::standard_json::input::settings::selection::Selection as SolcStandardJsonInputSettingsSelection;
@@ -40,13 +39,13 @@ pub struct Input {
impl Input {
/// A shortcut constructor from stdin.
pub fn try_from_stdin(solc_pipeline: SolcPipeline) -> anyhow::Result<Self> {
pub fn try_from_stdin() -> anyhow::Result<Self> {
let mut input: Self = serde_json::from_reader(std::io::BufReader::new(std::io::stdin()))?;
input
.settings
.output_selection
.get_or_insert_with(SolcStandardJsonInputSettingsSelection::default)
.extend_with_required(solc_pipeline);
.extend_with_required();
Ok(input)
}
@@ -61,7 +60,6 @@ impl Input {
output_selection: SolcStandardJsonInputSettingsSelection,
optimizer: SolcStandardJsonInputSettingsOptimizer,
metadata: Option<SolcStandardJsonInputSettingsMetadata>,
via_ir: bool,
suppressed_warnings: Option<Vec<Warning>>,
) -> anyhow::Result<Self> {
let mut paths: BTreeSet<PathBuf> = paths.iter().cloned().collect();
@@ -92,7 +90,6 @@ impl Input {
libraries,
remappings,
output_selection,
via_ir,
optimizer,
metadata,
),
@@ -111,7 +108,6 @@ impl Input {
output_selection: SolcStandardJsonInputSettingsSelection,
optimizer: SolcStandardJsonInputSettingsOptimizer,
metadata: Option<SolcStandardJsonInputSettingsMetadata>,
via_ir: bool,
suppressed_warnings: Option<Vec<Warning>>,
) -> anyhow::Result<Self> {
#[cfg(feature = "parallel")]
@@ -131,7 +127,6 @@ impl Input {
libraries,
remappings,
output_selection,
via_ir,
optimizer,
metadata,
),
@@ -51,7 +51,6 @@ impl Settings {
libraries: BTreeMap<String, BTreeMap<String, String>>,
remappings: Option<BTreeSet<String>>,
output_selection: Selection,
via_ir: bool,
optimizer: Optimizer,
metadata: Option<Metadata>,
) -> Self {
@@ -60,9 +59,9 @@ impl Settings {
libraries: Some(libraries),
remappings,
output_selection: Some(output_selection),
via_ir: if via_ir { Some(true) } else { None },
optimizer,
metadata,
via_ir: Some(true),
}
}
@@ -3,8 +3,6 @@
use serde::Deserialize;
use serde::Serialize;
use crate::solc::pipeline::Pipeline as SolcPipeline;
/// The `solc --standard-json` expected output selection flag.
#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Hash)]
#[allow(non_camel_case_types)]
@@ -46,15 +44,6 @@ pub enum Flag {
Assembly,
}
impl From<SolcPipeline> for Flag {
fn from(pipeline: SolcPipeline) -> Self {
match pipeline {
SolcPipeline::Yul => Self::Yul,
SolcPipeline::EVMLA => Self::EVMLA,
}
}
}
impl std::fmt::Display for Flag {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
@@ -7,8 +7,6 @@ use std::collections::HashSet;
use serde::Deserialize;
use serde::Serialize;
use crate::solc::pipeline::Pipeline as SolcPipeline;
use self::flag::Flag as SelectionFlag;
/// The `solc --standard-json` output file selection.
@@ -24,7 +22,7 @@ pub struct File {
impl File {
/// Creates the selection required by our compilation process.
pub fn new_required(pipeline: SolcPipeline) -> Self {
pub fn new_required() -> Self {
Self {
per_file: Some(HashSet::from_iter([SelectionFlag::AST])),
per_contract: Some(HashSet::from_iter([
@@ -32,14 +30,14 @@ impl File {
SelectionFlag::EVMDBC,
SelectionFlag::MethodIdentifiers,
SelectionFlag::Metadata,
SelectionFlag::from(pipeline),
SelectionFlag::Yul,
])),
}
}
/// Extends the user's output selection with flag required by our compilation process.
pub fn extend_with_required(&mut self, pipeline: SolcPipeline) -> &mut Self {
let required = Self::new_required(pipeline);
pub fn extend_with_required(&mut self) -> &mut Self {
let required = Self::new_required();
self.per_file
.get_or_insert_with(HashSet::default)
@@ -5,8 +5,6 @@ pub mod file;
use serde::Deserialize;
use serde::Serialize;
use crate::solc::pipeline::Pipeline as SolcPipeline;
use self::file::File as FileSelection;
/// The `solc --standard-json` output selection.
@@ -19,17 +17,17 @@ pub struct Selection {
impl Selection {
/// Creates the selection required by our compilation process.
pub fn new_required(pipeline: SolcPipeline) -> Self {
pub fn new_required() -> Self {
Self {
all: Some(FileSelection::new_required(pipeline)),
all: Some(FileSelection::new_required()),
}
}
/// Extends the user's output selection with flag required by our compilation process.
pub fn extend_with_required(&mut self, pipeline: SolcPipeline) -> &mut Self {
pub fn extend_with_required(&mut self) -> &mut Self {
self.all
.get_or_insert_with(|| FileSelection::new_required(pipeline))
.extend_with_required(pipeline);
.get_or_insert_with(FileSelection::new_required)
.extend_with_required();
self
}
}
@@ -1,46 +0,0 @@
//! The `solc --standard-json` output contract EVM extra metadata.
pub mod recursive_function;
use serde::Deserialize;
use serde::Serialize;
use self::recursive_function::RecursiveFunction;
/// The `solc --standard-json` output contract EVM extra metadata.
#[derive(Debug, Default, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ExtraMetadata {
/// The list of recursive functions.
#[serde(default = "Vec::new")]
pub recursive_functions: Vec<RecursiveFunction>,
}
impl ExtraMetadata {
/// Returns the recursive function reference for the specified tag.
pub fn get(
&self,
block_key: &revive_llvm_context::PolkaVMFunctionBlockKey,
) -> Option<&RecursiveFunction> {
for function in self.recursive_functions.iter() {
match block_key.code_type {
revive_llvm_context::PolkaVMCodeType::Deploy => {
if let Some(creation_tag) = function.creation_tag {
if num::BigUint::from(creation_tag) == block_key.tag {
return Some(function);
}
}
}
revive_llvm_context::PolkaVMCodeType::Runtime => {
if let Some(runtime_tag) = function.runtime_tag {
if num::BigUint::from(runtime_tag) == block_key.tag {
return Some(function);
}
}
}
}
}
None
}
}
@@ -1,22 +0,0 @@
//! The `solc --standard-json` output contract EVM recursive function.
use serde::Deserialize;
use serde::Serialize;
/// The `solc --standard-json` output contract EVM recursive function.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct RecursiveFunction {
/// The function name.
pub name: String,
/// The creation code function block tag.
pub creation_tag: Option<usize>,
/// The runtime code function block tag.
pub runtime_tag: Option<usize>,
/// The number of input arguments.
#[serde(rename = "totalParamSize")]
pub input_size: usize,
/// The number of output arguments.
#[serde(rename = "totalRetParamSize")]
pub output_size: usize,
}
@@ -1,27 +1,20 @@
//! The `solc --standard-json` output contract EVM data.
pub mod bytecode;
pub mod extra_metadata;
use std::collections::BTreeMap;
use serde::Deserialize;
use serde::Serialize;
use crate::evmla::assembly::Assembly;
use self::bytecode::Bytecode;
use self::bytecode::DeployedBytecode;
use self::extra_metadata::ExtraMetadata;
/// The `solc --standard-json` output contract EVM data.
/// It is replaced by PolkaVM data after compiling.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct EVM {
/// The contract EVM legacy assembly code.
#[serde(rename = "legacyAssembly", skip_serializing_if = "Option::is_none")]
pub assembly: Option<Assembly>,
/// The contract PolkaVM assembly code.
#[serde(rename = "assembly", skip_serializing_if = "Option::is_none")]
pub assembly_text: Option<String>,
@@ -37,9 +30,6 @@ pub struct EVM {
/// The contract function signatures.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub method_identifiers: Option<BTreeMap<String, String>>,
/// The extra EVMLA metadata.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub extra_metadata: Option<ExtraMetadata>,
}
impl EVM {
@@ -125,26 +125,6 @@ impl Error {
}
}
/// Returns the internal function pointer usage error.
pub fn message_internal_function_pointer(src: Option<&str>) -> Self {
let message = r#"
┌──────────────────────────────────────────────────────────────────────────────────────────────────┐
│ Error: Internal function pointers are not supported in EVM legacy assembly pipeline. │
│ Please use the Yul IR codegen instead. │
└──────────────────────────────────────────────────────────────────────────────────────────────────┘"#
.to_owned();
Self {
component: "general".to_owned(),
error_code: None,
formatted_message: message.clone(),
message,
severity: "error".to_owned(),
source_location: src.map(SourceLocation::from_str).and_then(Result::ok),
r#type: "Error".to_owned(),
}
}
/// Appends the contract path to the message..
pub fn push_contract_path(&mut self, path: &str) {
self.formatted_message
@@ -10,12 +10,9 @@ use serde::Deserialize;
use serde::Serialize;
use sha3::Digest;
use crate::evmla::assembly::instruction::Instruction;
use crate::evmla::assembly::Assembly;
use crate::project::contract::ir::IR as ProjectContractIR;
use crate::project::contract::Contract as ProjectContract;
use crate::project::Project;
use crate::solc::pipeline::Pipeline as SolcPipeline;
use crate::solc::version::Version as SolcVersion;
use crate::warning::Warning;
use crate::yul::lexer::Lexer;
@@ -53,14 +50,9 @@ impl Output {
&mut self,
source_code_files: BTreeMap<String, String>,
libraries: BTreeMap<String, BTreeMap<String, String>>,
pipeline: SolcPipeline,
solc_version: &SolcVersion,
debug_config: &revive_llvm_context::DebugConfig,
) -> anyhow::Result<Project> {
if let SolcPipeline::EVMLA = pipeline {
self.preprocess_dependencies()?;
}
let files = match self.contracts.as_ref() {
Some(files) => files,
None => match &self.errors {
@@ -76,38 +68,22 @@ impl Output {
for (name, contract) in contracts.iter() {
let full_path = format!("{path}:{name}");
let source = match pipeline {
SolcPipeline::Yul => {
let ir_optimized = match contract.ir_optimized.to_owned() {
Some(ir_optimized) => ir_optimized,
None => continue,
};
if ir_optimized.is_empty() {
continue;
}
debug_config.dump_yul(full_path.as_str(), ir_optimized.as_str())?;
let mut lexer = Lexer::new(ir_optimized.to_owned());
let object = Object::parse(&mut lexer, None).map_err(|error| {
anyhow::anyhow!("Contract `{}` parsing error: {:?}", full_path, error)
})?;
ProjectContractIR::new_yul(ir_optimized.to_owned(), object)
}
SolcPipeline::EVMLA => {
let evm = contract.evm.as_ref();
let assembly = match evm.and_then(|evm| evm.assembly.to_owned()) {
Some(assembly) => assembly.to_owned(),
None => continue,
};
let extra_metadata = evm
.and_then(|evm| evm.extra_metadata.to_owned())
.unwrap_or_default();
ProjectContractIR::new_evmla(assembly, extra_metadata)
}
let ir_optimized = match contract.ir_optimized.to_owned() {
Some(ir_optimized) => ir_optimized,
None => continue,
};
if ir_optimized.is_empty() {
continue;
}
debug_config.dump_yul(full_path.as_str(), ir_optimized.as_str())?;
let mut lexer = Lexer::new(ir_optimized.to_owned());
let object = Object::parse(&mut lexer, None).map_err(|error| {
anyhow::anyhow!("Contract `{}` parsing error: {:?}", full_path, error)
})?;
let source = ProjectContractIR::new_yul(ir_optimized.to_owned(), object);
let source_code = source_code_files
.get(path.as_str())
@@ -133,12 +109,7 @@ impl Output {
}
/// Traverses the AST and returns the list of additional errors and warnings.
pub fn preprocess_ast(
&mut self,
version: &SolcVersion,
pipeline: SolcPipeline,
suppressed_warnings: &[Warning],
) -> anyhow::Result<()> {
pub fn preprocess_ast(&mut self, suppressed_warnings: &[Warning]) -> anyhow::Result<()> {
let sources = match self.sources.as_ref() {
Some(sources) => sources,
None => return Ok(()),
@@ -147,8 +118,7 @@ impl Output {
let mut messages = Vec::new();
for (path, source) in sources.iter() {
if let Some(ast) = source.ast.as_ref() {
let mut polkavm_messages =
Source::get_messages(ast, version, pipeline, suppressed_warnings);
let mut polkavm_messages = Source::get_messages(ast, suppressed_warnings);
for message in polkavm_messages.iter_mut() {
message.push_contract_path(path.as_str());
}
@@ -165,83 +135,4 @@ impl Output {
Ok(())
}
/// The pass, which replaces with dependency indexes with actual data.
fn preprocess_dependencies(&mut self) -> anyhow::Result<()> {
let files = match self.contracts.as_mut() {
Some(files) => files,
None => return Ok(()),
};
let mut hash_path_mapping = BTreeMap::new();
for (path, contracts) in files.iter() {
for (name, contract) in contracts.iter() {
let full_path = format!("{path}:{name}");
let hash = match contract
.evm
.as_ref()
.and_then(|evm| evm.assembly.as_ref())
.map(|assembly| assembly.keccak256())
{
Some(hash) => hash,
None => continue,
};
hash_path_mapping.insert(hash, full_path);
}
}
for (path, contracts) in files.iter_mut() {
for (name, contract) in contracts.iter_mut() {
let assembly = match contract.evm.as_mut().and_then(|evm| evm.assembly.as_mut()) {
Some(assembly) => assembly,
None => continue,
};
let full_path = format!("{path}:{name}");
Self::preprocess_dependency_level(
full_path.as_str(),
assembly,
&hash_path_mapping,
)?;
}
}
Ok(())
}
/// Preprocesses an assembly JSON structure dependency data map.
fn preprocess_dependency_level(
full_path: &str,
assembly: &mut Assembly,
hash_path_mapping: &BTreeMap<String, String>,
) -> anyhow::Result<()> {
assembly.set_full_path(full_path.to_owned());
let deploy_code_index_path_mapping =
assembly.deploy_dependencies_pass(full_path, hash_path_mapping)?;
if let Some(deploy_code_instructions) = assembly.code.as_deref_mut() {
Instruction::replace_data_aliases(
deploy_code_instructions,
&deploy_code_index_path_mapping,
)?;
};
let runtime_code_index_path_mapping =
assembly.runtime_dependencies_pass(full_path, hash_path_mapping)?;
if let Some(runtime_code_instructions) = assembly
.data
.as_mut()
.and_then(|data_map| data_map.get_mut("0"))
.and_then(|data| data.get_assembly_mut())
.and_then(|assembly| assembly.code.as_deref_mut())
{
Instruction::replace_data_aliases(
runtime_code_instructions,
&runtime_code_index_path_mapping,
)?;
}
Ok(())
}
}
@@ -3,9 +3,7 @@
use serde::Deserialize;
use serde::Serialize;
use crate::solc::pipeline::Pipeline as SolcPipeline;
use crate::solc::standard_json::output::error::Error as SolcStandardJsonOutputError;
use crate::solc::version::Version as SolcVersion;
use crate::warning::Warning;
/// The `solc --standard-json` output source.
@@ -132,37 +130,9 @@ impl Source {
))
}
/// Checks the AST node for the internal function pointers value usage.
pub fn check_internal_function_pointer(
ast: &serde_json::Value,
) -> Option<SolcStandardJsonOutputError> {
let ast = ast.as_object()?;
if ast.get("nodeType")?.as_str()? != "VariableDeclaration" {
return None;
}
let type_descriptions = ast.get("typeDescriptions")?.as_object()?;
if !type_descriptions
.get("typeIdentifier")?
.as_str()?
.contains("function_internal")
{
return None;
}
Some(
SolcStandardJsonOutputError::message_internal_function_pointer(
ast.get("src")?.as_str(),
),
)
}
/// Returns the list of messages for some specific parts of the AST.
pub fn get_messages(
ast: &serde_json::Value,
version: &SolcVersion,
pipeline: SolcPipeline,
suppressed_warnings: &[Warning],
) -> Vec<SolcStandardJsonOutputError> {
let mut messages = Vec::new();
@@ -189,31 +159,16 @@ impl Source {
messages.push(message);
}
}
if SolcPipeline::EVMLA == pipeline && version.l2_revision.is_none() {
if let Some(message) = Self::check_internal_function_pointer(ast) {
messages.push(message);
}
}
match ast {
serde_json::Value::Array(array) => {
for element in array.iter() {
messages.extend(Self::get_messages(
element,
version,
pipeline,
suppressed_warnings,
));
messages.extend(Self::get_messages(element, suppressed_warnings));
}
}
serde_json::Value::Object(object) => {
for (_key, value) in object.iter() {
messages.extend(Self::get_messages(
value,
version,
pipeline,
suppressed_warnings,
));
messages.extend(Self::get_messages(value, suppressed_warnings));
}
}
_ => {}
+12 -38
View File
@@ -8,7 +8,6 @@ use std::sync::Mutex;
use once_cell::sync::Lazy;
use crate::project::Project;
use crate::solc::pipeline::Pipeline as SolcPipeline;
use crate::solc::solc_compiler::SolcCompiler;
use crate::solc::standard_json::input::settings::optimizer::Optimizer as SolcStandardJsonInputSettingsOptimizer;
use crate::solc::standard_json::input::settings::selection::Selection as SolcStandardJsonInputSettingsSelection;
@@ -31,7 +30,6 @@ const DEBUG_CONFIG: revive_llvm_context::DebugConfig =
struct CachedBlob {
contract_name: String,
solc_optimizer_enabled: bool,
pipeline: SolcPipeline,
}
/// Checks if the required executables are present in `${PATH}`.
@@ -54,17 +52,9 @@ pub fn build_solidity(
sources: BTreeMap<String, String>,
libraries: BTreeMap<String, BTreeMap<String, String>>,
remappings: Option<BTreeSet<String>>,
pipeline: SolcPipeline,
optimizer_settings: revive_llvm_context::OptimizerSettings,
) -> anyhow::Result<SolcStandardJsonOutput> {
build_solidity_with_options(
sources,
libraries,
remappings,
pipeline,
optimizer_settings,
true,
)
build_solidity_with_options(sources, libraries, remappings, optimizer_settings, true)
}
/// Builds the Solidity project and returns the standard JSON output.
@@ -74,7 +64,6 @@ pub fn build_solidity_with_options(
sources: BTreeMap<String, String>,
libraries: BTreeMap<String, BTreeMap<String, String>>,
remappings: Option<BTreeSet<String>>,
pipeline: SolcPipeline,
optimizer_settings: revive_llvm_context::OptimizerSettings,
solc_optimizer_enabled: bool,
) -> anyhow::Result<SolcStandardJsonOutput> {
@@ -93,7 +82,7 @@ pub fn build_solidity_with_options(
sources.clone(),
libraries.clone(),
remappings,
SolcStandardJsonInputSettingsSelection::new_required(pipeline),
SolcStandardJsonInputSettingsSelection::new_required(),
SolcStandardJsonInputSettingsOptimizer::new(
solc_optimizer_enabled,
None,
@@ -101,14 +90,12 @@ pub fn build_solidity_with_options(
false,
),
None,
pipeline == SolcPipeline::Yul,
None,
)?;
let mut output = solc.standard_json(input, pipeline, None, vec![], None)?;
let mut output = solc.standard_json(input, None, vec![], None)?;
let project =
output.try_to_project(sources, libraries, pipeline, &solc_version, &DEBUG_CONFIG)?;
let project = output.try_to_project(sources, libraries, &solc_version, &DEBUG_CONFIG)?;
let build: crate::Build = project.compile(optimizer_settings, false, DEBUG_CONFIG)?;
build.write_to_standard_json(&mut output, &solc_version)?;
@@ -121,7 +108,6 @@ pub fn build_solidity_with_options_evm(
sources: BTreeMap<String, String>,
libraries: BTreeMap<String, BTreeMap<String, String>>,
remappings: Option<BTreeSet<String>>,
pipeline: SolcPipeline,
solc_optimizer_enabled: bool,
) -> anyhow::Result<BTreeMap<String, (Bytecode, DeployedBytecode)>> {
check_dependencies();
@@ -139,7 +125,7 @@ pub fn build_solidity_with_options_evm(
sources.clone(),
libraries.clone(),
remappings,
SolcStandardJsonInputSettingsSelection::new_required(pipeline),
SolcStandardJsonInputSettingsSelection::new_required(),
SolcStandardJsonInputSettingsOptimizer::new(
solc_optimizer_enabled,
None,
@@ -147,11 +133,10 @@ pub fn build_solidity_with_options_evm(
false,
),
None,
pipeline == SolcPipeline::Yul,
None,
)?;
let mut output = solc.standard_json(input, pipeline, None, vec![], None)?;
let mut output = solc.standard_json(input, None, vec![], None)?;
let mut contracts = BTreeMap::new();
if let Some(files) = output.contracts.as_mut() {
@@ -176,7 +161,6 @@ pub fn build_solidity_with_options_evm(
pub fn build_solidity_and_detect_missing_libraries(
sources: BTreeMap<String, String>,
libraries: BTreeMap<String, BTreeMap<String, String>>,
pipeline: SolcPipeline,
) -> anyhow::Result<SolcStandardJsonOutput> {
check_dependencies();
@@ -193,17 +177,15 @@ pub fn build_solidity_and_detect_missing_libraries(
sources.clone(),
libraries.clone(),
None,
SolcStandardJsonInputSettingsSelection::new_required(pipeline),
SolcStandardJsonInputSettingsSelection::new_required(),
SolcStandardJsonInputSettingsOptimizer::new(true, None, &solc_version.default, false),
None,
pipeline == SolcPipeline::Yul,
None,
)?;
let mut output = solc.standard_json(input, pipeline, None, vec![], None)?;
let mut output = solc.standard_json(input, None, vec![], None)?;
let project =
output.try_to_project(sources, libraries, pipeline, &solc_version, &DEBUG_CONFIG)?;
let project = output.try_to_project(sources, libraries, &solc_version, &DEBUG_CONFIG)?;
let missing_libraries = project.get_missing_libraries();
missing_libraries.write_to_standard_json(&mut output, &solc.version()?)?;
@@ -234,7 +216,6 @@ pub fn check_solidity_warning(
source_code: &str,
warning_substring: &str,
libraries: BTreeMap<String, BTreeMap<String, String>>,
pipeline: SolcPipeline,
skip_for_revive_edition: bool,
suppressed_warnings: Option<Vec<Warning>>,
) -> anyhow::Result<bool> {
@@ -253,14 +234,13 @@ pub fn check_solidity_warning(
sources.clone(),
libraries,
None,
SolcStandardJsonInputSettingsSelection::new_required(pipeline),
SolcStandardJsonInputSettingsSelection::new_required(),
SolcStandardJsonInputSettingsOptimizer::new(true, None, &solc_version.default, false),
None,
pipeline == SolcPipeline::Yul,
suppressed_warnings,
)?;
let output = solc.standard_json(input, pipeline, None, vec![], None)?;
let output = solc.standard_json(input, None, vec![], None)?;
let contains_warning = output
.errors
.ok_or_else(|| anyhow::anyhow!("Solidity compiler messages not found"))?
@@ -273,7 +253,7 @@ pub fn check_solidity_warning(
/// Compile the blob of `contract_name` found in given `source_code`.
/// The `solc` optimizer will be enabled
pub fn compile_blob(contract_name: &str, source_code: &str) -> Vec<u8> {
compile_blob_with_options(contract_name, source_code, true, SolcPipeline::Yul)
compile_blob_with_options(contract_name, source_code, true)
}
/// Compile the EVM bin-runtime of `contract_name` found in given `source_code`.
@@ -298,10 +278,8 @@ fn compile_evm(
solc_optimizer_enabled: bool,
runtime: bool,
) -> Vec<u8> {
let pipeline = SolcPipeline::Yul;
let id = CachedBlob {
contract_name: contract_name.to_owned(),
pipeline,
solc_optimizer_enabled,
};
@@ -319,7 +297,6 @@ fn compile_evm(
[(file_name.into(), source_code.into())].into(),
Default::default(),
None,
pipeline,
solc_optimizer_enabled,
)
.expect("source should compile");
@@ -343,12 +320,10 @@ pub fn compile_blob_with_options(
contract_name: &str,
source_code: &str,
solc_optimizer_enabled: bool,
pipeline: SolcPipeline,
) -> Vec<u8> {
let id = CachedBlob {
contract_name: contract_name.to_owned(),
solc_optimizer_enabled,
pipeline,
};
if let Some(blob) = PVM_BLOB_CACHE.lock().unwrap().get(&id) {
@@ -360,7 +335,6 @@ pub fn compile_blob_with_options(
[(file_name.into(), source_code.into())].into(),
Default::default(),
None,
pipeline,
revive_llvm_context::OptimizerSettings::cycles(),
solc_optimizer_enabled,
)
@@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8"?>
<testsuites name="jest tests" tests="27" failures="0" errors="0" time="2.146">
<testsuite name="Run with --yul by default" errors="0" failures="0" skipped="1" timestamp="2024-10-24T17:08:50" time="1.508" tests="6">
<testcase classname="Run with --yul by default Valid command exit code = 0" name="Run with --yul by default Valid command exit code = 0" time="0.003">
</testcase>
<testcase classname="Run with --yul by default --yul output is presented" name="Run with --yul by default --yul output is presented" time="0">
</testcase>
<testcase classname="Run with --yul by default solc exit code == resolc exit code" name="Run with --yul by default solc exit code == resolc exit code" time="0">
<skipped/>
</testcase>
<testcase classname="Run with --yul by default run invalid: resolc --yul" name="Run with --yul by default run invalid: resolc --yul" time="0.001">
</testcase>
<testcase classname="Run with --yul by default Invalid command exit code = 1" name="Run with --yul by default Invalid command exit code = 1" time="0">
</testcase>
<testcase classname="Run with --yul by default Invalid solc exit code == Invalid resolc exit code" name="Run with --yul by default Invalid solc exit code == Invalid resolc exit code" time="0.041">
</testcase>
</testsuite>
<testsuite name="Run with --asm by default" errors="0" failures="0" skipped="0" timestamp="2024-10-24T17:08:50" time="1.512" tests="6">
<testcase classname="Run with --asm by default Valid command exit code = 0" name="Run with --asm by default Valid command exit code = 0" time="0.002">
</testcase>
<testcase classname="Run with --asm by default --asm output is presented" name="Run with --asm by default --asm output is presented" time="0.001">
</testcase>
<testcase classname="Run with --asm by default solc exit code == resolc exit code" name="Run with --asm by default solc exit code == resolc exit code" time="0.044">
</testcase>
<testcase classname="Run with --asm by default run invalid: resolc --asm" name="Run with --asm by default run invalid: resolc --asm" time="0">
</testcase>
<testcase classname="Run with --asm by default Invalid command exit code = 1" name="Run with --asm by default Invalid command exit code = 1" time="0.001">
</testcase>
<testcase classname="Run with --asm by default Invalid solc exit code == Invalid resolc exit code" name="Run with --asm by default Invalid solc exit code == Invalid resolc exit code" time="0.04">
</testcase>
</testsuite>
<testsuite name="Run resolc without any options" errors="0" failures="0" skipped="2" timestamp="2024-10-24T17:08:50" time="2.016" tests="15">
<testcase classname="Run resolc without any options Info with help is presented" name="Run resolc without any options Info with help is presented" time="0.002">
</testcase>
<testcase classname="Run resolc without any options Exit code = 1" name="Run resolc without any options Exit code = 1" time="0">
</testcase>
<testcase classname="Run resolc without any options solc exit code == resolc exit code" name="Run resolc without any options solc exit code == resolc exit code" time="0.044">
</testcase>
<testcase classname="Default run a command from the help Compiler run successful" name="Default run a command from the help Compiler run successful" time="0">
</testcase>
<testcase classname="Default run a command from the help Exit code = 0" name="Default run a command from the help Exit code = 0" time="0.001">
</testcase>
<testcase classname="Default run a command from the help Output dir is created" name="Default run a command from the help Output dir is created" time="0">
</testcase>
<testcase classname="Default run a command from the help Output file is created" name="Default run a command from the help Output file is created" time="0">
<skipped/>
</testcase>
<testcase classname="Default run a command from the help the output file is not empty" name="Default run a command from the help the output file is not empty" time="0">
</testcase>
<testcase classname="Default run a command from the help No &apos;Error&apos;/&apos;Warning&apos;/&apos;Fail&apos; in the output" name="Default run a command from the help No &apos;Error&apos;/&apos;Warning&apos;/&apos;Fail&apos; in the output" time="0">
</testcase>
<testcase classname="Default run a command from the help Compiler run successful" name="Default run a command from the help Compiler run successful" time="0.001">
</testcase>
<testcase classname="Default run a command from the help Exit code = 0" name="Default run a command from the help Exit code = 0" time="0">
</testcase>
<testcase classname="Default run a command from the help Output dir is created" name="Default run a command from the help Output dir is created" time="0">
</testcase>
<testcase classname="Default run a command from the help Output files are created" name="Default run a command from the help Output files are created" time="0">
<skipped/>
</testcase>
<testcase classname="Default run a command from the help the output files are not empty" name="Default run a command from the help the output files are not empty" time="0.003">
</testcase>
<testcase classname="Default run a command from the help No &apos;Error&apos;/&apos;Warning&apos;/&apos;Fail&apos; in the output" name="Default run a command from the help No &apos;Error&apos;/&apos;Warning&apos;/&apos;Fail&apos; in the output" time="0">
</testcase>
</testsuite>
</testsuites>
@@ -0,0 +1,153 @@
import { executeCommand } from "../src/helper";
import { paths } from '../src/entities';
describe("Set of --combined-json tests", () => {
const zksolcCommand = 'zksolc';
const solcCommand = 'solc';
const json_args: string[] = [`abi`, `hashes`, `metadata`, `devdoc`, `userdoc`, `storage-layout`, `ast`, `asm`, `bin`, `bin-runtime`];
//id1742:I
describe(`Run ${zksolcCommand} with just --combined-json`, () => {
const args = [`--combined-json`];
const result = executeCommand(zksolcCommand, args);
it("Valid command exit code = 1", () => {
expect(result.exitCode).toBe(1);
});
it("--combined-json error is presented", () => {
expect(result.output).toMatch(/(requires a value but none was supplied)/i);
});
it("solc exit code == zksolc exit code", () => {
const solcResult = executeCommand(solcCommand, args);
expect(solcResult.exitCode).toBe(result.exitCode);
});
});
//id1742:II
describe(`Run ${zksolcCommand} with Sol contract and --combined-json`, () => {
const args = [`${paths.pathToBasicSolContract}`, `--combined-json`];
const result = executeCommand(zksolcCommand, args);
it("Valid command exit code = 1", () => {
expect(result.exitCode).toBe(1);
});
it("--combined-json error is presented", () => {
expect(result.output).toMatch(/(requires a value but none was supplied)/i);
});
it("solc exit code == zksolc exit code", () => {
const solcResult = executeCommand(solcCommand, args);
expect(solcResult.exitCode).toBe(result.exitCode);
});
});
//id1742:III
for (let i = 0; i < json_args.length; i++) {
describe(`Run ${zksolcCommand} with Sol, --combined-json and ARG: ${json_args[i]}`, () => {
const args = [`${paths.pathToBasicSolContract}`, `--combined-json`, `${json_args[i]}`];
const result = executeCommand(zksolcCommand, args);
it("Valid command exit code = 0", () => {
expect(result.exitCode).toBe(0);
});
it("--combined-json error is presented", () => {
expect(result.output).toMatch(/(contracts)/i);
});
it("solc exit code == zksolc exit code", () => {
const solcResult = executeCommand(solcCommand, args);
expect(solcResult.exitCode).toBe(result.exitCode);
});
});
}
//id1829:I
for (let i = 0; i < json_args.length; i++) {
describe(`Run ${zksolcCommand} with Sol, --combined-json and wrong ARG: --${json_args[i]}`, () => {
const args = [`${paths.pathToBasicSolContract}`, `--combined-json`, `--${json_args[i]}`];
const result = executeCommand(zksolcCommand, args);
it("Valid command exit code = 1", () => {
expect(result.exitCode).toBe(1);
});
it("--combined-json error is presented", () => {
expect(result.output).toMatch(/(Invalid option|error)/i);
});
it("solc exit code == zksolc exit code", () => {
const solcResult = executeCommand(solcCommand, args);
expect(solcResult.exitCode).toBe(result.exitCode);
});
});
}
//id1829:II
for (let i = 0; i < json_args.length; i++) {
describe(`Run ${zksolcCommand} with Sol, --combined-json and multiple ARG: ${json_args[i]} ${json_args[i]}`, () => {
const args = [`${paths.pathToBasicSolContract}`, `--combined-json`, `${json_args[i]}`, `${json_args[i]}`];
const result = executeCommand(zksolcCommand, args);
xit("Valid command exit code = 1", () => {
expect(result.exitCode).toBe(1);
});
it("--combined-json error is presented", () => {
expect(result.output).toMatch(/(No such file or directory|cannot find the file specified)/i); // Hopefully we should have more precise message here!
});
xit("solc exit code == zksolc exit code", () => {
const solcResult = executeCommand(solcCommand, args);
expect(solcResult.exitCode).toBe(result.exitCode);
});
});
}
//id1829:III
for (let i = 0; i < json_args.length; i++) {
describe(`Run ${zksolcCommand} with Sol, and multiple (--combined-json ${json_args[i]})`, () => {
const args = [`${paths.pathToBasicSolContract}`, `--combined-json`, `${json_args[i]}`, `--combined-json`, `${json_args[i]}`];
const result = executeCommand(zksolcCommand, args);
it("Valid command exit code = 1", () => {
expect(result.exitCode).toBe(1);
});
it("--combined-json error is presented", () => {
expect(result.output).toMatch(/(cannot be used multiple times)/i);
});
it("solc exit code == zksolc exit code", () => {
const solcResult = executeCommand(solcCommand, args);
expect(solcResult.exitCode).toBe(result.exitCode);
});
});
}
//id1830
for (let i = 0; i < json_args.length; i++) {
describe(`Run ${zksolcCommand} with Yul, and --combined-json ${json_args[i]}`, () => {
const args = [`${paths.pathToBasicYulContract}`, `--combined-json`, `${json_args[i]}`];
const result = executeCommand(zksolcCommand, args);
it("Valid command exit code = 1", () => {
expect(result.exitCode).toBe(1);
});
it("--combined-json error is presented", () => {
expect(result.output).toMatch(/(ParserError: Expected identifier)/i);
});
it("solc exit code == zksolc exit code", () => {
const solcResult = executeCommand(solcCommand, args);
expect(solcResult.exitCode).toBe(result.exitCode);
});
});
}
});
@@ -4,8 +4,6 @@
use std::collections::BTreeMap;
use crate::solc::pipeline::Pipeline as SolcPipeline;
pub const MAIN_CODE: &str = r#"
// SPDX-License-Identifier: MIT
@@ -51,7 +49,6 @@ fn default() {
sources,
BTreeMap::new(),
None,
SolcPipeline::Yul,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Build failure");
-74
View File
@@ -5,8 +5,6 @@
use std::collections::BTreeMap;
use crate::solc::pipeline::Pipeline as SolcPipeline;
#[test]
fn yul() {
let source_code = r#"
@@ -27,7 +25,6 @@ contract Test {
sources,
BTreeMap::new(),
None,
SolcPipeline::Yul,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Test failure");
@@ -45,75 +42,4 @@ contract Test {
.is_some(),
"Yul IR is missing"
);
assert!(
build
.contracts
.as_ref()
.expect("Always exists")
.get("test.sol")
.expect("Always exists")
.get("Test")
.expect("Always exists")
.evm
.as_ref()
.expect("EVM object is missing")
.assembly
.is_none(),
"EVMLA IR is present although not requested"
);
}
#[test]
fn evmla() {
let source_code = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract Test {
function main() public view returns (uint) {
return 42;
}
}
"#;
let mut sources = BTreeMap::new();
sources.insert("test.sol".to_owned(), source_code.to_owned());
let build = super::build_solidity(
sources,
BTreeMap::new(),
None,
SolcPipeline::EVMLA,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Test failure");
assert!(
build
.contracts
.as_ref()
.expect("Always exists")
.get("test.sol")
.expect("Always exists")
.get("Test")
.expect("Always exists")
.evm
.as_ref()
.expect("EVM object is missing")
.assembly
.is_some(),
"EVMLA IR is missing",
);
assert!(
build
.contracts
.as_ref()
.expect("Always exists")
.get("test.sol")
.expect("Always exists")
.get("Test")
.expect("Always exists")
.ir_optimized
.is_none(),
"Yul IR is present although not requested",
);
}
+37 -49
View File
@@ -4,8 +4,6 @@
use std::collections::BTreeMap;
use crate::solc::pipeline::Pipeline as SolcPipeline;
pub const LIBRARY_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
@@ -38,29 +36,24 @@ fn not_specified() {
let mut sources = BTreeMap::new();
sources.insert("test.sol".to_owned(), LIBRARY_TEST_SOURCE.to_owned());
for pipeline in [SolcPipeline::EVMLA, SolcPipeline::Yul] {
let output = super::build_solidity_and_detect_missing_libraries(
sources.clone(),
BTreeMap::new(),
pipeline,
)
.expect("Test failure");
assert!(
output
.contracts
.as_ref()
.expect("Always exists")
.get("test.sol")
.expect("Always exists")
.get("SimpleContract")
.expect("Always exists")
.missing_libraries
.as_ref()
.expect("Always exists")
.contains("test.sol:SimpleLibrary"),
"Missing library not detected"
);
}
let output =
super::build_solidity_and_detect_missing_libraries(sources.clone(), BTreeMap::new())
.expect("Test failure");
assert!(
output
.contracts
.as_ref()
.expect("Always exists")
.get("test.sol")
.expect("Always exists")
.get("SimpleContract")
.expect("Always exists")
.missing_libraries
.as_ref()
.expect("Always exists")
.contains("test.sol:SimpleLibrary"),
"Missing library not detected"
);
}
#[test]
@@ -75,28 +68,23 @@ fn specified() {
.entry("SimpleLibrary".to_string())
.or_insert("0x00000000000000000000000000000000DEADBEEF".to_string());
for pipeline in [SolcPipeline::EVMLA, SolcPipeline::Yul] {
let output = super::build_solidity_and_detect_missing_libraries(
sources.clone(),
libraries.clone(),
pipeline,
)
.expect("Test failure");
assert!(
output
.contracts
.as_ref()
.expect("Always exists")
.get("test.sol")
.expect("Always exists")
.get("SimpleContract")
.expect("Always exists")
.missing_libraries
.as_ref()
.cloned()
.unwrap_or_default()
.is_empty(),
"The list of missing libraries must be empty"
);
}
let output =
super::build_solidity_and_detect_missing_libraries(sources.clone(), libraries.clone())
.expect("Test failure");
assert!(
output
.contracts
.as_ref()
.expect("Always exists")
.get("test.sol")
.expect("Always exists")
.get("SimpleContract")
.expect("Always exists")
.missing_libraries
.as_ref()
.cloned()
.unwrap_or_default()
.is_empty(),
"The list of missing libraries must be empty"
);
}
-140
View File
@@ -4,7 +4,6 @@
use std::collections::BTreeMap;
use crate::solc::pipeline::Pipeline as SolcPipeline;
use crate::warning::Warning;
pub const ECRECOVER_TEST_SOURCE: &str = r#"
@@ -30,7 +29,6 @@ fn ecrecover() {
ECRECOVER_TEST_SOURCE,
"Warning: It looks like you are using 'ecrecover' to validate a signature of a user account.",
BTreeMap::new(),
SolcPipeline::Yul,
false,
None,
).expect("Test failure")
@@ -44,7 +42,6 @@ fn ecrecover_suppressed() {
ECRECOVER_TEST_SOURCE,
"Warning: It looks like you are using 'ecrecover' to validate a signature of a user account.",
BTreeMap::new(),
SolcPipeline::Yul,
false,
Some(vec![Warning::EcRecover]),
).expect("Test failure")
@@ -76,7 +73,6 @@ fn send() {
SEND_TEST_SOURCE,
"Warning: It looks like you are using '<address payable>.send/transfer(<X>)' without providing",
BTreeMap::new(),
SolcPipeline::Yul,
false,
None,
).expect("Test failure")
@@ -90,7 +86,6 @@ fn send_suppressed() {
SEND_TEST_SOURCE,
"Warning: It looks like you are using '<address payable>.send/transfer(<X>)' without providing",
BTreeMap::new(),
SolcPipeline::Yul,
false,
Some(vec![Warning::SendTransfer]),
).expect("Test failure")
@@ -121,7 +116,6 @@ fn transfer() {
TRANSFER_TEST_SOURCE,
"Warning: It looks like you are using '<address payable>.send/transfer(<X>)' without providing",
BTreeMap::new(),
SolcPipeline::Yul,
false,
None,
).expect("Test failure")
@@ -135,7 +129,6 @@ fn transfer_suppressed() {
TRANSFER_TEST_SOURCE,
"Warning: It looks like you are using '<address payable>.send/transfer(<X>)' without providing",
BTreeMap::new(),
SolcPipeline::Yul,
false,
Some(vec![Warning::SendTransfer]),
).expect("Test failure")
@@ -163,7 +156,6 @@ fn extcodesize() {
EXTCODESIZE_TEST_SOURCE,
"Warning: Your code or one of its dependencies uses the 'extcodesize' instruction,",
BTreeMap::new(),
SolcPipeline::Yul,
false,
None,
)
@@ -176,7 +168,6 @@ fn extcodesize_suppressed() {
EXTCODESIZE_TEST_SOURCE,
"Warning: Your code or one of its dependencies uses the 'extcodesize' instruction,",
BTreeMap::new(),
SolcPipeline::Yul,
false,
Some(vec![Warning::ExtCodeSize]),
)
@@ -200,7 +191,6 @@ fn tx_origin() {
TX_ORIGIN_TEST_SOURCE,
"Warning: You are checking for 'tx.origin' in your code, which might lead to",
BTreeMap::new(),
SolcPipeline::Yul,
false,
None,
)
@@ -213,7 +203,6 @@ fn tx_origin_suppressed() {
TX_ORIGIN_TEST_SOURCE,
"Warning: You are checking for 'tx.origin' in your code, which might lead to",
BTreeMap::new(),
SolcPipeline::Yul,
false,
Some(vec![Warning::TxOrigin]),
)
@@ -244,7 +233,6 @@ fn tx_origin_assembly() {
TX_ORIGIN_ASSEMBLY_TEST_SOURCE,
"Warning: You are checking for 'tx.origin' in your code, which might lead to",
BTreeMap::new(),
SolcPipeline::Yul,
false,
None,
)
@@ -257,136 +245,8 @@ fn tx_origin_assembly_suppressed() {
TX_ORIGIN_ASSEMBLY_TEST_SOURCE,
"Warning: You are checking for 'tx.origin' in your code, which might lead to",
BTreeMap::new(),
SolcPipeline::Yul,
false,
Some(vec![Warning::TxOrigin]),
)
.expect("Test failure"));
}
#[test]
fn internal_function_pointer_argument() {
let source_code = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract InternalFunctionPointerExample {
function add(uint256 a, uint256 b) internal pure returns (uint256) {
return a + b;
}
function sub(uint256 a, uint256 b) internal pure returns (uint256) {
return a - b;
}
function executeOperation(
function (uint256, uint256) internal pure returns (uint256) operation,
uint256 a,
uint256 b
) private pure returns (uint256) {
return operation(a, b);
}
function testAdd(uint256 a, uint256 b) public pure returns (uint256) {
return executeOperation(add, a, b);
}
function testSub(uint256 a, uint256 b) public pure returns (uint256) {
return executeOperation(sub, a, b);
}
}
"#;
assert!(super::check_solidity_warning(
source_code,
"Error: Internal function pointers are not supported in EVM legacy assembly pipeline.",
BTreeMap::new(),
SolcPipeline::EVMLA,
true,
None,
)
.expect("Test failure"));
}
#[test]
fn internal_function_pointer_stack() {
let source_code = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract StackFunctionPointerExample {
function add(uint256 a, uint256 b) internal pure returns (uint256) {
return a + b;
}
function sub(uint256 a, uint256 b) internal pure returns (uint256) {
return a - b;
}
function testAdd(uint256 a, uint256 b) public pure returns (uint256) {
function (uint256, uint256) internal pure returns (uint256) operation = add;
return operation(a, b);
}
function testSub(uint256 a, uint256 b) public pure returns (uint256) {
function (uint256, uint256) internal pure returns (uint256) operation = sub;
return operation(a, b);
}
}
"#;
assert!(super::check_solidity_warning(
source_code,
"Error: Internal function pointers are not supported in EVM legacy assembly pipeline.",
BTreeMap::new(),
SolcPipeline::EVMLA,
true,
None,
)
.expect("Test failure"));
}
#[test]
fn internal_function_pointer_storage() {
let source_code = r#"
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.0;
contract StorageFunctionPointerExample {
function add(uint256 a, uint256 b) internal pure returns (uint256) {
return a + b;
}
function sub(uint256 a, uint256 b) internal pure returns (uint256) {
return a - b;
}
function (uint256, uint256) internal pure returns (uint256) operation;
bool private isOperationSet = false;
function setOperation(bool isAdd) public {
if (isAdd) {
operation = add;
} else {
operation = sub;
}
isOperationSet = true;
}
function executeOperation(uint256 a, uint256 b) public view returns (uint256) {
require(isOperationSet, "Operation not set");
return operation(a, b);
}
}
"#;
assert!(super::check_solidity_warning(
source_code,
"Error: Internal function pointers are not supported in EVM legacy assembly pipeline.",
BTreeMap::new(),
SolcPipeline::EVMLA,
true,
None,
)
.expect("Test failure"));
}
-5
View File
@@ -4,8 +4,6 @@
use std::collections::BTreeMap;
use crate::solc::pipeline::Pipeline as SolcPipeline;
pub const SOURCE_CODE: &str = r#"
// SPDX-License-Identifier: MIT
@@ -54,7 +52,6 @@ fn optimizer() {
sources.clone(),
BTreeMap::new(),
None,
SolcPipeline::Yul,
revive_llvm_context::OptimizerSettings::none(),
)
.expect("Build failure");
@@ -62,7 +59,6 @@ fn optimizer() {
sources.clone(),
BTreeMap::new(),
None,
SolcPipeline::Yul,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Build failure");
@@ -70,7 +66,6 @@ fn optimizer() {
sources,
BTreeMap::new(),
None,
SolcPipeline::Yul,
revive_llvm_context::OptimizerSettings::size(),
)
.expect("Build failure");
-3
View File
@@ -5,8 +5,6 @@
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use crate::solc::pipeline::Pipeline as SolcPipeline;
pub const CALLEE_TEST_SOURCE: &str = r#"
// SPDX-License-Identifier: MIT
@@ -46,7 +44,6 @@ fn default() {
sources,
BTreeMap::new(),
Some(remappings),
SolcPipeline::Yul,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Test failure");
@@ -4,8 +4,6 @@
use std::collections::BTreeMap;
use crate::solc::pipeline::Pipeline as SolcPipeline;
#[test]
#[should_panic(expected = "runtimeCode is not supported")]
fn default() {
@@ -29,7 +27,6 @@ contract Test {
sources,
BTreeMap::new(),
None,
SolcPipeline::Yul,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Test failure");
@@ -4,8 +4,6 @@
use std::collections::BTreeMap;
use crate::solc::pipeline::Pipeline as SolcPipeline;
#[test]
#[should_panic(expected = "The `CODECOPY` instruction is not supported")]
fn codecopy_yul_runtime() {
@@ -34,7 +32,6 @@ contract FixedCodeCopy {
sources,
BTreeMap::new(),
None,
SolcPipeline::Yul,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Test failure");
@@ -63,22 +60,6 @@ contract CallcodeTest {
}
"#;
#[test]
#[should_panic(expected = "The `CALLCODE` instruction is not supported")]
fn callcode_evmla() {
let mut sources = BTreeMap::new();
sources.insert("test.sol".to_owned(), CALLCODE_TEST_SOURCE.to_owned());
super::build_solidity(
sources,
BTreeMap::new(),
None,
SolcPipeline::EVMLA,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Test failure");
}
#[test]
#[should_panic(expected = "The `CALLCODE` instruction is not supported")]
fn callcode_yul() {
@@ -89,7 +70,6 @@ fn callcode_yul() {
sources,
BTreeMap::new(),
None,
SolcPipeline::Yul,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Test failure");
@@ -137,22 +117,6 @@ contract ExternalCodeCopy {
}
"#;
#[test]
#[should_panic(expected = "The `EXTCODECOPY` instruction is not supported")]
fn extcodecopy_evmla() {
let mut sources = BTreeMap::new();
sources.insert("test.sol".to_owned(), EXTCODECOPY_TEST_SOURCE.to_owned());
super::build_solidity(
sources,
BTreeMap::new(),
None,
SolcPipeline::EVMLA,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Test failure");
}
#[test]
#[should_panic(expected = "The `EXTCODECOPY` instruction is not supported")]
fn extcodecopy_yul() {
@@ -163,7 +127,6 @@ fn extcodecopy_yul() {
sources,
BTreeMap::new(),
None,
SolcPipeline::Yul,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Test failure");
@@ -187,22 +150,6 @@ contract MinimalDestructible {
}
"#;
#[test]
#[should_panic(expected = "The `SELFDESTRUCT` instruction is not supported")]
fn selfdestruct_evmla() {
let mut sources = BTreeMap::new();
sources.insert("test.sol".to_owned(), SELFDESTRUCT_TEST_SOURCE.to_owned());
super::build_solidity(
sources,
BTreeMap::new(),
None,
SolcPipeline::EVMLA,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Test failure");
}
#[test]
#[should_panic(expected = "The `SELFDESTRUCT` instruction is not supported")]
fn selfdestruct_yul() {
@@ -213,7 +160,6 @@ fn selfdestruct_yul() {
sources,
BTreeMap::new(),
None,
SolcPipeline::Yul,
revive_llvm_context::OptimizerSettings::cycles(),
)
.expect("Test failure");