Separate compilation and linker phases (#376)

Separate between compilation and linker phases to allow deploy time
linking and back-porting era compiler changes to fix #91. Unlinked
contract binaries (caused by missing libraries or missing factory
dependencies in turn) are emitted as raw ELF object.

Few drive by fixes:
- #98
- A compiler panic on missing libraries definitions.
- Fixes some inconsistent type forwarding in JSON output (empty string
vs. null object).
- Remove the unused fallback for size optimization setting.
- Remove the broken `--llvm-ir` mode.
- CI workflow fixes.

---------

Signed-off-by: Cyrill Leutwiler <bigcyrill@hotmail.com>
Signed-off-by: xermicus <bigcyrill@hotmail.com>
Signed-off-by: xermicus <cyrill@parity.io>
This commit is contained in:
xermicus
2025-09-27 20:52:22 +02:00
committed by GitHub
parent 13faedf08a
commit 94ec34c4d5
169 changed files with 6288 additions and 5206 deletions
+1 -1
View File
@@ -132,7 +132,7 @@ jobs:
run: |
result=$(./resolc-${{ matrix.target }} --bin crates/integration/contracts/flipper.sol)
echo $result
if [[ $result == *'0x50564d'* ]]; then exit 0; else exit 1; fi
if [[ $result == *'50564d'* ]]; then exit 0; else exit 1; fi
- uses: actions/upload-artifact@v4
with:
+1
View File
@@ -26,6 +26,7 @@ jobs:
with:
# without this it will override our rust flags
rustflags: ""
components: rustfmt, clippy
- name: Install Solc
uses: ./.github/actions/get-solc
+9
View File
@@ -12,15 +12,24 @@ This is a development pre-release.
Supported `polkadot-sdk` rev: `2503.0.1`
### Changed
- Remove the broken `--llvm-ir` mode.
- Remove the unused fallback for size optimization setting.
- Unlinked contract binaries are emitted as raw ELF objects.
### Added
- Line debug information per YUL builtin and for `if` statements.
- Column numbers in debug information.
- Support for the YUL optimizer details in the standard json input definition.
- The `revive-explorer` compiler utility.
- `revive-yul`: The AST visitor interface.
- The `--link` deploy time linking mode.
### Fixed
- The debug info source file matches the YUL path in `--debug-output-dir`, allowing tools to display the source line.
- Inconsistent type forwarding in JSON output (empty string vs. null object).
- The solc automatic import resolution.
- Compiler panic on missing libraries definition.
## v0.3.0
Generated
+711 -430
View File
File diff suppressed because it is too large Load Diff
+7 -6
View File
@@ -14,22 +14,22 @@ repository = "https://github.com/paritytech/revive"
rust-version = "1.85.0"
[workspace.dependencies]
resolc = { version = "0.3.0", path = "crates/resolc" }
resolc = { version = "0.4.0", path = "crates/resolc", default-features = false }
revive-benchmarks = { version = "0.1.0", path = "crates/benchmarks" }
revive-builtins = { version = "0.1.0", path = "crates/builtins" }
revive-common = { version = "0.1.0", path = "crates/common" }
revive-common = { version = "0.2.0", path = "crates/common" }
revive-differential = { version = "0.1.0", path = "crates/differential" }
revive-explorer = { version = "0.1.0", path = "crates/explore" }
revive-integration = { version = "0.1.1", path = "crates/integration" }
revive-linker = { version = "0.1.0", path = "crates/linker" }
revive-linker = { version = "0.2.0", path = "crates/linker" }
lld-sys = { version = "0.1.0", path = "crates/lld-sys" }
revive-llvm-context = { version = "0.3.0", path = "crates/llvm-context" }
revive-llvm-context = { version = "0.4.0", path = "crates/llvm-context" }
revive-runtime-api = { version = "0.2.0", path = "crates/runtime-api" }
revive-runner = { version = "0.1.0", path = "crates/runner" }
revive-solc-json-interface = { version = "0.2.0", path = "crates/solc-json-interface" }
revive-solc-json-interface = { version = "0.3.0", path = "crates/solc-json-interface", default-features = false }
revive-stdlib = { version = "0.1.1", path = "crates/stdlib" }
revive-build-utils = { version = "0.1.0", path = "crates/build-utils" }
revive-yul = { version = "0.2.1", path = "crates/yul" }
revive-yul = { version = "0.3.0", path = "crates/yul" }
hex = "0.4.3"
cc = "1.2"
@@ -71,6 +71,7 @@ tar = "0.4"
toml = "0.8"
assert_cmd = "2.0"
assert_fs = "1.1"
normpath = "1.3"
# polkadot-sdk and friends
codec = { version = "3.7.5", default-features = false, package = "parity-scale-codec" }
+1
View File
@@ -0,0 +1 @@
large-error-threshold = 192
+3 -1
View File
@@ -1,6 +1,6 @@
[package]
name = "revive-common"
version.workspace = true
version = "0.2.0"
license.workspace = true
edition.workspace = true
repository.workspace = true
@@ -15,6 +15,8 @@ doctest = false
[dependencies]
anyhow = { workspace = true }
hex = { workspace = true }
sha3 = { workspace = true }
serde = { workspace = true, features = ["derive"] }
serde_json = { workspace = true, features = [ "arbitrary_precision", "unbounded_depth" ] }
serde_stacker = { workspace = true }
+33
View File
@@ -0,0 +1,33 @@
//! The contract identifier helper library.
use serde::{Deserialize, Serialize};
/// This structure simplifies passing the contract identifiers through the compilation pipeline.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ContractIdentifier {
    /// The absolute file path.
    pub path: String,
    /// The contract name.
    /// Is set for Solidity contracts only. Otherwise it would be equal to the file name.
    pub name: Option<String>,
    /// The full contract identifier.
    /// For Solidity, The format is `<absolute file path>:<contract name>`.
    /// For other languages, `<absolute file path>`.
    pub full_path: String,
}

impl ContractIdentifier {
    /// A shortcut constructor.
    ///
    /// Derives `full_path` from `path` and the optional contract `name`:
    /// `<path>:<name>` when a name is present, otherwise just `<path>`.
    pub fn new(path: String, name: Option<String>) -> Self {
        let full_path = name
            .as_deref()
            .map_or_else(|| path.clone(), |contract| format!("{path}:{contract}"));

        Self {
            path,
            name,
            full_path,
        }
    }
}
+1 -1
View File
@@ -37,4 +37,4 @@ pub static EXTENSION_POLKAVM_ASSEMBLY: &str = "pvmasm";
pub static EXTENSION_POLKAVM_BINARY: &str = "pvm";
/// The ELF shared object file extension.
pub static EXTENSION_SHARED_OBJECT: &str = "so";
pub static EXTENSION_OBJECT: &str = "o";
+68
View File
@@ -0,0 +1,68 @@
//! Keccak-256 hash utilities.

use serde::{Deserialize, Serialize};
use sha3::digest::FixedOutput;
use sha3::Digest;

/// The Keccak-256 digest length in bytes.
pub const DIGEST_BYTES: usize = 32;

/// Keccak-256 hash utilities.
///
/// Caches both the raw digest and its `0x`-prefixed hex rendering so the
/// string form is not re-computed on every display.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Keccak256 {
    /// Binary representation.
    bytes: [u8; DIGEST_BYTES],
    /// Hexadecimal string representation (`0x`-prefixed).
    string: String,
}

impl Keccak256 {
    /// Computes the `keccak256` hash for `preimage`.
    pub fn from_slice(preimage: &[u8]) -> Self {
        let bytes = sha3::Keccak256::digest(preimage).into();
        let string = format!("0x{}", hex::encode(bytes));
        Self { bytes, string }
    }

    /// Computes the `keccak256` hash for an array of `preimages`.
    ///
    /// The preimages are fed into the hasher in order, so the result equals
    /// hashing their concatenation.
    pub fn from_slices<R: AsRef<[u8]>>(preimages: &[R]) -> Self {
        let mut hasher = sha3::Keccak256::new();
        for preimage in preimages.iter() {
            hasher.update(preimage);
        }
        let bytes: [u8; DIGEST_BYTES] = hasher.finalize_fixed().into();
        let string = format!("0x{}", hex::encode(bytes));
        Self { bytes, string }
    }

    /// Returns a reference to the 32-byte Keccak-256 hash.
    ///
    /// Note: `sha3::Keccak256` is the original Keccak-256 (as used by
    /// Ethereum), not the NIST-standardized SHA-3.
    pub fn as_bytes(&self) -> &[u8] {
        self.bytes.as_slice()
    }

    /// Returns a reference to the hexadecimal string representation (`0x`-prefixed).
    pub fn as_str(&self) -> &str {
        self.string.as_str()
    }

    /// Extracts the binary representation as an owned vector.
    pub fn to_vec(&self) -> Vec<u8> {
        self.bytes.to_vec()
    }
}

impl std::fmt::Display for Keccak256 {
    // Displays the cached `0x`-prefixed hex string.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{}", self.as_str())
    }
}

#[cfg(test)]
mod tests {
    #[test]
    fn hash_and_stringify_works() {
        assert_eq!(
            super::Keccak256::from_slices(&["foo".as_bytes(), "bar".as_bytes(),]).as_str(),
            "0x38d18acb67d25c8bb9942764b62f18e17054f66a817bd4295423adf9ed98873e"
        );
    }
}
+8
View File
@@ -3,9 +3,13 @@
pub(crate) mod base;
pub(crate) mod bit_length;
pub(crate) mod byte_length;
pub(crate) mod contract_identifier;
pub(crate) mod evm_version;
pub(crate) mod exit_code;
pub(crate) mod extension;
pub(crate) mod keccak256;
pub(crate) mod metadata;
pub(crate) mod object;
pub(crate) mod utils;
pub use self::base::*;
@@ -14,4 +18,8 @@ pub use self::byte_length::*;
pub use self::evm_version::EVMVersion;
pub use self::exit_code::*;
pub use self::extension::*;
pub use self::keccak256::*;
pub use self::metadata::*;
pub use self::object::*;
pub use self::utils::*;
pub use contract_identifier::*;
+42
View File
@@ -0,0 +1,42 @@
//! The metadata hash type.

use std::str::FromStr;

use serde::{Deserialize, Serialize};

/// The metadata hash type.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum MetadataHash {
    /// Do not include bytecode hash.
    #[serde(rename = "none")]
    None,
    /// Include the `ipfs` hash.
    #[serde(rename = "ipfs")]
    IPFS,
    /// Include the `keccak256` hash.
    #[serde(rename = "keccak256")]
    Keccak256,
}

impl FromStr for MetadataHash {
    type Err = anyhow::Error;

    /// Parses the mode from its lowercase name: `none`, `ipfs`, or `keccak256`.
    ///
    /// Any other input yields an error naming the rejected string.
    fn from_str(string: &str) -> Result<Self, Self::Err> {
        match string {
            "none" => Ok(Self::None),
            "ipfs" => Ok(Self::IPFS),
            "keccak256" => Ok(Self::Keccak256),
            string => anyhow::bail!("unknown bytecode hash mode: `{string}`"),
        }
    }
}

impl std::fmt::Display for MetadataHash {
    // Renders the same lowercase names that `FromStr` accepts, keeping the
    // two conversions round-trippable.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            Self::None => write!(f, "none"),
            Self::IPFS => write!(f, "ipfs"),
            Self::Keccak256 => write!(f, "keccak256"),
        }
    }
}
+61
View File
@@ -0,0 +1,61 @@
//! The revive binary object helper module.
use std::str::FromStr;
use serde::{Deserialize, Serialize};
/// The binary object format.
///
/// Unlinked contracts are stored in a different object format
/// than final (linked) contract blobs.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum ObjectFormat {
    /// The unlinked ELF object format.
    ELF,
    /// The fully linked PVM format.
    PVM,
}

impl ObjectFormat {
    /// The magic bytes a linked PVM blob starts with.
    pub const PVM_MAGIC: [u8; 4] = [b'P', b'V', b'M', b'\0'];
    /// The magic bytes an ELF object starts with.
    pub const ELF_MAGIC: [u8; 4] = [0x7f, b'E', b'L', b'F'];
}

impl FromStr for ObjectFormat {
    type Err = anyhow::Error;

    /// Parses the format from its canonical name (`ELF` or `PVM`).
    fn from_str(value: &str) -> Result<Self, Self::Err> {
        match value {
            "ELF" => Ok(Self::ELF),
            "PVM" => Ok(Self::PVM),
            // The variants implement `Display`, so they can be passed to the
            // format machinery directly; the former `.to_string()` calls were
            // needless intermediate allocations.
            _ => anyhow::bail!(
                "Unknown object format: {value}. Supported formats: {}, {}",
                Self::ELF,
                Self::PVM
            ),
        }
    }
}

impl TryFrom<&[u8]> for ObjectFormat {
    type Error = &'static str;

    /// Detects the format from the leading magic bytes of `value`.
    ///
    /// # Errors
    /// Returns an error when `value` starts with neither magic sequence.
    fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
        if value.starts_with(&Self::PVM_MAGIC) {
            return Ok(Self::PVM);
        }
        if value.starts_with(&Self::ELF_MAGIC) {
            return Ok(Self::ELF);
        }
        Err("expected a contract object")
    }
}

impl std::fmt::Display for ObjectFormat {
    // Renders the same uppercase names that `FromStr` accepts.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::ELF => write!(f, "ELF"),
            Self::PVM => write!(f, "PVM"),
        }
    }
}
+26 -6
View File
@@ -1,25 +1,45 @@
//! The compiler common utils.
/// Deserializes a `serde_json` object from slice with the recursion limit disabled.
///
/// Must be used for all JSON I/O to avoid crashes due to the aforementioned limit.
pub fn deserialize_from_slice<O>(input: &[u8]) -> anyhow::Result<O>
where
O: serde::de::DeserializeOwned,
{
let mut deserializer = serde_json::Deserializer::from_slice(input);
deserializer.disable_recursion_limit();
let deserializer = serde_stacker::Deserializer::new(&mut deserializer);
let result = O::deserialize(deserializer)?;
Ok(result)
let deserializer = serde_json::Deserializer::from_slice(input);
deserialize(deserializer)
}
/// Deserializes a `serde_json` object from string with the recursion limit disabled.
///
/// Must be used for all JSON I/O to avoid crashes due to the aforementioned limit.
pub fn deserialize_from_str<O>(input: &str) -> anyhow::Result<O>
where
O: serde::de::DeserializeOwned,
{
let mut deserializer = serde_json::Deserializer::from_str(input);
let deserializer = serde_json::Deserializer::from_str(input);
deserialize(deserializer)
}
/// Deserializes a `serde_json` object from reader with the recursion limit disabled.
///
/// Must be used for all JSON I/O to avoid crashes due to the aforementioned limit.
pub fn deserialize_from_reader<R, O>(reader: R) -> anyhow::Result<O>
where
R: std::io::Read,
O: serde::de::DeserializeOwned,
{
let deserializer = serde_json::Deserializer::from_reader(reader);
deserialize(deserializer)
}
/// Runs the generic deserializer.
pub fn deserialize<'de, R, O>(mut deserializer: serde_json::Deserializer<R>) -> anyhow::Result<O>
where
R: serde_json::de::Read<'de>,
O: serde::de::DeserializeOwned,
{
deserializer.disable_recursion_limit();
let deserializer = serde_stacker::Deserializer::new(&mut deserializer);
let result = O::deserialize(deserializer)?;
+20 -20
View File
@@ -12,6 +12,26 @@ pub struct Contract {
pub calldata: Vec<u8>,
}
impl Contract {
    // Builds a test contract: compiles `code` for both the EVM (reference)
    // and PVM (under test) runtimes with default options, and stores the
    // raw `calldata` bytes to drive the call later.
    pub fn build(calldata: Vec<u8>, name: &'static str, code: &str) -> Self {
        Self {
            name,
            evm_runtime: compile_evm_bin_runtime(name, code),
            pvm_runtime: compile_blob(name, code),
            calldata,
        }
    }

    // Same as `build`, but compiles the PVM blob with size-optimization
    // settings (`OptimizerSettings::size()`) instead of the defaults.
    pub fn build_size_opt(calldata: Vec<u8>, name: &'static str, code: &str) -> Self {
        Self {
            name,
            evm_runtime: compile_evm_bin_runtime(name, code),
            pvm_runtime: compile_blob_with_options(name, code, true, OptimizerSettings::size()),
            calldata,
        }
    }
}
macro_rules! case {
// Arguments:
// 1. The file name, expect to live under "../contracts/"
@@ -261,26 +281,6 @@ sol!(
case!("AddressPredictor.sol", Predicted, constructorCall, predicted_constructor, salt: U256);
case!("AddressPredictor.sol", AddressPredictor, constructorCall, address_predictor_constructor, salt: U256, bytecode: Bytes);
impl Contract {
pub fn build(calldata: Vec<u8>, name: &'static str, code: &str) -> Self {
Self {
name,
evm_runtime: compile_evm_bin_runtime(name, code),
pvm_runtime: compile_blob(name, code),
calldata,
}
}
pub fn build_size_opt(calldata: Vec<u8>, name: &'static str, code: &str) -> Self {
Self {
name,
evm_runtime: compile_evm_bin_runtime(name, code),
pvm_runtime: compile_blob_with_options(name, code, true, OptimizerSettings::size()),
calldata,
}
}
}
#[cfg(test)]
mod tests {
use rayon::iter::{IntoParallelIterator, ParallelIterator};
+5 -5
View File
@@ -1,6 +1,6 @@
[package]
name = "revive-linker"
version.workspace = true
version = "0.2.0"
license.workspace = true
edition.workspace = true
repository.workspace = true
@@ -8,10 +8,10 @@ authors.workspace = true
description = "revive compiler linker utils"
[dependencies]
tempfile = { workspace = true }
polkavm-linker = { workspace = true }
libc = { workspace = true }
anyhow = { workspace = true }
libc = { workspace = true }
polkavm-linker = { workspace = true }
tempfile = { workspace = true }
revive-builtins = { workspace = true }
lld-sys = { workspace = true }
revive-builtins = { workspace = true }
+114
View File
@@ -0,0 +1,114 @@
//! The revive ELF object linker library.
use std::{ffi::CString, fs, path::PathBuf, sync::Mutex};
use lld_sys::LLDELFLink;
use tempfile::TempDir;
use revive_builtins::COMPILER_RT;
static GUARD: Mutex<()> = Mutex::new(());
/// The revive ELF object linker.
pub struct ElfLinker {
temporary_directory: TempDir,
output_path: PathBuf,
object_path: PathBuf,
symbols_path: PathBuf,
linker_script_path: PathBuf,
}
impl ElfLinker {
    /// The linker script; `KEEP` retains the PolkaVM export section across
    /// `--gc-sections`.
    const LINKER_SCRIPT: &str = r#"
SECTIONS {
.text : { KEEP(*(.text.polkavm_export)) *(.text .text.*) }
}"#;
    /// The compiler-rt builtins archive file name written into the temp dir.
    const BUILTINS_ARCHIVE_FILE: &str = "libclang_rt.builtins-riscv64.a";
    /// The compiler-rt builtins library name passed to `--library`.
    const BUILTINS_LIB_NAME: &str = "clang_rt.builtins-riscv64";

    /// The setup routine prepares a temporary working directory.
    ///
    /// Writes the linker script and the compiler-rt builtins archive into a
    /// fresh temporary directory; the object and symbol files are written
    /// later by [`Self::link`].
    ///
    /// # Errors
    /// Returns an error if the temporary directory or any file inside it
    /// can not be created.
    pub fn setup() -> anyhow::Result<Self> {
        let temporary_directory = TempDir::new()?;
        let object_path = temporary_directory.path().join("obj.o");
        let output_path = temporary_directory.path().join("out.o");
        let symbols_path = temporary_directory.path().join("sym.o");

        let linker_script_path = temporary_directory.path().join("linker.ld");
        fs::write(&linker_script_path, Self::LINKER_SCRIPT)
            .map_err(|message| anyhow::anyhow!("{message} {linker_script_path:?}",))?;

        let compiler_rt_path = temporary_directory.path().join(Self::BUILTINS_ARCHIVE_FILE);
        fs::write(&compiler_rt_path, COMPILER_RT)
            .map_err(|message| anyhow::anyhow!("{message} {compiler_rt_path:?}"))?;

        Ok(Self {
            temporary_directory,
            output_path,
            object_path,
            symbols_path,
            linker_script_path,
        })
    }

    /// Link `input` with `symbols` and the `compiler_rt` via `LLD`.
    ///
    /// Consumes `self`; the temporary working directory is cleaned up when
    /// the `TempDir` is dropped.
    ///
    /// # Errors
    /// Returns an error if the inputs can not be written to disk, the linker
    /// reports a failure, or the output can not be read back.
    pub fn link<T: AsRef<[u8]>>(self, input: T, symbols: T) -> anyhow::Result<Vec<u8>> {
        fs::write(&self.object_path, input)
            .map_err(|message| anyhow::anyhow!("{message} {:?}", self.object_path))?;
        fs::write(&self.symbols_path, symbols)
            .map_err(|message| anyhow::anyhow!("{message} {:?}", self.symbols_path))?;

        // `create_arguments` already yields owned `String`s; the former
        // `.into_iter().map(|v| v.to_string())` round-trip was a redundant
        // re-allocation of every argument.
        // NOTE(review): a `true` return from `lld` is treated as failure,
        // matching the previous implementation — confirm the `LLDELFLink`
        // return-value convention in `lld-sys`.
        if lld(self.create_arguments()) {
            return Err(anyhow::anyhow!("ld.lld failed"));
        }

        Ok(fs::read(&self.output_path)?)
    }

    /// The argument creation helper function.
    ///
    /// Assembles the `ld.lld` command line pointing at the files inside the
    /// temporary working directory.
    fn create_arguments(&self) -> Vec<String> {
        [
            "ld.lld",
            "--error-limit=0",
            "--relocatable",
            "--emit-relocs",
            "--no-relax",
            "--unique",
            "--gc-sections",
            self.linker_script_path.to_str().expect("should be utf8"),
            "-o",
            self.output_path.to_str().expect("should be utf8"),
            self.object_path.to_str().expect("should be utf8"),
            self.symbols_path.to_str().expect("should be utf8"),
            "--library-path",
            self.temporary_directory
                .path()
                .to_str()
                .expect("should be utf8"),
            "--library",
            Self::BUILTINS_LIB_NAME,
        ]
        .iter()
        .map(ToString::to_string)
        .collect()
    }
}
/// The thread-safe LLD helper function.
///
/// Returns `true` when `LLDELFLink` returns zero.
/// All invocations are serialized through the global `GUARD` mutex —
/// presumably because the LLD driver keeps process-global state; confirm
/// before removing the lock.
///
/// # Panics
/// Panics if any argument contains an interior NUL byte or if the mutex
/// is poisoned (both are considered internal compiler errors).
fn lld(arguments: Vec<String>) -> bool {
    let c_strings = arguments
        .into_iter()
        .map(|arg| CString::new(arg).expect("ld.lld args should not contain null bytes"))
        .collect::<Vec<_>>();
    // Borrowed pointers into `c_strings`; `c_strings` stays alive until the
    // end of the function, covering the FFI call below.
    let args: Vec<*const libc::c_char> = c_strings.iter().map(|arg| arg.as_ptr()).collect();

    let _lock = GUARD.lock().expect("ICE: linker mutex should not poison");
    // SAFETY: every pointer in `args` points into the live `c_strings`
    // vector, and the passed length matches the pointer array length.
    unsafe { LLDELFLink(args.as_ptr(), args.len()) == 0 }
}
+3 -75
View File
@@ -1,76 +1,4 @@
use std::{env, ffi::CString, fs};
//! The revive ELF object to PVM blob linker library.
use lld_sys::LLDELFLink;
use revive_builtins::COMPILER_RT;
const LINKER_SCRIPT: &str = r#"
SECTIONS {
.text : { KEEP(*(.text.polkavm_export)) *(.text .text.*) }
}"#;
const BUILTINS_ARCHIVE_FILE: &str = "libclang_rt.builtins-riscv64.a";
const BUILTINS_LIB_NAME: &str = "clang_rt.builtins-riscv64";
fn invoke_lld(cmd_args: &[&str]) -> bool {
let c_strings = cmd_args
.iter()
.map(|arg| CString::new(*arg).expect("ld.lld args should not contain null bytes"))
.collect::<Vec<_>>();
let args: Vec<*const libc::c_char> = c_strings.iter().map(|arg| arg.as_ptr()).collect();
unsafe { LLDELFLink(args.as_ptr(), args.len()) == 0 }
}
pub fn polkavm_linker<T: AsRef<[u8]>>(code: T, strip_binary: bool) -> anyhow::Result<Vec<u8>> {
let mut config = polkavm_linker::Config::default();
config.set_strip(strip_binary);
config.set_optimize(true);
polkavm_linker::program_from_elf(config, code.as_ref())
.map_err(|reason| anyhow::anyhow!("polkavm linker failed: {}", reason))
}
pub fn link<T: AsRef<[u8]>>(input: T) -> anyhow::Result<Vec<u8>> {
let dir = tempfile::tempdir().expect("failed to create temp directory for linking");
let output_path = dir.path().join("out.so");
let object_path = dir.path().join("out.o");
let linker_script_path = dir.path().join("linker.ld");
let compiler_rt_path = dir.path().join(BUILTINS_ARCHIVE_FILE);
fs::write(&object_path, input).map_err(|msg| anyhow::anyhow!("{msg} {object_path:?}"))?;
if env::var("PVM_LINKER_DUMP_OBJ").is_ok() {
fs::copy(&object_path, "/tmp/out.o")?;
}
fs::write(&linker_script_path, LINKER_SCRIPT)
.map_err(|msg| anyhow::anyhow!("{msg} {linker_script_path:?}"))?;
fs::write(&compiler_rt_path, COMPILER_RT)
.map_err(|msg| anyhow::anyhow!("{msg} {compiler_rt_path:?}"))?;
let ld_args = [
"ld.lld",
"--error-limit=0",
"--relocatable",
"--emit-relocs",
"--no-relax",
"--unique",
"--gc-sections",
"--library-path",
dir.path().to_str().expect("should be utf8"),
"--library",
BUILTINS_LIB_NAME,
linker_script_path.to_str().expect("should be utf8"),
object_path.to_str().expect("should be utf8"),
"-o",
output_path.to_str().expect("should be utf8"),
];
if invoke_lld(&ld_args) {
return Err(anyhow::anyhow!("ld.lld failed"));
}
Ok(fs::read(&output_path)?)
}
pub mod elf;
pub mod pvm;
+10
View File
@@ -0,0 +1,10 @@
//! The revive PVM blob linker library.

/// Links the ELF object `code` into a final PVM program blob.
///
/// `strip_binary` is forwarded to `polkavm_linker::Config::set_strip`
/// (presumably strips symbols/debug info from the blob — see the
/// polkavm-linker documentation); linker optimization is always enabled.
///
/// # Errors
/// Returns an error when `polkavm_linker::program_from_elf` rejects the input.
pub fn polkavm_linker<T: AsRef<[u8]>>(code: T, strip_binary: bool) -> anyhow::Result<Vec<u8>> {
    let mut config = polkavm_linker::Config::default();
    config.set_strip(strip_binary);
    config.set_optimize(true);

    polkavm_linker::program_from_elf(config, code.as_ref())
        .map_err(|reason| anyhow::anyhow!("polkavm linker failed: {}", reason))
}
+1 -2
View File
@@ -1,6 +1,6 @@
[package]
name = "revive-llvm-context"
version = "0.3.0"
version = "0.4.0"
license.workspace = true
edition.workspace = true
repository.workspace = true
@@ -20,7 +20,6 @@ itertools = { workspace = true }
serde = { workspace = true, features = ["derive"] }
num = { workspace = true }
hex = { workspace = true }
sha3 = { workspace = true }
inkwell = { workspace = true }
libc = { workspace = true }
polkavm-disassembler = { workspace = true }
@@ -1,5 +1,9 @@
//! The debug IR type.
use revive_common::{
EXTENSION_LLVM_SOURCE, EXTENSION_OBJECT, EXTENSION_POLKAVM_ASSEMBLY, EXTENSION_YUL,
};
/// The debug IR type.
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -11,22 +15,17 @@ pub enum IRType {
/// Whether to dump the assembly code.
Assembly,
/// Whether to dump the ELF shared object
SO,
/// Whether to jump JSON
#[cfg(debug_assertions)]
JSON,
Object,
}
impl IRType {
/// Returns the file extension for the specified IR.
pub fn file_extension(&self) -> &'static str {
match self {
Self::Yul => revive_common::EXTENSION_YUL,
Self::LLVM => revive_common::EXTENSION_LLVM_SOURCE,
Self::Assembly => revive_common::EXTENSION_POLKAVM_ASSEMBLY,
#[cfg(debug_assertions)]
Self::JSON => revive_common::EXTENSION_JSON,
Self::SO => revive_common::EXTENSION_SHARED_OBJECT,
Self::Yul => EXTENSION_YUL,
Self::LLVM => EXTENSION_LLVM_SOURCE,
Self::Assembly => EXTENSION_POLKAVM_ASSEMBLY,
Self::Object => EXTENSION_OBJECT,
}
}
}
+3 -42
View File
@@ -1,8 +1,5 @@
//! The debug configuration.
pub mod ir_type;
use std::path::Path;
use std::path::PathBuf;
use serde::Deserialize;
@@ -10,6 +7,8 @@ use serde::Serialize;
use self::ir_type::IRType;
pub mod ir_type;
/// The debug configuration.
#[derive(Debug, Default, Serialize, Deserialize, Clone)]
pub struct DebugConfig {
@@ -18,13 +17,7 @@ pub struct DebugConfig {
/// Whether debug info should be emitted.
pub emit_debug_info: bool,
/// The YUL debug output file path.
///
/// Is expected to be configured when running in YUL mode.
pub contract_path: Option<PathBuf>,
/// The YUL input file path.
///
/// Is expected to be configured when not running in YUL mode.
pub yul_path: Option<PathBuf>,
}
impl DebugConfig {
@@ -34,29 +27,15 @@ impl DebugConfig {
output_directory,
emit_debug_info,
contract_path: None,
yul_path: None,
}
}
/// Set the current YUL path.
pub fn set_yul_path(&mut self, yul_path: &Path) {
self.yul_path = yul_path.to_path_buf().into();
}
/// Set the current contract path.
pub fn set_contract_path(&mut self, contract_path: &str) {
self.contract_path = self.yul_source_path(contract_path);
}
/// Returns with the following precedence:
/// 1. The YUL source path if it was configured.
/// 2. The source YUL path from the debug output dir if it was configured.
/// 3. `None` if there is no debug output directory.
pub fn yul_source_path(&self, contract_path: &str) -> Option<PathBuf> {
if let Some(path) = self.yul_path.as_ref() {
return Some(path.clone());
}
self.output_directory.as_ref().map(|output_directory| {
let mut file_path = output_directory.to_owned();
let full_file_name = Self::full_file_name(contract_path, None, IRType::Yul);
@@ -128,7 +107,7 @@ impl DebugConfig {
pub fn dump_object(&self, contract_path: &str, code: &[u8]) -> anyhow::Result<()> {
if let Some(output_directory) = self.output_directory.as_ref() {
let mut file_path = output_directory.to_owned();
let full_file_name = Self::full_file_name(contract_path, None, IRType::SO);
let full_file_name = Self::full_file_name(contract_path, None, IRType::Object);
file_path.push(full_file_name);
std::fs::write(file_path, code)?;
}
@@ -136,24 +115,6 @@ impl DebugConfig {
Ok(())
}
/// Dumps the stage output as a json file suitable for use with --recursive-process
#[cfg(debug_assertions)]
pub fn dump_stage_output(
&self,
contract_path: &str,
contract_suffix: Option<&str>,
stage_json: &Vec<u8>,
) -> anyhow::Result<()> {
if let Some(output_directory) = self.output_directory.as_ref() {
let mut file_path = output_directory.to_owned();
let full_file_name = Self::full_file_name(contract_path, contract_suffix, IRType::JSON);
file_path.push(full_file_name);
std::fs::write(file_path, stage_json)?;
}
Ok(())
}
/// Creates a full file name, given the contract full path, suffix, and extension.
fn full_file_name(contract_path: &str, suffix: Option<&str>, ir_type: IRType) -> String {
let mut full_file_name = contract_path.replace('/', "_").replace(':', ".");
+10 -7
View File
@@ -1,5 +1,7 @@
//! The LLVM context library.
#![allow(clippy::too_many_arguments)]
use std::ffi::CString;
use std::sync::OnceLock;
@@ -8,7 +10,7 @@ pub use self::debug_config::DebugConfig;
pub use self::optimizer::settings::size_level::SizeLevel as OptimizerSettingsSizeLevel;
pub use self::optimizer::settings::Settings as OptimizerSettings;
pub use self::optimizer::Optimizer;
pub use self::polkavm::build_assembly_text as polkavm_build_assembly_text;
pub use self::polkavm::build as polkavm_build;
pub use self::polkavm::context::address_space::AddressSpace as PolkaVMAddressSpace;
pub use self::polkavm::context::argument::Argument as PolkaVMArgument;
pub use self::polkavm::context::attribute::Attribute as PolkaVMAttribute;
@@ -46,6 +48,7 @@ pub use self::polkavm::context::r#loop::Loop as PolkaVMLoop;
pub use self::polkavm::context::solidity_data::SolidityData as PolkaVMContextSolidityData;
pub use self::polkavm::context::yul_data::YulData as PolkaVMContextYulData;
pub use self::polkavm::context::Context as PolkaVMContext;
pub use self::polkavm::disassemble as polkavm_disassemble;
pub use self::polkavm::evm::arithmetic as polkavm_evm_arithmetic;
pub use self::polkavm::evm::bitwise as polkavm_evm_bitwise;
pub use self::polkavm::evm::call as polkavm_evm_call;
@@ -66,13 +69,13 @@ pub use self::polkavm::evm::memory as polkavm_evm_memory;
pub use self::polkavm::evm::r#return as polkavm_evm_return;
pub use self::polkavm::evm::return_data as polkavm_evm_return_data;
pub use self::polkavm::evm::storage as polkavm_evm_storage;
pub use self::polkavm::hash as polkavm_hash;
pub use self::polkavm::link as polkavm_link;
pub use self::polkavm::r#const as polkavm_const;
pub use self::polkavm::Dependency as PolkaVMDependency;
pub use self::polkavm::DummyDependency as PolkaVMDummyDependency;
pub use self::polkavm::DummyLLVMWritable as PolkaVMDummyLLVMWritable;
pub use self::polkavm::WriteLLVM as PolkaVMWriteLLVM;
pub use self::target_machine::target::Target;
pub use self::target_machine::TargetMachine;
pub use self::target_machine::target::Target as PolkaVMTarget;
pub use self::target_machine::TargetMachine as PolkaVMTargetMachine;
pub(crate) mod debug_config;
pub(crate) mod optimizer;
@@ -86,7 +89,7 @@ static DID_INITIALIZE: OnceLock<()> = OnceLock::new();
/// This is a no-op if called subsequentially.
///
/// `llvm_arguments` are passed as-is to the LLVM CL options parser.
pub fn initialize_llvm(target: Target, name: &str, llvm_arguments: &[String]) {
pub fn initialize_llvm(target: PolkaVMTarget, name: &str, llvm_arguments: &[String]) {
let Ok(_) = DID_INITIALIZE.set(()) else {
return; // Tests don't go through a recursive process
};
@@ -109,6 +112,6 @@ pub fn initialize_llvm(target: Target, name: &str, llvm_arguments: &[String]) {
inkwell::support::enable_llvm_pretty_stack_trace();
match target {
Target::PVM => inkwell::targets::Target::initialize_riscv(&Default::default()),
PolkaVMTarget::PVM => inkwell::targets::Target::initialize_riscv(&Default::default()),
}
}
+2 -2
View File
@@ -1,7 +1,5 @@
//! The LLVM optimizing tools.
pub mod settings;
use serde::Deserialize;
use serde::Serialize;
@@ -9,6 +7,8 @@ use crate::target_machine::TargetMachine;
use self::settings::Settings;
pub mod settings;
/// The LLVM optimizing tools.
#[derive(Debug, Serialize, Deserialize)]
pub struct Optimizer {
@@ -1,8 +1,5 @@
//! The LLVM optimizer settings.
pub mod size_level;
use revive_solc_json_interface::SolcStandardJsonInputSettingsOptimizer;
use serde::Deserialize;
use serde::Serialize;
@@ -10,6 +7,8 @@ use itertools::Itertools;
use self::size_level::SizeLevel;
pub mod size_level;
/// The LLVM optimizer and code-gen settings.
#[derive(Debug, Serialize, Deserialize, Clone, Eq)]
pub struct Settings {
@@ -20,9 +19,6 @@ pub struct Settings {
/// The back-end optimization level.
pub level_back_end: inkwell::OptimizationLevel,
/// Fallback to optimizing for size if the bytecode is too large.
pub is_fallback_to_size_enabled: bool,
/// Whether the LLVM `verify each` option is enabled.
pub is_verify_each_enabled: bool,
/// Whether the LLVM `debug logging` option is enabled.
@@ -41,8 +37,6 @@ impl Settings {
level_middle_end_size,
level_back_end,
is_fallback_to_size_enabled: false,
is_verify_each_enabled: false,
is_debug_logging_enabled: false,
}
@@ -62,8 +56,6 @@ impl Settings {
level_middle_end_size,
level_back_end,
is_fallback_to_size_enabled: false,
is_verify_each_enabled,
is_debug_logging_enabled,
}
@@ -197,16 +189,6 @@ impl Settings {
combinations
}
/// Sets the fallback to optimizing for size if the bytecode is too large.
pub fn enable_fallback_to_size(&mut self) {
self.is_fallback_to_size_enabled = true;
}
/// Whether the fallback to optimizing for size is enabled.
pub fn is_fallback_to_size_enabled(&self) -> bool {
self.is_fallback_to_size_enabled
}
}
impl PartialEq for Settings {
@@ -227,18 +209,3 @@ impl std::fmt::Display for Settings {
)
}
}
impl TryFrom<&SolcStandardJsonInputSettingsOptimizer> for Settings {
type Error = anyhow::Error;
fn try_from(value: &SolcStandardJsonInputSettingsOptimizer) -> Result<Self, Self::Error> {
let mut result = match value.mode {
Some(mode) => Self::try_from_cli(mode)?,
None => Self::size(),
};
if value.fallback_to_optimizing_for_size.unwrap_or_default() {
result.enable_fallback_to_size();
}
Ok(result)
}
}
+4 -2
View File
@@ -1,10 +1,12 @@
//! The LLVM context constants.
use revive_common::{BIT_LENGTH_X32, BYTE_LENGTH_WORD};
/// The LLVM framework version.
pub const LLVM_VERSION: semver::Version = semver::Version::new(18, 1, 4);
/// The pointer width sized type.
pub static XLEN: usize = revive_common::BIT_LENGTH_X32;
pub static XLEN: usize = BIT_LENGTH_X32;
/// The calldata size global variable name.
pub static GLOBAL_CALLDATA_SIZE: &str = "calldatasize";
@@ -20,4 +22,4 @@ pub static GLOBAL_ADDRESS_SPILL_BUFFER: &str = "address_spill_buffer";
/// The deployer call header size that consists of:
/// - bytecode hash (32 bytes)
pub const DEPLOYER_CALL_HEADER_SIZE: usize = revive_common::BYTE_LENGTH_WORD;
pub const DEPLOYER_CALL_HEADER_SIZE: usize = BYTE_LENGTH_WORD;
@@ -66,9 +66,9 @@ impl<'ctx> Argument<'ctx> {
/// Access the underlying value.
///
/// Will emit a stack load if `self` is a pointer argument.
pub fn access<D: crate::polkavm::Dependency + Clone>(
pub fn access(
&self,
context: &crate::polkavm::context::Context<'ctx, D>,
context: &crate::polkavm::context::Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
match &self.value {
Value::Register(value) => Ok(*value),
@@ -79,9 +79,9 @@ impl<'ctx> Argument<'ctx> {
/// Access the underlying value.
///
/// Will emit a stack load if `self` is a pointer argument.
pub fn as_pointer<D: crate::polkavm::Dependency + Clone>(
pub fn as_pointer(
&self,
context: &crate::polkavm::context::Context<'ctx, D>,
context: &crate::polkavm::context::Context<'ctx>,
) -> anyhow::Result<crate::polkavm::context::Pointer<'ctx>> {
match &self.value {
Value::Register(value) => {
@@ -2,6 +2,7 @@
use std::collections::BTreeMap;
use revive_common::BYTE_LENGTH_WORD;
use serde::Deserialize;
use serde::Serialize;
@@ -9,30 +10,25 @@ use serde::Serialize;
#[derive(Debug, Serialize, Deserialize)]
pub struct Build {
/// The PolkaVM text assembly.
pub assembly_text: String,
pub assembly_text: Option<String>,
/// The metadata hash.
pub metadata_hash: Option<[u8; revive_common::BYTE_LENGTH_WORD]>,
pub metadata_hash: Option<[u8; BYTE_LENGTH_WORD]>,
/// The PolkaVM binary bytecode.
pub bytecode: Vec<u8>,
/// The PolkaVM bytecode hash.
pub bytecode_hash: String,
/// The PolkaVM bytecode hash. Unlinked builds don't have a hash yet.
pub bytecode_hash: Option<[u8; BYTE_LENGTH_WORD]>,
/// The hash-to-full-path mapping of the contract factory dependencies.
pub factory_dependencies: BTreeMap<String, String>,
}
impl Build {
/// A shortcut constructor.
pub fn new(
assembly_text: String,
metadata_hash: Option<[u8; revive_common::BYTE_LENGTH_WORD]>,
bytecode: Vec<u8>,
bytecode_hash: String,
) -> Self {
pub fn new(metadata_hash: Option<[u8; BYTE_LENGTH_WORD]>, bytecode: Vec<u8>) -> Self {
Self {
assembly_text,
assembly_text: None,
metadata_hash,
bytecode,
bytecode_hash,
bytecode_hash: None,
factory_dependencies: BTreeMap::new(),
}
}
@@ -2,6 +2,8 @@
use std::cell::RefCell;
use revive_common::BIT_LENGTH_WORD;
use inkwell::debug_info::AsDIScope;
use inkwell::debug_info::DIScope;
@@ -164,7 +166,7 @@ impl<'ctx> DebugInfo<'ctx> {
&self,
flags: Option<inkwell::debug_info::DIFlags>,
) -> anyhow::Result<inkwell::debug_info::DIBasicType<'ctx>> {
self.create_primitive_type(revive_common::BIT_LENGTH_WORD, flags)
self.create_primitive_type(BIT_LENGTH_WORD, flags)
}
/// Return the DIBuilder.
@@ -1,12 +1,5 @@
//! The LLVM IR generator function.
pub mod declaration;
pub mod intrinsics;
pub mod llvm_runtime;
pub mod r#return;
pub mod runtime;
pub mod yul_data;
use std::collections::HashMap;
use inkwell::debug_info::AsDIScope;
@@ -20,6 +13,13 @@ use self::declaration::Declaration;
use self::r#return::Return;
use self::yul_data::YulData;
pub mod declaration;
pub mod intrinsics;
pub mod llvm_runtime;
pub mod r#return;
pub mod runtime;
pub mod yul_data;
/// The LLVM IR generator function.
#[derive(Debug)]
pub struct Function<'ctx> {
@@ -1,22 +1,19 @@
//! Translates the arithmetic operations.
use inkwell::values::BasicValue;
use revive_common::BIT_LENGTH_WORD;
use crate::polkavm::context::runtime::RuntimeFunction;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
use crate::polkavm::WriteLLVM;
/// Implements the division operator according to the EVM specification.
pub struct Division;
impl<D> RuntimeFunction<D> for Division
where
D: Dependency + Clone,
{
impl RuntimeFunction for Division {
const NAME: &'static str = "__revive_division";
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context.word_type().fn_type(
&[context.word_type().into(), context.word_type().into()],
false,
@@ -25,7 +22,7 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<inkwell::values::BasicValueEnum<'ctx>>> {
let operand_1 = Self::paramater(context, 0).into_int_value();
let operand_2 = Self::paramater(context, 1).into_int_value();
@@ -39,29 +36,23 @@ where
}
}
impl<D> WriteLLVM<D> for Division
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for Division {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
/// Implements the signed division operator according to the EVM specification.
pub struct SignedDivision;
impl<D> RuntimeFunction<D> for SignedDivision
where
D: Dependency + Clone,
{
impl RuntimeFunction for SignedDivision {
const NAME: &'static str = "__revive_signed_division";
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context.word_type().fn_type(
&[context.word_type().into(), context.word_type().into()],
false,
@@ -70,7 +61,7 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<inkwell::values::BasicValueEnum<'ctx>>> {
let operand_1 = Self::paramater(context, 0).into_int_value();
let operand_2 = Self::paramater(context, 1).into_int_value();
@@ -96,9 +87,7 @@ where
context.set_basic_block(block_overflow);
let max_uint = context.builder().build_int_z_extend(
context
.integer_type(revive_common::BIT_LENGTH_WORD - 1)
.const_all_ones(),
context.integer_type(BIT_LENGTH_WORD - 1).const_all_ones(),
context.word_type(),
"max_uint",
)?;
@@ -121,29 +110,23 @@ where
}
}
impl<D> WriteLLVM<D> for SignedDivision
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for SignedDivision {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
/// Implements the remainder operator according to the EVM specification.
pub struct Remainder;
impl<D> RuntimeFunction<D> for Remainder
where
D: Dependency + Clone,
{
impl RuntimeFunction for Remainder {
const NAME: &'static str = "__revive_remainder";
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context.word_type().fn_type(
&[context.word_type().into(), context.word_type().into()],
false,
@@ -152,7 +135,7 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<inkwell::values::BasicValueEnum<'ctx>>> {
let operand_1 = Self::paramater(context, 0).into_int_value();
let operand_2 = Self::paramater(context, 1).into_int_value();
@@ -166,29 +149,23 @@ where
}
}
impl<D> WriteLLVM<D> for Remainder
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for Remainder {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
/// Implements the signed remainder operator according to the EVM specification.
pub struct SignedRemainder;
impl<D> RuntimeFunction<D> for SignedRemainder
where
D: Dependency + Clone,
{
impl RuntimeFunction for SignedRemainder {
const NAME: &'static str = "__revive_signed_remainder";
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context.word_type().fn_type(
&[context.word_type().into(), context.word_type().into()],
false,
@@ -197,7 +174,7 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<inkwell::values::BasicValueEnum<'ctx>>> {
let operand_1 = Self::paramater(context, 0).into_int_value();
let operand_2 = Self::paramater(context, 1).into_int_value();
@@ -211,16 +188,13 @@ where
}
}
impl<D> WriteLLVM<D> for SignedRemainder
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for SignedRemainder {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
@@ -231,13 +205,12 @@ where
///
/// The result is either the calculated quotient or zero,
/// selected at runtime.
fn wrapped_division<'ctx, D, F, T>(
context: &Context<'ctx, D>,
fn wrapped_division<'ctx, F, T>(
context: &Context<'ctx>,
denominator: inkwell::values::IntValue<'ctx>,
f: F,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
F: FnOnce() -> anyhow::Result<T>,
T: inkwell::values::IntMathValue<'ctx>,
{
@@ -1,47 +1,36 @@
//! The deploy code function.
use std::marker::PhantomData;
use crate::polkavm::context::code_type::CodeType;
use crate::polkavm::context::function::runtime;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
use crate::polkavm::WriteLLVM;
/// The deploy code function.
/// Is a special function that is only used by the front-end generated code.
#[derive(Debug)]
pub struct DeployCode<B, D>
pub struct DeployCode<B>
where
B: WriteLLVM<D>,
D: Dependency + Clone,
B: WriteLLVM,
{
/// The deploy code AST representation.
inner: B,
/// The `D` phantom data.
_pd: PhantomData<D>,
}
impl<B, D> DeployCode<B, D>
impl<B> DeployCode<B>
where
B: WriteLLVM<D>,
D: Dependency + Clone,
B: WriteLLVM,
{
/// A shortcut constructor.
pub fn new(inner: B) -> Self {
Self {
inner,
_pd: PhantomData,
}
Self { inner }
}
}
impl<B, D> WriteLLVM<D> for DeployCode<B, D>
impl<B> WriteLLVM for DeployCode<B>
where
B: WriteLLVM<D>,
D: Dependency + Clone,
B: WriteLLVM,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
let function_type = context.function_type::<inkwell::types::BasicTypeEnum>(vec![], 0);
context.add_function(
runtime::FUNCTION_DEPLOY_CODE,
@@ -54,7 +43,7 @@ where
self.inner.declare(context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
context.set_current_function(runtime::FUNCTION_DEPLOY_CODE, None)?;
context.set_basic_block(context.current_function().borrow().entry_block());
@@ -1,12 +1,16 @@
//! The entry function.
use inkwell::types::BasicType;
use revive_common::BIT_LENGTH_ETH_ADDRESS;
use revive_runtime_api::immutable_data::{
GLOBAL_IMMUTABLE_DATA_POINTER, GLOBAL_IMMUTABLE_DATA_SIZE,
};
use revive_runtime_api::polkavm_imports::CALL_DATA_SIZE;
use revive_solc_json_interface::PolkaVMDefaultHeapMemorySize;
use crate::polkavm::context::address_space::AddressSpace;
use crate::polkavm::context::function::runtime;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
use crate::polkavm::WriteLLVM;
/// The entry function.
@@ -21,10 +25,7 @@ impl Entry {
/// Initializes the global variables.
/// The pointers are not initialized, because it's not possible to create a null pointer.
pub fn initialize_globals<D>(context: &mut Context<D>) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
pub fn initialize_globals(context: &mut Context) -> anyhow::Result<()> {
context.set_global(
crate::polkavm::GLOBAL_CALLDATA_SIZE,
context.xlen_type(),
@@ -52,7 +53,7 @@ impl Entry {
heap_memory_type.const_zero(),
);
let address_type = context.integer_type(revive_common::BIT_LENGTH_ETH_ADDRESS);
let address_type = context.integer_type(BIT_LENGTH_ETH_ADDRESS);
context.set_global(
crate::polkavm::GLOBAL_ADDRESS_SPILL_BUFFER,
address_type,
@@ -64,16 +65,13 @@ impl Entry {
}
/// Populate the calldata size global value.
pub fn load_calldata_size<D>(context: &mut Context<D>) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
pub fn load_calldata_size(context: &mut Context) -> anyhow::Result<()> {
let call_data_size_pointer = context
.get_global(crate::polkavm::GLOBAL_CALLDATA_SIZE)?
.value
.as_pointer_value();
let call_data_size_value = context
.build_runtime_call(revive_runtime_api::polkavm_imports::CALL_DATA_SIZE, &[])
.build_runtime_call(CALL_DATA_SIZE, &[])
.expect("the call_data_size syscall method should return a value")
.into_int_value();
let call_data_size_value = context.builder().build_int_truncate(
@@ -90,10 +88,7 @@ impl Entry {
/// Calls the deploy code if the first function argument was `1`.
/// Calls the runtime code otherwise.
pub fn leave_entry<D>(context: &mut Context<D>) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
pub fn leave_entry(context: &mut Context) -> anyhow::Result<()> {
context.set_debug_location(0, 0, None)?;
let is_deploy = context
@@ -133,11 +128,8 @@ impl Entry {
}
}
impl<D> WriteLLVM<D> for Entry
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
impl WriteLLVM for Entry {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
let entry_arguments = vec![context.bool_type().as_basic_type_enum()];
let entry_function_type = context.function_type(entry_arguments, 0);
context.add_function(
@@ -149,13 +141,13 @@ where
)?;
context.declare_global(
revive_runtime_api::immutable_data::GLOBAL_IMMUTABLE_DATA_POINTER,
GLOBAL_IMMUTABLE_DATA_POINTER,
context.word_type().array_type(0),
AddressSpace::Stack,
);
context.declare_global(
revive_runtime_api::immutable_data::GLOBAL_IMMUTABLE_DATA_SIZE,
GLOBAL_IMMUTABLE_DATA_SIZE,
context.xlen_type(),
AddressSpace::Stack,
);
@@ -166,7 +158,7 @@ where
/// Instead of a single entrypoint, the runtime expects two exports: `call ` and `deploy`.
/// `call` and `deploy` directly call `entry`, signaling a deploy if the first arg is `1`.
/// The `entry` function loads calldata, sets globals and calls the runtime or deploy code.
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
let entry = context
.get_function(runtime::FUNCTION_ENTRY)
.expect("the entry function should already be declared")
@@ -5,7 +5,6 @@ use inkwell::values::BasicValue;
use crate::polkavm::context::function::Attribute;
use crate::polkavm::context::runtime::RuntimeFunction;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
use crate::polkavm::WriteLLVM;
/// Pointers are represented as opaque 256 bit integer values in EVM.
@@ -15,10 +14,7 @@ use crate::polkavm::WriteLLVM;
/// (but wrong) pointers when truncated.
pub struct WordToPointer;
impl<D> RuntimeFunction<D> for WordToPointer
where
D: Dependency + Clone,
{
impl RuntimeFunction for WordToPointer {
const NAME: &'static str = "__revive_int_truncate";
const ATTRIBUTES: &'static [Attribute] = &[
@@ -27,7 +23,7 @@ where
Attribute::AlwaysInline,
];
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context
.xlen_type()
.fn_type(&[context.word_type().into()], false)
@@ -35,7 +31,7 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<inkwell::values::BasicValueEnum<'ctx>>> {
let value = Self::paramater(context, 0).into_int_value();
let truncated =
@@ -67,26 +63,20 @@ where
}
}
impl<D> WriteLLVM<D> for WordToPointer
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for WordToPointer {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
/// The revive runtime exit function.
pub struct Exit;
impl<D> RuntimeFunction<D> for Exit
where
D: Dependency + Clone,
{
impl RuntimeFunction for Exit {
const NAME: &'static str = "__revive_exit";
const ATTRIBUTES: &'static [Attribute] = &[
@@ -95,7 +85,7 @@ where
Attribute::AlwaysInline,
];
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context.void_type().fn_type(
&[
context.xlen_type().into(),
@@ -108,7 +98,7 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<inkwell::values::BasicValueEnum<'ctx>>> {
let flags = Self::paramater(context, 0).into_int_value();
let offset = Self::paramater(context, 1).into_int_value();
@@ -133,15 +123,12 @@ where
}
}
impl<D> WriteLLVM<D> for Exit
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for Exit {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
@@ -1,47 +1,36 @@
//! The runtime code function.
use std::marker::PhantomData;
use crate::polkavm::context::code_type::CodeType;
use crate::polkavm::context::function::runtime;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
use crate::polkavm::WriteLLVM;
/// The runtime code function.
/// Is a special function that is only used by the front-end generated code.
#[derive(Debug)]
pub struct RuntimeCode<B, D>
pub struct RuntimeCode<B>
where
B: WriteLLVM<D>,
D: Dependency + Clone,
B: WriteLLVM,
{
/// The runtime code AST representation.
inner: B,
/// The `D` phantom data.
_pd: PhantomData<D>,
}
impl<B, D> RuntimeCode<B, D>
impl<B> RuntimeCode<B>
where
B: WriteLLVM<D>,
D: Dependency + Clone,
B: WriteLLVM,
{
/// A shortcut constructor.
pub fn new(inner: B) -> Self {
Self {
inner,
_pd: PhantomData,
}
Self { inner }
}
}
impl<B, D> WriteLLVM<D> for RuntimeCode<B, D>
impl<B> WriteLLVM for RuntimeCode<B>
where
B: WriteLLVM<D>,
D: Dependency + Clone,
B: WriteLLVM,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
let function_type = context.function_type::<inkwell::types::BasicTypeEnum>(vec![], 0);
context.add_function(
runtime::FUNCTION_RUNTIME_CODE,
@@ -54,7 +43,7 @@ where
self.inner.declare(context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
context.set_current_function(runtime::FUNCTION_RUNTIME_CODE, None)?;
context.set_basic_block(context.current_function().borrow().entry_block());
@@ -1,11 +1,11 @@
//! Emulates the linear EVM heap memory via a simulated `sbrk` system call.
use inkwell::values::BasicValue;
use revive_common::BYTE_LENGTH_WORD;
use crate::polkavm::context::attribute::Attribute;
use crate::polkavm::context::runtime::RuntimeFunction;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
use crate::polkavm::WriteLLVM;
/// Simulates the `sbrk` system call, reproducing the semantics of the EVM heap memory.
@@ -24,10 +24,7 @@ use crate::polkavm::WriteLLVM;
/// - Maintains the total memory size (`msize`) in global heap size value.
pub struct Sbrk;
impl<D> RuntimeFunction<D> for Sbrk
where
D: Dependency + Clone,
{
impl RuntimeFunction for Sbrk {
const NAME: &'static str = "__sbrk_internal";
const ATTRIBUTES: &'static [Attribute] = &[
@@ -36,7 +33,7 @@ where
Attribute::WillReturn,
];
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context.llvm().ptr_type(Default::default()).fn_type(
&[context.xlen_type().into(), context.xlen_type().into()],
false,
@@ -45,7 +42,7 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<inkwell::values::BasicValueEnum<'ctx>>> {
let offset = Self::paramater(context, 0).into_int_value();
let size = Self::paramater(context, 1).into_int_value();
@@ -71,7 +68,7 @@ where
context.set_basic_block(offset_in_bounds_block);
let mask = context
.xlen_type()
.const_int(revive_common::BYTE_LENGTH_WORD as u64 - 1, false);
.const_int(BYTE_LENGTH_WORD as u64 - 1, false);
let total_size = context
.builder()
.build_int_add(offset, size, "total_size")?;
@@ -130,15 +127,12 @@ where
}
}
impl<D> WriteLLVM<D> for Sbrk
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for Sbrk {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
@@ -5,7 +5,6 @@ use inkwell::values::BasicValue;
use crate::polkavm::context::address_space::AddressSpace;
use crate::polkavm::context::Context;
use crate::PolkaVMDependency;
/// The LLVM global value.
#[derive(Debug, Clone, Copy)]
@@ -18,15 +17,14 @@ pub struct Global<'ctx> {
impl<'ctx> Global<'ctx> {
/// A shortcut constructor.
pub fn new<D, T, V>(
context: &mut Context<'ctx, D>,
pub fn new<T, V>(
context: &mut Context<'ctx>,
r#type: T,
address_space: AddressSpace,
initializer: V,
name: &str,
) -> Self
where
D: PolkaVMDependency + Clone,
T: BasicType<'ctx>,
V: BasicValue<'ctx>,
{
@@ -53,14 +51,13 @@ impl<'ctx> Global<'ctx> {
}
/// Construct an external global.
pub fn declare<D, T>(
context: &mut Context<'ctx, D>,
pub fn declare<T>(
context: &mut Context<'ctx>,
r#type: T,
address_space: AddressSpace,
name: &str,
) -> Self
where
D: PolkaVMDependency + Clone,
T: BasicType<'ctx>,
{
let r#type = r#type.as_basic_type_enum();
+45 -145
View File
@@ -1,22 +1,5 @@
//! The LLVM IR generator context.
pub mod address_space;
pub mod argument;
pub mod attribute;
pub mod build;
pub mod code_type;
pub mod debug_info;
pub mod function;
pub mod global;
pub mod r#loop;
pub mod pointer;
pub mod runtime;
pub mod solidity_data;
pub mod yul_data;
#[cfg(test)]
mod tests;
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;
@@ -32,7 +15,6 @@ use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory;
use crate::optimizer::settings::Settings as OptimizerSettings;
use crate::optimizer::Optimizer;
use crate::polkavm::DebugConfig;
use crate::polkavm::Dependency;
use crate::target_machine::target::Target;
use crate::target_machine::TargetMachine;
use crate::PolkaVMLoadHeapWordFunction;
@@ -58,13 +40,27 @@ use self::runtime::RuntimeFunction;
use self::solidity_data::SolidityData;
use self::yul_data::YulData;
pub mod address_space;
pub mod argument;
pub mod attribute;
pub mod build;
pub mod code_type;
pub mod debug_info;
pub mod function;
pub mod global;
pub mod r#loop;
pub mod pointer;
pub mod runtime;
pub mod solidity_data;
pub mod yul_data;
#[cfg(test)]
mod tests;
/// The LLVM IR generator context.
/// It is a not-so-big god-like object glueing all the compilers' complexity and act as an adapter
/// and a superstructure over the inner `inkwell` LLVM context.
pub struct Context<'ctx, D>
where
D: Dependency + Clone,
{
pub struct Context<'ctx> {
/// The inner LLVM context.
llvm: &'ctx inkwell::context::Context,
/// The inner LLVM context builder.
@@ -87,17 +83,9 @@ where
current_function: Option<Rc<RefCell<Function<'ctx>>>>,
/// The loop context stack.
loop_stack: Vec<Loop<'ctx>>,
/// The extra LLVM arguments that were used during target initialization.
llvm_arguments: &'ctx [String],
/// The PVM memory configuration.
memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
/// The project dependency manager. It can be any entity implementing the trait.
/// The manager is used to get information about contracts and their dependencies during
/// the multi-threaded compilation process.
dependency_manager: Option<D>,
/// Whether to append the metadata hash at the end of bytecode.
include_metadata_hash: bool,
/// The debug info of the current module.
debug_info: Option<DebugInfo<'ctx>>,
/// The debug configuration telling whether to dump the needed IRs.
@@ -109,10 +97,7 @@ where
yul_data: Option<YulData>,
}
impl<'ctx, D> Context<'ctx, D>
where
D: Dependency + Clone,
{
impl<'ctx> Context<'ctx> {
/// The functions hashmap default capacity.
const FUNCTIONS_HASHMAP_INITIAL_CAPACITY: usize = 64;
@@ -221,15 +206,11 @@ where
}
/// Initializes a new LLVM context.
#[allow(clippy::too_many_arguments)]
pub fn new(
llvm: &'ctx inkwell::context::Context,
module: inkwell::module::Module<'ctx>,
optimizer: Optimizer,
dependency_manager: Option<D>,
include_metadata_hash: bool,
debug_config: DebugConfig,
llvm_arguments: &'ctx [String],
memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> Self {
Self::set_data_layout(llvm, &module);
@@ -264,12 +245,8 @@ where
functions: HashMap::with_capacity(Self::FUNCTIONS_HASHMAP_INITIAL_CAPACITY),
current_function: None,
loop_stack: Vec::with_capacity(Self::LOOP_STACK_INITIAL_CAPACITY),
llvm_arguments,
memory_config,
dependency_manager,
include_metadata_hash,
debug_info,
debug_config,
@@ -280,12 +257,10 @@ where
/// Builds the LLVM IR module, returning the build artifacts.
pub fn build(
mut self,
self,
contract_path: &str,
metadata_hash: Option<[u8; revive_common::BYTE_LENGTH_WORD]>,
metadata_hash: Option<revive_common::Keccak256>,
) -> anyhow::Result<Build> {
let module_clone = self.module.clone();
self.link_polkavm_exports(contract_path)?;
self.link_immutable_data(contract_path)?;
@@ -334,33 +309,16 @@ where
)
})?;
let shared_object = revive_linker::link(buffer.as_slice())?;
let object = buffer.as_slice().to_vec();
self.debug_config
.dump_object(contract_path, &shared_object)?;
self.debug_config.dump_object(contract_path, &object)?;
let polkavm_bytecode =
revive_linker::polkavm_linker(shared_object, !self.debug_config().emit_debug_info)?;
let build = match crate::polkavm::build_assembly_text(
contract_path,
&polkavm_bytecode,
metadata_hash,
self.debug_config(),
) {
Ok(build) => build,
Err(_error)
if self.optimizer.settings() != &OptimizerSettings::size()
&& self.optimizer.settings().is_fallback_to_size_enabled() =>
{
self.optimizer = Optimizer::new(OptimizerSettings::size());
self.module = module_clone;
self.build(contract_path, metadata_hash)?
}
Err(error) => Err(error)?,
};
Ok(build)
crate::polkavm::build(
&object,
metadata_hash
.as_ref()
.map(|hash| hash.as_bytes().try_into().unwrap()),
)
}
/// Verifies the current LLVM IR module.
@@ -437,11 +395,15 @@ where
}
}
/// Declare an external global.
/// Declare an external global. This is an idempotent method.
pub fn declare_global<T>(&mut self, name: &str, r#type: T, address_space: AddressSpace)
where
T: BasicType<'ctx> + Clone + Copy,
{
if self.globals.contains_key(name) {
return;
}
let global = Global::declare(self, r#type, address_space, name);
self.globals.insert(name.to_owned(), global);
}
@@ -650,54 +612,6 @@ where
.expect("The current context is not in a loop")
}
/// Compiles a contract dependency, if the dependency manager is set.
pub fn compile_dependency(&mut self, name: &str) -> anyhow::Result<String> {
self.dependency_manager
.to_owned()
.ok_or_else(|| anyhow::anyhow!("The dependency manager is unset"))
.and_then(|manager| {
Dependency::compile(
manager,
name,
self.optimizer.settings().to_owned(),
self.include_metadata_hash,
self.debug_config.clone(),
self.llvm_arguments,
self.memory_config,
)
})
}
/// Gets a full contract_path from the dependency manager.
pub fn resolve_path(&self, identifier: &str) -> anyhow::Result<String> {
self.dependency_manager
.to_owned()
.ok_or_else(|| anyhow::anyhow!("The dependency manager is unset"))
.and_then(|manager| {
let full_path = manager.resolve_path(identifier)?;
Ok(full_path)
})
}
/// Gets a deployed library address from the dependency manager.
pub fn resolve_library(&self, path: &str) -> anyhow::Result<inkwell::values::IntValue<'ctx>> {
self.dependency_manager
.to_owned()
.ok_or_else(|| anyhow::anyhow!("The dependency manager is unset"))
.and_then(|manager| {
let address = manager.resolve_library(path)?;
let address = self.word_const_str_hex(address.as_str());
Ok(address)
})
}
/// Extracts the dependency manager.
pub fn take_dependency_manager(&mut self) -> D {
self.dependency_manager
.take()
.expect("The dependency manager is unset")
}
/// Returns the debug info.
pub fn debug_info(&self) -> Option<&DebugInfo<'ctx>> {
self.debug_info.as_ref()
@@ -808,9 +722,9 @@ where
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
match pointer.address_space {
AddressSpace::Heap => {
let name = <PolkaVMLoadHeapWordFunction as RuntimeFunction<D>>::NAME;
let name = <PolkaVMLoadHeapWordFunction as RuntimeFunction>::NAME;
let declaration =
<PolkaVMLoadHeapWordFunction as RuntimeFunction<D>>::declaration(self);
<PolkaVMLoadHeapWordFunction as RuntimeFunction>::declaration(self);
let arguments = [self
.builder()
.build_ptr_to_int(pointer.value, self.xlen_type(), "offset_ptrtoint")?
@@ -846,7 +760,7 @@ where
match pointer.address_space {
AddressSpace::Heap => {
let declaration =
<PolkaVMStoreHeapWordFunction as RuntimeFunction<D>>::declaration(self);
<PolkaVMStoreHeapWordFunction as RuntimeFunction>::declaration(self);
let arguments = [
pointer.to_int(self).as_basic_value_enum(),
value.as_basic_value_enum(),
@@ -966,10 +880,7 @@ where
pub fn build_runtime_call_to_getter(
&self,
import: &'static str,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let pointer = self.build_alloca_at_entry(self.word_type(), &format!("{import}_output"));
self.build_runtime_call(import, &[pointer.to_int(self).into()]);
self.build_load(pointer, import)
@@ -1064,7 +975,7 @@ where
length: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<()> {
self.build_call(
<Exit as RuntimeFunction<D>>::declaration(self),
<Exit as RuntimeFunction>::declaration(self),
&[flags.into(), offset.into(), length.into()],
"exit",
);
@@ -1088,14 +999,14 @@ where
Ok(self
.build_call(
<WordToPointer as RuntimeFunction<D>>::declaration(self),
<WordToPointer as RuntimeFunction>::declaration(self),
&[value.into()],
"word_to_pointer",
)
.unwrap_or_else(|| {
panic!(
"revive runtime function {} should return a value",
<WordToPointer as RuntimeFunction<D>>::NAME,
<WordToPointer as RuntimeFunction>::NAME,
)
})
.into_int_value())
@@ -1111,7 +1022,7 @@ where
size: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::PointerValue<'ctx>> {
let call_site_value = self.builder().build_call(
<PolkaVMSbrkFunction as RuntimeFunction<D>>::declaration(self).function_value(),
<PolkaVMSbrkFunction as RuntimeFunction>::declaration(self).function_value(),
&[offset.into(), size.into()],
"alloc_start",
)?;
@@ -1133,7 +1044,7 @@ where
.unwrap_or_else(|| {
panic!(
"revive runtime function {} should return a value",
<PolkaVMSbrkFunction as RuntimeFunction<D>>::NAME,
<PolkaVMSbrkFunction as RuntimeFunction>::NAME,
)
})
.into_pointer_value())
@@ -1433,19 +1344,8 @@ where
/// Returns the Yul data reference.
/// # Panics
/// If the Yul data has not been initialized.
pub fn yul(&self) -> &YulData {
self.yul_data
.as_ref()
.expect("The Yul data must have been initialized")
}
/// Returns the Yul data mutable reference.
/// # Panics
/// If the Yul data has not been initialized.
pub fn yul_mut(&mut self) -> &mut YulData {
self.yul_data
.as_mut()
.expect("The Yul data must have been initialized")
pub fn yul(&self) -> Option<&YulData> {
self.yul_data.as_ref()
}
/// Returns the current number of immutables values in the contract.
@@ -2,21 +2,20 @@
use inkwell::values::BasicValueEnum;
use revive_common::BYTE_LENGTH_BYTE;
use revive_common::BYTE_LENGTH_WORD;
use crate::polkavm::context::runtime::RuntimeFunction;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
use crate::polkavm::WriteLLVM;
/// Load a word size value from a heap pointer.
pub struct LoadWord;
impl<D> RuntimeFunction<D> for LoadWord
where
D: Dependency + Clone,
{
impl RuntimeFunction for LoadWord {
const NAME: &'static str = "__revive_load_heap_word";
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context
.word_type()
.fn_type(&[context.xlen_type().into()], false)
@@ -24,12 +23,12 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<BasicValueEnum<'ctx>>> {
let offset = Self::paramater(context, 0).into_int_value();
let length = context
.xlen_type()
.const_int(revive_common::BYTE_LENGTH_WORD as u64, false);
.const_int(BYTE_LENGTH_WORD as u64, false);
let pointer = context.build_heap_gep(offset, length)?;
let value = context
.builder()
@@ -38,7 +37,7 @@ where
.basic_block()
.get_last_instruction()
.expect("Always exists")
.set_alignment(revive_common::BYTE_LENGTH_BYTE as u32)
.set_alignment(BYTE_LENGTH_BYTE as u32)
.expect("Alignment is valid");
let swapped_value = context.build_byte_swap(value)?;
@@ -46,29 +45,23 @@ where
}
}
impl<D> WriteLLVM<D> for LoadWord
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for LoadWord {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
/// Store a word size value through a heap pointer.
pub struct StoreWord;
impl<D> RuntimeFunction<D> for StoreWord
where
D: Dependency + Clone,
{
impl RuntimeFunction for StoreWord {
const NAME: &'static str = "__revive_store_heap_word";
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context.void_type().fn_type(
&[context.xlen_type().into(), context.word_type().into()],
false,
@@ -77,12 +70,12 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<BasicValueEnum<'ctx>>> {
let offset = Self::paramater(context, 0).into_int_value();
let length = context
.xlen_type()
.const_int(revive_common::BYTE_LENGTH_WORD as u64, false);
.const_int(BYTE_LENGTH_WORD as u64, false);
let pointer = context.build_heap_gep(offset, length)?;
let value = context.build_byte_swap(Self::paramater(context, 1))?;
@@ -90,21 +83,18 @@ where
context
.builder()
.build_store(pointer.value, value)?
.set_alignment(revive_common::BYTE_LENGTH_BYTE as u32)
.set_alignment(BYTE_LENGTH_BYTE as u32)
.expect("Alignment is valid");
Ok(None)
}
}
impl<D> WriteLLVM<D> for StoreWord
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for StoreWord {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
@@ -5,7 +5,6 @@ use inkwell::types::BasicType;
use crate::polkavm::context::address_space::AddressSpace;
use crate::polkavm::context::global::Global;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
pub mod heap;
pub mod storage;
@@ -39,13 +38,10 @@ impl<'ctx> Pointer<'ctx> {
}
/// Wraps a 256-bit primitive type pointer.
pub fn new_stack_field<D>(
context: &Context<'ctx, D>,
pub fn new_stack_field(
context: &Context<'ctx>,
value: inkwell::values::PointerValue<'ctx>,
) -> Self
where
D: Dependency + Clone,
{
) -> Self {
Self {
r#type: context.word_type().as_basic_type_enum(),
address_space: AddressSpace::Stack,
@@ -54,15 +50,14 @@ impl<'ctx> Pointer<'ctx> {
}
/// Creates a new pointer with the specified `offset`.
pub fn new_with_offset<D, T>(
context: &Context<'ctx, D>,
pub fn new_with_offset<T>(
context: &Context<'ctx>,
address_space: AddressSpace,
r#type: T,
offset: inkwell::values::IntValue<'ctx>,
name: &str,
) -> Self
where
D: Dependency + Clone,
T: BasicType<'ctx>,
{
assert_ne!(
@@ -92,25 +87,19 @@ impl<'ctx> Pointer<'ctx> {
}
/// Cast this pointer to a register sized integer value.
pub fn to_int<D>(&self, context: &Context<'ctx, D>) -> inkwell::values::IntValue<'ctx>
where
D: Dependency + Clone,
{
pub fn to_int(&self, context: &Context<'ctx>) -> inkwell::values::IntValue<'ctx> {
context
.builder()
.build_ptr_to_int(self.value, context.xlen_type(), "ptr_to_xlen")
.expect("we should be positioned")
}
pub fn address_space_cast<D>(
pub fn address_space_cast(
self,
context: &Context<'ctx, D>,
context: &Context<'ctx>,
address_space: AddressSpace,
name: &str,
) -> anyhow::Result<Self>
where
D: Dependency + Clone,
{
) -> anyhow::Result<Self> {
let value = context.builder().build_address_space_cast(
self.value,
context.llvm().ptr_type(address_space.into()),
@@ -4,19 +4,15 @@ use inkwell::values::BasicValueEnum;
use crate::polkavm::context::runtime::RuntimeFunction;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
use crate::polkavm::WriteLLVM;
/// Load a word size value from a storage pointer.
pub struct LoadWord;
impl<D> RuntimeFunction<D> for LoadWord
where
D: Dependency + Clone,
{
impl RuntimeFunction for LoadWord {
const NAME: &'static str = "__revive_load_storage_word";
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context
.word_type()
.fn_type(&[context.llvm().ptr_type(Default::default()).into()], false)
@@ -24,7 +20,7 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<BasicValueEnum<'ctx>>> {
Ok(Some(emit_load(
context,
@@ -34,29 +30,23 @@ where
}
}
impl<D> WriteLLVM<D> for LoadWord
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for LoadWord {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
/// Load a word size value from a transient storage pointer.
pub struct LoadTransientWord;
impl<D> RuntimeFunction<D> for LoadTransientWord
where
D: Dependency + Clone,
{
impl RuntimeFunction for LoadTransientWord {
const NAME: &'static str = "__revive_load_transient_storage_word";
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context
.word_type()
.fn_type(&[context.llvm().ptr_type(Default::default()).into()], false)
@@ -64,35 +54,29 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<BasicValueEnum<'ctx>>> {
Ok(Some(emit_load(context, Self::paramater(context, 0), true)?))
}
}
impl<D> WriteLLVM<D> for LoadTransientWord
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for LoadTransientWord {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
/// Store a word size value through a storage pointer.
pub struct StoreWord;
impl<D> RuntimeFunction<D> for StoreWord
where
D: Dependency + Clone,
{
impl RuntimeFunction for StoreWord {
const NAME: &'static str = "__revive_store_storage_word";
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context.void_type().fn_type(
&[
context.llvm().ptr_type(Default::default()).into(),
@@ -104,7 +88,7 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<BasicValueEnum<'ctx>>> {
emit_store(
context,
@@ -117,29 +101,23 @@ where
}
}
impl<D> WriteLLVM<D> for StoreWord
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for StoreWord {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
/// Store a word size value through a transient storage pointer.
pub struct StoreTransientWord;
impl<D> RuntimeFunction<D> for StoreTransientWord
where
D: Dependency + Clone,
{
impl RuntimeFunction for StoreTransientWord {
const NAME: &'static str = "__revive_store_transient_storage_word";
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context.void_type().fn_type(
&[
context.llvm().ptr_type(Default::default()).into(),
@@ -151,7 +129,7 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<BasicValueEnum<'ctx>>> {
emit_store(
context,
@@ -164,21 +142,18 @@ where
}
}
impl<D> WriteLLVM<D> for StoreTransientWord
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for StoreTransientWord {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
fn emit_load<'ctx, D: Dependency + Clone>(
context: &mut Context<'ctx, D>,
fn emit_load<'ctx>(
context: &mut Context<'ctx>,
key: BasicValueEnum<'ctx>,
transient: bool,
) -> anyhow::Result<BasicValueEnum<'ctx>> {
@@ -229,8 +204,8 @@ fn emit_load<'ctx, D: Dependency + Clone>(
})
}
fn emit_store<'ctx, D: Dependency + Clone>(
context: &mut Context<'ctx, D>,
fn emit_store<'ctx>(
context: &mut Context<'ctx>,
key: BasicValueEnum<'ctx>,
value: BasicValueEnum<'ctx>,
transient: bool,
@@ -8,14 +8,10 @@ use crate::polkavm::context::function::declaration::Declaration;
use crate::polkavm::context::function::Function;
use crate::polkavm::context::Attribute;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
/// The revive runtime function interface simplifies declaring runtime functions
/// and code emitting by providing helpful default implementations.
pub trait RuntimeFunction<D>
where
D: Dependency + Clone,
{
pub trait RuntimeFunction {
/// The function name.
const NAME: &'static str;
@@ -26,10 +22,10 @@ where
];
/// The function type.
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx>;
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx>;
/// Declare the function.
fn declare(&self, context: &mut Context<D>) -> anyhow::Result<()> {
fn declare(&self, context: &mut Context) -> anyhow::Result<()> {
let function = context.add_function(
Self::NAME,
Self::r#type(context),
@@ -54,7 +50,7 @@ where
}
/// Get the function declaration.
fn declaration<'ctx>(context: &Context<'ctx, D>) -> Declaration<'ctx> {
fn declaration<'ctx>(context: &Context<'ctx>) -> Declaration<'ctx> {
context
.get_function(Self::NAME)
.unwrap_or_else(|| panic!("runtime function {} should be declared", Self::NAME))
@@ -63,7 +59,7 @@ where
}
/// Emit the function.
fn emit(&self, context: &mut Context<D>) -> anyhow::Result<()> {
fn emit(&self, context: &mut Context) -> anyhow::Result<()> {
context.set_current_function(Self::NAME, None)?;
context.set_basic_block(context.current_function().borrow().entry_block());
@@ -78,13 +74,13 @@ where
/// Emit the function body.
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<inkwell::values::BasicValueEnum<'ctx>>>;
/// Emit the function return instructions.
fn emit_epilogue<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
return_value: Option<inkwell::values::BasicValueEnum<'ctx>>,
) {
let return_block = context.current_function().borrow().return_block();
@@ -98,7 +94,7 @@ where
/// Get the nth function paramater.
fn paramater<'ctx>(
context: &Context<'ctx, D>,
context: &Context<'ctx>,
index: usize,
) -> inkwell::values::BasicValueEnum<'ctx> {
let name = Self::NAME;
@@ -2,6 +2,8 @@
use std::collections::BTreeMap;
use revive_common::BYTE_LENGTH_WORD;
/// The LLVM IR generator Solidity data.
/// Describes some data that is only relevant to Solidity.
#[derive(Debug, Default)]
@@ -19,14 +21,14 @@ impl SolidityData {
/// Returns the current size of immutable values in the contract.
pub fn immutables_size(&self) -> usize {
self.immutables.len() * revive_common::BYTE_LENGTH_WORD
self.immutables.len() * BYTE_LENGTH_WORD
}
/// Allocates memory for an immutable value in the auxiliary heap.
/// If the identifier is already known, just returns its offset.
pub fn allocate_immutable(&mut self, identifier: &str) -> usize {
let number_of_elements = self.immutables.len();
let new_offset = number_of_elements * revive_common::BYTE_LENGTH_WORD;
let new_offset = number_of_elements * BYTE_LENGTH_WORD;
*self
.immutables
.entry(identifier.to_owned())
@@ -4,24 +4,21 @@ use crate::optimizer::settings::Settings as OptimizerSettings;
use crate::optimizer::Optimizer;
use crate::polkavm::context::attribute::Attribute;
use crate::polkavm::context::Context;
use crate::polkavm::DummyDependency;
use crate::PolkaVMTarget;
pub fn create_context(
llvm: &inkwell::context::Context,
optimizer_settings: OptimizerSettings,
) -> Context<'_, DummyDependency> {
crate::initialize_llvm(crate::Target::PVM, "resolc", Default::default());
) -> Context<'_> {
crate::initialize_llvm(PolkaVMTarget::PVM, "resolc", Default::default());
let module = llvm.create_module("test");
let optimizer = Optimizer::new(optimizer_settings);
Context::<DummyDependency>::new(
Context::new(
llvm,
module,
optimizer,
None,
true,
Default::default(),
Default::default(),
Default::default(),
)
@@ -2,60 +2,25 @@
use std::collections::BTreeMap;
use num::Zero;
/// The LLVM IR generator Yul data.
/// Describes some data that is only relevant to Yul.
///
/// Contains data that is only relevant to Yul.
#[derive(Debug, Default)]
pub struct YulData {
/// The list of constant arrays in the code section.
/// It is a temporary storage used until the finalization method is called.
const_arrays: BTreeMap<u8, Vec<num::BigUint>>,
/// Mapping from Yul object identifiers to full contract paths.
identifier_paths: BTreeMap<String, String>,
}
impl YulData {
/// Declares a temporary constant array representation.
pub fn const_array_declare(&mut self, index: u8, size: u16) -> anyhow::Result<()> {
if self.const_arrays.contains_key(&index) {
anyhow::bail!(
"The constant array with index {} is already declared",
index
);
}
self.const_arrays
.insert(index, vec![num::BigUint::zero(); size as usize]);
Ok(())
/// A shorthand constructor.
pub fn new(identifier_paths: BTreeMap<String, String>) -> Self {
Self { identifier_paths }
}
/// Sets a value in the constant array representation.
pub fn const_array_set(
&mut self,
index: u8,
offset: u16,
value: num::BigUint,
) -> anyhow::Result<()> {
let array = self.const_arrays.get_mut(&index).ok_or_else(|| {
anyhow::anyhow!("The constant array with index {} is not declared", index)
})?;
if offset >= array.len() as u16 {
anyhow::bail!(
"The constant array with index {} has size {} but the offset is {}",
index,
array.len(),
offset,
);
}
array[offset as usize] = value;
Ok(())
}
/// Finalizes the constant array declaration.
pub fn const_array_take(&mut self, index: u8) -> anyhow::Result<Vec<num::BigUint>> {
self.const_arrays.remove(&index).ok_or_else(|| {
anyhow::anyhow!("The constant array with index {} is not declared", index)
})
/// Resolves the full contract path by the Yul object identifier.
pub fn resolve_path(&self, identifier: &str) -> Option<&str> {
self.identifier_paths
.get(identifier)
.map(|path| path.as_str())
}
}
@@ -4,21 +4,17 @@ use inkwell::values::BasicValue;
use crate::polkavm::context::runtime::RuntimeFunction;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
use crate::PolkaVMDivisionFunction;
use crate::PolkaVMRemainderFunction;
use crate::PolkaVMSignedDivisionFunction;
use crate::PolkaVMSignedRemainderFunction;
/// Translates the arithmetic addition.
pub fn addition<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn addition<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
Ok(context
.builder()
.build_int_add(operand_1, operand_2, "addition_result")?
@@ -26,14 +22,11 @@ where
}
/// Translates the arithmetic subtraction.
pub fn subtraction<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn subtraction<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
Ok(context
.builder()
.build_int_sub(operand_1, operand_2, "subtraction_result")?
@@ -41,14 +34,11 @@ where
}
/// Translates the arithmetic multiplication.
pub fn multiplication<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn multiplication<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
Ok(context
.builder()
.build_int_mul(operand_1, operand_2, "multiplication_result")?
@@ -56,32 +46,26 @@ where
}
/// Translates the arithmetic division.
pub fn division<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn division<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
let name = <PolkaVMDivisionFunction as RuntimeFunction<D>>::NAME;
let declaration = <PolkaVMDivisionFunction as RuntimeFunction<D>>::declaration(context);
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let name = <PolkaVMDivisionFunction as RuntimeFunction>::NAME;
let declaration = <PolkaVMDivisionFunction as RuntimeFunction>::declaration(context);
Ok(context
.build_call(declaration, &[operand_1.into(), operand_2.into()], "div")
.unwrap_or_else(|| panic!("revive runtime function {name} should return a value",)))
}
/// Translates the arithmetic remainder.
pub fn remainder<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn remainder<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
let name = <PolkaVMRemainderFunction as RuntimeFunction<D>>::NAME;
let declaration = <PolkaVMRemainderFunction as RuntimeFunction<D>>::declaration(context);
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let name = <PolkaVMRemainderFunction as RuntimeFunction>::NAME;
let declaration = <PolkaVMRemainderFunction as RuntimeFunction>::declaration(context);
Ok(context
.build_call(declaration, &[operand_1.into(), operand_2.into()], "rem")
.unwrap_or_else(|| panic!("revive runtime function {name} should return a value",)))
@@ -91,32 +75,26 @@ where
/// Two differences between the EVM and LLVM IR:
/// 1. In case of division by zero, 0 is returned.
/// 2. In case of overflow, the first argument is returned.
pub fn division_signed<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn division_signed<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
let name = <PolkaVMSignedDivisionFunction as RuntimeFunction<D>>::NAME;
let declaration = <PolkaVMSignedDivisionFunction as RuntimeFunction<D>>::declaration(context);
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let name = <PolkaVMSignedDivisionFunction as RuntimeFunction>::NAME;
let declaration = <PolkaVMSignedDivisionFunction as RuntimeFunction>::declaration(context);
Ok(context
.build_call(declaration, &[operand_1.into(), operand_2.into()], "sdiv")
.unwrap_or_else(|| panic!("revive runtime function {name} should return a value",)))
}
/// Translates the signed arithmetic remainder.
pub fn remainder_signed<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn remainder_signed<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
let name = <PolkaVMSignedRemainderFunction as RuntimeFunction<D>>::NAME;
let declaration = <PolkaVMSignedRemainderFunction as RuntimeFunction<D>>::declaration(context);
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let name = <PolkaVMSignedRemainderFunction as RuntimeFunction>::NAME;
let declaration = <PolkaVMSignedRemainderFunction as RuntimeFunction>::declaration(context);
Ok(context
.build_call(declaration, &[operand_1.into(), operand_2.into()], "srem")
.unwrap_or_else(|| panic!("revive runtime function {name} should return a value",)))
+32 -54
View File
@@ -2,18 +2,17 @@
use inkwell::values::BasicValue;
use revive_common::BIT_LENGTH_BYTE;
use revive_common::BIT_LENGTH_WORD;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
/// Translates the bitwise OR.
pub fn or<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn or<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
Ok(context
.builder()
.build_or(operand_1, operand_2, "or_result")?
@@ -21,14 +20,11 @@ where
}
/// Translates the bitwise XOR.
pub fn xor<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn xor<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
Ok(context
.builder()
.build_xor(operand_1, operand_2, "xor_result")?
@@ -36,14 +32,11 @@ where
}
/// Translates the bitwise AND.
pub fn and<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn and<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
Ok(context
.builder()
.build_and(operand_1, operand_2, "and_result")?
@@ -51,14 +44,11 @@ where
}
/// Translates the bitwise shift left.
pub fn shift_left<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn shift_left<'ctx>(
context: &mut Context<'ctx>,
shift: inkwell::values::IntValue<'ctx>,
value: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let overflow_block = context.append_basic_block("shift_left_overflow");
let non_overflow_block = context.append_basic_block("shift_left_non_overflow");
let join_block = context.append_basic_block("shift_left_join");
@@ -66,7 +56,7 @@ where
let condition_is_overflow = context.builder().build_int_compare(
inkwell::IntPredicate::UGT,
shift,
context.word_const((revive_common::BIT_LENGTH_WORD - 1) as u64),
context.word_const((BIT_LENGTH_WORD - 1) as u64),
"shift_left_is_overflow",
)?;
context.build_conditional_branch(condition_is_overflow, overflow_block, non_overflow_block)?;
@@ -93,14 +83,11 @@ where
}
/// Translates the bitwise shift right.
pub fn shift_right<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn shift_right<'ctx>(
context: &mut Context<'ctx>,
shift: inkwell::values::IntValue<'ctx>,
value: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let overflow_block = context.append_basic_block("shift_right_overflow");
let non_overflow_block = context.append_basic_block("shift_right_non_overflow");
let join_block = context.append_basic_block("shift_right_join");
@@ -108,7 +95,7 @@ where
let condition_is_overflow = context.builder().build_int_compare(
inkwell::IntPredicate::UGT,
shift,
context.word_const((revive_common::BIT_LENGTH_WORD - 1) as u64),
context.word_const((BIT_LENGTH_WORD - 1) as u64),
"shift_right_is_overflow",
)?;
context.build_conditional_branch(condition_is_overflow, overflow_block, non_overflow_block)?;
@@ -137,14 +124,11 @@ where
}
/// Translates the arithmetic bitwise shift right.
pub fn shift_right_arithmetic<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn shift_right_arithmetic<'ctx>(
context: &mut Context<'ctx>,
shift: inkwell::values::IntValue<'ctx>,
value: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let overflow_block = context.append_basic_block("shift_right_arithmetic_overflow");
let overflow_positive_block =
context.append_basic_block("shift_right_arithmetic_overflow_positive");
@@ -156,7 +140,7 @@ where
let condition_is_overflow = context.builder().build_int_compare(
inkwell::IntPredicate::UGT,
shift,
context.word_const((revive_common::BIT_LENGTH_WORD - 1) as u64),
context.word_const((BIT_LENGTH_WORD - 1) as u64),
"shift_right_arithmetic_is_overflow",
)?;
context.build_conditional_branch(condition_is_overflow, overflow_block, non_overflow_block)?;
@@ -164,7 +148,7 @@ where
context.set_basic_block(overflow_block);
let sign_bit = context.builder().build_right_shift(
value,
context.word_const((revive_common::BIT_LENGTH_WORD - 1) as u64),
context.word_const((BIT_LENGTH_WORD - 1) as u64),
false,
"shift_right_arithmetic_sign_bit",
)?;
@@ -217,14 +201,11 @@ where
/// Because this opcode returns zero on overflows, the index `operand_1`
/// is checked for overflow. On overflow, the mask will be all zeros,
/// resulting in a branchless implementation.
pub fn byte<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn byte<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
const MAX_INDEX_BYTES: u64 = 31;
let is_overflow_bit = context.builder().build_int_compare(
@@ -254,16 +235,13 @@ where
.build_int_truncate(operand_1, context.byte_type(), "index_truncated")?;
let index_in_bits = context.builder().build_int_mul(
index_truncated,
context
.byte_type()
.const_int(revive_common::BIT_LENGTH_BYTE as u64, false),
context.byte_type().const_int(BIT_LENGTH_BYTE as u64, false),
"index_in_bits",
)?;
let index_from_most_significant_bit = context.builder().build_int_sub(
context.byte_type().const_int(
MAX_INDEX_BYTES * revive_common::BIT_LENGTH_BYTE as u64,
false,
),
context
.byte_type()
.const_int(MAX_INDEX_BYTES * BIT_LENGTH_BYTE as u64, false),
index_in_bits,
"index_from_msb",
)?;
+19 -37
View File
@@ -2,18 +2,15 @@
use inkwell::values::BasicValue;
use crate::polkavm::context::argument::Argument;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
const STATIC_CALL_FLAG: u32 = 0b0001_0000;
const REENTRANT_CALL_FLAG: u32 = 0b0000_1000;
const SOLIDITY_TRANSFER_GAS_STIPEND_THRESHOLD: u64 = 2300;
/// Translates a contract call.
#[allow(clippy::too_many_arguments)]
pub fn call<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn call<'ctx>(
context: &mut Context<'ctx>,
gas: inkwell::values::IntValue<'ctx>,
address: inkwell::values::IntValue<'ctx>,
value: Option<inkwell::values::IntValue<'ctx>>,
@@ -23,10 +20,7 @@ pub fn call<'ctx, D>(
output_length: inkwell::values::IntValue<'ctx>,
_constants: Vec<Option<num::BigUint>>,
static_call: bool,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let address_pointer = context.build_address_argument_store(address)?;
let value = value.unwrap_or_else(|| context.word_const(0));
@@ -115,9 +109,8 @@ where
.as_basic_value_enum())
}
#[allow(clippy::too_many_arguments)]
pub fn delegate_call<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn delegate_call<'ctx>(
context: &mut Context<'ctx>,
_gas: inkwell::values::IntValue<'ctx>,
address: inkwell::values::IntValue<'ctx>,
input_offset: inkwell::values::IntValue<'ctx>,
@@ -125,10 +118,7 @@ pub fn delegate_call<'ctx, D>(
output_offset: inkwell::values::IntValue<'ctx>,
output_length: inkwell::values::IntValue<'ctx>,
_constants: Vec<Option<num::BigUint>>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let address_pointer = context.build_address_argument_store(address)?;
let input_offset = context.safe_truncate_int_to_xlen(input_offset)?;
@@ -199,21 +189,16 @@ where
}
/// Translates the Yul `linkersymbol` instruction.
pub fn linker_symbol<'ctx, D>(
context: &mut Context<'ctx, D>,
mut arguments: [Argument<'ctx>; 1],
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
let path = arguments[0]
.original
.take()
.ok_or_else(|| anyhow::anyhow!("Linker symbol literal is missing"))?;
Ok(context
.resolve_library(path.as_str())?
.as_basic_value_enum())
pub fn linker_symbol<'ctx>(
context: &mut Context<'ctx>,
path: &str,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
context.declare_global(
path,
context.integer_type(revive_common::BIT_LENGTH_ETH_ADDRESS),
Default::default(),
);
context.build_load_address(context.get_global(path)?.into())
}
/// The Solidity `address.transfer` and `address.send` call detection heuristic.
@@ -236,18 +221,15 @@ where
///
/// # Returns
/// The call flags xlen `IntValue` and the deposit limit word `IntValue`.
fn call_reentrancy_heuristic<'ctx, D>(
context: &mut Context<'ctx, D>,
fn call_reentrancy_heuristic<'ctx>(
context: &mut Context<'ctx>,
gas: inkwell::values::IntValue<'ctx>,
input_length: inkwell::values::IntValue<'ctx>,
output_length: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<(
inkwell::values::IntValue<'ctx>,
inkwell::values::IntValue<'ctx>,
)>
where
D: Dependency + Clone,
{
)> {
// Branch-free SSA implementation: First derive the heuristic boolean (int1) value.
let input_length_or_output_length =
context
@@ -1,16 +1,12 @@
//! Translates the calldata instructions.
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
/// Translates the calldata load.
pub fn load<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn load<'ctx>(
context: &mut Context<'ctx>,
offset: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let output_pointer = context.build_alloca_at_entry(context.word_type(), "call_data_output");
let offset = context.safe_truncate_int_to_xlen(offset)?;
@@ -23,12 +19,9 @@ where
}
/// Translates the calldata size.
pub fn size<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn size<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let value = context.get_global_value(crate::polkavm::GLOBAL_CALLDATA_SIZE)?;
Ok(context
.builder()
@@ -41,15 +34,12 @@ where
}
/// Translates the calldata copy.
pub fn copy<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn copy<'ctx>(
context: &mut Context<'ctx>,
destination_offset: inkwell::values::IntValue<'ctx>,
source_offset: inkwell::values::IntValue<'ctx>,
size: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
) -> anyhow::Result<()> {
let source_offset = context.safe_truncate_int_to_xlen(source_offset)?;
let size = context.safe_truncate_int_to_xlen(size)?;
let destination_offset = context.safe_truncate_int_to_xlen(destination_offset)?;
@@ -3,19 +3,15 @@
use inkwell::values::BasicValue;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
/// Translates the comparison operations.
/// There is not difference between the EVM and LLVM IR behaviors.
pub fn compare<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn compare<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
operation: inkwell::IntPredicate,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let result = context.builder().build_int_compare(
operation,
operand_1,
+39 -74
View File
@@ -2,17 +2,15 @@
use inkwell::values::BasicValue;
use revive_common::BIT_LENGTH_ETH_ADDRESS;
use crate::polkavm::context::pointer::Pointer;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
/// Translates the `gas_limit` instruction.
pub fn gas_limit<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn gas_limit<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let gas_limit_value = context
.build_runtime_call(revive_runtime_api::polkavm_imports::GAS_LIMIT, &[])
.expect("the gas_limit syscall method should return a value")
@@ -25,12 +23,9 @@ where
}
/// Translates the `gas_price` instruction.
pub fn gas_price<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn gas_price<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let gas_price_value = context
.build_runtime_call(revive_runtime_api::polkavm_imports::GAS_PRICE, &[])
.expect("the gas_price syscall method should return a value")
@@ -43,13 +38,10 @@ where
}
/// Translates the `tx.origin` instruction.
pub fn origin<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
let address_type = context.integer_type(revive_common::BIT_LENGTH_ETH_ADDRESS);
pub fn origin<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let address_type = context.integer_type(BIT_LENGTH_ETH_ADDRESS);
let address_pointer: Pointer<'_> = context
.get_global(crate::polkavm::GLOBAL_ADDRESS_SPILL_BUFFER)?
.into();
@@ -62,43 +54,31 @@ where
}
/// Translates the `chain_id` instruction.
pub fn chain_id<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn chain_id<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
context.build_runtime_call_to_getter(revive_runtime_api::polkavm_imports::CHAIN_ID)
}
/// Translates the `block_number` instruction.
pub fn block_number<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn block_number<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
context.build_runtime_call_to_getter(revive_runtime_api::polkavm_imports::BLOCK_NUMBER)
}
/// Translates the `block_timestamp` instruction.
pub fn block_timestamp<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn block_timestamp<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
context.build_runtime_call_to_getter(revive_runtime_api::polkavm_imports::NOW)
}
/// Translates the `block_hash` instruction.
pub fn block_hash<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn block_hash<'ctx>(
context: &mut Context<'ctx>,
index: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let output_pointer = context.build_alloca_at_entry(context.word_type(), "blockhash_out_ptr");
let index_pointer = context.build_alloca_at_entry(context.word_type(), "blockhash_index_ptr");
context.build_store(index_pointer, index)?;
@@ -114,22 +94,16 @@ where
}
/// Translates the `difficulty` instruction.
pub fn difficulty<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn difficulty<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
Ok(context.word_const(2500000000000000).as_basic_value_enum())
}
/// Translates the `coinbase` instruction.
pub fn coinbase<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn coinbase<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let pointer: Pointer<'_> = context
.get_global(crate::polkavm::GLOBAL_ADDRESS_SPILL_BUFFER)?
.into();
@@ -141,22 +115,16 @@ where
}
/// Translates the `basefee` instruction.
pub fn basefee<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn basefee<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
context.build_runtime_call_to_getter(revive_runtime_api::polkavm_imports::BASE_FEE)
}
/// Translates the `address` instruction.
pub fn address<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn address<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let pointer: Pointer<'_> = context
.get_global(crate::polkavm::GLOBAL_ADDRESS_SPILL_BUFFER)?
.into();
@@ -168,12 +136,9 @@ where
}
/// Translates the `caller` instruction.
pub fn caller<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn caller<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let pointer: Pointer<'_> = context
.get_global(crate::polkavm::GLOBAL_ADDRESS_SPILL_BUFFER)?
.into();
+58 -54
View File
@@ -3,24 +3,22 @@
use inkwell::values::BasicValue;
use num::Zero;
use revive_common::BIT_LENGTH_ETH_ADDRESS;
use crate::polkavm::context::argument::Argument;
use crate::polkavm::context::code_type::CodeType;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
/// Translates the contract `create` and `create2` instruction.
///
/// A `salt` value of `None` is equivalent to `create1`.
pub fn create<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn create<'ctx>(
context: &mut Context<'ctx>,
value: inkwell::values::IntValue<'ctx>,
input_offset: inkwell::values::IntValue<'ctx>,
input_length: inkwell::values::IntValue<'ctx>,
salt: Option<inkwell::values::IntValue<'ctx>>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let input_offset = context.safe_truncate_int_to_xlen(input_offset)?;
let input_length = context.safe_truncate_int_to_xlen(input_length)?;
@@ -40,7 +38,7 @@ where
};
let address_pointer = context.build_alloca_at_entry(
context.integer_type(revive_common::BIT_LENGTH_ETH_ADDRESS),
context.integer_type(BIT_LENGTH_ETH_ADDRESS),
"address_pointer",
);
context.build_store(address_pointer, context.word_const(0))?;
@@ -96,77 +94,83 @@ where
/// Translates the contract hash instruction, which is actually used to set the hash of the contract
/// being created, or other related auxiliary data.
/// Represents `dataoffset` in Yul and `PUSH [$]` in the EVM legacy assembly.
pub fn contract_hash<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn contract_hash<'ctx>(
context: &mut Context<'ctx>,
identifier: String,
) -> anyhow::Result<Argument<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<Argument<'ctx>> {
let code_type = context
.code_type()
.ok_or_else(|| anyhow::anyhow!("The contract code part type is undefined"))?;
let parent = context.module().get_name().to_str().expect("Always valid");
let contract_path =
context
.resolve_path(identifier.as_str())
.map_err(|error| match code_type {
CodeType::Runtime if identifier.ends_with("_deployed") => {
anyhow::anyhow!("type({}).runtimeCode is not supported", identifier)
}
_ => error,
})?;
if contract_path.as_str() == parent {
return Ok(Argument::value(context.word_const(0).as_basic_value_enum())
.with_constant(num::BigUint::zero()));
} else if identifier.ends_with("_deployed") && code_type == CodeType::Runtime {
anyhow::bail!("type({}).runtimeCode is not supported", identifier);
let full_path = match context.yul() {
Some(yul_data) => yul_data
.resolve_path(
identifier
.strip_suffix("_deployed")
.unwrap_or(identifier.as_str()),
)
.expect("Always exists")
.to_owned(),
None => identifier.clone(),
};
match code_type {
CodeType::Deploy if full_path == parent => {
return Ok(Argument::value(context.word_const(0).as_basic_value_enum())
.with_constant(num::BigUint::zero()));
}
CodeType::Runtime if context.yul().is_some() && identifier.ends_with("_deployed") => {
anyhow::bail!("type({identifier}).runtimeCode is not supported");
}
_ => {}
}
let hash_string = context.compile_dependency(identifier.as_str())?;
let hash_value = context
.word_const_str_hex(hash_string.as_str())
.as_basic_value_enum();
Ok(Argument::value(hash_value).with_original(hash_string))
context.declare_global(&full_path, context.word_type(), Default::default());
context
.build_load(context.get_global(&full_path)?.into(), &full_path)
.map(Argument::value)
}
/// Translates the deploy call header size instruction. the header consists of
/// the hash of the bytecode of the contract whose instance is being created.
/// Represents `datasize` in Yul and `PUSH #[$]` in the EVM legacy assembly.
pub fn header_size<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn header_size<'ctx>(
context: &mut Context<'ctx>,
identifier: String,
) -> anyhow::Result<Argument<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<Argument<'ctx>> {
let code_type = context
.code_type()
.ok_or_else(|| anyhow::anyhow!("The contract code part type is undefined"))?;
let parent = context.module().get_name().to_str().expect("Always valid");
let contract_path =
context
.resolve_path(identifier.as_str())
.map_err(|error| match code_type {
CodeType::Runtime if identifier.ends_with("_deployed") => {
anyhow::anyhow!("type({}).runtimeCode is not supported", identifier)
}
_ => error,
})?;
if contract_path.as_str() == parent {
return Ok(Argument::value(context.word_const(0).as_basic_value_enum())
.with_constant(num::BigUint::zero()));
} else if identifier.ends_with("_deployed") && code_type == CodeType::Runtime {
anyhow::bail!("type({}).runtimeCode is not supported", identifier);
let full_path = match context.yul() {
Some(yul_data) => yul_data
.resolve_path(
identifier
.strip_suffix("_deployed")
.unwrap_or(identifier.as_str()),
)
.unwrap_or_else(|| panic!("ICE: {identifier} not found {yul_data:?}")),
None => identifier.as_str(),
};
match code_type {
CodeType::Deploy if full_path == parent => {
return Ok(Argument::value(context.word_const(0).as_basic_value_enum())
.with_constant(num::BigUint::zero()));
}
CodeType::Runtime if context.yul().is_some() && identifier.ends_with("_deployed") => {
anyhow::bail!("type({identifier}).runtimeCode is not supported");
}
_ => {}
}
let size_bigint = num::BigUint::from(crate::polkavm::DEPLOYER_CALL_HEADER_SIZE);
let size_value = context
.word_const(crate::polkavm::DEPLOYER_CALL_HEADER_SIZE as u64)
.as_basic_value_enum();
let size_bigint = num::BigUint::from(crate::polkavm::DEPLOYER_CALL_HEADER_SIZE);
Ok(Argument::value(size_value).with_constant(size_bigint))
}
@@ -1,17 +1,13 @@
//! Translates the cryptographic operations.
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
/// Translates the `sha3` instruction.
pub fn sha3<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn sha3<'ctx>(
context: &mut Context<'ctx>,
offset: inkwell::values::IntValue<'ctx>,
length: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let offset_casted = context.safe_truncate_int_to_xlen(offset)?;
let length_casted = context.safe_truncate_int_to_xlen(length)?;
let input_pointer = context.build_heap_gep(offset_casted, length_casted)?;
@@ -1,15 +1,11 @@
//! Translates the value and balance operations.
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
/// Translates the `gas` instruction.
pub fn gas<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn gas<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let ref_time_left_value = context
.build_runtime_call(revive_runtime_api::polkavm_imports::REF_TIME_LEFT, &[])
.expect("the ref_time_left syscall method should return a value")
@@ -22,12 +18,9 @@ where
}
/// Translates the `value` instruction.
pub fn value<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn value<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let output_pointer = context.build_alloca_at_entry(context.value_type(), "value_transferred");
context.build_store(output_pointer, context.word_const(0))?;
context.build_runtime_call(
@@ -38,13 +31,10 @@ where
}
/// Translates the `balance` instructions.
pub fn balance<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn balance<'ctx>(
context: &mut Context<'ctx>,
address: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let address_pointer = context.build_address_argument_store(address)?;
let balance_pointer = context.build_alloca_at_entry(context.word_type(), "balance_pointer");
let balance = context.builder().build_ptr_to_int(
@@ -62,12 +52,9 @@ where
}
/// Translates the `selfbalance` instructions.
pub fn self_balance<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn self_balance<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let balance_pointer = context.build_alloca_at_entry(context.word_type(), "balance_pointer");
let balance = context.builder().build_ptr_to_int(
balance_pointer.value,
+35 -56
View File
@@ -1,19 +1,16 @@
//! Translates a log or event call.
use inkwell::values::BasicValue;
use revive_common::BYTE_LENGTH_WORD;
use crate::polkavm::context::runtime::RuntimeFunction;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
use crate::polkavm::WriteLLVM;
/// A function for emitting EVM event logs from contract code.
pub struct EventLog<const N: usize>;
impl<D, const N: usize> RuntimeFunction<D> for EventLog<N>
where
D: Dependency + Clone,
{
impl<const N: usize> RuntimeFunction for EventLog<N> {
const NAME: &'static str = match N {
0 => "__revive_log_0",
1 => "__revive_log_1",
@@ -23,7 +20,7 @@ where
_ => unreachable!(),
};
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
let mut parameter_types = vec![context.xlen_type().into(), context.xlen_type().into()];
parameter_types.extend_from_slice(&[context.word_type().into(); N]);
context.void_type().fn_type(&parameter_types, false)
@@ -31,7 +28,7 @@ where
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<inkwell::values::BasicValueEnum<'ctx>>> {
let input_offset = Self::paramater(context, 0).into_int_value();
let input_length = Self::paramater(context, 1).into_int_value();
@@ -49,7 +46,7 @@ where
input_length.as_basic_value_enum(),
]
} else {
let topics_buffer_size = N * revive_common::BYTE_LENGTH_WORD;
let topics_buffer_size = N * BYTE_LENGTH_WORD;
let topics_buffer_pointer = context.build_alloca_at_entry(
context.byte_type().array_type(topics_buffer_size as u32),
"topics_buffer",
@@ -59,7 +56,7 @@ where
let topic = Self::paramater(context, n + 2);
let topic_buffer_offset = context
.xlen_type()
.const_int((n * revive_common::BYTE_LENGTH_WORD) as u64, false);
.const_int((n * BYTE_LENGTH_WORD) as u64, false);
context.build_store(
context.build_gep(
topics_buffer_pointer,
@@ -98,82 +95,64 @@ where
}
}
impl<D> WriteLLVM<D> for EventLog<0>
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for EventLog<0> {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
impl<D> WriteLLVM<D> for EventLog<1>
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for EventLog<1> {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
impl<D> WriteLLVM<D> for EventLog<2>
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for EventLog<2> {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
impl<D> WriteLLVM<D> for EventLog<3>
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for EventLog<3> {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
impl<D> WriteLLVM<D> for EventLog<4>
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for EventLog<4> {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
/// Translates a log or event call.
pub fn log<'ctx, D, const N: usize>(
context: &mut Context<'ctx, D>,
pub fn log<'ctx, const N: usize>(
context: &mut Context<'ctx>,
input_offset: inkwell::values::IntValue<'ctx>,
input_length: inkwell::values::IntValue<'ctx>,
topics: [inkwell::values::BasicValueEnum<'ctx>; N],
) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
let declaration = <EventLog<N> as RuntimeFunction<D>>::declaration(context);
) -> anyhow::Result<()> {
let declaration = <EventLog<N> as RuntimeFunction>::declaration(context);
let mut arguments = vec![
context.safe_truncate_int_to_xlen(input_offset)?.into(),
context.safe_truncate_int_to_xlen(input_length)?.into(),
@@ -1,17 +1,15 @@
//! Translates the external code operations.
use revive_common::BIT_LENGTH_ETH_ADDRESS;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
/// Translates the `extcodesize` instruction if `address` is `Some`.
/// Otherwise, translates the `codesize` instruction.
pub fn size<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn size<'ctx>(
context: &mut Context<'ctx>,
address: Option<inkwell::values::IntValue<'ctx>>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let address = match address {
Some(address) => address,
None => super::context::address(context)?.into_int_value(),
@@ -33,14 +31,11 @@ where
}
/// Translates the `extcodehash` instruction.
pub fn hash<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn hash<'ctx>(
context: &mut Context<'ctx>,
address: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
let address_type = context.integer_type(revive_common::BIT_LENGTH_ETH_ADDRESS);
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let address_type = context.integer_type(BIT_LENGTH_ETH_ADDRESS);
let address_pointer = context.build_alloca_at_entry(address_type, "address_pointer");
let address_truncated =
context
@@ -7,7 +7,6 @@ use crate::polkavm::context::code_type::CodeType;
use crate::polkavm::context::pointer::Pointer;
use crate::polkavm::context::runtime::RuntimeFunction;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
use crate::polkavm::WriteLLVM;
/// A function for requesting the immutable data from the runtime.
@@ -20,19 +19,16 @@ use crate::polkavm::WriteLLVM;
/// However, this is a one time assertion, hence worth it.
pub struct Load;
impl<D> RuntimeFunction<D> for Load
where
D: Dependency + Clone,
{
impl RuntimeFunction for Load {
const NAME: &'static str = "__revive_load_immutable_data";
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context.void_type().fn_type(Default::default(), false)
}
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<inkwell::values::BasicValueEnum<'ctx>>> {
let immutable_data_size_pointer = context
.get_global(revive_runtime_api::immutable_data::GLOBAL_IMMUTABLE_DATA_SIZE)?
@@ -109,35 +105,29 @@ where
}
}
impl<D> WriteLLVM<D> for Load
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for Load {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
/// Store the immutable data from the constructor code.
pub struct Store;
impl<D> RuntimeFunction<D> for Store
where
D: Dependency + Clone,
{
impl RuntimeFunction for Store {
const NAME: &'static str = "__revive_store_immutable_data";
fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> {
fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> {
context.void_type().fn_type(Default::default(), false)
}
fn emit_body<'ctx>(
&self,
context: &mut Context<'ctx, D>,
context: &mut Context<'ctx>,
) -> anyhow::Result<Option<inkwell::values::BasicValueEnum<'ctx>>> {
let immutable_data_size_pointer = context
.get_global(revive_runtime_api::immutable_data::GLOBAL_IMMUTABLE_DATA_SIZE)?
@@ -192,16 +182,13 @@ where
}
}
impl<D> WriteLLVM<D> for Store
where
D: Dependency + Clone,
{
fn declare(&mut self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::declare(self, context)
impl WriteLLVM for Store {
fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::declare(self, context)
}
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()> {
<Self as RuntimeFunction<_>>::emit(&self, context)
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> {
<Self as RuntimeFunction>::emit(&self, context)
}
}
@@ -210,20 +197,17 @@ where
/// In deploy code the values are read from the stack.
///
/// In runtime code they are loaded lazily with the `get_immutable_data` syscall.
pub fn load<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn load<'ctx>(
context: &mut Context<'ctx>,
index: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
match context.code_type() {
None => {
anyhow::bail!("Immutables are not available if the contract part is undefined");
}
Some(CodeType::Deploy) => load_from_memory(context, index),
Some(CodeType::Runtime) => {
let name = <Load as RuntimeFunction<D>>::NAME;
let name = <Load as RuntimeFunction>::NAME;
context.build_call(
context
.get_function(name)
@@ -244,14 +228,11 @@ where
/// being prepared for storing them using the `set_immutable_data` syscall.
///
/// Ignored in the runtime code.
pub fn store<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn store<'ctx>(
context: &mut Context<'ctx>,
index: inkwell::values::IntValue<'ctx>,
value: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
) -> anyhow::Result<()> {
match context.code_type() {
None => {
anyhow::bail!("Immutables are not available if the contract part is undefined");
@@ -279,13 +260,10 @@ where
}
}
pub fn load_from_memory<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn load_from_memory<'ctx>(
context: &mut Context<'ctx>,
index: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let immutable_data_pointer = context
.get_global(revive_runtime_api::immutable_data::GLOBAL_IMMUTABLE_DATA_POINTER)?
.value
+12 -25
View File
@@ -3,18 +3,14 @@
use inkwell::values::BasicValue;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
/// Translates the `addmod` instruction.
pub fn add_mod<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn add_mod<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
modulo: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
Ok(context
.build_call(
context.llvm_runtime().add_mod,
@@ -29,15 +25,12 @@ where
}
/// Translates the `mulmod` instruction.
pub fn mul_mod<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn mul_mod<'ctx>(
context: &mut Context<'ctx>,
operand_1: inkwell::values::IntValue<'ctx>,
operand_2: inkwell::values::IntValue<'ctx>,
modulo: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
Ok(context
.build_call(
context.llvm_runtime().mul_mod,
@@ -52,14 +45,11 @@ where
}
/// Translates the `exp` instruction.
pub fn exponent<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn exponent<'ctx>(
context: &mut Context<'ctx>,
value: inkwell::values::IntValue<'ctx>,
exponent: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
Ok(context
.build_call(
context.llvm_runtime().exp,
@@ -70,14 +60,11 @@ where
}
/// Translates the `signextend` instruction.
pub fn sign_extend<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn sign_extend<'ctx>(
context: &mut Context<'ctx>,
bytes: inkwell::values::IntValue<'ctx>,
value: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
Ok(context
.build_call(
context.llvm_runtime().sign_extend,
+14 -26
View File
@@ -1,19 +1,16 @@
//! Translates the heap memory operations.
use inkwell::values::BasicValue;
use revive_common::BYTE_LENGTH_BYTE;
use crate::polkavm::context::address_space::AddressSpace;
use crate::polkavm::context::pointer::Pointer;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
/// Translates the `msize` instruction.
pub fn msize<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn msize<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
Ok(context
.builder()
.build_int_z_extend(
@@ -26,13 +23,10 @@ where
/// Translates the `mload` instruction.
/// Uses the main heap.
pub fn load<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn load<'ctx>(
context: &mut Context<'ctx>,
offset: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let pointer = Pointer::new_with_offset(
context,
AddressSpace::Heap,
@@ -45,14 +39,11 @@ where
/// Translates the `mstore` instruction.
/// Uses the main heap.
pub fn store<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn store<'ctx>(
context: &mut Context<'ctx>,
offset: inkwell::values::IntValue<'ctx>,
value: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
) -> anyhow::Result<()> {
let pointer = Pointer::new_with_offset(
context,
AddressSpace::Heap,
@@ -66,14 +57,11 @@ where
/// Translates the `mstore8` instruction.
/// Uses the main heap.
pub fn store_byte<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn store_byte<'ctx>(
context: &mut Context<'ctx>,
offset: inkwell::values::IntValue<'ctx>,
value: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
) -> anyhow::Result<()> {
let byte_type = context.byte_type();
let value = context
.builder()
@@ -92,7 +80,7 @@ where
context
.builder()
.build_store(pointer, value)?
.set_alignment(revive_common::BYTE_LENGTH_BYTE as u32)
.set_alignment(BYTE_LENGTH_BYTE as u32)
.expect("Alignment is valid");
Ok(())
}
+9 -22
View File
@@ -4,22 +4,18 @@ use crate::polkavm::context::code_type::CodeType;
use crate::polkavm::context::runtime::RuntimeFunction;
use crate::polkavm::context::Context;
use crate::polkavm::evm::immutable::Store;
use crate::polkavm::Dependency;
/// Translates the `return` instruction.
pub fn r#return<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn r#return<'ctx>(
context: &mut Context<'ctx>,
offset: inkwell::values::IntValue<'ctx>,
length: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
) -> anyhow::Result<()> {
match context.code_type() {
None => anyhow::bail!("Return is not available if the contract part is undefined"),
Some(CodeType::Deploy) => {
context.build_call(
<Store as RuntimeFunction<D>>::declaration(context),
<Store as RuntimeFunction>::declaration(context),
Default::default(),
"store_immutable_data",
);
@@ -35,14 +31,11 @@ where
}
/// Translates the `revert` instruction.
pub fn revert<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn revert<'ctx>(
context: &mut Context<'ctx>,
offset: inkwell::values::IntValue<'ctx>,
length: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
) -> anyhow::Result<()> {
context.build_exit(
context.integer_const(crate::polkavm::XLEN, 1),
offset,
@@ -52,19 +45,13 @@ where
/// Translates the `stop` instruction.
/// Is the same as `return(0, 0)`.
pub fn stop<D>(context: &mut Context<D>) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
pub fn stop(context: &mut Context) -> anyhow::Result<()> {
r#return(context, context.word_const(0), context.word_const(0))
}
/// Translates the `invalid` instruction.
/// Burns all gas using an out-of-bounds memory store, causing a panic.
pub fn invalid<D>(context: &mut Context<D>) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
pub fn invalid(context: &mut Context) -> anyhow::Result<()> {
crate::polkavm::evm::memory::store(
context,
context.word_type().const_all_ones(),
@@ -1,15 +1,11 @@
//! Translates the return data instructions.
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
/// Translates the return data size.
pub fn size<'ctx, D>(
context: &mut Context<'ctx, D>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
pub fn size<'ctx>(
context: &mut Context<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let return_data_size_value = context
.build_runtime_call(revive_runtime_api::polkavm_imports::RETURNDATASIZE, &[])
.expect("the return_data_size syscall method should return a value")
@@ -29,15 +25,12 @@ where
/// - Destination, offset or size exceed the VM register size (XLEN)
/// - `source_offset + size` overflows (in XLEN)
/// - `source_offset + size` is beyond `RETURNDATASIZE`
pub fn copy<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn copy<'ctx>(
context: &mut Context<'ctx>,
destination_offset: inkwell::values::IntValue<'ctx>,
source_offset: inkwell::values::IntValue<'ctx>,
size: inkwell::values::IntValue<'ctx>,
) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
) -> anyhow::Result<()> {
let source_offset = context.safe_truncate_int_to_xlen(source_offset)?;
let destination_offset = context.safe_truncate_int_to_xlen(destination_offset)?;
let size = context.safe_truncate_int_to_xlen(size)?;
+18 -31
View File
@@ -2,7 +2,6 @@
use crate::polkavm::context::runtime::RuntimeFunction;
use crate::polkavm::context::Context;
use crate::polkavm::Dependency;
use crate::PolkaVMArgument;
use crate::PolkaVMLoadStorageWordFunction;
use crate::PolkaVMLoadTransientStorageWordFunction;
@@ -10,15 +9,12 @@ use crate::PolkaVMStoreStorageWordFunction;
use crate::PolkaVMStoreTransientStorageWordFunction;
/// Translates the storage load.
pub fn load<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn load<'ctx>(
context: &mut Context<'ctx>,
position: &PolkaVMArgument<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
let name = <PolkaVMLoadStorageWordFunction as RuntimeFunction<D>>::NAME;
let declaration = <PolkaVMLoadStorageWordFunction as RuntimeFunction<D>>::declaration(context);
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let name = <PolkaVMLoadStorageWordFunction as RuntimeFunction>::NAME;
let declaration = <PolkaVMLoadStorageWordFunction as RuntimeFunction>::declaration(context);
let arguments = [position.as_pointer(context)?.value.into()];
Ok(context
.build_call(declaration, &arguments, "storage_load")
@@ -26,15 +22,12 @@ where
}
/// Translates the storage store.
pub fn store<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn store<'ctx>(
context: &mut Context<'ctx>,
position: &PolkaVMArgument<'ctx>,
value: &PolkaVMArgument<'ctx>,
) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
let declaration = <PolkaVMStoreStorageWordFunction as RuntimeFunction<D>>::declaration(context);
) -> anyhow::Result<()> {
let declaration = <PolkaVMStoreStorageWordFunction as RuntimeFunction>::declaration(context);
let arguments = [
position.as_pointer(context)?.value.into(),
value.as_pointer(context)?.value.into(),
@@ -44,33 +37,27 @@ where
}
/// Translates the transient storage load.
pub fn transient_load<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn transient_load<'ctx>(
context: &mut Context<'ctx>,
position: &PolkaVMArgument<'ctx>,
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>>
where
D: Dependency + Clone,
{
let name = <PolkaVMLoadTransientStorageWordFunction as RuntimeFunction<D>>::NAME;
) -> anyhow::Result<inkwell::values::BasicValueEnum<'ctx>> {
let name = <PolkaVMLoadTransientStorageWordFunction as RuntimeFunction>::NAME;
let arguments = [position.as_pointer(context)?.value.into()];
let declaration =
<PolkaVMLoadTransientStorageWordFunction as RuntimeFunction<D>>::declaration(context);
<PolkaVMLoadTransientStorageWordFunction as RuntimeFunction>::declaration(context);
Ok(context
.build_call(declaration, &arguments, "transient_storage_load")
.unwrap_or_else(|| panic!("runtime function {name} should return a value")))
}
/// Translates the transient storage store.
pub fn transient_store<'ctx, D>(
context: &mut Context<'ctx, D>,
pub fn transient_store<'ctx>(
context: &mut Context<'ctx>,
position: &PolkaVMArgument<'ctx>,
value: &PolkaVMArgument<'ctx>,
) -> anyhow::Result<()>
where
D: Dependency + Clone,
{
) -> anyhow::Result<()> {
let declaration =
<PolkaVMStoreTransientStorageWordFunction as RuntimeFunction<D>>::declaration(context);
<PolkaVMStoreTransientStorageWordFunction as RuntimeFunction>::declaration(context);
let arguments = [
position.as_pointer(context)?.value.into(),
value.as_pointer(context)?.value.into(),
+106 -82
View File
@@ -1,30 +1,43 @@
//! The LLVM context library.
use std::collections::BTreeMap;
use crate::debug_config::DebugConfig;
use crate::optimizer::settings::Settings as OptimizerSettings;
use crate::{PolkaVMTarget, PolkaVMTargetMachine};
use anyhow::Context as AnyhowContext;
use polkavm_common::program::ProgramBlob;
use polkavm_disassembler::{Disassembler, DisassemblyFormat};
use revive_common::{
Keccak256, ObjectFormat, BIT_LENGTH_ETH_ADDRESS, BIT_LENGTH_WORD, BYTE_LENGTH_ETH_ADDRESS,
BYTE_LENGTH_WORD,
};
use revive_linker::elf::ElfLinker;
use revive_linker::pvm::polkavm_linker;
use self::context::build::Build;
use self::context::Context;
pub use self::r#const::*;
pub mod r#const;
pub mod context;
pub mod evm;
pub use self::r#const::*;
/// Get a [Build] from contract bytecode and its auxilliary data.
pub fn build(
bytecode: &[u8],
metadata_hash: Option<[u8; BYTE_LENGTH_WORD]>,
) -> anyhow::Result<Build> {
Ok(Build::new(metadata_hash, bytecode.to_owned()))
}
use crate::debug_config::DebugConfig;
use crate::optimizer::settings::Settings as OptimizerSettings;
use anyhow::Context as AnyhowContext;
use polkavm_common::program::ProgramBlob;
use polkavm_disassembler::{Disassembler, DisassemblyFormat};
use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory;
use sha3::Digest;
use self::context::build::Build;
use self::context::Context;
/// Builds PolkaVM assembly text.
pub fn build_assembly_text(
/// Disassembles the PolkaVM blob into assembly text representation.
pub fn disassemble(
contract_path: &str,
bytecode: &[u8],
metadata_hash: Option<[u8; revive_common::BYTE_LENGTH_WORD]>,
debug_config: &DebugConfig,
) -> anyhow::Result<Build> {
) -> anyhow::Result<String> {
let program_blob = ProgramBlob::parse(bytecode.into())
.map_err(anyhow::Error::msg)
.with_context(|| format!("Failed to parse program blob for contract: {contract_path}"))?;
@@ -45,86 +58,97 @@ pub fn build_assembly_text(
debug_config.dump_assembly(contract_path, &assembly_text)?;
Ok(Build::new(
assembly_text.to_owned(),
metadata_hash,
bytecode.to_owned(),
hex::encode(sha3::Keccak256::digest(bytecode)),
))
Ok(assembly_text)
}
/// Computes the PVM bytecode hash.
pub fn hash(bytecode_buffer: &[u8]) -> [u8; BYTE_LENGTH_WORD] {
Keccak256::from_slice(bytecode_buffer)
.as_bytes()
.try_into()
.expect("the bytecode hash should be word sized")
}
/// Links the `bytecode` with `linker_symbols` and `factory_dependencies`.
pub fn link(
bytecode: &[u8],
linker_symbols: &BTreeMap<String, [u8; BYTE_LENGTH_ETH_ADDRESS]>,
factory_dependencies: &BTreeMap<String, [u8; BYTE_LENGTH_WORD]>,
strip_binary: bool,
) -> anyhow::Result<(Vec<u8>, ObjectFormat)> {
Ok(match ObjectFormat::try_from(bytecode) {
Ok(format @ ObjectFormat::PVM) => (bytecode.to_vec(), format),
Ok(ObjectFormat::ELF) => {
let symbols = build_symbols(linker_symbols, factory_dependencies)?;
let bytecode_linked = ElfLinker::setup()?.link(bytecode, symbols.as_slice())?;
polkavm_linker(&bytecode_linked, strip_binary)
.map(|pvm| (pvm, ObjectFormat::PVM))
.unwrap_or_else(|_| (bytecode.to_vec(), ObjectFormat::ELF))
}
Err(error) => panic!("ICE: linker: {error}"),
})
}
/// The returned module defines given `linker_symbols` and `factory_dependencies` global values.
pub fn build_symbols(
linker_symbols: &BTreeMap<String, [u8; BYTE_LENGTH_ETH_ADDRESS]>,
factory_dependencies: &BTreeMap<String, [u8; BYTE_LENGTH_WORD]>,
) -> anyhow::Result<inkwell::memory_buffer::MemoryBuffer> {
let context = inkwell::context::Context::create();
let module = context.create_module("symbols");
let word_type = context.custom_width_int_type(BIT_LENGTH_WORD as u32);
let address_type = context.custom_width_int_type(BIT_LENGTH_ETH_ADDRESS as u32);
for (name, value) in linker_symbols {
let global_value = module.add_global(address_type, Default::default(), name);
global_value.set_linkage(inkwell::module::Linkage::External);
global_value.set_initializer(
&address_type
.const_int_from_string(
hex::encode(value).as_str(),
inkwell::types::StringRadix::Hexadecimal,
)
.expect("should be valid"),
);
}
for (name, value) in factory_dependencies {
let global_value = module.add_global(word_type, Default::default(), name);
global_value.set_linkage(inkwell::module::Linkage::External);
global_value.set_initializer(
&word_type
.const_int_from_string(
hex::encode(value).as_str(),
inkwell::types::StringRadix::Hexadecimal,
)
.expect("should be valid"),
);
}
Ok(
PolkaVMTargetMachine::new(PolkaVMTarget::PVM, &OptimizerSettings::none())?
.write_to_memory_buffer(&module)
.expect("ICE: the symbols module should be valid"),
)
}
/// Implemented by items which are translated into LLVM IR.
pub trait WriteLLVM<D>
where
D: Dependency + Clone,
{
pub trait WriteLLVM {
/// Declares the entity in the LLVM IR.
/// Is usually performed in order to use the item before defining it.
fn declare(&mut self, _context: &mut Context<D>) -> anyhow::Result<()> {
fn declare(&mut self, _context: &mut Context) -> anyhow::Result<()> {
Ok(())
}
/// Translates the entity into LLVM IR.
fn into_llvm(self, context: &mut Context<D>) -> anyhow::Result<()>;
fn into_llvm(self, context: &mut Context) -> anyhow::Result<()>;
}
/// The dummy LLVM writable entity.
#[derive(Debug, Default, Clone)]
pub struct DummyLLVMWritable {}
impl<D> WriteLLVM<D> for DummyLLVMWritable
where
D: Dependency + Clone,
{
fn into_llvm(self, _context: &mut Context<D>) -> anyhow::Result<()> {
impl WriteLLVM for DummyLLVMWritable {
fn into_llvm(self, _context: &mut Context) -> anyhow::Result<()> {
Ok(())
}
}
/// Implemented by items managing project dependencies.
pub trait Dependency {
/// Compiles a project dependency.
fn compile(
dependency: Self,
path: &str,
optimizer_settings: OptimizerSettings,
include_metadata_hash: bool,
debug_config: DebugConfig,
llvm_arguments: &[String],
memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> anyhow::Result<String>;
/// Resolves a full contract path.
fn resolve_path(&self, identifier: &str) -> anyhow::Result<String>;
/// Resolves a library address.
fn resolve_library(&self, path: &str) -> anyhow::Result<String>;
}
/// The dummy dependency entity.
#[derive(Debug, Default, Clone)]
pub struct DummyDependency {}
impl Dependency for DummyDependency {
fn compile(
_dependency: Self,
_path: &str,
_optimizer_settings: OptimizerSettings,
_include_metadata_hash: bool,
_debug_config: DebugConfig,
_llvm_arguments: &[String],
_memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> anyhow::Result<String> {
Ok(String::new())
}
/// Resolves a full contract path.
fn resolve_path(&self, _identifier: &str) -> anyhow::Result<String> {
Ok(String::new())
}
/// Resolves a library address.
fn resolve_library(&self, _path: &str) -> anyhow::Result<String> {
Ok(String::new())
}
}
@@ -1,12 +1,12 @@
//! The LLVM target machine.
pub mod target;
use crate::optimizer::settings::size_level::SizeLevel as OptimizerSettingsSizeLevel;
use crate::optimizer::settings::Settings as OptimizerSettings;
use self::target::Target;
pub mod target;
/// The LLVM target machine.
#[derive(Debug)]
pub struct TargetMachine {
+7 -3
View File
@@ -1,6 +1,6 @@
[package]
name = "resolc"
version = "0.3.0"
version = "0.4.0"
license.workspace = true
edition.workspace = true
repository.workspace = true
@@ -28,8 +28,8 @@ rayon = { workspace = true, optional = true }
semver = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
sha3 = { workspace = true }
which = { workspace = true }
normpath = { workspace = true }
revive-common = { workspace = true }
revive-llvm-context = { workspace = true }
@@ -46,6 +46,10 @@ inkwell = { workspace = true, features = ["target-riscv", "llvm18-1-no-llvm-link
[build-dependencies]
git2 = { workspace = true, default-features = false }
[dev-dependencies]
tempfile = { workspace = true }
[features]
parallel = ["rayon"]
parallel = ["rayon", "revive-solc-json-interface/parallel"]
default = ["parallel"]
+135 -60
View File
@@ -1,10 +1,19 @@
//! The Solidity contract build.
use std::collections::HashSet;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::fs::File;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use revive_common::ContractIdentifier;
use revive_common::ObjectFormat;
use revive_common::BYTE_LENGTH_WORD;
use revive_common::EXTENSION_JSON;
use revive_common::EXTENSION_POLKAVM_ASSEMBLY;
use revive_common::EXTENSION_POLKAVM_BINARY;
use revive_llvm_context::PolkaVMBuild;
use revive_solc_json_interface::CombinedJsonContract;
use revive_solc_json_interface::SolcStandardJsonOutputContract;
use serde::Deserialize;
@@ -13,92 +22,137 @@ use serde::Serialize;
/// The Solidity contract build.
#[derive(Debug, Serialize, Deserialize)]
pub struct Contract {
/// The contract path.
pub path: String,
/// The auxiliary identifier. Used to identify Yul objects.
pub identifier: String,
/// The contract identifier.
pub identifier: ContractIdentifier,
/// The LLVM module build.
pub build: revive_llvm_context::PolkaVMBuild,
pub build: PolkaVMBuild,
/// The metadata JSON.
pub metadata_json: serde_json::Value,
/// The factory dependencies.
pub factory_dependencies: HashSet<String>,
/// The unlinked missing libraries.
pub missing_libraries: BTreeSet<String>,
/// The unresolved factory dependencies.
pub factory_dependencies: BTreeSet<String>,
/// The resolved factory dependencies.
pub factory_dependencies_resolved: BTreeMap<[u8; BYTE_LENGTH_WORD], String>,
/// The binary object format.
pub object_format: ObjectFormat,
}
impl Contract {
/// A shortcut constructor.
pub fn new(
path: String,
identifier: String,
build: revive_llvm_context::PolkaVMBuild,
identifier: ContractIdentifier,
build: PolkaVMBuild,
metadata_json: serde_json::Value,
factory_dependencies: HashSet<String>,
missing_libraries: BTreeSet<String>,
factory_dependencies: BTreeSet<String>,
object_format: ObjectFormat,
) -> Self {
Self {
path,
identifier,
build,
metadata_json,
missing_libraries,
factory_dependencies,
factory_dependencies_resolved: BTreeMap::new(),
object_format,
}
}
/// Writes the contract text assembly and bytecode to terminal.
pub fn write_to_terminal(
self,
path: String,
output_metadata: bool,
output_assembly: bool,
output_binary: bool,
) -> anyhow::Result<()> {
writeln!(std::io::stdout(), "\n======= {path} =======")?;
if output_assembly {
writeln!(
std::io::stdout(),
"Assembly:\n{}",
self.build.assembly_text.unwrap_or_default(),
)?;
}
if output_metadata {
writeln!(std::io::stdout(), "Metadata:\n{}", self.metadata_json)?;
}
if output_binary {
writeln!(
std::io::stdout(),
"Binary:\n{}",
hex::encode(self.build.bytecode)
)?;
}
Ok(())
}
/// Writes the contract text assembly and bytecode to files.
pub fn write_to_directory(
self,
path: &Path,
output_metadata: bool,
output_assembly: bool,
output_binary: bool,
overwrite: bool,
) -> anyhow::Result<()> {
let file_name = Self::short_path(self.path.as_str());
if output_assembly {
let file_name = format!(
"{}.{}",
file_name,
revive_common::EXTENSION_POLKAVM_ASSEMBLY
);
let mut file_path = path.to_owned();
file_path.push(file_name);
let file_path = PathBuf::from(self.identifier.path);
let file_name = file_path
.file_name()
.expect("Always exists")
.to_str()
.expect("Always valid");
let output_path = path.to_owned();
std::fs::create_dir_all(output_path.as_path())?;
if output_metadata {
let file_path = output_path.join(format!(
"{file_name}:{}.{EXTENSION_JSON}",
self.identifier.name.as_deref().unwrap_or(file_name),
));
if file_path.exists() && !overwrite {
anyhow::bail!(
"Refusing to overwrite an existing file {file_path:?} (use --overwrite to force)."
);
} else {
let assembly_text = self.build.assembly_text;
File::create(&file_path)
.map_err(|error| {
anyhow::anyhow!("File {:?} creating error: {}", file_path, error)
})?
.write_all(assembly_text.as_bytes())
.map_err(|error| {
anyhow::anyhow!("File {:?} writing error: {}", file_path, error)
})?;
}
std::fs::write(
file_path.as_path(),
self.metadata_json.to_string().as_bytes(),
)
.map_err(|error| anyhow::anyhow!("File {file_path:?} writing: {error}"))?;
}
if output_assembly {
let file_path = output_path.join(format!(
"{file_name}:{}.{EXTENSION_POLKAVM_ASSEMBLY}",
self.identifier.name.as_deref().unwrap_or(file_name),
));
if file_path.exists() && !overwrite {
anyhow::bail!(
"Refusing to overwrite an existing file {file_path:?} (use --overwrite to force)."
);
}
File::create(&file_path)
.map_err(|error| anyhow::anyhow!("File {file_path:?} creating error: {error}"))?
.write_all(self.build.assembly_text.unwrap_or_default().as_bytes())
.map_err(|error| anyhow::anyhow!("File {file_path:?} writing error: {error}"))?;
}
if output_binary {
let file_name = format!("{}.{}", file_name, revive_common::EXTENSION_POLKAVM_BINARY);
let mut file_path = path.to_owned();
file_path.push(file_name);
let file_path = output_path.join(format!(
"{file_name}:{}.{EXTENSION_POLKAVM_BINARY}",
self.identifier.name.as_deref().unwrap_or(file_name),
));
if file_path.exists() && !overwrite {
anyhow::bail!(
"Refusing to overwrite an existing file {file_path:?} (use --overwrite to force)."
);
} else {
File::create(&file_path)
.map_err(|error| {
anyhow::anyhow!("File {:?} creating error: {}", file_path, error)
})?
.write_all(self.build.bytecode.as_slice())
.map_err(|error| {
anyhow::anyhow!("File {:?} writing error: {}", file_path, error)
})?;
}
File::create(&file_path)
.map_err(|error| anyhow::anyhow!("File {file_path:?} creating error: {error}"))?
.write_all(self.build.bytecode.as_slice())
.map_err(|error| anyhow::anyhow!("File {file_path:?} writing error: {error}"))?;
}
Ok(())
@@ -109,20 +163,30 @@ impl Contract {
self,
combined_json_contract: &mut CombinedJsonContract,
) -> anyhow::Result<()> {
let hexadecimal_bytecode = hex::encode(self.build.bytecode);
if let Some(metadata) = combined_json_contract.metadata.as_mut() {
*metadata = self.metadata_json.to_string();
}
if let Some(asm) = combined_json_contract.asm.as_mut() {
*asm = serde_json::Value::String(self.build.assembly_text);
}
let hexadecimal_bytecode = hex::encode(self.build.bytecode);
combined_json_contract.assembly = self.build.assembly_text;
combined_json_contract.bin = Some(hexadecimal_bytecode);
combined_json_contract
.bin_runtime
.clone_from(&combined_json_contract.bin);
combined_json_contract.factory_deps = Some(self.build.factory_dependencies);
combined_json_contract
.missing_libraries
.extend(self.missing_libraries);
combined_json_contract
.factory_deps_unlinked
.extend(self.factory_dependencies);
combined_json_contract.factory_deps.extend(
self.factory_dependencies_resolved
.into_iter()
.map(|(hash, path)| (hex::encode(hash), path)),
);
combined_json_contract.object_format = Some(self.object_format);
Ok(())
}
@@ -132,16 +196,27 @@ impl Contract {
self,
standard_json_contract: &mut SolcStandardJsonOutputContract,
) -> anyhow::Result<()> {
standard_json_contract.metadata = Some(self.metadata_json);
let assembly_text = self.build.assembly_text;
let bytecode = hex::encode(self.build.bytecode.as_slice());
if let Some(evm) = standard_json_contract.evm.as_mut() {
evm.modify(assembly_text, bytecode);
}
let assembly_text = self.build.assembly_text.unwrap_or_default();
standard_json_contract.factory_dependencies = Some(self.build.factory_dependencies);
standard_json_contract.hash = Some(self.build.bytecode_hash);
standard_json_contract.metadata = self.metadata_json;
standard_json_contract
.evm
.get_or_insert_with(Default::default)
.modify(assembly_text, bytecode);
standard_json_contract.hash = self.build.bytecode_hash.map(hex::encode);
standard_json_contract
.missing_libraries
.extend(self.missing_libraries);
standard_json_contract
.factory_dependencies_unlinked
.extend(self.factory_dependencies);
standard_json_contract.factory_dependencies.extend(
self.factory_dependencies_resolved
.into_iter()
.map(|(hash, path)| (hex::encode(hash), path)),
);
standard_json_contract.object_format = Some(self.object_format);
Ok(())
}
+308 -50
View File
@@ -1,94 +1,352 @@
//! The Solidity project build.
use std::collections::BTreeMap;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use normpath::PathExt;
use revive_common::ObjectFormat;
use revive_common::BYTE_LENGTH_ETH_ADDRESS;
use revive_llvm_context::polkavm_disassemble;
use revive_llvm_context::polkavm_hash;
use revive_llvm_context::polkavm_link;
use revive_llvm_context::DebugConfig;
use revive_solc_json_interface::combined_json::CombinedJson;
use revive_solc_json_interface::CombinedJsonContract;
use revive_solc_json_interface::SolcStandardJsonOutput;
use revive_solc_json_interface::SolcStandardJsonOutputContract;
use revive_solc_json_interface::SolcStandardJsonOutputError;
use revive_solc_json_interface::SolcStandardJsonOutputErrorHandler;
use crate::build::contract::Contract;
use crate::solc::version::Version as SolcVersion;
pub mod contract;
use std::collections::BTreeMap;
use std::path::Path;
use revive_solc_json_interface::combined_json::CombinedJson;
use revive_solc_json_interface::SolcStandardJsonOutput;
use crate::solc::version::Version as SolcVersion;
use crate::ResolcVersion;
use self::contract::Contract;
/// The Solidity project build.
/// The Solidity project PVM build.
#[derive(Debug, Default)]
pub struct Build {
/// The contract data,
pub contracts: BTreeMap<String, Contract>,
pub results: BTreeMap<String, Result<Contract, SolcStandardJsonOutputError>>,
/// The additional message to output (added by the revive compiler).
pub messages: Vec<SolcStandardJsonOutputError>,
}
impl Build {
/// Writes all contracts to the specified directory.
pub fn write_to_directory(
self,
output_directory: &Path,
/// A shorthand constructor.
///
/// Note: Takes the supplied `messages`, leaving an empty vec.
pub fn new(
results: BTreeMap<String, Result<Contract, SolcStandardJsonOutputError>>,
messages: &mut Vec<SolcStandardJsonOutputError>,
) -> Self {
Self {
results,
messages: std::mem::take(messages),
}
}
/// Links the PVM build.
pub fn link(
mut self,
linker_symbols: BTreeMap<String, [u8; BYTE_LENGTH_ETH_ADDRESS]>,
debug_config: &DebugConfig,
) -> Self {
let mut contracts: BTreeMap<String, Contract> = self
.results
.into_iter()
.map(|(path, result)| (path, result.expect("Cannot link a project with errors")))
.collect();
loop {
let mut linkage_data = BTreeMap::new();
for (path, contract) in contracts
.iter()
.filter(|(_path, contract)| contract.object_format == ObjectFormat::ELF)
{
match polkavm_link(
&contract.build.bytecode,
&linker_symbols,
&contract
.factory_dependencies
.iter()
.filter_map(|dependency| {
let bytecode_hash = contracts
.get(dependency)
.as_ref()?
.build
.bytecode_hash
.as_ref()?
.to_owned();
Some((dependency.to_owned(), bytecode_hash))
})
.collect(),
!debug_config.emit_debug_info,
) {
Ok((memory_buffer_linked, ObjectFormat::PVM)) => {
let bytecode_hash = polkavm_hash(&memory_buffer_linked);
let assembly_text =
polkavm_disassemble(path, &memory_buffer_linked, debug_config)
.unwrap_or_else(|error| {
panic!("ICE: The PVM disassembler failed: {error}")
});
linkage_data.insert(
path.to_owned(),
(memory_buffer_linked, bytecode_hash, assembly_text),
);
}
Ok((_memory_buffer_linked, ObjectFormat::ELF)) => {}
Err(error) => self
.messages
.push(SolcStandardJsonOutputError::new_error(error, None, None)),
}
}
if linkage_data.is_empty() {
break;
}
for (path, (memory_buffer_linked, bytecode_hash, assembly_text)) in
linkage_data.into_iter()
{
let contract = contracts.get(path.as_str()).expect("Always exists");
let factory_dependencies_resolved = contract
.factory_dependencies
.iter()
.filter_map(|dependency| {
Some((
contracts
.get(dependency)
.as_ref()?
.build
.bytecode_hash
.as_ref()?
.to_owned(),
dependency.to_owned(),
))
})
.collect();
let contract = contracts.get_mut(path.as_str()).expect("Always exists");
contract.build.bytecode = memory_buffer_linked.as_slice().to_vec();
contract.build.bytecode_hash = Some(bytecode_hash);
contract.build.assembly_text = Some(assembly_text);
contract.factory_dependencies_resolved = factory_dependencies_resolved;
contract.object_format = ObjectFormat::PVM;
}
}
let results = contracts
.into_iter()
.map(|(path, contract)| {
if contract.object_format == ObjectFormat::ELF {
self.messages.push(SolcStandardJsonOutputError::new_warning(
format!("{path} is unlinked. Consider providing missing libraries."),
None,
None,
));
}
(path, Ok(contract))
})
.collect();
Self::new(results, &mut self.messages)
}
/// Writes all contracts to the terminal.
pub fn write_to_terminal(
mut self,
output_metadata: bool,
output_assembly: bool,
output_binary: bool,
overwrite: bool,
) -> anyhow::Result<()> {
for (_path, contract) in self.contracts.into_iter() {
contract.write_to_directory(
output_directory,
self.take_and_write_warnings();
self.exit_on_error();
if !output_metadata && !output_assembly && !output_binary {
writeln!(
std::io::stderr(),
"Compiler run successful. No output requested. Use flags --metadata, --asm, --bin."
)?;
return Ok(());
}
for (path, build) in self.results.into_iter() {
build.expect("Always valid").write_to_terminal(
path,
output_metadata,
output_assembly,
output_binary,
overwrite,
)?;
}
Ok(())
}
/// Writes all contracts assembly and bytecode to the combined JSON.
pub fn write_to_combined_json(self, combined_json: &mut CombinedJson) -> anyhow::Result<()> {
for (path, contract) in self.contracts.into_iter() {
let combined_json_contract = combined_json
.contracts
.iter_mut()
.find_map(|(json_path, contract)| {
if path.ends_with(json_path) {
Some(contract)
} else {
None
}
})
.ok_or_else(|| anyhow::anyhow!("Contract `{}` not found in the project", path))?;
/// Writes all contracts to the specified directory.
pub fn write_to_directory(
mut self,
output_directory: &Path,
output_metadata: bool,
output_assembly: bool,
output_binary: bool,
overwrite: bool,
) -> anyhow::Result<()> {
self.take_and_write_warnings();
self.exit_on_error();
contract.write_to_combined_json(combined_json_contract)?;
std::fs::create_dir_all(output_directory)?;
for build in self.results.into_values() {
build.expect("Always valid").write_to_directory(
output_directory,
output_metadata,
output_assembly,
output_binary,
overwrite,
)?;
}
combined_json.revive_version = Some(ResolcVersion::default().long);
writeln!(
std::io::stderr(),
"Compiler run successful. Artifact(s) can be found in directory {output_directory:?}."
)?;
Ok(())
}
/// Writes all contracts assembly and bytecode to the combined JSON.
pub fn write_to_combined_json(
mut self,
combined_json: &mut CombinedJson,
) -> anyhow::Result<()> {
self.take_and_write_warnings();
self.exit_on_error();
for result in self.results.into_values() {
let build = result.expect("Exits on an error above");
let identifier = build.identifier.clone();
let combined_json_contract =
match combined_json
.contracts
.iter_mut()
.find_map(|(json_path, contract)| {
if Self::normalize_full_path(identifier.full_path.as_str())
.ends_with(Self::normalize_full_path(json_path).as_str())
{
Some(contract)
} else {
None
}
}) {
Some(contract) => contract,
None => {
combined_json.contracts.insert(
identifier.full_path.clone(),
CombinedJsonContract::default(),
);
combined_json
.contracts
.get_mut(identifier.full_path.as_str())
.expect("Always exists")
}
};
build.write_to_combined_json(combined_json_contract)?;
}
Ok(())
}
/// Writes all contracts assembly and bytecode to the standard JSON.
// NOTE(review): the two receiver lines below (`mut self,` and `self,`) look
// like an unresolved merge of an old and a new signature — a Rust function
// can declare its receiver only once. Confirm against version control which
// revision is intended.
pub fn write_to_standard_json(
mut self,
self,
standard_json: &mut SolcStandardJsonOutput,
solc_version: &SolcVersion,
) -> anyhow::Result<()> {
// Bail out early when the solc output carries no contract section at all.
let contracts = match standard_json.contracts.as_mut() {
Some(contracts) => contracts,
None => return Ok(()),
};
// Per-contract compilation failures are collected here and appended to the
// standard JSON error list at the end instead of aborting the whole write.
let mut errors = Vec::with_capacity(self.results.len());
for result in self.results.into_values() {
let build = match result {
Ok(build) => build,
Err(error) => {
errors.push(error);
continue;
}
};
let identifier = build.identifier.clone();
// NOTE(review): the span below appears to interleave two different lookup
// strategies — an older scan keyed by `"{path}:{name}"` driving
// `self.contracts.remove(...)`, and a newer lookup via
// `standard_json.contracts.get_mut(identifier.path)` — and the loop/brace
// structure does not balance as written. This looks like merged diff
// content; reconcile against the intended revision before relying on it.
for (path, contracts) in contracts.iter_mut() {
for (name, contract) in contracts.iter_mut() {
let full_name = format!("{path}:{name}");
if let Some(contract_data) = self.contracts.remove(full_name.as_str()) {
contract_data.write_to_standard_json(contract)?;
match standard_json
.contracts
.get_mut(identifier.path.as_str())
.and_then(|contracts| {
contracts.get_mut(
identifier
.name
.as_deref()
.unwrap_or(identifier.path.as_str()),
)
}) {
Some(contract) => {
build.write_to_standard_json(contract)?;
}
None => {
// No pre-existing slot: create a default entry under the source
// path, keyed by the contract name (falling back to the path).
let contracts = standard_json
.contracts
.entry(identifier.path.clone())
.or_default();
let mut contract = SolcStandardJsonOutputContract::default();
build.write_to_standard_json(&mut contract)?;
contracts.insert(identifier.name.unwrap_or(identifier.path), contract);
}
}
}
// Propagate the accumulated per-contract errors and stamp the version
// metadata (solc short/long versions plus the resolc version).
standard_json.errors.extend(errors);
standard_json.version = Some(solc_version.default.to_string());
standard_json.long_version = Some(solc_version.long.to_owned());
standard_json.revive_version = Some(ResolcVersion::default().long);
Ok(())
}
/// Normalizes the full contract path.
///
/// # Panics
/// If the path does not contain a colon.
fn normalize_full_path(path: &str) -> String {
    let mut parts = path.split(':');
    // `split` always yields at least one element, so this cannot fail.
    let file_path = parts.next().expect("Always exists");
    // The contract name follows the first colon; its absence is a caller bug.
    let contract_name = parts.next().expect("Always exists");

    // Normalize only the file-system half, then re-attach the contract name.
    let normalized = PathBuf::from(file_path)
        .normalize()
        .expect("Path normalization error")
        .as_os_str()
        .to_string_lossy()
        .into_owned();

    format!("{normalized}:{contract_name}")
}
}
impl SolcStandardJsonOutputErrorHandler for Build {
    /// Collects references to every error: failed per-contract results first,
    /// followed by error-severity standalone messages.
    fn errors(&self) -> Vec<&SolcStandardJsonOutputError> {
        self.results
            .values()
            .filter_map(|result| result.as_ref().err())
            .chain(self.messages.iter().filter(|message| message.is_error()))
            .collect()
    }

    /// Removes and returns every warning message, keeping only the
    /// non-warning messages behind.
    fn take_warnings(&mut self) -> Vec<SolcStandardJsonOutputError> {
        // `partition` preserves the relative order in both halves, so the
        // surviving messages keep their original sequence.
        let (warnings, remaining): (Vec<_>, Vec<_>) = std::mem::take(&mut self.messages)
            .into_iter()
            .partition(|message| message.is_warning());
        self.messages = remaining;
        warnings
    }
}
+5 -3
View File
@@ -1,5 +1,7 @@
//! Solidity to PolkaVM compiler constants.
use revive_common::BYTE_LENGTH_WORD;
/// The default executable name.
pub static DEFAULT_EXECUTABLE_NAME: &str = "resolc";
@@ -7,10 +9,10 @@ pub static DEFAULT_EXECUTABLE_NAME: &str = "resolc";
pub const OFFSET_SCRATCH_SPACE: usize = 0;
/// The memory pointer offset.
pub const OFFSET_MEMORY_POINTER: usize = 2 * revive_common::BYTE_LENGTH_WORD;
pub const OFFSET_MEMORY_POINTER: usize = 2 * BYTE_LENGTH_WORD;
/// The empty slot offset.
pub const OFFSET_EMPTY_SLOT: usize = 3 * revive_common::BYTE_LENGTH_WORD;
pub const OFFSET_EMPTY_SLOT: usize = 3 * BYTE_LENGTH_WORD;
/// The non-reserved memory offset.
pub const OFFSET_NON_RESERVED: usize = 4 * revive_common::BYTE_LENGTH_WORD;
pub const OFFSET_NON_RESERVED: usize = 4 * BYTE_LENGTH_WORD;
+273 -210
View File
@@ -1,12 +1,35 @@
//! Solidity to PolkaVM compiler library.
pub(crate) mod build;
pub(crate) mod r#const;
pub(crate) mod missing_libraries;
pub(crate) mod process;
pub(crate) mod project;
pub(crate) mod solc;
pub(crate) mod version;
#![allow(clippy::too_many_arguments)]
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashSet;
use std::io::Write;
use std::path::PathBuf;
#[cfg(feature = "parallel")]
use rayon::iter::IntoParallelIterator;
#[cfg(feature = "parallel")]
use rayon::iter::ParallelIterator;
use revive_common::EVMVersion;
use revive_common::MetadataHash;
use revive_common::EXIT_CODE_SUCCESS;
use revive_llvm_context::DebugConfig;
use revive_llvm_context::OptimizerSettings;
use revive_solc_json_interface::CombinedJsonSelector;
use revive_solc_json_interface::ResolcWarning;
use revive_solc_json_interface::SolcStandardJsonInput;
use revive_solc_json_interface::SolcStandardJsonInputLanguage;
use revive_solc_json_interface::SolcStandardJsonInputSettingsLibraries;
use revive_solc_json_interface::SolcStandardJsonInputSettingsOptimizer;
use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVM;
use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory;
use revive_solc_json_interface::SolcStandardJsonInputSettingsSelection;
use revive_solc_json_interface::SolcStandardJsonOutputError;
use revive_solc_json_interface::SolcStandardJsonOutputErrorHandler;
use crate::linker::Output;
pub use self::build::contract::Contract as ContractBuild;
pub use self::build::Build;
@@ -31,118 +54,76 @@ pub use self::solc::FIRST_SUPPORTED_VERSION as SolcFirstSupportedVersion;
pub use self::solc::LAST_SUPPORTED_VERSION as SolcLastSupportedVersion;
pub use self::version::Version as ResolcVersion;
pub(crate) mod build;
pub(crate) mod r#const;
pub(crate) mod linker;
pub(crate) mod missing_libraries;
pub(crate) mod process;
pub(crate) mod project;
pub(crate) mod solc;
#[cfg(not(target_os = "emscripten"))]
pub mod test_utils;
pub mod tests;
pub(crate) mod version;
use std::collections::BTreeSet;
use std::io::Write;
use std::path::PathBuf;
use revive_solc_json_interface::standard_json::input::settings::metadata_hash::MetadataHash;
use revive_solc_json_interface::ResolcWarning;
use revive_solc_json_interface::SolcStandardJsonInput;
use revive_solc_json_interface::SolcStandardJsonInputLanguage;
use revive_solc_json_interface::SolcStandardJsonInputSettingsOptimizer;
use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVM;
use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory;
use revive_solc_json_interface::SolcStandardJsonInputSettingsSelection;
/// The rayon worker stack size.
pub const RAYON_WORKER_STACK_SIZE: usize = 64 * 1024 * 1024;
/// Runs the Yul mode.
pub fn yul<T: Compiler>(
solc: &T,
input_files: &[PathBuf],
solc: &mut T,
optimizer_settings: revive_llvm_context::OptimizerSettings,
include_metadata_hash: bool,
mut debug_config: revive_llvm_context::DebugConfig,
libraries: &[String],
metadata_hash: MetadataHash,
messages: &mut Vec<SolcStandardJsonOutputError>,
optimizer_settings: OptimizerSettings,
debug_config: DebugConfig,
llvm_arguments: &[String],
memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> anyhow::Result<Build> {
let path = match input_files.len() {
1 => input_files.first().expect("Always exists"),
0 => anyhow::bail!("The input file is missing"),
length => anyhow::bail!(
"Only one input file is allowed in the Yul mode, but found {}",
length,
),
};
let libraries = SolcStandardJsonInputSettingsLibraries::try_from(libraries)?;
solc.validate_yul_paths(input_files, libraries.clone(), messages)?;
if solc.version()?.default != solc::LAST_SUPPORTED_VERSION {
anyhow::bail!(
"The Yul mode is only supported with the most recent version of the Solidity compiler: {}",
solc::LAST_SUPPORTED_VERSION,
);
}
let solc_validator = Some(&*solc);
let project = Project::try_from_yul_path(path, solc_validator)?;
debug_config.set_yul_path(path);
let build = project.compile(
let linker_symbols = libraries.as_linker_symbols()?;
let project = Project::try_from_yul_paths(input_files, None, libraries, &debug_config)?;
let mut build = project.compile(
messages,
optimizer_settings,
include_metadata_hash,
debug_config,
llvm_arguments,
memory_config,
)?;
Ok(build)
}
/// Runs the LLVM IR mode.
pub fn llvm_ir(
input_files: &[PathBuf],
optimizer_settings: revive_llvm_context::OptimizerSettings,
include_metadata_hash: bool,
debug_config: revive_llvm_context::DebugConfig,
llvm_arguments: &[String],
memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> anyhow::Result<Build> {
let path = match input_files.len() {
1 => input_files.first().expect("Always exists"),
0 => anyhow::bail!("The input file is missing"),
length => anyhow::bail!(
"Only one input file is allowed in the LLVM IR mode, but found {}",
length,
),
};
let project = Project::try_from_llvm_ir_path(path)?;
let build = project.compile(
optimizer_settings,
include_metadata_hash,
debug_config,
metadata_hash,
&debug_config,
llvm_arguments,
memory_config,
)?;
build.take_and_write_warnings();
build.check_errors()?;
let mut build = build.link(linker_symbols, &debug_config);
build.take_and_write_warnings();
build.check_errors()?;
Ok(build)
}
/// Runs the standard output mode.
#[allow(clippy::too_many_arguments)]
pub fn standard_output<T: Compiler>(
solc: &T,
input_files: &[PathBuf],
libraries: Vec<String>,
solc: &mut T,
evm_version: Option<revive_common::EVMVersion>,
libraries: &[String],
metadata_hash: MetadataHash,
messages: &mut Vec<SolcStandardJsonOutputError>,
evm_version: Option<EVMVersion>,
solc_optimizer_enabled: bool,
optimizer_settings: revive_llvm_context::OptimizerSettings,
include_metadata_hash: bool,
optimizer_settings: OptimizerSettings,
base_path: Option<String>,
include_paths: Vec<String>,
allow_paths: Option<String>,
remappings: Option<BTreeSet<String>>,
suppressed_warnings: Option<Vec<ResolcWarning>>,
debug_config: revive_llvm_context::DebugConfig,
llvm_arguments: &[String],
remappings: BTreeSet<String>,
suppressed_warnings: Vec<ResolcWarning>,
debug_config: DebugConfig,
llvm_arguments: Vec<String>,
memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> anyhow::Result<Build> {
let solc_version = solc.version()?;
let solc_input = SolcStandardJsonInput::try_from_paths(
SolcStandardJsonInputLanguage::Solidity,
let mut solc_input = SolcStandardJsonInput::try_from_solidity_paths(
evm_version,
input_files,
libraries,
@@ -150,162 +131,211 @@ pub fn standard_output<T: Compiler>(
SolcStandardJsonInputSettingsSelection::new_required(),
SolcStandardJsonInputSettingsOptimizer::new(
solc_optimizer_enabled,
None,
&solc_version.default,
optimizer_settings.is_fallback_to_size_enabled(),
SolcStandardJsonInputSettingsOptimizer::default_mode(),
Default::default(),
),
None,
Default::default(),
suppressed_warnings,
Some(SolcStandardJsonInputSettingsPolkaVM::new(
SolcStandardJsonInputSettingsPolkaVM::new(
Some(memory_config),
debug_config.emit_debug_info,
)),
),
llvm_arguments,
false,
)?;
let mut solc_output = solc.standard_json(
&mut solc_input,
messages,
base_path,
include_paths,
allow_paths,
)?;
solc_output.take_and_write_warnings();
solc_output.check_errors()?;
let source_code_files = solc_input
.sources
.iter()
.map(|(path, source)| (path.to_owned(), source.content.to_owned()))
.collect();
let libraries = solc_input.settings.libraries.clone().unwrap_or_default();
let solc_output = solc.standard_json(solc_input, base_path, include_paths, allow_paths)?;
if let Some(errors) = solc_output.errors.as_deref() {
let mut has_errors = false;
for error in errors.iter() {
if error.severity.as_str() == "error" {
has_errors = true;
}
writeln!(std::io::stderr(), "{error}")?;
}
if has_errors {
anyhow::bail!("Error(s) found. Compilation aborted");
}
}
let linker_symbols = solc_input.settings.libraries.as_linker_symbols()?;
let project = Project::try_from_standard_json_output(
&solc_output,
source_code_files,
libraries,
&mut solc_output,
solc_input.settings.libraries,
&solc_version,
&debug_config,
)?;
solc_output.take_and_write_warnings();
solc_output.check_errors()?;
let build = project.compile(
let mut build = project.compile(
messages,
optimizer_settings,
include_metadata_hash,
debug_config,
llvm_arguments,
metadata_hash,
&debug_config,
&solc_input.settings.llvm_arguments,
memory_config,
)?;
build.take_and_write_warnings();
build.check_errors()?;
let mut build = build.link(linker_symbols, &debug_config);
build.take_and_write_warnings();
build.check_errors()?;
Ok(build)
}
/// Runs the standard JSON mode.
#[allow(clippy::too_many_arguments)]
pub fn standard_json<T: Compiler>(
solc: &mut T,
detect_missing_libraries: bool,
solc: &T,
metadata_hash: MetadataHash,
messages: &mut Vec<SolcStandardJsonOutputError>,
json_path: Option<PathBuf>,
base_path: Option<String>,
include_paths: Vec<String>,
allow_paths: Option<String>,
mut debug_config: revive_llvm_context::DebugConfig,
llvm_arguments: &[String],
mut debug_config: DebugConfig,
detect_missing_libraries: bool,
) -> anyhow::Result<()> {
let solc_version = solc.version()?;
let solc_input = SolcStandardJsonInput::try_from_stdin()?;
let source_code_files = solc_input
.sources
.iter()
.map(|(path, source)| (path.to_owned(), source.content.to_owned()))
.collect();
let optimizer_settings =
revive_llvm_context::OptimizerSettings::try_from(&solc_input.settings.optimizer)?;
let polkavm_settings = solc_input.settings.polkavm.unwrap_or_default();
debug_config.emit_debug_info = polkavm_settings.debug_information.unwrap_or_default();
let include_metadata_hash = match solc_input.settings.metadata {
Some(ref metadata) => metadata.bytecode_hash != Some(MetadataHash::None),
None => true,
};
let libraries = solc_input.settings.libraries.clone().unwrap_or_default();
let mut solc_output = solc.standard_json(solc_input, base_path, include_paths, allow_paths)?;
if let Some(errors) = solc_output.errors.as_deref() {
for error in errors.iter() {
if error.severity.as_str() == "error" {
serde_json::to_writer(std::io::stdout(), &solc_output)?;
std::process::exit(0);
}
}
}
let project = Project::try_from_standard_json_output(
&solc_output,
source_code_files,
libraries,
&solc_version,
&debug_config,
let mut solc_input = SolcStandardJsonInput::try_from(json_path.as_deref())?;
let language = solc_input.language;
let prune_output = solc_input.settings.selection_to_prune();
let deployed_libraries = solc_input.settings.libraries.as_paths();
let linker_symbols = solc_input.settings.libraries.as_linker_symbols()?;
let optimizer_settings = OptimizerSettings::try_from_cli(solc_input.settings.optimizer.mode)?;
let detect_missing_libraries =
solc_input.settings.detect_missing_libraries || detect_missing_libraries;
debug_config.emit_debug_info = solc_input
.settings
.polkavm
.debug_information
.unwrap_or(false);
solc_input.extend_selection(SolcStandardJsonInputSettingsSelection::new_required());
let mut solc_output = solc.standard_json(
&mut solc_input,
messages,
base_path,
include_paths,
allow_paths,
)?;
if detect_missing_libraries {
let missing_libraries = project.get_missing_libraries();
missing_libraries.write_to_standard_json(&mut solc_output, &solc_version)?;
} else {
let build = project.compile(
optimizer_settings,
include_metadata_hash,
debug_config,
llvm_arguments,
polkavm_settings
.memory_config
.unwrap_or_else(SolcStandardJsonInputSettingsPolkaVMMemory::default),
)?;
build.write_to_standard_json(&mut solc_output, &solc_version)?;
let (mut solc_output, project) = match language {
SolcStandardJsonInputLanguage::Solidity => {
let project = Project::try_from_standard_json_output(
&mut solc_output,
solc_input.settings.libraries,
&solc_version,
&debug_config,
)?;
(solc_output, project)
}
SolcStandardJsonInputLanguage::Yul => {
let mut solc_output = solc.validate_yul_standard_json(&mut solc_input, messages)?;
if solc_output.has_errors() {
solc_output.write_and_exit(prune_output);
}
let project = Project::try_from_yul_sources(
solc_input.sources,
solc_input.settings.libraries,
Some(&mut solc_output),
&debug_config,
)?;
(solc_output, project)
}
};
if solc_output.has_errors() {
solc_output.write_and_exit(prune_output);
}
serde_json::to_writer(std::io::stdout(), &solc_output)?;
std::process::exit(0);
if detect_missing_libraries {
let missing_libraries = project.get_missing_libraries(&deployed_libraries);
missing_libraries.write_to_standard_json(&mut solc_output, &solc_version);
solc_output.write_and_exit(prune_output);
}
let build = project.compile(
messages,
optimizer_settings,
metadata_hash,
&debug_config,
&solc_input.settings.llvm_arguments,
solc_input
.settings
.polkavm
.memory_config
.unwrap_or_default(),
)?;
if build.has_errors() {
build.write_to_standard_json(&mut solc_output, &solc_version)?;
solc_output.write_and_exit(prune_output);
}
let build = build.link(linker_symbols, &debug_config);
build.write_to_standard_json(&mut solc_output, &solc_version)?;
solc_output.write_and_exit(prune_output);
}
/// Runs the combined JSON mode.
#[allow(clippy::too_many_arguments)]
pub fn combined_json<T: Compiler>(
solc: &T,
paths: &[PathBuf],
libraries: &[String],
metadata_hash: MetadataHash,
messages: &mut Vec<SolcStandardJsonOutputError>,
evm_version: Option<EVMVersion>,
format: String,
input_files: &[PathBuf],
libraries: Vec<String>,
solc: &mut T,
evm_version: Option<revive_common::EVMVersion>,
solc_optimizer_enabled: bool,
optimizer_settings: revive_llvm_context::OptimizerSettings,
include_metadata_hash: bool,
optimizer_settings: OptimizerSettings,
base_path: Option<String>,
include_paths: Vec<String>,
allow_paths: Option<String>,
remappings: Option<BTreeSet<String>>,
suppressed_warnings: Option<Vec<ResolcWarning>>,
debug_config: revive_llvm_context::DebugConfig,
remappings: BTreeSet<String>,
suppressed_warnings: Vec<ResolcWarning>,
debug_config: DebugConfig,
output_directory: Option<PathBuf>,
overwrite: bool,
llvm_arguments: &[String],
llvm_arguments: Vec<String>,
memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> anyhow::Result<()> {
let build = standard_output(
input_files,
libraries,
let selectors = CombinedJsonSelector::from_cli(format.as_str())
.into_iter()
.filter_map(|result| match result {
Ok(selector) => Some(selector),
Err(error) => {
messages.push(SolcStandardJsonOutputError::new_error(
error.to_string(),
None,
None,
));
None
}
})
.collect::<HashSet<_>>();
if !selectors.contains(&CombinedJsonSelector::Bytecode) {
messages.push(SolcStandardJsonOutputError::new_warning(
"Bytecode is always emitted even if the selector is not provided.".to_string(),
None,
None,
));
}
if selectors.contains(&CombinedJsonSelector::BytecodeRuntime) {
messages.push(SolcStandardJsonOutputError::new_warning(
format!("The `{}` selector does not make sense for the PVM target, since there is only one bytecode segment.", CombinedJsonSelector::BytecodeRuntime),
None,
None,
));
}
let mut combined_json = solc.combined_json(paths, selectors)?;
standard_output(
solc,
paths,
libraries,
metadata_hash,
messages,
evm_version,
solc_optimizer_enabled,
optimizer_settings,
include_metadata_hash,
base_path,
include_paths,
allow_paths,
@@ -314,24 +344,57 @@ pub fn combined_json<T: Compiler>(
debug_config,
llvm_arguments,
memory_config,
)?;
let mut combined_json = solc.combined_json(input_files, format.as_str())?;
build.write_to_combined_json(&mut combined_json)?;
)?
.write_to_combined_json(&mut combined_json)?;
match output_directory {
Some(output_directory) => {
std::fs::create_dir_all(output_directory.as_path())?;
combined_json.write_to_directory(output_directory.as_path(), overwrite)?;
}
None => {
writeln!(
std::io::stdout(),
"{}",
serde_json::to_string(&combined_json).expect("Always valid")
std::io::stderr(),
"Compiler run successful. Artifact(s) can be found in directory {output_directory:?}."
)?;
}
None => {
serde_json::to_writer(std::io::stdout(), &combined_json)?;
}
}
std::process::exit(0);
std::process::exit(EXIT_CODE_SUCCESS);
}
/// Links unlinked bytecode files.
/// Links unlinked bytecode files.
///
/// Reads every file in `paths`, links the given `libraries` into them, and
/// writes each successfully linked PVM blob back to its original path.
/// Files that still contain unresolved symbols are left untouched and
/// reported on `stderr`.
///
/// This function never returns normally: it terminates the process with
/// [`EXIT_CODE_SUCCESS`] once linking is done.
pub fn link(paths: Vec<String>, libraries: Vec<String>) -> anyhow::Result<()> {
    #[cfg(feature = "parallel")]
    let iter = paths.into_par_iter();
    #[cfg(not(feature = "parallel"))]
    let iter = paths.into_iter();
    // Read all input files up front; any I/O error aborts the whole run.
    let bytecodes = iter
        .map(|path| {
            let bytecode = std::fs::read(path.as_str())?;
            Ok((path, bytecode))
        })
        .collect::<anyhow::Result<BTreeMap<String, Vec<u8>>>>()?;
    let output = Output::try_from(&bytecodes, &libraries)?;
    #[cfg(feature = "parallel")]
    let iter = output.linked.into_par_iter();
    #[cfg(not(feature = "parallel"))]
    let iter = output.linked.into_iter();
    // Write the fully linked blobs back in place of the raw objects.
    iter.map(|(path, bytecode)| {
        std::fs::write(path, bytecode)?;
        Ok(())
    })
    .collect::<anyhow::Result<()>>()?;
    // Diagnostics go to stderr so they cannot corrupt stdout consumers,
    // consistent with the stderr status reporting in `combined_json`.
    for (path, _) in output.unlinked {
        eprintln!("Warning: file '{path}' still unresolved");
    }
    println!("Linking completed");
    std::process::exit(EXIT_CODE_SUCCESS);
}
+96
View File
@@ -0,0 +1,96 @@
//! The Solidity to PolkaVM compiler deploy time linking library.
//!
//! # Deploy time linking
//!
//! At compile time, factory dependencies and library addresses
//! are declared but not necessarily defined.
//!
//! `resolc` will emit raw ELF objects for any contract requiring
//! deploy time linking using the `--link` flag.
//!
//! # Internals
//!
//! After all contracts have been built successfully, the compiler
//! tries to link the resulting raw ELF object files into PVM blobs.
//! This fails if any library address symbols are unknown at compile
//! time (which is better known in Solidity as the so-called "deploy
//! time linking" feature). Since factory dependency symbols can be
//! resolved only after the final PVM blob linking step, missing
//! libraries may further lead to unresolved factory dependencies.
use std::collections::BTreeMap;
use revive_common::{ObjectFormat, EXTENSION_POLKAVM_BINARY};
use revive_llvm_context::{polkavm_hash, polkavm_link};
use revive_solc_json_interface::SolcStandardJsonInputSettingsLibraries;
/// The Solidity to PolkaVM compiler deploy time linking outputs.
pub struct Output {
    /// The linked objects: maps each input path to its fully linked PVM blob.
    pub linked: BTreeMap<String, Vec<u8>>,
    /// The unlinked objects: `(path, raw ELF object)` pairs that still contain
    /// unresolved symbols after linking.
    pub unlinked: Vec<(String, Vec<u8>)>,
}
impl Output {
    /// Try linking given `libraries` into given `bytecodes`.
    ///
    /// Bytecodes failing to fully resolve end up in [Output::unlinked].
    pub fn try_from(
        bytecodes: &BTreeMap<String, Vec<u8>>,
        libraries: &[String],
    ) -> anyhow::Result<Self> {
        let linker_symbols =
            SolcStandardJsonInputSettingsLibraries::try_from(libraries)?.as_linker_symbols()?;

        let mut linked: BTreeMap<String, Vec<u8>> = BTreeMap::default();
        let mut unlinked: Vec<(String, Vec<u8>)> = Vec::default();
        let mut factory_dependencies = BTreeMap::default();

        // Triage the inputs: raw ELF objects still need linking, while already
        // finished PVM blobs only contribute their code hash as a factory
        // dependency for the others (they are not re-emitted via `linked`).
        for (path, bytecode) in bytecodes {
            let object_format = match ObjectFormat::try_from(bytecode.as_slice()) {
                Ok(object_format) => object_format,
                Err(error) => anyhow::bail!("{path}: {error}"),
            };
            match object_format {
                ObjectFormat::ELF => unlinked.push((path.clone(), bytecode.clone())),
                ObjectFormat::PVM => {
                    factory_dependencies
                        .insert(factory_dependency_symbol(path), polkavm_hash(bytecode));
                }
            }
        }

        // Fixed-point iteration: each newly linked blob may resolve a factory
        // dependency of another object, so keep re-linking until a full pass
        // over the remaining objects makes no progress.
        loop {
            let mut progressed = false;
            let mut still_unlinked = Vec::new();
            for (path, object) in unlinked.drain(..) {
                let (bytecode, object_format) =
                    polkavm_link(&object, &linker_symbols, &factory_dependencies, true)?;
                match object_format {
                    // Symbols are still missing; retry in the next pass with
                    // the partially linked object.
                    ObjectFormat::ELF => still_unlinked.push((path, bytecode)),
                    ObjectFormat::PVM => {
                        factory_dependencies
                            .insert(factory_dependency_symbol(&path), polkavm_hash(&bytecode));
                        linked.insert(path, bytecode);
                        progressed = true;
                    }
                }
            }
            unlinked = still_unlinked;
            if !progressed {
                break;
            }
        }

        Ok(Self { linked, unlinked })
    }
}
/// Derives the factory dependency symbol from a bytecode file `path` by
/// removing one trailing `.{EXTENSION_POLKAVM_BINARY}` extension, if present.
fn factory_dependency_symbol(path: &str) -> String {
    let extension = format!(".{EXTENSION_POLKAVM_BINARY}");
    // `strip_suffix` removes the extension at most once; the previous
    // `trim_end_matches` would also strip repeated suffixes, turning e.g.
    // `a.pvm.pvm` into `a` instead of `a.pvm`.
    path.strip_suffix(extension.as_str())
        .unwrap_or(path)
        .to_string()
}
+7 -16
View File
@@ -1,22 +1,21 @@
//! The missing Solidity libraries.
use std::collections::BTreeMap;
use std::collections::HashSet;
use std::collections::BTreeSet;
use revive_solc_json_interface::SolcStandardJsonOutput;
use crate::solc::version::Version as SolcVersion;
use crate::ResolcVersion;
/// The missing Solidity libraries.
pub struct MissingLibraries {
/// The missing libraries.
pub contract_libraries: BTreeMap<String, HashSet<String>>,
pub contract_libraries: BTreeMap<String, BTreeSet<String>>,
}
impl MissingLibraries {
/// A shortcut constructor.
pub fn new(contract_libraries: BTreeMap<String, HashSet<String>>) -> Self {
pub fn new(contract_libraries: BTreeMap<String, BTreeSet<String>>) -> Self {
Self { contract_libraries }
}
@@ -25,27 +24,19 @@ impl MissingLibraries {
mut self,
standard_json: &mut SolcStandardJsonOutput,
solc_version: &SolcVersion,
) -> anyhow::Result<()> {
let contracts = match standard_json.contracts.as_mut() {
Some(contracts) => contracts,
None => return Ok(()),
};
for (path, contracts) in contracts.iter_mut() {
for (name, contract) in contracts.iter_mut() {
) {
for (path, file) in standard_json.contracts.iter_mut() {
for (name, contract) in file.iter_mut() {
let full_name = format!("{path}:{name}");
let missing_libraries = self.contract_libraries.remove(full_name.as_str());
if let Some(missing_libraries) = missing_libraries {
contract.missing_libraries = Some(missing_libraries);
contract.missing_libraries = missing_libraries;
}
}
}
standard_json.version = Some(solc_version.default.to_string());
standard_json.long_version = Some(solc_version.long.to_owned());
standard_json.revive_version = Some(ResolcVersion::default().long);
Ok(())
}
}
+30 -12
View File
@@ -1,51 +1,69 @@
//! Process for compiling a single compilation unit.
//! The input data.
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use revive_common::MetadataHash;
use revive_llvm_context::DebugConfig;
use revive_llvm_context::OptimizerSettings;
use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory;
use serde::Deserialize;
use serde::Serialize;
use crate::project::contract::Contract;
use crate::project::Project;
use crate::SolcVersion;
/// The input data.
#[derive(Debug, Serialize, Deserialize)]
pub struct Input {
/// The contract representation.
pub contract: Contract,
/// The project representation.
pub project: Project,
/// The `solc` compiler version.
pub solc_version: Option<SolcVersion>,
/// Whether to append the metadata hash.
pub include_metadata_hash: bool,
pub metadata_hash: MetadataHash,
/// The optimizer settings.
pub optimizer_settings: revive_llvm_context::OptimizerSettings,
pub optimizer_settings: OptimizerSettings,
/// The debug output config.
pub debug_config: revive_llvm_context::DebugConfig,
pub debug_config: DebugConfig,
/// The extra LLVM arguments give used for manual control.
pub llvm_arguments: Vec<String>,
/// The PVM memory configuration.
pub memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
/// Missing unlinked libraries.
pub missing_libraries: BTreeSet<String>,
/// Factory dependencies.
pub factory_dependencies: BTreeSet<String>,
/// The mapping of auxiliary identifiers, e.g. Yul object names, to full contract paths.
pub identifier_paths: BTreeMap<String, String>,
}
impl Input {
/// A shortcut constructor.
pub fn new(
contract: Contract,
project: Project,
include_metadata_hash: bool,
optimizer_settings: revive_llvm_context::OptimizerSettings,
debug_config: revive_llvm_context::DebugConfig,
solc_version: Option<SolcVersion>,
metadata_hash: MetadataHash,
optimizer_settings: OptimizerSettings,
debug_config: DebugConfig,
llvm_arguments: Vec<String>,
memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
missing_libraries: BTreeSet<String>,
factory_dependencies: BTreeSet<String>,
identifier_paths: BTreeMap<String, String>,
) -> Self {
Self {
contract,
project,
include_metadata_hash,
solc_version,
metadata_hash,
optimizer_settings,
debug_config,
llvm_arguments,
memory_config,
missing_libraries,
factory_dependencies,
identifier_paths,
}
}
}
+12 -63
View File
@@ -1,5 +1,12 @@
//! Process for compiling a single compilation unit.
use revive_solc_json_interface::SolcStandardJsonOutputError;
use serde::de::DeserializeOwned;
use serde::Serialize;
use self::input::Input;
use self::output::Output;
pub mod input;
#[cfg(not(target_os = "emscripten"))]
pub mod native_process;
@@ -7,71 +14,13 @@ pub mod output;
#[cfg(target_os = "emscripten")]
pub mod worker_process;
use std::io::{Read, Write};
use self::input::Input;
use self::output::Output;
pub trait Process {
/// Read input from `stdin`, compile a contract, and write the output to `stdout`.
fn run(input_file: Option<&mut std::fs::File>) -> anyhow::Result<()> {
let mut stdin = std::io::stdin();
let mut stdout = std::io::stdout();
let mut stderr = std::io::stderr();
let mut buffer = Vec::with_capacity(16384);
match input_file {
Some(ins) => {
if let Err(error) = ins.read_to_end(&mut buffer) {
anyhow::bail!("Failed to read recursive process input file: {:?}", error);
}
}
None => {
if let Err(error) = stdin.read_to_end(&mut buffer) {
anyhow::bail!(
"Failed to read recursive process input from stdin: {:?}",
error
)
}
}
}
let input: Input = revive_common::deserialize_from_slice(buffer.as_slice())?;
revive_llvm_context::initialize_llvm(
revive_llvm_context::Target::PVM,
crate::DEFAULT_EXECUTABLE_NAME,
&input.llvm_arguments,
);
let result = input.contract.compile(
input.project,
input.optimizer_settings,
input.include_metadata_hash,
input.debug_config,
&input.llvm_arguments,
input.memory_config,
);
match result {
Ok(build) => {
let output = Output::new(build);
let json = serde_json::to_vec(&output).expect("Always valid");
stdout
.write_all(json.as_slice())
.expect("Stdout writing error");
Ok(())
}
Err(error) => {
let message = error.to_string();
stderr
.write_all(message.as_bytes())
.expect("Stderr writing error");
Err(error)
}
}
}
fn run(input: Input) -> anyhow::Result<()>;
/// Runs this process recursively to compile a single contract.
fn call(input: Input) -> anyhow::Result<Output>;
fn call<I: Serialize, O: DeserializeOwned>(
path: &str,
input: I,
) -> Result<O, SolcStandardJsonOutputError>;
}
+84 -44
View File
@@ -5,6 +5,12 @@ use std::path::PathBuf;
use std::process::Command;
use once_cell::sync::OnceCell;
use revive_common::deserialize_from_slice;
use revive_common::EXIT_CODE_SUCCESS;
use revive_solc_json_interface::standard_json::output::error::source_location::SourceLocation;
use revive_solc_json_interface::SolcStandardJsonOutputError;
use serde::de::DeserializeOwned;
use serde::Serialize;
use super::Input;
use super::Output;
@@ -16,61 +22,95 @@ pub static EXECUTABLE: OnceCell<PathBuf> = OnceCell::new();
pub struct NativeProcess;
impl Process for NativeProcess {
fn call(input: Input) -> anyhow::Result<Output> {
let input_json = serde_json::to_vec(&input).expect("Always valid");
fn run(input: Input) -> anyhow::Result<()> {
let source_location = SourceLocation::new(input.contract.identifier.path.to_owned());
let executable = match EXECUTABLE.get() {
Some(executable) => executable.to_owned(),
None => std::env::current_exe()?,
};
let result = std::thread::Builder::new()
.stack_size(crate::RAYON_WORKER_STACK_SIZE)
.spawn(move || {
input
.contract
.compile(
input.solc_version,
input.optimizer_settings,
input.metadata_hash,
input.debug_config,
&input.llvm_arguments,
input.memory_config,
input.missing_libraries,
input.factory_dependencies,
input.identifier_paths,
)
.map(Output::new)
.map_err(|error| {
SolcStandardJsonOutputError::new_error(error, Some(source_location), None)
})
})
.expect("Threading error")
.join()
.expect("Threading error");
serde_json::to_writer(std::io::stdout(), &result)
.map_err(|error| anyhow::anyhow!("Stdout writing error: {error}"))?;
Ok(())
}
fn call<I, O>(path: &str, input: I) -> Result<O, SolcStandardJsonOutputError>
where
I: Serialize,
O: DeserializeOwned,
{
let executable = EXECUTABLE
.get()
.cloned()
.unwrap_or_else(|| std::env::current_exe().expect("Should have an executable"));
let mut command = Command::new(executable.as_path());
command.stdin(std::process::Stdio::piped());
command.stdout(std::process::Stdio::piped());
command.stderr(std::process::Stdio::piped());
command.arg("--recursive-process");
let process = command.spawn().map_err(|error| {
anyhow::anyhow!("{:?} subprocess spawning error: {:?}", executable, error)
})?;
command.arg(path);
#[cfg(debug_assertions)]
input
.debug_config
.dump_stage_output(&input.contract.path, Some("stage"), &input_json)
.map_err(|error| {
anyhow::anyhow!(
"{:?} failed to log the recursive process output: {:?}",
executable,
error,
)
})?;
process
let mut process = command
.spawn()
.unwrap_or_else(|error| panic!("{executable:?} subprocess spawning: {error:?}"));
let stdin = process
.stdin
.as_ref()
.ok_or_else(|| anyhow::anyhow!("{:?} stdin getting error", executable))?
.write_all(input_json.as_slice())
.map_err(|error| {
anyhow::anyhow!("{:?} stdin writing error: {:?}", executable, error)
})?;
let output = process.wait_with_output().map_err(|error| {
anyhow::anyhow!("{:?} subprocess output error: {:?}", executable, error)
})?;
if !output.status.success() {
anyhow::bail!(
"{}",
String::from_utf8_lossy(output.stderr.as_slice()).to_string(),
.as_mut()
.unwrap_or_else(|| panic!("{executable:?} subprocess stdin getting error"));
let stdin_input = serde_json::to_vec(&input).expect("Always valid");
stdin
.write_all(stdin_input.as_slice())
.unwrap_or_else(|error| panic!("{executable:?} subprocess stdin writing: {error:?}"));
let result = process
.wait_with_output()
.unwrap_or_else(|error| panic!("{executable:?} subprocess output reading: {error:?}"));
if result.status.code() != Some(EXIT_CODE_SUCCESS) {
let message = format!(
"{executable:?} subprocess failed with exit code {:?}:\n{}\n{}",
result.status.code(),
String::from_utf8_lossy(result.stdout.as_slice()),
String::from_utf8_lossy(result.stderr.as_slice()),
);
return Err(SolcStandardJsonOutputError::new_error(
message,
Some(SourceLocation::new(path.to_owned())),
None,
));
}
let output: Output = revive_common::deserialize_from_slice(output.stdout.as_slice())
.map_err(|error| {
anyhow::anyhow!(
"{:?} subprocess output parsing error: {}",
executable,
error,
)
})?;
Ok(output)
match deserialize_from_slice(result.stdout.as_slice()) {
Ok(output) => output,
Err(error) => {
panic!(
"{executable:?} subprocess stdout parsing error: {error:?}\n{}\n{}",
String::from_utf8_lossy(result.stdout.as_slice()),
String::from_utf8_lossy(result.stderr.as_slice()),
);
}
}
}
}
+52 -23
View File
@@ -2,13 +2,18 @@
use std::ffi::{c_char, c_void, CStr, CString};
use serde::de::DeserializeOwned;
use serde::Deserialize;
use serde::Serialize;
use revive_common::deserialize_from_slice;
use revive_solc_json_interface::standard_json::output::error::source_location::SourceLocation;
use revive_solc_json_interface::SolcStandardJsonOutputError;
use super::Input;
use super::Output;
use super::Process;
use anyhow::Context;
use serde::Deserialize;
#[derive(Deserialize)]
struct Error {
message: String,
@@ -29,10 +34,40 @@ enum Response {
pub struct WorkerProcess;
impl Process for WorkerProcess {
fn call(input: Input) -> anyhow::Result<Output> {
fn run(input: Input) -> anyhow::Result<()> {
let source_location = SourceLocation::new(input.contract.identifier.path.to_owned());
let result = input
.contract
.compile(
None,
input.optimizer_settings,
input.metadata_hash,
input.debug_config,
&input.llvm_arguments,
input.memory_config,
input.missing_libraries,
input.factory_dependencies,
input.identifier_paths,
)
.map(Output::new)
.map_err(|error| {
SolcStandardJsonOutputError::new_error(error, Some(source_location), None)
});
serde_json::to_writer(std::io::stdout(), &result)
.map_err(|error| anyhow::anyhow!("Stdout writing error: {error}"))?;
Ok(())
}
fn call<I, O>(_path: &str, input: I) -> Result<O, SolcStandardJsonOutputError>
where
I: Serialize,
O: DeserializeOwned,
{
let input_json = serde_json::to_vec(&input).expect("Always valid");
let input_str = String::from_utf8(input_json).expect("Input shall be valid");
// Prepare the input string for the Emscripten function
let input_cstring = CString::new(input_str).expect("CString allocation failed");
// Call the Emscripten function
@@ -40,26 +75,20 @@ impl Process for WorkerProcess {
unsafe { resolc_compile(input_cstring.as_ptr(), input_cstring.as_bytes().len()) };
// Convert the output pointer back to a Rust string
let output_str = unsafe {
CStr::from_ptr(output_ptr)
.to_str()
.with_context(|| "Failed to convert C string to Rust string")
.map(str::to_owned)
};
let output_str = unsafe { CStr::from_ptr(output_ptr).to_str().map(str::to_owned) };
unsafe { libc::free(output_ptr as *mut c_void) };
let output_str = output_str?;
let response: Response = serde_json::from_str(&output_str)
.map_err(|error| anyhow::anyhow!("Worker output parsing error: {}", error,))?;
match response {
Response::Success(out) => {
let output: Output = revive_common::deserialize_from_slice(out.data.as_bytes())
.map_err(|error| {
anyhow::anyhow!("resolc.js subprocess output parsing error: {}", error,)
})?;
Ok(output)
}
Response::Error(err) => anyhow::bail!("Worker error: {}", err.message,),
let output_str = output_str.unwrap_or_else(|error| panic!("resolc.js output: {error:?}"));
let response = serde_json::from_str(&output_str)
.unwrap_or_else(|error| panic!("Worker output parsing error: {error}"));
match response {
Response::Success(out) => match deserialize_from_slice(out.data.as_bytes()) {
Ok(output) => output,
Err(error) => {
panic!("resolc.js subprocess output parsing error: {error}")
}
},
Response::Error(err) => panic!("Worker error: {}", err.message),
}
}
}
@@ -1,21 +0,0 @@
//! The contract LLVM IR source code.
use serde::Deserialize;
use serde::Serialize;
/// The contract LLVM IR source code.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[allow(clippy::upper_case_acronyms)]
pub struct LLVMIR {
/// The LLVM IR file path.
pub path: String,
/// The LLVM IR source code.
pub source: String,
}
impl LLVMIR {
/// A shortcut constructor.
pub fn new(path: String, source: String) -> Self {
Self { path, source }
}
}
+12 -38
View File
@@ -1,66 +1,40 @@
//! The contract source code.
pub mod llvm_ir;
pub mod yul;
use std::collections::HashSet;
use std::collections::BTreeSet;
use serde::Deserialize;
use serde::Serialize;
use revive_yul::parser::statement::object::Object;
use self::llvm_ir::LLVMIR;
use self::yul::Yul;
pub mod yul;
/// The contract source code.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[allow(clippy::upper_case_acronyms)]
pub enum IR {
/// The Yul source code.
Yul(Yul),
/// The LLVM IR source code.
LLVMIR(LLVMIR),
}
impl IR {
/// A shortcut constructor.
pub fn new_yul(source_code: String, object: Object) -> Self {
Self::Yul(Yul::new(source_code, object))
}
/// A shortcut constructor.
pub fn new_llvm_ir(path: String, source: String) -> Self {
Self::LLVMIR(LLVMIR::new(path, source))
/// Drains the list of factory dependencies.
pub fn drain_factory_dependencies(&mut self) -> BTreeSet<String> {
match self {
IR::Yul(ref mut yul) => yul.object.factory_dependencies.drain().collect(),
}
}
/// Get the list of missing deployable libraries.
pub fn get_missing_libraries(&self) -> HashSet<String> {
pub fn get_missing_libraries(&self) -> BTreeSet<String> {
match self {
Self::Yul(inner) => inner.get_missing_libraries(),
Self::LLVMIR(_inner) => HashSet::new(),
}
}
}
impl<D> revive_llvm_context::PolkaVMWriteLLVM<D> for IR
where
D: revive_llvm_context::PolkaVMDependency + Clone,
{
fn declare(
&mut self,
context: &mut revive_llvm_context::PolkaVMContext<D>,
) -> anyhow::Result<()> {
match self {
Self::Yul(inner) => inner.declare(context),
Self::LLVMIR(_inner) => Ok(()),
}
}
fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext<D>) -> anyhow::Result<()> {
match self {
Self::Yul(inner) => inner.into_llvm(context),
Self::LLVMIR(_inner) => Ok(()),
}
impl From<Yul> for IR {
fn from(inner: Yul) -> Self {
Self::Yul(inner)
}
}
+18 -20
View File
@@ -1,48 +1,46 @@
//! The contract Yul source code.
use std::collections::HashSet;
use std::collections::BTreeSet;
use revive_yul::lexer::Lexer;
use serde::Deserialize;
use serde::Serialize;
use revive_yul::parser::statement::object::Object;
/// The contract Yul source code.
/// he contract Yul source code.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Yul {
/// The Yul source code.
pub source_code: String,
/// The Yul AST object.
pub object: Object,
}
impl Yul {
/// A shortcut constructor.
pub fn new(source_code: String, object: Object) -> Self {
Self {
source_code,
object,
}
/// Transforms the `solc` standard JSON output contract into a Yul object.
pub fn try_from_source(source_code: &str) -> anyhow::Result<Option<Self>> {
if source_code.is_empty() {
return Ok(None);
};
let mut lexer = Lexer::new(source_code.to_owned());
let object = Object::parse(&mut lexer, None)
.map_err(|error| anyhow::anyhow!("Yul parsing: {error:?}"))?;
Ok(Some(Self { object }))
}
/// Get the list of missing deployable libraries.
pub fn get_missing_libraries(&self) -> HashSet<String> {
pub fn get_missing_libraries(&self) -> BTreeSet<String> {
self.object.get_missing_libraries()
}
}
impl<D> revive_llvm_context::PolkaVMWriteLLVM<D> for Yul
where
D: revive_llvm_context::PolkaVMDependency + Clone,
{
fn declare(
&mut self,
context: &mut revive_llvm_context::PolkaVMContext<D>,
) -> anyhow::Result<()> {
impl revive_llvm_context::PolkaVMWriteLLVM for Yul {
fn declare(&mut self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> {
self.object.declare(context)
}
fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext<D>) -> anyhow::Result<()> {
fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> {
self.object.into_llvm(context)
}
}
@@ -1,5 +1,6 @@
//! The Solidity contract metadata.
use revive_llvm_context::OptimizerSettings;
use serde::Serialize;
use crate::ResolcVersion;
@@ -11,13 +12,11 @@ pub struct Metadata {
/// The `solc` metadata.
pub solc_metadata: serde_json::Value,
/// The `solc` version.
pub solc_version: String,
pub solc_version: Option<semver::Version>,
/// The pallet revive edition.
pub revive_pallet_version: Option<semver::Version>,
/// The PolkaVM compiler version.
pub revive_version: String,
/// The PolkaVM compiler optimizer settings.
pub optimizer_settings: revive_llvm_context::OptimizerSettings,
pub optimizer_settings: OptimizerSettings,
/// The extra LLVM arguments give used for manual control.
pub llvm_arguments: Vec<String>,
}
@@ -26,15 +25,13 @@ impl Metadata {
/// A shortcut constructor.
pub fn new(
solc_metadata: serde_json::Value,
solc_version: String,
revive_pallet_version: Option<semver::Version>,
optimizer_settings: revive_llvm_context::OptimizerSettings,
solc_version: Option<semver::Version>,
optimizer_settings: OptimizerSettings,
llvm_arguments: Vec<String>,
) -> Self {
Self {
solc_metadata,
solc_version,
revive_pallet_version,
revive_version: ResolcVersion::default().long,
optimizer_settings,
llvm_arguments,
+62 -125
View File
@@ -1,29 +1,38 @@
//! The contract data.
pub mod ir;
pub mod metadata;
use std::collections::HashSet;
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use revive_common::ContractIdentifier;
use revive_common::Keccak256;
use revive_common::MetadataHash;
use revive_common::ObjectFormat;
use revive_llvm_context::DebugConfig;
use revive_llvm_context::Optimizer;
use revive_llvm_context::OptimizerSettings;
use revive_llvm_context::PolkaVMContext;
use revive_llvm_context::PolkaVMContextSolidityData;
use revive_llvm_context::PolkaVMContextYulData;
use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory;
use serde::Deserialize;
use serde::Serialize;
use sha3::Digest;
use revive_llvm_context::PolkaVMWriteLLVM;
use crate::build::contract::Contract as ContractBuild;
use crate::project::Project;
use crate::solc::version::Version as SolcVersion;
use self::ir::IR;
use self::metadata::Metadata;
pub mod ir;
pub mod metadata;
/// The contract data.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Contract {
/// The absolute file path.
pub path: String,
pub identifier: ContractIdentifier,
/// The IR source code data.
pub ir: IR,
/// The metadata JSON.
@@ -32,22 +41,9 @@ pub struct Contract {
impl Contract {
/// A shortcut constructor.
pub fn new(
path: String,
source_hash: [u8; revive_common::BYTE_LENGTH_WORD],
source_version: SolcVersion,
ir: IR,
metadata_json: Option<serde_json::Value>,
) -> Self {
let metadata_json = metadata_json.unwrap_or_else(|| {
serde_json::json!({
"source_hash": hex::encode(source_hash.as_slice()),
"source_version": serde_json::to_value(&source_version).expect("Always valid"),
})
});
pub fn new(identifier: ContractIdentifier, ir: IR, metadata_json: serde_json::Value) -> Self {
Self {
path,
identifier,
ir,
metadata_json,
}
@@ -56,136 +52,77 @@ impl Contract {
/// Returns the contract identifier, which is:
/// - the Yul object identifier for Yul
/// - the module name for LLVM IR
pub fn identifier(&self) -> &str {
pub fn object_identifier(&self) -> &str {
match self.ir {
IR::Yul(ref yul) => yul.object.identifier.as_str(),
IR::LLVMIR(ref llvm_ir) => llvm_ir.path.as_str(),
}
}
/// Extract factory dependencies.
pub fn drain_factory_dependencies(&mut self) -> HashSet<String> {
match self.ir {
IR::Yul(ref mut yul) => yul.object.factory_dependencies.drain().collect(),
IR::LLVMIR(_) => HashSet::new(),
}
}
/// Compiles the specified contract, setting its build artifacts.
pub fn compile(
mut self,
project: Project,
optimizer_settings: revive_llvm_context::OptimizerSettings,
include_metadata_hash: bool,
mut debug_config: revive_llvm_context::DebugConfig,
self,
solc_version: Option<SolcVersion>,
optimizer_settings: OptimizerSettings,
metadata_hash: MetadataHash,
mut debug_config: DebugConfig,
llvm_arguments: &[String],
memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
missing_libraries: BTreeSet<String>,
factory_dependencies: BTreeSet<String>,
identifier_paths: BTreeMap<String, String>,
) -> anyhow::Result<ContractBuild> {
let llvm = inkwell::context::Context::create();
let optimizer = revive_llvm_context::Optimizer::new(optimizer_settings);
let version = project.version.clone();
let identifier = self.identifier().to_owned();
let optimizer = Optimizer::new(optimizer_settings);
let metadata = Metadata::new(
self.metadata_json.take(),
version.long.clone(),
version.l2_revision.clone(),
self.metadata_json,
solc_version
.as_ref()
.map(|version| version.default.to_owned()),
optimizer.settings().to_owned(),
llvm_arguments.to_vec(),
llvm_arguments.to_owned(),
);
let metadata_json = serde_json::to_value(&metadata).expect("Always valid");
let metadata_hash: Option<[u8; revive_common::BYTE_LENGTH_WORD]> = if include_metadata_hash
{
let metadata_string = serde_json::to_string(&metadata).expect("Always valid");
Some(sha3::Keccak256::digest(metadata_string.as_bytes()).into())
} else {
None
let metadata_json_bytes = serde_json::to_vec(&metadata_json).expect("Always valid");
let metadata_bytes = match metadata_hash {
MetadataHash::Keccak256 => Keccak256::from_slice(&metadata_json_bytes).into(),
MetadataHash::IPFS => todo!("IPFS hash isn't supported yet"),
MetadataHash::None => None,
};
debug_config.set_contract_path(&self.identifier.full_path);
let module = match self.ir {
IR::LLVMIR(ref llvm_ir) => {
// Create the output module
let memory_buffer =
inkwell::memory_buffer::MemoryBuffer::create_from_memory_range_copy(
llvm_ir.source.as_bytes(),
self.path.as_str(),
);
llvm.create_module_from_ir(memory_buffer)
.map_err(|error| anyhow::anyhow!(error.to_string()))?
let build = match self.ir {
IR::Yul(mut yul) => {
let module = llvm.create_module(self.identifier.full_path.as_str());
let mut context =
PolkaVMContext::new(&llvm, module, optimizer, debug_config, memory_config);
context.set_solidity_data(PolkaVMContextSolidityData::default());
let yul_data = PolkaVMContextYulData::new(identifier_paths);
context.set_yul_data(yul_data);
yul.declare(&mut context)?;
yul.into_llvm(&mut context)
.map_err(|error| anyhow::anyhow!("LLVM IR generator: {error}"))?;
context.build(self.identifier.full_path.as_str(), metadata_bytes)?
}
_ => llvm.create_module(self.path.as_str()),
};
debug_config.set_contract_path(&self.path);
let mut context = revive_llvm_context::PolkaVMContext::new(
&llvm,
module,
optimizer,
Some(project),
include_metadata_hash,
debug_config,
llvm_arguments,
memory_config,
);
context.set_solidity_data(revive_llvm_context::PolkaVMContextSolidityData::default());
match self.ir {
IR::Yul(_) => {
context.set_yul_data(Default::default());
}
IR::LLVMIR(_) => {}
}
let factory_dependencies = self.drain_factory_dependencies();
self.ir.declare(&mut context).map_err(|error| {
anyhow::anyhow!(
"The contract `{}` LLVM IR generator declaration pass error: {}",
self.path,
error
)
})?;
self.ir.into_llvm(&mut context).map_err(|error| {
anyhow::anyhow!(
"The contract `{}` LLVM IR generator definition pass error: {}",
self.path,
error
)
})?;
if let Some(debug_info) = context.debug_info() {
debug_info.finalize_module()
}
let build = context.build(self.path.as_str(), metadata_hash)?;
Ok(ContractBuild::new(
self.path,
identifier,
self.identifier,
build,
metadata_json,
missing_libraries,
factory_dependencies,
ObjectFormat::ELF,
))
}
/// Get the list of missing deployable libraries.
pub fn get_missing_libraries(&self) -> HashSet<String> {
self.ir.get_missing_libraries()
}
}
impl<D> PolkaVMWriteLLVM<D> for Contract
where
D: revive_llvm_context::PolkaVMDependency + Clone,
{
fn declare(
&mut self,
context: &mut revive_llvm_context::PolkaVMContext<D>,
) -> anyhow::Result<()> {
self.ir.declare(context)
}
fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext<D>) -> anyhow::Result<()> {
self.ir.into_llvm(context)
pub fn get_missing_libraries(&self, deployed_libraries: &BTreeSet<String>) -> BTreeSet<String> {
self.ir
.get_missing_libraries()
.into_iter()
.filter(|library| !deployed_libraries.contains(library))
.collect::<BTreeSet<String>>()
}
}
+163 -282
View File
@@ -3,54 +3,59 @@
pub mod contract;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use std::collections::BTreeSet;
use std::path::PathBuf;
#[cfg(feature = "parallel")]
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use revive_common::Keccak256;
use revive_common::MetadataHash;
use revive_llvm_context::DebugConfig;
use revive_llvm_context::OptimizerSettings;
use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory;
use revive_solc_json_interface::SolcStandardJsonInputSource;
use revive_solc_json_interface::SolcStandardJsonOutputError;
use serde::Deserialize;
use serde::Serialize;
use sha3::Digest;
use revive_common::ContractIdentifier;
use revive_solc_json_interface::SolcStandardJsonInputSettingsLibraries;
use revive_solc_json_interface::SolcStandardJsonOutput;
use revive_yul::lexer::Lexer;
use revive_yul::parser::statement::object::Object;
use crate::build::contract::Contract as ContractBuild;
use crate::build::Build;
use crate::missing_libraries::MissingLibraries;
use crate::process::input::Input as ProcessInput;
use crate::process::Process;
use crate::project::contract::ir::yul::Yul;
use crate::project::contract::ir::IR;
use crate::project::contract::Contract;
use crate::solc::version::Version as SolcVersion;
use crate::solc::Compiler;
use self::contract::Contract;
use crate::ProcessOutput;
/// The processes input data.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Project {
/// The source code version.
pub version: SolcVersion,
pub version: Option<SolcVersion>,
/// The project contracts,
pub contracts: BTreeMap<String, Contract>,
/// The mapping of auxiliary identifiers, e.g. Yul object names, to full contract paths.
pub identifier_paths: BTreeMap<String, String>,
/// The library addresses.
pub libraries: BTreeMap<String, BTreeMap<String, String>>,
pub libraries: SolcStandardJsonInputSettingsLibraries,
}
impl Project {
/// A shortcut constructor.
pub fn new(
version: SolcVersion,
version: Option<SolcVersion>,
contracts: BTreeMap<String, Contract>,
libraries: BTreeMap<String, BTreeMap<String, String>>,
libraries: SolcStandardJsonInputSettingsLibraries,
) -> Self {
let mut identifier_paths = BTreeMap::new();
for (path, contract) in contracts.iter() {
identifier_paths.insert(contract.identifier().to_owned(), path.to_owned());
identifier_paths.insert(contract.object_identifier().to_owned(), path.to_owned());
}
Self {
@@ -64,319 +69,195 @@ impl Project {
/// Compiles all contracts, returning their build artifacts.
pub fn compile(
self,
optimizer_settings: revive_llvm_context::OptimizerSettings,
include_metadata_hash: bool,
debug_config: revive_llvm_context::DebugConfig,
messages: &mut Vec<SolcStandardJsonOutputError>,
optimizer_settings: OptimizerSettings,
metadata_hash: MetadataHash,
debug_config: &DebugConfig,
llvm_arguments: &[String],
memory_config: revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory,
memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> anyhow::Result<Build> {
let project = self.clone();
let deployed_libraries = self.libraries.as_paths();
#[cfg(feature = "parallel")]
let iter = self.contracts.into_par_iter();
#[cfg(not(feature = "parallel"))]
let iter = self.contracts.into_iter();
let results: BTreeMap<String, anyhow::Result<ContractBuild>> = iter
.map(|(full_path, contract)| {
let process_input = ProcessInput::new(
let results = iter
.map(|(path, mut contract)| {
let factory_dependencies = contract
.ir
.drain_factory_dependencies()
.iter()
.map(|identifier| {
self.identifier_paths
.get(identifier)
.cloned()
.expect("Always exists")
})
.collect();
let missing_libraries = contract.get_missing_libraries(&deployed_libraries);
let input = ProcessInput::new(
contract,
project.clone(),
include_metadata_hash,
self.version.clone(),
metadata_hash,
optimizer_settings.clone(),
debug_config.clone(),
llvm_arguments.to_vec(),
llvm_arguments.to_owned(),
memory_config,
missing_libraries,
factory_dependencies,
self.identifier_paths.clone(),
);
let process_output = {
let result: Result<ProcessOutput, SolcStandardJsonOutputError> = {
#[cfg(target_os = "emscripten")]
{
crate::WorkerProcess::call(process_input)
crate::WorkerProcess::call(path.as_str(), input)
}
#[cfg(not(target_os = "emscripten"))]
{
crate::NativeProcess::call(process_input)
crate::NativeProcess::call(path.as_str(), input)
}
};
(full_path, process_output.map(|output| output.build))
let result = result.map(|output| output.build);
(path, result)
})
.collect();
let mut build = Build::default();
let mut hashes = HashMap::with_capacity(results.len());
for (path, result) in results.iter() {
match result {
Ok(contract) => {
hashes.insert(path.to_owned(), contract.build.bytecode_hash.to_owned());
}
Err(error) => {
anyhow::bail!("Contract `{}` compiling error: {:?}", path, error);
}
}
}
for (path, result) in results.into_iter() {
match result {
Ok(mut contract) => {
for dependency in contract.factory_dependencies.drain() {
let dependency_path = project
.identifier_paths
.get(dependency.as_str())
.cloned()
.unwrap_or_else(|| {
panic!("Dependency `{dependency}` full path not found")
});
let hash = match hashes.get(dependency_path.as_str()) {
Some(hash) => hash.to_owned(),
None => anyhow::bail!(
"Dependency contract `{}` not found in the project",
dependency_path
),
};
contract
.build
.factory_dependencies
.insert(hash, dependency_path);
}
build.contracts.insert(path, contract);
}
Err(error) => {
anyhow::bail!("Contract `{}` compiling error: {:?}", path, error);
}
}
}
Ok(build)
.collect::<BTreeMap<String, Result<ContractBuild, SolcStandardJsonOutputError>>>();
Ok(Build::new(results, messages))
}
/// Get the list of missing deployable libraries.
pub fn get_missing_libraries(&self) -> MissingLibraries {
let deployed_libraries = self
.libraries
pub fn get_missing_libraries(&self, deployed_libraries: &BTreeSet<String>) -> MissingLibraries {
let missing_libraries = self
.contracts
.iter()
.flat_map(|(file, names)| {
names
.keys()
.map(|name| format!("{file}:{name}"))
.collect::<HashSet<String>>()
.map(|(path, contract)| {
(
path.to_owned(),
contract.get_missing_libraries(deployed_libraries),
)
})
.collect::<HashSet<String>>();
let mut missing_deployable_libraries = BTreeMap::new();
for (contract_path, contract) in self.contracts.iter() {
let missing_libraries = contract
.get_missing_libraries()
.into_iter()
.filter(|library| !deployed_libraries.contains(library))
.collect::<HashSet<String>>();
missing_deployable_libraries.insert(contract_path.to_owned(), missing_libraries);
}
MissingLibraries::new(missing_deployable_libraries)
.collect();
MissingLibraries::new(missing_libraries)
}
/// Parses the Yul source code file and returns the source data.
pub fn try_from_yul_path<T: Compiler>(
path: &Path,
solc_validator: Option<&T>,
pub fn try_from_yul_paths(
paths: &[PathBuf],
solc_output: Option<&mut SolcStandardJsonOutput>,
libraries: SolcStandardJsonInputSettingsLibraries,
debug_config: &DebugConfig,
) -> anyhow::Result<Self> {
let source_code = std::fs::read_to_string(path)
.map_err(|error| anyhow::anyhow!("Yul file {:?} reading error: {}", path, error))?;
Self::try_from_yul_string(path, source_code.as_str(), solc_validator)
let sources = paths
.iter()
.map(|path| {
let source = SolcStandardJsonInputSource::from(path.as_path());
(path.to_string_lossy().to_string(), source)
})
.collect::<BTreeMap<String, SolcStandardJsonInputSource>>();
Self::try_from_yul_sources(sources, libraries, solc_output, debug_config)
}
/// Parses the test Yul source code string and returns the source data.
/// Only for integration testing purposes.
pub fn try_from_yul_string<T: Compiler>(
path: &Path,
source_code: &str,
solc_validator: Option<&T>,
pub fn try_from_yul_sources(
sources: BTreeMap<String, SolcStandardJsonInputSource>,
libraries: SolcStandardJsonInputSettingsLibraries,
mut solc_output: Option<&mut SolcStandardJsonOutput>,
debug_config: &DebugConfig,
) -> anyhow::Result<Self> {
if let Some(solc) = solc_validator {
solc.validate_yul(path)?;
#[cfg(feature = "parallel")]
let iter = sources.into_par_iter();
#[cfg(not(feature = "parallel"))]
let iter = sources.into_iter();
let results = iter
.filter_map(|(path, mut source)| {
let source_code = match source.try_resolve() {
Ok(()) => source.take_content().expect("Always exists"),
Err(error) => return Some((path, Err(error))),
};
let ir = match Yul::try_from_source(&source_code) {
Ok(ir) => ir?,
Err(error) => return Some((path, Err(error))),
};
let object_identifier = ir.object.identifier.clone();
let name = ContractIdentifier::new(path.clone(), Some(object_identifier));
let full_path = name.full_path.clone();
if let Err(error) = debug_config.dump_yul(&name.full_path, &source_code) {
return Some((full_path.clone(), Err(error)));
}
let source_metadata = serde_json::json!({
"source_hash": Keccak256::from_slice(source_code.as_bytes()).to_string()
});
let contract = Contract::new(name, ir.into(), source_metadata);
Some((full_path, Ok(contract)))
})
.collect::<BTreeMap<String, anyhow::Result<Contract>>>();
let mut contracts = BTreeMap::new();
for (path, result) in results.into_iter() {
match result {
Ok(contract) => {
contracts.insert(path, contract);
}
Err(error) => match solc_output {
Some(ref mut solc_output) => solc_output.push_error(Some(path), error),
None => anyhow::bail!(error),
},
}
}
let source_version = SolcVersion::new_simple(crate::solc::LAST_SUPPORTED_VERSION);
let path = path.to_string_lossy().to_string();
let source_hash = sha3::Keccak256::digest(source_code.as_bytes()).into();
let mut lexer = Lexer::new(source_code.to_owned());
let object = Object::parse(&mut lexer, None)
.map_err(|error| anyhow::anyhow!("Yul object `{}` parsing error: {}", path, error))?;
let mut project_contracts = BTreeMap::new();
project_contracts.insert(
path.to_owned(),
Contract::new(
path,
source_hash,
source_version.clone(),
IR::new_yul(source_code.to_owned(), object),
None,
),
);
Ok(Self::new(
source_version,
project_contracts,
BTreeMap::new(),
))
}
/// Parses the LLVM IR source code file and returns the source data.
pub fn try_from_llvm_ir_path(path: &Path) -> anyhow::Result<Self> {
let source_code = std::fs::read_to_string(path)
.map_err(|error| anyhow::anyhow!("LLVM IR file {:?} reading error: {}", path, error))?;
let source_hash = sha3::Keccak256::digest(source_code.as_bytes()).into();
let source_version =
SolcVersion::new_simple(revive_llvm_context::polkavm_const::LLVM_VERSION);
let path = path.to_string_lossy().to_string();
let mut project_contracts = BTreeMap::new();
project_contracts.insert(
path.clone(),
Contract::new(
path.clone(),
source_hash,
source_version.clone(),
IR::new_llvm_ir(path, source_code),
None,
),
);
Ok(Self::new(
source_version,
project_contracts,
BTreeMap::new(),
))
Ok(Self::new(None, contracts, libraries))
}
/// Converts the `solc` JSON output into a convenient project.
pub fn try_from_standard_json_output(
output: &SolcStandardJsonOutput,
source_code_files: BTreeMap<String, String>,
libraries: BTreeMap<String, BTreeMap<String, String>>,
solc_output: &mut SolcStandardJsonOutput,
libraries: SolcStandardJsonInputSettingsLibraries,
solc_version: &SolcVersion,
debug_config: &revive_llvm_context::DebugConfig,
debug_config: &DebugConfig,
) -> anyhow::Result<Self> {
let files = match output.contracts.as_ref() {
Some(files) => files,
None => match &output.errors {
Some(errors) if errors.iter().any(|e| e.severity == "error") => {
anyhow::bail!(serde_json::to_string_pretty(errors).expect("Always valid"));
}
_ => &BTreeMap::new(),
},
};
let mut project_contracts = BTreeMap::new();
for (path, contracts) in files.iter() {
for (name, contract) in contracts.iter() {
let full_path = format!("{path}:{name}");
let ir_optimized = match contract.ir_optimized.to_owned() {
Some(ir_optimized) => ir_optimized,
None => continue,
};
if ir_optimized.is_empty() {
continue;
}
debug_config.dump_yul(full_path.as_str(), ir_optimized.as_str())?;
let mut lexer = Lexer::new(ir_optimized.to_owned());
let object = Object::parse(&mut lexer, None).map_err(|error| {
anyhow::anyhow!("Contract `{}` parsing error: {:?}", full_path, error)
})?;
let source = IR::new_yul(ir_optimized.to_owned(), object);
let source_code = source_code_files
.get(path.as_str())
.ok_or_else(|| anyhow::anyhow!("Source code for path `{}` not found", path))?;
let source_hash = sha3::Keccak256::digest(source_code.as_bytes()).into();
let project_contract = Contract::new(
full_path.clone(),
source_hash,
solc_version.to_owned(),
source,
contract.metadata.to_owned(),
);
project_contracts.insert(full_path, project_contract);
let mut input_contracts = Vec::with_capacity(solc_output.contracts.len());
for (path, file) in solc_output.contracts.iter() {
for (name, contract) in file.iter() {
let name = ContractIdentifier::new((*path).to_owned(), Some((*name).to_owned()));
input_contracts.push((name, contract));
}
}
#[cfg(feature = "parallel")]
let iter = input_contracts.into_par_iter();
#[cfg(not(feature = "parallel"))]
let iter = input_contracts.into_iter();
let results = iter
.filter_map(|(name, contract)| {
let ir = match Yul::try_from_source(&contract.ir_optimized)
.map(|yul| yul.map(IR::from))
{
Ok(ir) => ir?,
Err(error) => return Some((name.full_path, Err(error))),
};
if let Err(error) = debug_config.dump_yul(&name.full_path, &contract.ir_optimized) {
return Some((name.full_path, Err(error)));
}
let contract = Contract::new(name.clone(), ir, contract.metadata.clone());
Some((name.full_path, Ok(contract)))
})
.collect::<BTreeMap<String, anyhow::Result<Contract>>>();
let mut contracts = BTreeMap::new();
for (path, result) in results.into_iter() {
match result {
Ok(contract) => {
contracts.insert(path, contract);
}
Err(error) => solc_output.push_error(Some(path), error),
}
}
Ok(Project::new(
solc_version.to_owned(),
project_contracts,
Some(solc_version.clone()),
contracts,
libraries,
))
}
}
impl revive_llvm_context::PolkaVMDependency for Project {
fn compile(
project: Self,
identifier: &str,
optimizer_settings: revive_llvm_context::OptimizerSettings,
include_metadata_hash: bool,
debug_config: revive_llvm_context::DebugConfig,
llvm_arguments: &[String],
memory_config: revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory,
) -> anyhow::Result<String> {
let contract_path = project.resolve_path(identifier)?;
let contract = project
.contracts
.get(contract_path.as_str())
.cloned()
.ok_or_else(|| {
anyhow::anyhow!(
"Dependency contract `{}` not found in the project",
contract_path
)
})?;
contract
.compile(
project,
optimizer_settings,
include_metadata_hash,
debug_config,
llvm_arguments,
memory_config,
)
.map_err(|error| {
anyhow::anyhow!(
"Dependency contract `{}` compiling error: {}",
identifier,
error
)
})
.map(|contract| contract.build.bytecode_hash)
}
fn resolve_path(&self, identifier: &str) -> anyhow::Result<String> {
self.identifier_paths
.get(identifier.strip_suffix("_deployed").unwrap_or(identifier))
.cloned()
.ok_or_else(|| {
anyhow::anyhow!(
"Contract with identifier `{}` not found in the project",
identifier
)
})
}
fn resolve_library(&self, path: &str) -> anyhow::Result<String> {
for (file_path, contracts) in self.libraries.iter() {
for (contract_name, address) in contracts.iter() {
let key = format!("{file_path}:{contract_name}");
if key.as_str() == path {
return Ok(address["0x".len()..].to_owned());
}
}
}
anyhow::bail!("Library `{}` not found in the project", path);
}
}
+156 -106
View File
@@ -6,6 +6,8 @@ use std::path::PathBuf;
use clap::Parser;
use path_slash::PathExt;
use revive_common::MetadataHash;
use revive_solc_json_interface::SolcStandardJsonOutputError;
/// Compiles the provided Solidity input files (or use the standard input if no files
/// are given or "-" is specified as a file name). Outputs the components based on the
@@ -58,10 +60,6 @@ pub struct Arguments {
#[arg(short = 'O', long = "optimization")]
pub optimization: Option<char>,
/// Try to recompile with -Oz if the bytecode is too large.
#[arg(long = "fallback-Oz")]
pub fallback_to_optimizing_for_size: bool,
/// Disable the `solc` optimizer.
/// Use it if your project uses the `MSIZE` instruction, or in other cases.
/// Beware that it will prevent libraries from being inlined.
@@ -92,7 +90,7 @@ pub struct Arguments {
/// Switch to standard JSON input/output mode. Read from stdin, write the result to stdout.
/// This is the default used by the Hardhat plugin.
#[arg(long = "standard-json")]
pub standard_json: bool,
pub standard_json: Option<Option<String>>,
/// Switch to missing deployable libraries detection mode.
/// Only available for standard JSON input/output mode.
@@ -106,17 +104,20 @@ pub struct Arguments {
#[arg(long = "yul")]
pub yul: bool,
/// Switch to LLVM IR mode.
/// Only one input LLVM IR file is allowed.
/// Cannot be used with combined and standard JSON modes.
/// Use this mode at your own risk, as LLVM IR input validation is not implemented.
#[arg(long = "llvm-ir")]
pub llvm_ir: bool,
/// Switch to linker mode, ignoring all options apart from `--libraries` and modify binaries in place.
///
/// Unlinked contract binaries (caused by missing libraries or missing factory dependencies in turn)
/// are emitted as raw ELF objects. Use this mode to link them into PVM blobs.
///
/// NOTE: Contracts must be present in the input files with the EXACT SAME directory structure as their source code,
/// otherwise this may fail to resolve factory dependencies.
#[arg(long)]
pub link: bool,
/// Set metadata hash mode.
/// The only supported value is `none` that disables appending the metadata hash.
/// Is enabled by default.
#[arg(long = "metadata-hash")]
/// Set the metadata hash type.
/// Available types: `none`, `ipfs`, `keccak256`.
/// The default is `keccak256`.
#[arg(long)]
pub metadata_hash: Option<String>,
/// Output PolkaVM assembly of the contracts.
@@ -127,6 +128,10 @@ pub struct Arguments {
#[arg(long = "bin")]
pub output_binary: bool,
/// Output metadata of the compiled project.
#[arg(long = "metadata")]
pub output_metadata: bool,
/// Suppress specified warnings.
/// Available arguments: `ecrecover`, `sendtransfer`, `extcodesize`, `txorigin`, `blocktimestamp`, `blocknumber`, `blockhash`.
#[arg(long = "suppress-warnings")]
@@ -202,155 +207,206 @@ pub struct Arguments {
impl Arguments {
/// Validates the arguments.
pub fn validate(&self) -> anyhow::Result<()> {
pub fn validate(&self) -> Vec<SolcStandardJsonOutputError> {
let mut messages = Vec::new();
if self.version && std::env::args().count() > 2 {
anyhow::bail!("No other options are allowed while getting the compiler version.");
messages.push(SolcStandardJsonOutputError::new_error(
"No other options are allowed while getting the compiler version.",
None,
None,
));
}
if self.supported_solc_versions && std::env::args().count() > 2 {
anyhow::bail!(
"No other options are allowed while getting the supported `solc` versions."
);
messages.push(SolcStandardJsonOutputError::new_error(
"No other options are allowed while getting the supported `solc` version.",
None,
None,
));
}
#[cfg(debug_assertions)]
if self.recursive_process_input.is_some() && !self.recursive_process {
anyhow::bail!("--process-input can be only used when --recursive-process is given");
if self.metadata_hash == Some(MetadataHash::IPFS.to_string()) {
messages.push(SolcStandardJsonOutputError::new_error(
"`IPFS` metadata hash type is not supported. Please use `keccak256` instead.",
None,
None,
));
}
#[cfg(debug_assertions)]
if self.recursive_process
&& ((self.recursive_process_input.is_none() && std::env::args().count() > 2)
|| (self.recursive_process_input.is_some() && std::env::args().count() > 4))
{
anyhow::bail!("No other options are allowed in recursive mode.");
}
#[cfg(not(debug_assertions))]
if self.recursive_process && std::env::args().count() > 2 {
anyhow::bail!("No other options are allowed in recursive mode.");
}
let modes_count = [
let modes = [
self.yul,
self.llvm_ir,
self.combined_json.is_some(),
self.standard_json,
self.standard_json.is_some(),
self.link,
]
.iter()
.filter(|&&x| x)
.count();
if modes_count > 1 {
anyhow::bail!("Only one modes is allowed at the same time: Yul, LLVM IR, PolkaVM assembly, combined JSON, standard JSON.");
let acceptable_count = 1 + self.standard_json.is_some() as usize;
if modes > acceptable_count {
messages.push(SolcStandardJsonOutputError::new_error(
"Only one modes is allowed at the same time: Yul, LLVM IR, PolkaVM assembly, combined JSON, standard JSON.",None,None));
}
if self.yul || self.llvm_ir {
if self.yul && !self.libraries.is_empty() {
messages.push(SolcStandardJsonOutputError::new_error(
"Libraries are not supported in Yul and linker modes.",
None,
None,
));
}
if self.yul || self.link {
if self.base_path.is_some() {
anyhow::bail!(
"`base-path` is not used in Yul, LLVM IR and PolkaVM assembly modes."
);
messages.push(SolcStandardJsonOutputError::new_error(
"`base-path` is not used in Yul and linker modes.",
None,
None,
));
}
if !self.include_paths.is_empty() {
anyhow::bail!(
"`include-paths` is not used in Yul, LLVM IR and PolkaVM assembly modes."
);
messages.push(SolcStandardJsonOutputError::new_error(
"`include-paths` is not used in Yul and linker modes.",
None,
None,
));
}
if self.allow_paths.is_some() {
anyhow::bail!(
"`allow-paths` is not used in Yul, LLVM IR and PolkaVM assembly modes."
);
messages.push(SolcStandardJsonOutputError::new_error(
"`allow-paths` is not used in Yul and linker modes.",
None,
None,
));
}
if !self.libraries.is_empty() {
anyhow::bail!(
"Libraries are not supported in Yul, LLVM IR and PolkaVM assembly modes."
);
}
if self.evm_version.is_some() {
anyhow::bail!(
"`evm-version` is not used in Yul, LLVM IR and PolkaVM assembly modes."
);
messages.push(SolcStandardJsonOutputError::new_error(
"`evm-version` is not used in Yul and linker modes.",
None,
None,
));
}
if self.disable_solc_optimizer {
anyhow::bail!("Disabling the solc optimizer is not supported in Yul, LLVM IR and PolkaVM assembly modes.");
messages.push(SolcStandardJsonOutputError::new_error(
"Disabling the solc optimizer is not supported in Yul and linker modes.",
None,
None,
));
}
}
if self.llvm_ir && self.solc.is_some() {
anyhow::bail!("`solc` is not used in LLVM IR and PolkaVM assembly modes.");
}
if self.combined_json.is_some() && (self.output_assembly || self.output_binary) {
anyhow::bail!(
"Cannot output assembly or binary outside of JSON in combined JSON mode."
);
messages.push(SolcStandardJsonOutputError::new_error(
"Cannot output assembly or binary outside of JSON in combined JSON mode.",
None,
None,
));
}
if self.standard_json {
if self.standard_json.is_some() {
if self.output_assembly || self.output_binary {
anyhow::bail!(
"Cannot output assembly or binary outside of JSON in standard JSON mode."
);
messages.push(SolcStandardJsonOutputError::new_error(
"Cannot output assembly or binary outside of JSON in standard JSON mode.",
None,
None,
));
}
if !self.inputs.is_empty() {
anyhow::bail!("Input files must be passed via standard JSON input.");
messages.push(SolcStandardJsonOutputError::new_error(
"Input files must be passed via standard JSON input.",
None,
None,
));
}
if !self.libraries.is_empty() {
anyhow::bail!("Libraries must be passed via standard JSON input.");
messages.push(SolcStandardJsonOutputError::new_error(
"Libraries must be passed via standard JSON input.",
None,
None,
));
}
if self.evm_version.is_some() {
anyhow::bail!("EVM version must be passed via standard JSON input.");
messages.push(SolcStandardJsonOutputError::new_error(
"EVM version must be passed via standard JSON input.",
None,
None,
));
}
if self.output_directory.is_some() {
anyhow::bail!("Output directory cannot be used in standard JSON mode.");
messages.push(SolcStandardJsonOutputError::new_error(
"Output directory cannot be used in standard JSON mode.",
None,
None,
));
}
if self.overwrite {
anyhow::bail!("Overwriting flag cannot be used in standard JSON mode.");
messages.push(SolcStandardJsonOutputError::new_error(
"Overwriting flag cannot be used in standard JSON mode.",
None,
None,
));
}
if self.disable_solc_optimizer {
anyhow::bail!(
"Disabling the solc optimizer must specified in standard JSON input settings."
);
messages.push(SolcStandardJsonOutputError::new_error(
"Disabling the solc optimizer must specified in standard JSON input settings.",
None,
None,
));
}
if self.optimization.is_some() {
anyhow::bail!("LLVM optimizations must specified in standard JSON input settings.");
}
if self.fallback_to_optimizing_for_size {
anyhow::bail!(
"Falling back to -Oz must specified in standard JSON input settings."
);
messages.push(SolcStandardJsonOutputError::new_error(
"LLVM optimizations must specified in standard JSON input settings.",
None,
None,
));
}
if self.metadata_hash.is_some() {
anyhow::bail!("Metadata hash mode must specified in standard JSON input settings.");
messages.push(SolcStandardJsonOutputError::new_error(
"Metadata hash mode must specified in standard JSON input settings.",
None,
None,
));
}
if self.heap_size.is_some() {
anyhow::bail!(
"Heap size must be specified in standard JSON input polkavm memory settings."
);
messages.push(SolcStandardJsonOutputError::new_error(
"Heap size must be specified in standard JSON input polkavm memory settings.",
None,
None,
));
}
if self.stack_size.is_some() {
anyhow::bail!(
"Stack size must be specified in standard JSON input polkavm memory settings."
);
messages.push(SolcStandardJsonOutputError::new_error(
"Stack size must be specified in standard JSON input polkavm memory settings.",
None,
None,
));
}
if self.emit_source_debug_info {
anyhow::bail!(
"Debug info must be requested in standard JSON input polkavm settings."
);
messages.push(SolcStandardJsonOutputError::new_error(
"Debug info must be requested in standard JSON input polkavm settings.",
None,
None,
));
}
if !self.llvm_arguments.is_empty() {
messages.push(SolcStandardJsonOutputError::new_error(
"LLVM arguments must be configured in standard JSON input polkavm settings.",
None,
None,
));
}
}
Ok(())
messages
}
/// Returns remappings from input paths.
pub fn split_input_files_and_remappings(
&self,
) -> anyhow::Result<(Vec<PathBuf>, Option<BTreeSet<String>>)> {
) -> anyhow::Result<(Vec<PathBuf>, BTreeSet<String>)> {
let mut input_files = Vec::with_capacity(self.inputs.len());
let mut remappings = BTreeSet::new();
@@ -367,7 +423,7 @@ impl Arguments {
}
if parts.len() != 2 {
anyhow::bail!(
"Invalid remapping `{}`: expected two parts separated by '='",
"Invalid remapping `{}`: expected two parts separated by '='.",
input
);
}
@@ -379,12 +435,6 @@ impl Arguments {
}
}
let remappings = if remappings.is_empty() {
None
} else {
Some(remappings)
};
Ok((input_files, remappings))
}
+127 -114
View File
@@ -1,36 +1,84 @@
//! Solidity to PolkaVM compiler binary.
pub mod arguments;
use std::io::Write;
use std::str::FromStr;
use std::{io::Write, path::PathBuf};
use clap::error::ErrorKind;
use resolc::Process;
use revive_common::{
deserialize_from_str, EVMVersion, MetadataHash, EXIT_CODE_FAILURE, EXIT_CODE_SUCCESS,
};
use revive_llvm_context::{initialize_llvm, DebugConfig, OptimizerSettings, PolkaVMTarget};
use revive_solc_json_interface::{
ResolcWarning, SolcStandardJsonInputSettingsPolkaVMMemory,
SolcStandardJsonInputSettingsSelection, SolcStandardJsonOutput, SolcStandardJsonOutputError,
};
use self::arguments::Arguments;
#[cfg(feature = "parallel")]
/// The rayon worker stack size.
const RAYON_WORKER_STACK_SIZE: usize = 16 * 1024 * 1024;
pub mod arguments;
#[cfg(target_env = "musl")]
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
fn main() -> anyhow::Result<()> {
std::process::exit(match main_inner() {
Ok(()) => revive_common::EXIT_CODE_SUCCESS,
Err(error) => {
writeln!(std::io::stderr(), "{error}")?;
revive_common::EXIT_CODE_FAILURE
let arguments = <Arguments as clap::Parser>::try_parse().inspect_err(|error| {
if let ErrorKind::DisplayHelp = error.kind() {
let _ = error.print();
std::process::exit(EXIT_CODE_SUCCESS);
}
})
})?;
let is_standard_json = arguments.standard_json.is_some();
let mut messages = arguments.validate();
if messages.iter().all(|error| error.severity != "error") {
if !is_standard_json {
std::io::stderr()
.write_all(
messages
.drain(..)
.map(|error| error.to_string())
.collect::<Vec<String>>()
.join("\n")
.as_bytes(),
)
.expect("Stderr writing error");
}
if let Err(error) = main_inner(arguments, &mut messages) {
messages.push(SolcStandardJsonOutputError::new_error(error, None, None));
}
}
if is_standard_json {
let output = SolcStandardJsonOutput::new_with_messages(messages);
output.write_and_exit(SolcStandardJsonInputSettingsSelection::default());
}
std::io::stderr()
.write_all(
messages
.iter()
.map(|error| error.to_string())
.collect::<Vec<String>>()
.join("\n")
.as_bytes(),
)
.expect("Stderr writing error");
std::process::exit(
if messages.iter().any(SolcStandardJsonOutputError::is_error) {
EXIT_CODE_FAILURE
} else {
EXIT_CODE_SUCCESS
},
);
}
fn main_inner() -> anyhow::Result<()> {
let arguments = <Arguments as clap::Parser>::try_parse()?;
arguments.validate()?;
fn main_inner(
arguments: Arguments,
messages: &mut Vec<SolcStandardJsonOutputError>,
) -> anyhow::Result<()> {
if arguments.version {
writeln!(
std::io::stdout(),
@@ -53,59 +101,60 @@ fn main_inner() -> anyhow::Result<()> {
#[cfg(feature = "parallel")]
rayon::ThreadPoolBuilder::new()
.stack_size(RAYON_WORKER_STACK_SIZE)
.stack_size(resolc::RAYON_WORKER_STACK_SIZE)
.build_global()
.expect("Thread pool configuration failure");
if arguments.recursive_process {
#[cfg(debug_assertions)]
if let Some(fname) = arguments.recursive_process_input {
let mut infile = std::fs::File::open(fname)?;
#[cfg(target_os = "emscripten")]
{
return resolc::WorkerProcess::run(Some(&mut infile));
}
#[cfg(not(target_os = "emscripten"))]
{
return resolc::NativeProcess::run(Some(&mut infile));
}
}
let input_json = std::io::read_to_string(std::io::stdin())
.map_err(|error| anyhow::anyhow!("Stdin reading error: {error}"))?;
let input: resolc::ProcessInput = deserialize_from_str(input_json.as_str())
.map_err(|error| anyhow::anyhow!("Stdin parsing error: {error}"))?;
initialize_llvm(
PolkaVMTarget::PVM,
resolc::DEFAULT_EXECUTABLE_NAME,
&input.llvm_arguments,
);
#[cfg(target_os = "emscripten")]
{
return resolc::WorkerProcess::run(None);
return resolc::WorkerProcess::run(input);
}
#[cfg(not(target_os = "emscripten"))]
{
return resolc::NativeProcess::run(None);
return resolc::NativeProcess::run(input);
}
}
initialize_llvm(
PolkaVMTarget::PVM,
resolc::DEFAULT_EXECUTABLE_NAME,
&arguments.llvm_arguments,
);
let debug_config = match arguments.debug_output_directory {
Some(ref debug_output_directory) => {
std::fs::create_dir_all(debug_output_directory.as_path())?;
revive_llvm_context::DebugConfig::new(
DebugConfig::new(
Some(debug_output_directory.to_owned()),
arguments.emit_source_debug_info,
)
}
None => revive_llvm_context::DebugConfig::new(None, arguments.emit_source_debug_info),
None => DebugConfig::new(None, arguments.emit_source_debug_info),
};
let (input_files, remappings) = arguments.split_input_files_and_remappings()?;
let suppressed_warnings = match arguments.suppress_warnings {
Some(warnings) => Some(revive_solc_json_interface::ResolcWarning::try_from_strings(
warnings.as_slice(),
)?),
None => None,
};
let suppressed_warnings = ResolcWarning::try_from_strings(
arguments.suppress_warnings.unwrap_or_default().as_slice(),
)?;
let mut solc = {
let solc = {
#[cfg(target_os = "emscripten")]
{
resolc::SoljsonCompiler
resolc::SoljsonCompiler {}
}
#[cfg(not(target_os = "emscripten"))]
{
resolc::SolcCompiler::new(
@@ -117,76 +166,61 @@ fn main_inner() -> anyhow::Result<()> {
};
let evm_version = match arguments.evm_version {
Some(evm_version) => Some(revive_common::EVMVersion::try_from(evm_version.as_str())?),
Some(evm_version) => Some(EVMVersion::try_from(evm_version.as_str())?),
None => None,
};
let mut optimizer_settings = match arguments.optimization {
Some(mode) => revive_llvm_context::OptimizerSettings::try_from_cli(mode)?,
None => revive_llvm_context::OptimizerSettings::size(),
Some(mode) => OptimizerSettings::try_from_cli(mode)?,
None => OptimizerSettings::size(),
};
if arguments.fallback_to_optimizing_for_size {
optimizer_settings.enable_fallback_to_size();
}
optimizer_settings.is_verify_each_enabled = arguments.llvm_verify_each;
optimizer_settings.is_debug_logging_enabled = arguments.llvm_debug_logging;
let include_metadata_hash = match arguments.metadata_hash {
Some(metadata_hash) => {
let metadata =
revive_solc_json_interface::SolcStandardJsonInputSettingsMetadataHash::from_str(
metadata_hash.as_str(),
)?;
metadata != revive_solc_json_interface::SolcStandardJsonInputSettingsMetadataHash::None
}
None => true,
let metadata_hash = match arguments.metadata_hash {
Some(ref hash_type) => MetadataHash::from_str(hash_type.as_str())?,
None => MetadataHash::Keccak256,
};
let memory_config = revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory::new(
arguments.heap_size,
arguments.stack_size,
);
let memory_config =
SolcStandardJsonInputSettingsPolkaVMMemory::new(arguments.heap_size, arguments.stack_size);
let build = if arguments.yul {
resolc::yul(
&solc,
input_files.as_slice(),
&mut solc,
arguments.libraries.as_slice(),
metadata_hash,
messages,
optimizer_settings,
include_metadata_hash,
debug_config,
&arguments.llvm_arguments,
memory_config,
)
} else if arguments.llvm_ir {
resolc::llvm_ir(
input_files.as_slice(),
optimizer_settings,
include_metadata_hash,
debug_config,
&arguments.llvm_arguments,
memory_config,
)
} else if arguments.standard_json {
} else if let Some(standard_json) = arguments.standard_json {
resolc::standard_json(
&mut solc,
arguments.detect_missing_libraries,
&solc,
metadata_hash,
messages,
standard_json.map(PathBuf::from),
arguments.base_path,
arguments.include_paths,
arguments.allow_paths,
debug_config,
&arguments.llvm_arguments,
arguments.detect_missing_libraries,
)?;
return Ok(());
} else if let Some(format) = arguments.combined_json {
resolc::combined_json(
format,
&solc,
input_files.as_slice(),
arguments.libraries,
&mut solc,
arguments.libraries.as_slice(),
metadata_hash,
messages,
evm_version,
format,
!arguments.disable_solc_optimizer,
optimizer_settings,
include_metadata_hash,
arguments.base_path,
arguments.include_paths,
arguments.allow_paths,
@@ -195,67 +229,46 @@ fn main_inner() -> anyhow::Result<()> {
debug_config,
arguments.output_directory,
arguments.overwrite,
&arguments.llvm_arguments,
arguments.llvm_arguments,
memory_config,
)?;
return Ok(());
} else if arguments.link {
return resolc::link(arguments.inputs, arguments.libraries);
} else {
resolc::standard_output(
&solc,
input_files.as_slice(),
arguments.libraries,
&mut solc,
arguments.libraries.as_slice(),
metadata_hash,
messages,
evm_version,
!arguments.disable_solc_optimizer,
optimizer_settings,
include_metadata_hash,
arguments.base_path,
arguments.include_paths,
arguments.allow_paths,
remappings,
suppressed_warnings,
debug_config,
&arguments.llvm_arguments,
arguments.llvm_arguments,
memory_config,
)
}?;
if let Some(output_directory) = arguments.output_directory {
std::fs::create_dir_all(&output_directory)?;
build.write_to_directory(
&output_directory,
arguments.output_metadata,
arguments.output_assembly,
arguments.output_binary,
arguments.overwrite,
)?;
writeln!(
std::io::stderr(),
"Compiler run successful. Artifact(s) can be found in directory {output_directory:?}."
)?;
} else if arguments.output_assembly || arguments.output_binary {
for (path, contract) in build.contracts.into_iter() {
if arguments.output_assembly {
let assembly_text = contract.build.assembly_text;
writeln!(
std::io::stdout(),
"Contract `{path}` assembly:\n\n{assembly_text}"
)?;
}
if arguments.output_binary {
writeln!(
std::io::stdout(),
"Contract `{}` bytecode: 0x{}",
path,
hex::encode(contract.build.bytecode)
)?;
}
}
} else {
writeln!(
std::io::stderr(),
"Compiler run successful. No output requested. Use --asm and --bin flags."
build.write_to_terminal(
arguments.output_metadata,
arguments.output_assembly,
arguments.output_binary,
)?;
}
+40 -18
View File
@@ -1,35 +1,37 @@
//! The Solidity compiler.
use std::collections::HashSet;
use std::path::PathBuf;
use revive_solc_json_interface::combined_json::CombinedJson;
use revive_solc_json_interface::CombinedJsonSelector;
use revive_solc_json_interface::SolcStandardJsonInput;
use revive_solc_json_interface::SolcStandardJsonInputSettingsLibraries;
use revive_solc_json_interface::SolcStandardJsonInputSettingsSelection;
use revive_solc_json_interface::SolcStandardJsonOutput;
use revive_solc_json_interface::SolcStandardJsonOutputError;
use self::version::Version;
#[cfg(not(target_os = "emscripten"))]
pub mod solc_compiler;
#[cfg(target_os = "emscripten")]
pub mod soljson_compiler;
pub mod version;
use std::path::Path;
use std::path::PathBuf;
use revive_solc_json_interface::combined_json::CombinedJson;
use revive_solc_json_interface::SolcStandardJsonInput;
use revive_solc_json_interface::SolcStandardJsonOutput;
use self::version::Version;
/// The first version of `solc` with the support of standard JSON interface.
pub const FIRST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 8, 0);
/// The last supported version of `solc`.
pub const LAST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 8, 30);
/// `--include-path` was introduced in solc `0.8.8` <https://github.com/ethereum/solidity/releases/tag/v0.8.8>
pub const FIRST_INCLUDE_PATH_VERSION: semver::Version = semver::Version::new(0, 8, 8);
/// The Solidity compiler.
pub trait Compiler {
/// Compiles the Solidity `--standard-json` input into Yul IR.
fn standard_json(
&mut self,
input: SolcStandardJsonInput,
&self,
input: &mut SolcStandardJsonInput,
messages: &mut Vec<SolcStandardJsonOutputError>,
base_path: Option<String>,
include_paths: Vec<String>,
allow_paths: Option<String>,
@@ -39,12 +41,32 @@ pub trait Compiler {
fn combined_json(
&self,
paths: &[PathBuf],
combined_json_argument: &str,
selectors: HashSet<CombinedJsonSelector>,
) -> anyhow::Result<CombinedJson>;
/// The `solc` Yul validator.
fn validate_yul(&self, path: &Path) -> anyhow::Result<()>;
/// Validates the Yul project as paths and libraries.
fn validate_yul_paths(
&self,
paths: &[PathBuf],
libraries: SolcStandardJsonInputSettingsLibraries,
messages: &mut Vec<SolcStandardJsonOutputError>,
) -> anyhow::Result<SolcStandardJsonOutput> {
let mut solc_input =
SolcStandardJsonInput::from_yul_paths(paths, libraries, Default::default(), vec![]);
self.validate_yul_standard_json(&mut solc_input, messages)
}
/// Validates the Yul project as standard JSON input.
fn validate_yul_standard_json(
&self,
solc_input: &mut SolcStandardJsonInput,
messages: &mut Vec<SolcStandardJsonOutputError>,
) -> anyhow::Result<SolcStandardJsonOutput> {
solc_input.extend_selection(SolcStandardJsonInputSettingsSelection::new_yul_validation());
let solc_output = self.standard_json(solc_input, messages, None, vec![], None)?;
Ok(solc_output)
}
/// The `solc --version` mini-parser.
fn version(&mut self) -> anyhow::Result<Version>;
fn version(&self) -> anyhow::Result<Version>;
}
+68 -111
View File
@@ -1,12 +1,15 @@
//! The Solidity compiler solc interface.
use std::collections::HashSet;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use revive_common::deserialize_from_slice;
use revive_solc_json_interface::combined_json::CombinedJson;
use revive_solc_json_interface::CombinedJsonSelector;
use revive_solc_json_interface::SolcStandardJsonInput;
use revive_solc_json_interface::SolcStandardJsonOutput;
use revive_solc_json_interface::SolcStandardJsonOutputError;
use crate::solc::version::Version;
@@ -39,8 +42,9 @@ impl SolcCompiler {
impl Compiler for SolcCompiler {
/// Compiles the Solidity `--standard-json` input into Yul IR.
fn standard_json(
&mut self,
mut input: SolcStandardJsonInput,
&self,
input: &mut SolcStandardJsonInput,
messages: &mut Vec<SolcStandardJsonOutputError>,
base_path: Option<String>,
include_paths: Vec<String>,
allow_paths: Option<String>,
@@ -63,10 +67,6 @@ impl Compiler for SolcCompiler {
command.arg(allow_paths);
}
input.normalize();
let suppressed_warnings = input.suppressed_warnings.take().unwrap_or_default();
let input_json = serde_json::to_vec(&input).expect("Always valid");
let process = command.spawn().map_err(|error| {
@@ -92,22 +92,32 @@ impl Compiler for SolcCompiler {
);
}
let mut output: SolcStandardJsonOutput =
revive_common::deserialize_from_slice(output.stdout.as_slice()).map_err(|error| {
anyhow::anyhow!(
"{} subprocess output parsing error: {}\n{}",
self.executable,
error,
revive_common::deserialize_from_slice::<serde_json::Value>(
output.stdout.as_slice()
)
let mut output: SolcStandardJsonOutput = deserialize_from_slice(output.stdout.as_slice())
.map_err(|error| {
anyhow::anyhow!(
"{} subprocess output parsing error: {}\n{}",
self.executable,
error,
deserialize_from_slice::<serde_json::Value>(output.stdout.as_slice())
.map(|json| serde_json::to_string_pretty(&json).expect("Always valid"))
.unwrap_or_else(
|_| String::from_utf8_lossy(output.stdout.as_slice()).to_string()
),
)
})?;
output.preprocess_ast(suppressed_warnings.as_slice())?;
)
})?;
output
.errors
.retain(|error| match error.error_code.as_deref() {
Some(code) => !SolcStandardJsonOutputError::IGNORED_WARNING_CODES.contains(&code),
None => true,
});
output.errors.append(messages);
let mut suppressed_warnings = input.suppressed_warnings.clone();
suppressed_warnings.extend_from_slice(input.settings.suppressed_warnings.as_slice());
input.resolve_sources();
output.preprocess_ast(&input.sources, &suppressed_warnings)?;
Ok(output)
}
@@ -116,104 +126,58 @@ impl Compiler for SolcCompiler {
fn combined_json(
&self,
paths: &[PathBuf],
combined_json_argument: &str,
mut selectors: HashSet<CombinedJsonSelector>,
) -> anyhow::Result<CombinedJson> {
let mut command = std::process::Command::new(self.executable.as_str());
selectors.retain(|selector| selector.is_source_solc());
if selectors.is_empty() {
let version = &self.version()?.default;
return Ok(CombinedJson::new(version.to_owned(), None));
}
let executable = self.executable.to_owned();
let mut command = std::process::Command::new(executable.as_str());
command.stdout(std::process::Stdio::piped());
command.stderr(std::process::Stdio::piped());
command.args(paths);
let mut combined_json_flags = Vec::new();
let mut combined_json_fake_flag_pushed = false;
let mut filtered_flags = Vec::with_capacity(3);
for flag in combined_json_argument.split(',') {
match flag {
flag @ "asm" | flag @ "bin" | flag @ "bin-runtime" => filtered_flags.push(flag),
flag => combined_json_flags.push(flag),
}
}
if combined_json_flags.is_empty() {
combined_json_flags.push("ast");
combined_json_fake_flag_pushed = true;
}
command.arg("--combined-json");
command.arg(combined_json_flags.join(","));
command.arg(
selectors
.into_iter()
.map(|selector| selector.to_string())
.collect::<Vec<String>>()
.join(","),
);
let output = command.output().map_err(|error| {
anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
let process = command
.spawn()
.map_err(|error| anyhow::anyhow!("{executable} subprocess spawning: {error:?}"))?;
let result = process.wait_with_output().map_err(|error| {
anyhow::anyhow!("{} subprocess output reading: {error:?}", self.executable)
})?;
if !output.status.success() {
writeln!(
std::io::stdout(),
"{}",
String::from_utf8_lossy(output.stdout.as_slice())
)?;
writeln!(
std::io::stdout(),
"{}",
String::from_utf8_lossy(output.stderr.as_slice())
)?;
if !result.status.success() {
anyhow::bail!(
"{} error: {}",
"{} subprocess failed with exit code {:?}:\n{}\n{}",
self.executable,
String::from_utf8_lossy(output.stdout.as_slice()).to_string()
result.status.code(),
String::from_utf8_lossy(result.stdout.as_slice()),
String::from_utf8_lossy(result.stderr.as_slice()),
);
}
let mut combined_json: CombinedJson =
revive_common::deserialize_from_slice(output.stdout.as_slice()).map_err(|error| {
anyhow::anyhow!(
"{} subprocess output parsing error: {}\n{}",
self.executable,
error,
revive_common::deserialize_from_slice::<serde_json::Value>(
output.stdout.as_slice()
)
.map(|json| serde_json::to_string_pretty(&json).expect("Always valid"))
.unwrap_or_else(
|_| String::from_utf8_lossy(output.stdout.as_slice()).to_string()
),
)
})?;
for filtered_flag in filtered_flags.into_iter() {
for (_path, contract) in combined_json.contracts.iter_mut() {
match filtered_flag {
"asm" => contract.asm = Some(serde_json::Value::Null),
"bin" => contract.bin = Some("".to_owned()),
"bin-runtime" => contract.bin_runtime = Some("".to_owned()),
_ => continue,
}
}
}
if combined_json_fake_flag_pushed {
combined_json.source_list = None;
combined_json.sources = None;
}
combined_json.remove_evm();
Ok(combined_json)
}
/// The `solc` Yul validator.
fn validate_yul(&self, path: &Path) -> anyhow::Result<()> {
let mut command = std::process::Command::new(self.executable.as_str());
command.arg("--strict-assembly");
command.arg(path);
let output = command.output().map_err(|error| {
anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
})?;
if !output.status.success() {
anyhow::bail!(
"{} error: {}",
deserialize_from_slice::<CombinedJson>(result.stdout.as_slice()).map_err(|error| {
anyhow::anyhow!(
"{} subprocess stdout parsing: {error:?} (stderr: {})",
self.executable,
String::from_utf8_lossy(output.stderr.as_slice()).to_string()
);
}
Ok(())
String::from_utf8_lossy(result.stderr.as_slice()),
)
})
}
/// The `solc --version` mini-parser.
fn version(&mut self) -> anyhow::Result<Version> {
fn version(&self) -> anyhow::Result<Version> {
let mut command = std::process::Command::new(self.executable.as_str());
command.arg("--version");
let output = command.output().map_err(|error| {
@@ -252,13 +216,6 @@ impl Compiler for SolcCompiler {
.parse()
.map_err(|error| anyhow::anyhow!("{} version parsing: {}", self.executable, error))?;
let l2_revision: Option<semver::Version> = stdout
.lines()
.nth(2)
.and_then(|line| line.split(' ').nth(1))
.and_then(|line| line.split('-').nth(1))
.and_then(|version| version.parse().ok());
Ok(Version::new(long, default, l2_revision))
Version::new(long, default).validate()
}
}
+25 -21
View File
@@ -1,11 +1,14 @@
//! The Solidity compiler solJson interface.
use std::path::Path;
use std::collections::HashSet;
use std::path::PathBuf;
use revive_common::deserialize_from_slice;
use revive_solc_json_interface::combined_json::CombinedJson;
use revive_solc_json_interface::CombinedJsonSelector;
use revive_solc_json_interface::SolcStandardJsonInput;
use revive_solc_json_interface::SolcStandardJsonOutput;
use revive_solc_json_interface::SolcStandardJsonOutputError;
use crate::solc::version::Version;
use anyhow::Context;
@@ -24,8 +27,9 @@ pub struct SoljsonCompiler;
impl Compiler for SoljsonCompiler {
/// Compiles the Solidity `--standard-json` input into Yul IR.
fn standard_json(
&mut self,
mut input: SolcStandardJsonInput,
&self,
input: &mut SolcStandardJsonInput,
messages: &mut Vec<SolcStandardJsonOutputError>,
base_path: Option<String>,
include_paths: Vec<String>,
allow_paths: Option<String>,
@@ -40,23 +44,31 @@ impl Compiler for SoljsonCompiler {
anyhow::bail!("configuring allow paths is not supported with solJson")
}
input.normalize();
let suppressed_warnings = input.suppressed_warnings.take().unwrap_or_default();
let input_json = serde_json::to_string(&input).expect("Always valid");
let out = Self::compile_standard_json(input_json)?;
let mut output: SolcStandardJsonOutput =
revive_common::deserialize_from_slice(out.as_bytes()).map_err(|error| {
deserialize_from_slice(out.as_bytes()).map_err(|error| {
anyhow::anyhow!(
"Soljson output parsing error: {}\n{}",
error,
revive_common::deserialize_from_slice::<serde_json::Value>(out.as_bytes())
deserialize_from_slice::<serde_json::Value>(out.as_bytes())
.map(|json| serde_json::to_string_pretty(&json).expect("Always valid"))
.unwrap_or_else(|_| String::from_utf8_lossy(out.as_bytes()).to_string()),
)
})?;
output.preprocess_ast(suppressed_warnings.as_slice())?;
output
.errors
.retain(|error| match error.error_code.as_deref() {
Some(code) => !SolcStandardJsonOutputError::IGNORED_WARNING_CODES.contains(&code),
None => true,
});
output.errors.append(messages);
let mut suppressed_warnings = input.suppressed_warnings.clone();
suppressed_warnings.extend_from_slice(input.settings.suppressed_warnings.as_slice());
input.resolve_sources();
output.preprocess_ast(&input.sources, &suppressed_warnings)?;
Ok(output)
}
@@ -64,16 +76,12 @@ impl Compiler for SoljsonCompiler {
fn combined_json(
&self,
_paths: &[PathBuf],
_combined_json_argument: &str,
_selector: HashSet<CombinedJsonSelector>,
) -> anyhow::Result<CombinedJson> {
unimplemented!();
}
fn validate_yul(&self, _path: &Path) -> anyhow::Result<()> {
unimplemented!();
}
fn version(&mut self) -> anyhow::Result<Version> {
fn version(&self) -> anyhow::Result<Version> {
let version = Self::get_soljson_version()?;
let long = version.clone();
let default: semver::Version = version
@@ -82,11 +90,7 @@ impl Compiler for SoljsonCompiler {
.ok_or_else(|| anyhow::anyhow!("Soljson version parsing: metadata dropping"))?
.parse()
.map_err(|error| anyhow::anyhow!("Soljson version parsing: {}", error))?;
let l2_revision: Option<semver::Version> = version
.split('-')
.nth(1)
.and_then(|version| version.parse().ok());
Ok(Version::new(long, default, l2_revision))
Version::new(long, default).validate()
}
}
+4 -26
View File
@@ -10,34 +10,16 @@ pub struct Version {
pub long: String,
/// The short `semver`.
pub default: semver::Version,
/// The L2 revision additional versioning.
pub l2_revision: Option<semver::Version>,
}
impl Version {
/// A shortcut constructor.
pub fn new(
long: String,
default: semver::Version,
l2_revision: Option<semver::Version>,
) -> Self {
Self {
long,
default,
l2_revision,
}
pub fn new(long: String, default: semver::Version) -> Self {
Self { long, default }
}
/// A shortcut constructor for a simple version.
pub fn new_simple(version: semver::Version) -> Self {
Self {
long: version.to_string(),
default: version,
l2_revision: None,
}
}
pub fn validate(self, include_paths: &[String]) -> anyhow::Result<Self> {
/// Returns an error if an unsupported version is detected.
pub fn validate(self) -> anyhow::Result<Self> {
if self.default < super::FIRST_SUPPORTED_VERSION {
anyhow::bail!(
"`solc` versions <{} are not supported, found {}",
@@ -52,10 +34,6 @@ impl Version {
self.default
);
}
if !include_paths.is_empty() && self.default < super::FIRST_INCLUDE_PATH_VERSION {
anyhow::bail!("--include-path is not supported in solc {}", self.default);
}
Ok(self)
}
}
+377 -332
View File
@@ -1,19 +1,29 @@
//! Common utility used for in frontend and integration tests.
//! Common helper utilities used in tests and benchmarks.
use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::fmt::Display;
use std::path::PathBuf;
use std::sync::Mutex;
use once_cell::sync::Lazy;
use revive_common::MetadataHash;
use revive_llvm_context::initialize_llvm;
use revive_llvm_context::DebugConfig;
use revive_llvm_context::OptimizerSettings;
use revive_llvm_context::PolkaVMTarget;
use revive_solc_json_interface::standard_json::output::contract::evm::bytecode::Bytecode;
use revive_solc_json_interface::standard_json::output::contract::evm::bytecode::DeployedBytecode;
use revive_solc_json_interface::warning::Warning;
use revive_solc_json_interface::ResolcWarning;
use revive_solc_json_interface::SolcStandardJsonInput;
use revive_solc_json_interface::SolcStandardJsonInputSettingsLibraries;
use revive_solc_json_interface::SolcStandardJsonInputSettingsMetadata;
use revive_solc_json_interface::SolcStandardJsonInputSettingsOptimizer;
use revive_solc_json_interface::SolcStandardJsonInputSettingsSelection;
use revive_solc_json_interface::SolcStandardJsonInputSource;
use revive_solc_json_interface::SolcStandardJsonOutput;
use revive_solc_json_interface::SolcStandardJsonOutputErrorHandler;
use crate::project::Project;
use crate::solc::solc_compiler::SolcCompiler;
@@ -24,17 +34,373 @@ static EVM_BLOB_CACHE: Lazy<Mutex<HashMap<CachedBlob, Vec<u8>>>> = Lazy::new(Def
static EVM_RUNTIME_BLOB_CACHE: Lazy<Mutex<HashMap<CachedBlob, Vec<u8>>>> =
Lazy::new(Default::default);
const DEBUG_CONFIG: revive_llvm_context::DebugConfig =
revive_llvm_context::DebugConfig::new(None, true);
const DEBUG_CONFIG: revive_llvm_context::DebugConfig = DebugConfig::new(None, true);
/// Tests may share and re-use contract code.
/// The compiled blob cache helps avoiding duplicate compilation.
#[derive(Hash, PartialEq, Eq)]
struct CachedBlob {
/// The contract name.
contract_name: String,
solidity: String,
/// Whether the solc optimizer is enabled.
solc_optimizer_enabled: bool,
/// The contract code.
solidity: String,
/// The optimization level.
opt: String,
}
/// Builds the Solidity project and returns the standard JSON output.
pub fn build_solidity(
sources: BTreeMap<String, SolcStandardJsonInputSource>,
) -> anyhow::Result<SolcStandardJsonOutput> {
build_solidity_with_options(
sources,
Default::default(),
Default::default(),
OptimizerSettings::cycles(),
true,
Default::default(),
)
}
/// Builds the Solidity project and returns the standard JSON output.
pub fn build_solidity_with_options(
sources: BTreeMap<String, SolcStandardJsonInputSource>,
libraries: SolcStandardJsonInputSettingsLibraries,
remappings: BTreeSet<String>,
optimizer_settings: OptimizerSettings,
solc_optimizer_enabled: bool,
suppressed_warnings: Vec<ResolcWarning>,
) -> anyhow::Result<SolcStandardJsonOutput> {
check_dependencies();
inkwell::support::enable_llvm_pretty_stack_trace();
initialize_llvm(PolkaVMTarget::PVM, crate::DEFAULT_EXECUTABLE_NAME, &[]);
let _ = crate::process::native_process::EXECUTABLE
.set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
let solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
let solc_version = solc.version()?;
let mut input = SolcStandardJsonInput::try_from_solidity_sources(
None,
sources.clone(),
libraries.clone(),
remappings,
SolcStandardJsonInputSettingsSelection::new_required_for_tests(),
SolcStandardJsonInputSettingsOptimizer::new(
solc_optimizer_enabled,
optimizer_settings
.middle_end_as_string()
.chars()
.last()
.unwrap(),
Default::default(),
),
SolcStandardJsonInputSettingsMetadata::default(),
suppressed_warnings,
Default::default(),
Default::default(),
false,
)?;
let mut output = solc.standard_json(&mut input, &mut vec![], None, vec![], None)?;
if output.has_errors() {
return Ok(output);
}
let debug_config = DebugConfig::new(None, optimizer_settings.middle_end_as_string() != "z");
let linker_symbols = libraries.as_linker_symbols()?;
let build = Project::try_from_standard_json_output(
&mut output,
libraries,
&solc_version,
&debug_config,
)?
.compile(
&mut vec![],
optimizer_settings,
MetadataHash::Keccak256,
&debug_config,
Default::default(),
Default::default(),
)?;
build.check_errors()?;
let build = build.link(linker_symbols, &debug_config);
build.check_errors()?;
build.write_to_standard_json(&mut output, &solc_version)?;
output.check_errors()?;
Ok(output)
}
/// Build a Solidity contract and get the EVM code
pub fn build_solidity_with_options_evm(
sources: BTreeMap<String, SolcStandardJsonInputSource>,
libraries: SolcStandardJsonInputSettingsLibraries,
remappings: BTreeSet<String>,
solc_optimizer_enabled: bool,
) -> anyhow::Result<BTreeMap<String, (Bytecode, DeployedBytecode)>> {
check_dependencies();
inkwell::support::enable_llvm_pretty_stack_trace();
initialize_llvm(PolkaVMTarget::PVM, crate::DEFAULT_EXECUTABLE_NAME, &[]);
let _ = crate::process::native_process::EXECUTABLE
.set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
let solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
let mut input = SolcStandardJsonInput::try_from_solidity_sources(
None,
sources.clone(),
libraries.clone(),
remappings,
SolcStandardJsonInputSettingsSelection::new_required_for_tests(),
SolcStandardJsonInputSettingsOptimizer::new(
solc_optimizer_enabled,
Default::default(),
Default::default(),
),
SolcStandardJsonInputSettingsMetadata::default(),
Default::default(),
Default::default(),
Default::default(),
false,
)?;
let mut contracts = BTreeMap::new();
for files in solc
.standard_json(&mut input, &mut vec![], None, vec![], None)?
.contracts
{
for (name, contract) in files.1 {
if let Some(evm) = contract.evm {
let (Some(bytecode), Some(deployed_bytecode)) =
(evm.bytecode.as_ref(), evm.deployed_bytecode.as_ref())
else {
continue;
};
contracts.insert(name.clone(), (bytecode.clone(), deployed_bytecode.clone()));
}
}
}
Ok(contracts)
}
/// Builds the Solidity project and returns the standard JSON output.
pub fn build_solidity_and_detect_missing_libraries<T: ToString>(
sources: &[(T, T)],
libraries: SolcStandardJsonInputSettingsLibraries,
) -> anyhow::Result<SolcStandardJsonOutput> {
check_dependencies();
let deployed_libraries = libraries.as_paths();
let sources = BTreeMap::from_iter(
sources
.iter()
.map(|(path, code)| (path.to_string(), code.to_string().into())),
);
inkwell::support::enable_llvm_pretty_stack_trace();
initialize_llvm(PolkaVMTarget::PVM, crate::DEFAULT_EXECUTABLE_NAME, &[]);
let _ = crate::process::native_process::EXECUTABLE
.set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
let solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
let solc_version = solc.version()?;
let mut input = SolcStandardJsonInput::try_from_solidity_sources(
None,
sources.clone(),
libraries.clone(),
Default::default(),
SolcStandardJsonInputSettingsSelection::new_required_for_tests(),
SolcStandardJsonInputSettingsOptimizer::default(),
SolcStandardJsonInputSettingsMetadata::default(),
Default::default(),
Default::default(),
Default::default(),
true,
)?;
let mut output = solc.standard_json(&mut input, &mut vec![], None, vec![], None)?;
if output.has_errors() {
return Ok(output);
}
let project = Project::try_from_standard_json_output(
&mut output,
libraries,
&solc_version,
&DEBUG_CONFIG,
)?;
let missing_libraries = project.get_missing_libraries(&deployed_libraries);
missing_libraries.write_to_standard_json(&mut output, &solc.version()?);
Ok(output)
}
/// Checks if the Yul project can be built without errors.
pub fn build_yul<T: ToString + Display>(sources: &[(T, T)]) -> anyhow::Result<()> {
check_dependencies();
inkwell::support::enable_llvm_pretty_stack_trace();
initialize_llvm(PolkaVMTarget::PVM, crate::DEFAULT_EXECUTABLE_NAME, &[]);
let optimizer_settings = OptimizerSettings::none();
let sources = sources
.iter()
.map(|(path, source)| {
(
path.to_string(),
SolcStandardJsonInputSource::from(source.to_string()),
)
})
.collect();
let mut output = SolcStandardJsonOutput::new(&sources, &mut vec![]);
let project = Project::try_from_yul_sources(
sources,
Default::default(),
Some(&mut output),
&Default::default(),
)?;
let build = project.compile(
&mut vec![],
optimizer_settings,
MetadataHash::None,
&DEBUG_CONFIG,
Default::default(),
Default::default(),
)?;
build.check_errors()?;
let build = build.link(BTreeMap::new(), &DEBUG_CONFIG);
build.check_errors()?;
let solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
build.write_to_standard_json(&mut output, &solc.version()?)?;
output.check_errors()?;
Ok(())
}
/// Builds the Yul standard JSON and returns the standard JSON output.
pub fn build_yul_standard_json(
mut solc_input: SolcStandardJsonInput,
) -> anyhow::Result<SolcStandardJsonOutput> {
check_dependencies();
inkwell::support::enable_llvm_pretty_stack_trace();
initialize_llvm(PolkaVMTarget::PVM, crate::DEFAULT_EXECUTABLE_NAME, &[]);
let solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
let mut output = solc.validate_yul_standard_json(&mut solc_input, &mut vec![])?;
let build = Project::try_from_yul_sources(
solc_input.sources,
Default::default(),
Some(&mut output),
&Default::default(),
)?
.compile(
&mut vec![],
OptimizerSettings::try_from_cli(solc_input.settings.optimizer.mode)?,
MetadataHash::Keccak256,
&DEBUG_CONFIG,
Default::default(),
Default::default(),
)?;
build.check_errors()?;
let build = build.link(Default::default(), &Default::default());
build.check_errors()?;
build.write_to_standard_json(&mut output, &solc.version()?)?;
output.check_errors()?;
Ok(output)
}
/// Compile the blob of `contract_name` found in given `source_code`.
/// The `solc` optimizer will be enabled
pub fn compile_blob(contract_name: &str, source_code: &str) -> Vec<u8> {
compile_blob_with_options(
contract_name,
source_code,
true,
OptimizerSettings::cycles(),
)
}
/// Compile the blob of `contract_name` found in given `source_code`.
pub fn compile_blob_with_options(
contract_name: &str,
source_code: &str,
solc_optimizer_enabled: bool,
optimizer_settings: OptimizerSettings,
) -> Vec<u8> {
let id = CachedBlob {
contract_name: contract_name.to_owned(),
opt: optimizer_settings.middle_end_as_string(),
solc_optimizer_enabled,
solidity: source_code.to_owned(),
};
if let Some(blob) = PVM_BLOB_CACHE.lock().unwrap().get(&id) {
return blob.clone();
}
let file_name = "contract.sol";
let contracts = build_solidity_with_options(
BTreeMap::from([(
file_name.to_owned(),
SolcStandardJsonInputSource::from(source_code.to_owned()),
)]),
Default::default(),
Default::default(),
optimizer_settings,
solc_optimizer_enabled,
Default::default(),
)
.expect("source should compile")
.contracts;
let bytecode = contracts[file_name][contract_name]
.evm
.as_ref()
.expect("source should produce EVM output")
.bytecode
.as_ref()
.expect("source should produce assembly text")
.object
.as_str();
let blob = hex::decode(bytecode).expect("hex encoding should always be valid");
PVM_BLOB_CACHE.lock().unwrap().insert(id, blob.clone());
blob
}
/// Compile the EVM bin-runtime of `contract_name` found in given `source_code`.
/// The `solc` optimizer will be enabled
pub fn compile_evm_bin_runtime(contract_name: &str, source_code: &str) -> Vec<u8> {
compile_evm(contract_name, source_code, true, true)
}
/// Compile the EVM bin of `contract_name` found in given `source_code`.
pub fn compile_evm_deploy_code(
contract_name: &str,
source_code: &str,
solc_optimizer_enabled: bool,
) -> Vec<u8> {
compile_evm(contract_name, source_code, solc_optimizer_enabled, false)
}
/// Convert `(path, solidity)` tuples to a standard JSON input source.
pub fn sources<T: ToString>(sources: &[(T, T)]) -> BTreeMap<String, SolcStandardJsonInputSource> {
BTreeMap::from_iter(
sources
.iter()
.map(|(path, code)| (path.to_string(), code.to_string().into())),
)
}
/// Checks if the required executables are present in `${PATH}`.
fn check_dependencies() {
for executable in [
@@ -50,285 +416,7 @@ fn check_dependencies() {
}
}
/// Builds the Solidity project and returns the standard JSON output.
pub fn build_solidity(
sources: BTreeMap<String, String>,
libraries: BTreeMap<String, BTreeMap<String, String>>,
remappings: Option<BTreeSet<String>>,
optimizer_settings: revive_llvm_context::OptimizerSettings,
) -> anyhow::Result<SolcStandardJsonOutput> {
build_solidity_with_options(sources, libraries, remappings, optimizer_settings, true)
}
/// Builds the Solidity project and returns the standard JSON output.
/// Gives control over additional options:
/// - `solc_optimizer_enabled`: Whether to use the `solc` optimizer
pub fn build_solidity_with_options(
sources: BTreeMap<String, String>,
libraries: BTreeMap<String, BTreeMap<String, String>>,
remappings: Option<BTreeSet<String>>,
optimizer_settings: revive_llvm_context::OptimizerSettings,
solc_optimizer_enabled: bool,
) -> anyhow::Result<SolcStandardJsonOutput> {
check_dependencies();
inkwell::support::enable_llvm_pretty_stack_trace();
revive_llvm_context::initialize_llvm(
revive_llvm_context::Target::PVM,
crate::DEFAULT_EXECUTABLE_NAME,
&[],
);
let _ = crate::process::native_process::EXECUTABLE
.set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
let solc_version = solc.version()?;
let input = SolcStandardJsonInput::try_from_sources(
None,
sources.clone(),
libraries.clone(),
remappings,
SolcStandardJsonInputSettingsSelection::new_required_for_tests(),
SolcStandardJsonInputSettingsOptimizer::new(
solc_optimizer_enabled,
optimizer_settings.middle_end_as_string().chars().last(),
&solc_version.default,
false,
),
None,
None,
None,
)?;
let mut output = solc.standard_json(input, None, vec![], None)?;
let debug_config = revive_llvm_context::DebugConfig::new(
None,
optimizer_settings.middle_end_as_string() != "z",
);
let project = Project::try_from_standard_json_output(
&output,
sources,
libraries,
&solc_version,
&debug_config,
)?;
let build: crate::Build = project.compile(
optimizer_settings,
false,
debug_config,
Default::default(),
Default::default(),
)?;
build.write_to_standard_json(&mut output, &solc_version)?;
Ok(output)
}
/// Build a Solidity contract and get the EVM code
pub fn build_solidity_with_options_evm(
sources: BTreeMap<String, String>,
libraries: BTreeMap<String, BTreeMap<String, String>>,
remappings: Option<BTreeSet<String>>,
solc_optimizer_enabled: bool,
) -> anyhow::Result<BTreeMap<String, (Bytecode, DeployedBytecode)>> {
check_dependencies();
inkwell::support::enable_llvm_pretty_stack_trace();
revive_llvm_context::initialize_llvm(
revive_llvm_context::Target::PVM,
crate::DEFAULT_EXECUTABLE_NAME,
&[],
);
let _ = crate::process::native_process::EXECUTABLE
.set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
let solc_version = solc.version()?;
let input = SolcStandardJsonInput::try_from_sources(
None,
sources.clone(),
libraries.clone(),
remappings,
SolcStandardJsonInputSettingsSelection::new_required_for_tests(),
SolcStandardJsonInputSettingsOptimizer::new(
solc_optimizer_enabled,
None,
&solc_version.default,
false,
),
None,
None,
None,
)?;
let mut output = solc.standard_json(input, None, vec![], None)?;
let mut contracts = BTreeMap::new();
if let Some(files) = output.contracts.as_mut() {
for (_, file) in files.iter_mut() {
for (name, contract) in file.iter_mut() {
if let Some(evm) = contract.evm.as_mut() {
let (Some(bytecode), Some(deployed_bytecode)) =
(evm.bytecode.as_ref(), evm.deployed_bytecode.as_ref())
else {
continue;
};
contracts.insert(name.clone(), (bytecode.clone(), deployed_bytecode.clone()));
}
}
}
}
Ok(contracts)
}
/// Builds the Solidity project and returns the standard JSON output.
pub fn build_solidity_and_detect_missing_libraries(
sources: BTreeMap<String, String>,
libraries: BTreeMap<String, BTreeMap<String, String>>,
) -> anyhow::Result<SolcStandardJsonOutput> {
check_dependencies();
inkwell::support::enable_llvm_pretty_stack_trace();
revive_llvm_context::initialize_llvm(
revive_llvm_context::Target::PVM,
crate::DEFAULT_EXECUTABLE_NAME,
&[],
);
let _ = crate::process::native_process::EXECUTABLE
.set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
let solc_version = solc.version()?;
let input = SolcStandardJsonInput::try_from_sources(
None,
sources.clone(),
libraries.clone(),
None,
SolcStandardJsonInputSettingsSelection::new_required_for_tests(),
SolcStandardJsonInputSettingsOptimizer::new(true, None, &solc_version.default, false),
None,
None,
None,
)?;
let mut output = solc.standard_json(input, None, vec![], None)?;
let project = Project::try_from_standard_json_output(
&output,
sources,
libraries,
&solc_version,
&DEBUG_CONFIG,
)?;
let missing_libraries = project.get_missing_libraries();
missing_libraries.write_to_standard_json(&mut output, &solc.version()?)?;
Ok(output)
}
/// Checks if the Yul project can be built without errors.
pub fn build_yul(source_code: &str) -> anyhow::Result<()> {
check_dependencies();
inkwell::support::enable_llvm_pretty_stack_trace();
revive_llvm_context::initialize_llvm(
revive_llvm_context::Target::PVM,
crate::DEFAULT_EXECUTABLE_NAME,
&[],
);
let optimizer_settings = revive_llvm_context::OptimizerSettings::none();
let project = Project::try_from_yul_string::<SolcCompiler>(
PathBuf::from("test.yul").as_path(),
source_code,
None,
)?;
let _build = project.compile(
optimizer_settings,
false,
DEBUG_CONFIG,
Default::default(),
Default::default(),
)?;
Ok(())
}
/// Checks if the built Solidity project contains the given warning.
pub fn check_solidity_warning(
source_code: &str,
warning_substring: &str,
libraries: BTreeMap<String, BTreeMap<String, String>>,
skip_for_revive_edition: bool,
suppressed_warnings: Option<Vec<Warning>>,
) -> anyhow::Result<bool> {
check_dependencies();
let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
let solc_version = solc.version()?;
if skip_for_revive_edition && solc_version.l2_revision.is_some() {
return Ok(true);
}
let mut sources = BTreeMap::new();
sources.insert("test.sol".to_string(), source_code.to_string());
let input = SolcStandardJsonInput::try_from_sources(
None,
sources.clone(),
libraries,
None,
SolcStandardJsonInputSettingsSelection::new_required_for_tests(),
SolcStandardJsonInputSettingsOptimizer::new(true, None, &solc_version.default, false),
None,
suppressed_warnings,
None,
)?;
let output = solc.standard_json(input, None, vec![], None)?;
let contains_warning = output
.errors
.ok_or_else(|| anyhow::anyhow!("Solidity compiler messages not found"))?
.iter()
.any(|error| error.formatted_message.contains(warning_substring));
Ok(contains_warning)
}
/// Compile the blob of `contract_name` found in given `source_code`.
/// The `solc` optimizer will be enabled
pub fn compile_blob(contract_name: &str, source_code: &str) -> Vec<u8> {
compile_blob_with_options(
contract_name,
source_code,
true,
OptimizerSettings::cycles(),
)
}
/// Compile the EVM bin-runtime of `contract_name` found in given `source_code`.
/// The `solc` optimizer will be enabled
pub fn compile_evm_bin_runtime(contract_name: &str, source_code: &str) -> Vec<u8> {
compile_evm(contract_name, source_code, true, true)
}
/// Compile the EVM bin of `contract_name` found in given `source_code`.
/// The `solc` optimizer will be enabled
pub fn compile_evm_deploy_code(
contract_name: &str,
source_code: &str,
solc_optimizer_enabled: bool,
) -> Vec<u8> {
compile_evm(contract_name, source_code, solc_optimizer_enabled, false)
}
/// The internal EVM bytecode compile helper.
fn compile_evm(
contract_name: &str,
source_code: &str,
@@ -353,9 +441,12 @@ fn compile_evm(
let file_name = "contract.sol";
let contracts = build_solidity_with_options_evm(
[(file_name.into(), source_code.into())].into(),
BTreeMap::from([(
file_name.into(),
SolcStandardJsonInputSource::from(source_code.to_owned()),
)]),
Default::default(),
Default::default(),
None,
solc_optimizer_enabled,
)
.expect("source should compile");
@@ -373,49 +464,3 @@ fn compile_evm(
blob
}
/// Compile the blob of `contract_name` found in given `source_code`.
pub fn compile_blob_with_options(
contract_name: &str,
source_code: &str,
solc_optimizer_enabled: bool,
optimizer_settings: revive_llvm_context::OptimizerSettings,
) -> Vec<u8> {
let id = CachedBlob {
contract_name: contract_name.to_owned(),
solidity: source_code.to_owned(),
solc_optimizer_enabled,
opt: optimizer_settings.middle_end_as_string(),
};
if let Some(blob) = PVM_BLOB_CACHE.lock().unwrap().get(&id) {
return blob.clone();
}
let file_name = "contract.sol";
let contracts = build_solidity_with_options(
[(file_name.into(), source_code.into())].into(),
Default::default(),
None,
optimizer_settings,
solc_optimizer_enabled,
)
.expect("source should compile")
.contracts
.expect("source should contain at least one contract");
let bytecode = contracts[file_name][contract_name]
.evm
.as_ref()
.expect("source should produce EVM output")
.bytecode
.as_ref()
.expect("source should produce assembly text")
.object
.as_str();
let blob = hex::decode(bytecode).expect("hex encoding should always be valid");
PVM_BLOB_CACHE.lock().unwrap().insert(id, blob.clone());
blob
}
+1 -6
View File
@@ -1,7 +1,5 @@
//! The tests for running resolc with asm option.
#![cfg(test)]
use crate::tests::cli::utils;
const ASM_OPTION: &str = "--asm";
@@ -30,10 +28,7 @@ fn fails_without_input_file() {
utils::assert_command_failure(&resolc_result, "Omitting an input file");
let output = resolc_result.stderr.to_lowercase();
assert!(
output.contains("no input sources specified") || output.contains("compilation aborted"),
"Expected the output to contain a specific error message."
);
assert!(output.contains("no input sources specified"));
let solc_result = utils::execute_solc(arguments);
utils::assert_equal_exit_codes(&solc_result, &resolc_result);
+16 -32
View File
@@ -1,8 +1,6 @@
//! The tests for running resolc with combined JSON option.
#![cfg(test)]
use revive_common;
use revive_solc_json_interface::CombinedJsonInvalidSelectorMessage;
use crate::tests::cli::utils;
@@ -53,10 +51,9 @@ fn fails_with_invalid_json_argument() {
let resolc_result = utils::execute_resolc(arguments);
utils::assert_command_failure(&resolc_result, "Providing an invalid json argument");
assert!(
resolc_result.stdout.contains("Invalid option"),
"Expected the output to contain a specific error message."
);
assert!(resolc_result
.stderr
.contains(CombinedJsonInvalidSelectorMessage));
let solc_result = utils::execute_solc(arguments);
utils::assert_equal_exit_codes(&solc_result, &resolc_result);
@@ -73,16 +70,12 @@ fn fails_with_multiple_json_arguments() {
let resolc_result = utils::execute_resolc(arguments);
utils::assert_command_failure(&resolc_result, "Providing multiple json arguments");
assert!(
resolc_result
.stderr
.contains("reading error: No such file or directory"),
"Expected the output to contain a specific error message."
);
assert!(resolc_result
.stderr
.contains(&format!("Error: \"{}\" is not found.", JSON_ARGUMENTS[1])),);
// FIX: Resolc exit code == 101
// let solc_result = utils::execute_solc(arguments);
// utils::assert_equal_exit_codes(&solc_result, &resolc_result);
let solc_result = utils::execute_solc(arguments);
utils::assert_equal_exit_codes(&solc_result, &resolc_result);
}
#[test]
@@ -91,12 +84,9 @@ fn fails_without_json_argument() {
let resolc_result = utils::execute_resolc(arguments);
utils::assert_command_failure(&resolc_result, "Omitting a JSON argument");
assert!(
resolc_result.stderr.contains(
"a value is required for '--combined-json <COMBINED_JSON>' but none was supplied"
),
"Expected the output to contain a specific error message."
);
assert!(resolc_result.stderr.contains(
"a value is required for '--combined-json <COMBINED_JSON>' but none was supplied"
));
let solc_result = utils::execute_solc(arguments);
utils::assert_equal_exit_codes(&solc_result, &resolc_result);
@@ -108,10 +98,7 @@ fn fails_without_solidity_input_file() {
let resolc_result = utils::execute_resolc(arguments);
utils::assert_command_failure(&resolc_result, "Omitting a Solidity input file");
assert!(
resolc_result.stderr.contains("No input sources specified"),
"Expected the output to contain a specific error message."
);
assert!(resolc_result.stderr.contains("Error: No input files given"),);
let solc_result = utils::execute_solc(arguments);
utils::assert_equal_exit_codes(&solc_result, &resolc_result);
@@ -124,12 +111,9 @@ fn fails_with_yul_input_file() {
let resolc_result = utils::execute_resolc(arguments);
utils::assert_command_failure(&resolc_result, "Providing a Yul input file");
assert!(
resolc_result
.stderr
.contains("ParserError: Expected identifier"),
"Expected the output to contain a specific error message."
);
assert!(resolc_result
.stderr
.contains("Error: Expected identifier but got 'StringLiteral'"));
let solc_result = utils::execute_solc(arguments);
utils::assert_equal_exit_codes(&solc_result, &resolc_result);
+64
View File
@@ -0,0 +1,64 @@
use crate::tests::cli::utils::{assert_command_success, execute_resolc, DEPENDENCY_CONTRACT_PATH};
/// Test deploy time linking a contract with unresolved factory dependencies.
#[test]
fn deploy_time_linking_works() {
let temp_dir = tempfile::TempDir::new().unwrap();
let output_directory = temp_dir.path().to_path_buf();
let source_path = temp_dir.path().to_path_buf().join("dependency.sol");
std::fs::copy(DEPENDENCY_CONTRACT_PATH, &source_path).unwrap();
assert_command_success(
&execute_resolc(&[
source_path.to_str().unwrap(),
"--bin",
"-o",
&output_directory.to_string_lossy(),
]),
"Missing libraries should compile fine",
);
let dependency_blob_path = temp_dir
.path()
.to_path_buf()
.join("dependency.sol:Dependency.pvm");
let blob_path = temp_dir
.path()
.to_path_buf()
.join("dependency.sol:TestAssert.pvm");
let output = execute_resolc(&[
"--link",
blob_path.to_str().unwrap(),
dependency_blob_path.to_str().unwrap(),
]);
assert_command_success(&output, "The linker mode with missing library should work");
assert!(output.stdout.contains("still unresolved"));
let assert_library_path = format!(
"{}:Assert=0x0000000000000000000000000000000000000001",
source_path.to_str().unwrap()
);
let assert_ne_library_path = format!(
"{}:AssertNe=0x0000000000000000000000000000000000000002",
source_path.to_str().unwrap()
);
let output = execute_resolc(&[
"--link",
"--libraries",
&assert_library_path,
"--libraries",
&assert_ne_library_path,
blob_path.to_str().unwrap(),
dependency_blob_path.to_str().unwrap(),
]);
assert_command_success(&output, "The linker mode with all library should work");
assert!(!output.stdout.contains("still unresolved"));
}
#[test]
fn emits_unlinked_binary_warning() {
let output = execute_resolc(&[DEPENDENCY_CONTRACT_PATH, "--bin"]);
assert_command_success(&output, "Missing libraries should compile fine");
assert!(output.stderr.contains("is unlinked"));
}
@@ -0,0 +1,15 @@
use crate::tests::cli::utils::{
assert_command_success, execute_resolc, RESOLC_YUL_FLAG, YUL_CONTRACT_PATH,
};
#[test]
fn llvm_arguments_work_with_yul_input() {
let output_with_argument = execute_resolc(&[
RESOLC_YUL_FLAG,
YUL_CONTRACT_PATH,
"--llvm-arg=-riscv-soften-spills'",
"--bin",
]);
assert_command_success(&output_with_argument, "Providing LLVM arguments");
assert!(output_with_argument.success);
}
+3 -3
View File
@@ -1,9 +1,9 @@
//! The CLI tests.
#![cfg(test)]
//! The `resolc` CLI tests.
mod asm;
mod combined_json;
mod linker;
mod llvm_arguments;
mod optimization;
mod output_dir;
mod standard_json;
+26 -21
View File
@@ -1,10 +1,9 @@
//! The tests for running resolc with explicit optimization.
#![cfg(test)]
use revive_common;
use crate::tests::cli::{utils, yul};
use crate::tests::cli::utils::{
self, assert_command_failure, assert_command_success, assert_equal_exit_codes, execute_resolc,
execute_solc, RESOLC_YUL_FLAG, SOLIDITY_CONTRACT_PATH, YUL_MEMSET_CONTRACT_PATH,
};
const LEVELS: &[char] = &['0', '1', '2', '3', 's', 'z'];
@@ -12,11 +11,7 @@ const LEVELS: &[char] = &['0', '1', '2', '3', 's', 'z'];
fn runs_with_valid_level() {
for level in LEVELS {
let optimization_argument = format!("-O{level}");
let arguments = &[
utils::YUL_MEMSET_CONTRACT_PATH,
yul::YUL_OPTION,
&optimization_argument,
];
let arguments = &[YUL_MEMSET_CONTRACT_PATH, "--yul", &optimization_argument];
let resolc_result = utils::execute_resolc(arguments);
assert!(
resolc_result.success,
@@ -37,17 +32,27 @@ fn runs_with_valid_level() {
#[test]
fn fails_with_invalid_level() {
let arguments = &[utils::YUL_MEMSET_CONTRACT_PATH, yul::YUL_OPTION, "-O9"];
let resolc_result = utils::execute_resolc(arguments);
utils::assert_command_failure(&resolc_result, "Providing an invalid optimization level");
let arguments = &[YUL_MEMSET_CONTRACT_PATH, RESOLC_YUL_FLAG, "-O9"];
let resolc_result = execute_resolc(arguments);
assert_command_failure(&resolc_result, "Providing an invalid optimization level");
assert!(
resolc_result
.stderr
.contains("Unexpected optimization option"),
"Expected the output to contain a specific error message."
);
assert!(resolc_result
.stderr
.contains("Unexpected optimization option"));
let solc_result = utils::execute_solc(arguments);
utils::assert_equal_exit_codes(&solc_result, &resolc_result);
let solc_result = execute_solc(arguments);
assert_equal_exit_codes(&solc_result, &resolc_result);
}
/// Compiling with and without `--disable-solc-optimizer` must both
/// succeed, and the two runs must produce different bytecode.
///
/// NOTE(review): the function name misspells "optimizer"; kept as-is to
/// avoid renaming the test.
#[test]
fn disable_solc_optimzer() {
    let arguments = &[SOLIDITY_CONTRACT_PATH, "--bin", "--disable-solc-optimizer"];
    let disabled = execute_resolc(arguments);
    assert_command_success(&disabled, "Disabling the solc optimizer");
    let arguments = &[SOLIDITY_CONTRACT_PATH, "--bin"];
    let enabled = execute_resolc(arguments);
    // Fix: the original asserted on `disabled` again here, so a failure
    // of the optimizer-enabled run was never reported with this message.
    assert_command_success(&enabled, "Enabling the solc optimizer");
    assert_ne!(
        enabled.stdout, disabled.stdout,
        "Expected optimized and unoptimized output to differ."
    );
}
+35 -42
View File
@@ -1,52 +1,41 @@
//! The tests for running resolc with output directory option.
#![cfg(test)]
use std::path::Path;
use tempfile::tempdir;
use crate::tests::cli::utils;
const OUTPUT_DIRECTORY: &str = "src/tests/cli/artifacts";
const OUTPUT_BIN_FILE_PATH: &str = "src/tests/cli/artifacts/contract.sol:C.pvm";
const OUTPUT_ASM_FILE_PATH: &str = "src/tests/cli/artifacts/contract.sol:C.pvmasm";
const OUTPUT_LLVM_OPTIMIZED_FILE_PATH: &str =
"src/tests/cli/artifacts/src_tests_cli_contracts_solidity_contract.sol.C.optimized.ll";
const OUTPUT_BIN_FILE_PATH: &str = "contract.sol:C.pvm";
const OUTPUT_ASM_FILE_PATH: &str = "contract.sol:C.pvmasm";
const OUTPUT_LLVM_OPTIMIZED_FILE_PATH: &str = "src_tests_data_solidity_contract.sol.C.optimized.ll";
const OUTPUT_LLVM_UNOPTIMIZED_FILE_PATH: &str =
"src/tests/cli/artifacts/src_tests_cli_contracts_solidity_contract.sol.C.unoptimized.ll";
/// Return whether a filesystem entry exists at `path`.
///
/// Panics when existence cannot be determined (e.g. permission errors).
fn file_exists(path: &str) -> bool {
    let candidate = Path::new(path);
    candidate.try_exists().unwrap()
}
/// Return whether the file at `path` has a length of zero bytes.
///
/// Panics when the file metadata cannot be read (e.g. missing file).
fn file_is_empty(path: &str) -> bool {
    let size = Path::new(path).metadata().unwrap().len();
    size == 0
}
"src_tests_data_solidity_contract.sol.C.unoptimized.ll";
fn assert_valid_output_file(
result: &utils::CommandResult,
output_file_type: &str,
output_file_path: &str,
debug_output_directory: &Path,
output_file_name: &str,
) {
utils::assert_command_success(result, "Providing an output directory");
assert!(
result.stderr.contains("Compiler run successful"),
"Expected the compiler output to contain a success message.",
);
assert!(result.stderr.contains("Compiler run successful"),);
assert!(
file_exists(output_file_path),
"Expected the {output_file_type} output file `{output_file_path}` to exist."
);
let file = debug_output_directory.to_path_buf().join(output_file_name);
assert!(
!file_is_empty(output_file_path),
"Expected the {output_file_type} output file `{output_file_path}` to not be empty."
assert!(file.exists(), "Artifact should exist: {}", file.display());
assert_ne!(
file.metadata().unwrap().len(),
0,
"Artifact shouldn't be empty: {}",
file.display()
);
}
#[test]
fn writes_to_file() {
let temp_dir = tempdir().unwrap();
let arguments = &[
utils::SOLIDITY_CONTRACT_PATH,
"--overwrite",
@@ -54,15 +43,16 @@ fn writes_to_file() {
"--bin",
"--asm",
"--output-dir",
OUTPUT_DIRECTORY,
temp_dir.path().to_str().unwrap(),
];
let result = utils::execute_resolc(arguments);
assert_valid_output_file(&result, "--bin", OUTPUT_BIN_FILE_PATH);
assert_valid_output_file(&result, "--asm", OUTPUT_ASM_FILE_PATH);
assert_valid_output_file(&result, temp_dir.path(), OUTPUT_BIN_FILE_PATH);
assert_valid_output_file(&result, temp_dir.path(), OUTPUT_ASM_FILE_PATH);
}
#[test]
fn writes_debug_info_to_file_unoptimized() {
let temp_dir = tempdir().unwrap();
let arguments = &[
utils::SOLIDITY_CONTRACT_PATH,
"-g",
@@ -71,15 +61,16 @@ fn writes_debug_info_to_file_unoptimized() {
"--bin",
"--asm",
"--output-dir",
OUTPUT_DIRECTORY,
temp_dir.path().to_str().unwrap(),
];
let result = utils::execute_resolc(arguments);
assert_valid_output_file(&result, "--bin", OUTPUT_BIN_FILE_PATH);
assert_valid_output_file(&result, "--asm", OUTPUT_ASM_FILE_PATH);
assert_valid_output_file(&result, temp_dir.path(), OUTPUT_BIN_FILE_PATH);
assert_valid_output_file(&result, temp_dir.path(), OUTPUT_ASM_FILE_PATH);
}
#[test]
fn writes_debug_info_to_file_optimized() {
let temp_dir = tempdir().unwrap();
let arguments = &[
utils::SOLIDITY_CONTRACT_PATH,
"-g",
@@ -87,36 +78,38 @@ fn writes_debug_info_to_file_optimized() {
"--bin",
"--asm",
"--output-dir",
OUTPUT_DIRECTORY,
temp_dir.path().to_str().unwrap(),
];
let result = utils::execute_resolc(arguments);
assert_valid_output_file(&result, "--bin", OUTPUT_BIN_FILE_PATH);
assert_valid_output_file(&result, "--asm", OUTPUT_ASM_FILE_PATH);
assert_valid_output_file(&result, temp_dir.path(), OUTPUT_BIN_FILE_PATH);
assert_valid_output_file(&result, temp_dir.path(), OUTPUT_ASM_FILE_PATH);
}
#[test]
fn writes_llvm_debug_info_to_file_unoptimized() {
let temp_dir = tempdir().unwrap();
let arguments = &[
utils::SOLIDITY_CONTRACT_PATH,
"-g",
"--disable-solc-optimizer",
"--overwrite",
"--debug-output-dir",
OUTPUT_DIRECTORY,
temp_dir.path().to_str().unwrap(),
];
let result = utils::execute_resolc(arguments);
assert_valid_output_file(&result, "llvm", OUTPUT_LLVM_UNOPTIMIZED_FILE_PATH);
assert_valid_output_file(&result, temp_dir.path(), OUTPUT_LLVM_UNOPTIMIZED_FILE_PATH);
}
#[test]
fn writes_llvm_debug_info_to_file_optimized() {
let temp_dir = tempdir().unwrap();
let arguments = &[
utils::SOLIDITY_CONTRACT_PATH,
"-g",
"--overwrite",
"--debug-output-dir",
OUTPUT_DIRECTORY,
temp_dir.path().to_str().unwrap(),
];
let result = utils::execute_resolc(arguments);
assert_valid_output_file(&result, "llvm", OUTPUT_LLVM_OPTIMIZED_FILE_PATH);
assert_valid_output_file(&result, temp_dir.path(), OUTPUT_LLVM_OPTIMIZED_FILE_PATH);
}
+8 -9
View File
@@ -1,24 +1,23 @@
//! The tests for running resolc with standard JSON option.
#![cfg(test)]
use crate::tests::cli::utils;
use crate::tests::cli::utils::{
assert_command_success, assert_equal_exit_codes, execute_resolc_with_stdin_input,
execute_solc_with_stdin_input, STANDARD_JSON_CONTRACTS_PATH,
};
const JSON_OPTION: &str = "--standard-json";
#[test]
fn runs_with_valid_input_file() {
let arguments = &[JSON_OPTION];
let resolc_result =
utils::execute_resolc_with_stdin_input(arguments, utils::STANDARD_JSON_CONTRACTS_PATH);
utils::assert_command_success(&resolc_result, "Providing a valid input file to stdin");
let resolc_result = execute_resolc_with_stdin_input(arguments, STANDARD_JSON_CONTRACTS_PATH);
assert_command_success(&resolc_result, "Providing a valid input file to stdin");
assert!(
resolc_result.stdout.contains("contracts"),
"Expected the output to contain a `contracts` field."
);
let solc_result =
utils::execute_solc_with_stdin_input(arguments, utils::STANDARD_JSON_CONTRACTS_PATH);
utils::assert_equal_exit_codes(&solc_result, &resolc_result);
let solc_result = execute_solc_with_stdin_input(arguments, STANDARD_JSON_CONTRACTS_PATH);
assert_equal_exit_codes(&solc_result, &resolc_result);
}
+2 -11
View File
@@ -1,20 +1,14 @@
//! The tests for running resolc when expecting usage output.
#![cfg(test)]
use crate::tests::cli::utils;
#[test]
#[ignore = "Fix: 'resolc --help' should exit with success exit code"]
fn shows_usage_with_help() {
let arguments = &["--help"];
let resolc_result = utils::execute_resolc(arguments);
utils::assert_command_success(&resolc_result, "Providing the `--help` option");
assert!(
resolc_result.stdout.contains("Usage: resolc"),
"Expected the output to contain usage information."
);
assert!(resolc_result.stdout.contains("Usage: resolc"));
let solc_result = utils::execute_solc(arguments);
utils::assert_equal_exit_codes(&solc_result, &resolc_result);
@@ -25,10 +19,7 @@ fn fails_without_options() {
let resolc_result = utils::execute_resolc(&[]);
utils::assert_command_failure(&resolc_result, "Omitting options");
assert!(
resolc_result.stderr.contains("Usage: resolc"),
"Expected the output to contain usage information."
);
assert!(resolc_result.stderr.contains("Usage: resolc"));
let solc_result = utils::execute_solc(&[]);
utils::assert_equal_exit_codes(&solc_result, &resolc_result);
+28 -10
View File
@@ -5,15 +5,28 @@ use std::{
process::{Command, Stdio},
};
use revive_common;
use crate::SolcCompiler;
pub const SOLIDITY_CONTRACT_PATH: &str = "src/tests/cli/contracts/solidity/contract.sol";
pub const YUL_CONTRACT_PATH: &str = "src/tests/cli/contracts/yul/contract.yul";
pub const YUL_MEMSET_CONTRACT_PATH: &str = "src/tests/cli/contracts/yul/memset.yul";
/// The simple Solidity contract test fixture path.
pub const SOLIDITY_CONTRACT_PATH: &str = "src/tests/data/solidity/contract.sol";
/// The dependency Solidity contract test fixture path.
pub const DEPENDENCY_CONTRACT_PATH: &str = "src/tests/data/solidity/dependency.sol";
/// The simple YUL contract test fixture path.
pub const YUL_CONTRACT_PATH: &str = "src/tests/data/yul/contract.yul";
/// The memset YUL contract test fixture path.
pub const YUL_MEMSET_CONTRACT_PATH: &str = "src/tests/data/yul/memset.yul";
/// The standard JSON contracts test fixture path.
pub const STANDARD_JSON_CONTRACTS_PATH: &str =
"src/tests/cli/contracts/standard_json/solidity_contracts.json";
"src/tests/data/standard_json/solidity_contracts.json";
/// The `resolc` YUL mode flag.
pub const RESOLC_YUL_FLAG: &str = "--yul";
/// The `--yul` option was deprecated in Solidity 0.8.27 in favor of `--strict-assembly`.
/// See section `--strict-assembly vs. --yul` in https://soliditylang.org/blog/2024/09/04/solidity-0.8.27-release-announcement/
pub const SOLC_YUL_FLAG: &str = "--strict-assembly";
/// The result of executing a command.
pub struct CommandResult {
@@ -52,6 +65,14 @@ fn execute_command(
arguments: &[&str],
stdin_file_path: Option<&str>,
) -> CommandResult {
println!(
"executing command: '{command} {}{}'",
arguments.join(" "),
stdin_file_path
.map(|argument| format!("< {argument}"))
.unwrap_or_default()
);
let stdin_config = match stdin_file_path {
Some(path) => Stdio::from(File::open(path).unwrap()),
None => Stdio::null(),
@@ -73,10 +94,7 @@ fn execute_command(
}
/// Assert that `solc` and `resolc` terminated with the same exit code.
pub fn assert_equal_exit_codes(solc_result: &CommandResult, resolc_result: &CommandResult) {
    // Fix: the body contained the same assertion twice (once without a
    // message and with a dangling trailing comma); keep the messaged one.
    assert_eq!(
        solc_result.code, resolc_result.code,
        "Expected solc and resolc to have the same exit code."
    );
}
pub fn assert_command_success(result: &CommandResult, error_message_prefix: &str) {
+21 -33
View File
@@ -1,44 +1,32 @@
//! The tests for running resolc with yul option.
#![cfg(test)]
use crate::tests::cli::utils;
pub const YUL_OPTION: &str = "--yul";
/// The `--yul` option was deprecated in Solidity 0.8.27 in favor of `--strict-assembly`.
/// See section `--strict-assembly vs. --yul` in https://soliditylang.org/blog/2024/09/04/solidity-0.8.27-release-announcement/
const SOLC_YUL_OPTION: &str = "--strict-assembly";
use crate::tests::cli::utils::{
assert_command_success, assert_equal_exit_codes, execute_resolc, execute_solc, RESOLC_YUL_FLAG,
SOLC_YUL_FLAG, YUL_CONTRACT_PATH,
};
#[test]
fn runs_with_valid_input_file() {
let arguments = &[utils::YUL_CONTRACT_PATH, YUL_OPTION];
let resolc_result = utils::execute_resolc(arguments);
utils::assert_command_success(&resolc_result, "Providing a valid input file");
let resolc_result = execute_resolc(&[YUL_CONTRACT_PATH, RESOLC_YUL_FLAG]);
assert_command_success(&resolc_result, "Providing a valid input file");
assert!(
resolc_result
.stderr
.contains("Compiler run successful. No output requested"),
"Expected the output to contain a success message."
);
assert!(resolc_result
.stderr
.contains("Compiler run successful. No output requested"));
let solc_arguments = &[utils::YUL_CONTRACT_PATH, SOLC_YUL_OPTION];
let solc_result = utils::execute_solc(solc_arguments);
utils::assert_equal_exit_codes(&solc_result, &resolc_result);
let solc_result = execute_solc(&[YUL_CONTRACT_PATH, SOLC_YUL_FLAG]);
assert_equal_exit_codes(&solc_result, &resolc_result);
}
/// While the `solc` Solidity mode requires output selection,
/// the strict-assembly mode does not.
///
/// `resolc` exhibits consistent behavior for both modes.
#[test]
fn fails_without_input_file() {
let arguments = &[YUL_OPTION];
let resolc_result = utils::execute_resolc(arguments);
utils::assert_command_failure(&resolc_result, "Omitting an input file");
assert!(
resolc_result.stderr.contains("The input file is missing"),
"Expected the output to contain a specific error message."
);
let solc_arguments = &[SOLC_YUL_OPTION];
let solc_result = utils::execute_solc(solc_arguments);
utils::assert_equal_exit_codes(&solc_result, &resolc_result);
fn runs_without_input_file() {
let resolc_result = execute_resolc(&[RESOLC_YUL_FLAG]);
assert_command_success(&resolc_result, "Omitting an input file");
assert!(resolc_result
.stderr
.contains("Compiler run successful. No output requested"));
}

Some files were not shown because too many files have changed in this diff Show More