Merge pull request #111 from smiasojed/resolc.js

Add compilation to NodeJS module
This commit is contained in:
Sebastian Miasojed
2024-11-26 14:44:44 +01:00
committed by GitHub
29 changed files with 1324 additions and 445 deletions
+80
View File
@@ -0,0 +1,80 @@
# CI workflow: cross-compile the revive compiler to WebAssembly (resolc.js)
# with Emscripten, caching the LLVM-for-Emscripten build between runs.
name: Build revive-wasm
on:
  push:
    branches: ["main"]
  pull_request:
    branches: ["main"]
  workflow_dispatch:
env:
  CARGO_TERM_COLOR: always
  # Output directory produced by `make install-wasm`; consumed by the
  # upload-artifact step below.
  REVIVE_WASM_INSTALL_DIR: ${{ github.workspace }}/js/dist/revive-cjs
  EMSCRIPTEN_VERSION: 3.1.64
jobs:
  build-revive-wasm:
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    steps:
      - uses: actions/checkout@v4
      - name: Cache LLVM build
        id: cache-llvm
        uses: actions/cache@v3
        with:
          path: |
            llvm18.0-emscripten
          # Use a unique key based on LLVM version or configuration files to avoid cache invalidation
          key: llvm-build-${{ runner.os }}-${{ hashFiles('clone-llvm.sh', 'emscripten-build-llvm.sh') }}
      - name: Install Dependencies
        run: |
          sudo apt-get update && sudo apt-get install -y cmake ninja-build libncurses5
          rustup target add wasm32-unknown-emscripten
          # Install LLVM required for the compiler runtime, runtime-api and stdlib
          curl -sSL --output llvm.tar.xz https://github.com/llvm/llvm-project/releases/download/llvmorg-18.1.4/clang+llvm-18.1.4-x86_64-linux-gnu-ubuntu-18.04.tar.xz
          tar Jxf llvm.tar.xz
          mv clang+llvm-18.1.4-x86_64-linux-gnu-ubuntu-18.04 llvm18/
          echo "$(pwd)/llvm18/bin" >> $GITHUB_PATH
          # Install Emscripten
          git clone https://github.com/emscripten-core/emsdk.git
          cd emsdk
          ./emsdk install ${{ env.EMSCRIPTEN_VERSION }}
          ./emsdk activate ${{ env.EMSCRIPTEN_VERSION }}
      # Diagnostic step: print toolchain versions to simplify debugging CI failures.
      - run: |
          rustup show
          cargo --version
          rustup +nightly show
          cargo +nightly --version
          cmake --version
          bash --version
          llvm-config --version
      # Only rebuild LLVM for Emscripten when the cache missed.
      - name: Build LLVM
        if: steps.cache-llvm.outputs.cache-hit != 'true'
        run: |
          export EMSDK_ROOT=${PWD}/emsdk
          ./emscripten-build-llvm.sh
      - name: Use Cached LLVM
        if: steps.cache-llvm.outputs.cache-hit == 'true'
        run: |
          echo "Using cached LLVM"
      - name: Build revive
        run: |
          export LLVM_LINK_PREFIX=${PWD}/llvm18.0-emscripten
          source ./emsdk/emsdk_env.sh
          make install-wasm
      - uses: actions/upload-artifact@v4
        with:
          name: revive-wasm
          path: |
            ${{ env.REVIVE_WASM_INSTALL_DIR }}/resolc.js
            ${{ env.REVIVE_WASM_INSTALL_DIR }}/worker.js
            ${{ env.REVIVE_WASM_INSTALL_DIR }}/resolc.wasm
          # Build outputs only; keep briefly to save artifact storage.
          retention-days: 1
+3
View File
@@ -8,9 +8,12 @@
/*.s
/llvm-project
/llvm18.0
/llvm18.0-emscripten
node_modules
artifacts
tmp
package-lock.json
/*.html
/js/src/resolc.*
/js/dist/
/build
Generated
+1
View File
@@ -8135,6 +8135,7 @@ dependencies = [
"colored",
"hex",
"inkwell",
"libc",
"md5",
"mimalloc",
"num",
+22 -1
View File
@@ -1,5 +1,19 @@
.PHONY: install format test test-solidity test-cli test-integration test-workspace clean docs docs-build
RUSTFLAGS_EMSCRIPTEN := \
-Clink-arg=-sEXPORTED_FUNCTIONS=_main,_free,_malloc \
-Clink-arg=-sNO_INVOKE_RUN \
-Clink-arg=-sEXIT_RUNTIME \
-Clink-arg=-sINITIAL_MEMORY=64MB \
-Clink-arg=-sTOTAL_MEMORY=3GB \
-Clink-arg=-sALLOW_MEMORY_GROWTH \
-Clink-arg=-sEXPORTED_RUNTIME_METHODS=FS,callMain,stringToNewUTF8,cwrap \
-Clink-arg=-sMODULARIZE \
-Clink-arg=-sEXPORT_ES6 \
-Clink-arg=-sEXPORT_NAME=createRevive \
-Clink-arg=--js-library=js/embed/soljson_interface.js \
-Clink-arg=--pre-js=js/embed/pre.js
install: install-bin install-npm
install-bin:
@@ -8,6 +22,11 @@ install-bin:
install-npm:
npm install && npm fund
install-wasm:
RUSTFLAGS='$(RUSTFLAGS_EMSCRIPTEN)' cargo build --target wasm32-unknown-emscripten -p revive-solidity --release --no-default-features
npm install
npm run build:revive
# install-revive: Build and install to the directory specified in REVIVE_INSTALL_DIR
ifeq ($(origin REVIVE_INSTALL_DIR), undefined)
REVIVE_INSTALL_DIR=`pwd`/release/revive-debian
@@ -58,4 +77,6 @@ clean:
rm -rf node_modules ; \
rm -rf crates/solidity/src/tests/cli-tests/artifacts ; \
cargo uninstall revive-solidity ; \
rm -f package-lock.json
rm -f package-lock.json ; \
rm -rf js/dist ; \
rm -f js/src/resolc.{wasm,js}
+29 -2
View File
@@ -28,7 +28,25 @@ resolc --version
### LLVM
`revive` requires a build of LLVM 18.1.4 or later including `compiler-rt`. Use the provided [build-llvm.sh](build-llvm.sh) build script to compile a compatible LLVM build locally in `$PWD/llvm18.0` (don't forget to add that to `$PATH` afterwards).
`revive` requires a build of LLVM 18.1.4 or later including `compiler-rt`. Use the provided [build-llvm.sh](build-llvm.sh) build script to compile a compatible LLVM build locally in `$PWD/llvm18.0` (don't forget to add that to `$PATH` afterwards).
### Cross-compilation to WASM
Cross-compiles the Revive compiler to WASM for running it in a Node.js or browser environment.
Install [emscripten](https://emscripten.org/docs/getting_started/downloads.html). Tested on version 3.1.64.
To build resolc.js execute:
```bash
bash build-llvm.sh
export PATH=${PWD}/llvm18.0/bin:$PATH
export EMSDK_ROOT=<PATH_TO_EMSCRIPTEN_SDK>
bash emscripten-build-llvm.sh
source $EMSDK_ROOT/emsdk_env.sh
export LLVM_LINK_PREFIX=${PWD}/llvm18.0-emscripten
export PATH=$PATH:$PWD/llvm18.0-emscripten/bin/
make install-wasm
```
### Development
@@ -36,6 +54,15 @@ Please consult the [Makefile](Makefile) targets to learn how to run tests and be
Ensure that your branch passes `make test` locally when submitting a pull request.
## Design overview
`revive` uses [solc](https://github.com/ethereum/solidity/), the Ethereum Solidity compiler, as the [Solidity frontend](crates/solidity/src/lib.rs) to process smart contracts written in Solidity. The YUL IR code (or legacy EVM assembly as a fallback for older `solc` versions) emitted by `solc` is then translated to LLVM IR, targeting [Polkadot's `revive` pallet](https://docs.rs/pallet-revive/latest/pallet_revive/trait.SyscallDoc.html).
`revive` uses [solc](https://github.com/ethereum/solidity/), the Ethereum Solidity compiler, as the [Solidity frontend](crates/solidity/src/lib.rs) to process smart contracts written in Solidity. The YUL IR code (or legacy EVM assembly as a fallback for older `solc` versions) emitted by `solc` is then translated to LLVM IR, targeting [Polkadot's `revive` pallet](https://docs.rs/pallet-revive/latest/pallet_revive/trait.SyscallDoc.html).
[Frontend](https://github.com/matter-labs/era-compiler-solidity) and [code generator](https://github.com/matter-labs/era-compiler-llvm-context) are based off ZKSync `zksolc`.
## Tests
Before running the tests, ensure that Geth (Go Ethereum) is installed on your system. Follow the installation guide here: [Installing Geth](https://geth.ethereum.org/docs/getting-started/installing-geth).
Once Geth is installed, you can run the tests using the following command:
```bash
make test
```
+3 -7
View File
@@ -5,17 +5,13 @@ set -euo pipefail
INSTALL_DIR="${PWD}/llvm18.0"
mkdir -p ${INSTALL_DIR}
# Clone LLVM 18 (any revision after commit bd32aaa is supposed to work)
if [ ! -d "llvm-project" ]; then
git clone --depth 1 --branch release/18.x https://github.com/llvm/llvm-project.git
fi
# Build LLVM, clang
LLVM_SRC_PREFIX=${PWD}/llvm-project
LLVM_SRC_DIR=${LLVM_SRC_PREFIX}/llvm
LLVM_BUILD_DIR=${PWD}/build/llvm
./clone-llvm.sh "${LLVM_SRC_PREFIX}"
if [ ! -d ${LLVM_BUILD_DIR} ] ; then
mkdir -p ${LLVM_BUILD_DIR}
fi
Executable
+18
View File
@@ -0,0 +1,18 @@
#!/bin/bash
# Clone the llvm-project repository (branch release/18.x) into a target
# directory.
#
# Usage: clone-llvm.sh [dir]
#   dir - destination directory (defaults to "llvm-project")
set -euo pipefail

# Default directory for cloning the llvm-project repository
DEFAULT_DIR="llvm-project"

# Use the first positional argument when provided, otherwise the default.
# ${1:-...} avoids an unbound-variable error under `set -u` when no argument
# is given, honors $1 even if extra arguments are passed (the original
# silently fell back to the default in that case), and the quoting keeps
# paths containing spaces intact.
DIR="${1:-$DEFAULT_DIR}"

# Clone LLVM 18 (any revision after commit bd32aaa is supposed to work).
# Skip the clone when the destination already exists so the script stays
# idempotent across repeated CI runs.
if [ ! -d "${DIR}" ]; then
    git clone --depth 1 --branch release/18.x https://github.com/llvm/llvm-project.git "${DIR}"
fi
+76 -8
View File
@@ -1,5 +1,19 @@
fn llvm_config(arg: &str) -> String {
let output = std::process::Command::new("llvm-config")
use std::{
env,
path::{Path, PathBuf},
};
const LLVM_LINK_PREFIX: &str = "LLVM_LINK_PREFIX";
fn locate_llvm_config() -> PathBuf {
let prefix = env::var_os(LLVM_LINK_PREFIX)
.map(|p| PathBuf::from(p).join("bin"))
.unwrap_or_default();
prefix.join("llvm-config")
}
fn llvm_config(llvm_config_path: &Path, arg: &str) -> String {
let output = std::process::Command::new(llvm_config_path)
.args([arg])
.output()
.unwrap_or_else(|_| panic!("`llvm-config {arg}` failed"));
@@ -8,8 +22,11 @@ fn llvm_config(arg: &str) -> String {
.unwrap_or_else(|_| panic!("output of `llvm-config {arg}` should be utf8"))
}
fn set_rustc_link_flags() {
println!("cargo:rustc-link-search=native={}", llvm_config("--libdir"));
fn set_rustc_link_flags(llvm_config_path: &Path) {
println!(
"cargo:rustc-link-search=native={}",
llvm_config(llvm_config_path, "--libdir")
);
for lib in [
"lldELF",
@@ -22,19 +39,70 @@ fn set_rustc_link_flags() {
"LLVMTargetParser",
"LLVMBinaryFormat",
"LLVMDemangle",
// The `llvm-sys` crate relies on `llvm-config` to obtain a list of required LLVM libraries
// during the build process. This works well in typical native environments, where `llvm-config`
// can accurately list the necessary libraries.
// However, when cross-compiling to WebAssembly using Emscripten, `llvm-config` fails to recognize
// JavaScript-based libraries, making it necessary to manually inject the required dependencies.
"LLVMRISCVDisassembler",
"LLVMRISCVAsmParser",
"LLVMRISCVCodeGen",
"LLVMRISCVDesc",
"LLVMRISCVInfo",
"LLVMExecutionEngine",
"LLVMOption",
"LLVMMCDisassembler",
"LLVMPasses",
"LLVMHipStdPar",
"LLVMCFGuard",
"LLVMCoroutines",
"LLVMipo",
"LLVMVectorize",
"LLVMInstrumentation",
"LLVMFrontendOpenMP",
"LLVMFrontendOffloading",
"LLVMGlobalISel",
"LLVMAsmPrinter",
"LLVMSelectionDAG",
"LLVMCodeGen",
"LLVMTarget",
"LLVMObjCARCOpts",
"LLVMCodeGenTypes",
"LLVMIRPrinter",
"LLVMScalarOpts",
"LLVMInstCombine",
"LLVMAggressiveInstCombine",
"LLVMTransformUtils",
"LLVMBitWriter",
"LLVMAnalysis",
"LLVMProfileData",
"LLVMDebugInfoDWARF",
"LLVMObject",
"LLVMMCParser",
"LLVMIRReader",
"LLVMAsmParser",
"LLVMMC",
"LLVMDebugInfoCodeView",
"LLVMBitReader",
"LLVMRemarks",
"LLVMBitstreamReader",
] {
println!("cargo:rustc-link-lib=static={lib}");
}
#[cfg(target_os = "linux")]
{
let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap_or_default();
if target_os == "linux" {
println!("cargo:rustc-link-lib=dylib=stdc++");
println!("cargo:rustc-link-lib=tinfo");
}
}
fn main() {
llvm_config("--cxxflags")
println!("cargo:rerun-if-env-changed={}", LLVM_LINK_PREFIX);
let llvm_config_path = locate_llvm_config();
llvm_config(&llvm_config_path, "--cxxflags")
.split_whitespace()
.fold(&mut cc::Build::new(), |builder, flag| builder.flag(flag))
.flag("-Wno-unused-parameter")
@@ -42,7 +110,7 @@ fn main() {
.file("src/linker.cpp")
.compile("liblinker.a");
set_rustc_link_flags();
set_rustc_link_flags(&llvm_config_path);
println!("cargo:rerun-if-changed=build.rs");
}
+9 -2
View File
@@ -24,7 +24,7 @@ thiserror = { workspace = true }
anyhow = { workspace = true }
which = { workspace = true }
path-slash = { workspace = true }
rayon = { workspace = true }
rayon = { workspace = true, optional = true }
serde = { workspace = true }
serde_json = { workspace = true }
@@ -41,6 +41,13 @@ inkwell = { workspace = true }
revive-common = { workspace = true }
revive-llvm-context = { workspace = true }
[target.'cfg(target_env = "musl")'.dependencies]
mimalloc = { version = "*", default-features = false }
[target.'cfg(target_os = "emscripten")'.dependencies]
libc = { workspace = true }
inkwell = { workspace = true, features = ["target-riscv", "llvm18-0-no-llvm-linking"]}
[features]
parallel = ["rayon"]
default = ["parallel"]
+21 -14
View File
@@ -15,15 +15,22 @@ pub use self::build::contract::Contract as ContractBuild;
pub use self::build::Build;
pub use self::missing_libraries::MissingLibraries;
pub use self::process::input::Input as ProcessInput;
#[cfg(not(target_os = "emscripten"))]
pub use self::process::native_process::NativeProcess;
pub use self::process::output::Output as ProcessOutput;
pub use self::process::run as run_process;
pub use self::process::EXECUTABLE;
#[cfg(target_os = "emscripten")]
pub use self::process::worker_process::WorkerProcess;
pub use self::process::Process;
pub use self::project::contract::Contract as ProjectContract;
pub use self::project::Project;
pub use self::r#const::*;
pub use self::solc::combined_json::contract::Contract as SolcCombinedJsonContract;
pub use self::solc::combined_json::CombinedJson as SolcCombinedJson;
pub use self::solc::pipeline::Pipeline as SolcPipeline;
#[cfg(not(target_os = "emscripten"))]
pub use self::solc::solc_compiler::SolcCompiler;
#[cfg(target_os = "emscripten")]
pub use self::solc::soljson_compiler::SoljsonCompiler;
pub use self::solc::standard_json::input::language::Language as SolcStandardJsonInputLanguage;
pub use self::solc::standard_json::input::settings::metadata::Metadata as SolcStandardJsonInputSettingsMetadata;
pub use self::solc::standard_json::input::settings::optimizer::Optimizer as SolcStandardJsonInputSettingsOptimizer;
@@ -38,10 +45,10 @@ pub use self::solc::standard_json::output::contract::evm::EVM as SolcStandardJso
pub use self::solc::standard_json::output::contract::Contract as SolcStandardJsonOutputContract;
pub use self::solc::standard_json::output::Output as SolcStandardJsonOutput;
pub use self::solc::version::Version as SolcVersion;
pub use self::solc::Compiler as SolcCompiler;
pub use self::solc::Compiler;
pub use self::version::Version as ResolcVersion;
pub use self::warning::Warning;
#[cfg(not(target_os = "emscripten"))]
pub mod test_utils;
pub mod tests;
@@ -49,9 +56,9 @@ use std::collections::BTreeSet;
use std::path::PathBuf;
/// Runs the Yul mode.
pub fn yul(
pub fn yul<T: Compiler>(
input_files: &[PathBuf],
solc: &mut SolcCompiler,
solc: &mut T,
optimizer_settings: revive_llvm_context::OptimizerSettings,
include_metadata_hash: bool,
debug_config: revive_llvm_context::DebugConfig,
@@ -65,10 +72,10 @@ pub fn yul(
),
};
if solc.version()?.default != SolcCompiler::LAST_SUPPORTED_VERSION {
if solc.version()?.default != solc::LAST_SUPPORTED_VERSION {
anyhow::bail!(
"The Yul mode is only supported with the most recent version of the Solidity compiler: {}",
SolcCompiler::LAST_SUPPORTED_VERSION,
solc::LAST_SUPPORTED_VERSION,
);
}
@@ -105,10 +112,10 @@ pub fn llvm_ir(
/// Runs the standard output mode.
#[allow(clippy::too_many_arguments)]
pub fn standard_output(
pub fn standard_output<T: Compiler>(
input_files: &[PathBuf],
libraries: Vec<String>,
solc: &mut SolcCompiler,
solc: &mut T,
evm_version: Option<revive_common::EVMVersion>,
solc_optimizer_enabled: bool,
optimizer_settings: revive_llvm_context::OptimizerSettings,
@@ -188,8 +195,8 @@ pub fn standard_output(
/// Runs the standard JSON mode.
#[allow(clippy::too_many_arguments)]
pub fn standard_json(
solc: &mut SolcCompiler,
pub fn standard_json<T: Compiler>(
solc: &mut T,
detect_missing_libraries: bool,
force_evmla: bool,
base_path: Option<String>,
@@ -256,11 +263,11 @@ pub fn standard_json(
/// Runs the combined JSON mode.
#[allow(clippy::too_many_arguments)]
pub fn combined_json(
pub fn combined_json<T: Compiler>(
format: String,
input_files: &[PathBuf],
libraries: Vec<String>,
solc: &mut SolcCompiler,
solc: &mut T,
evm_version: Option<revive_common::EVMVersion>,
solc_optimizer_enabled: bool,
optimizer_settings: revive_llvm_context::OptimizerSettings,
+51 -107
View File
@@ -1,124 +1,68 @@
//! Process for compiling a single compilation unit.
pub mod input;
#[cfg(not(target_os = "emscripten"))]
pub mod native_process;
pub mod output;
#[cfg(target_os = "emscripten")]
pub mod worker_process;
use std::io::Read;
use std::io::Write;
use std::path::PathBuf;
use std::process::Command;
use once_cell::sync::OnceCell;
use std::io::{Read, Write};
use self::input::Input;
use self::output::Output;
/// The overriden executable name used when the compiler is run as a library.
pub static EXECUTABLE: OnceCell<PathBuf> = OnceCell::new();
pub trait Process {
/// Read input from `stdin`, compile a contract, and write the output to `stdout`.
fn run(input_file: Option<&mut std::fs::File>) -> anyhow::Result<()> {
let mut stdin = std::io::stdin();
let mut stdout = std::io::stdout();
let mut stderr = std::io::stderr();
/// Read input from `stdin`, compile a contract, and write the output to `stdout`.
pub fn run(input_file: Option<&mut std::fs::File>) -> anyhow::Result<()> {
let mut stdin = std::io::stdin();
let mut stdout = std::io::stdout();
let mut stderr = std::io::stderr();
let mut buffer = Vec::with_capacity(16384);
match input_file {
Some(ins) => {
if let Err(error) = ins.read_to_end(&mut buffer) {
anyhow::bail!("Failed to read recursive process input file: {:?}", error);
let mut buffer = Vec::with_capacity(16384);
match input_file {
Some(ins) => {
if let Err(error) = ins.read_to_end(&mut buffer) {
anyhow::bail!("Failed to read recursive process input file: {:?}", error);
}
}
None => {
if let Err(error) = stdin.read_to_end(&mut buffer) {
anyhow::bail!(
"Failed to read recursive process input from stdin: {:?}",
error
)
}
}
}
None => {
if let Err(error) = stdin.read_to_end(&mut buffer) {
anyhow::bail!(
"Failed to read recursive process input from stdin: {:?}",
error
)
}
}
}
let input: Input = revive_common::deserialize_from_slice(buffer.as_slice())?;
let result = input.contract.compile(
input.project,
input.optimizer_settings,
input.include_metadata_hash,
input.debug_config,
);
match result {
Ok(build) => {
let output = Output::new(build);
let json = serde_json::to_vec(&output).expect("Always valid");
stdout
.write_all(json.as_slice())
.expect("Stdout writing error");
Ok(())
}
Err(error) => {
let message = error.to_string();
stderr
.write_all(message.as_bytes())
.expect("Stderr writing error");
Err(error)
}
}
}
/// Runs this process recursively to compile a single contract.
pub fn call(input: Input) -> anyhow::Result<Output> {
let input_json = serde_json::to_vec(&input).expect("Always valid");
let executable = match EXECUTABLE.get() {
Some(executable) => executable.to_owned(),
None => std::env::current_exe()?,
};
let mut command = Command::new(executable.as_path());
command.stdin(std::process::Stdio::piped());
command.stdout(std::process::Stdio::piped());
command.stderr(std::process::Stdio::piped());
command.arg("--recursive-process");
let process = command.spawn().map_err(|error| {
anyhow::anyhow!("{:?} subprocess spawning error: {:?}", executable, error)
})?;
#[cfg(debug_assertions)]
input
.debug_config
.dump_stage_output(&input.contract.path, Some("stage"), &input_json)
.map_err(|error| {
anyhow::anyhow!(
"{:?} failed to log the recursive process output: {:?}",
executable,
error,
)
})?;
process
.stdin
.as_ref()
.ok_or_else(|| anyhow::anyhow!("{:?} stdin getting error", executable))?
.write_all(input_json.as_slice())
.map_err(|error| anyhow::anyhow!("{:?} stdin writing error: {:?}", executable, error))?;
let output = process.wait_with_output().map_err(|error| {
anyhow::anyhow!("{:?} subprocess output error: {:?}", executable, error)
})?;
if !output.status.success() {
anyhow::bail!(
"{}",
String::from_utf8_lossy(output.stderr.as_slice()).to_string(),
let input: Input = revive_common::deserialize_from_slice(buffer.as_slice())?;
let result = input.contract.compile(
input.project,
input.optimizer_settings,
input.include_metadata_hash,
input.debug_config,
);
match result {
Ok(build) => {
let output = Output::new(build);
let json = serde_json::to_vec(&output).expect("Always valid");
stdout
.write_all(json.as_slice())
.expect("Stdout writing error");
Ok(())
}
Err(error) => {
let message = error.to_string();
stderr
.write_all(message.as_bytes())
.expect("Stderr writing error");
Err(error)
}
}
}
let output: Output =
revive_common::deserialize_from_slice(output.stdout.as_slice()).map_err(|error| {
anyhow::anyhow!(
"{:?} subprocess output parsing error: {}",
executable,
error,
)
})?;
Ok(output)
/// Runs this process recursively to compile a single contract.
fn call(input: Input) -> anyhow::Result<Output>;
}
@@ -0,0 +1,76 @@
//! Process for compiling a single compilation unit.

use std::io::Write;
use std::path::PathBuf;
use std::process::Command;

use once_cell::sync::OnceCell;

use super::Input;
use super::Output;
use super::Process;

/// The overridden executable name used when the compiler is run as a library.
pub static EXECUTABLE: OnceCell<PathBuf> = OnceCell::new();

/// [`Process`] implementation that compiles a contract by re-invoking this
/// executable as a subprocess with the `--recursive-process` flag.
pub struct NativeProcess;

impl Process for NativeProcess {
    /// Serializes `input` to JSON, pipes it to a recursive invocation of the
    /// compiler binary via stdin, and deserializes the subprocess stdout into
    /// an [`Output`]. On a non-zero exit status, the subprocess stderr is
    /// surfaced as the error message.
    fn call(input: Input) -> anyhow::Result<Output> {
        let input_json = serde_json::to_vec(&input).expect("Always valid");

        // Prefer the overridden executable (library use); fall back to the
        // path of the currently running binary.
        let executable = match EXECUTABLE.get() {
            Some(executable) => executable.to_owned(),
            None => std::env::current_exe()?,
        };

        // All three streams are piped: stdin carries the serialized input,
        // stdout the serialized output, stderr any error message.
        let mut command = Command::new(executable.as_path());
        command.stdin(std::process::Stdio::piped());
        command.stdout(std::process::Stdio::piped());
        command.stderr(std::process::Stdio::piped());
        command.arg("--recursive-process");
        let process = command.spawn().map_err(|error| {
            anyhow::anyhow!("{:?} subprocess spawning error: {:?}", executable, error)
        })?;

        // Debug builds only: dump the serialized input for troubleshooting.
        #[cfg(debug_assertions)]
        input
            .debug_config
            .dump_stage_output(&input.contract.path, Some("stage"), &input_json)
            .map_err(|error| {
                anyhow::anyhow!(
                    "{:?} failed to log the recursive process output: {:?}",
                    executable,
                    error,
                )
            })?;

        process
            .stdin
            .as_ref()
            .ok_or_else(|| anyhow::anyhow!("{:?} stdin getting error", executable))?
            .write_all(input_json.as_slice())
            .map_err(|error| {
                anyhow::anyhow!("{:?} stdin writing error: {:?}", executable, error)
            })?;

        // Wait for termination and collect both output streams.
        let output = process.wait_with_output().map_err(|error| {
            anyhow::anyhow!("{:?} subprocess output error: {:?}", executable, error)
        })?;
        if !output.status.success() {
            anyhow::bail!(
                "{}",
                String::from_utf8_lossy(output.stderr.as_slice()).to_string(),
            );
        }

        let output: Output = revive_common::deserialize_from_slice(output.stdout.as_slice())
            .map_err(|error| {
                anyhow::anyhow!(
                    "{:?} subprocess output parsing error: {}",
                    executable,
                    error,
                )
            })?;

        Ok(output)
    }
}
@@ -0,0 +1,69 @@
//! Process for compiling a single compilation unit using Web Workers.

use std::ffi::{c_char, c_void, CStr, CString};

use super::Input;
use super::Output;
use super::Process;
use anyhow::Context;
use serde::Deserialize;

/// Error payload returned by the JavaScript worker.
#[derive(Deserialize)]
struct Error {
    message: String,
}

/// Success payload returned by the JavaScript worker; `data` holds the
/// serialized compilation output.
#[derive(Deserialize)]
struct Success {
    data: String,
}

/// Tagged union of the worker response; the JSON carries a `type` field of
/// either `success` or `error`.
#[derive(Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
enum Response {
    Success(Success),
    Error(Error),
}

/// [`Process`] implementation that delegates compilation to the JavaScript
/// side through the external `resolc_compile` function.
pub struct WorkerProcess;

impl Process for WorkerProcess {
    /// Serializes `input` to JSON, hands it to the JavaScript side, and
    /// parses the worker's JSON response into an [`Output`].
    fn call(input: Input) -> anyhow::Result<Output> {
        let input_json = serde_json::to_vec(&input).expect("Always valid");
        let input_str = String::from_utf8(input_json).expect("Input shall be valid");

        // Prepare the input string for the Emscripten function
        let input_cstring = CString::new(input_str).expect("CString allocation failed");

        // Call the Emscripten function
        let output_ptr =
            unsafe { resolc_compile(input_cstring.as_ptr(), input_cstring.as_bytes().len()) };

        // Guard against a null return: `CStr::from_ptr` on a null pointer is
        // undefined behavior, so fail early with a descriptive error instead.
        if output_ptr.is_null() {
            anyhow::bail!("resolc_compile returned a null pointer");
        }

        // Convert the output pointer back to a Rust string, copying it out
        // before the buffer is released below.
        let output_str = unsafe {
            CStr::from_ptr(output_ptr)
                .to_str()
                .with_context(|| "Failed to convert C string to Rust string")
                .map(str::to_owned)
        };
        // NOTE(review): assumes the buffer was allocated with `malloc` on the
        // JavaScript side and ownership is transferred to us — confirm against
        // the JS library implementation. Freed exactly once, even when the
        // UTF-8 conversion above failed.
        unsafe { libc::free(output_ptr as *mut c_void) };
        let output_str = output_str?;

        let response: Response = serde_json::from_str(&output_str)
            .map_err(|error| anyhow::anyhow!("Worker output parsing error: {}", error,))?;
        match response {
            Response::Success(out) => {
                let output: Output = revive_common::deserialize_from_slice(out.data.as_bytes())
                    .map_err(|error| {
                        anyhow::anyhow!("resolc.js subprocess output parsing error: {}", error,)
                    })?;
                Ok(output)
            }
            Response::Error(err) => anyhow::bail!("Worker error: {}", err.message,),
        }
    }
}

extern "C" {
    /// Provided by the JavaScript side (see the `--js-library` Emscripten
    /// link flag in the Makefile); compiles the given JSON input and returns
    /// a NUL-terminated JSON response buffer owned by the caller.
    fn resolc_compile(input_ptr: *const c_char, input_len: usize) -> *const c_char;
}
+27 -14
View File
@@ -7,8 +7,8 @@ use std::collections::HashMap;
use std::collections::HashSet;
use std::path::Path;
use rayon::iter::IntoParallelIterator;
use rayon::iter::ParallelIterator;
#[cfg(feature = "parallel")]
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use serde::Deserialize;
use serde::Serialize;
use sha3::Digest;
@@ -17,9 +17,10 @@ use crate::build::contract::Contract as ContractBuild;
use crate::build::Build;
use crate::missing_libraries::MissingLibraries;
use crate::process::input::Input as ProcessInput;
use crate::process::Process;
use crate::project::contract::ir::IR;
use crate::solc::version::Version as SolcVersion;
use crate::solc::Compiler as SolcCompiler;
use crate::solc::Compiler;
use crate::yul::lexer::Lexer;
use crate::yul::parser::statement::object::Object;
@@ -66,18 +67,30 @@ impl Project {
debug_config: revive_llvm_context::DebugConfig,
) -> anyhow::Result<Build> {
let project = self.clone();
let results: BTreeMap<String, anyhow::Result<ContractBuild>> = self
.contracts
.into_par_iter()
#[cfg(feature = "parallel")]
let iter = self.contracts.into_par_iter();
#[cfg(not(feature = "parallel"))]
let iter = self.contracts.into_iter();
let results: BTreeMap<String, anyhow::Result<ContractBuild>> = iter
.map(|(full_path, contract)| {
let process_output = crate::process::call(ProcessInput::new(
let process_input = ProcessInput::new(
contract,
project.clone(),
include_metadata_hash,
optimizer_settings.clone(),
debug_config.clone(),
));
);
let process_output = {
#[cfg(target_os = "emscripten")]
{
crate::WorkerProcess::call(process_input)
}
#[cfg(not(target_os = "emscripten"))]
{
crate::NativeProcess::call(process_input)
}
};
(full_path, process_output.map(|output| output.build))
})
.collect();
@@ -155,9 +168,9 @@ impl Project {
}
/// Parses the Yul source code file and returns the source data.
pub fn try_from_yul_path(
pub fn try_from_yul_path<T: Compiler>(
path: &Path,
solc_validator: Option<&SolcCompiler>,
solc_validator: Option<&T>,
) -> anyhow::Result<Self> {
let source_code = std::fs::read_to_string(path)
.map_err(|error| anyhow::anyhow!("Yul file {:?} reading error: {}", path, error))?;
@@ -166,16 +179,16 @@ impl Project {
/// Parses the test Yul source code string and returns the source data.
/// Only for integration testing purposes.
pub fn try_from_yul_string(
pub fn try_from_yul_string<T: Compiler>(
path: &Path,
source_code: &str,
solc_validator: Option<&SolcCompiler>,
solc_validator: Option<&T>,
) -> anyhow::Result<Self> {
if let Some(solc) = solc_validator {
solc.validate_yul(path)?;
}
let source_version = SolcVersion::new_simple(SolcCompiler::LAST_SUPPORTED_VERSION);
let source_version = SolcVersion::new_simple(crate::solc::LAST_SUPPORTED_VERSION);
let path = path.to_string_lossy().to_string();
let source_hash = sha3::Keccak256::digest(source_code.as_bytes()).into();
+33 -8
View File
@@ -4,8 +4,11 @@ pub mod arguments;
use std::str::FromStr;
use revive_solidity::Process;
use self::arguments::Arguments;
#[cfg(feature = "parallel")]
/// The rayon worker stack size.
const RAYON_WORKER_STACK_SIZE: usize = 16 * 1024 * 1024;
@@ -46,6 +49,7 @@ fn main_inner() -> anyhow::Result<()> {
return Ok(());
}
#[cfg(feature = "parallel")]
rayon::ThreadPoolBuilder::new()
.stack_size(RAYON_WORKER_STACK_SIZE)
.build_global()
@@ -57,10 +61,23 @@ fn main_inner() -> anyhow::Result<()> {
#[cfg(debug_assertions)]
if let Some(fname) = arguments.recursive_process_input {
let mut infile = std::fs::File::open(fname)?;
return revive_solidity::run_process(Some(&mut infile));
#[cfg(target_os = "emscripten")]
{
return revive_solidity::WorkerProcess::run(Some(&mut infile));
}
#[cfg(not(target_os = "emscripten"))]
{
return revive_solidity::NativeProcess::run(Some(&mut infile));
}
}
#[cfg(target_os = "emscripten")]
{
return revive_solidity::WorkerProcess::run(None);
}
#[cfg(not(target_os = "emscripten"))]
{
return revive_solidity::NativeProcess::run(None);
}
return revive_solidity::run_process(None);
}
let debug_config = match arguments.debug_output_directory {
@@ -83,11 +100,19 @@ fn main_inner() -> anyhow::Result<()> {
None => None,
};
let mut solc = revive_solidity::SolcCompiler::new(
arguments
.solc
.unwrap_or_else(|| revive_solidity::SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned()),
)?;
let mut solc = {
#[cfg(target_os = "emscripten")]
{
revive_solidity::SoljsonCompiler { version: None }
}
#[cfg(not(target_os = "emscripten"))]
{
revive_solidity::SolcCompiler::new(arguments.solc.unwrap_or_else(|| {
revive_solidity::SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned()
}))?
}
};
let evm_version = match arguments.evm_version {
Some(evm_version) => Some(revive_common::EVMVersion::try_from(evm_version.as_str())?),
+24 -266
View File
@@ -2,10 +2,13 @@
pub mod combined_json;
pub mod pipeline;
#[cfg(not(target_os = "emscripten"))]
pub mod solc_compiler;
#[cfg(target_os = "emscripten")]
pub mod soljson_compiler;
pub mod standard_json;
pub mod version;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
@@ -15,285 +18,40 @@ use self::standard_json::input::Input as StandardJsonInput;
use self::standard_json::output::Output as StandardJsonOutput;
use self::version::Version;
/// The first version of `solc` with the support of standard JSON interface.
pub const FIRST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 4, 12);
/// The first version of `solc`, where Yul codegen is considered robust enough.
pub const FIRST_YUL_VERSION: semver::Version = semver::Version::new(0, 8, 0);
/// The first version of `solc`, where `--via-ir` codegen mode is supported.
pub const FIRST_VIA_IR_VERSION: semver::Version = semver::Version::new(0, 8, 13);
/// The last supported version of `solc`.
pub const LAST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 8, 28);
/// The Solidity compiler.
pub struct Compiler {
/// The binary executable name.
pub executable: String,
/// The lazily-initialized compiler version.
pub version: Option<Version>,
}
impl Compiler {
/// The default executable name.
pub const DEFAULT_EXECUTABLE_NAME: &'static str = "solc";
/// The first version of `solc` with the support of standard JSON interface.
pub const FIRST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 4, 12);
/// The first version of `solc`, where Yul codegen is considered robust enough.
pub const FIRST_YUL_VERSION: semver::Version = semver::Version::new(0, 8, 0);
/// The first version of `solc`, where `--via-ir` codegen mode is supported.
pub const FIRST_VIA_IR_VERSION: semver::Version = semver::Version::new(0, 8, 13);
/// The last supported version of `solc`.
pub const LAST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 8, 28);
/// A shortcut constructor.
/// Different tools may use different `executable` names. For example, the integration tester
/// uses `solc-<version>` format.
pub fn new(executable: String) -> anyhow::Result<Self> {
if let Err(error) = which::which(executable.as_str()) {
anyhow::bail!(
"The `{executable}` executable not found in ${{PATH}}: {}",
error
);
}
Ok(Self {
executable,
version: None,
})
}
pub trait Compiler {
/// Compiles the Solidity `--standard-json` input into Yul IR.
pub fn standard_json(
fn standard_json(
&mut self,
mut input: StandardJsonInput,
input: StandardJsonInput,
pipeline: Pipeline,
base_path: Option<String>,
include_paths: Vec<String>,
allow_paths: Option<String>,
) -> anyhow::Result<StandardJsonOutput> {
let version = self.version()?;
let mut command = std::process::Command::new(self.executable.as_str());
command.stdin(std::process::Stdio::piped());
command.stdout(std::process::Stdio::piped());
command.arg("--standard-json");
if let Some(base_path) = base_path {
command.arg("--base-path");
command.arg(base_path);
}
for include_path in include_paths.into_iter() {
command.arg("--include-path");
command.arg(include_path);
}
if let Some(allow_paths) = allow_paths {
command.arg("--allow-paths");
command.arg(allow_paths);
}
input.normalize(&version.default);
let suppressed_warnings = input.suppressed_warnings.take().unwrap_or_default();
let input_json = serde_json::to_vec(&input).expect("Always valid");
let process = command.spawn().map_err(|error| {
anyhow::anyhow!("{} subprocess spawning error: {:?}", self.executable, error)
})?;
process
.stdin
.as_ref()
.ok_or_else(|| anyhow::anyhow!("{} stdin getting error", self.executable))?
.write_all(input_json.as_slice())
.map_err(|error| {
anyhow::anyhow!("{} stdin writing error: {:?}", self.executable, error)
})?;
let output = process.wait_with_output().map_err(|error| {
anyhow::anyhow!("{} subprocess output error: {:?}", self.executable, error)
})?;
if !output.status.success() {
anyhow::bail!(
"{} error: {}",
self.executable,
String::from_utf8_lossy(output.stderr.as_slice()).to_string()
);
}
let mut output: StandardJsonOutput =
revive_common::deserialize_from_slice(output.stdout.as_slice()).map_err(|error| {
anyhow::anyhow!(
"{} subprocess output parsing error: {}\n{}",
self.executable,
error,
revive_common::deserialize_from_slice::<serde_json::Value>(
output.stdout.as_slice()
)
.map(|json| serde_json::to_string_pretty(&json).expect("Always valid"))
.unwrap_or_else(
|_| String::from_utf8_lossy(output.stdout.as_slice()).to_string()
),
)
})?;
output.preprocess_ast(&version, pipeline, suppressed_warnings.as_slice())?;
Ok(output)
}
) -> anyhow::Result<StandardJsonOutput>;
/// The `solc --combined-json abi,hashes...` mirror.
pub fn combined_json(
fn combined_json(
&self,
paths: &[PathBuf],
combined_json_argument: &str,
) -> anyhow::Result<CombinedJson> {
let mut command = std::process::Command::new(self.executable.as_str());
command.args(paths);
let mut combined_json_flags = Vec::new();
let mut combined_json_fake_flag_pushed = false;
let mut filtered_flags = Vec::with_capacity(3);
for flag in combined_json_argument.split(',') {
match flag {
flag @ "asm" | flag @ "bin" | flag @ "bin-runtime" => filtered_flags.push(flag),
flag => combined_json_flags.push(flag),
}
}
if combined_json_flags.is_empty() {
combined_json_flags.push("ast");
combined_json_fake_flag_pushed = true;
}
command.arg("--combined-json");
command.arg(combined_json_flags.join(","));
let output = command.output().map_err(|error| {
anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
})?;
if !output.status.success() {
println!("{}", String::from_utf8_lossy(output.stdout.as_slice()));
println!("{}", String::from_utf8_lossy(output.stderr.as_slice()));
anyhow::bail!(
"{} error: {}",
self.executable,
String::from_utf8_lossy(output.stdout.as_slice()).to_string()
);
}
let mut combined_json: CombinedJson =
revive_common::deserialize_from_slice(output.stdout.as_slice()).map_err(|error| {
anyhow::anyhow!(
"{} subprocess output parsing error: {}\n{}",
self.executable,
error,
revive_common::deserialize_from_slice::<serde_json::Value>(
output.stdout.as_slice()
)
.map(|json| serde_json::to_string_pretty(&json).expect("Always valid"))
.unwrap_or_else(
|_| String::from_utf8_lossy(output.stdout.as_slice()).to_string()
),
)
})?;
for filtered_flag in filtered_flags.into_iter() {
for (_path, contract) in combined_json.contracts.iter_mut() {
match filtered_flag {
"asm" => contract.asm = Some(serde_json::Value::Null),
"bin" => contract.bin = Some("".to_owned()),
"bin-runtime" => contract.bin_runtime = Some("".to_owned()),
_ => continue,
}
}
}
if combined_json_fake_flag_pushed {
combined_json.source_list = None;
combined_json.sources = None;
}
combined_json.remove_evm();
Ok(combined_json)
}
) -> anyhow::Result<CombinedJson>;
/// The `solc` Yul validator.
pub fn validate_yul(&self, path: &Path) -> anyhow::Result<()> {
let mut command = std::process::Command::new(self.executable.as_str());
command.arg("--strict-assembly");
command.arg(path);
let output = command.output().map_err(|error| {
anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
})?;
if !output.status.success() {
anyhow::bail!(
"{} error: {}",
self.executable,
String::from_utf8_lossy(output.stderr.as_slice()).to_string()
);
}
Ok(())
}
fn validate_yul(&self, path: &Path) -> anyhow::Result<()>;
/// The `solc --version` mini-parser.
pub fn version(&mut self) -> anyhow::Result<Version> {
if let Some(version) = self.version.as_ref() {
return Ok(version.to_owned());
}
let mut command = std::process::Command::new(self.executable.as_str());
command.arg("--version");
let output = command.output().map_err(|error| {
anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
})?;
if !output.status.success() {
anyhow::bail!(
"{} error: {}",
self.executable,
String::from_utf8_lossy(output.stderr.as_slice()).to_string()
);
}
let stdout = String::from_utf8_lossy(output.stdout.as_slice());
let long = stdout
.lines()
.nth(1)
.ok_or_else(|| {
anyhow::anyhow!("{} version parsing: not enough lines", self.executable)
})?
.split(' ')
.nth(1)
.ok_or_else(|| {
anyhow::anyhow!(
"{} version parsing: not enough words in the 2nd line",
self.executable
)
})?
.to_owned();
let default: semver::Version = long
.split('+')
.next()
.ok_or_else(|| {
anyhow::anyhow!("{} version parsing: metadata dropping", self.executable)
})?
.parse()
.map_err(|error| anyhow::anyhow!("{} version parsing: {}", self.executable, error))?;
let l2_revision: Option<semver::Version> = stdout
.lines()
.nth(2)
.and_then(|line| line.split(' ').nth(1))
.and_then(|line| line.split('-').nth(1))
.and_then(|version| version.parse().ok());
let version = Version::new(long, default, l2_revision);
if version.default < Self::FIRST_SUPPORTED_VERSION {
anyhow::bail!(
"`solc` versions <{} are not supported, found {}",
Self::FIRST_SUPPORTED_VERSION,
version.default
);
}
if version.default > Self::LAST_SUPPORTED_VERSION {
anyhow::bail!(
"`solc` versions >{} are not supported, found {}",
Self::LAST_SUPPORTED_VERSION,
version.default
);
}
self.version = Some(version.clone());
Ok(version)
}
fn version(&mut self) -> anyhow::Result<Version>;
}
+1 -2
View File
@@ -3,7 +3,6 @@
use serde::{Deserialize, Serialize};
use crate::solc::version::Version as SolcVersion;
use crate::solc::Compiler as SolcCompiler;
/// The Solidity compiler pipeline type.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
@@ -19,7 +18,7 @@ pub enum Pipeline {
impl Pipeline {
/// We always use EVMLA for Solidity <=0.7, or if the user does not want to compile via Yul.
pub fn new(solc_version: &SolcVersion, force_evmla: bool) -> Self {
if solc_version.default < SolcCompiler::FIRST_YUL_VERSION || force_evmla {
if solc_version.default < crate::solc::FIRST_YUL_VERSION || force_evmla {
Self::EVMLA
} else {
Self::Yul
+286
View File
@@ -0,0 +1,286 @@
//! The Solidity compiler.
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use crate::solc::combined_json::CombinedJson;
use crate::solc::pipeline::Pipeline;
use crate::solc::standard_json::input::Input as StandardJsonInput;
use crate::solc::standard_json::output::Output as StandardJsonOutput;
use crate::solc::version::Version;
use super::Compiler;
/// The Solidity compiler.
///
/// Wraps a native `solc` binary found on `$PATH`; all operations shell out
/// to it as a subprocess.
pub struct SolcCompiler {
    /// The binary executable name.
    pub executable: String,
    /// The lazily-initialized compiler version.
    pub version: Option<Version>,
}
impl SolcCompiler {
/// The default executable name.
pub const DEFAULT_EXECUTABLE_NAME: &'static str = "solc";
/// A shortcut constructor.
/// Different tools may use different `executable` names. For example, the integration tester
/// uses `solc-<version>` format.
pub fn new(executable: String) -> anyhow::Result<Self> {
if let Err(error) = which::which(executable.as_str()) {
anyhow::bail!(
"The `{executable}` executable not found in ${{PATH}}: {}",
error
);
}
Ok(Self {
executable,
version: None,
})
}
}
impl Compiler for SolcCompiler {
    /// Compiles the Solidity `--standard-json` input into Yul IR.
    ///
    /// Spawns the `solc` subprocess, feeds the serialized JSON input over
    /// stdin and parses the JSON output from stdout. The path-related
    /// arguments are forwarded to `solc` verbatim when present.
    fn standard_json(
        &mut self,
        mut input: StandardJsonInput,
        pipeline: Pipeline,
        base_path: Option<String>,
        include_paths: Vec<String>,
        allow_paths: Option<String>,
    ) -> anyhow::Result<StandardJsonOutput> {
        // Resolving the version first also validates the supported range.
        let version = self.version()?;

        let mut command = std::process::Command::new(self.executable.as_str());
        command.stdin(std::process::Stdio::piped());
        command.stdout(std::process::Stdio::piped());
        command.arg("--standard-json");

        if let Some(base_path) = base_path {
            command.arg("--base-path");
            command.arg(base_path);
        }
        for include_path in include_paths.into_iter() {
            command.arg("--include-path");
            command.arg(include_path);
        }
        if let Some(allow_paths) = allow_paths {
            command.arg("--allow-paths");
            command.arg(allow_paths);
        }

        // Adjust the input to the quirks of the detected `solc` version.
        input.normalize(&version.default);

        // `suppressed_warnings` is a revive-specific field: take it out of
        // the input before serializing so `solc` does not see it, and apply
        // it in `preprocess_ast` below.
        let suppressed_warnings = input.suppressed_warnings.take().unwrap_or_default();

        let input_json = serde_json::to_vec(&input).expect("Always valid");

        let process = command.spawn().map_err(|error| {
            anyhow::anyhow!("{} subprocess spawning error: {:?}", self.executable, error)
        })?;
        process
            .stdin
            .as_ref()
            .ok_or_else(|| anyhow::anyhow!("{} stdin getting error", self.executable))?
            .write_all(input_json.as_slice())
            .map_err(|error| {
                anyhow::anyhow!("{} stdin writing error: {:?}", self.executable, error)
            })?;
        let output = process.wait_with_output().map_err(|error| {
            anyhow::anyhow!("{} subprocess output error: {:?}", self.executable, error)
        })?;
        if !output.status.success() {
            anyhow::bail!(
                "{} error: {}",
                self.executable,
                String::from_utf8_lossy(output.stderr.as_slice()).to_string()
            );
        }

        // On a parse failure, embed the pretty-printed (or raw) stdout in
        // the error message to aid debugging.
        let mut output: StandardJsonOutput =
            revive_common::deserialize_from_slice(output.stdout.as_slice()).map_err(|error| {
                anyhow::anyhow!(
                    "{} subprocess output parsing error: {}\n{}",
                    self.executable,
                    error,
                    revive_common::deserialize_from_slice::<serde_json::Value>(
                        output.stdout.as_slice()
                    )
                    .map(|json| serde_json::to_string_pretty(&json).expect("Always valid"))
                    .unwrap_or_else(
                        |_| String::from_utf8_lossy(output.stdout.as_slice()).to_string()
                    ),
                )
            })?;
        output.preprocess_ast(&version, pipeline, suppressed_warnings.as_slice())?;

        Ok(output)
    }

    /// The `solc --combined-json abi,hashes...` mirror.
    ///
    /// The `asm`, `bin` and `bin-runtime` flags are filtered out of the
    /// request and only stubbed in the result.
    fn combined_json(
        &self,
        paths: &[PathBuf],
        combined_json_argument: &str,
    ) -> anyhow::Result<CombinedJson> {
        let mut command = std::process::Command::new(self.executable.as_str());
        command.args(paths);

        let mut combined_json_flags = Vec::new();
        let mut combined_json_fake_flag_pushed = false;
        let mut filtered_flags = Vec::with_capacity(3);
        for flag in combined_json_argument.split(',') {
            match flag {
                flag @ "asm" | flag @ "bin" | flag @ "bin-runtime" => filtered_flags.push(flag),
                flag => combined_json_flags.push(flag),
            }
        }
        // `--combined-json` needs at least one flag: push `ast` as a
        // placeholder and strip its by-products from the result below.
        if combined_json_flags.is_empty() {
            combined_json_flags.push("ast");
            combined_json_fake_flag_pushed = true;
        }
        command.arg("--combined-json");
        command.arg(combined_json_flags.join(","));

        let output = command.output().map_err(|error| {
            anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
        })?;
        if !output.status.success() {
            println!("{}", String::from_utf8_lossy(output.stdout.as_slice()));
            println!("{}", String::from_utf8_lossy(output.stderr.as_slice()));
            anyhow::bail!(
                "{} error: {}",
                self.executable,
                String::from_utf8_lossy(output.stdout.as_slice()).to_string()
            );
        }

        let mut combined_json: CombinedJson =
            revive_common::deserialize_from_slice(output.stdout.as_slice()).map_err(|error| {
                anyhow::anyhow!(
                    "{} subprocess output parsing error: {}\n{}",
                    self.executable,
                    error,
                    revive_common::deserialize_from_slice::<serde_json::Value>(
                        output.stdout.as_slice()
                    )
                    .map(|json| serde_json::to_string_pretty(&json).expect("Always valid"))
                    .unwrap_or_else(
                        |_| String::from_utf8_lossy(output.stdout.as_slice()).to_string()
                    ),
                )
            })?;

        // Stub the filtered fields with placeholder values.
        // NOTE(review): presumably these are replaced by revive's own
        // code generation downstream — confirm against the callers.
        for filtered_flag in filtered_flags.into_iter() {
            for (_path, contract) in combined_json.contracts.iter_mut() {
                match filtered_flag {
                    "asm" => contract.asm = Some(serde_json::Value::Null),
                    "bin" => contract.bin = Some("".to_owned()),
                    "bin-runtime" => contract.bin_runtime = Some("".to_owned()),
                    _ => continue,
                }
            }
        }
        // Drop artifacts produced by the placeholder `ast` flag.
        if combined_json_fake_flag_pushed {
            combined_json.source_list = None;
            combined_json.sources = None;
        }
        combined_json.remove_evm();

        Ok(combined_json)
    }

    /// The `solc` Yul validator.
    ///
    /// Runs `solc --strict-assembly <path>` and succeeds iff `solc` does.
    fn validate_yul(&self, path: &Path) -> anyhow::Result<()> {
        let mut command = std::process::Command::new(self.executable.as_str());
        command.arg("--strict-assembly");
        command.arg(path);
        let output = command.output().map_err(|error| {
            anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
        })?;
        if !output.status.success() {
            anyhow::bail!(
                "{} error: {}",
                self.executable,
                String::from_utf8_lossy(output.stderr.as_slice()).to_string()
            );
        }
        Ok(())
    }

    /// The `solc --version` mini-parser.
    ///
    /// Takes the second whitespace-separated word of the second output line
    /// as the long version; the part before `+` is the semver triple. An
    /// optional third line may carry an extra revision after a `-`. The
    /// parsed version is range-checked and cached on `self`.
    fn version(&mut self) -> anyhow::Result<Version> {
        // Serve the cached value on repeated queries.
        if let Some(version) = self.version.as_ref() {
            return Ok(version.to_owned());
        }

        let mut command = std::process::Command::new(self.executable.as_str());
        command.arg("--version");
        let output = command.output().map_err(|error| {
            anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error)
        })?;
        if !output.status.success() {
            anyhow::bail!(
                "{} error: {}",
                self.executable,
                String::from_utf8_lossy(output.stderr.as_slice()).to_string()
            );
        }

        let stdout = String::from_utf8_lossy(output.stdout.as_slice());
        let long = stdout
            .lines()
            .nth(1)
            .ok_or_else(|| {
                anyhow::anyhow!("{} version parsing: not enough lines", self.executable)
            })?
            .split(' ')
            .nth(1)
            .ok_or_else(|| {
                anyhow::anyhow!(
                    "{} version parsing: not enough words in the 2nd line",
                    self.executable
                )
            })?
            .to_owned();
        let default: semver::Version = long
            .split('+')
            .next()
            .ok_or_else(|| {
                anyhow::anyhow!("{} version parsing: metadata dropping", self.executable)
            })?
            .parse()
            .map_err(|error| anyhow::anyhow!("{} version parsing: {}", self.executable, error))?;

        // Best-effort: absent or unparsable revisions simply yield `None`.
        let l2_revision: Option<semver::Version> = stdout
            .lines()
            .nth(2)
            .and_then(|line| line.split(' ').nth(1))
            .and_then(|line| line.split('-').nth(1))
            .and_then(|version| version.parse().ok());

        let version = Version::new(long, default, l2_revision);
        if version.default < super::FIRST_SUPPORTED_VERSION {
            anyhow::bail!(
                "`solc` versions <{} are not supported, found {}",
                super::FIRST_SUPPORTED_VERSION,
                version.default
            );
        }
        if version.default > super::LAST_SUPPORTED_VERSION {
            anyhow::bail!(
                "`solc` versions >{} are not supported, found {}",
                super::LAST_SUPPORTED_VERSION,
                version.default
            );
        }

        self.version = Some(version.clone());
        Ok(version)
    }
}
@@ -0,0 +1,133 @@
//! The Solidity compiler.
use std::path::Path;
use std::path::PathBuf;
use crate::solc::combined_json::CombinedJson;
use crate::solc::pipeline::Pipeline;
use crate::solc::standard_json::input::Input as StandardJsonInput;
use crate::solc::standard_json::output::Output as StandardJsonOutput;
use crate::solc::version::Version;
use anyhow::Context;
use std::ffi::{c_char, c_void, CStr, CString};
use super::Compiler;
extern "C" {
    /// Returns the compiler version as a C string; the callers below free it
    /// with `libc::free` after copying it into Rust memory.
    fn soljson_version() -> *const c_char;
    /// Compiles a standard-JSON input string; returns a C string that the
    /// callers below free with `libc::free`.
    fn soljson_compile(inputPtr: *const c_char, inputLen: usize) -> *const c_char;
}

/// The Solidity compiler backed by an in-process Soljson (solc.js) instance
/// reached through the `extern "C"` imports above.
pub struct SoljsonCompiler {
    /// The lazily-initialized compiler version.
    pub version: Option<Version>,
}
impl Compiler for SoljsonCompiler {
    /// Compiles the Solidity `--standard-json` input into Yul IR.
    ///
    /// Path arguments are ignored: this backend compiles fully in-memory
    /// through the `soljson_compile` FFI entry point.
    fn standard_json(
        &mut self,
        mut input: StandardJsonInput,
        pipeline: Pipeline,
        _base_path: Option<String>,
        _include_paths: Vec<String>,
        _allow_paths: Option<String>,
    ) -> anyhow::Result<StandardJsonOutput> {
        let version = self.version()?;

        // `suppressed_warnings` is a revive-specific field: take it out of
        // the input before serializing and apply it in `preprocess_ast`.
        let suppressed_warnings = input.suppressed_warnings.take().unwrap_or_default();

        let input_json = serde_json::to_string(&input).expect("Always valid");
        let out = Self::compile_standard_json(input_json)?;

        // On a parse failure, embed the pretty-printed (or raw) output in
        // the error message to aid debugging.
        let mut output: StandardJsonOutput = revive_common::deserialize_from_slice(out.as_bytes())
            .map_err(|error| {
                anyhow::anyhow!(
                    "Soljson output parsing error: {}\n{}",
                    error,
                    revive_common::deserialize_from_slice::<serde_json::Value>(out.as_bytes())
                        .map(|json| serde_json::to_string_pretty(&json).expect("Always valid"))
                        .unwrap_or_else(|_| String::from_utf8_lossy(out.as_bytes()).to_string()),
                )
            })?;
        output.preprocess_ast(&version, pipeline, suppressed_warnings.as_slice())?;

        Ok(output)
    }

    /// The `--combined-json` mode is not available through Soljson.
    fn combined_json(
        &self,
        _paths: &[PathBuf],
        _combined_json_argument: &str,
    ) -> anyhow::Result<CombinedJson> {
        unimplemented!();
    }

    /// Yul validation is not available through Soljson.
    fn validate_yul(&self, _path: &Path) -> anyhow::Result<()> {
        unimplemented!();
    }

    /// The Soljson version mini-parser.
    ///
    /// The part of the version string before `+` is the semver triple; an
    /// optional revision may follow a `-`. The result is range-checked and
    /// cached on `self`.
    fn version(&mut self) -> anyhow::Result<Version> {
        // Fix: serve the cached value when present. The original stored the
        // version but never read it back, performing a redundant FFI call on
        // every query — inconsistent with `SolcCompiler::version`.
        if let Some(version) = self.version.as_ref() {
            return Ok(version.to_owned());
        }

        let version = Self::get_soljson_version()?;
        let long = version.clone();
        let default: semver::Version = version
            .split('+')
            .next()
            .ok_or_else(|| anyhow::anyhow!("Soljson version parsing: metadata dropping"))?
            .parse()
            .map_err(|error| anyhow::anyhow!("Soljson version parsing: {}", error))?;

        // Best-effort: an absent or unparsable revision yields `None`.
        let l2_revision: Option<semver::Version> = version
            .split('-')
            .nth(1)
            .and_then(|version| version.parse().ok());

        let version = Version::new(long, default, l2_revision);
        if version.default < super::FIRST_SUPPORTED_VERSION {
            anyhow::bail!(
                "`Soljson` versions <{} are not supported, found {}",
                super::FIRST_SUPPORTED_VERSION,
                version.default
            );
        }
        if version.default > super::LAST_SUPPORTED_VERSION {
            anyhow::bail!(
                "`Soljson` versions >{} are not supported, found {}",
                super::LAST_SUPPORTED_VERSION,
                version.default
            );
        }

        self.version = Some(version.clone());
        Ok(version)
    }
}
impl SoljsonCompiler {
    /// Queries the Soljson compiler version over FFI.
    ///
    /// The returned C string is copied into Rust memory and released with
    /// `libc::free` before any conversion error is propagated, avoiding a
    /// leak on the error path.
    fn get_soljson_version() -> anyhow::Result<String> {
        unsafe {
            let version_ptr = soljson_version();
            let version = CStr::from_ptr(version_ptr)
                .to_str()
                .with_context(|| "Failed to convert C string to Rust string")
                .map(str::to_owned);
            libc::free(version_ptr as *mut c_void);
            Ok(version?)
        }
    }

    /// Compiles the standard-JSON `input` over FFI, returning the raw JSON
    /// output string.
    fn compile_standard_json(input: String) -> anyhow::Result<String> {
        // Fix: an interior NUL byte in the input previously panicked via
        // `unwrap`; report it as an error instead.
        let c_input = CString::new(input)
            .with_context(|| "Standard JSON input contains an interior NUL byte")?;
        let c_input_len = c_input.as_bytes().len();
        unsafe {
            let output_ptr = soljson_compile(c_input.as_ptr(), c_input_len);
            let output_json = CStr::from_ptr(output_ptr)
                .to_str()
                .with_context(|| "Failed to convert C string to Rust string")
                .map(str::to_owned);
            // Free before propagating any conversion error to avoid a leak.
            libc::free(output_ptr as *mut c_void);
            Ok(output_json?)
        }
    }
}
@@ -8,8 +8,8 @@ use std::collections::BTreeMap;
use std::collections::BTreeSet;
use std::path::PathBuf;
use rayon::iter::IntoParallelIterator;
use rayon::iter::ParallelIterator;
#[cfg(feature = "parallel")]
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use serde::Deserialize;
use serde::Serialize;
@@ -64,8 +64,12 @@ impl Input {
via_ir: bool,
suppressed_warnings: Option<Vec<Warning>>,
) -> anyhow::Result<Self> {
let sources = paths
.into_par_iter()
#[cfg(feature = "parallel")]
let iter = paths.into_par_iter(); // Parallel iterator
#[cfg(not(feature = "parallel"))]
let iter = paths.iter(); // Sequential iterator
let sources = iter
.map(|path| {
let source = Source::try_from(path.as_path()).unwrap_or_else(|error| {
panic!("Source code file {path:?} reading error: {error}")
@@ -106,8 +110,12 @@ impl Input {
via_ir: bool,
suppressed_warnings: Option<Vec<Warning>>,
) -> anyhow::Result<Self> {
let sources = sources
.into_par_iter()
#[cfg(feature = "parallel")]
let iter = sources.into_par_iter(); // Parallel iterator
#[cfg(not(feature = "parallel"))]
let iter = sources.into_iter(); // Sequential iterator
let sources = iter
.map(|(path, content)| (path, Source::from(content)))
.collect();
+13 -6
View File
@@ -9,13 +9,14 @@ use once_cell::sync::Lazy;
use crate::project::Project;
use crate::solc::pipeline::Pipeline as SolcPipeline;
use crate::solc::solc_compiler::SolcCompiler;
use crate::solc::standard_json::input::settings::optimizer::Optimizer as SolcStandardJsonInputSettingsOptimizer;
use crate::solc::standard_json::input::settings::selection::Selection as SolcStandardJsonInputSettingsSelection;
use crate::solc::standard_json::input::Input as SolcStandardJsonInput;
use crate::solc::standard_json::output::contract::evm::bytecode::Bytecode;
use crate::solc::standard_json::output::contract::evm::bytecode::DeployedBytecode;
use crate::solc::standard_json::output::Output as SolcStandardJsonOutput;
use crate::solc::Compiler as SolcCompiler;
use crate::solc::Compiler;
use crate::warning::Warning;
static PVM_BLOB_CACHE: Lazy<Mutex<HashMap<CachedBlob, Vec<u8>>>> = Lazy::new(Default::default);
@@ -81,7 +82,8 @@ pub fn build_solidity_with_options(
inkwell::support::enable_llvm_pretty_stack_trace();
revive_llvm_context::initialize_target(revive_llvm_context::Target::PVM);
let _ = crate::process::EXECUTABLE.set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
let _ = crate::process::native_process::EXECUTABLE
.set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
let solc_version = solc.version()?;
@@ -126,7 +128,8 @@ pub fn build_solidity_with_options_evm(
inkwell::support::enable_llvm_pretty_stack_trace();
revive_llvm_context::initialize_target(revive_llvm_context::Target::PVM);
let _ = crate::process::EXECUTABLE.set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
let _ = crate::process::native_process::EXECUTABLE
.set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
let solc_version = solc.version()?;
@@ -179,7 +182,8 @@ pub fn build_solidity_and_detect_missing_libraries(
inkwell::support::enable_llvm_pretty_stack_trace();
revive_llvm_context::initialize_target(revive_llvm_context::Target::PVM);
let _ = crate::process::EXECUTABLE.set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
let _ = crate::process::native_process::EXECUTABLE
.set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME));
let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?;
let solc_version = solc.version()?;
@@ -215,8 +219,11 @@ pub fn build_yul(source_code: &str) -> anyhow::Result<()> {
revive_llvm_context::initialize_target(revive_llvm_context::Target::PVM);
let optimizer_settings = revive_llvm_context::OptimizerSettings::none();
let project =
Project::try_from_yul_string(PathBuf::from("test.yul").as_path(), source_code, None)?;
let project = Project::try_from_yul_string::<SolcCompiler>(
PathBuf::from("test.yul").as_path(),
source_code,
None,
)?;
let _build = project.compile(optimizer_settings, false, DEBUG_CONFIG)?;
Ok(())
+69
View File
@@ -0,0 +1,69 @@
#!/usr/bin/env bash

# Cross-compiles LLVM (clang + lld, RISCV target) to WebAssembly with
# Emscripten, after first building the native tablegen tools it needs.
# Requires EMSDK_ROOT to point at an installed Emscripten SDK.

set -euo pipefail

# Install prefix for the cross-compiled toolchain.
INSTALL_DIR="${PWD}/llvm18.0-emscripten"
mkdir -p "${INSTALL_DIR}"

# Check if EMSDK_ROOT is defined
if [ -z "${EMSDK_ROOT:-}" ]; then
  echo "Error: EMSDK_ROOT is not defined."
  echo "Please set the EMSDK_ROOT environment variable to the root directory of your Emscripten SDK."
  exit 1
fi

# Bring emcc/emcmake into PATH.
source "${EMSDK_ROOT}/emsdk_env.sh"

LLVM_SRC="${PWD}/llvm-project"
LLVM_NATIVE="${PWD}/build/llvm-tools"
LLVM_WASM="${PWD}/build/llvm-wasm"

./clone-llvm.sh "${LLVM_SRC}"

# Cross-compiling LLVM requires a native build of "llvm-tblgen", "clang-tblgen" and "llvm-config"
# The build directory doubles as a configure-once stamp.
if [ ! -d "${LLVM_NATIVE}" ]; then
  cmake -G Ninja \
    -S "${LLVM_SRC}/llvm" \
    -B "${LLVM_NATIVE}" \
    -DCMAKE_BUILD_TYPE=Release \
    -DLLVM_TARGETS_TO_BUILD=WebAssembly \
    -DLLVM_ENABLE_PROJECTS="clang"
fi
cmake --build "${LLVM_NATIVE}" -- llvm-tblgen clang-tblgen llvm-config

# Configure the wasm cross build only on the first run.
if [ ! -d "${LLVM_WASM}" ]; then
  EMCC_DEBUG=2 \
  CXXFLAGS="-Dwait4=__syscall_wait4" \
  LDFLAGS="-lnodefs.js -s NO_INVOKE_RUN -s EXIT_RUNTIME -s INITIAL_MEMORY=64MB -s ALLOW_MEMORY_GROWTH -s \
EXPORTED_RUNTIME_METHODS=FS,callMain,NODEFS -s MODULARIZE -s EXPORT_ES6 -s WASM_BIGINT" \
  emcmake cmake -G Ninja \
    -S "${LLVM_SRC}/llvm" \
    -B "${LLVM_WASM}" \
    -DCMAKE_BUILD_TYPE=Release \
    -DLLVM_TARGETS_TO_BUILD='RISCV' \
    -DLLVM_ENABLE_PROJECTS="clang;lld" \
    -DLLVM_ENABLE_DUMP=OFF \
    -DLLVM_ENABLE_ASSERTIONS=OFF \
    -DLLVM_ENABLE_EXPENSIVE_CHECKS=OFF \
    -DLLVM_ENABLE_BACKTRACES=OFF \
    -DLLVM_BUILD_TOOLS=OFF \
    -DLLVM_ENABLE_THREADS=OFF \
    -DLLVM_BUILD_LLVM_DYLIB=OFF \
    -DLLVM_INCLUDE_TESTS=OFF \
    -DLLVM_ENABLE_TERMINFO=Off \
    -DLLVM_ENABLE_LIBXML2=Off \
    -DLLVM_ENABLE_ZLIB=Off \
    -DLLVM_ENABLE_ZSTD=Off \
    -DLLVM_TABLEGEN="${LLVM_NATIVE}/bin/llvm-tblgen" \
    -DCLANG_TABLEGEN="${LLVM_NATIVE}/bin/clang-tblgen" \
    -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}"
fi

cmake --build "${LLVM_WASM}"
cmake --install "${LLVM_WASM}"

# The wasm install has no usable llvm-config; ship the native one alongside.
cp "${LLVM_NATIVE}/bin/llvm-config" "${INSTALL_DIR}/bin"

echo ""
echo "LLVM cross-compilation for WebAssembly completed successfully."
+49
View File
@@ -0,0 +1,49 @@
// Emscripten pre-js: customizes the generated module's stdio so hosts can
// feed stdin from a string and capture stdout/stderr through callbacks.
var Module = {
  stdinData: "",
  stdoutCallback: null,
  stderrCallback: null,

  // Function to set a callback for stdout
  setStdoutCallback: function(callback) {
    this.stdoutCallback = callback;
  },

  // Function to set a callback for stderr
  setStderrCallback: function(callback) {
    this.stderrCallback = callback;
  },

  // Function to set input data for stdin
  setStdinData: function(data) {
    this.stdinData = data;
  },

  // `preRun` is called before the program starts running
  preRun: function() {
    // Define a custom stdin function: serve one character per call,
    // null signals EOF.
    // NOTE(review): charCodeAt yields UTF-16 code units, not bytes, so
    // non-ASCII input would reach the program mangled — confirm that
    // callers only pass ASCII JSON.
    function customStdin() {
      if (Module.stdinData.length === 0) {
        return null; // End of input (EOF)
      }
      const char = Module.stdinData.charCodeAt(0);
      Module.stdinData = Module.stdinData.slice(1); // Remove the character from input
      return char;
    }
    // Define a custom stdout function: forward each character to the
    // registered callback, if any.
    function customStdout(char) {
      if (Module.stdoutCallback) {
        Module.stdoutCallback(String.fromCharCode(char));
      }
    }
    // Define a custom stderr function: same forwarding for stderr.
    function customStderr(char) {
      if (Module.stderrCallback) {
        Module.stderrCallback(String.fromCharCode(char));
      }
    }
    FS.init(customStdin, customStdout, customStderr);
  },
};
+60
View File
@@ -0,0 +1,60 @@
// Emscripten JS library: implements the `soljson_*` / `resolc_*` imports
// declared `extern "C"` on the Rust side.
mergeInto(LibraryManager.library, {
  // Compile standard-JSON input with the solc.js instance attached to the module.
  soljson_compile: function(inputPtr, inputLen) {
    const inputJson = UTF8ToString(inputPtr, inputLen);
    const output = Module.solc.compile(inputJson);
    return stringToNewUTF8(output);
  },
  // Return the solc.js long version string.
  soljson_version: function() {
    const version = Module.solc.version();
    return stringToNewUTF8(version);
  },
  // Run a nested resolc compilation in a worker thread, blocking until done.
  resolc_compile: function(inputPtr, inputLen) {
    const { Worker } = require('worker_threads');
    const deasync = require('deasync');
    const inputJson = UTF8ToString(inputPtr, inputLen);

    // Fix: the original wrapped the worker in a Promise it never awaited or
    // caught, so the error path produced an unhandled rejection on top of
    // the callback; report the outcome through the callback only.
    function compileWithWorker(input, callback) {
      const worker = new Worker(new URL('./worker.js', import.meta.url), {
        type: 'module',
      });
      // Listen for messages from the worker
      worker.on('message', (message) => {
        callback(null, message.output);
        worker.terminate(); // Terminate the worker after processing
      });
      // Listen for errors from the worker
      worker.on('error', (error) => {
        callback(error);
        worker.terminate();
      });
      // Send the input JSON to the worker
      worker.postMessage(input);
    }

    let result = null;
    let error = null;
    compileWithWorker(inputJson, function (err, res) {
      error = err;
      result = res;
    });

    // Use deasync to block until the worker reports back.
    // TODO: deasync is not present in browsers, another solution needs to be implemented
    deasync.loopWhile(() => result === null && error === null);

    if (error) {
      const errorJson = JSON.stringify({ type: 'error', message: error.message || "Unknown error" });
      return stringToNewUTF8(errorJson);
    }
    const resultJson = JSON.stringify({ type: 'success', data: result });
    return stringToNewUTF8(resultJson);
  },
});
+23
View File
@@ -0,0 +1,23 @@
{
"name": "revive",
"version": "1.0.0",
"description": "Revive compiler",
"main": "run_revive.js",
"type": "module",
"dependencies": {
"deasync": "^0.1.15",
"solc": "^0.8.28"
},
"scripts": {
"build": "cp ../target/wasm32-unknown-emscripten/release/resolc.js ../target/wasm32-unknown-emscripten/release/resolc.wasm ./src && npx rollup -c",
"test": "npm run build && node run_revive.js"
},
"devDependencies": {
"@babel/core": "^7.26.0",
"@babel/preset-env": "^7.26.0",
"@rollup/plugin-babel": "^6.0.4",
"@rollup/plugin-node-resolve": "^15.3.0",
"rollup": "^4.27.3",
"rollup-plugin-copy": "^3.5.0"
}
}
+35
View File
@@ -0,0 +1,35 @@
import babel from '@rollup/plugin-babel';
import copy from 'rollup-plugin-copy';
import resolve from '@rollup/plugin-node-resolve';
// Output directories for the two bundle flavors.
const outputDirCJS = 'dist/revive-cjs';
const outputDirESM = 'dist/revive-esm';

export default {
  // Entry points: the Emscripten-generated module and its compile worker.
  input: ['src/resolc.js', 'src/worker.js'], // Adjust this to your main entry file
  output: [
    {
      dir: outputDirCJS,
      format: 'cjs',
      exports: 'auto',
    },
    {
      dir: outputDirESM,
      format: 'esm',
    },
  ],
  plugins: [
    // Transpile with @babel/preset-env; dependencies are left untouched.
    babel({
      exclude: 'node_modules/**',
      presets: ['@babel/preset-env'],
      babelHelpers: 'inline',
    }),
    resolve(),
    // The wasm binary is not part of the JS module graph; copy it next to
    // each bundle so resolc.js can locate it at runtime.
    copy({
      targets: [
        { src: 'src/resolc.wasm', dest: outputDirCJS },
        { src: 'src/resolc.wasm', dest: outputDirESM },
      ],
    })
  ],
};
+62
View File
@@ -0,0 +1,62 @@
import solc from 'solc';
// Import the Emscripten module
import createRevive from './dist/revive-esm/resolc.js';
// Minimal standard-JSON compiler input used as a smoke test: one contract,
// optimizer enabled, ABI-only output selection.
const compilerStandardJsonInput = {
  language: 'Solidity',
  sources: {
    'MyContract.sol': {
      content: `
      // SPDX-License-Identifier: UNLICENSED
      pragma solidity ^0.8.0;
      contract MyContract {
        function greet() public pure returns (string memory) {
          return "Hello";
        }
      }
      `,
    },
  },
  settings: {
    optimizer: {
      enabled: true,
      runs: 200,
    },
    outputSelection: {
      '*': {
        '*': ['abi'],
      },
    },
  },
};
/**
 * Drives the Emscripten resolc module end to end: attaches solc.js, feeds
 * the standard-JSON input over stdin, runs `--standard-json` and echoes the
 * captured stdout/stderr.
 */
async function runCompiler() {
  const m = await createRevive();
  // Expose the JS solc compiler to the wasm side (used by the soljson_* imports).
  m.solc = solc;

  // Set input data for stdin
  m.setStdinData(JSON.stringify(compilerStandardJsonInput));

  let stdoutString = "";
  m.setStdoutCallback(function(char) {
    // Fix: the original compared `char.charCodeAt(0)` (a number) against
    // '\n' (a string) — never true — and referenced an undefined `exit`;
    // the dead branch is removed and every character is accumulated.
    stdoutString += char;
  });
  let stderrString = "";
  m.setStderrCallback(function(char) {
    stderrString += char;
  });

  // Compile the Solidity source code
  m.callMain(['--standard-json']);
  console.log("Stdout: " + stdoutString);
  console.error("Stderr: " + stderrString);
}

runCompiler().catch(err => {
  console.error('Error:', err);
});
+32
View File
@@ -0,0 +1,32 @@
import { parentPort } from 'worker_threads';

// Worker entry point: receives a standard-JSON input string, runs a fresh
// resolc module instance over it, and posts the captured stdout back.
parentPort.on('message', async (inputJson) => {
  // Load a new Emscripten module instance per request so runtime state is
  // not shared between compilations.
  const { default: createRevive } = await import(new URL('./resolc.js', import.meta.url));
  const revive = await createRevive();

  revive.setStdinData(inputJson);

  let stdoutString = "";
  revive.setStdoutCallback(function(char) {
    // Fix: the original compared `char.charCodeAt(0)` (a number) against
    // '\n' (a string) — never true — and referenced an undefined `exit`;
    // the dead branch is removed and every character is accumulated.
    stdoutString += char;
  });
  let stderrString = "";
  revive.setStderrCallback(function(char) {
    stderrString += char;
  });

  // Call main on the new instance
  revive.callMain(['--recursive-process']);

  if (stderrString.length > 0) {
    // Surface compiler diagnostics as a worker error: the rejection of this
    // async handler crashes the worker, which fires the parent's 'error' event.
    throw new Error(stderrString);
  }
  parentPort.postMessage({ output: stdoutString });
});
+5 -2
View File
@@ -2,9 +2,12 @@
"name": "root",
"private": true,
"scripts": {
"test:cli": "npm run test -w crates/solidity/src/tests/cli-tests"
"test:cli": "npm run test -w crates/solidity/src/tests/cli-tests",
"build:revive": "npm run build -w js",
"test:revive": "npm run test -w js"
},
"workspaces": [
"crates/solidity/src/tests/cli-tests"
"crates/solidity/src/tests/cli-tests",
"js"
]
}