Compare commits

...

9 Commits

Author SHA1 Message Date
pezkuwichain fd6b707687 feat: add pez-revive-dev-node platform aliases for Pezkuwi SDK compatibility 2026-01-27 15:14:44 +03:00
Omar 9fd6a8d408 Update resolc in ci to use a path (#233) 2026-01-26 21:56:40 +00:00
Omar 0d5e80f80f Update heapsize of resolc (#232)
* Add the ability to override the gas limit and other gas params in test steps

* Update the CI to accept resolc URL

* Update heapsize of resolc
2026-01-26 21:16:49 +00:00
Omar 340c2667e1 Override gas limit for tests (#231)
* Add the ability to override the gas limit and other gas params in test steps

* Update the CI to accept resolc URL
2026-01-26 21:15:29 +00:00
Omar 97d0cf1d1c Allow Targets in Test Cases (#229)
* Add an optional `targets` field to cases.

This PR adds an optional `targets` field to cases which takes precedence
over that same field in the `Metadata`. The hope from this is to allow
us to limit specific tests so that they only run on specific platforms.

* Update the resolc tests submodule

* Update the default resolc version to use

* Update the default heap-size and stack-size in the cli

* Update the report processor
2026-01-22 13:33:01 +00:00
Omar 3c9f845287 Update the commit hash of the tests (#230) 2026-01-22 12:41:31 +00:00
Marian Radu 87758b4aff Skip contracts that have no bytecode (e.g., abstract contracts) (#228)
* Skip contracts that have no bytecode (e.g., abstract contracts)

* Update Cargo.lock
2026-01-19 15:04:53 +00:00
Marian Radu 9491263857 Add PVM heap-size and stack-size configuration parameters for resolc (#226)
* Update revive compiler dependencies

* Inject polkavm settings into resolc standard JSON input

* Add PVM heap-size and stack-size configuration for resolc
2026-01-19 10:05:37 +00:00
Omar 9d1c71756f Update Report Processor (#227)
* Add a report processing tool

* Add expectations tests to the CI action

* Fix an issue with CI

* Fix CI

* Fix the path of the workdir in CI

* Fix CI issue with the paths

* Update the format of the expectations file

* Update report processor to only include failures
2026-01-16 16:21:36 +00:00
14 changed files with 1300 additions and 1144 deletions
@@ -18,10 +18,9 @@ inputs:
required: false
default: "main"
type: string
resolc-version:
description: "The version of resolc to install and use in tests."
required: false
default: "0.5.0"
resolc-path:
description: "The path of the resolc compiler."
required: true
type: string
use-compilation-caches:
description: "Controls if the compilation caches will be used for the test run or not."
@@ -29,6 +28,10 @@ inputs:
default: true
type: boolean
# Test Execution Arguments
# TODO: We need a better way for people to pass arguments to retester. This way is not very good
# because we need to add support for each argument separately and support defaults and all of that
# perhaps having people pass in a JSON String of the arguments is the better long term solution
# for this.
platform:
description: "The identifier of the platform to run the tests on (e.g., geth-evm-solc, revive-dev-node-revm-solc)"
required: true
@@ -56,16 +59,6 @@ runs:
ref: ${{ inputs['revive-differential-tests-ref'] }}
path: revive-differential-tests
submodules: recursive
- name: Installing the Latest Resolc
shell: bash
if: ${{ runner.os == 'Linux' && runner.arch == 'X64' }}
run: |
VERSION="${{ inputs['resolc-version'] }}"
ASSET_URL="https://github.com/paritytech/revive/releases/download/v$VERSION/resolc-x86_64-unknown-linux-musl"
echo "Downloading resolc v$VERSION from $ASSET_URL"
curl -Lsf --show-error -o resolc "$ASSET_URL"
chmod +x resolc
./resolc --version
- name: Installing Retester
shell: bash
run: ${{ inputs['cargo-command'] }} install --locked --path revive-differential-tests/crates/core
@@ -120,11 +113,12 @@ runs:
--revive-dev-node.path ${{ inputs['polkadot-sdk-path'] }}/target/release/revive-dev-node \
--eth-rpc.path ${{ inputs['polkadot-sdk-path'] }}/target/release/eth-rpc \
--polkadot-omni-node.path ${{ inputs['polkadot-sdk-path'] }}/target/release/polkadot-omni-node \
--resolc.path ./resolc \
--resolc.path ${{ inputs['resolc-path'] }} \
--resolc.heap-size 500000 \
"${OMNI_ARGS[@]}" || true
- name: Generate the expectation file
shell: bash
run: report-processor generate-expectations-file --report-path ./workdir/report.json --output-path ./workdir/expectations.json --remove-prefix ./revive-differential-tests/resolc-compiler-tests
run: report-processor generate-expectations-file --report-path ./workdir/report.json --output-path ./workdir/expectations.json --remove-prefix ./revive-differential-tests/resolc-compiler-tests --include-status failed
- name: Upload the Report to the CI
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
Generated
+1065 -1076
View File
File diff suppressed because it is too large Load Diff
+3 -3
View File
@@ -74,9 +74,9 @@ indexmap = { version = "2.10.0", default-features = false }
itertools = { version = "0.14.0" }
# revive compiler
revive-solc-json-interface = { git = "https://github.com/paritytech/revive", rev = "3389865af7c3ff6f29a586d82157e8bc573c1a8e" }
revive-common = { git = "https://github.com/paritytech/revive", rev = "3389865af7c3ff6f29a586d82157e8bc573c1a8e" }
revive-differential = { git = "https://github.com/paritytech/revive", rev = "3389865af7c3ff6f29a586d82157e8bc573c1a8e" }
revive-solc-json-interface = { version = "0.5.0" }
revive-common = { version = "0.3.0" }
revive-differential = { version = "0.3.0" }
zombienet-sdk = { git = "https://github.com/paritytech/zombienet-sdk.git", rev = "891f6554354ce466abd496366dbf8b4f82141241" }
+6
View File
@@ -32,8 +32,12 @@ pub enum PlatformIdentifier {
/// The Lighthouse Go-ethereum reference full node EVM implementation with the solc compiler.
LighthouseGethEvmSolc,
/// The revive dev node with the PolkaVM backend with the resolc compiler.
#[strum(serialize = "revive-dev-node-polkavm-resolc", serialize = "pez-revive-dev-node-polkavm-resolc")]
#[serde(alias = "pez-revive-dev-node-polkavm-resolc")]
ReviveDevNodePolkavmResolc,
/// The revive dev node with the REVM backend with the solc compiler.
#[strum(serialize = "revive-dev-node-revm-solc", serialize = "pez-revive-dev-node-revm-solc")]
#[serde(alias = "pez-revive-dev-node-revm-solc")]
ReviveDevNodeRevmSolc,
/// A zombienet based Substrate/Polkadot node with the PolkaVM backend with the resolc compiler.
ZombienetPolkavmResolc,
@@ -98,6 +102,8 @@ pub enum NodeIdentifier {
/// The go-ethereum node implementation.
LighthouseGeth,
/// The revive dev node implementation.
#[strum(serialize = "revive-dev-node", serialize = "pez-revive-dev-node")]
#[serde(alias = "pez-revive-dev-node")]
ReviveDevNode,
/// A zombienet spawned nodes
Zombienet,
+73 -25
View File
@@ -12,9 +12,13 @@ use dashmap::DashMap;
use revive_dt_common::types::VersionOrRequirement;
use revive_dt_config::{ResolcConfiguration, SolcConfiguration, WorkingDirectoryConfiguration};
use revive_solc_json_interface::{
SolcStandardJsonInput, SolcStandardJsonInputLanguage, SolcStandardJsonInputSettings,
SolcStandardJsonInputSettingsOptimizer, SolcStandardJsonInputSettingsSelection,
SolcStandardJsonOutput,
PolkaVMDefaultHeapMemorySize, PolkaVMDefaultStackMemorySize, SolcStandardJsonInput,
SolcStandardJsonInputLanguage, SolcStandardJsonInputSettings,
SolcStandardJsonInputSettingsLibraries, SolcStandardJsonInputSettingsMetadata,
SolcStandardJsonInputSettingsOptimizer, SolcStandardJsonInputSettingsPolkaVM,
SolcStandardJsonInputSettingsPolkaVMMemory, SolcStandardJsonInputSettingsSelection,
SolcStandardJsonOutput, standard_json::input::settings::optimizer::Optimizer,
standard_json::input::settings::optimizer::details::Details,
};
use tracing::{Span, field::display};
@@ -25,6 +29,7 @@ use crate::{
use alloy::json_abi::JsonAbi;
use anyhow::{Context as _, Result};
use semver::Version;
use std::collections::BTreeSet;
use tokio::{io::AsyncWriteExt, process::Command as AsyncCommand};
/// A wrapper around the `resolc` binary, emitting PVM-compatible bytecode.
@@ -37,6 +42,10 @@ struct ResolcInner {
solc: Solc,
/// Path to the `resolc` executable
resolc_path: PathBuf,
/// The PVM heap size in bytes.
pvm_heap_size: u32,
/// The PVM stack size in bytes.
pvm_stack_size: u32,
}
impl Resolc {
@@ -63,10 +72,35 @@ impl Resolc {
Self(Arc::new(ResolcInner {
solc,
resolc_path: resolc_configuration.path.clone(),
pvm_heap_size: resolc_configuration
.heap_size
.unwrap_or(PolkaVMDefaultHeapMemorySize),
pvm_stack_size: resolc_configuration
.stack_size
.unwrap_or(PolkaVMDefaultStackMemorySize),
}))
})
.clone())
}
fn polkavm_settings(&self) -> SolcStandardJsonInputSettingsPolkaVM {
SolcStandardJsonInputSettingsPolkaVM::new(
Some(SolcStandardJsonInputSettingsPolkaVMMemory::new(
Some(self.0.pvm_heap_size),
Some(self.0.pvm_stack_size),
)),
false,
)
}
fn inject_polkavm_settings(&self, input: &SolcStandardJsonInput) -> Result<serde_json::Value> {
let mut input_value = serde_json::to_value(input)
.context("Failed to serialize Standard JSON input for resolc")?;
if let Some(settings) = input_value.get_mut("settings") {
settings["polkavm"] = serde_json::to_value(self.polkavm_settings()).unwrap();
}
Ok(input_value)
}
}
impl SolidityCompiler for Resolc {
@@ -121,8 +155,8 @@ impl SolidityCompiler for Resolc {
.collect(),
settings: SolcStandardJsonInputSettings {
evm_version,
libraries: Some(
libraries
libraries: SolcStandardJsonInputSettingsLibraries {
inner: libraries
.into_iter()
.map(|(source_code, libraries_map)| {
(
@@ -136,23 +170,29 @@ impl SolidityCompiler for Resolc {
)
})
.collect(),
),
remappings: None,
output_selection: Some(SolcStandardJsonInputSettingsSelection::new_required()),
},
remappings: BTreeSet::<String>::new(),
output_selection: SolcStandardJsonInputSettingsSelection::new_required(),
via_ir: Some(true),
optimizer: SolcStandardJsonInputSettingsOptimizer::new(
optimization
.unwrap_or(ModeOptimizerSetting::M0)
.optimizations_enabled(),
None,
&Version::new(0, 0, 0),
false,
Optimizer::default_mode(),
Details::disabled(&Version::new(0, 0, 0)),
),
metadata: None,
polkavm: None,
polkavm: self.polkavm_settings(),
metadata: SolcStandardJsonInputSettingsMetadata::default(),
detect_missing_libraries: false,
},
};
Span::current().record("json_in", display(serde_json::to_string(&input).unwrap()));
// Manually inject polkavm settings since it's marked skip_serializing in the upstream crate
let std_input_json = self.inject_polkavm_settings(&input)?;
Span::current().record(
"json_in",
display(serde_json::to_string(&std_input_json).unwrap()),
);
let path = &self.0.resolc_path;
let mut command = AsyncCommand::new(path);
@@ -181,8 +221,9 @@ impl SolidityCompiler for Resolc {
.with_context(|| format!("Failed to spawn resolc at {}", path.display()))?;
let stdin_pipe = child.stdin.as_mut().expect("stdin must be piped");
let serialized_input = serde_json::to_vec(&input)
let serialized_input = serde_json::to_vec(&std_input_json)
.context("Failed to serialize Standard JSON input for resolc")?;
stdin_pipe
.write_all(&serialized_input)
.await
@@ -228,7 +269,7 @@ impl SolidityCompiler for Resolc {
// Detecting if the compiler output contained errors and reporting them through logs and
// errors instead of returning the compiler output that might contain errors.
for error in parsed.errors.iter().flatten() {
for error in parsed.errors.iter() {
if error.severity == "error" {
tracing::error!(
?error,
@@ -240,12 +281,12 @@ impl SolidityCompiler for Resolc {
}
}
let Some(contracts) = parsed.contracts else {
if parsed.contracts.is_empty() {
anyhow::bail!("Unexpected error - resolc output doesn't have a contracts section");
};
}
let mut compiler_output = CompilerOutput::default();
for (source_path, contracts) in contracts.into_iter() {
for (source_path, contracts) in parsed.contracts.into_iter() {
let src_for_msg = source_path.clone();
let source_path = PathBuf::from(source_path)
.canonicalize()
@@ -253,15 +294,22 @@ impl SolidityCompiler for Resolc {
let map = compiler_output.contracts.entry(source_path).or_default();
for (contract_name, contract_information) in contracts.into_iter() {
let bytecode = contract_information
let Some(bytecode) = contract_information
.evm
.and_then(|evm| evm.bytecode.clone())
.context("Unexpected - Contract compiled with resolc has no bytecode")?;
else {
tracing::debug!(
"Skipping abstract or interface contract {} - no bytecode",
contract_name
);
continue;
};
let abi = {
let metadata = contract_information
.metadata
.as_ref()
.context("No metadata found for the contract")?;
let metadata = &contract_information.metadata;
if metadata.is_null() {
anyhow::bail!("No metadata found for the contract");
}
let solc_metadata_str = match metadata {
serde_json::Value::String(solc_metadata_str) => {
solc_metadata_str.as_str()
+12
View File
@@ -800,6 +800,18 @@ pub struct ResolcConfiguration {
/// provided in the user's $PATH.
#[clap(id = "resolc.path", long = "resolc.path", default_value = "resolc")]
pub path: PathBuf,
/// Specifies the PVM heap size in bytes.
///
/// If unspecified, the revive compiler default is used
#[clap(id = "resolc.heap-size", long = "resolc.heap-size")]
pub heap_size: Option<u32>,
/// Specifies the PVM stack size in bytes.
///
/// If unspecified, the revive compiler default is used
#[clap(id = "resolc.stack-size", long = "resolc.stack-size")]
pub stack_size: Option<u32>,
}
/// A set of configuration parameters for Polkadot Parachain.
+9 -9
View File
@@ -482,15 +482,16 @@ where
.context("Failed to find deployment receipt for constructor call"),
Method::Fallback | Method::FunctionName(_) => {
let resolver = self.platform_information.node.resolver().await?;
let tx = match step
let mut tx = step
.as_transaction(resolver.as_ref(), self.default_resolution_context())
.await
{
Ok(tx) => tx,
Err(err) => {
return Err(err);
}
};
.await?;
let gas_overrides = step
.gas_overrides
.get(&self.platform_information.platform.platform_identifier())
.copied()
.unwrap_or_default();
gas_overrides.apply_to::<Ethereum>(&mut tx);
self.platform_information.node.execute_transaction(tx).await
}
@@ -911,7 +912,6 @@ where
.get(contract_instance)
{
info!(
%address,
"Contract instance already deployed."
);
+15 -8
View File
@@ -223,17 +223,24 @@ impl<'a> TestDefinition<'a> {
/// Checks if the platforms all support the desired targets in the metadata file.
fn check_target_compatibility(&self) -> TestCheckFunctionResult {
let mut error_map = indexmap! {
"test_desired_targets" => json!(self.metadata.targets.as_ref()),
// The case targets takes presence over the metadata targets.
let Some(targets) = self
.case
.targets
.as_ref()
.or(self.metadata.targets.as_ref())
else {
return Ok(());
};
let mut error_map = indexmap! {
"test_desired_targets" => json!(targets),
};
let mut is_allowed = true;
for (_, platform_information) in self.platforms.iter() {
let is_allowed_for_platform = match self.metadata.targets.as_ref() {
None => true,
Some(required_vm_identifiers) => {
required_vm_identifiers.contains(&platform_information.platform.vm_identifier())
}
};
let is_allowed_for_platform =
targets.contains(&platform_information.platform.vm_identifier());
is_allowed &= is_allowed_for_platform;
error_map.insert(
platform_information.platform.platform_identifier().into(),
+8 -2
View File
@@ -1,16 +1,22 @@
use alloy::primitives::Address;
use alloy::primitives::{Address, map::HashSet};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use revive_dt_common::{
macros::define_wrapper_type,
types::{Mode, ParsedMode},
types::{Mode, ParsedMode, VmIdentifier},
};
use crate::steps::*;
#[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq, JsonSchema)]
pub struct Case {
/// An optional vector of targets that this Metadata file's cases can be executed on. As an
/// example, if we wish for the metadata file's cases to only be run on PolkaVM then we'd
/// specify a target of "PolkaVM" in here.
#[serde(skip_serializing_if = "Option::is_none")]
pub targets: Option<HashSet<VmIdentifier>>,
/// An optional name of the test case.
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
+2 -1
View File
@@ -8,6 +8,7 @@ use std::{
str::FromStr,
};
use alloy::primitives::map::HashSet;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
@@ -83,7 +84,7 @@ pub struct Metadata {
/// example, if we wish for the metadata file's cases to only be run on PolkaVM then we'd
/// specify a target of "PolkaVM" in here.
#[serde(skip_serializing_if = "Option::is_none")]
pub targets: Option<Vec<VmIdentifier>>,
pub targets: Option<HashSet<VmIdentifier>>,
/// A vector of the test cases and workloads contained within the metadata file. This is their
/// primary description.
+63
View File
@@ -1,6 +1,7 @@
use std::{collections::HashMap, fmt::Display, str::FromStr};
use alloy::hex::ToHexExt;
use alloy::network::Network;
use alloy::primitives::{FixedBytes, utils::parse_units};
use alloy::{
eips::BlockNumberOrTag,
@@ -11,6 +12,7 @@ use alloy::{
};
use anyhow::Context as _;
use futures::{FutureExt, StreamExt, TryFutureExt, TryStreamExt, stream};
use revive_dt_common::types::PlatformIdentifier;
use schemars::JsonSchema;
use semver::VersionReq;
use serde::{Deserialize, Serialize};
@@ -152,6 +154,11 @@ pub struct FunctionCallStep {
/// during the execution.
#[serde(skip_serializing_if = "Option::is_none")]
pub variable_assignments: Option<VariableAssignments>,
/// Allows for the test to set a specific value for the various gas parameter for each one of
/// the platforms we support. This is ignored for steps that perform contract deployments.
#[serde(default, skip_serializing_if = "HashMap::is_empty")]
pub gas_overrides: HashMap<PlatformIdentifier, GasOverrides>,
}
/// This represents a balance assertion step where the framework needs to query the balance of some
@@ -965,6 +972,62 @@ impl<'de> Deserialize<'de> for EtherValue {
}
}
#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, Eq, PartialEq, JsonSchema)]
pub struct GasOverrides {
#[serde(skip_serializing_if = "Option::is_none")]
pub gas_limit: Option<u64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub gas_price: Option<u128>,
#[serde(skip_serializing_if = "Option::is_none")]
pub max_fee_per_gas: Option<u128>,
#[serde(skip_serializing_if = "Option::is_none")]
pub max_priority_fee_per_gas: Option<u128>,
}
impl GasOverrides {
pub fn new() -> Self {
Default::default()
}
pub fn with_gas_limit(mut self, value: impl Into<Option<u64>>) -> Self {
self.gas_limit = value.into();
self
}
pub fn with_gas_price(mut self, value: impl Into<Option<u128>>) -> Self {
self.gas_price = value.into();
self
}
pub fn with_max_fee_per_gas(mut self, value: impl Into<Option<u128>>) -> Self {
self.max_fee_per_gas = value.into();
self
}
pub fn with_max_priority_fee_per_gas(mut self, value: impl Into<Option<u128>>) -> Self {
self.max_priority_fee_per_gas = value.into();
self
}
pub fn apply_to<N: Network>(&self, transaction_request: &mut N::TransactionRequest) {
if let Some(gas_limit) = self.gas_limit {
transaction_request.set_gas_limit(gas_limit);
}
if let Some(gas_price) = self.gas_price {
transaction_request.set_gas_price(gas_price);
}
if let Some(max_fee_per_gas) = self.max_fee_per_gas {
transaction_request.set_max_fee_per_gas(max_fee_per_gas);
}
if let Some(max_priority_fee_per_gas) = self.max_priority_fee_per_gas {
transaction_request.set_max_priority_fee_per_gas(max_priority_fee_per_gas)
}
}
}
#[cfg(test)]
mod tests {
+1
View File
@@ -18,6 +18,7 @@ revive-dt-common = { workspace = true }
anyhow = { workspace = true }
clap = { workspace = true }
strum = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
+32 -3
View File
@@ -1,6 +1,6 @@
use std::{
borrow::Cow,
collections::{BTreeMap, BTreeSet},
collections::{BTreeMap, BTreeSet, HashSet},
fmt::Display,
fs::{File, OpenOptions},
ops::{Deref, DerefMut},
@@ -9,11 +9,12 @@ use std::{
};
use anyhow::{Context as _, Error, Result, bail};
use clap::Parser;
use clap::{Parser, ValueEnum};
use serde::{Deserialize, Serialize, de::DeserializeOwned};
use revive_dt_common::types::{Mode, ParsedTestSpecifier};
use revive_dt_report::{Report, TestCaseStatus};
use strum::EnumString;
fn main() -> Result<()> {
let cli = Cli::try_parse().context("Failed to parse the CLI arguments")?;
@@ -23,11 +24,14 @@ fn main() -> Result<()> {
report_path,
output_path: output_file,
remove_prefix,
include_status,
} => {
let remove_prefix = remove_prefix
.into_iter()
.map(|path| path.canonicalize().context("Failed to canonicalize path"))
.collect::<Result<Vec<_>>>()?;
let include_status =
include_status.map(|value| value.into_iter().collect::<HashSet<_>>());
let expectations = report_path
.execution_information
@@ -73,6 +77,12 @@ fn main() -> Result<()> {
Status::from(status),
)
})
.filter(|(_, status)| {
include_status
.as_ref()
.map(|allowed_status| allowed_status.contains(status))
.unwrap_or(true)
})
.collect::<Expectations>();
let output_file = OpenOptions::new()
@@ -142,6 +152,11 @@ pub enum Cli {
/// Prefix paths to remove from the paths in the final expectations file.
#[clap(long)]
remove_prefix: Vec<PathBuf>,
/// Controls which test case statuses are included in the generated expectations file. If
/// nothing is specified then it will include all of the test case statuses.
#[clap(long)]
include_status: Option<Vec<Status>>,
},
/// Compares two expectation files to ensure that they match each other.
@@ -156,7 +171,21 @@ pub enum Cli {
},
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
#[derive(
Clone,
Copy,
Debug,
PartialEq,
Eq,
PartialOrd,
Ord,
Hash,
Serialize,
Deserialize,
ValueEnum,
EnumString,
)]
#[strum(serialize_all = "kebab-case")]
pub enum Status {
Succeeded,
Failed,