Compare commits

...

13 Commits

Author SHA1 Message Date
Omar Abdulla 270060b043 Update the number of cached blocks in revive-dev-node 2025-11-25 15:05:31 +03:00
Omar Abdulla e53da99f73 Update the commit hash of the tests 2025-11-24 18:15:27 +03:00
Omar a6e4932a08 Upload code when initializing the driver (#212) 2025-11-12 09:56:53 +00:00
Omar 06c2e023a9 Cleanup Repository & Fix CI (#211)
* Move all scripts to a single directory

* Switch to cargo-make

* Remove the polkadot-sdk from the submodules

* WIP: update the CI

* Add other jobs to CI

* Overhaul the polkadot-sdk caching step

* Add a testing step

* Fix the CI

* Install clang and llvm dependencies

* Update the version of clang

* Install llvm on macos

* Fix ci

* Fix ci

* Use 1.90.0 version of rust for the polkadot-sdk

* Fix CI

* Fix CI

* Fix CI

* Fix CI

* Fix CI

* Allow warnings

* Update runners

* Update runners

* Simplify CI

* Update MacOS runner

* Fix zombienet tests

* Make cache step faster
2025-11-10 23:08:36 +00:00
Omar 347dcb4488 Increase eth-rpc cache size (#210) 2025-11-10 07:05:11 +00:00
Omar f9a63a5641 feature/bump resolc compiler tests (#209)
* Bump the version of resolc compiler tests

* Bump the version of resolc compiler tests

* Reduce the timeout for transactions to 2 minutes

* Bump resolc compiler tests
2025-11-06 04:24:47 +00:00
Omar fb009f65c1 Bump resolc compiler tests (#208)
* Bump the version of resolc compiler tests

* Bump the version of resolc compiler tests

* Reduce the timeout for transactions to 2 minutes
2025-11-06 03:42:20 +00:00
Omar dff4c25e24 Bump the version of resolc compiler tests (#207) 2025-11-06 02:56:38 +00:00
Omar e433d93cbf Limit the solc version to a max of 0.8.30 (#206) 2025-11-04 18:58:14 +00:00
Omar 408754e8fb Remove the cwd setting from the export-chainspec command (#205) 2025-11-04 03:30:39 +00:00
Omar 59bfffe5fe Fix the working directory path canonicalization (#204)
* Update the commit hash of resolc compiler tests

* Fix an issue with file errors in substrate export-chainspec

* Update the resolc compiler tests

* Fix the working directory canonicalization
2025-11-04 03:13:48 +00:00
Omar 380ea693be Fix an error in substrate export chainspec (#203)
* Update the commit hash of resolc compiler tests

* Fix an issue with file errors in substrate export-chainspec

* Update the resolc compiler tests
2025-11-04 02:25:42 +00:00
Omar d02152b565 Update version of tests (#202)
* Update the commit hash of resolc compiler tests

* Update the version of tests
2025-11-02 23:48:23 +00:00
20 changed files with 282 additions and 246 deletions
+137 -169
@@ -18,136 +18,95 @@ env:
   POLKADOT_VERSION: polkadot-stable2506-2
 jobs:
-  cache-polkadot:
-    name: Build and cache Polkadot binaries on ${{ matrix.os }}
-    runs-on: ${{ matrix.os }}
-    strategy:
-      matrix:
-        os: [ubuntu-24.04, macos-14]
+  machete:
+    name: Check for Unneeded Dependencies
+    runs-on: ubuntu-24.04
+    env:
+      SCCACHE_GHA_ENABLED: "true"
+      RUSTC_WRAPPER: "sccache"
     steps:
-      - name: Checkout repo and submodules
+      - name: Checkout This Repository
         uses: actions/checkout@v4
         with:
           submodules: recursive
-      - name: Install dependencies (Linux)
-        if: matrix.os == 'ubuntu-24.04'
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y protobuf-compiler clang libclang-dev
-          rustup target add wasm32-unknown-unknown
-          rustup component add rust-src
-      - name: Install dependencies (macOS)
-        if: matrix.os == 'macos-14'
-        run: |
-          brew install protobuf
-          rustup target add wasm32-unknown-unknown
-          rustup component add rust-src
-      - name: Cache binaries
-        id: cache
-        uses: actions/cache@v3
+      - name: Run Sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+      - name: Install the Rust Toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+      - name: Install the Cargo Make Binary
+        uses: davidB/rust-cargo-make@v1
+      - name: Run Cargo Machete
+        run: cargo make machete
+  check-fmt:
+    name: Check Formatting
+    runs-on: ubuntu-24.04
+    env:
+      SCCACHE_GHA_ENABLED: "true"
+      RUSTC_WRAPPER: "sccache"
+    steps:
+      - name: Checkout This Repository
+        uses: actions/checkout@v4
         with:
-          path: |
-            ~/.cargo/bin/revive-dev-node
-            ~/.cargo/bin/eth-rpc
-          key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}-with-dev-node
-      - name: Build revive-dev-node
-        if: steps.cache.outputs.cache-hit != 'true'
-        run: |
-          cd polkadot-sdk
-          cargo install --locked --force --profile=production --path substrate/frame/revive/dev-node/node --bin revive-dev-node
-      - name: Build eth-rpc
-        if: steps.cache.outputs.cache-hit != 'true'
-        run: |
-          cd polkadot-sdk
-          cargo install --path substrate/frame/revive/rpc --bin eth-rpc
-      - name: Cache downloaded Polkadot binaries
-        id: cache-polkadot
-        uses: actions/cache@v3
+          submodules: recursive
+      - name: Run Sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+      - name: Install the Rust Toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+      - name: Install the Cargo Make Binary
+        uses: davidB/rust-cargo-make@v1
+      - name: Run Cargo Formatter
+        run: cargo make fmt-check
+  check-clippy:
+    name: Check Clippy Lints
+    runs-on: ubuntu-24.04
+    env:
+      SCCACHE_GHA_ENABLED: "true"
+      RUSTC_WRAPPER: "sccache"
+    steps:
+      - name: Checkout This Repository
+        uses: actions/checkout@v4
         with:
-          path: |
-            ~/polkadot-cache/polkadot
-            ~/polkadot-cache/polkadot-execute-worker
-            ~/polkadot-cache/polkadot-prepare-worker
-            ~/polkadot-cache/polkadot-parachain
-          key: polkadot-downloaded-${{ matrix.os }}-${{ env.POLKADOT_VERSION }}
-      - name: Download Polkadot binaries on macOS
-        if: matrix.os == 'macos-14' && steps.cache-polkadot.outputs.cache-hit != 'true'
-        run: |
-          mkdir -p ~/polkadot-cache
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-aarch64-apple-darwin -o ~/polkadot-cache/polkadot
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-execute-worker-aarch64-apple-darwin -o ~/polkadot-cache/polkadot-execute-worker
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-prepare-worker-aarch64-apple-darwin -o ~/polkadot-cache/polkadot-prepare-worker
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-parachain-aarch64-apple-darwin -o ~/polkadot-cache/polkadot-parachain
-          chmod +x ~/polkadot-cache/*
-      - name: Download Polkadot binaries on Ubuntu
-        if: matrix.os == 'ubuntu-24.04' && steps.cache-polkadot.outputs.cache-hit != 'true'
-        run: |
-          mkdir -p ~/polkadot-cache
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot -o ~/polkadot-cache/polkadot
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-execute-worker -o ~/polkadot-cache/polkadot-execute-worker
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-prepare-worker -o ~/polkadot-cache/polkadot-prepare-worker
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-parachain -o ~/polkadot-cache/polkadot-parachain
-          chmod +x ~/polkadot-cache/*
-  ci:
-    name: CI on ${{ matrix.os }}
+          submodules: recursive
+      - name: Run Sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+      - name: Install the Rust Toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+      - name: Install the Cargo Make Binary
+        uses: davidB/rust-cargo-make@v1
+      - name: Run Cargo Clippy
+        run: cargo make clippy
+  test:
+    name: Unit Tests
+    runs-on: ${{ matrix.os }}
     needs: cache-polkadot
-    runs-on: ${{ matrix.os }}
     strategy:
       matrix:
         os: [ubuntu-24.04, macos-14]
+    env:
+      SCCACHE_GHA_ENABLED: "true"
+      RUSTC_WRAPPER: "sccache"
+      POLKADOT_SDK_COMMIT_HASH: "30cda2aad8612a10ff729d494acd9d5353294d63"
     steps:
-      - name: Checkout repo
+      - name: Checkout This Repository
         uses: actions/checkout@v4
         with:
           submodules: recursive
-      - name: Restore binaries from cache
-        uses: actions/cache@v3
-        with:
-          path: |
-            ~/.cargo/bin/revive-dev-node
-            ~/.cargo/bin/eth-rpc
-          key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}-with-dev-node
-      - name: Restore downloaded Polkadot binaries from cache
-        uses: actions/cache@v3
-        with:
-          path: |
-            ~/polkadot-cache/polkadot
-            ~/polkadot-cache/polkadot-execute-worker
-            ~/polkadot-cache/polkadot-prepare-worker
-            ~/polkadot-cache/polkadot-parachain
-          key: polkadot-downloaded-${{ matrix.os }}-${{ env.POLKADOT_VERSION }}
-      - name: Install Polkadot binaries
-        run: |
-          sudo cp ~/polkadot-cache/polkadot /usr/local/bin/
-          sudo cp ~/polkadot-cache/polkadot-execute-worker /usr/local/bin/
-          sudo cp ~/polkadot-cache/polkadot-prepare-worker /usr/local/bin/
-          sudo cp ~/polkadot-cache/polkadot-parachain /usr/local/bin/
-          sudo chmod +x /usr/local/bin/polkadot*
-      - name: Setup Rust toolchain
+      - name: Run Sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+      - name: Install the Rust Toolchain
         uses: actions-rust-lang/setup-rust-toolchain@v1
         with:
-          rustflags: ""
-      - name: Add wasm32 target and formatting
-        run: |
-          rustup target add wasm32-unknown-unknown
-          rustup component add rust-src rustfmt clippy
+          target: "wasm32-unknown-unknown"
+          components: "rust-src,rust-std"
+      - name: Install the Cargo Make Binary
+        uses: davidB/rust-cargo-make@v1
+      - name: Caching Step
+        uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cargo/bin/eth-rpc
+            ~/.cargo/bin/revive-dev-node
+          key: polkadot-binaries-${{ env.POLKADOT_SDK_COMMIT_HASH }}-${{ matrix.os }}
       - name: Install Geth on Ubuntu
         if: matrix.os == 'ubuntu-24.04'
         run: |
@@ -180,7 +139,6 @@ jobs:
           curl -sL https://github.com/paritytech/revive/releases/download/v0.3.0/resolc-x86_64-unknown-linux-musl -o resolc
           chmod +x resolc
           sudo mv resolc /usr/local/bin
       - name: Install Geth on macOS
         if: matrix.os == 'macos-14'
         run: |
@@ -192,69 +150,79 @@
           curl -sL https://github.com/paritytech/revive/releases/download/v0.3.0/resolc-universal-apple-darwin -o resolc
           chmod +x resolc
           sudo mv resolc /usr/local/bin
       - name: Install Kurtosis on macOS
         if: matrix.os == 'macos-14'
         run: brew install kurtosis-tech/tap/kurtosis-cli
       - name: Install Kurtosis on Ubuntu
         if: matrix.os == 'ubuntu-24.04'
         run: |
           echo "deb [trusted=yes] https://apt.fury.io/kurtosis-tech/ /" | sudo tee /etc/apt/sources.list.d/kurtosis.list
           sudo apt update
           sudo apt install kurtosis-cli
-      - name: Install cargo-machete
-        uses: clechasseur/rs-cargo@v2
+      - name: Run Tests
+        run: cargo make test
+  cache-polkadot:
+    name: Build and Cache Polkadot Binaries on ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-24.04, macos-14]
+    env:
+      SCCACHE_GHA_ENABLED: "true"
+      RUSTC_WRAPPER: "sccache"
+      RUSTFLAGS: "-Awarnings"
+      POLKADOT_SDK_COMMIT_HASH: "30cda2aad8612a10ff729d494acd9d5353294d63"
+    steps:
+      - name: Caching Step
+        id: cache-step
+        uses: actions/cache@v4
         with:
-          command: install
-          args: cargo-machete@0.7.0
-      - name: Machete
-        run: cargo machete crates
-      - name: Format
-        run: make format
-      - name: Clippy
-        run: make clippy
-      - name: Check revive-dev-node version
-        run: revive-dev-node --version
-      - name: Check eth-rpc version
-        run: eth-rpc --version
-      - name: Check resolc version
-        run: resolc --version
-      - name: Check polkadot version
-        run: polkadot --version
-      - name: Check polkadot-parachain version
-        run: polkadot-parachain --version
-      - name: Check polkadot-execute-worker version
-        run: polkadot-execute-worker --version
-      - name: Check polkadot-prepare-worker version
-        run: polkadot-prepare-worker --version
-      - name: Test Formatting
-        run: make format
-      - name: Test Clippy
-        run: make clippy
-      - name: Test Machete
-        run: make machete
-      - name: Unit Tests
-        if: matrix.os == 'ubuntu-24.04'
-        run: cargo test --workspace -- --nocapture
-      # We can't install docker in the MacOS image used in CI and therefore we need to skip the
-      # Kurtosis and lighthouse related tests when running the CI on MacOS.
-      - name: Unit Tests
-        if: matrix.os == 'macos-14'
+          path: |
+            ~/.cargo/bin/eth-rpc
+            ~/.cargo/bin/revive-dev-node
+          key: polkadot-binaries-${{ env.POLKADOT_SDK_COMMIT_HASH }}-${{ matrix.os }}
+      - name: Checkout the Polkadot SDK Repository
+        uses: actions/checkout@v4
+        if: steps.cache-step.outputs.cache-hit != 'true'
+        with:
+          repository: paritytech/polkadot-sdk
+          ref: ${{ env.POLKADOT_SDK_COMMIT_HASH }}
+          submodules: recursive
+      - name: Run Sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+        if: steps.cache-step.outputs.cache-hit != 'true'
+      - name: Install the Rust Toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+        if: steps.cache-step.outputs.cache-hit != 'true'
+        with:
+          target: "wasm32-unknown-unknown"
+          components: "rust-src"
+          toolchain: "1.90.0"
+      - name: Install dependencies (Linux)
+        if: matrix.os == 'ubuntu-24.04' && steps.cache-step.outputs.cache-hit != 'true'
         run: |
-          cargo test --workspace -- --nocapture --skip lighthouse_geth::tests::
+          sudo apt-get update
+          sudo apt-get install -y protobuf-compiler clang libclang-dev
+      - name: Install dependencies (macOS)
+        if: matrix.os == 'macos-14' && steps.cache-step.outputs.cache-hit != 'true'
+        run: |
+          brew install protobuf llvm
+          LLVM_PREFIX="$(brew --prefix llvm)"
+          echo "LDFLAGS=-L${LLVM_PREFIX}/lib" >> "$GITHUB_ENV"
+          echo "CPPFLAGS=-I${LLVM_PREFIX}/include" >> "$GITHUB_ENV"
+          echo "CMAKE_PREFIX_PATH=${LLVM_PREFIX}" >> "$GITHUB_ENV"
+          echo "LIBCLANG_PATH=${LLVM_PREFIX}/lib" >> "$GITHUB_ENV"
+          echo "DYLD_FALLBACK_LIBRARY_PATH=${LLVM_PREFIX}/lib" >> "$GITHUB_ENV"
+          echo "${LLVM_PREFIX}/bin" >> "$GITHUB_PATH"
+      - name: Build Polkadot Dependencies
+        if: steps.cache-step.outputs.cache-hit != 'true'
+        run: |
+          cargo build \
+            --locked \
+            --profile production \
+            --package revive-dev-node \
+            --package pallet-revive-eth-rpc;
+          mv ./target/production/revive-dev-node ~/.cargo/bin
+          mv ./target/production/eth-rpc ~/.cargo/bin
+          chmod +x ~/.cargo/bin/*
+2
@@ -3,6 +3,7 @@
 .DS_Store
 node_modules
 /*.json
+*.sh
 # We do not want to commit any log files that we produce from running the code locally so this is
 # added to the .gitignore file.
@@ -13,3 +14,4 @@ workdir
 !/schema.json
 !/dev-genesis.json
+!/scripts/*
-3
@@ -1,6 +1,3 @@
-[submodule "polkadot-sdk"]
-	path = polkadot-sdk
-	url = https://github.com/paritytech/polkadot-sdk.git
 [submodule "resolc-compiler-tests"]
 	path = resolc-compiler-tests
 	url = https://github.com/paritytech/resolc-compiler-tests
Generated
+1
@@ -5657,6 +5657,7 @@ dependencies = [
 "semver 1.0.26",
 "serde",
 "serde_json",
+"subxt 0.44.0",
 "tokio",
 "tracing",
 "tracing-appender",
-15
@@ -1,15 +0,0 @@
.PHONY: format clippy test machete
format:
cargo fmt --all -- --check
clippy:
cargo clippy --all-features --workspace -- --deny warnings
machete:
cargo install cargo-machete
cargo machete crates
test: format clippy machete
cargo test --workspace -- --nocapture
+21
@@ -0,0 +1,21 @@
+[config]
+default_to_workspace = false
+
+[tasks.machete]
+command = "cargo"
+args = ["machete", "crates"]
+install_crate = "cargo-machete"
+
+[tasks.fmt-check]
+command = "cargo"
+args = ["fmt", "--all", "--", "--check"]
+install_crate = "rustfmt"
+
+[tasks.clippy]
+command = "cargo"
+args = ["clippy", "--all-features", "--workspace", "--", "--deny", "warnings"]
+install_crate = "clippy"
+
+[tasks.test]
+command = "cargo"
+args = ["test", "--workspace", "--", "--nocapture"]
Binary file not shown.
+5 -1
@@ -16,6 +16,7 @@ use alloy::{
     primitives::{B256, FixedBytes, U256},
     signers::local::PrivateKeySigner,
 };
+use anyhow::Context as _;
 use clap::{Parser, ValueEnum, ValueHint};
 use revive_dt_common::types::{ParsedTestSpecifier, PlatformIdentifier};
 use semver::Version;
@@ -1079,7 +1080,10 @@ impl FromStr for WorkingDirectoryConfiguration {
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         match s {
             "" => Ok(Default::default()),
-            _ => Ok(Self::Path(PathBuf::from(s))),
+            _ => PathBuf::from(s)
+                .canonicalize()
+                .context("Failed to canonicalize the working directory path")
+                .map(Self::Path),
         }
     }
 }
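Note on the change above: Path::canonicalize only succeeds for paths that already exist, so a mistyped working directory now fails at argument-parsing time rather than later in the run. A minimal standalone sketch of the same parsing pattern (the WorkingDirectory enum, its Temporary variant, and the error text are illustrative stand-ins rather than the project's actual definitions, and the example assumes a Unix-like system where /tmp exists):

use std::path::PathBuf;
use std::str::FromStr;

use anyhow::Context as _;

// Stand-in for the crate's WorkingDirectoryConfiguration; only the parsing
// behaviour shown in the hunk above is reproduced here.
enum WorkingDirectory {
    Temporary,
    Path(PathBuf),
}

impl Default for WorkingDirectory {
    fn default() -> Self {
        Self::Temporary
    }
}

impl FromStr for WorkingDirectory {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "" => Ok(Default::default()),
            // canonicalize resolves symlinks and relative segments, and it
            // errors out if the directory does not exist.
            _ => PathBuf::from(s)
                .canonicalize()
                .context("Failed to canonicalize the working directory path")
                .map(Self::Path),
        }
    }
}

fn main() -> anyhow::Result<()> {
    let dir: WorkingDirectory = "/tmp".parse()?;
    if let WorkingDirectory::Path(path) = dir {
        // Prints the absolute, symlink-free form of the configured directory.
        println!("{}", path.display());
    }
    Ok(())
}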
+1
@@ -37,6 +37,7 @@ schemars = { workspace = true }
 semver = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
+subxt = { workspace = true }
 [lints]
 workspace = true
@@ -127,6 +127,8 @@ where
             .inspect_err(|err| error!(?err, "Pre-linking compilation failed"))
             .context("Failed to produce the pre-linking compiled contracts")?;
+        let deployer_address = self.test_definition.case.deployer_address();
         let mut deployed_libraries = None::<HashMap<_, _>>;
         let mut contract_sources = self
             .test_definition
@@ -159,23 +161,6 @@ where
             let code = alloy::hex::decode(code)?;
-            // Getting the deployer address from the cases themselves. This is to ensure
-            // that we're doing the deployments from different accounts and therefore we're
-            // not slowed down by the nonce.
-            let deployer_address = self
-                .test_definition
-                .case
-                .steps
-                .iter()
-                .filter_map(|step| match step {
-                    Step::FunctionCall(input) => input.caller.as_address().copied(),
-                    Step::BalanceAssertion(..) => None,
-                    Step::StorageEmptyAssertion(..) => None,
-                    Step::Repeat(..) => None,
-                    Step::AllocateAccount(..) => None,
-                })
-                .next()
-                .unwrap_or(FunctionCallStep::default_caller_address());
             let tx = TransactionBuilder::<Ethereum>::with_deploy_code(
                 TransactionRequest::default().from(deployer_address),
                 code,
+61 -24
@@ -8,7 +8,7 @@ use alloy::{
     hex,
     json_abi::JsonAbi,
     network::{Ethereum, TransactionBuilder},
-    primitives::{Address, TxHash, U256},
+    primitives::{Address, TxHash, U256, address},
     rpc::types::{
         TransactionReceipt, TransactionRequest,
         trace::geth::{
@@ -18,9 +18,9 @@ use alloy::{
     },
 };
 use anyhow::{Context as _, Result, bail};
-use futures::TryStreamExt;
+use futures::{TryStreamExt, future::try_join_all};
 use indexmap::IndexMap;
-use revive_dt_common::types::{PlatformIdentifier, PrivateKeyAllocator};
+use revive_dt_common::types::{PlatformIdentifier, PrivateKeyAllocator, VmIdentifier};
 use revive_dt_format::{
     metadata::{ContractInstance, ContractPathAndIdent},
     steps::{
@@ -30,6 +30,7 @@ use revive_dt_format::{
     },
     traits::ResolutionContext,
 };
+use subxt::{ext::codec::Decode, metadata::Metadata, tx::Payload};
 use tokio::sync::Mutex;
 use tracing::{error, info, instrument};
@@ -198,6 +199,8 @@ where
             })
             .context("Failed to produce the pre-linking compiled contracts")?;
+        let deployer_address = test_definition.case.deployer_address();
         let mut deployed_libraries = None::<HashMap<_, _>>;
         let mut contract_sources = test_definition
             .metadata
@@ -232,22 +235,6 @@ where
             let code = alloy::hex::decode(code)?;
-            // Getting the deployer address from the cases themselves. This is to ensure
-            // that we're doing the deployments from different accounts and therefore we're
-            // not slowed down by the nonce.
-            let deployer_address = test_definition
-                .case
-                .steps
-                .iter()
-                .filter_map(|step| match step {
-                    Step::FunctionCall(input) => input.caller.as_address().copied(),
-                    Step::BalanceAssertion(..) => None,
-                    Step::StorageEmptyAssertion(..) => None,
-                    Step::Repeat(..) => None,
-                    Step::AllocateAccount(..) => None,
-                })
-                .next()
-                .unwrap_or(FunctionCallStep::default_caller_address());
             let tx = TransactionBuilder::<Ethereum>::with_deploy_code(
                 TransactionRequest::default().from(deployer_address),
                 code,
@@ -295,6 +282,51 @@ where
             })
             .context("Failed to compile the post-link contracts")?;
+        // Factory contracts on the PVM refer to the code that they're instantiating by hash rather
+        // than including the actual bytecode. This creates a problem where a factory contract could
+        // be deployed but the code it's supposed to create is not on chain. Therefore, we upload
+        // all the code to the chain prior to running any transactions on the driver.
+        if platform_information.platform.vm_identifier() == VmIdentifier::PolkaVM {
+            #[subxt::subxt(runtime_metadata_path = "../../assets/revive_metadata.scale")]
+            pub mod revive {}
+
+            let metadata_bytes = include_bytes!("../../../../assets/revive_metadata.scale");
+            let metadata = Metadata::decode(&mut &metadata_bytes[..])
+                .context("Failed to decode the revive metadata")?;
+
+            const RUNTIME_PALLET_ADDRESS: Address =
+                address!("0x6d6f646c70792f70616464720000000000000000");
+
+            let code_upload_tasks = compiler_output
+                .contracts
+                .values()
+                .flat_map(|item| item.values())
+                .map(|(code_string, _)| {
+                    let metadata = metadata.clone();
+                    async move {
+                        let code = alloy::hex::decode(code_string)
+                            .context("Failed to hex-decode the post-link code. This is a bug")?;
+                        let payload = revive::tx().revive().upload_code(code, u128::MAX);
+                        let encoded_payload = payload
+                            .encode_call_data(&metadata)
+                            .context("Failed to encode the upload code payload")?;
+                        let tx_request = TransactionRequest::default()
+                            .from(deployer_address)
+                            .to(RUNTIME_PALLET_ADDRESS)
+                            .input(encoded_payload.into());
+                        platform_information
+                            .node
+                            .execute_transaction(tx_request)
+                            .await
+                            .context("Failed to execute transaction")
+                    }
+                });
+            try_join_all(code_upload_tasks)
+                .await
+                .context("Code upload failed")?;
+        }
+
         Ok(ExecutionState::new(
             compiler_output.contracts,
             deployed_libraries.unwrap_or_default(),
@@ -359,7 +391,11 @@ where
         Ok(())
     }
-    #[instrument(level = "info", skip_all)]
+    #[instrument(
+        level = "info",
+        skip_all,
+        fields(block_number = tracing::field::Empty)
+    )]
     pub async fn execute_function_call(
         &mut self,
         _: &StepPath,
@@ -373,6 +409,7 @@ where
             .handle_function_call_execution(step, deployment_receipts)
             .await
             .context("Failed to handle the function call execution")?;
+        tracing::Span::current().record("block_number", execution_receipt.block_number);
         let tracing_result = self
             .handle_function_call_call_frame_tracing(execution_receipt.transaction_hash)
             .await
@@ -616,8 +653,8 @@ where
         }
         // Handling the calldata assertion
-        if let Some(ref expected_calldata) = assertion.return_data {
-            let expected = expected_calldata;
+        if let Some(ref expected_output) = assertion.return_data {
+            let expected = expected_output;
             let actual = &tracing_result.output.as_ref().unwrap_or_default();
             if !expected
                 .is_equivalent(actual, resolver.as_ref(), resolution_context)
@@ -628,9 +665,9 @@ where
                 ?receipt,
                 ?expected,
                 %actual,
-                "Calldata assertion failed"
+                "Output assertion failed"
             );
-            anyhow::bail!("Calldata assertion failed - Expected {expected:?} but got {actual}",);
+            anyhow::bail!("Output assertion failed - Expected {expected:?} but got {actual}",);
         }
     }
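A note on the RUNTIME_PALLET_ADDRESS constant introduced in the hunk above: it is the ASCII string "modlpy/paddr" zero-padded to 20 bytes, which is the magic address the driver targets with the SCALE-encoded upload_code call data. A tiny self-contained check (plain Rust, no project or chain dependencies) that decodes the constant back to that string:

// Decodes the non-zero prefix of the 20-byte pallet address used above.
fn main() {
    let address: [u8; 20] = [
        0x6d, 0x6f, 0x64, 0x6c, 0x70, 0x79, 0x2f, 0x70, 0x61, 0x64, 0x64, 0x72,
        0, 0, 0, 0, 0, 0, 0, 0,
    ];
    // Keep the leading non-zero bytes and interpret them as UTF-8.
    let prefix: Vec<u8> = address.iter().copied().take_while(|b| *b != 0).collect();
    println!("{}", String::from_utf8(prefix).unwrap()); // prints: modlpy/paddr
}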
+15
@@ -1,3 +1,4 @@
+use alloy::primitives::Address;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
@@ -107,6 +108,20 @@ impl Case {
             None => Mode::all().cloned().collect(),
         }
     }
+
+    pub fn deployer_address(&self) -> Address {
+        self.steps
+            .iter()
+            .filter_map(|step| match step {
+                Step::FunctionCall(input) => input.caller.as_address().copied(),
+                Step::BalanceAssertion(..) => None,
+                Step::StorageEmptyAssertion(..) => None,
+                Step::Repeat(..) => None,
+                Step::AllocateAccount(..) => None,
+            })
+            .next()
+            .unwrap_or(FunctionCallStep::default_caller_address())
+    }
 }
 define_wrapper_type!(
+3 -2
@@ -706,6 +706,7 @@ impl Calldata {
                 .await
                 .context("Failed to resolve calldata item during equivalence check")?;
             let other = U256::from_be_slice(&other);
+
             Ok(this == other)
         })
         .buffered(0xFF)
@@ -718,7 +719,7 @@
 }
 impl CalldataItem {
-    #[instrument(level = "info", skip_all, err)]
+    #[instrument(level = "info", skip_all, err(Debug))]
     async fn resolve(
         &self,
         resolver: &(impl ResolverApi + ?Sized),
@@ -906,7 +907,7 @@ impl<T: AsRef<str>> CalldataToken<T> {
             let block_hash = resolver
                 .block_hash(desired_block_number.into())
                 .await
-                .context("Failed to resolve block hash for desired block number")?;
+                .context(format!("Failed to resolve the block hash of block number {desired_block_number}"))?;
             Ok(U256::from_be_bytes(block_hash.0))
         } else if item == Self::BLOCK_NUMBER_VARIABLE {
@@ -57,6 +57,9 @@ use crate::{
 static NODE_COUNT: AtomicU32 = AtomicU32::new(0);
+/// The number of blocks that should be cached by the revive-dev-node and the eth-rpc.
+const NUMBER_OF_CACHED_BLOCKS: u32 = 100_000;
+
 /// A node implementation for Substrate based chains. Currently, this supports either substrate
 /// or the revive-dev-node which is done by changing the path and some of the other arguments passed
 /// to the command.
@@ -212,6 +215,8 @@ impl SubstrateNode {
             .arg(u32::MAX.to_string())
             .arg("--pool-kbytes")
             .arg(u32::MAX.to_string())
+            .arg("--state-pruning")
+            .arg(NUMBER_OF_CACHED_BLOCKS.to_string())
             .env("RUST_LOG", Self::SUBSTRATE_LOG_ENV)
             .stdout(stdout_file)
             .stderr(stderr_file);
@@ -251,6 +256,10 @@ impl SubstrateNode {
             .arg(format!("ws://127.0.0.1:{substrate_rpc_port}"))
             .arg("--rpc-max-connections")
             .arg(u32::MAX.to_string())
+            .arg("--index-last-n-blocks")
+            .arg(NUMBER_OF_CACHED_BLOCKS.to_string())
+            .arg("--cache-size")
+            .arg(NUMBER_OF_CACHED_BLOCKS.to_string())
             .env("RUST_LOG", Self::PROXY_LOG_ENV)
             .stdout(stdout_file)
             .stderr(stderr_file);
@@ -856,7 +856,7 @@ mod tests {
     use utils::{new_node, test_config};
     #[tokio::test]
-    #[ignore = "Ignored for the time being"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn test_transfer_transaction_should_return_receipt() {
         // Arrange
         let (ctx, node) = new_node().await;
@@ -882,6 +882,7 @@ mod tests {
     }
     #[test]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     fn print_eth_to_polkadot_mappings() {
         let eth_addresses = vec![
             "0x90F8bf6A479f320ead074411a4B0e7944Ea8c9C1",
@@ -897,6 +898,7 @@ mod tests {
     }
     #[test]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     fn test_eth_to_polkadot_address() {
         let cases = vec![
             (
@@ -927,6 +929,7 @@ mod tests {
     }
     #[test]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     fn eth_rpc_version_works() {
         // Arrange
         let context = test_config();
@@ -946,6 +949,7 @@ mod tests {
     }
     #[test]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     fn version_works() {
         // Arrange
         let context = test_config();
@@ -965,7 +969,7 @@ mod tests {
     }
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn get_chain_id_from_node_should_succeed() {
         // Arrange
         let node = shared_node().await;
@@ -984,7 +988,7 @@ mod tests {
     }
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn can_get_gas_limit_from_node() {
         // Arrange
         let node = shared_node().await;
@@ -1002,7 +1006,7 @@ mod tests {
     }
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn can_get_coinbase_from_node() {
         // Arrange
         let node = shared_node().await;
@@ -1020,7 +1024,7 @@ mod tests {
     }
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn can_get_block_difficulty_from_node() {
         // Arrange
         let node = shared_node().await;
@@ -1038,7 +1042,7 @@ mod tests {
     }
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn can_get_block_hash_from_node() {
         // Arrange
         let node = shared_node().await;
@@ -1056,7 +1060,7 @@ mod tests {
     }
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn can_get_block_timestamp_from_node() {
         // Arrange
         let node = shared_node().await;
@@ -1074,7 +1078,7 @@ mod tests {
     }
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn can_get_block_number_from_node() {
         // Arrange
         let node = shared_node().await;
+1 -1
@@ -104,7 +104,7 @@ where
         };
         debug!(%tx_hash, "Submitted Transaction");
-        pending_transaction.set_timeout(Some(Duration::from_secs(240)));
+        pending_transaction.set_timeout(Some(Duration::from_secs(120)));
         let tx_hash = pending_transaction.watch().await.context(format!(
             "Transaction inclusion watching timeout for {tx_hash}"
         ))?;
+9 -2
@@ -2,12 +2,13 @@
 use std::{
     collections::HashMap,
+    str::FromStr,
     sync::{LazyLock, Mutex},
 };
 use revive_dt_common::types::VersionOrRequirement;
-use semver::Version;
+use semver::{Version, VersionReq};
 use sha2::{Digest, Sha256};
 use crate::list::List;
@@ -65,6 +66,9 @@ impl SolcDownloader {
         target: &'static str,
         list: &'static str,
     ) -> anyhow::Result<Self> {
+        static MAXIMUM_COMPILER_VERSION_REQUIREMENT: LazyLock<VersionReq> =
+            LazyLock::new(|| VersionReq::from_str("<=0.8.30").unwrap());
+
         let version_or_requirement = version.into();
         match version_or_requirement {
             VersionOrRequirement::Version(version) => Ok(Self {
@@ -79,7 +83,10 @@
                 .builds
                 .into_iter()
                 .map(|build| build.version)
-                .filter(|version| requirement.matches(version))
+                .filter(|version| {
+                    MAXIMUM_COMPILER_VERSION_REQUIREMENT.matches(version)
+                        && requirement.matches(version)
+                })
                 .max()
             else {
                 anyhow::bail!("Failed to find a version that satisfies {requirement:?}");
Submodule polkadot-sdk deleted from a44be635e6