Mirror of https://github.com/pezkuwichain/revive-differential-tests.git, synced 2026-04-22 21:57:58 +00:00
Compare commits
17 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 556f6c0501 | |
| | 78ac7ee381 | |
| | 3edaebdcae | |
| | 66feb36b4e | |
| | cc753a1a2c | |
| | 31dfd67569 | |
| | a6e4932a08 | |
| | 06c2e023a9 | |
| | 347dcb4488 | |
| | f9a63a5641 | |
| | fb009f65c1 | |
| | dff4c25e24 | |
| | e433d93cbf | |
| | 408754e8fb | |
| | 59bfffe5fe | |
| | 380ea693be | |
| | d02152b565 | |
+137 -169
@@ -18,136 +18,95 @@ env:
   POLKADOT_VERSION: polkadot-stable2506-2
 
 jobs:
-  cache-polkadot:
-    name: Build and cache Polkadot binaries on ${{ matrix.os }}
-    runs-on: ${{ matrix.os }}
-    strategy:
-      matrix:
-        os: [ubuntu-24.04, macos-14]
+  machete:
+    name: Check for Unneeded Dependencies
+    runs-on: ubuntu-24.04
+    env:
+      SCCACHE_GHA_ENABLED: "true"
+      RUSTC_WRAPPER: "sccache"
 
     steps:
-      - name: Checkout repo and submodules
+      - name: Checkout This Repository
         uses: actions/checkout@v4
         with:
          submodules: recursive
-      - name: Install dependencies (Linux)
-        if: matrix.os == 'ubuntu-24.04'
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y protobuf-compiler clang libclang-dev
-          rustup target add wasm32-unknown-unknown
-          rustup component add rust-src
-
-      - name: Install dependencies (macOS)
-        if: matrix.os == 'macos-14'
-        run: |
-          brew install protobuf
-          rustup target add wasm32-unknown-unknown
-          rustup component add rust-src
-
-      - name: Cache binaries
-        id: cache
-        uses: actions/cache@v3
+      - name: Run Sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+      - name: Install the Rust Toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+      - name: Install the Cargo Make Binary
+        uses: davidB/rust-cargo-make@v1
+      - name: Run Cargo Machete
+        run: cargo make machete
+
+  check-fmt:
+    name: Check Formatting
+    runs-on: ubuntu-24.04
+    env:
+      SCCACHE_GHA_ENABLED: "true"
+      RUSTC_WRAPPER: "sccache"
+    steps:
+      - name: Checkout This Repository
+        uses: actions/checkout@v4
         with:
-          path: |
-            ~/.cargo/bin/revive-dev-node
-            ~/.cargo/bin/eth-rpc
-          key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}-with-dev-node
-
-      - name: Build revive-dev-node
-        if: steps.cache.outputs.cache-hit != 'true'
-        run: |
-          cd polkadot-sdk
-          cargo install --locked --force --profile=production --path substrate/frame/revive/dev-node/node --bin revive-dev-node
-
-      - name: Build eth-rpc
-        if: steps.cache.outputs.cache-hit != 'true'
-        run: |
-          cd polkadot-sdk
-          cargo install --path substrate/frame/revive/rpc --bin eth-rpc
-
-      - name: Cache downloaded Polkadot binaries
-        id: cache-polkadot
-        uses: actions/cache@v3
+          submodules: recursive
+      - name: Run Sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+      - name: Install the Rust Toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+      - name: Install the Cargo Make Binary
+        uses: davidB/rust-cargo-make@v1
+      - name: Run Cargo Formatter
+        run: cargo make fmt-check
+
+  check-clippy:
+    name: Check Clippy Lints
+    runs-on: ubuntu-24.04
+    env:
+      SCCACHE_GHA_ENABLED: "true"
+      RUSTC_WRAPPER: "sccache"
+    steps:
+      - name: Checkout This Repository
+        uses: actions/checkout@v4
         with:
-          path: |
-            ~/polkadot-cache/polkadot
-            ~/polkadot-cache/polkadot-execute-worker
-            ~/polkadot-cache/polkadot-prepare-worker
-            ~/polkadot-cache/polkadot-parachain
-          key: polkadot-downloaded-${{ matrix.os }}-${{ env.POLKADOT_VERSION }}
-
-      - name: Download Polkadot binaries on macOS
-        if: matrix.os == 'macos-14' && steps.cache-polkadot.outputs.cache-hit != 'true'
-        run: |
-          mkdir -p ~/polkadot-cache
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-aarch64-apple-darwin -o ~/polkadot-cache/polkadot
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-execute-worker-aarch64-apple-darwin -o ~/polkadot-cache/polkadot-execute-worker
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-prepare-worker-aarch64-apple-darwin -o ~/polkadot-cache/polkadot-prepare-worker
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-parachain-aarch64-apple-darwin -o ~/polkadot-cache/polkadot-parachain
-          chmod +x ~/polkadot-cache/*
+          submodules: recursive
+      - name: Run Sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+      - name: Install the Rust Toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+      - name: Install the Cargo Make Binary
+        uses: davidB/rust-cargo-make@v1
+      - name: Run Cargo Clippy
+        run: cargo make clippy
 
-      - name: Download Polkadot binaries on Ubuntu
-        if: matrix.os == 'ubuntu-24.04' && steps.cache-polkadot.outputs.cache-hit != 'true'
-        run: |
-          mkdir -p ~/polkadot-cache
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot -o ~/polkadot-cache/polkadot
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-execute-worker -o ~/polkadot-cache/polkadot-execute-worker
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-prepare-worker -o ~/polkadot-cache/polkadot-prepare-worker
-          curl -sL https://github.com/paritytech/polkadot-sdk/releases/download/${{ env.POLKADOT_VERSION }}/polkadot-parachain -o ~/polkadot-cache/polkadot-parachain
-          chmod +x ~/polkadot-cache/*
-
-  ci:
-    name: CI on ${{ matrix.os }}
+  test:
+    name: Unit Tests
+    runs-on: ${{ matrix.os }}
     needs: cache-polkadot
-    runs-on: ${{ matrix.os }}
     strategy:
       matrix:
         os: [ubuntu-24.04, macos-14]
+    env:
+      SCCACHE_GHA_ENABLED: "true"
+      RUSTC_WRAPPER: "sccache"
+      POLKADOT_SDK_COMMIT_HASH: "30cda2aad8612a10ff729d494acd9d5353294d63"
     steps:
-      - name: Checkout repo
+      - name: Checkout This Repository
         uses: actions/checkout@v4
         with:
           submodules: recursive
-      - name: Restore binaries from cache
-        uses: actions/cache@v3
-        with:
-          path: |
-            ~/.cargo/bin/revive-dev-node
-            ~/.cargo/bin/eth-rpc
-          key: polkadot-binaries-${{ matrix.os }}-${{ hashFiles('polkadot-sdk/.git') }}-with-dev-node
-
-      - name: Restore downloaded Polkadot binaries from cache
-        uses: actions/cache@v3
-        with:
-          path: |
-            ~/polkadot-cache/polkadot
-            ~/polkadot-cache/polkadot-execute-worker
-            ~/polkadot-cache/polkadot-prepare-worker
-            ~/polkadot-cache/polkadot-parachain
-          key: polkadot-downloaded-${{ matrix.os }}-${{ env.POLKADOT_VERSION }}
-
-      - name: Install Polkadot binaries
-        run: |
-          sudo cp ~/polkadot-cache/polkadot /usr/local/bin/
-          sudo cp ~/polkadot-cache/polkadot-execute-worker /usr/local/bin/
-          sudo cp ~/polkadot-cache/polkadot-prepare-worker /usr/local/bin/
-          sudo cp ~/polkadot-cache/polkadot-parachain /usr/local/bin/
-          sudo chmod +x /usr/local/bin/polkadot*
-
-      - name: Setup Rust toolchain
+      - name: Run Sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+      - name: Install the Rust Toolchain
         uses: actions-rust-lang/setup-rust-toolchain@v1
         with:
-          rustflags: ""
-      - name: Add wasm32 target and formatting
-        run: |
-          rustup target add wasm32-unknown-unknown
-          rustup component add rust-src rustfmt clippy
+          target: "wasm32-unknown-unknown"
+          components: "rust-src,rust-std"
+      - name: Install the Cargo Make Binary
+        uses: davidB/rust-cargo-make@v1
+      - name: Caching Step
+        uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cargo/bin/eth-rpc
+            ~/.cargo/bin/revive-dev-node
+          key: polkadot-binaries-${{ env.POLKADOT_SDK_COMMIT_HASH }}-${{ matrix.os }}
       - name: Install Geth on Ubuntu
         if: matrix.os == 'ubuntu-24.04'
         run: |
@@ -180,7 +139,6 @@ jobs:
           curl -sL https://github.com/paritytech/revive/releases/download/v0.3.0/resolc-x86_64-unknown-linux-musl -o resolc
           chmod +x resolc
           sudo mv resolc /usr/local/bin
-
       - name: Install Geth on macOS
         if: matrix.os == 'macos-14'
         run: |
@@ -192,69 +150,79 @@ jobs:
           curl -sL https://github.com/paritytech/revive/releases/download/v0.3.0/resolc-universal-apple-darwin -o resolc
           chmod +x resolc
           sudo mv resolc /usr/local/bin
 
       - name: Install Kurtosis on macOS
         if: matrix.os == 'macos-14'
         run: brew install kurtosis-tech/tap/kurtosis-cli
 
       - name: Install Kurtosis on Ubuntu
         if: matrix.os == 'ubuntu-24.04'
         run: |
           echo "deb [trusted=yes] https://apt.fury.io/kurtosis-tech/ /" | sudo tee /etc/apt/sources.list.d/kurtosis.list
           sudo apt update
           sudo apt install kurtosis-cli
-      - name: Install cargo-machete
-        uses: clechasseur/rs-cargo@v2
+      - name: Run Tests
+        run: cargo make test
+
+  cache-polkadot:
+    name: Build and Cache Polkadot Binaries on ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-24.04, macos-14]
+    env:
+      SCCACHE_GHA_ENABLED: "true"
+      RUSTC_WRAPPER: "sccache"
+      RUSTFLAGS: "-Awarnings"
+      POLKADOT_SDK_COMMIT_HASH: "30cda2aad8612a10ff729d494acd9d5353294d63"
+    steps:
+      - name: Caching Step
+        id: cache-step
+        uses: actions/cache@v4
         with:
-          command: install
-          args: cargo-machete@0.7.0
-      - name: Machete
-        run: cargo machete crates
+          path: |
+            ~/.cargo/bin/eth-rpc
+            ~/.cargo/bin/revive-dev-node
+          key: polkadot-binaries-${{ env.POLKADOT_SDK_COMMIT_HASH }}-${{ matrix.os }}
+      - name: Checkout the Polkadot SDK Repository
+        uses: actions/checkout@v4
+        if: steps.cache-step.outputs.cache-hit != 'true'
+        with:
+          repository: paritytech/polkadot-sdk
+          ref: ${{ env.POLKADOT_SDK_COMMIT_HASH }}
+          submodules: recursive
+      - name: Run Sccache
+        uses: mozilla-actions/sccache-action@v0.0.9
+        if: steps.cache-step.outputs.cache-hit != 'true'
+      - name: Install the Rust Toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+        if: steps.cache-step.outputs.cache-hit != 'true'
+        with:
+          target: "wasm32-unknown-unknown"
+          components: "rust-src"
+          toolchain: "1.90.0"
 
-      - name: Format
-        run: make format
-
-      - name: Clippy
-        run: make clippy
-
-      - name: Check revive-dev-node version
-        run: revive-dev-node --version
-
-      - name: Check eth-rpc version
-        run: eth-rpc --version
-
-      - name: Check resolc version
-        run: resolc --version
-
-      - name: Check polkadot version
-        run: polkadot --version
-
-      - name: Check polkadot-parachain version
-        run: polkadot-parachain --version
-
-      - name: Check polkadot-execute-worker version
-        run: polkadot-execute-worker --version
-
-      - name: Check polkadot-prepare-worker version
-        run: polkadot-prepare-worker --version
-
-      - name: Test Formatting
-        run: make format
-
-      - name: Test Clippy
-        run: make clippy
-
-      - name: Test Machete
-        run: make machete
-
-      - name: Unit Tests
-        if: matrix.os == 'ubuntu-24.04'
-        run: cargo test --workspace -- --nocapture
-
-      # We can't install docker in the MacOS image used in CI and therefore we need to skip the
-      # Kurtosis and lighthouse related tests when running the CI on MacOS.
-      - name: Unit Tests
-        if: matrix.os == 'macos-14'
+      - name: Install dependencies (Linux)
+        if: matrix.os == 'ubuntu-24.04' && steps.cache-step.outputs.cache-hit != 'true'
         run: |
-          cargo test --workspace -- --nocapture --skip lighthouse_geth::tests::
+          sudo apt-get update
+          sudo apt-get install -y protobuf-compiler clang libclang-dev
+      - name: Install dependencies (macOS)
+        if: matrix.os == 'macos-14' && steps.cache-step.outputs.cache-hit != 'true'
+        run: |
+          brew install protobuf llvm
+          LLVM_PREFIX="$(brew --prefix llvm)"
+          echo "LDFLAGS=-L${LLVM_PREFIX}/lib" >> "$GITHUB_ENV"
+          echo "CPPFLAGS=-I${LLVM_PREFIX}/include" >> "$GITHUB_ENV"
+          echo "CMAKE_PREFIX_PATH=${LLVM_PREFIX}" >> "$GITHUB_ENV"
+          echo "LIBCLANG_PATH=${LLVM_PREFIX}/lib" >> "$GITHUB_ENV"
+          echo "DYLD_FALLBACK_LIBRARY_PATH=${LLVM_PREFIX}/lib" >> "$GITHUB_ENV"
+          echo "${LLVM_PREFIX}/bin" >> "$GITHUB_PATH"
+      - name: Build Polkadot Dependencies
+        if: steps.cache-step.outputs.cache-hit != 'true'
+        run: |
+          cargo build \
+            --locked \
+            --profile production \
+            --package revive-dev-node \
+            --package pallet-revive-eth-rpc;
+          mv ./target/production/revive-dev-node ~/.cargo/bin
+          mv ./target/production/eth-rpc ~/.cargo/bin
+          chmod +x ~/.cargo/bin/*
@@ -3,6 +3,7 @@
 .DS_Store
 node_modules
 /*.json
+*.sh
 
 # We do not want to commit any log files that we produce from running the code locally so this is
 # added to the .gitignore file.
@@ -13,3 +14,4 @@ workdir
 
 !/schema.json
 !/dev-genesis.json
+!/scripts/*
@@ -1,6 +1,3 @@
-[submodule "polkadot-sdk"]
-	path = polkadot-sdk
-	url = https://github.com/paritytech/polkadot-sdk.git
 [submodule "resolc-compiler-tests"]
 	path = resolc-compiler-tests
 	url = https://github.com/paritytech/resolc-compiler-tests
Generated · +1
@@ -5657,6 +5657,7 @@ dependencies = [
  "semver 1.0.26",
  "serde",
  "serde_json",
+ "subxt 0.44.0",
  "tokio",
  "tracing",
  "tracing-appender",
@@ -1,15 +0,0 @@
-.PHONY: format clippy test machete
-
-format:
-	cargo fmt --all -- --check
-
-clippy:
-	cargo clippy --all-features --workspace -- --deny warnings
-
-machete:
-	cargo install cargo-machete
-	cargo machete crates
-
-test: format clippy machete
-	cargo test --workspace -- --nocapture
-
@@ -0,0 +1,21 @@
+[config]
+default_to_workspace = false
+
+[tasks.machete]
+command = "cargo"
+args = ["machete", "crates"]
+install_crate = "cargo-machete"
+
+[tasks.fmt-check]
+command = "cargo"
+args = ["fmt", "--all", "--", "--check"]
+install_crate = "rustfmt"
+
+[tasks.clippy]
+command = "cargo"
+args = ["clippy", "--all-features", "--workspace", "--", "--deny", "warnings"]
+install_crate = "clippy"
+
+[tasks.test]
+command = "cargo"
+args = ["test", "--workspace", "--", "--nocapture"]
Binary file not shown.
@@ -16,6 +16,7 @@ use alloy::{
     primitives::{B256, FixedBytes, U256},
     signers::local::PrivateKeySigner,
 };
+use anyhow::Context as _;
 use clap::{Parser, ValueEnum, ValueHint};
 use revive_dt_common::types::{ParsedTestSpecifier, PlatformIdentifier};
 use semver::Version;
@@ -374,6 +375,23 @@ pub struct BenchmarkingContext {
     #[arg(short = 'r', long = "default-repetition-count", default_value_t = 1000)]
     pub default_repetition_count: usize,
 
+    /// This transaction controls whether the benchmarking driver should await for transactions to
+    /// be included in a block before moving on to the next transaction in the sequence or not.
+    ///
+    /// This behavior is useful in certain cases and not so useful in others. For example, in some
+    /// repetition block if there's some kind of relationship between txs n and n+1 (for example a
+    /// mint then a transfer) then you would want to wait for the minting to happen and then move on
+    /// to the transfers. On the other hand, if there's no relationship between the transactions n
+    /// and n+1 (e.g., mint and another mint of a different token) then awaiting the first mint to
+    /// be included in a block might not seem necessary.
+    ///
+    /// By default, this behavior is set to false to allow the benchmarking framework to saturate
+    /// the node's mempool as quickly as possible. However, as explained above, there are cases
+    /// where it's needed and certain workloads where failure to provide this argument would lead to
+    /// inaccurate results.
+    #[arg(long)]
+    pub await_transaction_inclusion: bool,
+
     /// Configuration parameters for the corpus files to use.
     #[clap(flatten, next_help_heading = "Corpus Configuration")]
     pub corpus_configuration: CorpusConfiguration,
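The new flag only changes when the driver moves on to the next step. A minimal sketch of the gating it enables, with placeholder types standing in for the real submission path (the genuine wiring appears in the driver hunks further down):

```rust
use std::future::Future;

use anyhow::{Context as _, ensure};

// Sketch: `submit_result` is a transaction id returned immediately, plus a
// future resolving to the success status once the tx lands in a block.
async fn run_step<F>(await_inclusion: bool, submit_result: (u64, F)) -> anyhow::Result<u64>
where
    F: Future<Output = anyhow::Result<bool>>,
{
    let (tx_id, included) = submit_result;
    if await_inclusion {
        // Dependent sequences (a mint, then a transfer of the minted token)
        // need the receipt before the next step may run.
        let status = included
            .await
            .context("Failed while waiting for transaction inclusion in block")?;
        ensure!(status, "Encountered a failing transaction in benchmarks: {tx_id}");
    }
    // Independent sequences skip the wait and keep the mempool saturated.
    Ok(tx_id)
}
```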
@@ -1079,7 +1097,10 @@ impl FromStr for WorkingDirectoryConfiguration {
     fn from_str(s: &str) -> Result<Self, Self::Err> {
         match s {
             "" => Ok(Default::default()),
-            _ => Ok(Self::Path(PathBuf::from(s))),
+            _ => PathBuf::from(s)
+                .canonicalize()
+                .context("Failed to canonicalize the working directory path")
+                .map(Self::Path),
         }
     }
 }
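A standalone sketch of what the reworked `FromStr` now does for a non-empty argument, assuming only `anyhow`: the path is canonicalized eagerly, so a working-directory argument pointing at a missing directory fails at parse time rather than later during a run.

```rust
use std::path::PathBuf;

use anyhow::Context as _;

// Mirrors the new parsing branch: resolve to an absolute, symlink-free path
// up front; `canonicalize` errors if the directory does not exist.
fn parse_working_directory(s: &str) -> anyhow::Result<PathBuf> {
    PathBuf::from(s)
        .canonicalize()
        .context("Failed to canonicalize the working directory path")
}
```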
@@ -37,6 +37,7 @@ schemars = { workspace = true }
 semver = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
+subxt = { workspace = true }
 
 [lints]
 workspace = true
@@ -1,6 +1,5 @@
 use std::{
     collections::HashMap,
-    ops::ControlFlow,
     sync::{
         Arc,
         atomic::{AtomicUsize, Ordering},
@@ -13,6 +12,7 @@ use alloy::{
     json_abi::JsonAbi,
     network::{Ethereum, TransactionBuilder},
     primitives::{Address, TxHash, U256},
+    providers::Provider,
     rpc::types::{
         TransactionReceipt, TransactionRequest,
         trace::geth::{
@@ -22,12 +22,9 @@ use alloy::{
     },
 };
 use anyhow::{Context as _, Result, bail};
-use futures::TryFutureExt;
+use futures::{FutureExt as _, TryFutureExt};
 use indexmap::IndexMap;
-use revive_dt_common::{
-    futures::{PollingWaitBehavior, poll},
-    types::PrivateKeyAllocator,
-};
+use revive_dt_common::types::PrivateKeyAllocator;
 use revive_dt_format::{
     metadata::{ContractInstance, ContractPathAndIdent},
     steps::{
@@ -37,7 +34,7 @@ use revive_dt_format::{
     traits::{ResolutionContext, ResolverApi},
 };
 use tokio::sync::{Mutex, OnceCell, mpsc::UnboundedSender};
-use tracing::{Instrument, Span, debug, error, field::display, info, info_span, instrument};
+use tracing::{Span, debug, error, field::display, info, instrument};
 
 use crate::{
     differential_benchmarks::{ExecutionState, WatcherEvent},
@@ -73,6 +70,10 @@ pub struct Driver<'a, I> {
     /// The number of steps that were executed on the driver.
     steps_executed: usize,
 
+    /// This function controls if the driver should wait for transactions to be included in a block
+    /// or not before proceeding forward.
+    await_transaction_inclusion: bool,
+
     /// This is the queue of steps that are to be executed by the driver for this test case. Each
     /// time `execute_step` is called one of the steps is executed.
     steps_iterator: I,
@@ -89,6 +90,7 @@ where
         private_key_allocator: Arc<Mutex<PrivateKeyAllocator>>,
         cached_compiler: &CachedCompiler<'a>,
         watcher_tx: UnboundedSender<WatcherEvent>,
+        await_transaction_inclusion: bool,
         steps: I,
     ) -> Result<Self> {
         let mut this = Driver {
@@ -104,6 +106,7 @@ where
             execution_state: ExecutionState::empty(),
             steps_executed: 0,
             steps_iterator: steps,
+            await_transaction_inclusion,
             watcher_tx,
         };
         this.init_execution_state(cached_compiler)
@@ -127,6 +130,8 @@ where
             .inspect_err(|err| error!(?err, "Pre-linking compilation failed"))
             .context("Failed to produce the pre-linking compiled contracts")?;
 
+        let deployer_address = self.test_definition.case.deployer_address();
+
         let mut deployed_libraries = None::<HashMap<_, _>>;
         let mut contract_sources = self
             .test_definition
@@ -159,29 +164,12 @@ where
 
             let code = alloy::hex::decode(code)?;
 
-            // Getting the deployer address from the cases themselves. This is to ensure
-            // that we're doing the deployments from different accounts and therefore we're
-            // not slowed down by the nonce.
-            let deployer_address = self
-                .test_definition
-                .case
-                .steps
-                .iter()
-                .filter_map(|step| match step {
-                    Step::FunctionCall(input) => input.caller.as_address().copied(),
-                    Step::BalanceAssertion(..) => None,
-                    Step::StorageEmptyAssertion(..) => None,
-                    Step::Repeat(..) => None,
-                    Step::AllocateAccount(..) => None,
-                })
-                .next()
-                .unwrap_or(FunctionCallStep::default_caller_address());
             let tx = TransactionBuilder::<Ethereum>::with_deploy_code(
                 TransactionRequest::default().from(deployer_address),
                 code,
             );
             let receipt = self
-                .execute_transaction(tx, None)
+                .execute_transaction(tx, None, Duration::from_secs(5 * 60))
                 .and_then(|(_, receipt_fut)| receipt_fut)
                 .await
                 .inspect_err(|err| {
@@ -380,7 +368,30 @@ where
         let tx = step
             .as_transaction(self.resolver.as_ref(), self.default_resolution_context())
             .await?;
-        Ok(self.execute_transaction(tx, Some(step_path)).await?.0)
+        let (tx_hash, receipt_future) = self
+            .execute_transaction(tx.clone(), Some(step_path), Duration::from_secs(30 * 60))
+            .await?;
+        if self.await_transaction_inclusion {
+            let receipt = receipt_future
+                .await
+                .context("Failed while waiting for transaction inclusion in block")?;
+
+            if !receipt.status() {
+                error!(
+                    ?tx,
+                    tx.hash = %receipt.transaction_hash,
+                    ?receipt,
+                    "Encountered a failing benchmark transaction"
+                );
+                bail!(
+                    "Encountered a failing transaction in benchmarks: {}",
+                    receipt.transaction_hash
+                )
+            }
+        }
+
+        Ok(tx_hash)
     }
 }
 }
@@ -481,6 +492,7 @@ where
                 .collect::<Vec<_>>();
                 steps.into_iter()
             },
+            await_transaction_inclusion: self.await_transaction_inclusion,
             watcher_tx: self.watcher_tx.clone(),
         })
         .map(|driver| driver.execute_all());
@@ -647,7 +659,7 @@ where
         };
 
         let receipt = match self
-            .execute_transaction(tx, step_path)
+            .execute_transaction(tx, step_path, Duration::from_secs(5 * 60))
             .and_then(|(_, receipt_fut)| receipt_fut)
             .await
         {
@@ -692,18 +704,33 @@ where
     #[instrument(
         level = "info",
         skip_all,
-        fields(driver_id = self.driver_id, transaction_hash = tracing::field::Empty)
+        fields(
+            driver_id = self.driver_id,
+            transaction = ?transaction,
+            transaction_hash = tracing::field::Empty
+        ),
+        err(Debug)
     )]
     async fn execute_transaction(
         &self,
         transaction: TransactionRequest,
         step_path: Option<&StepPath>,
+        receipt_wait_duration: Duration,
     ) -> anyhow::Result<(TxHash, impl Future<Output = Result<TransactionReceipt>>)> {
         let node = self.platform_information.node;
-        let transaction_hash = node
-            .submit_transaction(transaction)
+        let provider = node.provider().await.context("Creating provider failed")?;
+
+        let pending_transaction_builder = provider
+            .send_transaction(transaction)
             .await
             .context("Failed to submit transaction")?;
+
+        let transaction_hash = *pending_transaction_builder.tx_hash();
+        let receipt_future = pending_transaction_builder
+            .with_timeout(Some(receipt_wait_duration))
+            .with_required_confirmations(2)
+            .get_receipt()
+            .map(|res| res.context("Failed to get the receipt of the transaction"));
         Span::current().record("transaction_hash", display(transaction_hash));
 
         info!("Submitted transaction");
@@ -716,28 +743,7 @@ where
                 .context("Failed to send the transaction hash to the watcher")?;
         };
 
-        Ok((transaction_hash, async move {
-            info!("Starting to poll for transaction receipt");
-            poll(
-                Duration::from_secs(30 * 60),
-                PollingWaitBehavior::Constant(Duration::from_secs(1)),
-                || {
-                    async move {
-                        match node.get_receipt(transaction_hash).await {
-                            Ok(receipt) => {
-                                info!("Polling succeeded, receipt found");
-                                Ok(ControlFlow::Break(receipt))
-                            }
-                            Err(_) => Ok(ControlFlow::Continue(())),
-                        }
-                    }
-                    .instrument(info_span!("Polling for receipt"))
-                },
-            )
-            .instrument(info_span!("Polling for receipt", %transaction_hash))
-            .await
-            .inspect(|_| info!("Found the transaction receipt"))
-        }))
+        Ok((transaction_hash, receipt_future))
     }
     // endregion:Transaction Execution
 }
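The hand-rolled 30-minute polling loop is gone; alloy's pending-transaction tracking does the watching. A self-contained sketch of that pattern under the same timeout and confirmation settings as the diff (any alloy `Provider` should work here):

```rust
use std::time::Duration;

use alloy::{providers::Provider, rpc::types::TransactionRequest};
use anyhow::Context as _;

// Submit a transaction and let alloy track the receipt instead of
// polling `get_receipt` in a loop ourselves.
async fn send_and_confirm(
    provider: impl Provider,
    tx: TransactionRequest,
    wait: Duration,
) -> anyhow::Result<()> {
    let pending = provider
        .send_transaction(tx)
        .await
        .context("Failed to submit transaction")?;
    let receipt = pending
        .with_timeout(Some(wait))       // stop waiting after `wait`
        .with_required_confirmations(2) // same confirmation depth as the diff
        .get_receipt()
        .await
        .context("Failed to get the receipt of the transaction")?;
    anyhow::ensure!(receipt.status(), "transaction reverted");
    Ok(())
}
```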
@@ -160,6 +160,7 @@ pub async fn handle_differential_benchmarks(
         private_key_allocator,
         cached_compiler.as_ref(),
         watcher_tx.clone(),
+        context.await_transaction_inclusion,
         test_definition
             .case
             .steps_iterator_for_benchmarks(context.default_repetition_count)
@@ -139,23 +139,18 @@ impl Watcher {
                     break;
                 }
 
-                info!(
-                    block_number = block.ethereum_block_information.block_number,
-                    block_tx_count = block.ethereum_block_information.transaction_hashes.len(),
-                    remaining_transactions = watch_for_transaction_hashes.read().await.len(),
-                    "Observed a block"
-                );
-
                 // Remove all of the transaction hashes observed in this block from the txs we
                 // are currently watching for.
                 let mut watch_for_transaction_hashes =
                     watch_for_transaction_hashes.write().await;
+                let mut relevant_transactions_observed = 0;
                 for tx_hash in block.ethereum_block_information.transaction_hashes.iter() {
                     let Some((step_path, submission_time)) =
                         watch_for_transaction_hashes.remove(tx_hash)
                     else {
                         continue;
                     };
+                    relevant_transactions_observed += 1;
                     let transaction_information = TransactionInformation {
                         transaction_hash: *tx_hash,
                         submission_timestamp: submission_time
@@ -172,6 +167,14 @@ impl Watcher {
                     )
                     .expect("Can't fail")
                 }
+
+                info!(
+                    block_number = block.ethereum_block_information.block_number,
+                    block_tx_count = block.ethereum_block_information.transaction_hashes.len(),
+                    relevant_transactions_observed,
+                    remaining_transactions = watch_for_transaction_hashes.len(),
+                    "Observed a block"
+                );
             }
 
             info!("Watcher's Block Watching Task Finished");
@@ -8,7 +8,7 @@ use alloy::{
     hex,
     json_abi::JsonAbi,
     network::{Ethereum, TransactionBuilder},
-    primitives::{Address, TxHash, U256},
+    primitives::{Address, TxHash, U256, address},
     rpc::types::{
         TransactionReceipt, TransactionRequest,
         trace::geth::{
@@ -18,9 +18,9 @@ use alloy::{
     },
 };
 use anyhow::{Context as _, Result, bail};
-use futures::TryStreamExt;
+use futures::{TryStreamExt, future::try_join_all};
 use indexmap::IndexMap;
-use revive_dt_common::types::{PlatformIdentifier, PrivateKeyAllocator};
+use revive_dt_common::types::{PlatformIdentifier, PrivateKeyAllocator, VmIdentifier};
 use revive_dt_format::{
     metadata::{ContractInstance, ContractPathAndIdent},
     steps::{
@@ -30,6 +30,7 @@ use revive_dt_format::{
     },
     traits::ResolutionContext,
 };
+use subxt::{ext::codec::Decode, metadata::Metadata, tx::Payload};
 use tokio::sync::Mutex;
 use tracing::{error, info, instrument};
 
@@ -198,6 +199,8 @@ where
             })
             .context("Failed to produce the pre-linking compiled contracts")?;
 
+        let deployer_address = test_definition.case.deployer_address();
+
         let mut deployed_libraries = None::<HashMap<_, _>>;
         let mut contract_sources = test_definition
             .metadata
@@ -232,22 +235,6 @@ where
 
             let code = alloy::hex::decode(code)?;
 
-            // Getting the deployer address from the cases themselves. This is to ensure
-            // that we're doing the deployments from different accounts and therefore we're
-            // not slowed down by the nonce.
-            let deployer_address = test_definition
-                .case
-                .steps
-                .iter()
-                .filter_map(|step| match step {
-                    Step::FunctionCall(input) => input.caller.as_address().copied(),
-                    Step::BalanceAssertion(..) => None,
-                    Step::StorageEmptyAssertion(..) => None,
-                    Step::Repeat(..) => None,
-                    Step::AllocateAccount(..) => None,
-                })
-                .next()
-                .unwrap_or(FunctionCallStep::default_caller_address());
             let tx = TransactionBuilder::<Ethereum>::with_deploy_code(
                 TransactionRequest::default().from(deployer_address),
                 code,
@@ -295,6 +282,51 @@ where
             })
             .context("Failed to compile the post-link contracts")?;
 
+        // Factory contracts on the PVM refer to the code that they're instantiating by hash rather
+        // than including the actual bytecode. This creates a problem where a factory contract could
+        // be deployed but the code it's supposed to create is not on chain. Therefore, we upload
+        // all the code to the chain prior to running any transactions on the driver.
+        if platform_information.platform.vm_identifier() == VmIdentifier::PolkaVM {
+            #[subxt::subxt(runtime_metadata_path = "../../assets/revive_metadata.scale")]
+            pub mod revive {}
+
+            let metadata_bytes = include_bytes!("../../../../assets/revive_metadata.scale");
+            let metadata = Metadata::decode(&mut &metadata_bytes[..])
+                .context("Failed to decode the revive metadata")?;
+
+            const RUNTIME_PALLET_ADDRESS: Address =
+                address!("0x6d6f646c70792f70616464720000000000000000");
+
+            let code_upload_tasks = compiler_output
+                .contracts
+                .values()
+                .flat_map(|item| item.values())
+                .map(|(code_string, _)| {
+                    let metadata = metadata.clone();
+                    async move {
+                        let code = alloy::hex::decode(code_string)
+                            .context("Failed to hex-decode the post-link code. This is a bug")?;
+                        let payload = revive::tx().revive().upload_code(code, u128::MAX);
+                        let encoded_payload = payload
+                            .encode_call_data(&metadata)
+                            .context("Failed to encode the upload code payload")?;
+
+                        let tx_request = TransactionRequest::default()
+                            .from(deployer_address)
+                            .to(RUNTIME_PALLET_ADDRESS)
+                            .input(encoded_payload.into());
+                        platform_information
+                            .node
+                            .execute_transaction(tx_request)
+                            .await
+                            .context("Failed to execute transaction")
+                    }
+                });
+            try_join_all(code_upload_tasks)
+                .await
+                .context("Code upload failed")?;
+        }
+
         Ok(ExecutionState::new(
             compiler_output.contracts,
             deployed_libraries.unwrap_or_default(),
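The hard-coded `RUNTIME_PALLET_ADDRESS` is not arbitrary: its hex appears to decode to the ASCII bytes `modlpy/paddr` (the Substrate `modl` pallet-account prefix plus an id) zero-padded to the 20-byte Ethereum address width. A small check of that reading, using only alloy primitives:

```rust
use alloy::primitives::{Address, address};

// Rebuild the pallet address from its ASCII prefix and compare it to the
// literal used in the diff above.
fn main() {
    let mut bytes = [0u8; 20];
    bytes[..12].copy_from_slice(b"modlpy/paddr");
    assert_eq!(
        Address::from(bytes),
        address!("0x6d6f646c70792f70616464720000000000000000"),
    );
}
```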
@@ -359,7 +391,11 @@ where
         Ok(())
     }
 
-    #[instrument(level = "info", skip_all)]
+    #[instrument(
+        level = "info",
+        skip_all,
+        fields(block_number = tracing::field::Empty)
+    )]
     pub async fn execute_function_call(
         &mut self,
         _: &StepPath,
@@ -373,6 +409,7 @@ where
             .handle_function_call_execution(step, deployment_receipts)
             .await
             .context("Failed to handle the function call execution")?;
+        tracing::Span::current().record("block_number", execution_receipt.block_number);
         let tracing_result = self
             .handle_function_call_call_frame_tracing(execution_receipt.transaction_hash)
             .await
@@ -616,8 +653,8 @@ where
         }
 
         // Handling the calldata assertion
-        if let Some(ref expected_calldata) = assertion.return_data {
-            let expected = expected_calldata;
+        if let Some(ref expected_output) = assertion.return_data {
+            let expected = expected_output;
             let actual = &tracing_result.output.as_ref().unwrap_or_default();
             if !expected
                 .is_equivalent(actual, resolver.as_ref(), resolution_context)
@@ -628,9 +665,9 @@ where
                     ?receipt,
                     ?expected,
                     %actual,
-                    "Calldata assertion failed"
+                    "Output assertion failed"
                 );
-                anyhow::bail!("Calldata assertion failed - Expected {expected:?} but got {actual}",);
+                anyhow::bail!("Output assertion failed - Expected {expected:?} but got {actual}",);
             }
         }
+14 -4
@@ -91,7 +91,8 @@ impl Platform for GethEvmSolcPlatform {
         let genesis_configuration = AsRef::<GenesisConfiguration>::as_ref(&context);
         let genesis = genesis_configuration.genesis()?.clone();
         Ok(thread::spawn(move || {
-            let node = GethNode::new(context);
+            let use_fallback_gas_filler = matches!(context, Context::Test(..));
+            let node = GethNode::new(context, use_fallback_gas_filler);
             let node = spawn_node::<GethNode>(node, genesis)?;
             Ok(Box::new(node) as Box<_>)
         }))
@@ -145,7 +146,8 @@ impl Platform for LighthouseGethEvmSolcPlatform {
         let genesis_configuration = AsRef::<GenesisConfiguration>::as_ref(&context);
         let genesis = genesis_configuration.genesis()?.clone();
         Ok(thread::spawn(move || {
-            let node = LighthouseGethNode::new(context);
+            let use_fallback_gas_filler = matches!(context, Context::Test(..));
+            let node = LighthouseGethNode::new(context, use_fallback_gas_filler);
             let node = spawn_node::<LighthouseGethNode>(node, genesis)?;
             Ok(Box::new(node) as Box<_>)
         }))
@@ -206,12 +208,14 @@ impl Platform for ReviveDevNodePolkavmResolcPlatform {
 
         let genesis = genesis_configuration.genesis()?.clone();
         Ok(thread::spawn(move || {
+            let use_fallback_gas_filler = matches!(context, Context::Test(..));
             let node = SubstrateNode::new(
                 revive_dev_node_path,
                 SubstrateNode::REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND,
                 Some(revive_dev_node_consensus),
                 context,
                 &eth_rpc_connection_strings,
+                use_fallback_gas_filler,
             );
             let node = spawn_node(node, genesis)?;
             Ok(Box::new(node) as Box<_>)
@@ -274,12 +278,14 @@ impl Platform for ReviveDevNodeRevmSolcPlatform {
 
         let genesis = genesis_configuration.genesis()?.clone();
         Ok(thread::spawn(move || {
+            let use_fallback_gas_filler = matches!(context, Context::Test(..));
             let node = SubstrateNode::new(
                 revive_dev_node_path,
                 SubstrateNode::REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND,
                 Some(revive_dev_node_consensus),
                 context,
                 &eth_rpc_connection_strings,
+                use_fallback_gas_filler,
             );
             let node = spawn_node(node, genesis)?;
             Ok(Box::new(node) as Box<_>)
@@ -338,7 +344,9 @@ impl Platform for ZombienetPolkavmResolcPlatform {
             .clone();
         let genesis = genesis_configuration.genesis()?.clone();
         Ok(thread::spawn(move || {
-            let node = ZombienetNode::new(polkadot_parachain_path, context);
+            let use_fallback_gas_filler = matches!(context, Context::Test(..));
+            let node =
+                ZombienetNode::new(polkadot_parachain_path, context, use_fallback_gas_filler);
             let node = spawn_node(node, genesis)?;
             Ok(Box::new(node) as Box<_>)
         }))
@@ -395,7 +403,9 @@ impl Platform for ZombienetRevmSolcPlatform {
             .clone();
         let genesis = genesis_configuration.genesis()?.clone();
         Ok(thread::spawn(move || {
-            let node = ZombienetNode::new(polkadot_parachain_path, context);
+            let use_fallback_gas_filler = matches!(context, Context::Test(..));
+            let node =
+                ZombienetNode::new(polkadot_parachain_path, context, use_fallback_gas_filler);
             let node = spawn_node(node, genesis)?;
             Ok(Box::new(node) as Box<_>)
         }))
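All five platform impls compute the flag identically, so the behavior is uniform: the fallback gas filler is switched on only when a node is spawned for a `Context::Test` run, and left off for any other context. A reduced sketch of the `matches!` toggle, with placeholder variants standing in for the harness's real `Context` enum:

```rust
// Placeholder shape; the real `Context` lives in the harness crates.
enum Context {
    Test(()),
    Other(()),
}

fn use_fallback_gas_filler(context: &Context) -> bool {
    // `matches!` is a boolean pattern test: true only for `Context::Test(..)`.
    matches!(context, Context::Test(..))
}

fn main() {
    assert!(use_fallback_gas_filler(&Context::Test(())));
    assert!(!use_fallback_gas_filler(&Context::Other(())));
}
```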
@@ -1,3 +1,4 @@
+use alloy::primitives::Address;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 
@@ -107,6 +108,20 @@ impl Case {
             None => Mode::all().cloned().collect(),
         }
     }
+
+    pub fn deployer_address(&self) -> Address {
+        self.steps
+            .iter()
+            .filter_map(|step| match step {
+                Step::FunctionCall(input) => input.caller.as_address().copied(),
+                Step::BalanceAssertion(..) => None,
+                Step::StorageEmptyAssertion(..) => None,
+                Step::Repeat(..) => None,
+                Step::AllocateAccount(..) => None,
+            })
+            .next()
+            .unwrap_or(FunctionCallStep::default_caller_address())
+    }
 }
 
 define_wrapper_type!(
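This helper centralizes the lookup the two drivers previously duplicated inline: the deployer is the caller of the first function-call step, falling back to the default caller (per the comment removed from the drivers, this keeps different cases deploying from different accounts so they are not serialized on a single nonce). The rule in isolation, with simplified placeholder types:

```rust
// Simplified stand-ins for the real `Step` and `Address` types.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Address(u8);

enum Step {
    FunctionCall { caller: Option<Address> },
    Other,
}

fn deployer_address(steps: &[Step], default_caller: Address) -> Address {
    steps
        .iter()
        .find_map(|step| match step {
            Step::FunctionCall { caller } => *caller,
            Step::Other => None,
        })
        .unwrap_or(default_caller)
}

fn main() {
    let steps = [Step::Other, Step::FunctionCall { caller: Some(Address(7)) }];
    assert_eq!(deployer_address(&steps, Address(0)), Address(7));
}
```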
@@ -706,6 +706,7 @@ impl Calldata {
                 .await
                 .context("Failed to resolve calldata item during equivalence check")?;
                 let other = U256::from_be_slice(&other);
+
                 Ok(this == other)
             })
             .buffered(0xFF)
@@ -718,7 +719,7 @@ impl Calldata {
 }
 
 impl CalldataItem {
-    #[instrument(level = "info", skip_all, err)]
+    #[instrument(level = "info", skip_all, err(Debug))]
     async fn resolve(
         &self,
         resolver: &(impl ResolverApi + ?Sized),
@@ -906,7 +907,7 @@ impl<T: AsRef<str>> CalldataToken<T> {
             let block_hash = resolver
                 .block_hash(desired_block_number.into())
                 .await
-                .context("Failed to resolve block hash for desired block number")?;
+                .context(format!("Failed to resolve the block hash of block number {desired_block_number}"))?;
 
             Ok(U256::from_be_bytes(block_hash.0))
         } else if item == Self::BLOCK_NUMBER_VARIABLE {
@@ -76,6 +76,7 @@ pub struct GethNode {
     wallet: Arc<EthereumWallet>,
     nonce_manager: CachedNonceManager,
     provider: OnceCell<ConcreteProvider<Ethereum, Arc<EthereumWallet>>>,
+    use_fallback_gas_filler: bool,
 }
 
 impl GethNode {
@@ -100,6 +101,7 @@ impl GethNode {
             + AsRef<WalletConfiguration>
             + AsRef<GethConfiguration>
             + Clone,
+        use_fallback_gas_filler: bool,
     ) -> Self {
         let working_directory_configuration =
             AsRef::<WorkingDirectoryConfiguration>::as_ref(&context);
@@ -126,6 +128,7 @@ impl GethNode {
             wallet: wallet.clone(),
             nonce_manager: Default::default(),
             provider: Default::default(),
+            use_fallback_gas_filler,
         }
     }
 
@@ -246,7 +249,8 @@ impl GethNode {
             .get_or_try_init(|| async move {
                 construct_concurrency_limited_provider::<Ethereum, _>(
                     self.connection_string.as_str(),
-                    FallbackGasFiller::default(),
+                    FallbackGasFiller::default()
+                        .with_use_fallback_gas_filler(self.use_fallback_gas_filler),
                     ChainIdFiller::new(Some(CHAIN_ID)),
                     NonceFiller::new(self.nonce_manager.clone()),
                     self.wallet.clone(),
@@ -742,7 +746,7 @@ mod tests {
 
     fn new_node() -> (TestExecutionContext, GethNode) {
         let context = test_config();
-        let mut node = GethNode::new(&context);
+        let mut node = GethNode::new(&context, true);
         node.init(context.genesis_configuration.genesis().unwrap().clone())
             .expect("Failed to initialize the node")
             .spawn_process()
@@ -106,6 +106,8 @@ pub struct LighthouseGethNode {
 
     persistent_http_provider: OnceCell<ConcreteProvider<Ethereum, Arc<EthereumWallet>>>,
     persistent_ws_provider: OnceCell<ConcreteProvider<Ethereum, Arc<EthereumWallet>>>,
+
+    use_fallback_gas_filler: bool,
 }
 
 impl LighthouseGethNode {
@@ -127,6 +129,7 @@ impl LighthouseGethNode {
             + AsRef<WalletConfiguration>
             + AsRef<KurtosisConfiguration>
             + Clone,
+        use_fallback_gas_filler: bool,
     ) -> Self {
         let working_directory_configuration =
             AsRef::<WorkingDirectoryConfiguration>::as_ref(&context);
@@ -176,6 +179,7 @@ impl LighthouseGethNode {
             nonce_manager: Default::default(),
             persistent_http_provider: OnceCell::const_new(),
             persistent_ws_provider: OnceCell::const_new(),
+            use_fallback_gas_filler,
         }
     }
 
@@ -374,7 +378,8 @@ impl LighthouseGethNode {
             .get_or_try_init(|| async move {
                 construct_concurrency_limited_provider::<Ethereum, _>(
                     self.ws_connection_string.as_str(),
-                    FallbackGasFiller::default(),
+                    FallbackGasFiller::default()
+                        .with_use_fallback_gas_filler(self.use_fallback_gas_filler),
                     ChainIdFiller::new(Some(CHAIN_ID)),
                     NonceFiller::new(self.nonce_manager.clone()),
                     self.wallet.clone(),
@@ -1152,7 +1157,7 @@ mod tests {
         let _guard = NODE_START_MUTEX.lock().unwrap();
 
         let context = test_config();
-        let mut node = LighthouseGethNode::new(&context);
+        let mut node = LighthouseGethNode::new(&context, true);
        node.init(context.genesis_configuration.genesis().unwrap().clone())
            .expect("Failed to initialize the node")
            .spawn_process()
@@ -4,7 +4,7 @@ use std::{
     pin::Pin,
     process::{Command, Stdio},
     sync::{
-        Arc,
+        Arc, Mutex,
         atomic::{AtomicU32, Ordering},
     },
     time::Duration,
@@ -32,7 +32,7 @@ use futures::{FutureExt, Stream, StreamExt};
 use revive_common::EVMVersion;
 use revive_dt_common::fs::clear_directory;
 use revive_dt_format::traits::ResolverApi;
-use serde_json::json;
+use serde_json::{Value, json};
 use sp_core::crypto::Ss58Codec;
 use sp_runtime::AccountId32;
 
@@ -57,6 +57,9 @@ use crate::{
|
|||||||
|
|
||||||
static NODE_COUNT: AtomicU32 = AtomicU32::new(0);
|
static NODE_COUNT: AtomicU32 = AtomicU32::new(0);
|
||||||
|
|
||||||
|
/// The number of blocks that should be cached by the revive-dev-node and the eth-rpc.
|
||||||
|
const NUMBER_OF_CACHED_BLOCKS: u32 = 100_000;
|
||||||
|
|
||||||
/// A node implementation for Substrate based chains. Currently, this supports either substrate
|
/// A node implementation for Substrate based chains. Currently, this supports either substrate
|
||||||
/// or the revive-dev-node which is done by changing the path and some of the other arguments passed
|
/// or the revive-dev-node which is done by changing the path and some of the other arguments passed
|
||||||
/// to the command.
|
/// to the command.
|
||||||
@@ -76,6 +79,7 @@ pub struct SubstrateNode {
|
|||||||
nonce_manager: CachedNonceManager,
|
nonce_manager: CachedNonceManager,
|
||||||
provider: OnceCell<ConcreteProvider<Ethereum, Arc<EthereumWallet>>>,
|
provider: OnceCell<ConcreteProvider<Ethereum, Arc<EthereumWallet>>>,
|
||||||
consensus: Option<String>,
|
consensus: Option<String>,
|
||||||
|
use_fallback_gas_filler: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SubstrateNode {
|
impl SubstrateNode {
|
||||||
@@ -102,6 +106,7 @@ impl SubstrateNode {
|
|||||||
+ AsRef<EthRpcConfiguration>
|
+ AsRef<EthRpcConfiguration>
|
||||||
+ AsRef<WalletConfiguration>,
|
+ AsRef<WalletConfiguration>,
|
||||||
existing_connection_strings: &[String],
|
existing_connection_strings: &[String],
|
||||||
|
use_fallback_gas_filler: bool,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
let working_directory_path =
|
let working_directory_path =
|
||||||
AsRef::<WorkingDirectoryConfiguration>::as_ref(&context).as_path();
|
AsRef::<WorkingDirectoryConfiguration>::as_ref(&context).as_path();
|
||||||
@@ -134,10 +139,13 @@ impl SubstrateNode {
|
|||||||
nonce_manager: Default::default(),
|
nonce_manager: Default::default(),
|
||||||
provider: Default::default(),
|
provider: Default::default(),
|
||||||
consensus,
|
consensus,
|
||||||
|
use_fallback_gas_filler,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn init(&mut self, _: Genesis) -> anyhow::Result<&mut Self> {
|
fn init(&mut self, _: Genesis) -> anyhow::Result<&mut Self> {
|
||||||
|
static CHAINSPEC_MUTEX: Mutex<Option<Value>> = Mutex::new(None);
|
||||||
|
|
||||||
if !self.rpc_url.is_empty() {
|
if !self.rpc_url.is_empty() {
|
||||||
return Ok(self);
|
return Ok(self);
|
||||||
}
|
}
|
||||||
@@ -156,12 +164,22 @@ impl SubstrateNode {
|
|||||||
let template_chainspec_path = self.base_directory.join(Self::CHAIN_SPEC_JSON_FILE);
|
let template_chainspec_path = self.base_directory.join(Self::CHAIN_SPEC_JSON_FILE);
|
||||||
|
|
||||||
trace!("Creating the node genesis");
|
trace!("Creating the node genesis");
|
||||||
let chainspec_json = Self::node_genesis(
|
let chainspec_json = {
|
||||||
&self.node_binary,
|
let mut chainspec_mutex = CHAINSPEC_MUTEX.lock().expect("Poisoned");
|
||||||
&self.export_chainspec_command,
|
match chainspec_mutex.as_ref() {
|
||||||
&self.wallet,
|
Some(chainspec_json) => chainspec_json.clone(),
|
||||||
)
|
None => {
|
||||||
.context("Failed to prepare the chainspec command")?;
|
let chainspec_json = Self::node_genesis(
|
||||||
|
&self.node_binary,
|
||||||
|
&self.export_chainspec_command,
|
||||||
|
&self.wallet,
|
||||||
|
)
|
||||||
|
.context("Failed to prepare the chainspec command")?;
|
||||||
|
*chainspec_mutex = Some(chainspec_json.clone());
|
||||||
|
chainspec_json
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
trace!("Writing the node genesis");
|
trace!("Writing the node genesis");
|
||||||
serde_json::to_writer_pretty(
|
serde_json::to_writer_pretty(
|
||||||
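The hunk above makes the chainspec export run once per process: the first `init()` call pays for the export command, later calls clone the cached JSON out of a function-local `Mutex<Option<Value>>`. A minimal standalone sketch of the same memoization pattern, with illustrative names rather than the crate's API:

use std::sync::Mutex;

// Compute an expensive value once and reuse it on later calls; whichever
// caller arrives while the slot is empty fills it, the rest clone the copy.
fn cached_chainspec() -> String {
    static CACHE: Mutex<Option<String>> = Mutex::new(None);

    let mut guard = CACHE.lock().expect("Poisoned");
    match guard.as_ref() {
        Some(value) => value.clone(),
        None => {
            let value = expensive_export(); // stands in for Self::node_genesis(...)
            *guard = Some(value.clone());
            value
        }
    }
}

fn expensive_export() -> String {
    // Placeholder for the real chainspec export command.
    String::from("{\"genesis\": {}}")
}

fn main() {
    assert_eq!(cached_chainspec(), cached_chainspec());
}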
@@ -212,6 +230,8 @@ impl SubstrateNode {
             .arg(u32::MAX.to_string())
             .arg("--pool-kbytes")
             .arg(u32::MAX.to_string())
+            .arg("--state-pruning")
+            .arg(NUMBER_OF_CACHED_BLOCKS.to_string())
             .env("RUST_LOG", Self::SUBSTRATE_LOG_ENV)
             .stdout(stdout_file)
             .stderr(stderr_file);
@@ -251,6 +271,10 @@ impl SubstrateNode {
             .arg(format!("ws://127.0.0.1:{substrate_rpc_port}"))
             .arg("--rpc-max-connections")
             .arg(u32::MAX.to_string())
+            .arg("--index-last-n-blocks")
+            .arg(NUMBER_OF_CACHED_BLOCKS.to_string())
+            .arg("--cache-size")
+            .arg(NUMBER_OF_CACHED_BLOCKS.to_string())
             .env("RUST_LOG", Self::PROXY_LOG_ENV)
             .stdout(stdout_file)
             .stderr(stderr_file);
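The two hunks above thread `NUMBER_OF_CACHED_BLOCKS` into both spawned processes: `--state-pruning` on the revive-dev-node and `--index-last-n-blocks` plus `--cache-size` on the eth-rpc proxy. A stripped-down sketch of how those flags land on the command lines, with stdio wiring, env, and the remaining arguments elided:

use std::process::Command;

const NUMBER_OF_CACHED_BLOCKS: u32 = 100_000;

fn main() {
    // The dev node keeps state for the last N blocks instead of pruning early.
    let mut node = Command::new("revive-dev-node");
    node.arg("--state-pruning")
        .arg(NUMBER_OF_CACHED_BLOCKS.to_string());

    // The proxy indexes and caches the same window of blocks.
    let mut proxy = Command::new("eth-rpc");
    proxy
        .arg("--index-last-n-blocks")
        .arg(NUMBER_OF_CACHED_BLOCKS.to_string())
        .arg("--cache-size")
        .arg(NUMBER_OF_CACHED_BLOCKS.to_string());
    // .spawn() omitted in this sketch; the real code also sets RUST_LOG and stdio.
}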
@@ -303,7 +327,12 @@ impl SubstrateNode {
             .get_or_try_init(|| async move {
                 construct_concurrency_limited_provider::<Ethereum, _>(
                     self.rpc_url.as_str(),
-                    FallbackGasFiller::new(u64::MAX, 5_000_000_000, 1_000_000_000),
+                    FallbackGasFiller::new(
+                        u64::MAX,
+                        50_000_000_000,
+                        1_000_000_000,
+                        self.use_fallback_gas_filler,
+                    ),
                     ChainIdFiller::new(Some(CHAIN_ID)),
                     NonceFiller::new(self.nonce_manager.clone()),
                     self.wallet.clone(),
@@ -804,6 +833,7 @@ mod tests {
             None,
             &context,
             &[],
+            true,
         );
         node.init(context.genesis_configuration.genesis().unwrap().clone())
             .expect("Failed to initialize the node")
@@ -875,6 +905,7 @@ mod tests {
             None,
             &context,
             &[],
+            true,
         );
 
         // Call `init()`
@@ -114,6 +114,8 @@ pub struct ZombienetNode {
     nonce_manager: CachedNonceManager,
 
     provider: OnceCell<ConcreteProvider<Ethereum, Arc<EthereumWallet>>>,
+
+    use_fallback_gas_filler: bool,
 }
 
 impl ZombienetNode {
@@ -137,6 +139,7 @@ impl ZombienetNode {
         context: impl AsRef<WorkingDirectoryConfiguration>
             + AsRef<EthRpcConfiguration>
             + AsRef<WalletConfiguration>,
+        use_fallback_gas_filler: bool,
     ) -> Self {
         let eth_proxy_binary = AsRef::<EthRpcConfiguration>::as_ref(&context)
             .path
@@ -164,6 +167,7 @@ impl ZombienetNode {
             connection_string: String::new(),
             node_rpc_port: None,
             provider: Default::default(),
+            use_fallback_gas_filler,
         }
     }
 
@@ -330,7 +334,12 @@ impl ZombienetNode {
             .get_or_try_init(|| async move {
                 construct_concurrency_limited_provider::<Ethereum, _>(
                     self.connection_string.as_str(),
-                    FallbackGasFiller::new(u64::MAX, 5_000_000_000, 1_000_000_000),
+                    FallbackGasFiller::new(
+                        u64::MAX,
+                        5_000_000_000,
+                        1_000_000_000,
+                        self.use_fallback_gas_filler,
+                    ),
                     ChainIdFiller::default(), // TODO: use CHAIN_ID constant
                     NonceFiller::new(self.nonce_manager.clone()),
                     self.wallet.clone(),
@@ -823,6 +832,7 @@ mod tests {
         let mut node = ZombienetNode::new(
             context.polkadot_parachain_configuration.path.clone(),
             &context,
+            true,
         );
         let genesis = context.genesis_configuration.genesis().unwrap().clone();
         node.init(genesis).unwrap();
@@ -856,7 +866,7 @@ mod tests {
     use utils::{new_node, test_config};
 
     #[tokio::test]
-    #[ignore = "Ignored for the time being"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn test_transfer_transaction_should_return_receipt() {
         // Arrange
         let (ctx, node) = new_node().await;
@@ -882,6 +892,7 @@ mod tests {
     }
 
     #[test]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     fn print_eth_to_polkadot_mappings() {
         let eth_addresses = vec![
             "0x90F8bf6A479f320ead074411a4B0e7944Ea8c9C1",
@@ -897,6 +908,7 @@ mod tests {
     }
 
     #[test]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     fn test_eth_to_polkadot_address() {
         let cases = vec![
             (
@@ -927,12 +939,14 @@ mod tests {
     }
 
     #[test]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     fn eth_rpc_version_works() {
         // Arrange
         let context = test_config();
         let node = ZombienetNode::new(
             context.polkadot_parachain_configuration.path.clone(),
             &context,
+            true,
         );
 
         // Act
@@ -946,12 +960,14 @@ mod tests {
     }
 
     #[test]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     fn version_works() {
         // Arrange
         let context = test_config();
         let node = ZombienetNode::new(
             context.polkadot_parachain_configuration.path.clone(),
             &context,
+            true,
         );
 
         // Act
@@ -965,7 +981,7 @@ mod tests {
     }
 
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn get_chain_id_from_node_should_succeed() {
         // Arrange
         let node = shared_node().await;
@@ -984,7 +1000,7 @@ mod tests {
     }
 
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn can_get_gas_limit_from_node() {
         // Arrange
         let node = shared_node().await;
@@ -1002,7 +1018,7 @@ mod tests {
     }
 
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn can_get_coinbase_from_node() {
         // Arrange
         let node = shared_node().await;
@@ -1020,7 +1036,7 @@ mod tests {
     }
 
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn can_get_block_difficulty_from_node() {
         // Arrange
         let node = shared_node().await;
@@ -1038,7 +1054,7 @@ mod tests {
     }
 
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn can_get_block_hash_from_node() {
         // Arrange
         let node = shared_node().await;
@@ -1056,7 +1072,7 @@ mod tests {
     }
 
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn can_get_block_timestamp_from_node() {
         // Arrange
         let node = shared_node().await;
@@ -1074,7 +1090,7 @@ mod tests {
     }
 
     #[tokio::test]
-    #[ignore = "Ignored since they take a long time to run"]
+    #[ignore = "Ignored since CI doesn't have zombienet installed"]
     async fn can_get_block_number_from_node() {
         // Arrange
         let node = shared_node().await;
@@ -4,7 +4,7 @@ use alloy::{
         Provider, SendableTx,
         fillers::{GasFiller, TxFiller},
     },
-    transports::TransportResult,
+    transports::{TransportError, TransportResult},
 };
 
 // Percentage padding applied to estimated gas (e.g. 120 = 20% padding)
@@ -17,6 +17,7 @@ pub struct FallbackGasFiller {
     default_gas_limit: u64,
     default_max_fee_per_gas: u128,
     default_priority_fee: u128,
+    use_fallback_gas_filler: bool,
 }
 
 impl FallbackGasFiller {
@@ -24,19 +25,41 @@ impl FallbackGasFiller {
         default_gas_limit: u64,
         default_max_fee_per_gas: u128,
         default_priority_fee: u128,
+        use_fallback_gas_filler: bool,
     ) -> Self {
         Self {
             inner: GasFiller,
             default_gas_limit,
             default_max_fee_per_gas,
             default_priority_fee,
+            use_fallback_gas_filler,
         }
     }
+
+    pub fn with_default_gas_limit(mut self, default_gas_limit: u64) -> Self {
+        self.default_gas_limit = default_gas_limit;
+        self
+    }
+
+    pub fn with_default_max_fee_per_gas(mut self, default_max_fee_per_gas: u128) -> Self {
+        self.default_max_fee_per_gas = default_max_fee_per_gas;
+        self
+    }
+
+    pub fn with_default_priority_fee(mut self, default_priority_fee: u128) -> Self {
+        self.default_priority_fee = default_priority_fee;
+        self
+    }
+
+    pub fn with_use_fallback_gas_filler(mut self, use_fallback_gas_filler: bool) -> Self {
+        self.use_fallback_gas_filler = use_fallback_gas_filler;
+        self
+    }
 }
 
 impl Default for FallbackGasFiller {
     fn default() -> Self {
-        FallbackGasFiller::new(25_000_000, 1_000_000_000, 1_000_000_000)
+        FallbackGasFiller::new(25_000_000, 1_000_000_000, 1_000_000_000, true)
     }
 }
@@ -64,7 +87,12 @@ where
             Ok(fill) => Ok(Some(fill)),
             Err(err) => {
                 tracing::debug!(error = ?err, "Gas Provider Estimation Failed, using fallback");
-                Ok(None)
+
+                if !self.use_fallback_gas_filler {
+                    Err(err)
+                } else {
+                    Ok(None)
+                }
             }
         }
     }
@@ -86,13 +114,17 @@ where
                 }
             }
             Ok(tx)
-        } else {
+        } else if self.use_fallback_gas_filler {
             if let Some(builder) = tx.as_mut_builder() {
                 builder.set_gas_limit(self.default_gas_limit);
                 builder.set_max_fee_per_gas(self.default_max_fee_per_gas);
                 builder.set_max_priority_fee_per_gas(self.default_priority_fee);
            }
             Ok(tx)
+        } else {
+            Err(TransportError::UnsupportedFeature(
+                "Fallback gas filler is disabled and we're attempting to do a gas estimate on a failing transaction",
+            ))
         }
     }
 }
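Together, the two hunks above make the fallback behaviour opt-in: with `use_fallback_gas_filler` set to false, a failed gas estimation now surfaces as an error instead of being silently replaced by the default limits and fees. A standalone sketch of that decision, using illustrative local names rather than the crate's types:

// Standalone sketch of the opt-in fallback; `Defaults` and `fill_gas` are
// illustrative stand-ins, not the crate's FallbackGasFiller API.
struct Defaults {
    gas_limit: u64,
}

fn fill_gas(
    estimate: Result<u64, String>,
    use_fallback: bool,
    defaults: &Defaults,
) -> Result<u64, String> {
    match estimate {
        Ok(gas) => Ok(gas),                    // estimation succeeded: use it
        Err(err) if !use_fallback => Err(err), // fallback disabled: surface the error
        Err(_) => Ok(defaults.gas_limit),      // fallback enabled: substitute the default
    }
}

fn main() {
    let defaults = Defaults { gas_limit: 25_000_000 };
    assert_eq!(fill_gas(Err("estimation failed".into()), true, &defaults), Ok(25_000_000));
    assert!(fill_gas(Err("estimation failed".into()), false, &defaults).is_err());
}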
@@ -104,7 +104,7 @@ where
         };
         debug!(%tx_hash, "Submitted Transaction");
 
-        pending_transaction.set_timeout(Some(Duration::from_secs(240)));
+        pending_transaction.set_timeout(Some(Duration::from_secs(120)));
         let tx_hash = pending_transaction.watch().await.context(format!(
             "Transaction inclusion watching timeout for {tx_hash}"
         ))?;
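The inclusion window above is halved from 240 to 120 seconds, and an elapsed timer is converted into a contextual error. A generic sketch of the same bound-the-wait shape in plain tokio/anyhow terms (assumed dependencies; the real code uses the pending-transaction watcher shown above):

use std::time::Duration;

use anyhow::Context;
use tokio::time::timeout;

// Bound an async wait and turn the elapsed case into a contextual error,
// mirroring the set_timeout + watch + context flow above.
async fn wait_for_inclusion<F>(tx_hash: &str, fut: F) -> anyhow::Result<String>
where
    F: std::future::Future<Output = String>,
{
    timeout(Duration::from_secs(120), fut)
        .await
        .context(format!("Transaction inclusion watching timeout for {tx_hash}"))
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // A future that resolves immediately stands in for the real watcher.
    let hash = wait_for_inclusion("0xabc", async { String::from("0xabc") }).await?;
    println!("included: {hash}");
    Ok(())
}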
@@ -2,12 +2,13 @@
 
 use std::{
     collections::HashMap,
+    str::FromStr,
     sync::{LazyLock, Mutex},
 };
 
 use revive_dt_common::types::VersionOrRequirement;
 
-use semver::Version;
+use semver::{Version, VersionReq};
 use sha2::{Digest, Sha256};
 
 use crate::list::List;
@@ -65,6 +66,9 @@ impl SolcDownloader {
         target: &'static str,
         list: &'static str,
     ) -> anyhow::Result<Self> {
+        static MAXIMUM_COMPILER_VERSION_REQUIREMENT: LazyLock<VersionReq> =
+            LazyLock::new(|| VersionReq::from_str("<=0.8.30").unwrap());
+
         let version_or_requirement = version.into();
         match version_or_requirement {
             VersionOrRequirement::Version(version) => Ok(Self {
@@ -79,7 +83,10 @@ impl SolcDownloader {
                     .builds
                     .into_iter()
                     .map(|build| build.version)
-                    .filter(|version| requirement.matches(version))
+                    .filter(|version| {
+                        MAXIMUM_COMPILER_VERSION_REQUIREMENT.matches(version)
+                            && requirement.matches(version)
+                    })
                     .max()
                 else {
                     anyhow::bail!("Failed to find a version that satisfies {requirement:?}");
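The hunk above hard-caps requirement resolution at solc 0.8.30: the newest build is picked only from versions that satisfy both the static cap and the caller's requirement. A small runnable sketch of that filter using the same `semver` crate; the version numbers here are illustrative:

use std::str::FromStr;

use semver::{Version, VersionReq};

fn main() {
    // The hard cap from the diff plus a hypothetical user requirement.
    let cap = VersionReq::from_str("<=0.8.30").unwrap();
    let requirement = VersionReq::from_str(">=0.8.20").unwrap();

    let available = ["0.8.19", "0.8.28", "0.8.31"];
    // Keep only builds that satisfy both bounds, then take the newest one.
    let best = available
        .iter()
        .map(|v| Version::from_str(v).unwrap())
        .filter(|v| cap.matches(v) && requirement.matches(v))
        .max();

    assert_eq!(best, Some(Version::from_str("0.8.28").unwrap()));
}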
Submodule polkadot-sdk deleted from a44be635e6
Submodule resolc-compiler-tests updated: a9d1f54b74...55da34c4f6
@@ -28,7 +28,7 @@ from __future__ import annotations
 import json
 import sys
 import csv
-from typing import List, Mapping, TypedDict
+from typing import List, Mapping, TypedDict, no_type_check
 
 
 class EthereumMinedBlockInformation(TypedDict):
@@ -69,7 +69,43 @@ class MinedBlockInformation(TypedDict):
     """Block-level information for a mined block with both EVM and optional Substrate fields."""
 
     ethereum_block_information: EthereumMinedBlockInformation
-    substrate_block_information: SubstrateMinedBlockInformation
+    substrate_block_information: SubstrateMinedBlockInformation | None
+
+
+def substrate_block_information_ref_time(
+    block: SubstrateMinedBlockInformation | None,
+) -> int | None:
+    if block is None:
+        return None
+    else:
+        return block["ref_time"]
+
+
+def substrate_block_information_max_ref_time(
+    block: SubstrateMinedBlockInformation | None,
+) -> int | None:
+    if block is None:
+        return None
+    else:
+        return block["max_ref_time"]
+
+
+def substrate_block_information_proof_size(
+    block: SubstrateMinedBlockInformation | None,
+) -> int | None:
+    if block is None:
+        return None
+    else:
+        return block["proof_size"]
+
+
+def substrate_block_information_max_proof_size(
+    block: SubstrateMinedBlockInformation | None,
+) -> int | None:
+    if block is None:
+        return None
+    else:
+        return block["max_proof_size"]
 
 
 class Metric(TypedDict):
@@ -100,8 +136,19 @@ class Metrics(TypedDict):
     transaction_per_second: Metric
     gas_per_second: Metric
     gas_block_fullness: Metric
-    ref_time_block_fullness: Metric
-    proof_size_block_fullness: Metric
+    ref_time_block_fullness: Metric | None
+    proof_size_block_fullness: Metric | None
+
+
+@no_type_check
+def metrics_raw_item(
+    metrics: Metrics, name: str, target: str, index: int
+) -> int | None:
+    l: list[int] = metrics.get(name, dict()).get("raw", dict()).get(target, dict())
+    try:
+        return l[index]
+    except:
+        return None
 
 
 class ExecutionReport(TypedDict):
@@ -144,12 +191,15 @@ BlockInformation = TypedDict(
         "Transaction Count": int,
         "TPS": int | None,
         "GPS": int | None,
-        "Ref Time": int,
-        "Max Ref Time": int,
-        "Block Fullness Ref Time": int,
-        "Proof Size": int,
-        "Max Proof Size": int,
-        "Block Fullness Proof Size": int,
+        "Gas Mined": int,
+        "Block Gas Limit": int,
+        "Block Fullness Gas": float,
+        "Ref Time": int | None,
+        "Max Ref Time": int | None,
+        "Block Fullness Ref Time": int | None,
+        "Proof Size": int | None,
+        "Max Proof Size": int | None,
+        "Block Fullness Proof Size": int | None,
     },
 )
 """A typed dictionary used to hold all of the block information"""
@@ -175,7 +225,7 @@ def main() -> None:
     report: ReportRoot = load_report(report_path)
 
     # TODO: Remove this in the future, but for now, the target is fixed.
-    target: str = "revive-dev-node-revm-solc"
+    target: str = sys.argv[2]
 
     csv_writer = csv.writer(sys.stdout)
 
@@ -188,6 +238,12 @@ def main() -> None:
 
     resolved_blocks: list[BlockInformation] = []
     for i, block_information in enumerate(blocks_information):
+        mined_gas: int = block_information["ethereum_block_information"][
+            "mined_gas"
+        ]
+        block_gas_limit: int = block_information[
+            "ethereum_block_information"
+        ]["block_gas_limit"]
         resolved_blocks.append(
             {
                 "Block Number": block_information[
@@ -216,24 +272,37 @@ def main() -> None:
                         "raw"
                     ][target][i - 1]
                 ),
-                "Ref Time": block_information[
-                    "substrate_block_information"
-                ]["ref_time"],
-                "Max Ref Time": block_information[
-                    "substrate_block_information"
-                ]["max_ref_time"],
-                "Block Fullness Ref Time": execution_report["metrics"][
-                    "ref_time_block_fullness"
-                ]["raw"][target][i],
-                "Proof Size": block_information[
-                    "substrate_block_information"
-                ]["proof_size"],
-                "Max Proof Size": block_information[
-                    "substrate_block_information"
-                ]["max_proof_size"],
-                "Block Fullness Proof Size": execution_report["metrics"][
-                    "proof_size_block_fullness"
-                ]["raw"][target][i],
+                "Gas Mined": block_information[
+                    "ethereum_block_information"
+                ]["mined_gas"],
+                "Block Gas Limit": block_information[
+                    "ethereum_block_information"
+                ]["block_gas_limit"],
+                "Block Fullness Gas": mined_gas / block_gas_limit,
+                "Ref Time": substrate_block_information_ref_time(
+                    block_information["substrate_block_information"]
+                ),
+                "Max Ref Time": substrate_block_information_max_ref_time(
+                    block_information["substrate_block_information"]
+                ),
+                "Block Fullness Ref Time": metrics_raw_item(
+                    execution_report["metrics"],
+                    "ref_time_block_fullness",
+                    target,
+                    i,
+                ),
+                "Proof Size": substrate_block_information_proof_size(
+                    block_information["substrate_block_information"]
+                ),
+                "Max Proof Size": substrate_block_information_max_proof_size(
+                    block_information["substrate_block_information"]
+                ),
+                "Block Fullness Proof Size": metrics_raw_item(
+                    execution_report["metrics"],
+                    "proof_size_block_fullness",
+                    target,
+                    i,
+                ),
             }
         )
 