mirror of
https://github.com/pezkuwichain/revive-differential-tests.git
synced 2026-04-22 21:57:58 +00:00
Compare commits
5 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 4a151a8555 | |
| | 08c1572870 | |
| | cd6b7969ac | |
| | 78ac7ee381 | |
| | 3edaebdcae | |
```diff
@@ -0,0 +1,105 @@
+name: "Run Revive Differential Tests"
+description: "Builds and runs revive-differential-tests (retester) from this repo against the caller's Polkadot SDK."
+
+inputs:
+  # Setup arguments & environment
+  polkadot-sdk-path:
+    description: "The path of the polkadot-sdk that should be compiled for the tests to run against."
+    required: false
+    default: "."
+    type: string
+  cargo-command:
+    description: "The cargo command to use when compiling and running tests (e.g., forklift cargo)."
+    required: false
+    default: "cargo"
+    type: string
+  revive-differential-tests-ref:
+    description: "The branch, tag or SHA to check out for the revive-differential-tests."
+    required: false
+    default: "main"
+    type: string
+  resolc-version:
+    description: "The version of resolc to install and use in tests."
+    required: false
+    default: "0.5.0"
+    type: string
+  use-compilation-caches:
+    description: "Controls whether the compilation caches will be used for the test run."
+    required: false
+    default: true
+    type: boolean
+
+  # Test Execution Arguments
+  platform:
+    description: "The identifier of the platform to run the tests on (e.g., geth-evm-solc, revive-dev-node-revm-solc)."
+    required: true
+    type: string
+
+runs:
+  using: "composite"
+  steps:
+    - name: Checkout the Differential Tests Repository
+      uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+      with:
+        repository: paritytech/revive-differential-tests
+        ref: ${{ inputs['revive-differential-tests-ref'] }}
+        path: revive-differential-tests
+        submodules: recursive
+    - name: Installing Resolc
+      shell: bash
+      if: ${{ runner.os == 'Linux' && runner.arch == 'X64' }}
+      run: |
+        VERSION="${{ inputs['resolc-version'] }}"
+        ASSET_URL="https://github.com/paritytech/revive/releases/download/v$VERSION/resolc-x86_64-unknown-linux-musl"
+        echo "Downloading resolc v$VERSION from $ASSET_URL"
+        curl -Lsf --show-error -o resolc "$ASSET_URL"
+        chmod +x resolc
+        ./resolc --version
+    - name: Installing Retester
+      shell: bash
+      run: ${{ inputs['cargo-command'] }} install --locked --path revive-differential-tests/crates/core
+    - name: Creating a workdir for retester
+      shell: bash
+      run: mkdir workdir
+    - name: Downloading & Initializing the compilation caches
+      shell: bash
+      if: ${{ inputs['use-compilation-caches'] == 'true' }}
+      run: |
+        curl -fL --retry 3 --retry-all-errors --connect-timeout 10 -o cache.tar.gz "https://github.com/paritytech/revive-differential-tests/releases/download/compilation-caches-v1.1/cache.tar.gz"
+        tar -zxf cache.tar.gz -C ./workdir > /dev/null 2>&1
+    - name: Building the dependencies from the Polkadot SDK
+      shell: bash
+      run: ${{ inputs['cargo-command'] }} build --locked --profile release -p pallet-revive-eth-rpc -p revive-dev-node --manifest-path ${{ inputs['polkadot-sdk-path'] }}/Cargo.toml
+    - name: Running the Differential Tests
+      shell: bash
+      run: |
+        ${{ inputs['cargo-command'] }} run --locked --manifest-path revive-differential-tests/Cargo.toml -- test \
+          --test ./revive-differential-tests/resolc-compiler-tests/fixtures/solidity/simple \
+          --test ./revive-differential-tests/resolc-compiler-tests/fixtures/solidity/complex \
+          --test ./revive-differential-tests/resolc-compiler-tests/fixtures/solidity/translated_semantic_tests \
+          --platform ${{ inputs['platform'] }} \
+          --concurrency.number-of-nodes 10 \
+          --concurrency.number-of-threads 10 \
+          --concurrency.number-of-concurrent-tasks 100 \
+          --working-directory ./workdir \
+          --revive-dev-node.consensus manual-seal-200 \
+          --revive-dev-node.path ${{ inputs['polkadot-sdk-path'] }}/target/release/revive-dev-node \
+          --eth-rpc.path ${{ inputs['polkadot-sdk-path'] }}/target/release/eth-rpc \
+          --resolc.path ./resolc
+    - name: Creating a markdown report of the test execution
+      shell: bash
+      if: ${{ always() }}
+      run: |
+        mv ./workdir/*.json report.json
+        python3 revive-differential-tests/scripts/process-differential-tests-report.py report.json ${{ inputs['platform'] }}
+    - name: Upload the Report to the CI
+      uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
+      if: ${{ always() }}
+      with:
+        name: report-${{ inputs['platform'] }}.md
+        path: report.md
+    - name: Posting the report as a comment on the PR
+      uses: marocchino/sticky-pull-request-comment@773744901bac0e8cbb5a0dc842800d45e9b2b405
+      if: ${{ always() }}
+      with:
+        header: diff-tests-report-${{ inputs['platform'] }}
+        path: report.md
```
```diff
@@ -208,14 +208,18 @@ impl SolidityCompiler for Resolc {
             anyhow::bail!("Compilation failed with an error: {message}");
         }

-        let parsed = serde_json::from_slice::<SolcStandardJsonOutput>(&stdout)
+        let parsed: SolcStandardJsonOutput = {
+            let mut deserializer = serde_json::Deserializer::from_slice(&stdout);
+            deserializer.disable_recursion_limit();
+            serde::de::Deserialize::deserialize(&mut deserializer)
                 .map_err(|e| {
                     anyhow::anyhow!(
                         "failed to parse resolc JSON output: {e}\nstderr: {}",
                         String::from_utf8_lossy(&stderr)
                     )
                 })
-            .context("Failed to parse resolc standard JSON output")?;
+                .context("Failed to parse resolc standard JSON output")?
+        };

         tracing::debug!(
             output = %serde_json::to_string(&parsed).unwrap(),
```
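For reference, the pattern this hunk adopts looks like the following in isolation. This is a minimal sketch, assuming serde_json is built with its `unbounded_depth` feature (which is what provides `disable_recursion_limit`) and using a placeholder `Output` type in place of the real `SolcStandardJsonOutput`:

```rust
use serde::Deserialize;

// Placeholder for the real `SolcStandardJsonOutput` type.
#[derive(Deserialize, Debug)]
struct Output {
    contracts: Option<serde_json::Value>,
}

// Parse deeply nested JSON without serde_json's default 128-level recursion
// limit. Requires serde_json's `unbounded_depth` feature.
fn parse_unbounded(bytes: &[u8]) -> anyhow::Result<Output> {
    let mut deserializer = serde_json::Deserializer::from_slice(bytes);
    deserializer.disable_recursion_limit();
    let parsed = Output::deserialize(&mut deserializer)?;
    Ok(parsed)
}
```

Disabling the limit trades safety for depth: without a crate like `serde_stacker` growing the stack, sufficiently deep input can still overflow it.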
```diff
@@ -375,6 +375,23 @@ pub struct BenchmarkingContext {
     #[arg(short = 'r', long = "default-repetition-count", default_value_t = 1000)]
     pub default_repetition_count: usize,

+    /// This flag controls whether the benchmarking driver should wait for transactions to
+    /// be included in a block before moving on to the next transaction in the sequence.
+    ///
+    /// This behavior is useful in certain cases and not so useful in others. For example, in a
+    /// repetition block where there is a relationship between transactions n and n+1 (for
+    /// example a mint followed by a transfer), you would want to wait for the minting to happen
+    /// before moving on to the transfer. On the other hand, if there is no relationship
+    /// between transactions n and n+1 (e.g., a mint and another mint of a different token),
+    /// then awaiting the first mint's inclusion in a block is not necessary.
+    ///
+    /// By default, this behavior is set to false to allow the benchmarking framework to
+    /// saturate the node's mempool as quickly as possible. However, as explained above, there
+    /// are cases where it is needed, and certain workloads where failure to provide this
+    /// argument would lead to inaccurate results.
+    #[arg(long)]
+    pub await_transaction_inclusion: bool,
+
     /// Configuration parameters for the corpus files to use.
     #[clap(flatten, next_help_heading = "Corpus Configuration")]
     pub corpus_configuration: CorpusConfiguration,
```
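As context for the new flag: with clap's derive API, a bare `bool` field annotated `#[arg(long)]` defaults to false and flips to true only when the flag is present, which is exactly the opt-in behavior the doc comment describes. A minimal sketch (the `Args` struct here is illustrative, not the real `BenchmarkingContext`):

```rust
use clap::Parser;

#[derive(Parser, Debug)]
struct Args {
    /// Wait for each transaction to be included in a block before sending the next.
    #[arg(long)]
    await_transaction_inclusion: bool,
}

fn main() {
    // Absent flag -> false; present flag -> true.
    assert!(!Args::parse_from(["retester"]).await_transaction_inclusion);
    assert!(
        Args::parse_from(["retester", "--await-transaction-inclusion"])
            .await_transaction_inclusion
    );
}
```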
```diff
@@ -1,6 +1,5 @@
 use std::{
     collections::HashMap,
-    ops::ControlFlow,
     sync::{
         Arc,
         atomic::{AtomicUsize, Ordering},
@@ -13,6 +12,7 @@ use alloy::{
     json_abi::JsonAbi,
     network::{Ethereum, TransactionBuilder},
     primitives::{Address, TxHash, U256},
+    providers::Provider,
     rpc::types::{
         TransactionReceipt, TransactionRequest,
         trace::geth::{
@@ -22,12 +22,9 @@ use alloy::{
     },
 };
 use anyhow::{Context as _, Result, bail};
-use futures::TryFutureExt;
+use futures::{FutureExt as _, TryFutureExt};
 use indexmap::IndexMap;
-use revive_dt_common::{
-    futures::{PollingWaitBehavior, poll},
-    types::PrivateKeyAllocator,
-};
+use revive_dt_common::types::PrivateKeyAllocator;
 use revive_dt_format::{
     metadata::{ContractInstance, ContractPathAndIdent},
     steps::{
@@ -37,7 +34,7 @@ use revive_dt_format::{
     traits::{ResolutionContext, ResolverApi},
 };
 use tokio::sync::{Mutex, OnceCell, mpsc::UnboundedSender};
-use tracing::{Instrument, Span, debug, error, field::display, info, info_span, instrument};
+use tracing::{Span, debug, error, field::display, info, instrument};

 use crate::{
     differential_benchmarks::{ExecutionState, WatcherEvent},
@@ -73,6 +70,10 @@ pub struct Driver<'a, I> {
     /// The number of steps that were executed on the driver.
     steps_executed: usize,

+    /// This field controls whether the driver should wait for transactions to be included in a
+    /// block before proceeding forward.
+    await_transaction_inclusion: bool,
+
     /// This is the queue of steps that are to be executed by the driver for this test case. Each
     /// time `execute_step` is called one of the steps is executed.
     steps_iterator: I,
@@ -89,6 +90,7 @@ where
         private_key_allocator: Arc<Mutex<PrivateKeyAllocator>>,
         cached_compiler: &CachedCompiler<'a>,
         watcher_tx: UnboundedSender<WatcherEvent>,
+        await_transaction_inclusion: bool,
        steps: I,
    ) -> Result<Self> {
        let mut this = Driver {
@@ -104,6 +106,7 @@ where
            execution_state: ExecutionState::empty(),
            steps_executed: 0,
            steps_iterator: steps,
+            await_transaction_inclusion,
            watcher_tx,
        };
        this.init_execution_state(cached_compiler)
@@ -166,7 +169,7 @@ where
            code,
        );
        let receipt = self
-            .execute_transaction(tx, None)
+            .execute_transaction(tx, None, Duration::from_secs(5 * 60))
            .and_then(|(_, receipt_fut)| receipt_fut)
            .await
            .inspect_err(|err| {
@@ -365,7 +368,30 @@ where
        let tx = step
            .as_transaction(self.resolver.as_ref(), self.default_resolution_context())
            .await?;
-        Ok(self.execute_transaction(tx, Some(step_path)).await?.0)
+
+        let (tx_hash, receipt_future) = self
+            .execute_transaction(tx.clone(), Some(step_path), Duration::from_secs(30 * 60))
+            .await?;
+        if self.await_transaction_inclusion {
+            let receipt = receipt_future
+                .await
+                .context("Failed while waiting for transaction inclusion in block")?;
+
+            if !receipt.status() {
+                error!(
+                    ?tx,
+                    tx.hash = %receipt.transaction_hash,
+                    ?receipt,
+                    "Encountered a failing benchmark transaction"
+                );
+                bail!(
+                    "Encountered a failing transaction in benchmarks: {}",
+                    receipt.transaction_hash
+                )
+            }
+        }
+
+        Ok(tx_hash)
     }
 }
 }
@@ -466,6 +492,7 @@ where
                 .collect::<Vec<_>>();
                 steps.into_iter()
             },
+            await_transaction_inclusion: self.await_transaction_inclusion,
             watcher_tx: self.watcher_tx.clone(),
         })
         .map(|driver| driver.execute_all());
@@ -632,7 +659,7 @@ where
         };

         let receipt = match self
-            .execute_transaction(tx, step_path)
+            .execute_transaction(tx, step_path, Duration::from_secs(5 * 60))
             .and_then(|(_, receipt_fut)| receipt_fut)
             .await
         {
@@ -677,18 +704,33 @@ where
     #[instrument(
         level = "info",
         skip_all,
-        fields(driver_id = self.driver_id, transaction_hash = tracing::field::Empty)
+        fields(
+            driver_id = self.driver_id,
+            transaction = ?transaction,
+            transaction_hash = tracing::field::Empty
+        ),
+        err(Debug)
     )]
     async fn execute_transaction(
         &self,
         transaction: TransactionRequest,
         step_path: Option<&StepPath>,
+        receipt_wait_duration: Duration,
     ) -> anyhow::Result<(TxHash, impl Future<Output = Result<TransactionReceipt>>)> {
         let node = self.platform_information.node;
-        let transaction_hash = node
-            .submit_transaction(transaction)
+        let provider = node.provider().await.context("Creating provider failed")?;
+
+        let pending_transaction_builder = provider
+            .send_transaction(transaction)
             .await
             .context("Failed to submit transaction")?;
+
+        let transaction_hash = *pending_transaction_builder.tx_hash();
+        let receipt_future = pending_transaction_builder
+            .with_timeout(Some(receipt_wait_duration))
+            .with_required_confirmations(2)
+            .get_receipt()
+            .map(|res| res.context("Failed to get the receipt of the transaction"));
         Span::current().record("transaction_hash", display(transaction_hash));

         info!("Submitted transaction");
@@ -701,28 +743,7 @@ where
             .context("Failed to send the transaction hash to the watcher")?;
         };

-        Ok((transaction_hash, async move {
-            info!("Starting to poll for transaction receipt");
-            poll(
-                Duration::from_secs(30 * 60),
-                PollingWaitBehavior::Constant(Duration::from_secs(1)),
-                || {
-                    async move {
-                        match node.get_receipt(transaction_hash).await {
-                            Ok(receipt) => {
-                                info!("Polling succeeded, receipt found");
-                                Ok(ControlFlow::Break(receipt))
-                            }
-                            Err(_) => Ok(ControlFlow::Continue(())),
-                        }
-                    }
-                    .instrument(info_span!("Polling for receipt"))
-                },
-            )
-            .instrument(info_span!("Polling for receipt", %transaction_hash))
-            .await
-            .inspect(|_| info!("Found the transaction receipt"))
-        }))
+        Ok((transaction_hash, receipt_future))
     }
     // endregion:Transaction Execution
 }
```
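The rewrite above replaces hand-rolled receipt polling with alloy's `PendingTransactionBuilder`. In isolation the pattern looks roughly like this — a sketch assuming a recent alloy release, where the hash is available immediately and the receipt is only awaited if and when the caller needs it:

```rust
use std::time::Duration;

use alloy::{
    network::Ethereum,
    providers::Provider,
    rpc::types::{TransactionReceipt, TransactionRequest},
};

// Submit a transaction, returning its hash immediately, then await the
// eventual receipt with a timeout and a required confirmation count.
async fn submit_and_await(
    provider: &impl Provider<Ethereum>,
    tx: TransactionRequest,
    wait: Duration,
) -> anyhow::Result<TransactionReceipt> {
    let pending = provider.send_transaction(tx).await?;
    let hash = *pending.tx_hash(); // known before inclusion
    println!("submitted {hash}");
    pending
        .with_timeout(Some(wait))
        .with_required_confirmations(2)
        .get_receipt()
        .await
        .map_err(Into::into)
}
```

Splitting the hash from the receipt is what lets the driver fire transactions back to back and only block on inclusion when `--await-transaction-inclusion` is set.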
```diff
@@ -160,6 +160,7 @@ pub async fn handle_differential_benchmarks(
         private_key_allocator,
         cached_compiler.as_ref(),
         watcher_tx.clone(),
+        context.await_transaction_inclusion,
         test_definition
             .case
             .steps_iterator_for_benchmarks(context.default_repetition_count)
```
```diff
@@ -139,23 +139,18 @@ impl Watcher {
                     break;
                 }

-                info!(
-                    block_number = block.ethereum_block_information.block_number,
-                    block_tx_count = block.ethereum_block_information.transaction_hashes.len(),
-                    remaining_transactions = watch_for_transaction_hashes.read().await.len(),
-                    "Observed a block"
-                );
-
                 // Remove all of the transaction hashes observed in this block from the txs we
                 // are currently watching for.
                 let mut watch_for_transaction_hashes =
                     watch_for_transaction_hashes.write().await;
+                let mut relevant_transactions_observed = 0;
                 for tx_hash in block.ethereum_block_information.transaction_hashes.iter() {
                     let Some((step_path, submission_time)) =
                         watch_for_transaction_hashes.remove(tx_hash)
                     else {
                         continue;
                     };
+                    relevant_transactions_observed += 1;
                     let transaction_information = TransactionInformation {
                         transaction_hash: *tx_hash,
                         submission_timestamp: submission_time
@@ -172,6 +167,14 @@ impl Watcher {
                     )
                     .expect("Can't fail")
                 }
+
+                info!(
+                    block_number = block.ethereum_block_information.block_number,
+                    block_tx_count = block.ethereum_block_information.transaction_hashes.len(),
+                    relevant_transactions_observed,
+                    remaining_transactions = watch_for_transaction_hashes.len(),
+                    "Observed a block"
+                );
             }

             info!("Watcher's Block Watching Task Finished");
```
```diff
@@ -91,7 +91,8 @@ impl Platform for GethEvmSolcPlatform {
         let genesis_configuration = AsRef::<GenesisConfiguration>::as_ref(&context);
         let genesis = genesis_configuration.genesis()?.clone();
         Ok(thread::spawn(move || {
-            let node = GethNode::new(context);
+            let use_fallback_gas_filler = matches!(context, Context::Test(..));
+            let node = GethNode::new(context, use_fallback_gas_filler);
             let node = spawn_node::<GethNode>(node, genesis)?;
             Ok(Box::new(node) as Box<_>)
         }))
@@ -145,7 +146,8 @@ impl Platform for LighthouseGethEvmSolcPlatform {
         let genesis_configuration = AsRef::<GenesisConfiguration>::as_ref(&context);
         let genesis = genesis_configuration.genesis()?.clone();
         Ok(thread::spawn(move || {
-            let node = LighthouseGethNode::new(context);
+            let use_fallback_gas_filler = matches!(context, Context::Test(..));
+            let node = LighthouseGethNode::new(context, use_fallback_gas_filler);
             let node = spawn_node::<LighthouseGethNode>(node, genesis)?;
             Ok(Box::new(node) as Box<_>)
         }))
@@ -206,12 +208,14 @@ impl Platform for ReviveDevNodePolkavmResolcPlatform {

         let genesis = genesis_configuration.genesis()?.clone();
         Ok(thread::spawn(move || {
+            let use_fallback_gas_filler = matches!(context, Context::Test(..));
             let node = SubstrateNode::new(
                 revive_dev_node_path,
                 SubstrateNode::REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND,
                 Some(revive_dev_node_consensus),
                 context,
                 &eth_rpc_connection_strings,
+                use_fallback_gas_filler,
             );
             let node = spawn_node(node, genesis)?;
             Ok(Box::new(node) as Box<_>)
@@ -274,12 +278,14 @@ impl Platform for ReviveDevNodeRevmSolcPlatform {

         let genesis = genesis_configuration.genesis()?.clone();
         Ok(thread::spawn(move || {
+            let use_fallback_gas_filler = matches!(context, Context::Test(..));
             let node = SubstrateNode::new(
                 revive_dev_node_path,
                 SubstrateNode::REVIVE_DEV_NODE_EXPORT_CHAINSPEC_COMMAND,
                 Some(revive_dev_node_consensus),
                 context,
                 &eth_rpc_connection_strings,
+                use_fallback_gas_filler,
             );
             let node = spawn_node(node, genesis)?;
             Ok(Box::new(node) as Box<_>)
@@ -338,7 +344,9 @@ impl Platform for ZombienetPolkavmResolcPlatform {
             .clone();
         let genesis = genesis_configuration.genesis()?.clone();
         Ok(thread::spawn(move || {
-            let node = ZombienetNode::new(polkadot_parachain_path, context);
+            let use_fallback_gas_filler = matches!(context, Context::Test(..));
+            let node =
+                ZombienetNode::new(polkadot_parachain_path, context, use_fallback_gas_filler);
             let node = spawn_node(node, genesis)?;
             Ok(Box::new(node) as Box<_>)
         }))
@@ -395,7 +403,9 @@ impl Platform for ZombienetRevmSolcPlatform {
             .clone();
         let genesis = genesis_configuration.genesis()?.clone();
         Ok(thread::spawn(move || {
-            let node = ZombienetNode::new(polkadot_parachain_path, context);
+            let use_fallback_gas_filler = matches!(context, Context::Test(..));
+            let node =
+                ZombienetNode::new(polkadot_parachain_path, context, use_fallback_gas_filler);
             let node = spawn_node(node, genesis)?;
             Ok(Box::new(node) as Box<_>)
         }))
```
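The same gating expression is repeated in every platform above: the fallback gas filler is enabled only for test runs, never for benchmarks, so that benchmarks surface gas-estimation failures instead of papering over them with fallback values. As a self-contained sketch (the `Context` enum here is a rough stand-in for the real CLI context type):

```rust
// Rough stand-in for the real CLI context type.
enum Context {
    Test(TestContext),
    Benchmark(BenchmarkContext),
}
struct TestContext;
struct BenchmarkContext;

// The fallback gas filler is only enabled for test contexts.
fn use_fallback_gas_filler(context: &Context) -> bool {
    matches!(context, Context::Test(..))
}

fn main() {
    assert!(use_fallback_gas_filler(&Context::Test(TestContext)));
    assert!(!use_fallback_gas_filler(&Context::Benchmark(BenchmarkContext)));
}
```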
```diff
@@ -2,9 +2,10 @@ mod differential_benchmarks;
 mod differential_tests;
 mod helpers;

-use anyhow::Context as _;
+use anyhow::{Context as _, bail};
 use clap::Parser;
-use revive_dt_report::ReportAggregator;
+use revive_dt_common::types::ParsedTestSpecifier;
+use revive_dt_report::{ReportAggregator, TestCaseStatus};
 use schemars::schema_for;
 use tracing::{info, level_filters::LevelFilter};
 use tracing_subscriber::{EnvFilter, FmtSubscriber};
@@ -57,9 +58,48 @@ fn main() -> anyhow::Result<()> {
                 let differential_tests_handling_task =
                     handle_differential_tests(*context, reporter);

-                futures::future::try_join(differential_tests_handling_task, report_aggregator_task)
+                let (_, report) = futures::future::try_join(
+                    differential_tests_handling_task,
+                    report_aggregator_task,
+                )
                 .await?;
+
+                // Error out if there are any failing tests.
+                let failures = report
+                    .execution_information
+                    .into_iter()
+                    .flat_map(|(metadata_file_path, metadata_file_report)| {
+                        metadata_file_report.case_reports.into_iter().flat_map(
+                            move |(case_idx, case_report)| {
+                                let metadata_file_path = metadata_file_path.clone();
+                                case_report.mode_execution_reports.into_iter().filter_map(
+                                    move |(mode, execution_report)| {
+                                        if let Some(TestCaseStatus::Failed { reason }) =
+                                            execution_report.status
+                                        {
+                                            let parsed_test_specifier =
+                                                ParsedTestSpecifier::CaseWithMode {
+                                                    metadata_file_path: metadata_file_path
+                                                        .clone()
+                                                        .into_inner(),
+                                                    case_idx: case_idx.into_inner(),
+                                                    mode,
+                                                };
+                                            Some((parsed_test_specifier, reason))
+                                        } else {
+                                            None
+                                        }
+                                    },
+                                )
+                            },
+                        )
+                    })
+                    .collect::<Vec<_>>();
+
+                if !failures.is_empty() {
+                    bail!("Some tests failed: {failures:#?}")
+                }
+
                 Ok(())
             }),
         Context::Benchmark(context) => tokio::runtime::Builder::new_multi_thread()
```
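The nested `flat_map`/`filter_map` above walks report → metadata file → case → mode and keeps only the `Failed` entries. The shape is easier to see with the report types reduced to plain maps — a simplified sketch, not the real `revive_dt_report` types:

```rust
use std::collections::BTreeMap;

// file path -> case index -> optional failure reason.
type Report = BTreeMap<String, BTreeMap<String, Option<String>>>;

fn collect_failures(report: Report) -> Vec<(String, String, String)> {
    report
        .into_iter()
        .flat_map(|(file, cases)| {
            cases.into_iter().filter_map(move |(case, failure)| {
                // Keep only the failing entries, tagged with their origin.
                failure.map(|reason| (file.clone(), case, reason))
            })
        })
        .collect()
}

fn main() {
    let mut report = Report::new();
    report
        .entry("a.json".into())
        .or_default()
        .insert("0".into(), Some("reverted".into()));
    assert_eq!(collect_failures(report).len(), 1);
}
```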
```diff
@@ -71,12 +111,48 @@ fn main() -> anyhow::Result<()> {
                 let differential_benchmarks_handling_task =
                     handle_differential_benchmarks(*context, reporter);

-                futures::future::try_join(
+                let (_, report) = futures::future::try_join(
                     differential_benchmarks_handling_task,
                     report_aggregator_task,
                 )
                 .await?;
+
+                // Error out if there are any failing tests.
+                let failures = report
+                    .execution_information
+                    .into_iter()
+                    .flat_map(|(metadata_file_path, metadata_file_report)| {
+                        metadata_file_report.case_reports.into_iter().flat_map(
+                            move |(case_idx, case_report)| {
+                                let metadata_file_path = metadata_file_path.clone();
+                                case_report.mode_execution_reports.into_iter().filter_map(
+                                    move |(mode, execution_report)| {
+                                        if let Some(TestCaseStatus::Failed { reason }) =
+                                            execution_report.status
+                                        {
+                                            let parsed_test_specifier =
+                                                ParsedTestSpecifier::CaseWithMode {
+                                                    metadata_file_path: metadata_file_path
+                                                        .clone()
+                                                        .into_inner(),
+                                                    case_idx: case_idx.into_inner(),
+                                                    mode,
+                                                };
+                                            Some((parsed_test_specifier, reason))
+                                        } else {
+                                            None
+                                        }
+                                    },
+                                )
+                            },
+                        )
+                    })
+                    .collect::<Vec<_>>();
+
+                if !failures.is_empty() {
+                    bail!("Some tests failed: {failures:#?}")
+                }
+
                 Ok(())
             }),
         Context::ExportGenesis(ref export_genesis_context) => {
@@ -85,11 +161,17 @@ fn main() -> anyhow::Result<()> {
             let genesis_json = serde_json::to_string_pretty(&genesis)
                 .context("Failed to serialize the genesis to JSON")?;
             println!("{genesis_json}");

             Ok(())
         }
         Context::ExportJsonSchema => {
             let schema = schema_for!(Metadata);
-            println!("{}", serde_json::to_string_pretty(&schema).unwrap());
+            println!(
+                "{}",
+                serde_json::to_string_pretty(&schema)
+                    .context("Failed to export the JSON schema")?
+            );
+
             Ok(())
         }
     }
```
```diff
@@ -76,6 +76,7 @@ pub struct GethNode {
     wallet: Arc<EthereumWallet>,
     nonce_manager: CachedNonceManager,
     provider: OnceCell<ConcreteProvider<Ethereum, Arc<EthereumWallet>>>,
+    use_fallback_gas_filler: bool,
 }

 impl GethNode {
@@ -100,6 +101,7 @@ impl GethNode {
             + AsRef<WalletConfiguration>
             + AsRef<GethConfiguration>
             + Clone,
+        use_fallback_gas_filler: bool,
     ) -> Self {
         let working_directory_configuration =
             AsRef::<WorkingDirectoryConfiguration>::as_ref(&context);
@@ -126,6 +128,7 @@ impl GethNode {
             wallet: wallet.clone(),
             nonce_manager: Default::default(),
             provider: Default::default(),
+            use_fallback_gas_filler,
         }
     }

@@ -246,7 +249,8 @@ impl GethNode {
             .get_or_try_init(|| async move {
                 construct_concurrency_limited_provider::<Ethereum, _>(
                     self.connection_string.as_str(),
-                    FallbackGasFiller::default(),
+                    FallbackGasFiller::default()
+                        .with_use_fallback_gas_filler(self.use_fallback_gas_filler),
                     ChainIdFiller::new(Some(CHAIN_ID)),
                     NonceFiller::new(self.nonce_manager.clone()),
                     self.wallet.clone(),
@@ -742,7 +746,7 @@ mod tests {

     fn new_node() -> (TestExecutionContext, GethNode) {
         let context = test_config();
-        let mut node = GethNode::new(&context);
+        let mut node = GethNode::new(&context, true);
         node.init(context.genesis_configuration.genesis().unwrap().clone())
             .expect("Failed to initialize the node")
             .spawn_process()
```

```diff
@@ -106,6 +106,8 @@ pub struct LighthouseGethNode {

     persistent_http_provider: OnceCell<ConcreteProvider<Ethereum, Arc<EthereumWallet>>>,
     persistent_ws_provider: OnceCell<ConcreteProvider<Ethereum, Arc<EthereumWallet>>>,
+
+    use_fallback_gas_filler: bool,
 }

 impl LighthouseGethNode {
@@ -127,6 +129,7 @@ impl LighthouseGethNode {
             + AsRef<WalletConfiguration>
             + AsRef<KurtosisConfiguration>
             + Clone,
+        use_fallback_gas_filler: bool,
     ) -> Self {
         let working_directory_configuration =
             AsRef::<WorkingDirectoryConfiguration>::as_ref(&context);
@@ -176,6 +179,7 @@ impl LighthouseGethNode {
             nonce_manager: Default::default(),
             persistent_http_provider: OnceCell::const_new(),
             persistent_ws_provider: OnceCell::const_new(),
+            use_fallback_gas_filler,
         }
     }

@@ -374,7 +378,8 @@ impl LighthouseGethNode {
             .get_or_try_init(|| async move {
                 construct_concurrency_limited_provider::<Ethereum, _>(
                     self.ws_connection_string.as_str(),
-                    FallbackGasFiller::default(),
+                    FallbackGasFiller::default()
+                        .with_use_fallback_gas_filler(self.use_fallback_gas_filler),
                     ChainIdFiller::new(Some(CHAIN_ID)),
                     NonceFiller::new(self.nonce_manager.clone()),
                     self.wallet.clone(),
@@ -1152,7 +1157,7 @@ mod tests {
         let _guard = NODE_START_MUTEX.lock().unwrap();

         let context = test_config();
-        let mut node = LighthouseGethNode::new(&context);
+        let mut node = LighthouseGethNode::new(&context, true);
         node.init(context.genesis_configuration.genesis().unwrap().clone())
             .expect("Failed to initialize the node")
             .spawn_process()
```

```diff
@@ -79,6 +79,7 @@ pub struct SubstrateNode {
     nonce_manager: CachedNonceManager,
     provider: OnceCell<ConcreteProvider<Ethereum, Arc<EthereumWallet>>>,
     consensus: Option<String>,
+    use_fallback_gas_filler: bool,
 }

 impl SubstrateNode {
@@ -105,6 +106,7 @@ impl SubstrateNode {
             + AsRef<EthRpcConfiguration>
             + AsRef<WalletConfiguration>,
         existing_connection_strings: &[String],
+        use_fallback_gas_filler: bool,
     ) -> Self {
         let working_directory_path =
             AsRef::<WorkingDirectoryConfiguration>::as_ref(&context).as_path();
@@ -137,6 +139,7 @@ impl SubstrateNode {
             nonce_manager: Default::default(),
             provider: Default::default(),
             consensus,
+            use_fallback_gas_filler,
         }
     }

@@ -324,7 +327,12 @@ impl SubstrateNode {
             .get_or_try_init(|| async move {
                 construct_concurrency_limited_provider::<Ethereum, _>(
                     self.rpc_url.as_str(),
-                    FallbackGasFiller::new(u64::MAX, 50_000_000_000, 1_000_000_000),
+                    FallbackGasFiller::new(
+                        u64::MAX,
+                        50_000_000_000,
+                        1_000_000_000,
+                        self.use_fallback_gas_filler,
+                    ),
                     ChainIdFiller::new(Some(CHAIN_ID)),
                     NonceFiller::new(self.nonce_manager.clone()),
                     self.wallet.clone(),
@@ -825,6 +833,7 @@ mod tests {
             None,
             &context,
             &[],
+            true,
         );
         node.init(context.genesis_configuration.genesis().unwrap().clone())
             .expect("Failed to initialize the node")
@@ -896,6 +905,7 @@ mod tests {
             None,
             &context,
             &[],
+            true,
         );

         // Call `init()`
```

```diff
@@ -114,6 +114,8 @@ pub struct ZombienetNode {
     nonce_manager: CachedNonceManager,

     provider: OnceCell<ConcreteProvider<Ethereum, Arc<EthereumWallet>>>,
+
+    use_fallback_gas_filler: bool,
 }

 impl ZombienetNode {
@@ -137,6 +139,7 @@ impl ZombienetNode {
         context: impl AsRef<WorkingDirectoryConfiguration>
             + AsRef<EthRpcConfiguration>
             + AsRef<WalletConfiguration>,
+        use_fallback_gas_filler: bool,
     ) -> Self {
         let eth_proxy_binary = AsRef::<EthRpcConfiguration>::as_ref(&context)
             .path
@@ -164,6 +167,7 @@ impl ZombienetNode {
             connection_string: String::new(),
             node_rpc_port: None,
             provider: Default::default(),
+            use_fallback_gas_filler,
         }
     }

@@ -330,7 +334,12 @@ impl ZombienetNode {
             .get_or_try_init(|| async move {
                 construct_concurrency_limited_provider::<Ethereum, _>(
                     self.connection_string.as_str(),
-                    FallbackGasFiller::new(u64::MAX, 5_000_000_000, 1_000_000_000),
+                    FallbackGasFiller::new(
+                        u64::MAX,
+                        5_000_000_000,
+                        1_000_000_000,
+                        self.use_fallback_gas_filler,
+                    ),
                     ChainIdFiller::default(), // TODO: use CHAIN_ID constant
                     NonceFiller::new(self.nonce_manager.clone()),
                     self.wallet.clone(),
@@ -823,6 +832,7 @@ mod tests {
         let mut node = ZombienetNode::new(
             context.polkadot_parachain_configuration.path.clone(),
             &context,
+            true,
         );
         let genesis = context.genesis_configuration.genesis().unwrap().clone();
         node.init(genesis).unwrap();
@@ -936,6 +946,7 @@ mod tests {
         let node = ZombienetNode::new(
             context.polkadot_parachain_configuration.path.clone(),
             &context,
+            true,
         );

         // Act
@@ -956,6 +967,7 @@ mod tests {
         let node = ZombienetNode::new(
             context.polkadot_parachain_configuration.path.clone(),
             &context,
+            true,
         );

         // Act
```
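All four node types share the same lazy provider construction through `tokio::sync::OnceCell`, which the new `use_fallback_gas_filler` field now feeds into. The caching pattern in isolation — a sketch with a `String` standing in for the real provider type, and `make_provider` as a hypothetical stand-in for `construct_concurrency_limited_provider`:

```rust
use tokio::sync::OnceCell;

struct Node {
    provider: OnceCell<String>,
}

impl Node {
    // Construct the provider at most once; on error the cell stays empty so a
    // later call can retry.
    async fn provider(&self) -> anyhow::Result<&String> {
        self.provider
            .get_or_try_init(|| async { make_provider().await })
            .await
    }
}

// Hypothetical stand-in for `construct_concurrency_limited_provider`.
async fn make_provider() -> anyhow::Result<String> {
    Ok("ws://localhost:8545".to_string())
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let node = Node { provider: OnceCell::new() };
    println!("{}", node.provider().await?);
    Ok(())
}
```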
```diff
@@ -4,7 +4,7 @@ use alloy::{
         Provider, SendableTx,
         fillers::{GasFiller, TxFiller},
     },
-    transports::TransportResult,
+    transports::{TransportError, TransportResult},
 };

 // Percentage padding applied to estimated gas (e.g. 120 = 20% padding)
@@ -17,6 +17,7 @@ pub struct FallbackGasFiller {
     default_gas_limit: u64,
     default_max_fee_per_gas: u128,
     default_priority_fee: u128,
+    use_fallback_gas_filler: bool,
 }

 impl FallbackGasFiller {
@@ -24,19 +25,41 @@ impl FallbackGasFiller {
         default_gas_limit: u64,
         default_max_fee_per_gas: u128,
         default_priority_fee: u128,
+        use_fallback_gas_filler: bool,
     ) -> Self {
         Self {
             inner: GasFiller,
             default_gas_limit,
             default_max_fee_per_gas,
             default_priority_fee,
+            use_fallback_gas_filler,
         }
     }

+    pub fn with_default_gas_limit(mut self, default_gas_limit: u64) -> Self {
+        self.default_gas_limit = default_gas_limit;
+        self
+    }
+
+    pub fn with_default_max_fee_per_gas(mut self, default_max_fee_per_gas: u128) -> Self {
+        self.default_max_fee_per_gas = default_max_fee_per_gas;
+        self
+    }
+
+    pub fn with_default_priority_fee(mut self, default_priority_fee: u128) -> Self {
+        self.default_priority_fee = default_priority_fee;
+        self
+    }
+
+    pub fn with_use_fallback_gas_filler(mut self, use_fallback_gas_filler: bool) -> Self {
+        self.use_fallback_gas_filler = use_fallback_gas_filler;
+        self
+    }
 }

 impl Default for FallbackGasFiller {
     fn default() -> Self {
-        FallbackGasFiller::new(25_000_000, 1_000_000_000, 1_000_000_000)
+        FallbackGasFiller::new(25_000_000, 1_000_000_000, 1_000_000_000, true)
     }
 }

@@ -64,10 +87,15 @@ where
             Ok(fill) => Ok(Some(fill)),
             Err(err) => {
                 tracing::debug!(error = ?err, "Gas Provider Estimation Failed, using fallback");

+                if !self.use_fallback_gas_filler {
+                    Err(err)
+                } else {
                     Ok(None)
+                }
             }
         }
     }

     async fn fill(
         &self,
@@ -86,13 +114,17 @@ where
                 }
             }
             Ok(tx)
-        } else {
+        } else if self.use_fallback_gas_filler {
             if let Some(builder) = tx.as_mut_builder() {
                 builder.set_gas_limit(self.default_gas_limit);
                 builder.set_max_fee_per_gas(self.default_max_fee_per_gas);
                 builder.set_max_priority_fee_per_gas(self.default_priority_fee);
             }
             Ok(tx)
+        } else {
+            Err(TransportError::UnsupportedFeature(
+                "Fallback gas filler is disabled and we're attempting to do a gas estimate on a failing transaction",
+            ))
         }
     }
 }
```
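With the builder methods added above, call sites can start from `FallbackGasFiller::default()` (fallback enabled) and override individual knobs. A hypothetical configuration, mirroring how the node types above use `with_use_fallback_gas_filler`:

```rust
// Hypothetical configuration: substrate-style fee defaults, with the fallback
// behavior switched off so estimation failures surface as errors.
fn strict_filler() -> FallbackGasFiller {
    FallbackGasFiller::default()
        .with_default_gas_limit(u64::MAX)
        .with_default_max_fee_per_gas(50_000_000_000)
        .with_default_priority_fee(1_000_000_000)
        .with_use_fallback_gas_filler(false)
}
```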
```diff
@@ -51,7 +51,7 @@ impl ReportAggregator {
         }
     }

-    pub fn into_task(mut self) -> (Reporter, impl Future<Output = Result<()>>) {
+    pub fn into_task(mut self) -> (Reporter, impl Future<Output = Result<Report>>) {
         let reporter = self
             .runner_tx
             .take()
@@ -60,7 +60,7 @@ impl ReportAggregator {
         (reporter, async move { self.aggregate().await })
     }

-    async fn aggregate(mut self) -> Result<()> {
+    async fn aggregate(mut self) -> Result<Report> {
         debug!("Starting to aggregate report");

         while let Some(event) = self.runner_rx.recv().await {
@@ -152,7 +152,7 @@ impl ReportAggregator {
                 format!("Failed to serialize report JSON to {}", file_path.display())
             })?;

-        Ok(())
+        Ok(self.report)
     }

     fn handle_subscribe_to_events_event(&self, event: SubscribeToEventsEvent) {
```
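The signature change ripples from `aggregate` outward: the aggregation future now yields the final `Report` instead of dropping it, which is what lets `main` inspect failures after `try_join`. The channel-plus-future shape in miniature, with `u64` events standing in for reporter events:

```rust
use std::future::Future;

use tokio::sync::mpsc::{self, UnboundedSender};

// Hand the caller a sender plus a future that drains the channel and, once
// every sender is dropped, resolves to the aggregate instead of `()`.
fn into_task() -> (UnboundedSender<u64>, impl Future<Output = Vec<u64>>) {
    let (tx, mut rx) = mpsc::unbounded_channel();
    let task = async move {
        let mut aggregate = Vec::new();
        while let Some(event) = rx.recv().await {
            aggregate.push(event);
        }
        aggregate // previously this value was dropped here
    };
    (tx, task)
}

#[tokio::main]
async fn main() {
    let (tx, task) = into_task();
    tx.send(1).unwrap();
    tx.send(2).unwrap();
    drop(tx); // close the channel so the task can finish
    assert_eq!(task.await, vec![1, 2]);
}
```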
Submodule resolc-compiler-tests updated: 7bc445491e...55da34c4f6
```diff
@@ -28,7 +28,7 @@ from __future__ import annotations
 import json
 import sys
 import csv
-from typing import List, Mapping, TypedDict
+from typing import List, Mapping, TypedDict, no_type_check


 class EthereumMinedBlockInformation(TypedDict):
@@ -69,7 +69,43 @@ class MinedBlockInformation(TypedDict):
     """Block-level information for a mined block with both EVM and optional Substrate fields."""

     ethereum_block_information: EthereumMinedBlockInformation
-    substrate_block_information: SubstrateMinedBlockInformation
+    substrate_block_information: SubstrateMinedBlockInformation | None
+
+
+def substrate_block_information_ref_time(
+    block: SubstrateMinedBlockInformation | None,
+) -> int | None:
+    if block is None:
+        return None
+    else:
+        return block["ref_time"]
+
+
+def substrate_block_information_max_ref_time(
+    block: SubstrateMinedBlockInformation | None,
+) -> int | None:
+    if block is None:
+        return None
+    else:
+        return block["max_ref_time"]
+
+
+def substrate_block_information_proof_size(
+    block: SubstrateMinedBlockInformation | None,
+) -> int | None:
+    if block is None:
+        return None
+    else:
+        return block["proof_size"]
+
+
+def substrate_block_information_max_proof_size(
+    block: SubstrateMinedBlockInformation | None,
+) -> int | None:
+    if block is None:
+        return None
+    else:
+        return block["max_proof_size"]


 class Metric(TypedDict):
@@ -100,8 +136,19 @@ class Metrics(TypedDict):
     transaction_per_second: Metric
     gas_per_second: Metric
     gas_block_fullness: Metric
-    ref_time_block_fullness: Metric
-    proof_size_block_fullness: Metric
+    ref_time_block_fullness: Metric | None
+    proof_size_block_fullness: Metric | None
+
+
+@no_type_check
+def metrics_raw_item(
+    metrics: Metrics, name: str, target: str, index: int
+) -> int | None:
+    l: list[int] = metrics.get(name, dict()).get("raw", dict()).get(target, dict())
+    try:
+        return l[index]
+    except:
+        return None


 class ExecutionReport(TypedDict):
@@ -144,12 +191,15 @@ BlockInformation = TypedDict(
         "Transaction Count": int,
         "TPS": int | None,
         "GPS": int | None,
-        "Ref Time": int,
-        "Max Ref Time": int,
-        "Block Fullness Ref Time": int,
-        "Proof Size": int,
-        "Max Proof Size": int,
-        "Block Fullness Proof Size": int,
+        "Gas Mined": int,
+        "Block Gas Limit": int,
+        "Block Fullness Gas": float,
+        "Ref Time": int | None,
+        "Max Ref Time": int | None,
+        "Block Fullness Ref Time": int | None,
+        "Proof Size": int | None,
+        "Max Proof Size": int | None,
+        "Block Fullness Proof Size": int | None,
     },
 )
 """A typed dictionary used to hold all of the block information"""
@@ -175,7 +225,7 @@ def main() -> None:
     report: ReportRoot = load_report(report_path)

     # TODO: Remove this in the future, but for now, the target is fixed.
-    target: str = "revive-dev-node-revm-solc"
+    target: str = sys.argv[2]

     csv_writer = csv.writer(sys.stdout)

@@ -188,6 +238,12 @@ def main() -> None:

     resolved_blocks: list[BlockInformation] = []
     for i, block_information in enumerate(blocks_information):
+        mined_gas: int = block_information["ethereum_block_information"][
+            "mined_gas"
+        ]
+        block_gas_limit: int = block_information[
+            "ethereum_block_information"
+        ]["block_gas_limit"]
         resolved_blocks.append(
             {
                 "Block Number": block_information[
@@ -216,24 +272,37 @@ def main() -> None:
                         "raw"
                     ][target][i - 1]
                 ),
-                "Ref Time": block_information[
-                    "substrate_block_information"
-                ]["ref_time"],
-                "Max Ref Time": block_information[
-                    "substrate_block_information"
-                ]["max_ref_time"],
-                "Block Fullness Ref Time": execution_report["metrics"][
-                    "ref_time_block_fullness"
-                ]["raw"][target][i],
-                "Proof Size": block_information[
-                    "substrate_block_information"
-                ]["proof_size"],
-                "Max Proof Size": block_information[
-                    "substrate_block_information"
-                ]["max_proof_size"],
-                "Block Fullness Proof Size": execution_report["metrics"][
-                    "proof_size_block_fullness"
-                ]["raw"][target][i],
+                "Gas Mined": block_information[
+                    "ethereum_block_information"
+                ]["mined_gas"],
+                "Block Gas Limit": block_information[
+                    "ethereum_block_information"
+                ]["block_gas_limit"],
+                "Block Fullness Gas": mined_gas / block_gas_limit,
+                "Ref Time": substrate_block_information_ref_time(
+                    block_information["substrate_block_information"]
+                ),
+                "Max Ref Time": substrate_block_information_max_ref_time(
+                    block_information["substrate_block_information"]
+                ),
+                "Block Fullness Ref Time": metrics_raw_item(
+                    execution_report["metrics"],
+                    "ref_time_block_fullness",
+                    target,
+                    i,
+                ),
+                "Proof Size": substrate_block_information_proof_size(
+                    block_information["substrate_block_information"]
+                ),
+                "Max Proof Size": substrate_block_information_max_proof_size(
+                    block_information["substrate_block_information"]
+                ),
+                "Block Fullness Proof Size": metrics_raw_item(
+                    execution_report["metrics"],
+                    "proof_size_block_fullness",
+                    target,
+                    i,
+                ),
             }
         )

```
@@ -5,51 +5,54 @@ CI. The full models used in the JSON report can be found in the revive different
|
|||||||
the models used in this script are just a partial reproduction of the full report models.
|
the models used in this script are just a partial reproduction of the full report models.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from typing import TypedDict, Literal, Union
|
import json, typing, io, sys
|
||||||
|
|
||||||
import json, io
|
|
||||||
|
|
||||||
|
|
||||||
class Report(TypedDict):
|
class Report(typing.TypedDict):
|
||||||
context: "Context"
|
context: "Context"
|
||||||
execution_information: dict[
|
execution_information: dict["MetadataFilePathString", "MetadataFileReport"]
|
||||||
"MetadataFilePathString",
|
|
||||||
dict["ModeString", dict["CaseIdxString", "CaseReport"]],
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class Context(TypedDict):
|
class MetadataFileReport(typing.TypedDict):
|
||||||
|
case_reports: dict["CaseIdxString", "CaseReport"]
|
||||||
|
|
||||||
|
|
||||||
|
class CaseReport(typing.TypedDict):
|
||||||
|
mode_execution_reports: dict["ModeString", "ExecutionReport"]
|
||||||
|
|
||||||
|
|
||||||
|
class ExecutionReport(typing.TypedDict):
|
||||||
|
status: "TestCaseStatus"
|
||||||
|
|
||||||
|
|
||||||
|
class Context(typing.TypedDict):
|
||||||
Test: "TestContext"
|
Test: "TestContext"
|
||||||
|
|
||||||
|
|
||||||
class TestContext(TypedDict):
|
class TestContext(typing.TypedDict):
|
||||||
corpus_configuration: "CorpusConfiguration"
|
corpus_configuration: "CorpusConfiguration"
|
||||||
|
|
||||||
|
|
||||||
class CorpusConfiguration(TypedDict):
|
class CorpusConfiguration(typing.TypedDict):
|
||||||
test_specifiers: list["TestSpecifier"]
|
test_specifiers: list["TestSpecifier"]
|
||||||
|
|
||||||
|
|
||||||
class CaseReport(TypedDict):
|
class CaseStatusSuccess(typing.TypedDict):
|
||||||
status: "CaseStatus"
|
status: typing.Literal["Succeeded"]
|
||||||
|
|
||||||
|
|
||||||
class CaseStatusSuccess(TypedDict):
|
|
||||||
status: Literal["Succeeded"]
|
|
||||||
steps_executed: int
|
steps_executed: int
|
||||||
|
|
||||||
|
|
||||||
class CaseStatusFailure(TypedDict):
|
class CaseStatusFailure(typing.TypedDict):
|
||||||
status: Literal["Failed"]
|
status: typing.Literal["Failed"]
|
||||||
reason: str
|
reason: str
|
||||||
|
|
||||||
|
|
||||||
class CaseStatusIgnored(TypedDict):
|
class CaseStatusIgnored(typing.TypedDict):
|
||||||
status: Literal["Ignored"]
|
status: typing.Literal["Ignored"]
|
||||||
reason: str
|
reason: str
|
||||||
|
|
||||||
|
|
||||||
CaseStatus = Union[CaseStatusSuccess, CaseStatusFailure, CaseStatusIgnored]
|
TestCaseStatus = typing.Union[CaseStatusSuccess, CaseStatusFailure, CaseStatusIgnored]
|
||||||
"""A union type of all of the possible statuses that could be reported for a case."""
|
"""A union type of all of the possible statuses that could be reported for a case."""
|
||||||
|
|
||||||
TestSpecifier = str
|
TestSpecifier = str
|
||||||
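With the reshaped models above, execution information is nested per metadata file, then per case, then per mode (file → case_reports → mode_execution_reports) instead of file → mode → case. A short traversal over a made-up report illustrates the new shape; the file path, case index, mode string, and status values are all invented sample data:

# A made-up report in the new shape; all keys and values are sample data.
report = {
    "execution_information": {
        "tests/solidity/simple/example.json": {
            "case_reports": {
                "0": {
                    "mode_execution_reports": {
                        "some-mode": {
                            "status": {"status": "Succeeded", "steps_executed": 3}
                        }
                    }
                }
            }
        }
    }
}

# file -> case -> mode, mirroring the loops used later in the script.
for file_path, file_report in report["execution_information"].items():
    for case_idx, case_report in file_report["case_reports"].items():
        for mode, execution_report in case_report["mode_execution_reports"].items():
            print(file_path, case_idx, mode, execution_report["status"]["status"])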
@@ -64,6 +67,12 @@ MetadataFilePathString = str
 CaseIdxString = str
 """The index of a case as a string. For example '0'"""
 
+PlatformString = typing.Union[
+    typing.Literal["revive-dev-node-revm-solc"],
+    typing.Literal["revive-dev-node-polkavm-resolc"],
+]
+"""A string of the platform on which the test was run"""
+
 
 def path_relative_to_resolc_compiler_test_directory(path: str) -> str:
     """
@@ -78,12 +87,22 @@ def path_relative_to_resolc_compiler_test_directory(path: str) -> str:
 
 
 def main() -> None:
-    with open("report.json", "r") as file:
+    with open(sys.argv[1], "r") as file:
         report: Report = json.load(file)
 
+    # Getting the platform string and resolving it into a simpler version of
+    # itself.
+    platform_identifier: PlatformString = typing.cast(PlatformString, sys.argv[2])
+    if platform_identifier == "revive-dev-node-polkavm-resolc":
+        platform: str = "PolkaVM"
+    elif platform_identifier == "revive-dev-node-revm-solc":
+        platform: str = "REVM"
+    else:
+        platform: str = platform_identifier
+
     # Starting the markdown document and adding information to it as we go.
     markdown_document: io.TextIOWrapper = open("report.md", "w")
-    print("# Differential Tests Results", file=markdown_document)
+    print(f"# Differential Tests Results ({platform})", file=markdown_document)
 
     # Getting all of the test specifiers from the report and making them relative to the tests dir.
    test_specifiers: list[str] = list(
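main() now reads the report path from sys.argv[1] and the platform identifier from sys.argv[2], so a caller would invoke something like "python <script>.py report.json revive-dev-node-polkavm-resolc" (the script's file name is not shown in this diff). The if/elif chain that turns the raw identifier into a display name could equally be written as a lookup table; a sketch of the same resolution, using the two literals of PlatformString — an illustration, not the code the repository uses:

# Sketch: table-driven version of the platform resolution above.
_PLATFORM_DISPLAY_NAMES: dict[str, str] = {
    "revive-dev-node-polkavm-resolc": "PolkaVM",
    "revive-dev-node-revm-solc": "REVM",
}


def resolve_platform_display_name(platform_identifier: str) -> str:
    # Fall back to the raw identifier, like the else branch above.
    return _PLATFORM_DISPLAY_NAMES.get(platform_identifier, platform_identifier)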
@@ -94,7 +113,7 @@ def main() -> None:
     )
     print("## Specified Tests", file=markdown_document)
     for test_specifier in test_specifiers:
-        print(f"* `{test_specifier}`", file=markdown_document)
+        print(f"* ``{test_specifier}``", file=markdown_document)
 
     # Counting the total number of test cases, successes, failures, and ignored tests
     total_number_of_cases: int = 0
@@ -102,9 +121,13 @@ def main() -> None:
     total_number_of_failures: int = 0
     total_number_of_ignores: int = 0
     for _, mode_to_case_mapping in report["execution_information"].items():
-        for _, case_idx_to_report_mapping in mode_to_case_mapping.items():
-            for _, case_report in case_idx_to_report_mapping.items():
-                status: CaseStatus = case_report["status"]
+        for _, case_idx_to_report_mapping in mode_to_case_mapping[
+            "case_reports"
+        ].items():
+            for _, execution_report in case_idx_to_report_mapping[
+                "mode_execution_reports"
+            ].items():
+                status: TestCaseStatus = execution_report["status"]
 
                 total_number_of_cases += 1
                 if status["status"] == "Succeeded":
@@ -144,9 +167,13 @@ def main() -> None:
     for metadata_file_path, mode_to_case_mapping in report[
         "execution_information"
     ].items():
-        for mode_string, case_idx_to_report_mapping in mode_to_case_mapping.items():
-            for case_idx_string, case_report in case_idx_to_report_mapping.items():
-                status: CaseStatus = case_report["status"]
+        for case_idx_string, case_idx_to_report_mapping in mode_to_case_mapping[
+            "case_reports"
+        ].items():
+            for mode_string, execution_report in case_idx_to_report_mapping[
+                "mode_execution_reports"
+            ].items():
+                status: TestCaseStatus = execution_report["status"]
                 metadata_file_path: str = (
                     path_relative_to_resolc_compiler_test_directory(metadata_file_path)
                 )
@@ -183,9 +210,13 @@ def main() -> None:
     for metadata_file_path, mode_to_case_mapping in report[
         "execution_information"
     ].items():
-        for mode_string, case_idx_to_report_mapping in mode_to_case_mapping.items():
-            for case_idx_string, case_report in case_idx_to_report_mapping.items():
-                status: CaseStatus = case_report["status"]
+        for case_idx_string, case_idx_to_report_mapping in mode_to_case_mapping[
+            "case_reports"
+        ].items():
+            for mode_string, execution_report in case_idx_to_report_mapping[
+                "mode_execution_reports"
+            ].items():
+                status: TestCaseStatus = execution_report["status"]
                 metadata_file_path: str = (
                     path_relative_to_resolc_compiler_test_directory(metadata_file_path)
                 )
@@ -194,7 +225,9 @@ def main() -> None:
                 if status["status"] != "Failed":
                     continue
 
-                failure_reason: str = status["reason"].replace("\n", " ")
+                failure_reason: str = (
+                    status["reason"].replace("\n", " ").replace("|", " ")
+                )
 
                 note: str = ""
                 modes_where_this_case_succeeded: set[ModeString] = (
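The added .replace("|", " ") matters because the failure reason is later interpolated into a Markdown table row, where a raw pipe is parsed as a column separator and breaks the row. A tiny illustration with a made-up reason string:

# Why the added .replace("|", " ") matters: a raw pipe inside a Markdown
# table cell is parsed as a column separator and breaks the row.
reason = "expected `1 | 2` but got `3`\nsee trace"
sanitized = reason.replace("\n", " ").replace("|", " ")
print(f"| ``{sanitized}`` |")
# prints: | ``expected `1   2` but got `3` see trace`` |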
@@ -212,7 +245,7 @@ def main() -> None:
                         f"{metadata_file_path}::{case_idx_string}::{mode_string}"
                     )
                 print(
-                    f"| `{test_specifier}` | `{failure_reason}` | {note} |",
+                    f"| ``{test_specifier}`` | ``{failure_reason}`` | {note} |",
                     file=markdown_document,
                 )
     print("\n\n</details>", file=markdown_document)
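The switch from single to double backticks here and in the bullet list earlier is presumably defensive: in CommonMark, a code span delimited by double backticks may itself contain single backticks, so an interpolated test specifier or failure reason containing a backtick no longer terminates the span early. A quick illustration with a made-up failure reason:

# Single-backtick spans break when the interpolated text contains a backtick;
# double-backtick spans tolerate it (CommonMark code-span rules).
failure_reason = "token `}` was unexpected"
print(f"`{failure_reason}`")    # `token `}` was unexpected`  -> span ends early
print(f"``{failure_reason}``")  # ``token `}` was unexpected`` -> one code span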