mirror of
https://github.com/pezkuwichain/revive-differential-tests.git
synced 2026-04-28 10:47:59 +00:00
Compare commits
4 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| bb718fedfc | |||
| ac8051b03e | |||
| 55322165ad | |||
| f7ca7a1de5 |
Generated
+9
@@ -2929,6 +2929,12 @@ dependencies = [
|
|||||||
"serde",
|
"serde",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "indoc"
|
||||||
|
version = "2.0.6"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "integer-sqrt"
|
name = "integer-sqrt"
|
||||||
version = "0.1.5"
|
version = "0.1.5"
|
||||||
@@ -4095,6 +4101,9 @@ dependencies = [
|
|||||||
"alloy-primitives",
|
"alloy-primitives",
|
||||||
"alloy-sol-types",
|
"alloy-sol-types",
|
||||||
"anyhow",
|
"anyhow",
|
||||||
|
"indoc",
|
||||||
|
"regex",
|
||||||
|
"revive-common",
|
||||||
"revive-dt-common",
|
"revive-dt-common",
|
||||||
"semver 1.0.26",
|
"semver 1.0.26",
|
||||||
"serde",
|
"serde",
|
||||||
|
|||||||
@@ -30,6 +30,7 @@ foundry-compilers-artifacts = { version = "0.18.0" }
|
|||||||
futures = { version = "0.3.31" }
|
futures = { version = "0.3.31" }
|
||||||
hex = "0.4.3"
|
hex = "0.4.3"
|
||||||
reqwest = { version = "0.12.15", features = ["json"] }
|
reqwest = { version = "0.12.15", features = ["json"] }
|
||||||
|
regex = { version = "1.11.1" }
|
||||||
once_cell = "1.21"
|
once_cell = "1.21"
|
||||||
semver = { version = "1.0", features = ["serde"] }
|
semver = { version = "1.0", features = ["serde"] }
|
||||||
serde = { version = "1.0", default-features = false, features = ["derive"] }
|
serde = { version = "1.0", default-features = false, features = ["derive"] }
|
||||||
@@ -55,6 +56,7 @@ tracing-subscriber = { version = "0.3.19", default-features = false, features =
|
|||||||
"env-filter",
|
"env-filter",
|
||||||
] }
|
] }
|
||||||
indexmap = { version = "2.10.0", default-features = false }
|
indexmap = { version = "2.10.0", default-features = false }
|
||||||
|
indoc = { version = "2.0.6", default-features = false }
|
||||||
|
|
||||||
# revive compiler
|
# revive compiler
|
||||||
revive-solc-json-interface = { git = "https://github.com/paritytech/revive", rev = "3389865af7c3ff6f29a586d82157e8bc573c1a8e" }
|
revive-solc-json-interface = { git = "https://github.com/paritytech/revive", rev = "3389865af7c3ff6f29a586d82157e8bc573c1a8e" }
|
||||||
|
|||||||
@@ -8,18 +8,6 @@
|
|||||||
{
|
{
|
||||||
"name": "first",
|
"name": "first",
|
||||||
"inputs": [
|
"inputs": [
|
||||||
{
|
|
||||||
"address": "0xdeadbeef00000000000000000000000000000042",
|
|
||||||
"expected_balance": "1233"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "0xdeadbeef00000000000000000000000000000042",
|
|
||||||
"is_storage_empty": true
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"address": "0xdeadbeef00000000000000000000000000000042",
|
|
||||||
"is_storage_empty": false
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"instance": "WBTC_1",
|
"instance": "WBTC_1",
|
||||||
"method": "#deployer",
|
"method": "#deployer",
|
||||||
|
|||||||
+25
-220
@@ -4,7 +4,6 @@ use std::collections::HashMap;
|
|||||||
use std::marker::PhantomData;
|
use std::marker::PhantomData;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
use alloy::consensus::EMPTY_ROOT_HASH;
|
|
||||||
use alloy::hex;
|
use alloy::hex;
|
||||||
use alloy::json_abi::JsonAbi;
|
use alloy::json_abi::JsonAbi;
|
||||||
use alloy::network::{Ethereum, TransactionBuilder};
|
use alloy::network::{Ethereum, TransactionBuilder};
|
||||||
@@ -27,12 +26,9 @@ use revive_dt_format::traits::{ResolutionContext, ResolverApi};
|
|||||||
use semver::Version;
|
use semver::Version;
|
||||||
|
|
||||||
use revive_dt_format::case::{Case, CaseIdx};
|
use revive_dt_format::case::{Case, CaseIdx};
|
||||||
use revive_dt_format::input::{
|
use revive_dt_format::input::{Calldata, EtherValue, Expected, ExpectedOutput, Method};
|
||||||
BalanceAssertion, Calldata, EtherValue, Expected, ExpectedOutput, Input, Method,
|
|
||||||
StorageEmptyAssertion,
|
|
||||||
};
|
|
||||||
use revive_dt_format::metadata::{ContractInstance, ContractPathAndIdent};
|
use revive_dt_format::metadata::{ContractInstance, ContractPathAndIdent};
|
||||||
use revive_dt_format::{input::Step, metadata::Metadata};
|
use revive_dt_format::{input::Input, metadata::Metadata};
|
||||||
use revive_dt_node::Node;
|
use revive_dt_node::Node;
|
||||||
use revive_dt_node_interaction::EthereumNode;
|
use revive_dt_node_interaction::EthereumNode;
|
||||||
use tracing::Instrument;
|
use tracing::Instrument;
|
||||||
@@ -74,32 +70,6 @@ where
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn handle_step(
|
|
||||||
&mut self,
|
|
||||||
metadata: &Metadata,
|
|
||||||
case_idx: CaseIdx,
|
|
||||||
step: &Step,
|
|
||||||
node: &T::Blockchain,
|
|
||||||
) -> anyhow::Result<StepOutput> {
|
|
||||||
match step {
|
|
||||||
Step::FunctionCall(input) => {
|
|
||||||
let (receipt, geth_trace, diff_mode) =
|
|
||||||
self.handle_input(metadata, case_idx, input, node).await?;
|
|
||||||
Ok(StepOutput::FunctionCall(receipt, geth_trace, diff_mode))
|
|
||||||
}
|
|
||||||
Step::BalanceAssertion(balance_assertion) => {
|
|
||||||
self.handle_balance_assertion(metadata, case_idx, balance_assertion, node)
|
|
||||||
.await?;
|
|
||||||
Ok(StepOutput::BalanceAssertion)
|
|
||||||
}
|
|
||||||
Step::StorageEmptyAssertion(storage_empty) => {
|
|
||||||
self.handle_storage_empty(metadata, case_idx, storage_empty, node)
|
|
||||||
.await?;
|
|
||||||
Ok(StepOutput::StorageEmptyAssertion)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn handle_input(
|
pub async fn handle_input(
|
||||||
&mut self,
|
&mut self,
|
||||||
metadata: &Metadata,
|
metadata: &Metadata,
|
||||||
@@ -108,7 +78,7 @@ where
|
|||||||
node: &T::Blockchain,
|
node: &T::Blockchain,
|
||||||
) -> anyhow::Result<(TransactionReceipt, GethTrace, DiffMode)> {
|
) -> anyhow::Result<(TransactionReceipt, GethTrace, DiffMode)> {
|
||||||
let deployment_receipts = self
|
let deployment_receipts = self
|
||||||
.handle_input_contract_deployment(metadata, case_idx, input, node)
|
.handle_contract_deployment(metadata, case_idx, input, node)
|
||||||
.await?;
|
.await?;
|
||||||
let execution_receipt = self
|
let execution_receipt = self
|
||||||
.handle_input_execution(input, deployment_receipts, node)
|
.handle_input_execution(input, deployment_receipts, node)
|
||||||
@@ -123,36 +93,8 @@ where
|
|||||||
.await
|
.await
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn handle_balance_assertion(
|
|
||||||
&mut self,
|
|
||||||
metadata: &Metadata,
|
|
||||||
_: CaseIdx,
|
|
||||||
balance_assertion: &BalanceAssertion,
|
|
||||||
node: &T::Blockchain,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
self.handle_balance_assertion_contract_deployment(metadata, balance_assertion, node)
|
|
||||||
.await?;
|
|
||||||
self.handle_balance_assertion_execution(balance_assertion, node)
|
|
||||||
.await?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn handle_storage_empty(
|
|
||||||
&mut self,
|
|
||||||
metadata: &Metadata,
|
|
||||||
_: CaseIdx,
|
|
||||||
storage_empty: &StorageEmptyAssertion,
|
|
||||||
node: &T::Blockchain,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
self.handle_storage_empty_assertion_contract_deployment(metadata, storage_empty, node)
|
|
||||||
.await?;
|
|
||||||
self.handle_storage_empty_assertion_execution(storage_empty, node)
|
|
||||||
.await?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Handles the contract deployment for a given input performing it if it needs to be performed.
|
/// Handles the contract deployment for a given input performing it if it needs to be performed.
|
||||||
async fn handle_input_contract_deployment(
|
async fn handle_contract_deployment(
|
||||||
&mut self,
|
&mut self,
|
||||||
metadata: &Metadata,
|
metadata: &Metadata,
|
||||||
case_idx: CaseIdx,
|
case_idx: CaseIdx,
|
||||||
@@ -520,126 +462,6 @@ where
|
|||||||
Ok((execution_receipt, trace, diff))
|
Ok((execution_receipt, trace, diff))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn handle_balance_assertion_contract_deployment(
|
|
||||||
&mut self,
|
|
||||||
metadata: &Metadata,
|
|
||||||
balance_assertion: &BalanceAssertion,
|
|
||||||
node: &T::Blockchain,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
let Some(instance) = balance_assertion
|
|
||||||
.address
|
|
||||||
.strip_prefix(".address")
|
|
||||||
.map(ContractInstance::new)
|
|
||||||
else {
|
|
||||||
return Ok(());
|
|
||||||
};
|
|
||||||
self.get_or_deploy_contract_instance(
|
|
||||||
&instance,
|
|
||||||
metadata,
|
|
||||||
Input::default_caller(),
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
node,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn handle_balance_assertion_execution(
|
|
||||||
&mut self,
|
|
||||||
BalanceAssertion {
|
|
||||||
address: address_string,
|
|
||||||
expected_balance: amount,
|
|
||||||
}: &BalanceAssertion,
|
|
||||||
node: &T::Blockchain,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
let address = Address::from_slice(
|
|
||||||
Calldata::new_compound([address_string])
|
|
||||||
.calldata(node, self.default_resolution_context())
|
|
||||||
.await?
|
|
||||||
.get(12..32)
|
|
||||||
.expect("Can't fail"),
|
|
||||||
);
|
|
||||||
|
|
||||||
let balance = node.balance_of(address).await?;
|
|
||||||
|
|
||||||
let expected = *amount;
|
|
||||||
let actual = balance;
|
|
||||||
if expected != actual {
|
|
||||||
tracing::error!(%expected, %actual, %address, "Balance assertion failed");
|
|
||||||
anyhow::bail!(
|
|
||||||
"Balance assertion failed - Expected {} but got {} for {} resolved to {}",
|
|
||||||
expected,
|
|
||||||
actual,
|
|
||||||
address_string,
|
|
||||||
address,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn handle_storage_empty_assertion_contract_deployment(
|
|
||||||
&mut self,
|
|
||||||
metadata: &Metadata,
|
|
||||||
storage_empty_assertion: &StorageEmptyAssertion,
|
|
||||||
node: &T::Blockchain,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
let Some(instance) = storage_empty_assertion
|
|
||||||
.address
|
|
||||||
.strip_prefix(".address")
|
|
||||||
.map(ContractInstance::new)
|
|
||||||
else {
|
|
||||||
return Ok(());
|
|
||||||
};
|
|
||||||
self.get_or_deploy_contract_instance(
|
|
||||||
&instance,
|
|
||||||
metadata,
|
|
||||||
Input::default_caller(),
|
|
||||||
None,
|
|
||||||
None,
|
|
||||||
node,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub async fn handle_storage_empty_assertion_execution(
|
|
||||||
&mut self,
|
|
||||||
StorageEmptyAssertion {
|
|
||||||
address: address_string,
|
|
||||||
is_storage_empty,
|
|
||||||
}: &StorageEmptyAssertion,
|
|
||||||
node: &T::Blockchain,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
let address = Address::from_slice(
|
|
||||||
Calldata::new_compound([address_string])
|
|
||||||
.calldata(node, self.default_resolution_context())
|
|
||||||
.await?
|
|
||||||
.get(12..32)
|
|
||||||
.expect("Can't fail"),
|
|
||||||
);
|
|
||||||
|
|
||||||
let storage = node.latest_state_proof(address, Default::default()).await?;
|
|
||||||
let is_empty = storage.storage_hash == EMPTY_ROOT_HASH;
|
|
||||||
|
|
||||||
let expected = is_storage_empty;
|
|
||||||
let actual = is_empty;
|
|
||||||
|
|
||||||
if *expected != actual {
|
|
||||||
tracing::error!(%expected, %actual, %address, "Storage Empty Assertion failed");
|
|
||||||
anyhow::bail!(
|
|
||||||
"Storage Empty Assertion failed - Expected {} but got {} for {} resolved to {}",
|
|
||||||
expected,
|
|
||||||
actual,
|
|
||||||
address_string,
|
|
||||||
address,
|
|
||||||
)
|
|
||||||
};
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Gets the information of a deployed contract or library from the state. If it's found to not
|
/// Gets the information of a deployed contract or library from the state. If it's found to not
|
||||||
/// be deployed then it will be deployed.
|
/// be deployed then it will be deployed.
|
||||||
///
|
///
|
||||||
@@ -829,55 +651,38 @@ where
|
|||||||
return Ok(0);
|
return Ok(0);
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut steps_executed = 0;
|
let mut inputs_executed = 0;
|
||||||
for (step_idx, step) in self.case.steps_iterator().enumerate() {
|
for (input_idx, input) in self.case.inputs_iterator().enumerate() {
|
||||||
let tracing_span = tracing::info_span!("Handling input", step_idx);
|
let tracing_span = tracing::info_span!("Handling input", input_idx);
|
||||||
|
|
||||||
let leader_step_output = self
|
let (leader_receipt, _, leader_diff) = self
|
||||||
.leader_state
|
.leader_state
|
||||||
.handle_step(self.metadata, self.case_idx, &step, self.leader_node)
|
.handle_input(self.metadata, self.case_idx, &input, self.leader_node)
|
||||||
.instrument(tracing_span.clone())
|
.instrument(tracing_span.clone())
|
||||||
.await?;
|
.await?;
|
||||||
let follower_step_output = self
|
let (follower_receipt, _, follower_diff) = self
|
||||||
.follower_state
|
.follower_state
|
||||||
.handle_step(self.metadata, self.case_idx, &step, self.follower_node)
|
.handle_input(self.metadata, self.case_idx, &input, self.follower_node)
|
||||||
.instrument(tracing_span)
|
.instrument(tracing_span)
|
||||||
.await?;
|
.await?;
|
||||||
match (leader_step_output, follower_step_output) {
|
|
||||||
(
|
|
||||||
StepOutput::FunctionCall(leader_receipt, _, leader_diff),
|
|
||||||
StepOutput::FunctionCall(follower_receipt, _, follower_diff),
|
|
||||||
) => {
|
|
||||||
if leader_diff == follower_diff {
|
|
||||||
tracing::debug!("State diffs match between leader and follower.");
|
|
||||||
} else {
|
|
||||||
tracing::debug!("State diffs mismatch between leader and follower.");
|
|
||||||
Self::trace_diff_mode("Leader", &leader_diff);
|
|
||||||
Self::trace_diff_mode("Follower", &follower_diff);
|
|
||||||
}
|
|
||||||
|
|
||||||
if leader_receipt.logs() != follower_receipt.logs() {
|
if leader_diff == follower_diff {
|
||||||
tracing::debug!("Log/event mismatch between leader and follower.");
|
tracing::debug!("State diffs match between leader and follower.");
|
||||||
tracing::trace!("Leader logs: {:?}", leader_receipt.logs());
|
} else {
|
||||||
tracing::trace!("Follower logs: {:?}", follower_receipt.logs());
|
tracing::debug!("State diffs mismatch between leader and follower.");
|
||||||
}
|
Self::trace_diff_mode("Leader", &leader_diff);
|
||||||
}
|
Self::trace_diff_mode("Follower", &follower_diff);
|
||||||
(StepOutput::BalanceAssertion, StepOutput::BalanceAssertion) => {}
|
|
||||||
(StepOutput::StorageEmptyAssertion, StepOutput::StorageEmptyAssertion) => {}
|
|
||||||
_ => unreachable!("The two step outputs can not be of a different kind"),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
steps_executed += 1;
|
if leader_receipt.logs() != follower_receipt.logs() {
|
||||||
|
tracing::debug!("Log/event mismatch between leader and follower.");
|
||||||
|
tracing::trace!("Leader logs: {:?}", leader_receipt.logs());
|
||||||
|
tracing::trace!("Follower logs: {:?}", follower_receipt.logs());
|
||||||
|
}
|
||||||
|
|
||||||
|
inputs_executed += 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(steps_executed)
|
Ok(inputs_executed)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
|
||||||
#[allow(clippy::large_enum_variant)]
|
|
||||||
pub enum StepOutput {
|
|
||||||
FunctionCall(TransactionReceipt, GethTrace, DiffMode),
|
|
||||||
BalanceAssertion,
|
|
||||||
StorageEmptyAssertion,
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -32,7 +32,7 @@ use revive_dt_core::{
|
|||||||
use revive_dt_format::{
|
use revive_dt_format::{
|
||||||
case::{Case, CaseIdx},
|
case::{Case, CaseIdx},
|
||||||
corpus::Corpus,
|
corpus::Corpus,
|
||||||
input::{Input, Step},
|
input::Input,
|
||||||
metadata::{ContractInstance, ContractPathAndIdent, Metadata, MetadataFile},
|
metadata::{ContractInstance, ContractPathAndIdent, Metadata, MetadataFile},
|
||||||
mode::SolcMode,
|
mode::SolcMode,
|
||||||
};
|
};
|
||||||
@@ -446,13 +446,9 @@ where
|
|||||||
// doing the deployments from different accounts and therefore we're not slowed down by
|
// doing the deployments from different accounts and therefore we're not slowed down by
|
||||||
// the nonce.
|
// the nonce.
|
||||||
let deployer_address = case
|
let deployer_address = case
|
||||||
.steps
|
.inputs
|
||||||
.iter()
|
.iter()
|
||||||
.filter_map(|step| match step {
|
.map(|input| input.caller)
|
||||||
Step::FunctionCall(input) => Some(input.caller),
|
|
||||||
Step::BalanceAssertion(..) => None,
|
|
||||||
Step::StorageEmptyAssertion(..) => None,
|
|
||||||
})
|
|
||||||
.next()
|
.next()
|
||||||
.unwrap_or(Input::default_caller());
|
.unwrap_or(Input::default_caller());
|
||||||
let leader_tx = TransactionBuilder::<Ethereum>::with_deploy_code(
|
let leader_tx = TransactionBuilder::<Ethereum>::with_deploy_code(
|
||||||
|
|||||||
@@ -16,9 +16,13 @@ alloy-primitives = { workspace = true }
|
|||||||
alloy-sol-types = { workspace = true }
|
alloy-sol-types = { workspace = true }
|
||||||
anyhow = { workspace = true }
|
anyhow = { workspace = true }
|
||||||
tracing = { workspace = true }
|
tracing = { workspace = true }
|
||||||
|
regex = { workspace = true }
|
||||||
semver = { workspace = true }
|
semver = { workspace = true }
|
||||||
serde = { workspace = true, features = ["derive"] }
|
serde = { workspace = true, features = ["derive"] }
|
||||||
serde_json = { workspace = true }
|
serde_json = { workspace = true }
|
||||||
|
|
||||||
|
revive-common = { workspace = true }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
tokio = { workspace = true }
|
tokio = { workspace = true }
|
||||||
|
indoc = { workspace = true }
|
||||||
|
|||||||
+11
-23
@@ -1,44 +1,32 @@
|
|||||||
use serde::{Deserialize, Serialize};
|
use serde::Deserialize;
|
||||||
|
|
||||||
use revive_dt_common::macros::define_wrapper_type;
|
use revive_dt_common::macros::define_wrapper_type;
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
input::{Expected, Step},
|
input::{Expected, Input},
|
||||||
mode::Mode,
|
mode::Mode,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq)]
|
#[derive(Debug, Default, Deserialize, Clone, Eq, PartialEq)]
|
||||||
pub struct Case {
|
pub struct Case {
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub name: Option<String>,
|
pub name: Option<String>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub comment: Option<String>,
|
pub comment: Option<String>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub modes: Option<Vec<Mode>>,
|
pub modes: Option<Vec<Mode>>,
|
||||||
#[serde(rename = "inputs")]
|
pub inputs: Vec<Input>,
|
||||||
pub steps: Vec<Step>,
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub group: Option<String>,
|
pub group: Option<String>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub expected: Option<Expected>,
|
pub expected: Option<Expected>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub ignore: Option<bool>,
|
pub ignore: Option<bool>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Case {
|
impl Case {
|
||||||
#[allow(irrefutable_let_patterns)]
|
pub fn inputs_iterator(&self) -> impl Iterator<Item = Input> {
|
||||||
pub fn steps_iterator(&self) -> impl Iterator<Item = Step> {
|
let inputs_len = self.inputs.len();
|
||||||
let steps_len = self.steps.len();
|
self.inputs
|
||||||
self.steps
|
|
||||||
.clone()
|
.clone()
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.map(move |(idx, mut step)| {
|
.map(move |(idx, mut input)| {
|
||||||
let Step::FunctionCall(ref mut input) = step else {
|
if idx + 1 == inputs_len {
|
||||||
return step;
|
|
||||||
};
|
|
||||||
|
|
||||||
if idx + 1 == steps_len {
|
|
||||||
if input.expected.is_none() {
|
if input.expected.is_none() {
|
||||||
input.expected = self.expected.clone();
|
input.expected = self.expected.clone();
|
||||||
}
|
}
|
||||||
@@ -48,9 +36,9 @@ impl Case {
|
|||||||
// the case? What are we supposed to do with that final expected field on the
|
// the case? What are we supposed to do with that final expected field on the
|
||||||
// case?
|
// case?
|
||||||
|
|
||||||
step
|
input
|
||||||
} else {
|
} else {
|
||||||
step
|
input
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -17,79 +17,23 @@ use revive_dt_common::macros::define_wrapper_type;
|
|||||||
use crate::traits::ResolverApi;
|
use crate::traits::ResolverApi;
|
||||||
use crate::{metadata::ContractInstance, traits::ResolutionContext};
|
use crate::{metadata::ContractInstance, traits::ResolutionContext};
|
||||||
|
|
||||||
/// A test step.
|
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq)]
|
||||||
///
|
|
||||||
/// A test step can be anything. It could be an invocation to a function, an assertion, or any other
|
|
||||||
/// action that needs to be run or executed on the nodes used in the tests.
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
|
|
||||||
#[serde(untagged)]
|
|
||||||
pub enum Step {
|
|
||||||
/// A function call or an invocation to some function on some smart contract.
|
|
||||||
FunctionCall(Box<Input>),
|
|
||||||
/// A step for performing a balance assertion on some account or contract.
|
|
||||||
BalanceAssertion(Box<BalanceAssertion>),
|
|
||||||
/// A step for asserting that the storage of some contract or account is empty.
|
|
||||||
StorageEmptyAssertion(Box<StorageEmptyAssertion>),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
|
|
||||||
pub struct Input {
|
pub struct Input {
|
||||||
#[serde(default = "Input::default_caller")]
|
#[serde(default = "Input::default_caller")]
|
||||||
pub caller: Address,
|
pub caller: Address,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub comment: Option<String>,
|
pub comment: Option<String>,
|
||||||
#[serde(default = "Input::default_instance")]
|
#[serde(default = "Input::default_instance")]
|
||||||
pub instance: ContractInstance,
|
pub instance: ContractInstance,
|
||||||
pub method: Method,
|
pub method: Method,
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub calldata: Calldata,
|
pub calldata: Calldata,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub expected: Option<Expected>,
|
pub expected: Option<Expected>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub value: Option<EtherValue>,
|
pub value: Option<EtherValue>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub storage: Option<HashMap<String, Calldata>>,
|
pub storage: Option<HashMap<String, Calldata>>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub variable_assignments: Option<VariableAssignments>,
|
pub variable_assignments: Option<VariableAssignments>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
|
#[derive(Clone, Debug, Deserialize, Eq, PartialEq)]
|
||||||
pub struct BalanceAssertion {
|
|
||||||
/// An optional comment on the balance assertion.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub comment: Option<String>,
|
|
||||||
|
|
||||||
/// The address that the balance assertion should be done on.
|
|
||||||
///
|
|
||||||
/// This is a string which will be resolved into an address when being processed. Therefore,
|
|
||||||
/// this could be a normal hex address, a variable such as `Test.address`, or perhaps even a
|
|
||||||
/// full on variable like `$VARIABLE:Uniswap`. It follows the same resolution rules that are
|
|
||||||
/// followed in the calldata.
|
|
||||||
pub address: String,
|
|
||||||
|
|
||||||
/// The amount of balance to assert that the account or contract has.
|
|
||||||
pub expected_balance: U256,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
|
|
||||||
pub struct StorageEmptyAssertion {
|
|
||||||
/// An optional comment on the storage empty assertion.
|
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub comment: Option<String>,
|
|
||||||
|
|
||||||
/// The address that the balance assertion should be done on.
|
|
||||||
///
|
|
||||||
/// This is a string which will be resolved into an address when being processed. Therefore,
|
|
||||||
/// this could be a normal hex address, a variable such as `Test.address`, or perhaps even a
|
|
||||||
/// full on variable like `$VARIABLE:Uniswap`. It follows the same resolution rules that are
|
|
||||||
/// followed in the calldata.
|
|
||||||
pub address: String,
|
|
||||||
|
|
||||||
/// A boolean of whether the storage of the address is empty or not.
|
|
||||||
pub is_storage_empty: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
|
|
||||||
#[serde(untagged)]
|
#[serde(untagged)]
|
||||||
pub enum Expected {
|
pub enum Expected {
|
||||||
Calldata(Calldata),
|
Calldata(Calldata),
|
||||||
@@ -97,21 +41,17 @@ pub enum Expected {
|
|||||||
ExpectedMany(Vec<ExpectedOutput>),
|
ExpectedMany(Vec<ExpectedOutput>),
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
|
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq)]
|
||||||
pub struct ExpectedOutput {
|
pub struct ExpectedOutput {
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub compiler_version: Option<VersionReq>,
|
pub compiler_version: Option<VersionReq>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub return_data: Option<Calldata>,
|
pub return_data: Option<Calldata>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub events: Option<Vec<Event>>,
|
pub events: Option<Vec<Event>>,
|
||||||
#[serde(default)]
|
#[serde(default)]
|
||||||
pub exception: bool,
|
pub exception: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
|
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq)]
|
||||||
pub struct Event {
|
pub struct Event {
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub address: Option<String>,
|
pub address: Option<String>,
|
||||||
pub topics: Vec<String>,
|
pub topics: Vec<String>,
|
||||||
pub values: Calldata,
|
pub values: Calldata,
|
||||||
@@ -168,7 +108,7 @@ pub struct Event {
|
|||||||
/// [`Single`]: Calldata::Single
|
/// [`Single`]: Calldata::Single
|
||||||
/// [`Compound`]: Calldata::Compound
|
/// [`Compound`]: Calldata::Compound
|
||||||
/// [reverse polish notation]: https://en.wikipedia.org/wiki/Reverse_Polish_notation
|
/// [reverse polish notation]: https://en.wikipedia.org/wiki/Reverse_Polish_notation
|
||||||
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
|
#[derive(Clone, Debug, Deserialize, Eq, PartialEq)]
|
||||||
#[serde(untagged)]
|
#[serde(untagged)]
|
||||||
pub enum Calldata {
|
pub enum Calldata {
|
||||||
Single(Bytes),
|
Single(Bytes),
|
||||||
@@ -202,7 +142,7 @@ enum Operation {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Specify how the contract is called.
|
/// Specify how the contract is called.
|
||||||
#[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq)]
|
#[derive(Debug, Default, Deserialize, Clone, Eq, PartialEq)]
|
||||||
pub enum Method {
|
pub enum Method {
|
||||||
/// Initiate a deploy transaction, calling contracts constructor.
|
/// Initiate a deploy transaction, calling contracts constructor.
|
||||||
///
|
///
|
||||||
@@ -227,7 +167,7 @@ define_wrapper_type!(
|
|||||||
pub struct EtherValue(U256);
|
pub struct EtherValue(U256);
|
||||||
);
|
);
|
||||||
|
|
||||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
|
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq)]
|
||||||
pub struct VariableAssignments {
|
pub struct VariableAssignments {
|
||||||
/// A vector of the variable names to assign to the return data.
|
/// A vector of the variable names to assign to the return data.
|
||||||
///
|
///
|
||||||
|
|||||||
@@ -6,3 +6,5 @@ pub mod input;
|
|||||||
pub mod metadata;
|
pub mod metadata;
|
||||||
pub mod mode;
|
pub mod mode;
|
||||||
pub mod traits;
|
pub mod traits;
|
||||||
|
|
||||||
|
mod semantic_tests;
|
||||||
|
|||||||
@@ -43,20 +43,15 @@ impl Deref for MetadataFile {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq)]
|
#[derive(Debug, Default, Deserialize, Clone, Eq, PartialEq)]
|
||||||
pub struct Metadata {
|
pub struct Metadata {
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub targets: Option<Vec<String>>,
|
pub targets: Option<Vec<String>>,
|
||||||
pub cases: Vec<Case>,
|
pub cases: Vec<Case>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub contracts: Option<BTreeMap<ContractInstance, ContractPathAndIdent>>,
|
pub contracts: Option<BTreeMap<ContractInstance, ContractPathAndIdent>>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
// TODO: Convert into wrapper types for clarity.
|
||||||
pub libraries: Option<BTreeMap<PathBuf, BTreeMap<ContractIdent, ContractInstance>>>,
|
pub libraries: Option<BTreeMap<PathBuf, BTreeMap<ContractIdent, ContractInstance>>>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub ignore: Option<bool>,
|
pub ignore: Option<bool>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub modes: Option<Vec<Mode>>,
|
pub modes: Option<Vec<Mode>>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
pub file_path: Option<PathBuf>,
|
pub file_path: Option<PathBuf>,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -296,7 +291,7 @@ impl FromStr for ContractPathAndIdent {
|
|||||||
type Err = anyhow::Error;
|
type Err = anyhow::Error;
|
||||||
|
|
||||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
let mut splitted_string = s.split(":").peekable();
|
let mut splitted_string = s.split(':').peekable();
|
||||||
let mut path = None::<String>;
|
let mut path = None::<String>;
|
||||||
let mut identifier = None::<String>;
|
let mut identifier = None::<String>;
|
||||||
loop {
|
loop {
|
||||||
@@ -321,7 +316,7 @@ impl FromStr for ContractPathAndIdent {
|
|||||||
contract_ident: ContractIdent::new(identifier),
|
contract_ident: ContractIdent::new(identifier),
|
||||||
}),
|
}),
|
||||||
(None, Some(path)) | (Some(path), None) => {
|
(None, Some(path)) | (Some(path), None) => {
|
||||||
let Some(identifier) = path.split(".").next().map(ToOwned::to_owned) else {
|
let Some(identifier) = path.split('.').next().map(ToOwned::to_owned) else {
|
||||||
anyhow::bail!("Failed to find identifier");
|
anyhow::bail!("Failed to find identifier");
|
||||||
};
|
};
|
||||||
Ok(Self {
|
Ok(Self {
|
||||||
|
|||||||
@@ -16,7 +16,6 @@ pub struct SolcMode {
|
|||||||
pub solc_version: Option<semver::VersionReq>,
|
pub solc_version: Option<semver::VersionReq>,
|
||||||
solc_optimize: Option<bool>,
|
solc_optimize: Option<bool>,
|
||||||
pub llvm_optimizer_settings: Vec<String>,
|
pub llvm_optimizer_settings: Vec<String>,
|
||||||
mode_string: String,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl SolcMode {
|
impl SolcMode {
|
||||||
@@ -30,10 +29,7 @@ impl SolcMode {
|
|||||||
/// - A solc `SemVer version requirement` string
|
/// - A solc `SemVer version requirement` string
|
||||||
/// - One or more `-OX` where X is a supposed to be an LLVM opt mode
|
/// - One or more `-OX` where X is a supposed to be an LLVM opt mode
|
||||||
pub fn parse_from_mode_string(mode_string: &str) -> Option<Self> {
|
pub fn parse_from_mode_string(mode_string: &str) -> Option<Self> {
|
||||||
let mut result = Self {
|
let mut result = Self::default();
|
||||||
mode_string: mode_string.to_string(),
|
|
||||||
..Default::default()
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut parts = mode_string.trim().split(" ");
|
let mut parts = mode_string.trim().split(" ");
|
||||||
|
|
||||||
@@ -108,16 +104,3 @@ impl<'de> Deserialize<'de> for Mode {
|
|||||||
Ok(Self::Unknown(mode_string))
|
Ok(Self::Unknown(mode_string))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Serialize for Mode {
|
|
||||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
|
||||||
where
|
|
||||||
S: serde::Serializer,
|
|
||||||
{
|
|
||||||
let string = match self {
|
|
||||||
Mode::Solidity(solc_mode) => &solc_mode.mode_string,
|
|
||||||
Mode::Unknown(string) => string,
|
|
||||||
};
|
|
||||||
string.serialize(serializer)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -0,0 +1,584 @@
|
|||||||
|
use std::io::{Read, Seek};
|
||||||
|
|
||||||
|
use anyhow::{Result, anyhow};
|
||||||
|
|
||||||
|
use revive_dt_common::define_wrapper_type;
|
||||||
|
|
||||||
|
trait ReadExt: Read + Seek {
|
||||||
|
fn read_while(
|
||||||
|
&mut self,
|
||||||
|
buf: &mut Vec<u8>,
|
||||||
|
callback: impl Fn(&u8) -> bool + Clone,
|
||||||
|
) -> std::io::Result<()> {
|
||||||
|
for byte in self.bytes() {
|
||||||
|
let byte = byte?;
|
||||||
|
let include_byte = callback(&byte);
|
||||||
|
if include_byte {
|
||||||
|
buf.push(byte)
|
||||||
|
} else {
|
||||||
|
self.seek(std::io::SeekFrom::Current(-1))?;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn skip_while(&mut self, callback: impl Fn(&u8) -> bool + Clone) -> std::io::Result<()> {
|
||||||
|
for byte in self.bytes() {
|
||||||
|
let byte = byte?;
|
||||||
|
let skip = callback(&byte);
|
||||||
|
if !skip {
|
||||||
|
self.seek(std::io::SeekFrom::Current(-1))?;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<R> ReadExt for R where R: Read + Seek {}
|
||||||
|
|
||||||
|
trait Parse: Sized {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self>;
|
||||||
|
|
||||||
|
fn peek(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
let pos = token_stream.stream_position()?;
|
||||||
|
let this = Self::parse(token_stream);
|
||||||
|
token_stream.seek(std::io::SeekFrom::Start(pos))?;
|
||||||
|
this
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! impl_parse_for_tuple {
|
||||||
|
($first_ident: ident $(, $($ident: ident),*)?) => {
|
||||||
|
impl<$first_ident: Parse, $($($ident: Parse),*)?> Parse for ($first_ident, $($($ident),*)?) {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
Ok((
|
||||||
|
$first_ident::parse(token_stream)?,
|
||||||
|
$(
|
||||||
|
$($ident::parse(token_stream)?),*
|
||||||
|
)?
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$(impl_parse_for_tuple!( $($ident),* );)?
|
||||||
|
};
|
||||||
|
() => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl_parse_for_tuple!(
|
||||||
|
A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W, X, Y, Z
|
||||||
|
);
|
||||||
|
|
||||||
|
impl Parse for String {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
let mut buffer = Vec::new();
|
||||||
|
token_stream.read_while(&mut buffer, |char| {
|
||||||
|
char.is_ascii_alphanumeric() || char.is_ascii_whitespace()
|
||||||
|
})?;
|
||||||
|
let string = String::from_utf8(buffer)?;
|
||||||
|
if string.trim().is_empty() {
|
||||||
|
Err(anyhow!("Parsing string resulted in an empty string"))
|
||||||
|
} else {
|
||||||
|
Ok(string.trim().to_owned())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Parse for u64 {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
token_stream.skip_while(u8::is_ascii_whitespace)?;
|
||||||
|
|
||||||
|
let mut buffer = Vec::new();
|
||||||
|
token_stream.read_while(&mut buffer, |char| matches!(char, b'0'..=b'9'))?;
|
||||||
|
let string = String::from_utf8(buffer)?;
|
||||||
|
string.parse().map_err(Into::into)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
struct Function {
|
||||||
|
ident: FunctionIdent,
|
||||||
|
arg_types: Parenthesized<FunctionArgumentType, ','>,
|
||||||
|
colon: ColonToken,
|
||||||
|
function_arguments: Vec<FunctionArgument>,
|
||||||
|
arrow_token: ArrowToken,
|
||||||
|
function_returns: Vec<FunctionReturn>,
|
||||||
|
functions_options: Vec<PostFunctionOptions>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Parse for Function {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
Ok(Self {
|
||||||
|
ident: Parse::parse(token_stream)?,
|
||||||
|
arg_types: Parse::parse(token_stream)?,
|
||||||
|
colon: Parse::parse(token_stream)?,
|
||||||
|
function_arguments: {
|
||||||
|
let mut arguments = Vec::default();
|
||||||
|
loop {
|
||||||
|
if arguments.is_empty() {
|
||||||
|
if FunctionArgument::peek(token_stream).is_ok() {
|
||||||
|
arguments.push(FunctionArgument::parse(token_stream)?);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if CommaToken::peek(token_stream).is_ok() {
|
||||||
|
CommaToken::parse(token_stream)?;
|
||||||
|
arguments.push(FunctionArgument::parse(token_stream)?);
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
arguments
|
||||||
|
},
|
||||||
|
arrow_token: Parse::parse(token_stream)?,
|
||||||
|
function_returns: {
|
||||||
|
let mut returns = Vec::default();
|
||||||
|
|
||||||
|
loop {
|
||||||
|
if returns.is_empty() || CommaToken::peek(token_stream).is_ok() {
|
||||||
|
if !returns.is_empty() {
|
||||||
|
CommaToken::parse(token_stream)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut buf = Vec::new();
|
||||||
|
token_stream
|
||||||
|
.read_while(&mut buf, |byte| *byte != b'\n' && *byte != b',')?;
|
||||||
|
if NewLineToken::peek(token_stream).is_ok() {
|
||||||
|
NewLineToken::parse(token_stream)?;
|
||||||
|
} else if CommaToken::peek(token_stream).is_ok() {
|
||||||
|
CommaToken::peek(token_stream)?;
|
||||||
|
}
|
||||||
|
let string = String::from_utf8(buf)?;
|
||||||
|
let trimmed = string.trim();
|
||||||
|
if trimmed.chars().all(|char| char.is_whitespace()) {
|
||||||
|
break;
|
||||||
|
} else {
|
||||||
|
returns.push(FunctionReturn(trimmed.to_string()));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
returns
|
||||||
|
},
|
||||||
|
functions_options: {
|
||||||
|
let mut options = Vec::default();
|
||||||
|
|
||||||
|
while PostFunctionOptions::peek(token_stream).is_ok() {
|
||||||
|
options.push(PostFunctionOptions::parse(token_stream)?)
|
||||||
|
}
|
||||||
|
|
||||||
|
options
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
struct Parenthesized<T, const SEP: char>(pub Vec<T>);
|
||||||
|
|
||||||
|
impl<T, const SEP: char> Parse for Parenthesized<T, SEP>
|
||||||
|
where
|
||||||
|
T: Parse,
|
||||||
|
{
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
OpenParenToken::parse(token_stream)?;
|
||||||
|
|
||||||
|
let mut inner = Vec::new();
|
||||||
|
loop {
|
||||||
|
if CloseParenToken::peek(token_stream).is_ok() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
inner.push(T::parse(token_stream)?);
|
||||||
|
|
||||||
|
let reached_the_end = CloseParenToken::peek(token_stream).is_ok();
|
||||||
|
if reached_the_end {
|
||||||
|
break;
|
||||||
|
} else {
|
||||||
|
SingleCharToken::<SEP>::parse(token_stream)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
CloseParenToken::parse(token_stream)?;
|
||||||
|
|
||||||
|
Ok(Self(inner))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
define_wrapper_type!(
|
||||||
|
/// A wrapper type for a function identifier token.
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
struct FunctionIdent(String);
|
||||||
|
);
|
||||||
|
|
||||||
|
impl Parse for FunctionIdent {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
Parse::parse(token_stream).map(Self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
define_wrapper_type!(
|
||||||
|
/// A wrapper type for a function argument token.
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
struct FunctionArgumentType(String);
|
||||||
|
);
|
||||||
|
|
||||||
|
impl Parse for FunctionArgumentType {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
Parse::parse(token_stream).map(Self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
define_wrapper_type!(
|
||||||
|
/// A wrapper type for a function argument token.
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
struct FunctionArgument(String);
|
||||||
|
);
|
||||||
|
|
||||||
|
impl Parse for FunctionArgument {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
Parse::parse(token_stream).map(Self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
define_wrapper_type!(
|
||||||
|
/// A wrapper type for a function return token.
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
struct FunctionReturn(String);
|
||||||
|
);
|
||||||
|
|
||||||
|
impl Parse for FunctionReturn {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
Parse::parse(token_stream).map(Self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||||
|
struct SingleCharToken<const CHAR: char>;
|
||||||
|
|
||||||
|
impl<const CHAR: char> Parse for SingleCharToken<CHAR> {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
let mut buf = [0; 1];
|
||||||
|
loop {
|
||||||
|
token_stream.read(&mut buf)?;
|
||||||
|
let [byte] = buf;
|
||||||
|
if byte == CHAR as u8 {
|
||||||
|
return Ok(Self);
|
||||||
|
} else if byte.is_ascii_whitespace() {
|
||||||
|
continue;
|
||||||
|
} else {
|
||||||
|
return Err(anyhow!(
|
||||||
|
"Invalid character encountered {} expected {}",
|
||||||
|
byte as char,
|
||||||
|
CHAR
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Bit of a hack, but I do this because Rust analyzer doesn't like `SingleCharToken<'>'>` and it
|
||||||
|
// messes up with the syntax highlighting.
|
||||||
|
const GT_CHAR: char = '>';
|
||||||
|
|
||||||
|
type ColonToken = SingleCharToken<':'>;
|
||||||
|
type CommaToken = SingleCharToken<','>;
|
||||||
|
type OpenParenToken = SingleCharToken<'('>;
|
||||||
|
type CloseParenToken = SingleCharToken<')'>;
|
||||||
|
type DashToken = SingleCharToken<'-'>;
|
||||||
|
type GtToken = SingleCharToken<{ GT_CHAR }>;
|
||||||
|
type NewLineToken = SingleCharToken<'\n'>;
|
||||||
|
type SpaceToken = SingleCharToken<' '>;
|
||||||
|
type ArrowToken = (DashToken, GtToken);
|
||||||
|
|
||||||
|
macro_rules! string_literal_token {
|
||||||
|
(
|
||||||
|
$($ty_ident: ident => $str: expr),* $(,)?
|
||||||
|
) => {
|
||||||
|
$(
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||||
|
pub struct $ty_ident;
|
||||||
|
|
||||||
|
impl Parse for $ty_ident {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
token_stream.skip_while(u8::is_ascii_whitespace)?;
|
||||||
|
|
||||||
|
let mut buffer = [0; $str.len()];
|
||||||
|
token_stream.read(&mut buffer)?;
|
||||||
|
while SpaceToken::peek(token_stream).is_ok() {
|
||||||
|
SpaceToken::parse(token_stream)?;
|
||||||
|
}
|
||||||
|
if $str.as_bytes() == buffer {
|
||||||
|
Ok(Self)
|
||||||
|
} else {
|
||||||
|
Err(anyhow!("Invalid string - expected {} but got {:?}", $str, str::from_utf8(&buffer)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)*
|
||||||
|
};
|
||||||
|
}
|
||||||
|
string_literal_token! {
|
||||||
|
GasLiteralStringToken => "gas",
|
||||||
|
IrOptimizedLiteralStringToken => "irOptimized",
|
||||||
|
LegacyLiteralStringToken => "legacy",
|
||||||
|
LegacyOptimizedLiteralStringToken => "legacyOptimized",
|
||||||
|
CodeLiteralStringToken => "code",
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
pub enum PostFunctionOptions {
|
||||||
|
IrOptimizedGasOption(IrOptimizedGasOption),
|
||||||
|
IrOptimizedGasCodeOption(IrOptimizedGasCodeOption),
|
||||||
|
LegacyGasOption(LegacyGasOption),
|
||||||
|
LegacyGasCodeOption(LegacyGasCodeOption),
|
||||||
|
LegacyOptimizedGasOption(LegacyOptimizedGasOption),
|
||||||
|
LegacyOptimizedGasCodeOption(LegacyOptimizedGasCodeOption),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Parse for PostFunctionOptions {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
if IrOptimizedGasOption::peek(token_stream).is_ok() {
|
||||||
|
IrOptimizedGasOption::parse(token_stream).map(Self::IrOptimizedGasOption)
|
||||||
|
} else if IrOptimizedGasCodeOption::peek(token_stream).is_ok() {
|
||||||
|
IrOptimizedGasCodeOption::parse(token_stream).map(Self::IrOptimizedGasCodeOption)
|
||||||
|
} else if LegacyGasOption::peek(token_stream).is_ok() {
|
||||||
|
LegacyGasOption::parse(token_stream).map(Self::LegacyGasOption)
|
||||||
|
} else if LegacyGasCodeOption::peek(token_stream).is_ok() {
|
||||||
|
LegacyGasCodeOption::parse(token_stream).map(Self::LegacyGasCodeOption)
|
||||||
|
} else if LegacyOptimizedGasOption::peek(token_stream).is_ok() {
|
||||||
|
LegacyOptimizedGasOption::parse(token_stream).map(Self::LegacyOptimizedGasOption)
|
||||||
|
} else if LegacyOptimizedGasCodeOption::peek(token_stream).is_ok() {
|
||||||
|
LegacyOptimizedGasCodeOption::parse(token_stream)
|
||||||
|
.map(Self::LegacyOptimizedGasCodeOption)
|
||||||
|
} else {
|
||||||
|
Err(anyhow!("Failed to parse post function options"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||||
|
struct IrOptimizedGasOption {
|
||||||
|
pub gas_token: GasLiteralStringToken,
|
||||||
|
pub gas_option: IrOptimizedLiteralStringToken,
|
||||||
|
pub colon: ColonToken,
|
||||||
|
pub value: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Parse for IrOptimizedGasOption {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
Ok(Self {
|
||||||
|
gas_token: Parse::parse(token_stream)?,
|
||||||
|
gas_option: Parse::parse(token_stream)?,
|
||||||
|
colon: Parse::parse(token_stream)?,
|
||||||
|
value: Parse::parse(token_stream)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||||
|
struct IrOptimizedGasCodeOption {
|
||||||
|
pub gas_token: GasLiteralStringToken,
|
||||||
|
pub gas_option: IrOptimizedLiteralStringToken,
|
||||||
|
pub code: CodeLiteralStringToken,
|
||||||
|
pub colon: ColonToken,
|
||||||
|
pub value: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Parse for IrOptimizedGasCodeOption {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
Ok(Self {
|
||||||
|
gas_token: Parse::parse(token_stream)?,
|
||||||
|
gas_option: Parse::parse(token_stream)?,
|
||||||
|
code: Parse::parse(token_stream)?,
|
||||||
|
colon: Parse::parse(token_stream)?,
|
||||||
|
value: Parse::parse(token_stream)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||||
|
struct LegacyGasOption {
|
||||||
|
pub gas_token: GasLiteralStringToken,
|
||||||
|
pub gas_option: LegacyLiteralStringToken,
|
||||||
|
pub colon: ColonToken,
|
||||||
|
pub value: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Parse for LegacyGasOption {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
Ok(Self {
|
||||||
|
gas_token: Parse::parse(token_stream)?,
|
||||||
|
gas_option: Parse::parse(token_stream)?,
|
||||||
|
colon: Parse::parse(token_stream)?,
|
||||||
|
value: Parse::parse(token_stream)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||||
|
struct LegacyGasCodeOption {
|
||||||
|
pub gas_token: GasLiteralStringToken,
|
||||||
|
pub gas_option: LegacyLiteralStringToken,
|
||||||
|
pub code: CodeLiteralStringToken,
|
||||||
|
pub colon: ColonToken,
|
||||||
|
pub value: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Parse for LegacyGasCodeOption {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
Ok(Self {
|
||||||
|
gas_token: Parse::parse(token_stream)?,
|
||||||
|
gas_option: Parse::parse(token_stream)?,
|
||||||
|
code: Parse::parse(token_stream)?,
|
||||||
|
colon: Parse::parse(token_stream)?,
|
||||||
|
value: Parse::parse(token_stream)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||||
|
struct LegacyOptimizedGasOption {
|
||||||
|
pub gas_token: GasLiteralStringToken,
|
||||||
|
pub gas_option: LegacyOptimizedLiteralStringToken,
|
||||||
|
pub colon: ColonToken,
|
||||||
|
pub value: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Parse for LegacyOptimizedGasOption {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
Ok(Self {
|
||||||
|
gas_token: Parse::parse(token_stream)?,
|
||||||
|
gas_option: Parse::parse(token_stream)?,
|
||||||
|
colon: Parse::parse(token_stream)?,
|
||||||
|
value: Parse::parse(token_stream)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||||
|
struct LegacyOptimizedGasCodeOption {
|
||||||
|
pub gas_token: GasLiteralStringToken,
|
||||||
|
pub gas_option: LegacyOptimizedLiteralStringToken,
|
||||||
|
pub code: CodeLiteralStringToken,
|
||||||
|
pub colon: ColonToken,
|
||||||
|
pub value: u64,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Parse for LegacyOptimizedGasCodeOption {
|
||||||
|
fn parse(token_stream: &mut (impl Read + Seek)) -> Result<Self> {
|
||||||
|
Ok(Self {
|
||||||
|
gas_token: Parse::parse(token_stream)?,
|
||||||
|
gas_option: Parse::parse(token_stream)?,
|
||||||
|
code: Parse::parse(token_stream)?,
|
||||||
|
colon: Parse::parse(token_stream)?,
|
||||||
|
value: Parse::parse(token_stream)?,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use std::io::Cursor;
|
||||||
|
|
||||||
|
use indoc::indoc;
|
||||||
|
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn complex_function_can_be_parsed() {
|
||||||
|
// Arrange
|
||||||
|
let string = indoc!(
|
||||||
|
r#"
|
||||||
|
myFunction(uint256, uint64,
|
||||||
|
)
|
||||||
|
:
|
||||||
|
1, 2
|
||||||
|
, 3
|
||||||
|
-> 1, 2, 3, 4
|
||||||
|
gas irOptimized: 135499
|
||||||
|
gas legacy: 137095
|
||||||
|
gas legacyOptimized: 135823
|
||||||
|
gas irOptimized code: 135499
|
||||||
|
gas legacy code: 137095
|
||||||
|
gas legacyOptimized code: 135823
|
||||||
|
"#
|
||||||
|
);
|
||||||
|
let mut token_stream = Cursor::new(string);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
let function = Function::parse(&mut token_stream);
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
let function = function.expect("Function parsing failed");
|
||||||
|
assert_eq!(
|
||||||
|
function,
|
||||||
|
Function {
|
||||||
|
ident: FunctionIdent::new("myFunction"),
|
||||||
|
arg_types: Parenthesized(vec![
|
||||||
|
FunctionArgumentType::new("uint256"),
|
||||||
|
FunctionArgumentType::new("uint64")
|
||||||
|
]),
|
||||||
|
colon: ColonToken::default(),
|
||||||
|
function_arguments: vec![
|
||||||
|
FunctionArgument::new("1"),
|
||||||
|
FunctionArgument::new("2"),
|
||||||
|
FunctionArgument::new("3")
|
||||||
|
],
|
||||||
|
arrow_token: ArrowToken::default(),
|
||||||
|
function_returns: vec![
|
||||||
|
FunctionReturn::new("1"),
|
||||||
|
FunctionReturn::new("2"),
|
||||||
|
FunctionReturn::new("3"),
|
||||||
|
FunctionReturn::new("4"),
|
||||||
|
],
|
||||||
|
functions_options: vec![
|
||||||
|
PostFunctionOptions::IrOptimizedGasOption(IrOptimizedGasOption {
|
||||||
|
gas_token: Default::default(),
|
||||||
|
gas_option: Default::default(),
|
||||||
|
colon: Default::default(),
|
||||||
|
value: 135499
|
||||||
|
}),
|
||||||
|
PostFunctionOptions::LegacyGasOption(LegacyGasOption {
|
||||||
|
gas_token: Default::default(),
|
||||||
|
gas_option: Default::default(),
|
||||||
|
colon: Default::default(),
|
||||||
|
value: 137095
|
||||||
|
}),
|
||||||
|
PostFunctionOptions::LegacyOptimizedGasOption(LegacyOptimizedGasOption {
|
||||||
|
gas_token: Default::default(),
|
||||||
|
gas_option: Default::default(),
|
||||||
|
colon: Default::default(),
|
||||||
|
value: 135823
|
||||||
|
}),
|
||||||
|
PostFunctionOptions::IrOptimizedGasCodeOption(IrOptimizedGasCodeOption {
|
||||||
|
gas_token: Default::default(),
|
||||||
|
gas_option: Default::default(),
|
||||||
|
code: Default::default(),
|
||||||
|
colon: Default::default(),
|
||||||
|
value: 135499
|
||||||
|
}),
|
||||||
|
PostFunctionOptions::LegacyGasCodeOption(LegacyGasCodeOption {
|
||||||
|
gas_token: Default::default(),
|
||||||
|
gas_option: Default::default(),
|
||||||
|
code: Default::default(),
|
||||||
|
colon: Default::default(),
|
||||||
|
value: 137095
|
||||||
|
}),
|
||||||
|
PostFunctionOptions::LegacyOptimizedGasCodeOption(
|
||||||
|
LegacyOptimizedGasCodeOption {
|
||||||
|
gas_token: Default::default(),
|
||||||
|
gas_option: Default::default(),
|
||||||
|
code: Default::default(),
|
||||||
|
colon: Default::default(),
|
||||||
|
value: 135823
|
||||||
|
}
|
||||||
|
),
|
||||||
|
]
|
||||||
|
}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
//! This module contains a parser for the Solidity semantic tests allowing them to be parsed into
|
||||||
|
//! regular [`Metadata`] objects that can be executed by the testing framework.
|
||||||
|
//!
|
||||||
|
//! [`Metadata`]: crate::metadata::Metadata
|
||||||
|
|
||||||
|
mod function_parser;
|
||||||
|
mod sections;
|
||||||
|
mod test_configuration;
|
||||||
|
|
||||||
|
pub use function_parser::*;
|
||||||
|
pub use sections::*;
|
||||||
|
pub use test_configuration::*;
|
||||||
@@ -0,0 +1,338 @@
|
|||||||
|
use std::{collections::VecDeque, path::PathBuf, sync::LazyLock};
|
||||||
|
|
||||||
|
use anyhow::{Context, Result, anyhow};
|
||||||
|
use regex::Regex;
|
||||||
|
|
||||||
|
use crate::semantic_tests::TestConfiguration;
|
||||||
|
|
||||||
|
/// This enum describes the various sections that a semantic test can contain.
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||||
|
pub enum SemanticTestSection {
|
||||||
|
/// A source code section that consists of Solidity code.
|
||||||
|
///
|
||||||
|
/// Source code sections might have a file name and they might not. Take the following section
|
||||||
|
/// as an example which doesn't contain a filename
|
||||||
|
///
|
||||||
|
/// ```solidity
|
||||||
|
/// contract C {
|
||||||
|
/// bytes data;
|
||||||
|
/// function () pure returns (bytes memory) f;
|
||||||
|
/// constructor() {
|
||||||
|
/// data = M.longdata();
|
||||||
|
/// f = M.longdata;
|
||||||
|
/// }
|
||||||
|
/// function test() public view returns (bool) {
|
||||||
|
/// return keccak256(data) == keccak256(f());
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// The above will translate into this enum variant and without a defined filename for the code.
|
||||||
|
/// However, the following will translate into this variant of the enum with a defined file name
|
||||||
|
///
|
||||||
|
/// ```solidity
|
||||||
|
/// ==== Source: main.sol ====
|
||||||
|
/// contract C {
|
||||||
|
/// bytes data;
|
||||||
|
/// function () pure returns (bytes memory) f;
|
||||||
|
/// constructor() {
|
||||||
|
/// data = M.longdata();
|
||||||
|
/// f = M.longdata;
|
||||||
|
/// }
|
||||||
|
/// function test() public view returns (bool) {
|
||||||
|
/// return keccak256(data) == keccak256(f());
|
||||||
|
/// }
|
||||||
|
/// }
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// This is because of the use of the `Source` directive at the start of the section.
|
||||||
|
///
|
||||||
|
/// Note the following: All tests will be run on the last declared contract in the semantic test
|
||||||
|
/// and therefore the order of the contracts matters.
|
||||||
|
SourceCode {
|
||||||
|
file_name: Option<PathBuf>,
|
||||||
|
content: String,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// An external source section from the solidity semantic tests.
|
||||||
|
///
|
||||||
|
/// External source sections from the solidity semantic tests are the simplest sections out of
|
||||||
|
/// them all. They look like the following:
|
||||||
|
///
|
||||||
|
/// ```solidity
|
||||||
|
/// ==== ExternalSource: _prbmath/PRBMathSD59x18.sol ====
|
||||||
|
/// ```
|
||||||
|
///
|
||||||
|
/// And they can be thought of as a directive to the compiler to include these contracts when
|
||||||
|
/// compiling the test contract.
|
||||||
|
ExternalSource { path: PathBuf },
|
||||||
|
|
||||||
|
/// A test configuration section
|
||||||
|
///
|
||||||
|
/// This section contains various configuration and filters that are used for the tests and its
|
||||||
|
/// always the section that comes right before the actual tests. This section looks like the
|
||||||
|
/// following:
|
||||||
|
///
|
||||||
|
/// ```solidity
|
||||||
|
/// // ====
|
||||||
|
/// // ABIEncoderV1Only: true
|
||||||
|
/// // compileViaYul: false
|
||||||
|
/// // ----
|
||||||
|
/// ```
|
||||||
|
TestConfiguration { configuration: TestConfiguration },
|
||||||
|
|
||||||
|
/// A test inputs section.
|
||||||
|
///
|
||||||
|
/// This section consists of all of the lines that make up the test inputs or the test steps
|
||||||
|
/// which is the final section found in the semantic test files. This section looks like the
|
||||||
|
/// following:
|
||||||
|
///
|
||||||
|
/// ```solidity
|
||||||
|
/// // ----
|
||||||
|
/// // f1() -> 0x20, 0x40, 0x20, 0
|
||||||
|
/// // f2(string): 0x20, 0 -> 0x20, 0x40, 0x20, 0
|
||||||
|
/// // f2(string): 0x20, 0, 0 -> 0x20, 0x40, 0x20, 0
|
||||||
|
/// // g1() -> 32, 0
|
||||||
|
/// // g2(string): 0x20, 0 -> 0x20, 0
|
||||||
|
/// // g2(string): 0x20, 0, 0 -> 0x20, 0
|
||||||
|
/// ```
|
||||||
|
TestInputs { lines: Vec<String> },
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SemanticTestSection {
|
||||||
|
const SOURCE_SECTION_MARKER: &str = "==== Source:";
|
||||||
|
const EXTERNAL_SOURCE_SECTION_MARKER: &str = "==== ExternalSource:";
|
||||||
|
const TEST_CONFIGURATION_SECTION_MARKER: &str = "// ====";
|
||||||
|
const TEST_INPUTS_SECTION_MARKER: &str = "// ----";
|
||||||
|
|
||||||
|
pub fn parse_source_into_sections(source: impl AsRef<str>) -> Result<Vec<Self>> {
|
||||||
|
let mut sections = VecDeque::<Self>::new();
|
||||||
|
sections.push_back(Self::SourceCode {
|
||||||
|
file_name: None,
|
||||||
|
content: Default::default(),
|
||||||
|
});
|
||||||
|
|
||||||
|
for line in source.as_ref().split('\n') {
|
||||||
|
if let Some(new_section) = sections
|
||||||
|
.back_mut()
|
||||||
|
.expect("Impossible case - we have at least one item in the sections")
|
||||||
|
.append_line(line)?
|
||||||
|
{
|
||||||
|
sections.push_back(new_section);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let first_section = sections
|
||||||
|
.front()
|
||||||
|
.expect("Impossible case - there's always at least one section");
|
||||||
|
let remove_first_section = match first_section {
|
||||||
|
SemanticTestSection::SourceCode { file_name, content } => {
|
||||||
|
file_name.is_none() && content.is_empty()
|
||||||
|
}
|
||||||
|
SemanticTestSection::ExternalSource { .. }
|
||||||
|
| SemanticTestSection::TestConfiguration { .. }
|
||||||
|
| SemanticTestSection::TestInputs { .. } => false,
|
||||||
|
};
|
||||||
|
if remove_first_section {
|
||||||
|
sections.pop_front();
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(sections.into_iter().collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Appends a line to a semantic test section.
|
||||||
|
///
|
||||||
|
/// This method takes in the current section and a new line and attempts to append it to parse
|
||||||
|
/// it and append it to the current section. If the line is found to be the start of a new
|
||||||
|
/// section then no changes will be made to the current section and instead the line will be
|
||||||
|
/// interpreted according to the rules of new sections.
|
||||||
|
pub fn append_line(&mut self, line: impl AsRef<str>) -> Result<Option<Self>> {
|
||||||
|
static COMMENT_REPLACEMENT_REGEX: LazyLock<Regex> =
|
||||||
|
LazyLock::new(|| Regex::new("#.*#$").unwrap());
|
||||||
|
|
||||||
|
let line = line.as_ref();
|
||||||
|
if line.is_empty() {
|
||||||
|
Ok(None)
|
||||||
|
} else if let Some(source_path) = line.strip_prefix(Self::SOURCE_SECTION_MARKER) {
|
||||||
|
let source_code_file_path = source_path
|
||||||
|
.trim()
|
||||||
|
.split(' ')
|
||||||
|
.next()
|
||||||
|
.context("Failed to find the source code file path")?;
|
||||||
|
Ok(Some(Self::SourceCode {
|
||||||
|
file_name: Some(PathBuf::from(source_code_file_path)),
|
||||||
|
content: Default::default(),
|
||||||
|
}))
|
||||||
|
} else if let Some(external_source_path) =
|
||||||
|
line.strip_prefix(Self::EXTERNAL_SOURCE_SECTION_MARKER)
|
||||||
|
{
|
||||||
|
let source_code_file_path = external_source_path
|
||||||
|
.trim()
|
||||||
|
.split(' ')
|
||||||
|
.next()
|
||||||
|
.context("Failed to find the source code file path")?;
|
||||||
|
Ok(Some(Self::ExternalSource {
|
||||||
|
path: PathBuf::from(source_code_file_path),
|
||||||
|
}))
|
||||||
|
} else if line == Self::TEST_CONFIGURATION_SECTION_MARKER {
|
||||||
|
Ok(Some(Self::TestConfiguration {
|
||||||
|
configuration: Default::default(),
|
||||||
|
}))
|
||||||
|
} else if line == Self::TEST_INPUTS_SECTION_MARKER {
|
||||||
|
Ok(Some(Self::TestInputs {
|
||||||
|
lines: Default::default(),
|
||||||
|
}))
|
||||||
|
} else {
|
||||||
|
match self {
|
||||||
|
SemanticTestSection::SourceCode { content, .. } => {
|
||||||
|
content.push('\n');
|
||||||
|
content.push_str(line);
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
SemanticTestSection::ExternalSource { .. } => Ok(Some(Self::SourceCode {
|
||||||
|
file_name: None,
|
||||||
|
content: line.to_owned(),
|
||||||
|
})),
|
||||||
|
SemanticTestSection::TestConfiguration { configuration } => {
|
||||||
|
let line = line
|
||||||
|
.strip_prefix("//")
|
||||||
|
.with_context(|| {
|
||||||
|
format!("Line doesn't contain test configuration prefix: {line}")
|
||||||
|
})?
|
||||||
|
.trim();
|
||||||
|
let mut splitted = line.split(':');
|
||||||
|
let key = splitted.next().context("Failed to find the key")?.trim();
|
||||||
|
let value = splitted.next().context("Failed to find the value")?.trim();
|
||||||
|
configuration.with_config(key, value)?;
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
SemanticTestSection::TestInputs { lines } => {
|
||||||
|
let line = line
|
||||||
|
.strip_prefix("//")
|
||||||
|
.ok_or_else(|| anyhow!("Line doesn't contain test input prefix: {line}"))
|
||||||
|
.map(str::trim)?;
|
||||||
|
let line = COMMENT_REPLACEMENT_REGEX.replace_all(line, "");
|
||||||
|
if !line.starts_with('#') && !line.chars().all(|char| char.is_whitespace()) {
|
||||||
|
lines.push(line.to_string());
|
||||||
|
}
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
mod test {
    use indoc::indoc;

    use super::*;

    #[test]
    fn parses_a_simple_file_correctly() {
        // Arrange
        const SIMPLE_FILE: &str = indoc!(
            r#"
            ==== Source: main.sol ====
            contract C {
                function f() public pure returns (uint) {
                    return 1;
                }
            }
            // ====
            // compileViaYul: true
            // ----
            // f() -> 1
            "#
        );

        // Act
        let sections =
            SemanticTestSection::parse_source_into_sections(SIMPLE_FILE).expect("Failed to parse");

        // Assert
        assert_eq!(
            sections,
            vec![
                SemanticTestSection::SourceCode {
                    file_name: Some("main.sol".into()),
                    content: "\ncontract C {\n    function f() public pure returns (uint) {\n        return 1;\n    }\n}".to_string()
                },
                SemanticTestSection::TestConfiguration {
                    configuration: TestConfiguration { compile_via_yul: Some(true.into()), ..Default::default() },
                },
                SemanticTestSection::TestInputs {
                    lines: vec!["f() -> 1".to_string()]
                }
            ]
        )
    }

    #[test]
    fn parses_a_complex_file_correctly() {
        // Arrange
        const COMPLEX_FILE: &str = indoc!(
            r#"
            ==== Source: main.sol ====
            import "./lib.sol";
            contract C {
                function f() public pure returns (uint) {
                    return Lib.f();
                }
            }
            ==== Source: lib.sol ====
            library Lib {
                function f() internal pure returns (uint) {
                    return 1;
                }
            }
            // ====
            // compileViaYul: true
            // ----
            // # This is a comment
            // f() -> 1
            "#
        );

        // Act
        let sections =
            SemanticTestSection::parse_source_into_sections(COMPLEX_FILE).expect("Failed to parse");

        // Assert
        assert_eq!(
            sections,
            vec![
                SemanticTestSection::SourceCode {
                    file_name: Some("main.sol".into()),
                    content: "\nimport \"./lib.sol\";\ncontract C {\n    function f() public pure returns (uint) {\n        return Lib.f();\n    }\n}".to_string()
                },
                SemanticTestSection::SourceCode {
                    file_name: Some("lib.sol".into()),
                    content: "\nlibrary Lib {\n    function f() internal pure returns (uint) {\n        return 1;\n    }\n}".to_string()
                },
                SemanticTestSection::TestConfiguration {
                    configuration: TestConfiguration { compile_via_yul: Some(true.into()), ..Default::default() },
                },
                SemanticTestSection::TestInputs {
                    lines: vec!["f() -> 1".to_string()]
                }
            ]
        )
    }

    // NOTE(review): removed the `#[ignore = "Ignored and should be removed
    // before making a PR"]` scratch test that iterated over a hardcoded
    // developer-machine path (`/Users/omarabdulla/...`) — its own annotation
    // declared it must not land, and it could never pass on CI.
}
|
||||||
@@ -0,0 +1,200 @@
|
|||||||
|
use std::str::FromStr;
|
||||||
|
|
||||||
|
use revive_common::EVMVersion;
|
||||||
|
|
||||||
|
use anyhow::{Error, Result, bail};
|
||||||
|
|
||||||
|
/// The configuration parameters provided in the solidity semantic tests.
///
/// Each field is `Option`al: `None` means the corresponding key simply did not
/// appear in the test file, not that the option is disabled.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct TestConfiguration {
    /// Controls if the test case compiles through the Yul IR.
    /// Parsed from the `compileViaYul` key.
    pub compile_via_yul: Option<ItemConfig>,
    /// Controls if the compilation should be done to EWASM.
    /// Parsed from the `compileToEwasm` key.
    pub compile_to_ewasm: Option<ItemConfig>,
    /// Controls if ABI encoding should be restricted to the V1 ABI encoder.
    /// Parsed from the `ABIEncoderV1Only` key.
    pub abi_encoder_v1_only: Option<ItemConfig>,
    /// Controls the EVM Version that the test is compatible with.
    /// Parsed from the `EVMVersion` key, e.g. `>=constantinople`.
    pub evm_version: Option<EvmVersionRequirement>,
    /// Controls how the revert strings should be handled.
    /// Parsed from the `revertStrings` key.
    pub revert_strings: Option<RevertString>,
    /// Controls if non-existent functions should be permitted or not.
    /// Parsed from the `allowNonExistingFunctions` key.
    pub allow_non_existing_functions: Option<bool>,
    /// The list of bytecode formats that this test should be run against.
    /// Parsed from the comma-separated `bytecodeFormat` key.
    pub bytecode_format: Option<Vec<BytecodeFormat>>,
}
|
||||||
|
|
||||||
|
impl TestConfiguration {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self::default()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn with_config(
|
||||||
|
&mut self,
|
||||||
|
key: impl AsRef<str>,
|
||||||
|
value: impl AsRef<str>,
|
||||||
|
) -> Result<&mut Self> {
|
||||||
|
match key.as_ref() {
|
||||||
|
"compileViaYul" => self.compile_via_yul = Some(value.as_ref().parse()?),
|
||||||
|
"compileToEwasm" => self.compile_to_ewasm = Some(value.as_ref().parse()?),
|
||||||
|
"ABIEncoderV1Only" => self.abi_encoder_v1_only = Some(value.as_ref().parse()?),
|
||||||
|
"EVMVersion" => self.evm_version = Some(value.as_ref().parse()?),
|
||||||
|
"revertStrings" => self.revert_strings = Some(value.as_ref().parse()?),
|
||||||
|
"allowNonExistingFunctions" => {
|
||||||
|
self.allow_non_existing_functions = Some(value.as_ref().parse()?)
|
||||||
|
}
|
||||||
|
"bytecodeFormat" => {
|
||||||
|
self.bytecode_format = Some(
|
||||||
|
value
|
||||||
|
.as_ref()
|
||||||
|
.split(',')
|
||||||
|
.map(str::trim)
|
||||||
|
.map(FromStr::from_str)
|
||||||
|
.collect::<Result<Vec<_>>>()?,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
_ => bail!("Unknown test configuration {}", key.as_ref()),
|
||||||
|
};
|
||||||
|
Ok(self)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn new_from_pairs(
|
||||||
|
pairs: impl IntoIterator<Item = (impl AsRef<str>, impl AsRef<str>)>,
|
||||||
|
) -> Result<Self> {
|
||||||
|
let mut this = Self::default();
|
||||||
|
pairs
|
||||||
|
.into_iter()
|
||||||
|
.try_fold(&mut this, |this, (key, value)| this.with_config(key, value))?;
|
||||||
|
Ok(this)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The configuration of a single item in the test configuration.
#[derive(Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum ItemConfig {
    /// The configuration is set to be a boolean that's either `true` or `false`.
    Boolean(bool),
    /// The `also` value (parsed from the literal string `"also"`); presumably
    /// means the test applies both with and without the option enabled —
    /// TODO(review): confirm against the solidity semantic-test format.
    Also,
}
|
||||||
|
|
||||||
|
impl FromStr for ItemConfig {
|
||||||
|
type Err = Error;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
|
||||||
|
match s {
|
||||||
|
"true" => Ok(Self::Boolean(true)),
|
||||||
|
"false" => Ok(Self::Boolean(false)),
|
||||||
|
"also" => Ok(Self::Also),
|
||||||
|
_ => bail!("Invalid ItemConfig {s}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<bool> for ItemConfig {
|
||||||
|
fn from(value: bool) -> Self {
|
||||||
|
Self::Boolean(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TryFrom<String> for ItemConfig {
|
||||||
|
type Error = <ItemConfig as FromStr>::Err;
|
||||||
|
|
||||||
|
fn try_from(value: String) -> std::result::Result<Self, Self::Error> {
|
||||||
|
value.as_str().parse()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The options available for the revert strings.
///
/// NOTE(review): the variant set appears to mirror solc's `--revert-strings`
/// setting (`default`, `debug`, `strip`, `verboseDebug`) — confirm against
/// the solidity documentation.
#[derive(Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub enum RevertString {
    /// Parsed from `"default"`; also this type's [`Default`] value.
    #[default]
    Default,
    /// Parsed from `"debug"`.
    Debug,
    /// Parsed from `"strip"`.
    Strip,
    /// Parsed from `"verboseDebug"`.
    VerboseDebug,
}
|
||||||
|
|
||||||
|
impl FromStr for RevertString {
|
||||||
|
type Err = Error;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
|
||||||
|
match s {
|
||||||
|
"default" => Ok(Self::Default),
|
||||||
|
"debug" => Ok(Self::Debug),
|
||||||
|
"strip" => Ok(Self::Strip),
|
||||||
|
"verboseDebug" => Ok(Self::VerboseDebug),
|
||||||
|
_ => bail!("Invalid RevertString {s}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TryFrom<String> for RevertString {
|
||||||
|
type Error = <RevertString as FromStr>::Err;
|
||||||
|
|
||||||
|
fn try_from(value: String) -> std::result::Result<Self, Self::Error> {
|
||||||
|
value.as_str().parse()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The set of available bytecode formats.
#[derive(Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum BytecodeFormat {
    /// Parsed from `"legacy"`.
    Legacy,
    /// Parsed from `">=EOFv1"`.
    EofVersionGreaterThanOne,
}
|
||||||
|
|
||||||
|
impl FromStr for BytecodeFormat {
|
||||||
|
type Err = Error;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
|
||||||
|
match s {
|
||||||
|
"legacy" => Ok(Self::Legacy),
|
||||||
|
">=EOFv1" => Ok(Self::EofVersionGreaterThanOne),
|
||||||
|
_ => bail!("Invalid BytecodeFormat {s}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TryFrom<String> for BytecodeFormat {
|
||||||
|
type Error = <BytecodeFormat as FromStr>::Err;
|
||||||
|
|
||||||
|
fn try_from(value: String) -> std::result::Result<Self, Self::Error> {
|
||||||
|
value.as_str().parse()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A constraint on the EVM version that a test may run against, parsed from a
/// comparator-prefixed version string such as `">=constantinople"`.
#[derive(Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum EvmVersionRequirement {
    /// Strictly newer than the given version (`>`).
    GreaterThan(EVMVersion),
    /// The given version or newer (`>=`).
    GreaterThanOrEqual(EVMVersion),
    /// Strictly older than the given version (`<`).
    LessThan(EVMVersion),
    /// The given version or older (`<=`).
    LessThanOrEqual(EVMVersion),
    /// Exactly the given version (`=`).
    EqualTo(EVMVersion),
}
|
||||||
|
|
||||||
|
impl FromStr for EvmVersionRequirement {
|
||||||
|
type Err = Error;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
|
||||||
|
match s.as_bytes() {
|
||||||
|
[b'>', b'=', remaining @ ..] => Ok(Self::GreaterThanOrEqual(
|
||||||
|
str::from_utf8(remaining)?.try_into()?,
|
||||||
|
)),
|
||||||
|
[b'>', remaining @ ..] => Ok(Self::GreaterThan(str::from_utf8(remaining)?.try_into()?)),
|
||||||
|
[b'<', b'=', remaining @ ..] => Ok(Self::LessThanOrEqual(
|
||||||
|
str::from_utf8(remaining)?.try_into()?,
|
||||||
|
)),
|
||||||
|
[b'<', remaining @ ..] => Ok(Self::LessThan(str::from_utf8(remaining)?.try_into()?)),
|
||||||
|
[b'=', remaining @ ..] => Ok(Self::EqualTo(str::from_utf8(remaining)?.try_into()?)),
|
||||||
|
_ => bail!("Invalid EVM version requirement {s}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TryFrom<String> for EvmVersionRequirement {
|
||||||
|
type Error = <EvmVersionRequirement as FromStr>::Err;
|
||||||
|
|
||||||
|
fn try_from(value: String) -> std::result::Result<Self, Self::Error> {
|
||||||
|
value.as_str().parse()
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,8 +1,7 @@
|
|||||||
//! This crate implements all node interactions.
|
//! This crate implements all node interactions.
|
||||||
|
|
||||||
use alloy::primitives::{Address, StorageKey, U256};
|
|
||||||
use alloy::rpc::types::trace::geth::{DiffMode, GethDebugTracingOptions, GethTrace};
|
use alloy::rpc::types::trace::geth::{DiffMode, GethDebugTracingOptions, GethTrace};
|
||||||
use alloy::rpc::types::{EIP1186AccountProofResponse, TransactionReceipt, TransactionRequest};
|
use alloy::rpc::types::{TransactionReceipt, TransactionRequest};
|
||||||
use anyhow::Result;
|
use anyhow::Result;
|
||||||
|
|
||||||
/// An interface for all interactions with Ethereum compatible nodes.
|
/// An interface for all interactions with Ethereum compatible nodes.
|
||||||
@@ -22,14 +21,4 @@ pub trait EthereumNode {
|
|||||||
|
|
||||||
/// Returns the state diff of the transaction hash in the [TransactionReceipt].
|
/// Returns the state diff of the transaction hash in the [TransactionReceipt].
|
||||||
fn state_diff(&self, receipt: &TransactionReceipt) -> impl Future<Output = Result<DiffMode>>;
|
fn state_diff(&self, receipt: &TransactionReceipt) -> impl Future<Output = Result<DiffMode>>;
|
||||||
|
|
||||||
/// Returns the balance of the provided [`Address`] back.
|
|
||||||
fn balance_of(&self, address: Address) -> impl Future<Output = Result<U256>>;
|
|
||||||
|
|
||||||
/// Returns the latest storage proof of the provided [`Address`]
|
|
||||||
fn latest_state_proof(
|
|
||||||
&self,
|
|
||||||
address: Address,
|
|
||||||
keys: Vec<StorageKey>,
|
|
||||||
) -> impl Future<Output = Result<EIP1186AccountProofResponse>>;
|
|
||||||
}
|
}
|
||||||
|
|||||||
+2
-27
@@ -17,16 +17,14 @@ use alloy::{
|
|||||||
eips::BlockNumberOrTag,
|
eips::BlockNumberOrTag,
|
||||||
genesis::{Genesis, GenesisAccount},
|
genesis::{Genesis, GenesisAccount},
|
||||||
network::{Ethereum, EthereumWallet, NetworkWallet},
|
network::{Ethereum, EthereumWallet, NetworkWallet},
|
||||||
primitives::{
|
primitives::{Address, BlockHash, BlockNumber, BlockTimestamp, FixedBytes, TxHash, U256},
|
||||||
Address, BlockHash, BlockNumber, BlockTimestamp, FixedBytes, StorageKey, TxHash, U256,
|
|
||||||
},
|
|
||||||
providers::{
|
providers::{
|
||||||
Provider, ProviderBuilder,
|
Provider, ProviderBuilder,
|
||||||
ext::DebugApi,
|
ext::DebugApi,
|
||||||
fillers::{CachedNonceManager, ChainIdFiller, FillProvider, NonceFiller, TxFiller},
|
fillers::{CachedNonceManager, ChainIdFiller, FillProvider, NonceFiller, TxFiller},
|
||||||
},
|
},
|
||||||
rpc::types::{
|
rpc::types::{
|
||||||
EIP1186AccountProofResponse, TransactionReceipt, TransactionRequest,
|
TransactionReceipt, TransactionRequest,
|
||||||
trace::geth::{DiffMode, GethDebugTracingOptions, PreStateConfig, PreStateFrame},
|
trace::geth::{DiffMode, GethDebugTracingOptions, PreStateConfig, PreStateFrame},
|
||||||
},
|
},
|
||||||
signers::local::PrivateKeySigner,
|
signers::local::PrivateKeySigner,
|
||||||
@@ -373,29 +371,6 @@ impl EthereumNode for GethNode {
|
|||||||
_ => anyhow::bail!("expected a diff mode trace"),
|
_ => anyhow::bail!("expected a diff mode trace"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
|
||||||
async fn balance_of(&self, address: Address) -> anyhow::Result<U256> {
|
|
||||||
self.provider()
|
|
||||||
.await?
|
|
||||||
.get_balance(address)
|
|
||||||
.await
|
|
||||||
.map_err(Into::into)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
|
||||||
async fn latest_state_proof(
|
|
||||||
&self,
|
|
||||||
address: Address,
|
|
||||||
keys: Vec<StorageKey>,
|
|
||||||
) -> anyhow::Result<EIP1186AccountProofResponse> {
|
|
||||||
self.provider()
|
|
||||||
.await?
|
|
||||||
.get_proof(address, keys)
|
|
||||||
.latest()
|
|
||||||
.await
|
|
||||||
.map_err(Into::into)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ResolverApi for GethNode {
|
impl ResolverApi for GethNode {
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ use alloy::{
|
|||||||
},
|
},
|
||||||
primitives::{
|
primitives::{
|
||||||
Address, B64, B256, BlockHash, BlockNumber, BlockTimestamp, Bloom, Bytes, FixedBytes,
|
Address, B64, B256, BlockHash, BlockNumber, BlockTimestamp, Bloom, Bytes, FixedBytes,
|
||||||
StorageKey, TxHash, U256,
|
TxHash, U256,
|
||||||
},
|
},
|
||||||
providers::{
|
providers::{
|
||||||
Provider, ProviderBuilder,
|
Provider, ProviderBuilder,
|
||||||
@@ -25,7 +25,7 @@ use alloy::{
|
|||||||
fillers::{CachedNonceManager, ChainIdFiller, FillProvider, NonceFiller, TxFiller},
|
fillers::{CachedNonceManager, ChainIdFiller, FillProvider, NonceFiller, TxFiller},
|
||||||
},
|
},
|
||||||
rpc::types::{
|
rpc::types::{
|
||||||
EIP1186AccountProofResponse, TransactionReceipt,
|
TransactionReceipt,
|
||||||
eth::{Block, Header, Transaction},
|
eth::{Block, Header, Transaction},
|
||||||
trace::geth::{DiffMode, GethDebugTracingOptions, PreStateConfig, PreStateFrame},
|
trace::geth::{DiffMode, GethDebugTracingOptions, PreStateConfig, PreStateFrame},
|
||||||
},
|
},
|
||||||
@@ -428,29 +428,6 @@ impl EthereumNode for KitchensinkNode {
|
|||||||
_ => anyhow::bail!("expected a diff mode trace"),
|
_ => anyhow::bail!("expected a diff mode trace"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
|
|
||||||
async fn balance_of(&self, address: Address) -> anyhow::Result<U256> {
|
|
||||||
self.provider()
|
|
||||||
.await?
|
|
||||||
.get_balance(address)
|
|
||||||
.await
|
|
||||||
.map_err(Into::into)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
|
|
||||||
async fn latest_state_proof(
|
|
||||||
&self,
|
|
||||||
address: Address,
|
|
||||||
keys: Vec<StorageKey>,
|
|
||||||
) -> anyhow::Result<EIP1186AccountProofResponse> {
|
|
||||||
self.provider()
|
|
||||||
.await?
|
|
||||||
.get_proof(address, keys)
|
|
||||||
.latest()
|
|
||||||
.await
|
|
||||||
.map_err(Into::into)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ResolverApi for KitchensinkNode {
|
impl ResolverApi for KitchensinkNode {
|
||||||
|
|||||||
Reference in New Issue
Block a user