Mirror of https://github.com/pezkuwichain/revive-differential-tests.git (synced 2026-04-22 21:57:58 +00:00)
Compare commits
15 Commits
- 74cbafd42a
- 330d342d3b
- 57ba765aa2
- c2c3e81125
- 390402b6cc
- 11bba5add5
- 2d26f5d8c7
- 01e31a4333
- 989c064983
- 330a773a1c
- f51693cb9f
- 4db7009640
- 5a36e242ec
- 33329632b5
- 429f2e92a2
@@ -99,9 +99,12 @@ jobs:
       - name: Install Geth on Ubuntu
         if: matrix.os == 'ubuntu-24.04'
         run: |
+          sudo add-apt-repository -y ppa:ethereum/ethereum
           sudo apt-get update
           sudo apt-get install -y protobuf-compiler
 
+          sudo apt-get install -y solc
+
           # We were facing some issues in CI with the 1.16.* versions of geth, and specifically on
           # Ubuntu. Eventually, we found out that the last version of geth that worked in our CI was
           # version 1.15.11. Thus, this is the version that we want to use in CI. The PPA sadly does
@@ -122,12 +125,22 @@ jobs:
           wget -qO- "$URL" | sudo tar xz -C /usr/local/bin --strip-components=1
           geth --version
+
+          curl -sL https://github.com/paritytech/revive/releases/download/v0.3.0/resolc-x86_64-unknown-linux-musl -o resolc
+          chmod +x resolc
+          sudo mv resolc /usr/local/bin
 
       - name: Install Geth on macOS
         if: matrix.os == 'macos-14'
         run: |
           brew tap ethereum/ethereum
           brew install ethereum protobuf
+
+          brew install solidity
+
+          curl -sL https://github.com/paritytech/revive/releases/download/v0.3.0/resolc-universal-apple-darwin -o resolc
+          chmod +x resolc
+          sudo mv resolc /usr/local/bin
 
       - name: Machete
         uses: bnjbvr/cargo-machete@v0.7.1
 
@@ -143,5 +156,8 @@ jobs:
       - name: Check eth-rpc version
         run: eth-rpc --version
 
+      - name: Check resolc version
+        run: resolc --version
+
       - name: Test cargo workspace
         run: make test

Generated · +127 −15

@@ -339,6 +339,7 @@ dependencies = [
  "const-hex",
  "derive_more 2.0.1",
  "foldhash",
+ "getrandom 0.3.3",
  "hashbrown 0.15.3",
  "indexmap 2.10.0",
  "itoa",
@@ -2227,6 +2228,66 @@ dependencies = [
  "percent-encoding",
 ]
 
+[[package]]
+name = "foundry-compilers-artifacts"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2676d70082ed23680fe2d08c0b750d5f7f2438c6d946f1cb140a76c5e5e0392"
+dependencies = [
+ "foundry-compilers-artifacts-solc",
+ "foundry-compilers-artifacts-vyper",
+]
+
+[[package]]
+name = "foundry-compilers-artifacts-solc"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3ada94dc5946334bb08df574855ba345ab03ba8c6f233560c72c8d61fa9db80"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives",
+ "foundry-compilers-core",
+ "path-slash",
+ "regex",
+ "semver 1.0.26",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.12",
+ "tracing",
+ "yansi",
+]
+
+[[package]]
+name = "foundry-compilers-artifacts-vyper"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "372052af72652e375a6e7eed22179bd8935114e25e1c5a8cca7f00e8f20bd94c"
+dependencies = [
+ "alloy-json-abi",
+ "alloy-primitives",
+ "foundry-compilers-artifacts-solc",
+ "foundry-compilers-core",
+ "path-slash",
+ "semver 1.0.26",
+ "serde",
+]
+
+[[package]]
+name = "foundry-compilers-core"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf0962c46855979300f6526ed57f987ccf6a025c2b92ce574b281d9cb2ef666b"
+dependencies = [
+ "alloy-primitives",
+ "cfg-if",
+ "dunce",
+ "path-slash",
+ "semver 1.0.26",
+ "serde",
+ "serde_json",
+ "thiserror 2.0.12",
+]
+
 [[package]]
 name = "fs-err"
 version = "2.11.0"
@@ -2652,7 +2713,7 @@ dependencies = [
  "libc",
  "percent-encoding",
  "pin-project-lite",
- "socket2",
+ "socket2 0.5.10",
  "system-configuration",
  "tokio",
  "tower-service",
@@ -2892,6 +2953,17 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "io-uring"
+version = "0.7.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4"
+dependencies = [
+ "bitflags 2.9.1",
+ "cfg-if",
+ "libc",
+]
+
 [[package]]
 name = "ipnet"
 version = "2.11.0"
@@ -3456,6 +3528,12 @@ version = "1.0.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
 
+[[package]]
+name = "path-slash"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e91099d4268b0e11973f036e885d652fb0b21fedcf69738c627f94db6a44f42"
+
 [[package]]
 name = "pbkdf2"
 version = "0.12.2"
@@ -3902,9 +3980,7 @@ dependencies = [
  "base64",
  "bytes",
  "encoding_rs",
- "futures-channel",
  "futures-core",
- "futures-util",
  "h2",
  "http",
  "http-body",
@@ -3953,24 +4029,26 @@ name = "revive-dt-common"
 version = "0.1.0"
 dependencies = [
  "anyhow",
- "futures",
- "once_cell",
- "tokio",
- "tracing",
+ "semver 1.0.26",
 ]
 
 [[package]]
 name = "revive-dt-compiler"
 version = "0.1.0"
 dependencies = [
+ "alloy",
  "alloy-primitives",
  "anyhow",
+ "foundry-compilers-artifacts",
  "revive-common",
+ "revive-dt-common",
  "revive-dt-config",
  "revive-dt-solc-binaries",
  "revive-solc-json-interface",
  "semver 1.0.26",
+ "serde",
  "serde_json",
+ "tokio",
  "tracing",
 ]
 
@@ -3992,8 +4070,8 @@ dependencies = [
  "alloy",
  "anyhow",
  "clap",
+ "futures",
  "indexmap 2.10.0",
- "rayon",
  "revive-dt-common",
  "revive-dt-compiler",
  "revive-dt-config",
@@ -4001,9 +4079,9 @@ dependencies = [
  "revive-dt-node",
  "revive-dt-node-interaction",
  "revive-dt-report",
- "revive-solc-json-interface",
- "serde_json",
+ "semver 1.0.26",
  "temp-dir",
+ "tokio",
  "tracing",
  "tracing-subscriber",
 ]
@@ -4020,6 +4098,7 @@ dependencies = [
  "semver 1.0.26",
  "serde",
  "serde_json",
+ "tokio",
  "tracing",
 ]
 
@@ -4055,9 +4134,9 @@ name = "revive-dt-report"
 version = "0.1.0"
 dependencies = [
  "anyhow",
+ "revive-dt-compiler",
  "revive-dt-config",
  "revive-dt-format",
- "revive-solc-json-interface",
  "serde",
  "serde_json",
  "tracing",
@@ -4070,9 +4149,11 @@ dependencies = [
  "anyhow",
  "hex",
  "reqwest",
+ "revive-dt-common",
  "semver 1.0.26",
  "serde",
  "sha2 0.10.9",
+ "tokio",
  "tracing",
 ]
 
@@ -4167,6 +4248,9 @@ name = "rustc-hash"
 version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
+dependencies = [
+ "rand 0.8.5",
+]
 
 [[package]]
 name = "rustc-hex"
@@ -4626,6 +4710,15 @@ version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
 
+[[package]]
+name = "signal-hook-registry"
+version = "1.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410"
+dependencies = [
+ "libc",
+]
+
 [[package]]
 name = "signature"
 version = "2.2.0"
@@ -4670,6 +4763,16 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "socket2"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807"
+dependencies = [
+ "libc",
+ "windows-sys 0.59.0",
+]
+
 [[package]]
 name = "sp-application-crypto"
 version = "40.1.0"
@@ -5329,18 +5432,21 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 
 [[package]]
 name = "tokio"
-version = "1.45.1"
+version = "1.47.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779"
+checksum = "43864ed400b6043a4757a25c7a64a8efde741aed79a056a2fb348a406701bb35"
 dependencies = [
  "backtrace",
  "bytes",
+ "io-uring",
  "libc",
  "mio",
  "pin-project-lite",
- "socket2",
+ "signal-hook-registry",
+ "slab",
+ "socket2 0.6.0",
  "tokio-macros",
- "windows-sys 0.52.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
@@ -6217,6 +6323,12 @@ dependencies = [
  "tap",
 ]
 
+[[package]]
+name = "yansi"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049"
+
 [[package]]
 name = "yoke"
 version = "0.8.0"

+5 −3

@@ -26,11 +26,11 @@ alloy-primitives = "1.2.1"
 alloy-sol-types = "1.2.1"
 anyhow = "1.0"
 clap = { version = "4", features = ["derive"] }
+foundry-compilers-artifacts = { version = "0.18.0" }
 futures = { version = "0.3.31" }
 hex = "0.4.3"
-reqwest = { version = "0.12.15", features = ["blocking", "json"] }
+reqwest = { version = "0.12.15", features = ["json"] }
 once_cell = "1.21"
-rayon = { version = "1.10" }
 semver = { version = "1.0", features = ["serde"] }
 serde = { version = "1.0", default-features = false, features = ["derive"] }
 serde_json = { version = "1.0", default-features = false, features = [
@@ -42,8 +42,10 @@ sp-core = "36.1.0"
 sp-runtime = "41.1.0"
 temp-dir = { version = "0.1.16" }
 tempfile = "3.3"
-tokio = { version = "1", default-features = false, features = [
+tokio = { version = "1.47.0", default-features = false, features = [
     "rt-multi-thread",
+    "process",
+    "rt",
 ] }
 uuid = { version = "1.8", features = ["v4"] }
 tracing = "0.1.41"

@@ -10,7 +10,4 @@ rust-version.workspace = true
 
 [dependencies]
 anyhow = { workspace = true }
-futures = { workspace = true }
-tracing = { workspace = true }
-once_cell = { workspace = true }
-tokio = { workspace = true }
+semver = { workspace = true }

@@ -1,225 +0,0 @@
-//! The alloy crate __requires__ a tokio runtime.
-//! We contain any async rust right here.
-
-use std::{any::Any, panic::AssertUnwindSafe, pin::Pin, thread};
-
-use futures::FutureExt;
-use once_cell::sync::Lazy;
-use tokio::{
-    runtime::Builder,
-    sync::{mpsc::UnboundedSender, oneshot},
-};
-use tracing::Instrument;
-
-/// A blocking async executor.
-///
-/// This struct exposes the abstraction of a blocking async executor. It is a global and static
-/// executor which means that it doesn't require for new instances of it to be created, it's a
-/// singleton and can be accessed by any thread that wants to perform some async computation on the
-/// blocking executor thread.
-///
-/// The API of the blocking executor is created in a way so that it's very natural, simple to use,
-/// and unbounded to specific tasks or return types. The following is an example of using this
-/// executor to drive an async computation:
-///
-/// ```rust
-/// use revive_dt_common::concepts::*;
-///
-/// fn blocking_function() {
-///     let result = BlockingExecutor::execute(async move {
-///         tokio::time::sleep(std::time::Duration::from_secs(1)).await;
-///         0xFFu8
-///     })
-///     .expect("Computation failed");
-///
-///     assert_eq!(result, 0xFF);
-/// }
-/// ```
-///
-/// Users get to pass in their async tasks without needing to worry about putting them in a [`Box`],
-/// [`Pin`], needing to perform down-casting, or the internal channel mechanism used by the runtime.
-/// To the user, it just looks like a function that converts some async code into sync code.
-///
-/// This struct also handled panics that occur in the passed futures and converts them into errors
-/// that can be handled by the user. This is done to allow the executor to be robust.
-///
-/// Internally, the executor communicates with the tokio runtime thread through channels which carry
-/// the [`TaskMessage`] and the results of the execution.
-pub struct BlockingExecutor;
-
-impl BlockingExecutor {
-    pub fn execute<R>(future: impl Future<Output = R> + Send + 'static) -> Result<R, anyhow::Error>
-    where
-        R: Send + 'static,
-    {
-        // Note: The blocking executor is a singleton and therefore we store its state in a static
-        // so that it's assigned only once. Additionally, when we set the state of the executor we
-        // spawn the thread where the async runtime runs.
-        static STATE: Lazy<ExecutorState> = Lazy::new(|| {
-            tracing::trace!("Initializing the BlockingExecutor state");
-
-            // All communication with the tokio runtime thread happens over mspc channels where the
-            // producers here are the threads that want to run async tasks and the consumer here is
-            // the tokio runtime thread.
-            let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel::<TaskMessage>();
-
-            thread::spawn(move || {
-                tracing::info!(
-                    thread_id = ?std::thread::current().id(),
-                    "Starting async runtime thread"
-                );
-
-                let runtime = Builder::new_current_thread()
-                    .enable_all()
-                    .build()
-                    .expect("Failed to create the async runtime");
-
-                runtime.block_on(async move {
-                    while let Some(TaskMessage {
-                        future: task,
-                        response_tx: response_channel,
-                    }) = rx.recv().await
-                    {
-                        tracing::trace!("Received a new future to execute");
-                        tokio::spawn(async move {
-                            // One of the things that the blocking executor does is that it allows
-                            // us to catch panics if they occur. By wrapping the given future in an
-                            // AssertUnwindSafe::catch_unwind we are able to catch all panic unwinds
-                            // in the given future and convert them into errors.
-                            let task = AssertUnwindSafe(task).catch_unwind();
-
-                            let result = task.await;
-                            let _ = response_channel.send(result);
-                        });
-                    }
-                })
-            });
-
-            ExecutorState { tx }
-        });
-
-        // We need to perform blocking synchronous communication between the current thread and the
-        // tokio runtime thread with the result of the async computation and the oneshot channels
-        // from tokio allows us to do that. The sender side of the channel will be given to the
-        // tokio runtime thread to send the result when the computation is completed and the receive
-        // side of the channel will be kept with this thread to await for the response of the async
-        // task to come back.
-        let (response_tx, response_rx) =
-            oneshot::channel::<Result<Box<dyn Any + Send>, Box<dyn Any + Send>>>();
-
-        // The tokio runtime thread expects a Future<Output = Box<dyn Any + Send>> + Send to be
-        // sent to it to execute. However, this function has a typed Future<Output = R> + Send and
-        // therefore we need to change the type of the future to fit what the runtime thread expects
-        // in the task message. In doing this conversion, we lose some of the type information since
-        // we're converting R => dyn Any. However, we will perform down-casting on the result to
-        // convert it back into R.
-        let future = Box::pin(
-            async move { Box::new(future.await) as Box<dyn Any + Send> }.in_current_span(),
-        );
-
-        let task = TaskMessage::new(future, response_tx);
-        if let Err(error) = STATE.tx.send(task) {
-            tracing::error!(?error, "Failed to send the task to the blocking executor");
-            anyhow::bail!("Failed to send the task to the blocking executor: {error:?}")
-        }
-
-        let result = match response_rx.blocking_recv() {
-            Ok(result) => result,
-            Err(error) => {
-                tracing::error!(
-                    ?error,
-                    "Failed to get the response from the blocking executor"
-                );
-                anyhow::bail!("Failed to get the response from the blocking executor: {error:?}")
-            }
-        };
-
-        let result = match result {
-            Ok(result) => result,
-            Err(error) => {
-                tracing::error!(?error, "An error occurred when running the async task");
-                anyhow::bail!("An error occurred when running the async task: {error:?}")
-            }
-        };
-
-        Ok(*result
-            .downcast::<R>()
-            .expect("An error occurred when downcasting into R. This is a bug"))
-    }
-}
-
-/// Represents the state of the async runtime. This runtime is designed to be a singleton runtime
-/// which means that in the current running program there's just a single thread that has an async
-/// runtime.
-struct ExecutorState {
-    /// The sending side of the task messages channel. This is used by all of the other threads to
-    /// communicate with the async runtime thread.
-    tx: UnboundedSender<TaskMessage>,
-}
-
-/// Represents a message that contains an asynchronous task that's to be executed by the runtime
-/// as well as a way for the runtime to report back on the result of the execution.
-struct TaskMessage {
-    /// The task that's being requested to run. This is a future that returns an object that does
-    /// implement [`Any`] and [`Send`] to allow it to be sent between the requesting thread and the
-    /// async thread.
-    future: Pin<Box<dyn Future<Output = Box<dyn Any + Send>> + Send>>,
-
-    /// A one shot sender channel where the sender of the task is expecting to hear back on the
-    /// result of the task.
-    response_tx: oneshot::Sender<Result<Box<dyn Any + Send>, Box<dyn Any + Send>>>,
-}
-
-impl TaskMessage {
-    pub fn new(
-        future: Pin<Box<dyn Future<Output = Box<dyn Any + Send>> + Send>>,
-        response_tx: oneshot::Sender<Result<Box<dyn Any + Send>, Box<dyn Any + Send>>>,
-    ) -> Self {
-        Self {
-            future,
-            response_tx,
-        }
-    }
-}
-
-#[cfg(test)]
-mod test {
-    use super::*;
-
-    #[test]
-    fn simple_future_works() {
-        // Act
-        let result = BlockingExecutor::execute(async move {
-            tokio::time::sleep(std::time::Duration::from_secs(1)).await;
-            0xFFu8
-        })
-        .unwrap();
-
-        // Assert
-        assert_eq!(result, 0xFFu8);
-    }
-
-    #[test]
-    #[allow(unreachable_code, clippy::unreachable)]
-    fn panics_in_futures_are_caught() {
-        // Act
-        let result = BlockingExecutor::execute(async move {
-            panic!(
-                "If this panic causes, well, a panic, then this is an issue. If it's caught then all good!"
-            );
-            0xFFu8
-        });
-
-        // Assert
-        assert!(result.is_err());
-
-        // Act
-        let result = BlockingExecutor::execute(async move {
-            tokio::time::sleep(std::time::Duration::from_secs(1)).await;
-            0xFFu8
-        })
-        .unwrap();
-
-        // Assert
-        assert_eq!(result, 0xFFu8)
-    }
-}

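With the workspace now async end to end (the reqwest `blocking` feature and the `futures`/`once_cell`/`tokio` sync-bridge dependencies disappear in the Cargo diffs above), this hand-rolled executor loses its callers. Worth noting: the panic isolation it implemented with `AssertUnwindSafe(..).catch_unwind()` is also available directly from `tokio::spawn`, whose `JoinHandle` resolves to `Err(JoinError)` when the task panics. A minimal sketch, not code from this PR:

```rust
use tokio::task::JoinError;

/// Runs a future as its own task and converts a panic inside it into an
/// error, mirroring the guarantee BlockingExecutor::execute gave callers.
async fn run_isolated<R: Send + 'static>(
    future: impl std::future::Future<Output = R> + Send + 'static,
) -> Result<R, JoinError> {
    tokio::spawn(future).await
}

#[allow(unreachable_code)]
#[tokio::main]
async fn main() {
    // A normal future completes and hands its value back.
    let ok = run_isolated(async { 0xFFu8 }).await;
    assert_eq!(ok.unwrap(), 0xFF);

    // A panicking future surfaces as Err(JoinError) instead of unwinding
    // into the caller.
    let panicked = run_isolated(async {
        panic!("converted into a JoinError rather than crossing the await");
        0xFFu8
    })
    .await;
    assert!(panicked.is_err() && panicked.unwrap_err().is_panic());
}
```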
@@ -1,3 +0,0 @@
-mod blocking_executor;
-
-pub use blocking_executor::*;

@@ -0,0 +1,22 @@
+use std::{
+    fs::{read_dir, remove_dir_all, remove_file},
+    path::Path,
+};
+
+use anyhow::Result;
+
+/// This method clears the passed directory of all of the files and directories contained within
+/// without deleting the directory.
+pub fn clear_directory(path: impl AsRef<Path>) -> Result<()> {
+    for entry in read_dir(path.as_ref())? {
+        let entry = entry?;
+        let entry_path = entry.path();
+
+        if entry_path.is_file() {
+            remove_file(entry_path)?
+        } else {
+            remove_dir_all(entry_path)?
+        }
+    }
+    Ok(())
+}

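A hypothetical usage sketch of the new helper (the re-export path follows from the `pub mod fs;` and `pub use clear_dir::*;` lines elsewhere in this diff; the scratch directory name is invented):

```rust
use revive_dt_common::fs::clear_directory;

fn main() -> anyhow::Result<()> {
    // Made-up scratch location; the point is that the directory entry
    // itself survives while everything inside it is deleted.
    let scratch = std::env::temp_dir().join("revive-dt-scratch");
    std::fs::create_dir_all(&scratch)?;
    std::fs::write(scratch.join("stale.json"), b"{}")?;

    clear_directory(&scratch)?;
    assert!(scratch.is_dir());
    assert_eq!(std::fs::read_dir(&scratch)?.count(), 0);
    Ok(())
}
```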
@@ -0,0 +1,3 @@
+mod clear_dir;
+
+pub use clear_dir::*;

@@ -1,6 +1,7 @@
 //! This crate provides common concepts, functionality, types, macros, and more that other crates in
 //! the workspace can benefit from.
 
-pub mod concepts;
+pub mod fs;
 pub mod iterators;
 pub mod macros;
+pub mod types;

@@ -0,0 +1,3 @@
+mod version_or_requirement;
+
+pub use version_or_requirement::*;

@@ -0,0 +1,41 @@
+use semver::{Version, VersionReq};
+
+#[derive(Clone, Debug)]
+pub enum VersionOrRequirement {
+    Version(Version),
+    Requirement(VersionReq),
+}
+
+impl From<Version> for VersionOrRequirement {
+    fn from(value: Version) -> Self {
+        Self::Version(value)
+    }
+}
+
+impl From<VersionReq> for VersionOrRequirement {
+    fn from(value: VersionReq) -> Self {
+        Self::Requirement(value)
+    }
+}
+
+impl TryFrom<VersionOrRequirement> for Version {
+    type Error = anyhow::Error;
+
+    fn try_from(value: VersionOrRequirement) -> Result<Self, Self::Error> {
+        let VersionOrRequirement::Version(version) = value else {
+            anyhow::bail!("Version or requirement was not a version");
+        };
+        Ok(version)
+    }
+}
+
+impl TryFrom<VersionOrRequirement> for VersionReq {
+    type Error = anyhow::Error;
+
+    fn try_from(value: VersionOrRequirement) -> Result<Self, Self::Error> {
+        let VersionOrRequirement::Requirement(requirement) = value else {
+            anyhow::bail!("Version or requirement was not a requirement");
+        };
+        Ok(requirement)
+    }
+}

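The new type lets callers ask for either an exact compiler version or a semver range through a single parameter (see the reworked `get_compiler_executable` later in this diff). A small sketch using only the `From`/`TryFrom` impls above together with the semver crate:

```rust
use revive_dt_common::types::VersionOrRequirement;
use semver::{Version, VersionReq};

fn main() -> anyhow::Result<()> {
    // Both an exact version and a requirement convert into the same type.
    let exact: VersionOrRequirement = Version::parse("0.8.29")?.into();
    let range: VersionOrRequirement = VersionReq::parse(">=0.8, <0.9")?.into();

    // Converting back is fallible and fails on the wrong variant.
    let version: Version = exact.try_into()?;
    assert!(VersionReq::try_from(range)?.matches(&version));
    Ok(())
}
```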
@@ -10,12 +10,17 @@ rust-version.workspace = true
 
 [dependencies]
 revive-solc-json-interface = { workspace = true }
+revive-dt-common = { workspace = true }
 revive-dt-config = { workspace = true }
 revive-dt-solc-binaries = { workspace = true }
 revive-common = { workspace = true }
 
+alloy = { workspace = true }
 alloy-primitives = { workspace = true }
 anyhow = { workspace = true }
+foundry-compilers-artifacts = { workspace = true }
 semver = { workspace = true }
+serde = { workspace = true }
 serde_json = { workspace = true }
 tracing = { workspace = true }
+tokio = { workspace = true }

+81 −114

@@ -4,21 +4,20 @@
 //! - Polkadot revive Wasm compiler
 
 use std::{
+    collections::HashMap,
     fs::read_to_string,
     hash::Hash,
     path::{Path, PathBuf},
 };
 
+use alloy::json_abi::JsonAbi;
 use alloy_primitives::Address;
-use revive_dt_config::Arguments;
+use semver::Version;
+use serde::{Deserialize, Serialize};
 
 use revive_common::EVMVersion;
-use revive_solc_json_interface::{
-    SolcStandardJsonInput, SolcStandardJsonInputLanguage, SolcStandardJsonInputSettings,
-    SolcStandardJsonInputSettingsOptimizer, SolcStandardJsonInputSettingsSelection,
-    SolcStandardJsonOutput,
-};
-use semver::Version;
+use revive_dt_common::types::VersionOrRequirement;
+use revive_dt_config::Arguments;
 
 pub mod revive_js;
 pub mod revive_resolc;

@@ -32,63 +31,44 @@ pub trait SolidityCompiler {
     /// The low-level compiler interface.
     fn build(
         &self,
-        input: CompilerInput<Self::Options>,
-    ) -> anyhow::Result<CompilerOutput<Self::Options>>;
+        input: CompilerInput,
+        additional_options: Self::Options,
+    ) -> impl Future<Output = anyhow::Result<CompilerOutput>>;
 
     fn new(solc_executable: PathBuf) -> Self;
 
-    fn get_compiler_executable(config: &Arguments, version: Version) -> anyhow::Result<PathBuf>;
+    fn get_compiler_executable(
+        config: &Arguments,
+        version: impl Into<VersionOrRequirement>,
+    ) -> impl Future<Output = anyhow::Result<PathBuf>>;
+
+    fn version(&self) -> anyhow::Result<Version>;
 }
 
 /// The generic compilation input configuration.
-#[derive(Debug)]
-pub struct CompilerInput<T: PartialEq + Eq + Hash> {
-    pub extra_options: T,
-    pub input: SolcStandardJsonInput,
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct CompilerInput {
+    pub enable_optimization: Option<bool>,
+    pub via_ir: Option<bool>,
+    pub evm_version: Option<EVMVersion>,
     pub allow_paths: Vec<PathBuf>,
     pub base_path: Option<PathBuf>,
+    pub sources: HashMap<PathBuf, String>,
+    pub libraries: HashMap<PathBuf, HashMap<String, Address>>,
 }
 
 /// The generic compilation output configuration.
-#[derive(Debug)]
-pub struct CompilerOutput<T: PartialEq + Eq + Hash> {
-    /// The solc standard JSON input.
-    pub input: CompilerInput<T>,
-    /// The produced solc standard JSON output.
-    pub output: SolcStandardJsonOutput,
-    /// The error message in case the compiler returns abnormally.
-    pub error: Option<String>,
+#[derive(Debug, Clone, Default, Serialize, Deserialize)]
+pub struct CompilerOutput {
+    /// The compiled contracts. The bytecode of the contract is kept as a string incase linking is
+    /// required and the compiled source has placeholders.
+    pub contracts: HashMap<PathBuf, HashMap<String, (String, JsonAbi)>>,
 }
 
-impl<T> PartialEq for CompilerInput<T>
-where
-    T: PartialEq + Eq + Hash,
-{
-    fn eq(&self, other: &Self) -> bool {
-        let self_input = serde_json::to_vec(&self.input).unwrap_or_default();
-        let other_input = serde_json::to_vec(&self.input).unwrap_or_default();
-        self.extra_options.eq(&other.extra_options) && self_input == other_input
-    }
-}
-
-impl<T> Eq for CompilerInput<T> where T: PartialEq + Eq + Hash {}
-
-impl<T> Hash for CompilerInput<T>
-where
-    T: PartialEq + Eq + Hash,
-{
-    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
-        self.extra_options.hash(state);
-        state.write(&serde_json::to_vec(&self.input).unwrap_or_default());
-    }
-}
-
-/// A generic builder style interface for configuring all compiler options.
+/// A generic builder style interface for configuring the supported compiler options.
 pub struct Compiler<T: SolidityCompiler> {
-    input: SolcStandardJsonInput,
-    extra_options: T::Options,
-    allow_paths: Vec<PathBuf>,
-    base_path: Option<PathBuf>,
+    input: CompilerInput,
+    additional_options: T::Options,
 }
 
 impl Default for Compiler<solc::Solc> {

@@ -103,93 +83,80 @@ where
 {
     pub fn new() -> Self {
         Self {
-            input: SolcStandardJsonInput {
-                language: SolcStandardJsonInputLanguage::Solidity,
-                sources: Default::default(),
-                settings: SolcStandardJsonInputSettings::new(
-                    None,
-                    Default::default(),
-                    None,
-                    SolcStandardJsonInputSettingsSelection::new_required(),
-                    SolcStandardJsonInputSettingsOptimizer::new(
-                        false,
-                        None,
-                        &Version::new(0, 0, 0),
-                        false,
-                    ),
-                    None,
-                    None,
-                ),
-            },
-            extra_options: Default::default(),
-            allow_paths: Default::default(),
-            base_path: None,
+            input: CompilerInput {
+                enable_optimization: Default::default(),
+                via_ir: Default::default(),
+                evm_version: Default::default(),
+                allow_paths: Default::default(),
+                base_path: Default::default(),
+                sources: Default::default(),
+                libraries: Default::default(),
+            },
+            additional_options: T::Options::default(),
         }
     }
 
-    pub fn solc_optimizer(mut self, enabled: bool) -> Self {
-        self.input.settings.optimizer.enabled = enabled;
+    pub fn with_optimization(mut self, value: impl Into<Option<bool>>) -> Self {
+        self.input.enable_optimization = value.into();
         self
     }
 
-    pub fn with_source(mut self, path: &Path) -> anyhow::Result<Self> {
+    pub fn with_via_ir(mut self, value: impl Into<Option<bool>>) -> Self {
+        self.input.via_ir = value.into();
+        self
+    }
+
+    pub fn with_evm_version(mut self, version: impl Into<Option<EVMVersion>>) -> Self {
+        self.input.evm_version = version.into();
+        self
+    }
+
+    pub fn with_allow_path(mut self, path: impl AsRef<Path>) -> Self {
+        self.input.allow_paths.push(path.as_ref().into());
+        self
+    }
+
+    pub fn with_base_path(mut self, path: impl Into<Option<PathBuf>>) -> Self {
+        self.input.base_path = path.into();
+        self
+    }
+
+    pub fn with_source(mut self, path: impl AsRef<Path>) -> anyhow::Result<Self> {
         self.input
             .sources
-            .insert(path.display().to_string(), read_to_string(path)?.into());
+            .insert(path.as_ref().to_path_buf(), read_to_string(path.as_ref())?);
         Ok(self)
     }
 
-    pub fn evm_version(mut self, evm_version: EVMVersion) -> Self {
-        self.input.settings.evm_version = Some(evm_version);
-        self
-    }
-
-    pub fn extra_options(mut self, extra_options: T::Options) -> Self {
-        self.extra_options = extra_options;
-        self
-    }
-
-    pub fn allow_path(mut self, path: PathBuf) -> Self {
-        self.allow_paths.push(path);
-        self
-    }
-
-    pub fn base_path(mut self, base_path: PathBuf) -> Self {
-        self.base_path = Some(base_path);
-        self
-    }
-
     pub fn with_library(
         mut self,
-        scope: impl AsRef<Path>,
-        library_ident: impl AsRef<str>,
-        library_address: Address,
+        path: impl AsRef<Path>,
+        name: impl AsRef<str>,
+        address: Address,
     ) -> Self {
         self.input
-            .settings
             .libraries
-            .get_or_insert_with(Default::default)
-            .entry(scope.as_ref().display().to_string())
+            .entry(path.as_ref().to_path_buf())
             .or_default()
-            .insert(
-                library_ident.as_ref().to_owned(),
-                library_address.to_string(),
-            );
-
+            .insert(name.as_ref().into(), address);
         self
     }
 
-    pub fn try_build(self, solc_path: PathBuf) -> anyhow::Result<CompilerOutput<T::Options>> {
-        T::new(solc_path).build(CompilerInput {
-            extra_options: self.extra_options,
-            input: self.input,
-            allow_paths: self.allow_paths,
-            base_path: self.base_path,
-        })
+    pub fn with_additional_options(mut self, options: impl Into<T::Options>) -> Self {
+        self.additional_options = options.into();
+        self
     }
 
-    /// Returns the compiler JSON input.
-    pub fn input(&self) -> SolcStandardJsonInput {
+    pub async fn try_build(
+        self,
+        compiler_path: impl AsRef<Path>,
+    ) -> anyhow::Result<CompilerOutput> {
+        T::new(compiler_path.as_ref().to_path_buf())
+            .build(self.input, self.additional_options)
+            .await
+    }
+
+    pub fn input(&self) -> CompilerInput {
         self.input.clone()
     }
 }

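Putting the reworked builder together, a hedged end-to-end sketch (the import paths and the contract path are illustrative assumptions; the method names, the async `try_build`, and `get_compiler_executable` taking a version all come from this diff):

```rust
use revive_dt_compiler::{revive_resolc::Resolc, Compiler, CompilerOutput, SolidityCompiler};
use revive_dt_config::Arguments;

async fn compile_flipper() -> anyhow::Result<CompilerOutput> {
    // Resolve a resolc binary first; the version argument now accepts
    // anything convertible into VersionOrRequirement.
    let resolc = Resolc::get_compiler_executable(
        &Arguments::default(),
        semver::Version::new(0, 3, 0),
    )
    .await?;

    Compiler::<Resolc>::new()
        .with_optimization(true)
        .with_via_ir(true) // resolc ignores this and always compiles via IR
        .with_source("contracts/Flipper.sol")? // hypothetical source path
        .try_build(resolc)
        .await
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let output = compile_flipper().await?;
    println!("compiled {} source file(s)", output.contracts.len());
    Ok(())
}
```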
@@ -6,9 +6,20 @@ use std::{
     process::{Command, Stdio},
 };
 
-use crate::{CompilerInput, CompilerOutput, SolidityCompiler};
+use revive_dt_common::types::VersionOrRequirement;
 use revive_dt_config::Arguments;
-use revive_solc_json_interface::SolcStandardJsonOutput;
+use revive_solc_json_interface::{
+    SolcStandardJsonInput, SolcStandardJsonInputLanguage, SolcStandardJsonInputSettings,
+    SolcStandardJsonInputSettingsOptimizer, SolcStandardJsonInputSettingsSelection,
+    SolcStandardJsonOutput,
+};
+
+use crate::{CompilerInput, CompilerOutput, SolidityCompiler};
+
+use alloy::json_abi::JsonAbi;
+use anyhow::Context;
+use semver::Version;
+use tokio::{io::AsyncWriteExt, process::Command as AsyncCommand};
 
 // TODO: I believe that we need to also pass the solc compiler to resolc so that resolc uses the
 // specified solc compiler. I believe that currently we completely ignore the specified solc binary
@@ -25,24 +36,71 @@ impl SolidityCompiler for Resolc {
     type Options = Vec<String>;
 
     #[tracing::instrument(level = "debug", ret)]
-    fn build(
+    async fn build(
         &self,
-        input: CompilerInput<Self::Options>,
-    ) -> anyhow::Result<CompilerOutput<Self::Options>> {
-        let mut command = Command::new(&self.resolc_path);
+        CompilerInput {
+            enable_optimization,
+            // Ignored and not honored since this is required for the resolc compilation.
+            via_ir: _via_ir,
+            evm_version,
+            allow_paths,
+            base_path,
+            sources,
+            libraries,
+        }: CompilerInput,
+        additional_options: Self::Options,
+    ) -> anyhow::Result<CompilerOutput> {
+        let input = SolcStandardJsonInput {
+            language: SolcStandardJsonInputLanguage::Solidity,
+            sources: sources
+                .into_iter()
+                .map(|(path, source)| (path.display().to_string(), source.into()))
+                .collect(),
+            settings: SolcStandardJsonInputSettings {
+                evm_version,
+                libraries: Some(
+                    libraries
+                        .into_iter()
+                        .map(|(source_code, libraries_map)| {
+                            (
+                                source_code.display().to_string(),
+                                libraries_map
+                                    .into_iter()
+                                    .map(|(library_ident, library_address)| {
+                                        (library_ident, library_address.to_string())
+                                    })
+                                    .collect(),
+                            )
+                        })
+                        .collect(),
+                ),
+                remappings: None,
+                output_selection: Some(SolcStandardJsonInputSettingsSelection::new_required()),
+                via_ir: Some(true),
+                optimizer: SolcStandardJsonInputSettingsOptimizer::new(
+                    enable_optimization.unwrap_or(false),
+                    None,
+                    &Version::new(0, 0, 0),
+                    false,
+                ),
+                metadata: None,
+                polkavm: None,
+            },
+        };
+
+        let mut command = AsyncCommand::new(&self.resolc_path);
         command
             .stdin(Stdio::piped())
             .stdout(Stdio::piped())
             .stderr(Stdio::piped())
             .arg("--standard-json");
 
-        if let Some(ref base_path) = input.base_path {
+        if let Some(ref base_path) = base_path {
             command.arg("--base-path").arg(base_path);
         }
-        if !input.allow_paths.is_empty() {
+        if !allow_paths.is_empty() {
             command.arg("--allow-paths").arg(
-                input
-                    .allow_paths
+                allow_paths
                     .iter()
                     .map(|path| path.display().to_string())
                     .collect::<Vec<_>>()

@@ -52,102 +110,96 @@ impl SolidityCompiler for Resolc {
         let mut child = command.spawn()?;
 
         let stdin_pipe = child.stdin.as_mut().expect("stdin must be piped");
-        serde_json::to_writer(stdin_pipe, &input.input)?;
+        let serialized_input = serde_json::to_vec(&input)?;
+        stdin_pipe.write_all(&serialized_input).await?;
 
-        let json_in = serde_json::to_string_pretty(&input.input)?;
-
-        let output = child.wait_with_output()?;
+        let output = child.wait_with_output().await?;
         let stdout = output.stdout;
         let stderr = output.stderr;
 
         if !output.status.success() {
+            let json_in = serde_json::to_string_pretty(&input)?;
             let message = String::from_utf8_lossy(&stderr);
             tracing::error!(
-                "resolc failed exit={} stderr={} JSON-in={} ",
-                output.status,
-                &message,
-                json_in,
+                status = %output.status,
+                message = %message,
+                json_input = json_in,
+                "Compilation using resolc failed"
             );
-            return Ok(CompilerOutput {
-                input,
-                output: Default::default(),
-                error: Some(message.into()),
-            });
+            anyhow::bail!("Compilation failed with an error: {message}");
         }
 
-        let mut parsed =
-            serde_json::from_slice::<SolcStandardJsonOutput>(&stdout).map_err(|e| {
+        let parsed = serde_json::from_slice::<SolcStandardJsonOutput>(&stdout).map_err(|e| {
             anyhow::anyhow!(
                 "failed to parse resolc JSON output: {e}\nstderr: {}",
                 String::from_utf8_lossy(&stderr)
             )
         })?;
 
-        // Detecting if the compiler output contained errors and reporting them through logs and
-        // errors instead of returning the compiler output that might contain errors.
-        for error in parsed.errors.iter().flatten() {
-            if error.severity == "error" {
-                tracing::error!(?error, ?input, "Encountered an error in the compilation");
-                anyhow::bail!("Encountered an error in the compilation: {error}")
-            }
-        }
-
-        // We need to do some post processing on the output to make it in the same format that solc
-        // outputs. More specifically, for each contract, the `.metadata` field should be replaced
-        // with the `.metadata.solc_metadata` field which contains the ABI and other information
-        // about the compiled contracts. We do this because we do not want any downstream logic to
-        // need to differentiate between which compiler is being used when extracting the ABI of the
-        // contracts.
-        if let Some(ref mut contracts) = parsed.contracts {
-            for (contract_path, contracts_map) in contracts.iter_mut() {
-                for (contract_name, contract_info) in contracts_map.iter_mut() {
-                    let Some(metadata) = contract_info.metadata.take() else {
-                        continue;
-                    };
-
-                    // Get the `solc_metadata` in the metadata of the contract.
-                    let Some(solc_metadata) = metadata
-                        .get("solc_metadata")
-                        .and_then(|metadata| metadata.as_str())
-                    else {
-                        tracing::error!(
-                            contract_path,
-                            contract_name,
-                            metadata = serde_json::to_string(&metadata).unwrap(),
-                            "Encountered a contract compiled with resolc that has no solc_metadata"
-                        );
-                        anyhow::bail!(
-                            "Contract {} compiled with resolc that has no solc_metadata",
-                            contract_name
-                        );
-                    };
-
-                    // Replace the original metadata with the new solc_metadata.
-                    contract_info.metadata =
-                        Some(serde_json::Value::String(solc_metadata.to_string()));
-                }
-            }
-        }
-
         tracing::debug!(
             output = %serde_json::to_string(&parsed).unwrap(),
             "Compiled successfully"
         );
 
-        Ok(CompilerOutput {
-            input,
-            output: parsed,
-            error: None,
-        })
+        // Detecting if the compiler output contained errors and reporting them through logs and
+        // errors instead of returning the compiler output that might contain errors.
+        for error in parsed.errors.iter().flatten() {
+            if error.severity == "error" {
+                tracing::error!(
+                    ?error,
+                    ?input,
+                    output = %serde_json::to_string(&parsed).unwrap(),
+                    "Encountered an error in the compilation"
+                );
+                anyhow::bail!("Encountered an error in the compilation: {error}")
+            }
+        }
+
+        let Some(contracts) = parsed.contracts else {
+            anyhow::bail!("Unexpected error - resolc output doesn't have a contracts section");
+        };
+
+        let mut compiler_output = CompilerOutput::default();
+        for (source_path, contracts) in contracts.into_iter() {
+            let source_path = PathBuf::from(source_path).canonicalize()?;
+
+            let map = compiler_output.contracts.entry(source_path).or_default();
+            for (contract_name, contract_information) in contracts.into_iter() {
+                let bytecode = contract_information
+                    .evm
+                    .and_then(|evm| evm.bytecode.clone())
+                    .context("Unexpected - Contract compiled with resolc has no bytecode")?;
+                let abi = contract_information
+                    .metadata
+                    .as_ref()
+                    .and_then(|metadata| metadata.as_object())
+                    .and_then(|metadata| metadata.get("solc_metadata"))
+                    .and_then(|solc_metadata| solc_metadata.as_str())
+                    .and_then(|metadata| serde_json::from_str::<serde_json::Value>(metadata).ok())
+                    .and_then(|metadata| {
+                        metadata.get("output").and_then(|output| {
+                            output
+                                .get("abi")
+                                .and_then(|abi| serde_json::from_value::<JsonAbi>(abi.clone()).ok())
+                        })
+                    })
+                    .context(
+                        "Unexpected - Failed to get the ABI for a contract compiled with resolc",
+                    )?;
+                map.insert(contract_name, (bytecode.object, abi));
+            }
+        }
+
+        Ok(compiler_output)
     }
 
     fn new(resolc_path: PathBuf) -> Self {
         Resolc { resolc_path }
     }
 
-    fn get_compiler_executable(
+    async fn get_compiler_executable(
         config: &Arguments,
-        _version: semver::Version,
+        _version: impl Into<VersionOrRequirement>,
     ) -> anyhow::Result<PathBuf> {
         if !config.resolc.as_os_str().is_empty() {
             return Ok(config.resolc.clone());

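The ABI extraction above leans on one resolc quirk: `metadata.solc_metadata` is a stringified solc metadata JSON whose `output.abi` field holds the ABI. A standalone illustration of that nesting, with a sample shape assumed rather than taken from actual resolc output:

```rust
fn main() {
    // solc_metadata is itself a JSON *string*, so it must be parsed a
    // second time before output.abi can be reached.
    let contract_metadata = serde_json::json!({
        "solc_metadata": r#"{"output":{"abi":[]}}"#
    });

    let abi = contract_metadata
        .get("solc_metadata")
        .and_then(|m| m.as_str())
        .and_then(|m| serde_json::from_str::<serde_json::Value>(m).ok())
        .and_then(|m| m.get("output").and_then(|o| o.get("abi")).cloned())
        .expect("sample metadata carries an ABI");

    assert!(abi.is_array());
}
```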
@@ -155,4 +207,47 @@ impl SolidityCompiler for Resolc {
 
         Ok(PathBuf::from("resolc"))
     }
+
+    fn version(&self) -> anyhow::Result<semver::Version> {
+        // Logic for parsing the resolc version from the following string:
+        // Solidity frontend for the revive compiler version 0.3.0+commit.b238913.llvm-18.1.8
+
+        let output = Command::new(self.resolc_path.as_path())
+            .arg("--version")
+            .stdout(Stdio::piped())
+            .spawn()?
+            .wait_with_output()?
+            .stdout;
+        let output = String::from_utf8_lossy(&output);
+        let version_string = output
+            .split("version ")
+            .nth(1)
+            .context("Version parsing failed")?
+            .split("+")
+            .next()
+            .context("Version parsing failed")?;
+
+        Version::parse(version_string).map_err(Into::into)
+    }
 }
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    #[tokio::test]
+    async fn compiler_version_can_be_obtained() {
+        // Arrange
+        let args = Arguments::default();
+        let path = Resolc::get_compiler_executable(&args, Version::new(0, 7, 6))
+            .await
+            .unwrap();
+        let compiler = Resolc::new(path);
+
+        // Act
+        let version = compiler.version();
+
+        // Assert
+        let _ = version.expect("Failed to get version");
+    }
+}

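The `version()` parsing above can be checked in isolation against the banner quoted in its own comment:

```rust
fn main() {
    let banner =
        "Solidity frontend for the revive compiler version 0.3.0+commit.b238913.llvm-18.1.8";

    // Split on "version " and take what follows, then drop the +commit and
    // LLVM build-metadata suffix before handing the rest to semver.
    let version_string = banner
        .split("version ")
        .nth(1)
        .and_then(|rest| rest.split('+').next())
        .expect("banner contains a version");

    assert_eq!(version_string, "0.3.0");
    assert_eq!(
        semver::Version::parse(version_string).unwrap(),
        semver::Version::new(0, 3, 0)
    );
}
```

Splitting on `"version "` keeps the parse tolerant of the wording before the number, while the `'+'` split discards build metadata that `semver::Version::parse` would otherwise have to carry along.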
+167 −29

@@ -6,10 +6,22 @@ use std::{
     process::{Command, Stdio},
 };
 
-use crate::{CompilerInput, CompilerOutput, SolidityCompiler};
+use revive_dt_common::types::VersionOrRequirement;
 use revive_dt_config::Arguments;
 use revive_dt_solc_binaries::download_solc;
-use revive_solc_json_interface::SolcStandardJsonOutput;
+
+use crate::{CompilerInput, CompilerOutput, SolidityCompiler};
+
+use anyhow::Context;
+use foundry_compilers_artifacts::{
+    output_selection::{
+        BytecodeOutputSelection, ContractOutputSelection, EvmOutputSelection, OutputSelection,
+    },
+    solc::CompilerOutput as SolcOutput,
+    solc::*,
+};
+use semver::Version;
+use tokio::{io::AsyncWriteExt, process::Command as AsyncCommand};
 
 #[derive(Debug)]
 pub struct Solc {

@@ -20,24 +32,78 @@ impl SolidityCompiler for Solc {
     type Options = ();
 
     #[tracing::instrument(level = "debug", ret)]
-    fn build(
+    async fn build(
         &self,
-        input: CompilerInput<Self::Options>,
-    ) -> anyhow::Result<CompilerOutput<Self::Options>> {
-        let mut command = Command::new(&self.solc_path);
+        CompilerInput {
+            enable_optimization,
+            via_ir,
+            evm_version,
+            allow_paths,
+            base_path,
+            sources,
+            libraries,
+        }: CompilerInput,
+        _: Self::Options,
+    ) -> anyhow::Result<CompilerOutput> {
+        let input = SolcInput {
+            language: SolcLanguage::Solidity,
+            sources: Sources(
+                sources
+                    .into_iter()
+                    .map(|(source_path, source_code)| (source_path, Source::new(source_code)))
+                    .collect(),
+            ),
+            settings: Settings {
+                optimizer: Optimizer {
+                    enabled: enable_optimization,
+                    details: Some(Default::default()),
+                    ..Default::default()
+                },
+                output_selection: OutputSelection::common_output_selection(
+                    [
+                        ContractOutputSelection::Abi,
+                        ContractOutputSelection::Evm(EvmOutputSelection::ByteCode(
+                            BytecodeOutputSelection::Object,
+                        )),
+                    ]
+                    .into_iter()
+                    .map(|item| item.to_string()),
+                ),
+                evm_version: evm_version.map(|version| version.to_string().parse().unwrap()),
+                via_ir,
+                libraries: Libraries {
+                    libs: libraries
+                        .into_iter()
+                        .map(|(file_path, libraries)| {
+                            (
+                                file_path,
+                                libraries
+                                    .into_iter()
+                                    .map(|(library_name, library_address)| {
+                                        (library_name, library_address.to_string())
+                                    })
+                                    .collect(),
+                            )
+                        })
+                        .collect(),
+                },
+                ..Default::default()
+            },
+        };
+
+        let mut command = AsyncCommand::new(&self.solc_path);
         command
             .stdin(Stdio::piped())
             .stdout(Stdio::piped())
             .stderr(Stdio::piped())
             .arg("--standard-json");
 
-        if let Some(ref base_path) = input.base_path {
+        if let Some(ref base_path) = base_path {
             command.arg("--base-path").arg(base_path);
         }
-        if !input.allow_paths.is_empty() {
+        if !allow_paths.is_empty() {
             command.arg("--allow-paths").arg(
-                input
-                    .allow_paths
+                allow_paths
                     .iter()
                     .map(|path| path.display().to_string())
                     .collect::<Vec<_>>()

@@ -47,21 +113,23 @@ impl SolidityCompiler for Solc {
|
|||||||
let mut child = command.spawn()?;
|
let mut child = command.spawn()?;
|
||||||
|
|
||||||
let stdin = child.stdin.as_mut().expect("should be piped");
|
let stdin = child.stdin.as_mut().expect("should be piped");
|
||||||
serde_json::to_writer(stdin, &input.input)?;
|
let serialized_input = serde_json::to_vec(&input)?;
|
||||||
let output = child.wait_with_output()?;
|
stdin.write_all(&serialized_input).await?;
|
||||||
|
let output = child.wait_with_output().await?;
|
||||||
|
|
||||||
if !output.status.success() {
|
if !output.status.success() {
|
||||||
|
let json_in = serde_json::to_string_pretty(&input)?;
|
||||||
let message = String::from_utf8_lossy(&output.stderr);
|
let message = String::from_utf8_lossy(&output.stderr);
|
||||||
tracing::error!("solc failed exit={} stderr={}", output.status, &message);
|
tracing::error!(
|
||||||
return Ok(CompilerOutput {
|
status = %output.status,
|
||||||
input,
|
message = %message,
|
||||||
output: Default::default(),
|
json_input = json_in,
|
||||||
error: Some(message.into()),
|
"Compilation using solc failed"
|
||||||
});
|
);
|
||||||
|
anyhow::bail!("Compilation failed with an error: {message}");
|
||||||
}
|
}
|
||||||
|
|
||||||
let parsed =
|
let parsed = serde_json::from_slice::<SolcOutput>(&output.stdout).map_err(|e| {
|
||||||
serde_json::from_slice::<SolcStandardJsonOutput>(&output.stdout).map_err(|e| {
|
|
||||||
anyhow::anyhow!(
|
anyhow::anyhow!(
|
||||||
"failed to parse resolc JSON output: {e}\nstderr: {}",
|
"failed to parse resolc JSON output: {e}\nstderr: {}",
|
||||||
String::from_utf8_lossy(&output.stdout)
|
String::from_utf8_lossy(&output.stdout)
|
||||||
@@ -70,8 +138,8 @@ impl SolidityCompiler for Solc {
|
|||||||
|
|
||||||
// Detecting if the compiler output contained errors and reporting them through logs and
|
// Detecting if the compiler output contained errors and reporting them through logs and
|
||||||
// errors instead of returning the compiler output that might contain errors.
|
// errors instead of returning the compiler output that might contain errors.
|
||||||
for error in parsed.errors.iter().flatten() {
|
for error in parsed.errors.iter() {
|
||||||
if error.severity == "error" {
|
if error.severity == Severity::Error {
|
||||||
tracing::error!(?error, ?input, "Encountered an error in the compilation");
|
tracing::error!(?error, ?input, "Encountered an error in the compilation");
|
||||||
anyhow::bail!("Encountered an error in the compilation: {error}")
|
anyhow::bail!("Encountered an error in the compilation: {error}")
|
||||||
}
|
}
|
||||||
@@ -82,22 +150,92 @@ impl SolidityCompiler for Solc {
|
|||||||
"Compiled successfully"
|
"Compiled successfully"
|
||||||
);
|
);
|
||||||
|
|
||||||
Ok(CompilerOutput {
|
let mut compiler_output = CompilerOutput::default();
|
||||||
input,
|
for (contract_path, contracts) in parsed.contracts {
|
||||||
output: parsed,
|
let map = compiler_output
|
||||||
error: None,
|
.contracts
|
||||||
|
.entry(contract_path.canonicalize()?)
|
||||||
|
.or_default();
|
||||||
|
for (contract_name, contract_info) in contracts.into_iter() {
|
||||||
|
let source_code = contract_info
|
||||||
|
.evm
|
||||||
|
.and_then(|evm| evm.bytecode)
|
||||||
|
.map(|bytecode| match bytecode.object {
|
||||||
|
BytecodeObject::Bytecode(bytecode) => bytecode.to_string(),
|
||||||
|
BytecodeObject::Unlinked(unlinked) => unlinked,
|
||||||
})
|
})
|
||||||
|
.context("Unexpected - contract compiled with solc has no source code")?;
|
||||||
|
let abi = contract_info
|
||||||
|
.abi
|
||||||
|
.context("Unexpected - contract compiled with solc as no ABI")?;
|
||||||
|
map.insert(contract_name, (source_code, abi));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(compiler_output)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn new(solc_path: PathBuf) -> Self {
|
fn new(solc_path: PathBuf) -> Self {
|
||||||
Self { solc_path }
|
Self { solc_path }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_compiler_executable(
|
async fn get_compiler_executable(
|
||||||
config: &Arguments,
|
config: &Arguments,
|
||||||
version: semver::Version,
|
version: impl Into<VersionOrRequirement>,
|
||||||
) -> anyhow::Result<PathBuf> {
|
) -> anyhow::Result<PathBuf> {
|
||||||
let path = download_solc(config.directory(), version, config.wasm)?;
|
let path = download_solc(config.directory(), version, config.wasm).await?;
|
||||||
Ok(path)
|
Ok(path)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn version(&self) -> anyhow::Result<semver::Version> {
|
||||||
|
// The following is the parsing code for the version from the solc version strings which
|
||||||
|
// look like the following:
|
||||||
|
// ```
|
||||||
|
// solc, the solidity compiler commandline interface
|
||||||
|
// Version: 0.8.30+commit.73712a01.Darwin.appleclang
|
||||||
|
// ```
|
||||||
|
|
||||||
|
let child = Command::new(self.solc_path.as_path())
|
||||||
|
.arg("--version")
|
||||||
|
.stdout(Stdio::piped())
|
||||||
|
.spawn()?;
|
||||||
|
let output = child.wait_with_output()?;
|
||||||
|
let output = String::from_utf8_lossy(&output.stdout);
|
||||||
|
let version_line = output
|
||||||
|
.split("Version: ")
|
||||||
|
.nth(1)
|
||||||
|
.context("Version parsing failed")?;
|
||||||
|
let version_string = version_line
|
||||||
|
.split("+")
|
||||||
|
.next()
|
||||||
|
.context("Version parsing failed")?;
|
||||||
|
|
||||||
|
Version::parse(version_string).map_err(Into::into)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[tokio::test]
|
||||||
|
async fn compiler_version_can_be_obtained() {
|
||||||
|
// Arrange
|
||||||
|
let args = Arguments::default();
|
||||||
|
println!("Getting compiler path");
|
||||||
|
let path = Solc::get_compiler_executable(&args, Version::new(0, 7, 6))
|
||||||
|
.await
|
||||||
|
.unwrap();
|
||||||
|
println!("Got compiler path");
|
||||||
|
let compiler = Solc::new(path);
|
||||||
|
|
||||||
|
// Act
|
||||||
|
let version = compiler.version();
|
||||||
|
|
||||||
|
// Assert
|
||||||
|
assert_eq!(
|
||||||
|
version.expect("Failed to get version"),
|
||||||
|
Version::new(0, 7, 6)
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
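The `version()` method added above shells out to `solc --version` and parses the human-readable banner rather than any machine-readable output. A minimal standalone sketch of the same parsing strategy, assuming the banner format quoted in the code comment; `semver` is the only dependency and `parse_solc_version` is an illustrative name, not part of the codebase:

    use semver::Version;

    // Extracts the semver part from a banner such as:
    // "solc, the solidity compiler commandline interface
    //  Version: 0.8.30+commit.73712a01.Darwin.appleclang"
    fn parse_solc_version(banner: &str) -> Option<Version> {
        let tail = banner.split("Version: ").nth(1)?; // drop everything before the version
        let version = tail.split('+').next()?; // drop the "+commit..." build metadata
        Version::parse(version).ok()
    }

    fn main() {
        let banner = "solc, the solidity compiler commandline interface\nVersion: 0.8.30+commit.73712a01.Darwin.appleclang";
        assert_eq!(parse_solc_version(banner), Some(Version::new(0, 8, 30)));
    }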
@@ -0,0 +1,9 @@
+// SPDX-License-Identifier: MIT
+
+pragma solidity >=0.6.9;
+
+contract Callable {
+    function f(uint[1] memory p1) public pure returns(uint) {
+        return p1[0];
+    }
+}
@@ -0,0 +1,13 @@
+// SPDX-License-Identifier: MIT
+
+// Report https://linear.app/matterlabs/issue/CPR-269/call-with-calldata-variable-bug
+
+pragma solidity >=0.6.9;
+
+import "./callable.sol";
+
+contract Main {
+    function main(uint[1] calldata p1, Callable callable) public returns(uint) {
+        return callable.f(p1);
+    }
+}
@@ -0,0 +1,21 @@
+{ "cases": [ {
+    "name": "first",
+    "inputs": [
+        {
+            "instance": "Main",
+            "method": "main",
+            "calldata": [
+                "1",
+                "Callable.address"
+            ]
+        }
+    ],
+    "expected": [
+        "1"
+    ]
+} ],
+"contracts": {
+    "Main": "main.sol:Main",
+    "Callable": "callable.sol:Callable"
+}
+}
@@ -0,0 +1,88 @@
+use std::path::PathBuf;
+
+use revive_dt_compiler::{Compiler, SolidityCompiler, revive_resolc::Resolc, solc::Solc};
+use revive_dt_config::Arguments;
+use semver::Version;
+
+#[tokio::test]
+async fn contracts_can_be_compiled_with_solc() {
+    // Arrange
+    let args = Arguments::default();
+    let compiler_path = Solc::get_compiler_executable(&args, Version::new(0, 8, 30))
+        .await
+        .unwrap();
+    println!("About to assert");
+
+    // Act
+    let output = Compiler::<Solc>::new()
+        .with_source("./tests/assets/array_one_element/callable.sol")
+        .unwrap()
+        .with_source("./tests/assets/array_one_element/main.sol")
+        .unwrap()
+        .try_build(compiler_path)
+        .await;
+
+    // Assert
+    let output = output.expect("Failed to compile");
+    assert_eq!(output.contracts.len(), 2);
+
+    let main_file_contracts = output
+        .contracts
+        .get(
+            &PathBuf::from("./tests/assets/array_one_element/main.sol")
+                .canonicalize()
+                .unwrap(),
+        )
+        .unwrap();
+    let callable_file_contracts = output
+        .contracts
+        .get(
+            &PathBuf::from("./tests/assets/array_one_element/callable.sol")
+                .canonicalize()
+                .unwrap(),
+        )
+        .unwrap();
+    assert!(main_file_contracts.contains_key("Main"));
+    assert!(callable_file_contracts.contains_key("Callable"));
+}
+
+#[tokio::test]
+async fn contracts_can_be_compiled_with_resolc() {
+    // Arrange
+    let args = Arguments::default();
+    let compiler_path = Resolc::get_compiler_executable(&args, Version::new(0, 8, 30))
+        .await
+        .unwrap();
+
+    // Act
+    let output = Compiler::<Resolc>::new()
+        .with_source("./tests/assets/array_one_element/callable.sol")
+        .unwrap()
+        .with_source("./tests/assets/array_one_element/main.sol")
+        .unwrap()
+        .try_build(compiler_path)
+        .await;
+
+    // Assert
+    let output = output.expect("Failed to compile");
+    assert_eq!(output.contracts.len(), 2);
+
+    let main_file_contracts = output
+        .contracts
+        .get(
+            &PathBuf::from("./tests/assets/array_one_element/main.sol")
+                .canonicalize()
+                .unwrap(),
+        )
+        .unwrap();
+    let callable_file_contracts = output
+        .contracts
+        .get(
+            &PathBuf::from("./tests/assets/array_one_element/callable.sol")
+                .canonicalize()
+                .unwrap(),
+        )
+        .unwrap();
+    assert!(main_file_contracts.contains_key("Main"));
+    assert!(callable_file_contracts.contains_key("Callable"));
+}
@@ -3,6 +3,7 @@
 use std::{
     fmt::Display,
     path::{Path, PathBuf},
+    sync::LazyLock,
 };
 
 use alloy::{network::EthereumWallet, signers::local::PrivateKeySigner};
@@ -54,7 +55,7 @@ pub struct Arguments {
     pub geth: PathBuf,
 
     /// The maximum time in milliseconds to wait for geth to start.
-    #[arg(long = "geth-start-timeout", default_value = "2000")]
+    #[arg(long = "geth-start-timeout", default_value = "5000")]
     pub geth_start_timeout: u64,
 
     /// The test network chain ID.
@@ -76,7 +77,7 @@ pub struct Arguments {
     /// This argument controls which private keys the nodes should have access to and be added to
     /// its wallet signers. With a value of N, private keys (0, N] will be added to the signer set
     /// of the node.
-    #[arg(long = "private-keys-count", default_value_t = 30)]
+    #[arg(long = "private-keys-count", default_value_t = 15_000)]
     pub private_keys_to_add: usize,
 
     /// The differential testing leader node implementation.
@@ -91,9 +92,13 @@ pub struct Arguments {
     #[arg(long = "compile-only")]
     pub compile_only: Option<TestingPlatform>,
 
-    /// Determines the amount of tests that are executed in parallel.
-    #[arg(long = "workers", default_value = "12")]
-    pub workers: usize,
+    /// Determines the amount of nodes that will be spawned for each chain.
+    #[arg(long, default_value = "1")]
+    pub number_of_nodes: usize,
+
+    /// Determines the amount of threads that will be used.
+    #[arg(long, default_value = "12")]
+    pub number_of_threads: usize,
 
     /// Extract problems back to the test corpus.
     #[arg(short, long = "extract-problems")]
@@ -144,14 +149,23 @@ impl Arguments {
 
 impl Default for Arguments {
     fn default() -> Self {
-        Arguments::parse_from(["retester"])
+        static TEMP_DIR: LazyLock<TempDir> = LazyLock::new(|| TempDir::new().unwrap());
+
+        let default = Arguments::parse_from(["retester"]);
+
+        Arguments {
+            temp_dir: Some(&TEMP_DIR),
+            ..default
+        }
     }
 }
 
 /// The Solidity compatible node implementation.
 ///
 /// This describes the solutions to be tested against on a high level.
-#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, ValueEnum, Serialize, Deserialize)]
+#[derive(
+    Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, ValueEnum, Serialize, Deserialize,
+)]
 #[clap(rename_all = "lower")]
 pub enum TestingPlatform {
     /// The go-ethereum reference full node EVM implementation.
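With the old `--workers` flag split into `number_of_nodes` and `number_of_threads`, an invocation would look roughly as follows. This is a sketch only: the kebab-case flag names assume clap's default renaming for bare `#[arg(long)]` attributes.

    retester --number-of-nodes 2 --number-of-threads 12 --geth-start-timeout 5000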
+24
-24
@@ -24,10 +24,10 @@ revive-dt-report = { workspace = true }
 alloy = { workspace = true }
 anyhow = { workspace = true }
 clap = { workspace = true }
+futures = { workspace = true }
 indexmap = { workspace = true }
+tokio = { workspace = true }
 tracing = { workspace = true }
 tracing-subscriber = { workspace = true }
-rayon = { workspace = true }
-revive-solc-json-interface = { workspace = true }
-serde_json = { workspace = true }
+semver = { workspace = true }
 temp-dir = { workspace = true }
+198
-719
File diff suppressed because it is too large
@@ -26,7 +26,7 @@ pub trait Platform {
 pub struct Geth;
 
 impl Platform for Geth {
-    type Blockchain = geth::Instance;
+    type Blockchain = geth::GethNode;
     type Compiler = solc::Solc;
 
     fn config_id() -> TestingPlatform {
+444
-61
@@ -1,37 +1,75 @@
-use std::{collections::HashMap, sync::LazyLock};
+use std::{
+    collections::HashMap,
+    path::Path,
+    sync::{Arc, LazyLock},
+};
+
+use alloy::{
+    json_abi::JsonAbi,
+    network::{Ethereum, TransactionBuilder},
+    primitives::Address,
+    rpc::types::TransactionRequest,
+};
+use anyhow::Context;
 use clap::Parser;
-use rayon::{ThreadPoolBuilder, prelude::*};
+use futures::StreamExt;
+use revive_dt_common::iterators::FilesWithExtensionIterator;
+use revive_dt_node_interaction::EthereumNode;
+use semver::Version;
+use temp_dir::TempDir;
+use tokio::sync::{Mutex, RwLock};
+use tracing::{Instrument, Level};
+use tracing_subscriber::{EnvFilter, FmtSubscriber};
+
+use revive_dt_compiler::SolidityCompiler;
+use revive_dt_compiler::{Compiler, CompilerOutput};
 use revive_dt_config::*;
 use revive_dt_core::{
     Geth, Kitchensink, Platform,
-    driver::{Driver, State},
+    driver::{CaseDriver, CaseState},
+};
+use revive_dt_format::{
+    case::{Case, CaseIdx},
+    corpus::Corpus,
+    input::Input,
+    metadata::{ContractInstance, ContractPathAndIdent, Metadata, MetadataFile},
+    mode::SolcMode,
 };
-use revive_dt_format::{corpus::Corpus, metadata::MetadataFile};
 use revive_dt_node::pool::NodePool;
 use revive_dt_report::reporter::{Report, Span};
-use temp_dir::TempDir;
-use tracing::Level;
-use tracing_subscriber::{EnvFilter, FmtSubscriber};
 
 static TEMP_DIR: LazyLock<TempDir> = LazyLock::new(|| TempDir::new().unwrap());
 
+type CompilationCache<'a> = Arc<
+    RwLock<
+        HashMap<
+            (&'a Path, SolcMode, TestingPlatform),
+            Arc<Mutex<Option<Arc<(Version, CompilerOutput)>>>>,
+        >,
+    >,
+>;
+
 fn main() -> anyhow::Result<()> {
     let args = init_cli()?;
 
+    let body = async {
         for (corpus, tests) in collect_corpora(&args)? {
             let span = Span::new(corpus, args.clone())?;
 
             match &args.compile_only {
-                Some(platform) => compile_corpus(&args, &tests, platform, span),
-                None => execute_corpus(&args, &tests, span)?,
+                Some(platform) => compile_corpus(&args, &tests, platform, span).await,
+                None => execute_corpus(&args, &tests, span).await?,
             }
 
             Report::save()?;
         }
 
         Ok(())
+    };
+
+    tokio::runtime::Builder::new_multi_thread()
+        .worker_threads(args.number_of_threads)
+        .enable_all()
+        .build()
+        .expect("Failed building the Runtime")
+        .block_on(body)
 }
 
 fn init_cli() -> anyhow::Result<Arguments> {
@@ -62,10 +100,6 @@ fn init_cli() -> anyhow::Result<Arguments> {
     }
     tracing::info!("workdir: {}", args.directory().display());
 
-    ThreadPoolBuilder::new()
-        .num_threads(args.workers)
-        .build_global()?;
-
     Ok(args)
 }
 
@@ -83,7 +117,11 @@ fn collect_corpora(args: &Arguments) -> anyhow::Result<HashMap<Corpus, Vec<Metad
     Ok(corpora)
 }
 
-fn run_driver<L, F>(args: &Arguments, tests: &[MetadataFile], span: Span) -> anyhow::Result<()>
+async fn run_driver<L, F>(
+    args: &Arguments,
+    tests: &[MetadataFile],
+    span: Span,
+) -> anyhow::Result<()>
 where
     L: Platform,
     F: Platform,
@@ -93,61 +131,389 @@ where
     let leader_nodes = NodePool::<L::Blockchain>::new(args)?;
     let follower_nodes = NodePool::<F::Blockchain>::new(args)?;
 
-    tests.par_iter().for_each(
+    let test_cases = tests
+        .iter()
+        .flat_map(
             |MetadataFile {
+                 path,
                  content: metadata,
-                 path: metadata_file_path,
              }| {
-            // Starting a new tracing span for this metadata file. This allows our logs to be clear
-            // about which metadata file the logs belong to. We can add other information into this
-            // as well to be able to associate the logs with the correct metadata file and case
-            // that's being executed.
+                metadata
+                    .cases
+                    .iter()
+                    .enumerate()
+                    .flat_map(move |(case_idx, case)| {
+                        metadata
+                            .solc_modes()
+                            .into_iter()
+                            .map(move |solc_mode| (path, metadata, case_idx, case, solc_mode))
+                    })
+            },
+        )
+        .collect::<Vec<_>>();
+
+    let compilation_cache = Arc::new(RwLock::new(HashMap::new()));
+    futures::stream::iter(test_cases)
+        .for_each_concurrent(
+            None,
+            |(metadata_file_path, metadata, case_idx, case, solc_mode)| {
+                let compilation_cache = compilation_cache.clone();
+                let leader_node = leader_nodes.round_robbin();
+                let follower_node = follower_nodes.round_robbin();
                 let tracing_span = tracing::span!(
                     Level::INFO,
                     "Running driver",
-                    metadata_file_path = metadata_file_path.display().to_string(),
+                    metadata_file_path = %metadata_file_path.display(),
+                    case_idx = case_idx,
+                    solc_mode = ?solc_mode,
                 );
-                let _guard = tracing_span.enter();
-
-                let mut driver = Driver::<L, F>::new(
-                    metadata,
-                    args,
-                    leader_nodes.round_robbin(),
-                    follower_nodes.round_robbin(),
-                );
-
-                let execution_result = driver.execute(span);
-                tracing::info!(
-                    case_success_count = execution_result.successful_cases_count,
-                    case_failure_count = execution_result.failed_cases_count,
-                    "Execution completed"
-                );
-
-                let mut error_count = 0;
-                for result in execution_result.results.iter() {
-                    if !result.is_success() {
-                        tracing::error!(execution_error = ?result, "Encountered an error");
-                        error_count += 1;
-                    }
-                }
-                if error_count == 0 {
-                    tracing::info!("Execution succeeded");
-                } else {
-                    tracing::info!("Execution failed");
-                }
+                async move {
+                    let result = handle_case_driver::<L, F>(
+                        metadata_file_path.as_path(),
+                        metadata,
+                        case_idx.into(),
+                        case,
+                        solc_mode,
+                        args,
+                        compilation_cache.clone(),
+                        leader_node,
+                        follower_node,
+                        span,
+                    )
+                    .await;
+                    match result {
+                        Ok(inputs_executed) => {
+                            tracing::info!(inputs_executed, "Execution succeeded")
+                        }
+                        Err(error) => tracing::info!(%error, "Execution failed"),
+                    }
+                    tracing::info!("Execution completed");
+                }
+                .instrument(tracing_span)
             },
-    );
+        )
+        .await;
 
     Ok(())
 }
 
-fn execute_corpus(args: &Arguments, tests: &[MetadataFile], span: Span) -> anyhow::Result<()> {
+#[allow(clippy::too_many_arguments)]
+async fn handle_case_driver<'a, L, F>(
+    metadata_file_path: &'a Path,
+    metadata: &'a Metadata,
+    case_idx: CaseIdx,
+    case: &Case,
+    mode: SolcMode,
+    config: &Arguments,
+    compilation_cache: CompilationCache<'a>,
+    leader_node: &L::Blockchain,
+    follower_node: &F::Blockchain,
+    _: Span,
+) -> anyhow::Result<usize>
+where
+    L: Platform,
+    F: Platform,
+    L::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
+    F::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
+{
+    let leader_pre_link_contracts = get_or_build_contracts::<L>(
+        metadata,
+        metadata_file_path,
+        mode.clone(),
+        config,
+        compilation_cache.clone(),
+        &HashMap::new(),
+    )
+    .await?;
+    let follower_pre_link_contracts = get_or_build_contracts::<F>(
+        metadata,
+        metadata_file_path,
+        mode.clone(),
+        config,
+        compilation_cache.clone(),
+        &HashMap::new(),
+    )
+    .await?;
+
+    let mut leader_deployed_libraries = HashMap::new();
+    let mut follower_deployed_libraries = HashMap::new();
+    let mut contract_sources = metadata.contract_sources()?;
+    for library_instance in metadata
+        .libraries
+        .iter()
+        .flatten()
+        .flat_map(|(_, map)| map.values())
+    {
+        let ContractPathAndIdent {
+            contract_source_path: library_source_path,
+            contract_ident: library_ident,
+        } = contract_sources
+            .remove(library_instance)
+            .context("Failed to find the contract source")?;
+
+        let (leader_code, leader_abi) = leader_pre_link_contracts
+            .1
+            .contracts
+            .get(&library_source_path)
+            .and_then(|contracts| contracts.get(library_ident.as_str()))
+            .context("Declared library was not compiled")?;
+        let (follower_code, follower_abi) = follower_pre_link_contracts
+            .1
+            .contracts
+            .get(&library_source_path)
+            .and_then(|contracts| contracts.get(library_ident.as_str()))
+            .context("Declared library was not compiled")?;
+
+        let leader_code = match alloy::hex::decode(leader_code) {
+            Ok(code) => code,
+            Err(error) => {
+                tracing::error!(
+                    ?error,
+                    contract_source_path = library_source_path.display().to_string(),
+                    contract_ident = library_ident.as_ref(),
+                    "Failed to hex-decode byte code - This could possibly mean that the bytecode requires linking"
+                );
+                anyhow::bail!("Failed to hex-decode the byte code {}", error)
+            }
+        };
+        let follower_code = match alloy::hex::decode(follower_code) {
+            Ok(code) => code,
+            Err(error) => {
+                tracing::error!(
+                    ?error,
+                    contract_source_path = library_source_path.display().to_string(),
+                    contract_ident = library_ident.as_ref(),
+                    "Failed to hex-decode byte code - This could possibly mean that the bytecode requires linking"
+                );
+                anyhow::bail!("Failed to hex-decode the byte code {}", error)
+            }
+        };
+
+        // Getting the deployer address from the cases themselves. This is to ensure that we're
+        // doing the deployments from different accounts and therefore we're not slowed down by
+        // the nonce.
+        let deployer_address = case
+            .inputs
+            .iter()
+            .map(|input| input.caller)
+            .next()
+            .unwrap_or(Input::default_caller());
+        let leader_tx = TransactionBuilder::<Ethereum>::with_deploy_code(
+            TransactionRequest::default().from(deployer_address),
+            leader_code,
+        );
+        let follower_tx = TransactionBuilder::<Ethereum>::with_deploy_code(
+            TransactionRequest::default().from(deployer_address),
+            follower_code,
+        );
+
+        let leader_receipt = match leader_node.execute_transaction(leader_tx).await {
+            Ok(receipt) => receipt,
+            Err(error) => {
+                tracing::error!(
+                    node = std::any::type_name::<L>(),
+                    ?error,
+                    "Contract deployment transaction failed."
+                );
+                return Err(error);
+            }
+        };
+        let follower_receipt = match follower_node.execute_transaction(follower_tx).await {
+            Ok(receipt) => receipt,
+            Err(error) => {
+                tracing::error!(
+                    node = std::any::type_name::<F>(),
+                    ?error,
+                    "Contract deployment transaction failed."
+                );
+                return Err(error);
+            }
+        };
+
+        let Some(leader_library_address) = leader_receipt.contract_address else {
+            tracing::error!("Contract deployment transaction didn't return an address");
+            anyhow::bail!("Contract deployment didn't return an address");
+        };
+        let Some(follower_library_address) = follower_receipt.contract_address else {
+            tracing::error!("Contract deployment transaction didn't return an address");
+            anyhow::bail!("Contract deployment didn't return an address");
+        };
+
+        leader_deployed_libraries.insert(
+            library_instance.clone(),
+            (leader_library_address, leader_abi.clone()),
+        );
+        follower_deployed_libraries.insert(
+            library_instance.clone(),
+            (follower_library_address, follower_abi.clone()),
+        );
+    }
+
+    let metadata_file_contains_libraries = metadata
+        .libraries
+        .iter()
+        .flat_map(|map| map.iter())
+        .flat_map(|(_, value)| value.iter())
+        .next()
+        .is_some();
+    let compiled_contracts_require_linking = leader_pre_link_contracts
+        .1
+        .contracts
+        .values()
+        .chain(follower_pre_link_contracts.1.contracts.values())
+        .flat_map(|value| value.values())
+        .any(|(code, _)| !code.chars().all(|char| char.is_ascii_hexdigit()));
+    let (leader_compiled_contracts, follower_compiled_contracts) =
+        if metadata_file_contains_libraries && compiled_contracts_require_linking {
+            let leader_key = (metadata_file_path, mode.clone(), L::config_id());
+            let follower_key = (metadata_file_path, mode.clone(), L::config_id());
+            {
+                let mut cache = compilation_cache.write().await;
+                cache.remove(&leader_key);
+                cache.remove(&follower_key);
+            }
+
+            let leader_post_link_contracts = get_or_build_contracts::<L>(
+                metadata,
+                metadata_file_path,
+                mode.clone(),
+                config,
+                compilation_cache.clone(),
+                &leader_deployed_libraries,
+            )
+            .await?;
+            let follower_post_link_contracts = get_or_build_contracts::<F>(
+                metadata,
+                metadata_file_path,
+                mode.clone(),
+                config,
+                compilation_cache,
+                &follower_deployed_libraries,
+            )
+            .await?;
+
+            (leader_post_link_contracts, follower_post_link_contracts)
+        } else {
+            (leader_pre_link_contracts, follower_pre_link_contracts)
+        };
+
+    let leader_state = CaseState::<L>::new(
+        leader_compiled_contracts.0.clone(),
+        leader_compiled_contracts.1.contracts.clone(),
+        leader_deployed_libraries,
+    );
+    let follower_state = CaseState::<F>::new(
+        follower_compiled_contracts.0.clone(),
+        follower_compiled_contracts.1.contracts.clone(),
+        follower_deployed_libraries,
+    );
+
+    let mut driver = CaseDriver::<L, F>::new(
+        metadata,
+        case,
+        case_idx,
+        leader_node,
+        follower_node,
+        leader_state,
+        follower_state,
+    );
+    driver.execute().await
+}
+
+async fn get_or_build_contracts<'a, P: Platform>(
+    metadata: &'a Metadata,
+    metadata_file_path: &'a Path,
+    mode: SolcMode,
+    config: &Arguments,
+    compilation_cache: CompilationCache<'a>,
+    deployed_libraries: &HashMap<ContractInstance, (Address, JsonAbi)>,
+) -> anyhow::Result<Arc<(Version, CompilerOutput)>> {
+    let key = (metadata_file_path, mode.clone(), P::config_id());
+    if let Some(compilation_artifact) = compilation_cache.read().await.get(&key).cloned() {
+        let mut compilation_artifact = compilation_artifact.lock().await;
+        match *compilation_artifact {
+            Some(ref compiled_contracts) => {
+                tracing::debug!(?key, "Compiled contracts cache hit");
+                return Ok(compiled_contracts.clone());
+            }
+            None => {
+                tracing::debug!(?key, "Compiled contracts cache miss");
+                let compiled_contracts = Arc::new(
+                    compile_contracts::<P>(metadata, &mode, config, deployed_libraries).await?,
+                );
+                *compilation_artifact = Some(compiled_contracts.clone());
+                return Ok(compiled_contracts.clone());
+            }
+        }
+    };
+
+    tracing::debug!(?key, "Compiled contracts cache miss");
+    let mutex = {
+        let mut compilation_cache = compilation_cache.write().await;
+        let mutex = Arc::new(Mutex::new(None));
+        compilation_cache.insert(key, mutex.clone());
+        mutex
+    };
+    let mut compilation_artifact = mutex.lock().await;
+    let compiled_contracts =
+        Arc::new(compile_contracts::<P>(metadata, &mode, config, deployed_libraries).await?);
+    *compilation_artifact = Some(compiled_contracts.clone());
+    Ok(compiled_contracts.clone())
+}
+
+async fn compile_contracts<P: Platform>(
+    metadata: &Metadata,
+    mode: &SolcMode,
+    config: &Arguments,
+    deployed_libraries: &HashMap<ContractInstance, (Address, JsonAbi)>,
+) -> anyhow::Result<(Version, CompilerOutput)> {
+    let compiler_version_or_requirement = mode.compiler_version_to_use(config.solc.clone());
+    let compiler_path =
+        P::Compiler::get_compiler_executable(config, compiler_version_or_requirement).await?;
+    let compiler_version = P::Compiler::new(compiler_path.clone()).version()?;
+
+    let compiler = Compiler::<P::Compiler>::new()
+        .with_allow_path(metadata.directory()?)
+        .with_optimization(mode.solc_optimize());
+    let mut compiler = metadata
+        .files_to_compile()?
+        .try_fold(compiler, |compiler, path| compiler.with_source(&path))?;
+    for (library_instance, (library_address, _)) in deployed_libraries.iter() {
+        let library_ident = &metadata
+            .contracts
+            .as_ref()
+            .and_then(|contracts| contracts.get(library_instance))
+            .expect("Impossible for library to not be found in contracts")
+            .contract_ident;
+
+        // Note the following: we need to tell solc which files require the libraries to be
+        // linked into them. We do not have access to this information and therefore we choose
+        // an easier, yet more compute intensive route, of telling solc that all of the files
+        // need to link the library and it will only perform the linking for the files that do
+        // actually need the library.
+        compiler = FilesWithExtensionIterator::new(metadata.directory()?)
+            .with_allowed_extension("sol")
+            .fold(compiler, |compiler, path| {
+                compiler.with_library(&path, library_ident.as_str(), *library_address)
+            });
+    }
+
+    let compiler_output = compiler.try_build(compiler_path).await?;
+
+    Ok((compiler_version, compiler_output))
+}
+
+async fn execute_corpus(
+    args: &Arguments,
+    tests: &[MetadataFile],
+    span: Span,
+) -> anyhow::Result<()> {
     match (&args.leader, &args.follower) {
         (TestingPlatform::Geth, TestingPlatform::Kitchensink) => {
-            run_driver::<Geth, Kitchensink>(args, tests, span)?
+            run_driver::<Geth, Kitchensink>(args, tests, span).await?
         }
         (TestingPlatform::Geth, TestingPlatform::Geth) => {
-            run_driver::<Geth, Geth>(args, tests, span)?
+            run_driver::<Geth, Geth>(args, tests, span).await?
         }
         _ => unimplemented!(),
     }
@@ -155,24 +521,41 @@ fn execute_corpus(args: &Arguments, tests: &[MetadataFile], span: Span) -> anyho
     Ok(())
 }
 
-fn compile_corpus(
+async fn compile_corpus(
     config: &Arguments,
     tests: &[MetadataFile],
     platform: &TestingPlatform,
-    span: Span,
+    _: Span,
 ) {
-    tests.par_iter().for_each(|metadata| {
-        for mode in &metadata.solc_modes() {
+    let tests = tests.iter().flat_map(|metadata| {
+        metadata
+            .solc_modes()
+            .into_iter()
+            .map(move |solc_mode| (metadata, solc_mode))
+    });
+
+    futures::stream::iter(tests)
+        .for_each_concurrent(None, |(metadata, mode)| async move {
             match platform {
                 TestingPlatform::Geth => {
-                    let mut state = State::<Geth>::new(config, span);
-                    let _ = state.build_contracts(mode, metadata);
+                    let _ = compile_contracts::<Geth>(
+                        &metadata.content,
+                        &mode,
+                        config,
+                        &Default::default(),
+                    )
+                    .await;
                 }
                 TestingPlatform::Kitchensink => {
-                    let mut state = State::<Kitchensink>::new(config, span);
-                    let _ = state.build_contracts(mode, metadata);
+                    let _ = compile_contracts::<Geth>(
+                        &metadata.content,
+                        &mode,
+                        config,
+                        &Default::default(),
+                    )
+                    .await;
                 }
-            };
             }
-    });
+        })
+        .await;
 }
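The `CompilationCache` introduced in this file is a two-level lock: an outer `RwLock<HashMap<...>>` guards the key set, while each entry is an `Arc<Mutex<Option<...>>>` so that tasks racing to compile the same key serialize on the per-entry mutex instead of holding the whole map. A minimal sketch of the same pattern with a string key and a made-up `expensive_build` step (all names here are illustrative, not from the codebase):

    use std::{collections::HashMap, sync::Arc};
    use tokio::sync::{Mutex, RwLock};

    type Cache = Arc<RwLock<HashMap<String, Arc<Mutex<Option<Arc<String>>>>>>>;

    async fn get_or_build(cache: Cache, key: &str) -> Arc<String> {
        // Fast path: the slot exists; block only on that slot's mutex.
        if let Some(slot) = cache.read().await.get(key).cloned() {
            let mut slot = slot.lock().await;
            if let Some(ref value) = *slot {
                return value.clone(); // cache hit
            }
            let value = Arc::new(expensive_build(key).await);
            *slot = Some(value.clone());
            return value;
        }
        // Slow path: insert an empty slot under the write lock, then build
        // while holding only the per-entry mutex.
        let slot = {
            let mut map = cache.write().await;
            map.entry(key.to_owned())
                .or_insert_with(|| Arc::new(Mutex::new(None)))
                .clone()
        };
        let mut slot = slot.lock().await;
        if slot.is_none() {
            *slot = Some(Arc::new(expensive_build(key).await));
        }
        slot.clone().expect("slot was just filled")
    }

    async fn expensive_build(key: &str) -> String {
        format!("built artifact for {key}")
    }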
+3
-0
@@ -19,3 +19,6 @@ tracing = { workspace = true }
 semver = { workspace = true }
 serde = { workspace = true, features = ["derive"] }
 serde_json = { workspace = true }
+
+[dev-dependencies]
+tokio = { workspace = true }
@@ -17,7 +17,25 @@ impl Corpus {
     /// Try to read and parse the corpus definition file at given `path`.
     pub fn try_from_path(path: &Path) -> anyhow::Result<Self> {
         let file = File::open(path)?;
-        Ok(serde_json::from_reader(file)?)
+        let mut corpus: Corpus = serde_json::from_reader(file)?;
+
+        // Ensure that the path mentioned in the corpus is relative to the corpus file.
+        // Canonicalizing also helps make the path in any errors unambiguous.
+        corpus.path = path
+            .parent()
+            .ok_or_else(|| {
+                anyhow::anyhow!("Corpus path '{}' does not point to a file", path.display())
+            })?
+            .canonicalize()
+            .map_err(|error| {
+                anyhow::anyhow!(
+                    "Failed to canonicalize path to corpus '{}': {error}",
+                    path.display()
+                )
+            })?
+            .join(corpus.path);
+
+        Ok(corpus)
     }
 
     /// Scan the corpus base directory and return all tests found.
@@ -35,6 +53,7 @@ impl Corpus {
 ///
 /// `path` is expected to be a directory.
 pub fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
+    if path.is_dir() {
         let dir_entry = match std::fs::read_dir(path) {
             Ok(dir_entry) => dir_entry,
             Err(error) => {
@@ -64,4 +83,17 @@ pub fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
             }
         }
     }
+    } else {
+        let Some(extension) = path.extension() else {
+            tracing::error!("Failed to get file extension");
+            return;
+        };
+        if extension.eq_ignore_ascii_case("sol") || extension.eq_ignore_ascii_case("json") {
+            if let Some(metadata) = MetadataFile::try_from_file(path) {
+                tests.push(metadata)
+            }
+        } else {
+            tracing::error!(?extension, "Unsupported file extension");
+        }
+    }
 }
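The canonicalization added to `try_from_path` means a corpus definition can name its test directory relative to the corpus file itself, independent of the process working directory. A hedged illustration of the resulting behavior (the paths and the helper name are made up):

    use std::path::{Path, PathBuf};

    // Given a corpus file at /corpora/evm/corpus.json whose JSON declares
    // "path": "tests", the parsed corpus ends up pointing at /corpora/evm/tests.
    fn resolve_corpus_dir(corpus_file: &Path, declared: &Path) -> std::io::Result<PathBuf> {
        Ok(corpus_file
            .parent()
            .expect("corpus path points to a file")
            .canonicalize()?
            .join(declared))
    }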
+202
-104
@@ -2,6 +2,7 @@ use std::collections::HashMap;
 
 use alloy::{
     eips::BlockNumberOrTag,
+    hex::ToHexExt,
     json_abi::JsonAbi,
     network::TransactionBuilder,
     primitives::{Address, Bytes, U256},
@@ -30,6 +31,7 @@ pub struct Input {
     pub expected: Option<Expected>,
     pub value: Option<EtherValue>,
     pub storage: Option<HashMap<String, Calldata>>,
+    pub variable_assignments: Option<VariableAssignments>,
 }
 
 #[derive(Clone, Debug, Deserialize, Eq, PartialEq)]
@@ -136,6 +138,8 @@ enum Operation {
     BitwiseAnd,
     BitwiseOr,
     BitwiseXor,
+    ShiftLeft,
+    ShiftRight,
 }
 
 /// Specify how the contract is called.
@@ -164,6 +168,14 @@ define_wrapper_type!(
     pub struct EtherValue(U256);
 );
 
+#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq)]
+pub struct VariableAssignments {
+    /// A vector of the variable names to assign to the return data.
+    ///
+    /// Example: `UniswapV3PoolAddress`
+    pub return_data: Vec<String>,
+}
+
 impl Input {
     pub const fn default_caller() -> Address {
         Address(FixedBytes(alloy::hex!(
@@ -186,16 +198,18 @@ impl Input {
             .ok_or_else(|| anyhow::anyhow!("instance {instance:?} not deployed"))
     }
 
-    pub fn encoded_input(
-        &self,
+    pub async fn encoded_input<'a>(
+        &'a self,
         deployed_contracts: &HashMap<ContractInstance, (Address, JsonAbi)>,
+        variables: impl Into<Option<&'a HashMap<String, U256>>> + Clone,
         chain_state_provider: &impl ResolverApi,
     ) -> anyhow::Result<Bytes> {
         match self.method {
             Method::Deployer | Method::Fallback => {
                 let calldata = self
                     .calldata
-                    .calldata(deployed_contracts, chain_state_provider)?;
+                    .calldata(deployed_contracts, variables, chain_state_provider)
+                    .await?;
+
                 Ok(calldata.into())
             }
@@ -241,11 +255,14 @@ impl Input {
             // a new buffer for each one of the resolved arguments.
             let mut calldata = Vec::<u8>::with_capacity(4 + self.calldata.size_requirement());
             calldata.extend(function.selector().0);
-            self.calldata.calldata_into_slice(
-                &mut calldata,
-                deployed_contracts,
-                chain_state_provider,
-            )?;
+            self.calldata
+                .calldata_into_slice(
+                    &mut calldata,
+                    deployed_contracts,
+                    variables,
+                    chain_state_provider,
+                )
+                .await?;
 
             Ok(calldata.into())
         }
@@ -253,12 +270,15 @@ impl Input {
     }
 
     /// Parse this input into a legacy transaction.
-    pub fn legacy_transaction(
-        &self,
+    pub async fn legacy_transaction<'a>(
+        &'a self,
         deployed_contracts: &HashMap<ContractInstance, (Address, JsonAbi)>,
+        variables: impl Into<Option<&'a HashMap<String, U256>>> + Clone,
         chain_state_provider: &impl ResolverApi,
     ) -> anyhow::Result<TransactionRequest> {
-        let input_data = self.encoded_input(deployed_contracts, chain_state_provider)?;
+        let input_data = self
+            .encoded_input(deployed_contracts, variables, chain_state_provider)
+            .await?;
         let transaction_request = TransactionRequest::default().from(self.caller).value(
             self.value
                 .map(|value| value.into_inner())
@@ -336,20 +356,28 @@ impl Calldata {
         }
     }
 
-    pub fn calldata(
-        &self,
+    pub async fn calldata<'a>(
+        &'a self,
         deployed_contracts: &HashMap<ContractInstance, (Address, JsonAbi)>,
+        variables: impl Into<Option<&'a HashMap<String, U256>>> + Clone,
         chain_state_provider: &impl ResolverApi,
     ) -> anyhow::Result<Vec<u8>> {
         let mut buffer = Vec::<u8>::with_capacity(self.size_requirement());
-        self.calldata_into_slice(&mut buffer, deployed_contracts, chain_state_provider)?;
+        self.calldata_into_slice(
+            &mut buffer,
+            deployed_contracts,
+            variables,
+            chain_state_provider,
+        )
+        .await?;
         Ok(buffer)
     }
 
-    pub fn calldata_into_slice(
-        &self,
+    pub async fn calldata_into_slice<'a>(
+        &'a self,
         buffer: &mut Vec<u8>,
         deployed_contracts: &HashMap<ContractInstance, (Address, JsonAbi)>,
+        variables: impl Into<Option<&'a HashMap<String, U256>>> + Clone,
         chain_state_provider: &impl ResolverApi,
     ) -> anyhow::Result<()> {
         match self {
@@ -358,7 +386,10 @@ impl Calldata {
             }
             Calldata::Compound(items) => {
                 for (arg_idx, arg) in items.iter().enumerate() {
-                    match arg.resolve(deployed_contracts, chain_state_provider) {
+                    match arg
+                        .resolve(deployed_contracts, variables.clone(), chain_state_provider)
+                        .await
+                    {
                         Ok(resolved) => {
                             buffer.extend(resolved.to_be_bytes::<32>());
                         }
@@ -381,10 +412,11 @@ impl Calldata {
     }
 
     /// Checks if this [`Calldata`] is equivalent to the passed calldata bytes.
-    pub fn is_equivalent(
-        &self,
+    pub async fn is_equivalent<'a>(
+        &'a self,
         other: &[u8],
         deployed_contracts: &HashMap<ContractInstance, (Address, JsonAbi)>,
+        variables: impl Into<Option<&'a HashMap<String, U256>>> + Clone,
         chain_state_provider: &impl ResolverApi,
     ) -> anyhow::Result<bool> {
         match self {
@@ -407,7 +439,9 @@ impl Calldata {
                     std::borrow::Cow::Borrowed(other)
                 };
 
-                let this = this.resolve(deployed_contracts, chain_state_provider)?;
+                let this = this
+                    .resolve(deployed_contracts, variables.clone(), chain_state_provider)
+                    .await?;
                 let other = U256::from_be_slice(&other);
                 if this != other {
                     return Ok(false);
@@ -420,18 +454,19 @@ impl Calldata {
 }
 
 impl CalldataItem {
-    fn resolve(
-        &self,
+    async fn resolve<'a>(
+        &'a self,
         deployed_contracts: &HashMap<ContractInstance, (Address, JsonAbi)>,
+        variables: impl Into<Option<&'a HashMap<String, U256>>> + Clone,
         chain_state_provider: &impl ResolverApi,
     ) -> anyhow::Result<U256> {
         let mut stack = Vec::<CalldataToken<U256>>::new();
 
         for token in self
             .calldata_tokens()
-            .map(|token| token.resolve(deployed_contracts, chain_state_provider))
+            .map(|token| token.resolve(deployed_contracts, variables.clone(), chain_state_provider))
         {
-            let token = token?;
+            let token = token.await?;
             let new_token = match token {
                 CalldataToken::Item(_) => token,
                 CalldataToken::Operation(operation) => {
@@ -452,8 +487,14 @@ impl CalldataItem {
                         Operation::BitwiseAnd => Some(left_operand & right_operand),
                         Operation::BitwiseOr => Some(left_operand | right_operand),
                         Operation::BitwiseXor => Some(left_operand ^ right_operand),
+                        Operation::ShiftLeft => {
+                            Some(left_operand << usize::try_from(right_operand)?)
+                        }
+                        Operation::ShiftRight => {
+                            Some(left_operand >> usize::try_from(right_operand)?)
+                        }
                     }
-                    .context("Invalid calldata arithmetic operation")?;
+                    .context("Invalid calldata arithmetic operation - Invalid operation")?;
 
                     CalldataToken::Item(result)
                 }
@@ -464,8 +505,17 @@ impl CalldataItem {
         match stack.as_slice() {
             // Empty stack means that we got an empty compound calldata which we resolve to zero.
             [] => Ok(U256::ZERO),
-            [CalldataToken::Item(item)] => Ok(*item),
-            _ => Err(anyhow::anyhow!("Invalid calldata arithmetic operation")),
+            [CalldataToken::Item(item)] => {
+                tracing::debug!(
+                    original = self.0,
+                    resolved = item.to_be_bytes::<32>().encode_hex(),
+                    "Resolved a Calldata item"
+                );
+                Ok(*item)
+            }
+            _ => Err(anyhow::anyhow!(
+                "Invalid calldata arithmetic operation - Invalid stack"
+            )),
         }
     }
 
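The new `ShiftLeft`/`ShiftRight` arms extend the small stack evaluator that `CalldataItem::resolve` runs over its token stream. A self-contained sketch of just the shift step on `U256` operands, assuming `alloy::primitives::U256` and `anyhow` (the `usize::try_from` guard mirrors the diff: it rejects shift amounts that do not fit a machine word):

    use alloy::primitives::U256;

    fn apply_shift(op: &str, left: U256, right: U256) -> anyhow::Result<U256> {
        let amount = usize::try_from(right)?; // fails for absurdly large shift amounts
        match op {
            "<<" => Ok(left << amount),
            ">>" => Ok(left >> amount),
            _ => anyhow::bail!("not a shift operator: {op}"),
        }
    }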
@@ -478,6 +528,8 @@ impl CalldataItem {
|
|||||||
"&" => CalldataToken::Operation(Operation::BitwiseAnd),
|
"&" => CalldataToken::Operation(Operation::BitwiseAnd),
|
||||||
"|" => CalldataToken::Operation(Operation::BitwiseOr),
|
"|" => CalldataToken::Operation(Operation::BitwiseOr),
|
||||||
"^" => CalldataToken::Operation(Operation::BitwiseXor),
|
"^" => CalldataToken::Operation(Operation::BitwiseXor),
|
||||||
|
"<<" => CalldataToken::Operation(Operation::ShiftLeft),
|
||||||
|
">>" => CalldataToken::Operation(Operation::ShiftRight),
|
||||||
_ => CalldataToken::Item(item),
|
_ => CalldataToken::Item(item),
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -494,6 +546,7 @@ impl<T> CalldataToken<T> {
|
|||||||
const BLOCK_HASH_VARIABLE_PREFIX: &str = "$BLOCK_HASH";
|
const BLOCK_HASH_VARIABLE_PREFIX: &str = "$BLOCK_HASH";
|
||||||
const BLOCK_NUMBER_VARIABLE: &str = "$BLOCK_NUMBER";
|
const BLOCK_NUMBER_VARIABLE: &str = "$BLOCK_NUMBER";
|
||||||
const BLOCK_TIMESTAMP_VARIABLE: &str = "$BLOCK_TIMESTAMP";
|
const BLOCK_TIMESTAMP_VARIABLE: &str = "$BLOCK_TIMESTAMP";
|
||||||
|
const VARIABLE_PREFIX: &str = "$VARIABLE:";
|
||||||
|
|
||||||
fn into_item(self) -> Option<T> {
|
fn into_item(self) -> Option<T> {
|
||||||
match self {
|
match self {
|
||||||
@@ -512,9 +565,10 @@ impl<T: AsRef<str>> CalldataToken<T> {
 /// This piece of code is taken from the matter-labs-tester repository which is licensed under
 /// MIT or Apache. The original source code can be found here:
 /// https://github.com/matter-labs/era-compiler-tester/blob/0ed598a27f6eceee7008deab3ff2311075a2ec69/compiler_tester/src/test/case/input/value.rs#L43-L146
-fn resolve(
+async fn resolve<'a>(
 self,
 deployed_contracts: &HashMap<ContractInstance, (Address, JsonAbi)>,
+variables: impl Into<Option<&'a HashMap<String, U256>>> + Clone,
 chain_state_provider: &impl ResolverApi,
 ) -> anyhow::Result<CalldataToken<U256>> {
 match self {
@@ -545,18 +599,22 @@ impl<T: AsRef<str>> CalldataToken<T> {
 anyhow::anyhow!("Invalid hexadecimal literal: {}", error)
 })?)
 } else if item == Self::CHAIN_VARIABLE {
-let chain_id = chain_state_provider.chain_id()?;
+let chain_id = chain_state_provider.chain_id().await?;
 Ok(U256::from(chain_id))
 } else if item == Self::GAS_LIMIT_VARIABLE {
-let gas_limit =
-chain_state_provider.block_gas_limit(BlockNumberOrTag::Latest)?;
+let gas_limit = chain_state_provider
+.block_gas_limit(BlockNumberOrTag::Latest)
+.await?;
 Ok(U256::from(gas_limit))
 } else if item == Self::COINBASE_VARIABLE {
-let coinbase = chain_state_provider.block_coinbase(BlockNumberOrTag::Latest)?;
+let coinbase = chain_state_provider
+.block_coinbase(BlockNumberOrTag::Latest)
+.await?;
 Ok(U256::from_be_slice(coinbase.as_ref()))
 } else if item == Self::DIFFICULTY_VARIABLE {
-let block_difficulty =
-chain_state_provider.block_difficulty(BlockNumberOrTag::Latest)?;
+let block_difficulty = chain_state_provider
+.block_difficulty(BlockNumberOrTag::Latest)
+.await?;
 Ok(block_difficulty)
 } else if item.starts_with(Self::BLOCK_HASH_VARIABLE_PREFIX) {
 let offset: u64 = item
@@ -565,20 +623,32 @@ impl<T: AsRef<str>> CalldataToken<T> {
 .and_then(|value| value.parse().ok())
 .unwrap_or_default();

-let current_block_number = chain_state_provider.last_block_number()?;
+let current_block_number = chain_state_provider.last_block_number().await?;
 let desired_block_number = current_block_number - offset;

-let block_hash =
-chain_state_provider.block_hash(desired_block_number.into())?;
+let block_hash = chain_state_provider
+.block_hash(desired_block_number.into())
+.await?;

 Ok(U256::from_be_bytes(block_hash.0))
 } else if item == Self::BLOCK_NUMBER_VARIABLE {
-let current_block_number = chain_state_provider.last_block_number()?;
+let current_block_number = chain_state_provider.last_block_number().await?;
 Ok(U256::from(current_block_number))
 } else if item == Self::BLOCK_TIMESTAMP_VARIABLE {
-let timestamp =
-chain_state_provider.block_timestamp(BlockNumberOrTag::Latest)?;
+let timestamp = chain_state_provider
+.block_timestamp(BlockNumberOrTag::Latest)
+.await?;
 Ok(U256::from(timestamp))
+} else if let Some(variable_name) = item.strip_prefix(Self::VARIABLE_PREFIX) {
+let Some(variables) = variables.into() else {
+anyhow::bail!(
+"Variable resolution required but no variables were passed in"
+);
+};
+let Some(variable) = variables.get(variable_name) else {
+anyhow::bail!("No variable found with the name {}", variable_name)
+};
+Ok(*variable)
 } else {
 Ok(U256::from_str_radix(item, 10)
 .map_err(|error| anyhow::anyhow!("Invalid decimal literal: {}", error))?)
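Aside: the `$VARIABLE:` branch added above resolves named values out of an optional map that callers now thread into `resolve`. A standalone sketch of that lookup; `resolve_variable` is a hypothetical helper, not the crate's API:

```rust
use std::collections::HashMap;

use alloy_primitives::U256;

fn resolve_variable(
    item: &str,
    variables: Option<&HashMap<String, U256>>,
) -> anyhow::Result<U256> {
    const VARIABLE_PREFIX: &str = "$VARIABLE:";
    let Some(name) = item.strip_prefix(VARIABLE_PREFIX) else {
        anyhow::bail!("Not a variable token: {item}");
    };
    // As in the diff, a missing map and a missing key are distinct errors.
    let Some(variables) = variables else {
        anyhow::bail!("Variable resolution required but no variables were passed in");
    };
    variables
        .get(name)
        .copied()
        .ok_or_else(|| anyhow::anyhow!("No variable found with the name {name}"))
}
```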
@@ -628,43 +698,46 @@ mod tests {
 struct MockResolver;

 impl ResolverApi for MockResolver {
-fn chain_id(&self) -> anyhow::Result<alloy_primitives::ChainId> {
+async fn chain_id(&self) -> anyhow::Result<alloy_primitives::ChainId> {
 Ok(0x123)
 }

-fn block_gas_limit(&self, _: alloy::eips::BlockNumberOrTag) -> anyhow::Result<u128> {
+async fn block_gas_limit(&self, _: alloy::eips::BlockNumberOrTag) -> anyhow::Result<u128> {
 Ok(0x1234)
 }

-fn block_coinbase(&self, _: alloy::eips::BlockNumberOrTag) -> anyhow::Result<Address> {
+async fn block_coinbase(
+&self,
+_: alloy::eips::BlockNumberOrTag,
+) -> anyhow::Result<Address> {
 Ok(Address::ZERO)
 }

-fn block_difficulty(&self, _: alloy::eips::BlockNumberOrTag) -> anyhow::Result<U256> {
+async fn block_difficulty(&self, _: alloy::eips::BlockNumberOrTag) -> anyhow::Result<U256> {
 Ok(U256::from(0x12345u128))
 }

-fn block_hash(
+async fn block_hash(
 &self,
 _: alloy::eips::BlockNumberOrTag,
 ) -> anyhow::Result<alloy_primitives::BlockHash> {
 Ok([0xEE; 32].into())
 }

-fn block_timestamp(
+async fn block_timestamp(
 &self,
 _: alloy::eips::BlockNumberOrTag,
 ) -> anyhow::Result<alloy_primitives::BlockTimestamp> {
 Ok(0x123456)
 }

-fn last_block_number(&self) -> anyhow::Result<alloy_primitives::BlockNumber> {
+async fn last_block_number(&self) -> anyhow::Result<alloy_primitives::BlockNumber> {
 Ok(0x1234567)
 }
 }

-#[test]
-fn test_encoded_input_uint256() {
+#[tokio::test]
+async fn test_encoded_input_uint256() {
 let raw_metadata = r#"
 [
 {
@@ -699,7 +772,10 @@ mod tests {
 (Address::ZERO, parsed_abi),
 );

-let encoded = input.encoded_input(&contracts, &MockResolver).unwrap();
+let encoded = input
+.encoded_input(&contracts, None, &MockResolver)
+.await
+.unwrap();
 assert!(encoded.0.starts_with(&selector));

 type T = (u64,);
@@ -707,8 +783,8 @@ mod tests {
 assert_eq!(decoded.0, 42);
 }

-#[test]
-fn test_encoded_input_address_with_signature() {
+#[tokio::test]
+async fn test_encoded_input_address_with_signature() {
 let raw_abi = r#"[
 {
 "inputs": [{"name": "recipient", "type": "address"}],
@@ -741,7 +817,10 @@ mod tests {
 (Address::ZERO, parsed_abi),
 );

-let encoded = input.encoded_input(&contracts, &MockResolver).unwrap();
+let encoded = input
+.encoded_input(&contracts, None, &MockResolver)
+.await
+.unwrap();
 assert!(encoded.0.starts_with(&selector));

 type T = (alloy_primitives::Address,);
@@ -752,8 +831,8 @@ mod tests {
 );
 }

-#[test]
-fn test_encoded_input_address() {
+#[tokio::test]
+async fn test_encoded_input_address() {
 let raw_abi = r#"[
 {
 "inputs": [{"name": "recipient", "type": "address"}],
@@ -786,7 +865,10 @@ mod tests {
 (Address::ZERO, parsed_abi),
 );

-let encoded = input.encoded_input(&contracts, &MockResolver).unwrap();
+let encoded = input
+.encoded_input(&contracts, None, &MockResolver)
+.await
+.unwrap();
 assert!(encoded.0.starts_with(&selector));

 type T = (alloy_primitives::Address,);
@@ -797,50 +879,57 @@ mod tests {
 );
 }

-fn resolve_calldata_item(
+async fn resolve_calldata_item(
 input: &str,
 deployed_contracts: &HashMap<ContractInstance, (Address, JsonAbi)>,
 chain_state_provider: &impl ResolverApi,
 ) -> anyhow::Result<U256> {
-CalldataItem::new(input).resolve(deployed_contracts, chain_state_provider)
+CalldataItem::new(input)
+.resolve(deployed_contracts, None, chain_state_provider)
+.await
 }

-#[test]
-fn resolver_can_resolve_chain_id_variable() {
+#[tokio::test]
+async fn resolver_can_resolve_chain_id_variable() {
 // Arrange
 let input = "$CHAIN_ID";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 let resolved = resolved.expect("Failed to resolve argument");
-assert_eq!(resolved, U256::from(MockResolver.chain_id().unwrap()))
+assert_eq!(resolved, U256::from(MockResolver.chain_id().await.unwrap()))
 }

-#[test]
-fn resolver_can_resolve_gas_limit_variable() {
+#[tokio::test]
+async fn resolver_can_resolve_gas_limit_variable() {
 // Arrange
 let input = "$GAS_LIMIT";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 let resolved = resolved.expect("Failed to resolve argument");
 assert_eq!(
 resolved,
-U256::from(MockResolver.block_gas_limit(Default::default()).unwrap())
+U256::from(
+MockResolver
+.block_gas_limit(Default::default())
+.await
+.unwrap()
+)
 )
 }

-#[test]
-fn resolver_can_resolve_coinbase_variable() {
+#[tokio::test]
+async fn resolver_can_resolve_coinbase_variable() {
 // Arrange
 let input = "$COINBASE";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 let resolved = resolved.expect("Failed to resolve argument");
@@ -849,163 +938,172 @@ mod tests {
 U256::from_be_slice(
 MockResolver
 .block_coinbase(Default::default())
+.await
 .unwrap()
 .as_ref()
 )
 )
 }

-#[test]
-fn resolver_can_resolve_block_difficulty_variable() {
+#[tokio::test]
+async fn resolver_can_resolve_block_difficulty_variable() {
 // Arrange
 let input = "$DIFFICULTY";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 let resolved = resolved.expect("Failed to resolve argument");
 assert_eq!(
 resolved,
-MockResolver.block_difficulty(Default::default()).unwrap()
+MockResolver
+.block_difficulty(Default::default())
+.await
+.unwrap()
 )
 }

-#[test]
-fn resolver_can_resolve_block_hash_variable() {
+#[tokio::test]
+async fn resolver_can_resolve_block_hash_variable() {
 // Arrange
 let input = "$BLOCK_HASH";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 let resolved = resolved.expect("Failed to resolve argument");
 assert_eq!(
 resolved,
-U256::from_be_bytes(MockResolver.block_hash(Default::default()).unwrap().0)
+U256::from_be_bytes(MockResolver.block_hash(Default::default()).await.unwrap().0)
 )
 }

-#[test]
-fn resolver_can_resolve_block_number_variable() {
+#[tokio::test]
+async fn resolver_can_resolve_block_number_variable() {
 // Arrange
 let input = "$BLOCK_NUMBER";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 let resolved = resolved.expect("Failed to resolve argument");
 assert_eq!(
 resolved,
-U256::from(MockResolver.last_block_number().unwrap())
+U256::from(MockResolver.last_block_number().await.unwrap())
 )
 }

-#[test]
-fn resolver_can_resolve_block_timestamp_variable() {
+#[tokio::test]
+async fn resolver_can_resolve_block_timestamp_variable() {
 // Arrange
 let input = "$BLOCK_TIMESTAMP";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 let resolved = resolved.expect("Failed to resolve argument");
 assert_eq!(
 resolved,
-U256::from(MockResolver.block_timestamp(Default::default()).unwrap())
+U256::from(
+MockResolver
+.block_timestamp(Default::default())
+.await
+.unwrap()
+)
 )
 }

-#[test]
-fn simple_addition_can_be_resolved() {
+#[tokio::test]
+async fn simple_addition_can_be_resolved() {
 // Arrange
 let input = "2 4 +";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 let resolved = resolved.expect("Failed to resolve argument");
 assert_eq!(resolved, U256::from(6));
 }

-#[test]
-fn simple_subtraction_can_be_resolved() {
+#[tokio::test]
+async fn simple_subtraction_can_be_resolved() {
 // Arrange
 let input = "4 2 -";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 let resolved = resolved.expect("Failed to resolve argument");
 assert_eq!(resolved, U256::from(2));
 }

-#[test]
-fn simple_multiplication_can_be_resolved() {
+#[tokio::test]
+async fn simple_multiplication_can_be_resolved() {
 // Arrange
 let input = "4 2 *";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 let resolved = resolved.expect("Failed to resolve argument");
 assert_eq!(resolved, U256::from(8));
 }

-#[test]
-fn simple_division_can_be_resolved() {
+#[tokio::test]
+async fn simple_division_can_be_resolved() {
 // Arrange
 let input = "4 2 /";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 let resolved = resolved.expect("Failed to resolve argument");
 assert_eq!(resolved, U256::from(2));
 }

-#[test]
-fn arithmetic_errors_are_not_panics() {
+#[tokio::test]
+async fn arithmetic_errors_are_not_panics() {
 // Arrange
 let input = "4 0 /";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 assert!(resolved.is_err())
 }

-#[test]
-fn arithmetic_with_resolution_works() {
+#[tokio::test]
+async fn arithmetic_with_resolution_works() {
 // Arrange
 let input = "$BLOCK_NUMBER 10 +";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 let resolved = resolved.expect("Failed to resolve argument");
 assert_eq!(
 resolved,
-U256::from(MockResolver.last_block_number().unwrap() + 10)
+U256::from(MockResolver.last_block_number().await.unwrap() + 10)
 );
 }

-#[test]
-fn incorrect_number_of_arguments_errors() {
+#[tokio::test]
+async fn incorrect_number_of_arguments_errors() {
 // Arrange
 let input = "$BLOCK_NUMBER 10 + +";

 // Act
-let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver);
+let resolved = resolve_calldata_item(input, &Default::default(), &MockResolver).await;

 // Assert
 assert!(resolved.is_err())
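Note on the test changes in the file above: switching `#[test]` to `#[tokio::test]` lets a test body be `async` so the now-async resolver calls can be `.await`ed. Conceptually the attribute wraps the body in a runtime, roughly as sketched below; the exact expansion is a tokio implementation detail:

```rust
// Hand-desugared equivalent of an async #[tokio::test] (sketch only).
#[test]
fn resolver_can_resolve_chain_id_variable_desugared() {
    tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .expect("Failed to build the test runtime")
        .block_on(async {
            // ... the original async test body runs here ...
        });
}
```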
@@ -9,7 +9,7 @@ use std::{

 use serde::{Deserialize, Serialize};

-use revive_dt_common::macros::define_wrapper_type;
+use revive_dt_common::{iterators::FilesWithExtensionIterator, macros::define_wrapper_type};

 use crate::{
 case::Case,
@@ -193,7 +193,7 @@ impl Metadata {
 metadata.file_path = Some(path.to_path_buf());
 metadata.contracts = Some(
 [(
-ContractInstance::new("test"),
+ContractInstance::new("Test"),
 ContractPathAndIdent {
 contract_source_path: path.to_path_buf(),
 contract_ident: ContractIdent::new("Test"),
@@ -212,6 +212,29 @@ impl Metadata {
 }
 }
 }
+
+/// Returns an iterator over all of the solidity files that needs to be compiled for this
+/// [`Metadata`] object
+///
+/// Note: if the metadata is contained within a solidity file then this is the only file that
+/// we wish to compile since this is a self-contained test. Otherwise, if it's a JSON file
+/// then we need to compile all of the contracts that are in the directory since imports are
+/// allowed in there.
+pub fn files_to_compile(&self) -> anyhow::Result<Box<dyn Iterator<Item = PathBuf>>> {
+let Some(ref metadata_file_path) = self.file_path else {
+anyhow::bail!("The metadata file path is not defined");
+};
+if metadata_file_path
+.extension()
+.is_some_and(|extension| extension.eq_ignore_ascii_case("sol"))
+{
+Ok(Box::new(std::iter::once(metadata_file_path.clone())))
+} else {
+Ok(Box::new(
+FilesWithExtensionIterator::new(self.directory()?).with_allowed_extension("sol"),
+))
+}
+}
 }

 define_wrapper_type!(
@@ -287,17 +310,23 @@ impl FromStr for ContractPathAndIdent {
 identifier = Some(next_item.to_owned())
 }
 }
-let Some(path) = path else {
-anyhow::bail!("Path is not defined");
-};
-let Some(identifier) = identifier else {
-anyhow::bail!("Contract identifier is not defined")
+match (path, identifier) {
+(Some(path), Some(identifier)) => Ok(Self {
+contract_source_path: PathBuf::from(path),
+contract_ident: ContractIdent::new(identifier),
+}),
+(None, Some(path)) | (Some(path), None) => {
+let Some(identifier) = path.split(".").next().map(ToOwned::to_owned) else {
+anyhow::bail!("Failed to find identifier");
 };
 Ok(Self {
 contract_source_path: PathBuf::from(path),
 contract_ident: ContractIdent::new(identifier),
 })
 }
+(None, None) => anyhow::bail!("Failed to find the path and identifier"),
+}
+}
 }

 impl TryFrom<String> for ContractPathAndIdent {
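Aside: a sketch of how the `files_to_compile` helper added in the file above might be consumed by a compilation driver; the `compile_all` function below is hypothetical:

```rust
use crate::metadata::Metadata; // assumed module path

fn compile_all(metadata: &Metadata) -> anyhow::Result<()> {
    // A `.sol` metadata file is self-contained, so it is the only file
    // compiled; a JSON metadata file pulls in every Solidity source in its
    // directory so that imports keep resolving.
    for source in metadata.files_to_compile()? {
        println!("compiling {}", source.display());
    }
    Ok(())
}
```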
@@ -1,3 +1,4 @@
+use revive_dt_common::types::VersionOrRequirement;
 use semver::Version;
 use serde::de::Deserializer;
 use serde::{Deserialize, Serialize};
@@ -78,6 +79,15 @@ impl SolcMode {

 None
 }
+
+/// Resolves the [`SolcMode`]'s solidity version requirement into a [`VersionOrRequirement`] if
+/// the requirement is present on the object. Otherwise, the passed default version is used.
+pub fn compiler_version_to_use(&self, default: Version) -> VersionOrRequirement {
+match self.solc_version {
+Some(ref requirement) => requirement.clone().into(),
+None => default.into(),
+}
+}
 }

 impl<'de> Deserialize<'de> for Mode {
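Aside: an illustrative call site for the new `compiler_version_to_use` helper; the default version below is an arbitrary assumption, not something the diff pins down:

```rust
use semver::Version;

fn pick_solc_version(mode: &SolcMode) -> VersionOrRequirement {
    // Uses the metadata's own solc requirement when present, otherwise
    // falls back to the supplied default toolchain version.
    mode.compiler_version_to_use(Version::new(0, 8, 29))
}
```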
@@ -6,25 +6,28 @@ use anyhow::Result;
 /// crate implements to go from string calldata and into the bytes calldata.
 pub trait ResolverApi {
 /// Returns the ID of the chain that the node is on.
-fn chain_id(&self) -> Result<ChainId>;
+fn chain_id(&self) -> impl Future<Output = Result<ChainId>>;

 // TODO: This is currently a u128 due to Kitchensink needing more than 64 bits for its gas limit
 // when we implement the changes to the gas we need to adjust this to be a u64.
 /// Returns the gas limit of the specified block.
-fn block_gas_limit(&self, number: BlockNumberOrTag) -> Result<u128>;
+fn block_gas_limit(&self, number: BlockNumberOrTag) -> impl Future<Output = Result<u128>>;

 /// Returns the coinbase of the specified block.
-fn block_coinbase(&self, number: BlockNumberOrTag) -> Result<Address>;
+fn block_coinbase(&self, number: BlockNumberOrTag) -> impl Future<Output = Result<Address>>;

 /// Returns the difficulty of the specified block.
-fn block_difficulty(&self, number: BlockNumberOrTag) -> Result<U256>;
+fn block_difficulty(&self, number: BlockNumberOrTag) -> impl Future<Output = Result<U256>>;

 /// Returns the hash of the specified block.
-fn block_hash(&self, number: BlockNumberOrTag) -> Result<BlockHash>;
+fn block_hash(&self, number: BlockNumberOrTag) -> impl Future<Output = Result<BlockHash>>;

 /// Returns the timestamp of the specified block,
-fn block_timestamp(&self, number: BlockNumberOrTag) -> Result<BlockTimestamp>;
+fn block_timestamp(
+&self,
+number: BlockNumberOrTag,
+) -> impl Future<Output = Result<BlockTimestamp>>;

 /// Returns the number of the last block.
-fn last_block_number(&self) -> Result<BlockNumber>;
+fn last_block_number(&self) -> impl Future<Output = Result<BlockNumber>>;
 }
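Since every `ResolverApi` method now returns `impl Future`, implementors can write plain `async fn` (as the `MockResolver` and the geth/kitchensink impls elsewhere in this comparison do) and callers simply `.await`. A hypothetical consumer, assuming the same imports as the trait's module:

```rust
async fn print_block_info(resolver: &impl ResolverApi) -> Result<()> {
    let chain_id = resolver.chain_id().await?;
    let number = resolver.last_block_number().await?;
    let timestamp = resolver.block_timestamp(BlockNumberOrTag::Latest).await?;
    println!("chain {chain_id}: block {number} at {timestamp}");
    Ok(())
}
```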
@@ -7,15 +7,18 @@ use anyhow::Result;
 /// An interface for all interactions with Ethereum compatible nodes.
 pub trait EthereumNode {
 /// Execute the [TransactionRequest] and return a [TransactionReceipt].
-fn execute_transaction(&self, transaction: TransactionRequest) -> Result<TransactionReceipt>;
+fn execute_transaction(
+&self,
+transaction: TransactionRequest,
+) -> impl Future<Output = Result<TransactionReceipt>>;

 /// Trace the transaction in the [TransactionReceipt] and return a [GethTrace].
 fn trace_transaction(
 &self,
 receipt: &TransactionReceipt,
 trace_options: GethDebugTracingOptions,
-) -> Result<GethTrace>;
+) -> impl Future<Output = Result<GethTrace>>;

 /// Returns the state diff of the transaction hash in the [TransactionReceipt].
-fn state_diff(&self, receipt: &TransactionReceipt) -> Result<DiffMode>;
+fn state_diff(&self, receipt: &TransactionReceipt) -> impl Future<Output = Result<DiffMode>>;
 }

+74 -81
@@ -25,7 +25,7 @@ use alloy::{
 },
 signers::local::PrivateKeySigner,
 };
-use revive_dt_common::concepts::BlockingExecutor;
+use revive_dt_common::fs::clear_directory;
 use revive_dt_config::Arguments;
 use revive_dt_format::traits::ResolverApi;
 use revive_dt_node_interaction::EthereumNode;
@@ -43,7 +43,7 @@ static NODE_COUNT: AtomicU32 = AtomicU32::new(0);
 ///
 /// Prunes the child process and the base directory on drop.
 #[derive(Debug)]
-pub struct Instance {
+pub struct GethNode {
 connection_string: String,
 base_directory: PathBuf,
 data_directory: PathBuf,
@@ -62,7 +62,7 @@ pub struct Instance {
 logs_file_to_flush: Vec<File>,
 }

-impl Instance {
+impl GethNode {
 const BASE_DIRECTORY: &str = "geth";
 const DATA_DIRECTORY: &str = "data";
 const LOGS_DIRECTORY: &str = "logs";
@@ -81,6 +81,9 @@ impl Instance {
 /// Create the node directory and call `geth init` to configure the genesis.
 #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
 fn init(&mut self, genesis: String) -> anyhow::Result<&mut Self> {
+let _ = clear_directory(&self.base_directory);
+let _ = clear_directory(&self.logs_directory);
+
 create_dir_all(&self.base_directory)?;
 create_dir_all(&self.logs_directory)?;

@@ -152,6 +155,10 @@ impl Instance {
 .arg("--nodiscover")
 .arg("--maxpeers")
 .arg("0")
+.arg("--txlookuplimit")
+.arg("0")
+.arg("--cache.blocklogs")
+.arg("512")
 .stderr(stderr_logs_file.try_clone()?)
 .stdout(stdout_logs_file.try_clone()?)
 .spawn()?
@@ -240,18 +247,16 @@ impl Instance {
 }
 }

-impl EthereumNode for Instance {
+impl EthereumNode for GethNode {
 #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-fn execute_transaction(
+async fn execute_transaction(
 &self,
 transaction: TransactionRequest,
 ) -> anyhow::Result<alloy::rpc::types::TransactionReceipt> {
-let provider = self.provider();
-BlockingExecutor::execute(async move {
 let outer_span = tracing::debug_span!("Submitting transaction", ?transaction);
 let _outer_guard = outer_span.enter();

-let provider = provider.await?;
+let provider = self.provider().await?;

 let pending_transaction = provider.send_transaction(transaction).await?;
 let transaction_hash = pending_transaction.tx_hash();
@@ -294,7 +299,10 @@ impl EthereumNode for Instance {
 }

 match provider.get_transaction_receipt(*transaction_hash).await {
-Ok(Some(receipt)) => break Ok(receipt),
+Ok(Some(receipt)) => {
+tracing::info!(?total_wait_duration, "Found receipt");
+break Ok(receipt);
+}
 Ok(None) => {}
 Err(error) => {
 let error_string = error.to_string();
@@ -311,34 +319,32 @@ impl EthereumNode for Instance {

 tokio::time::sleep(next_wait_duration).await;
 }
-})?
 }

 #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-fn trace_transaction(
+async fn trace_transaction(
 &self,
 transaction: &TransactionReceipt,
 trace_options: GethDebugTracingOptions,
 ) -> anyhow::Result<alloy::rpc::types::trace::geth::GethTrace> {
 let tx_hash = transaction.transaction_hash;
-let provider = self.provider();
-BlockingExecutor::execute(async move {
-Ok(provider
+Ok(self
+.provider()
 .await?
 .debug_trace_transaction(tx_hash, trace_options)
 .await?)
-})?
 }

 #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-fn state_diff(&self, transaction: &TransactionReceipt) -> anyhow::Result<DiffMode> {
+async fn state_diff(&self, transaction: &TransactionReceipt) -> anyhow::Result<DiffMode> {
 let trace_options = GethDebugTracingOptions::prestate_tracer(PreStateConfig {
 diff_mode: Some(true),
 disable_code: None,
 disable_storage: None,
 });
 match self
-.trace_transaction(transaction, trace_options)?
+.trace_transaction(transaction, trace_options)
+.await?
 .try_into_pre_state_frame()?
 {
 PreStateFrame::Diff(diff) => Ok(diff),
@@ -347,90 +353,77 @@ impl EthereumNode for Instance {
 }
 }

-impl ResolverApi for Instance {
+impl ResolverApi for GethNode {
 #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-fn chain_id(&self) -> anyhow::Result<alloy::primitives::ChainId> {
-let provider = self.provider();
-BlockingExecutor::execute(async move {
-provider.await?.get_chain_id().await.map_err(Into::into)
-})?
+async fn chain_id(&self) -> anyhow::Result<alloy::primitives::ChainId> {
+self.provider()
+.await?
+.get_chain_id()
+.await
+.map_err(Into::into)
 }

 #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-fn block_gas_limit(&self, number: BlockNumberOrTag) -> anyhow::Result<u128> {
-let provider = self.provider();
-BlockingExecutor::execute(async move {
-provider
+async fn block_gas_limit(&self, number: BlockNumberOrTag) -> anyhow::Result<u128> {
+self.provider()
 .await?
 .get_block_by_number(number)
 .await?
 .ok_or(anyhow::Error::msg("Blockchain has no blocks"))
 .map(|block| block.header.gas_limit as _)
-})?
 }

 #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-fn block_coinbase(&self, number: BlockNumberOrTag) -> anyhow::Result<Address> {
-let provider = self.provider();
-BlockingExecutor::execute(async move {
-provider
+async fn block_coinbase(&self, number: BlockNumberOrTag) -> anyhow::Result<Address> {
+self.provider()
 .await?
 .get_block_by_number(number)
 .await?
 .ok_or(anyhow::Error::msg("Blockchain has no blocks"))
 .map(|block| block.header.beneficiary)
-})?
 }

 #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-fn block_difficulty(&self, number: BlockNumberOrTag) -> anyhow::Result<U256> {
-let provider = self.provider();
-BlockingExecutor::execute(async move {
-provider
+async fn block_difficulty(&self, number: BlockNumberOrTag) -> anyhow::Result<U256> {
+self.provider()
 .await?
 .get_block_by_number(number)
 .await?
 .ok_or(anyhow::Error::msg("Blockchain has no blocks"))
 .map(|block| block.header.difficulty)
-})?
 }

 #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-fn block_hash(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockHash> {
-let provider = self.provider();
-BlockingExecutor::execute(async move {
-provider
+async fn block_hash(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockHash> {
+self.provider()
 .await?
 .get_block_by_number(number)
 .await?
 .ok_or(anyhow::Error::msg("Blockchain has no blocks"))
 .map(|block| block.header.hash)
-})?
 }

 #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-fn block_timestamp(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockTimestamp> {
-let provider = self.provider();
-BlockingExecutor::execute(async move {
-provider
+async fn block_timestamp(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockTimestamp> {
+self.provider()
 .await?
 .get_block_by_number(number)
 .await?
 .ok_or(anyhow::Error::msg("Blockchain has no blocks"))
 .map(|block| block.header.timestamp)
-})?
 }

 #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-fn last_block_number(&self) -> anyhow::Result<BlockNumber> {
-let provider = self.provider();
-BlockingExecutor::execute(async move {
-provider.await?.get_block_number().await.map_err(Into::into)
-})?
+async fn last_block_number(&self) -> anyhow::Result<BlockNumber> {
+self.provider()
+.await?
+.get_block_number()
+.await
+.map_err(Into::into)
 }
 }

-impl Node for Instance {
+impl Node for GethNode {
 fn new(config: &Arguments) -> Self {
 let geth_directory = config.directory().join(Self::BASE_DIRECTORY);
 let id = NODE_COUNT.fetch_add(1, Ordering::SeqCst);
@@ -518,7 +511,7 @@ impl Node for Instance {
 }
 }

-impl Drop for Instance {
+impl Drop for GethNode {
 #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
 fn drop(&mut self) {
 self.shutdown().expect("Failed to shutdown")
@@ -543,9 +536,9 @@ mod tests {
 (config, temp_dir)
 }

-fn new_node() -> (Instance, TempDir) {
+fn new_node() -> (GethNode, TempDir) {
 let (args, temp_dir) = test_config();
-let mut node = Instance::new(&args);
+let mut node = GethNode::new(&args);
 node.init(GENESIS_JSON.to_owned())
 .expect("Failed to initialize the node")
 .spawn_process()
@@ -555,110 +548,110 @@ mod tests {

 #[test]
 fn init_works() {
-Instance::new(&test_config().0)
+GethNode::new(&test_config().0)
 .init(GENESIS_JSON.to_string())
 .unwrap();
 }

 #[test]
 fn spawn_works() {
-Instance::new(&test_config().0)
+GethNode::new(&test_config().0)
 .spawn(GENESIS_JSON.to_string())
 .unwrap();
 }

 #[test]
 fn version_works() {
-let version = Instance::new(&test_config().0).version().unwrap();
+let version = GethNode::new(&test_config().0).version().unwrap();
 assert!(
 version.starts_with("geth version"),
 "expected version string, got: '{version}'"
 );
 }

-#[test]
-fn can_get_chain_id_from_node() {
+#[tokio::test]
+async fn can_get_chain_id_from_node() {
 // Arrange
 let (node, _temp_dir) = new_node();

 // Act
-let chain_id = node.chain_id();
+let chain_id = node.chain_id().await;

 // Assert
 let chain_id = chain_id.expect("Failed to get the chain id");
 assert_eq!(chain_id, 420_420_420);
 }

-#[test]
-fn can_get_gas_limit_from_node() {
+#[tokio::test]
+async fn can_get_gas_limit_from_node() {
 // Arrange
 let (node, _temp_dir) = new_node();

 // Act
-let gas_limit = node.block_gas_limit(BlockNumberOrTag::Latest);
+let gas_limit = node.block_gas_limit(BlockNumberOrTag::Latest).await;

 // Assert
 let gas_limit = gas_limit.expect("Failed to get the gas limit");
 assert_eq!(gas_limit, u32::MAX as u128)
 }

-#[test]
-fn can_get_coinbase_from_node() {
+#[tokio::test]
+async fn can_get_coinbase_from_node() {
 // Arrange
 let (node, _temp_dir) = new_node();

 // Act
-let coinbase = node.block_coinbase(BlockNumberOrTag::Latest);
+let coinbase = node.block_coinbase(BlockNumberOrTag::Latest).await;

 // Assert
 let coinbase = coinbase.expect("Failed to get the coinbase");
 assert_eq!(coinbase, Address::new([0xFF; 20]))
 }

-#[test]
-fn can_get_block_difficulty_from_node() {
+#[tokio::test]
+async fn can_get_block_difficulty_from_node() {
 // Arrange
 let (node, _temp_dir) = new_node();

 // Act
-let block_difficulty = node.block_difficulty(BlockNumberOrTag::Latest);
+let block_difficulty = node.block_difficulty(BlockNumberOrTag::Latest).await;

 // Assert
 let block_difficulty = block_difficulty.expect("Failed to get the block difficulty");
 assert_eq!(block_difficulty, U256::ZERO)
 }

-#[test]
-fn can_get_block_hash_from_node() {
+#[tokio::test]
+async fn can_get_block_hash_from_node() {
 // Arrange
 let (node, _temp_dir) = new_node();

 // Act
-let block_hash = node.block_hash(BlockNumberOrTag::Latest);
+let block_hash = node.block_hash(BlockNumberOrTag::Latest).await;

 // Assert
 let _ = block_hash.expect("Failed to get the block hash");
 }

-#[test]
-fn can_get_block_timestamp_from_node() {
+#[tokio::test]
+async fn can_get_block_timestamp_from_node() {
 // Arrange
 let (node, _temp_dir) = new_node();

 // Act
-let block_timestamp = node.block_timestamp(BlockNumberOrTag::Latest);
+let block_timestamp = node.block_timestamp(BlockNumberOrTag::Latest).await;

 // Assert
 let _ = block_timestamp.expect("Failed to get the block timestamp");
 }

-#[test]
-fn can_get_block_number_from_node() {
+#[tokio::test]
+async fn can_get_block_number_from_node() {
 // Arrange
 let (node, _temp_dir) = new_node();

 // Act
-let block_number = node.last_block_number();
+let block_number = node.last_block_number().await;

 // Assert
 let block_number = block_number.expect("Failed to get the block number");

+85 -102
@@ -30,6 +30,7 @@ use alloy::{
 },
 signers::local::PrivateKeySigner,
 };
+use revive_dt_common::fs::clear_directory;
 use revive_dt_format::traits::ResolverApi;
 use serde::{Deserialize, Serialize};
 use serde_json::{Value as JsonValue, json};
@@ -37,7 +38,6 @@ use sp_core::crypto::Ss58Codec;
 use sp_runtime::AccountId32;
 use tracing::Level;

-use revive_dt_common::concepts::BlockingExecutor;
 use revive_dt_config::Arguments;
 use revive_dt_node_interaction::EthereumNode;

@@ -86,6 +86,9 @@ impl KitchensinkNode {

 #[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
 fn init(&mut self, genesis: &str) -> anyhow::Result<&mut Self> {
+let _ = clear_directory(&self.base_directory);
+let _ = clear_directory(&self.logs_directory);
+
 create_dir_all(&self.base_directory)?;
 create_dir_all(&self.logs_directory)?;

@@ -377,49 +380,46 @@ impl KitchensinkNode {

 impl EthereumNode for KitchensinkNode {
 #[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
-fn execute_transaction(
+async fn execute_transaction(
 &self,
 transaction: alloy::rpc::types::TransactionRequest,
 ) -> anyhow::Result<TransactionReceipt> {
 tracing::debug!(?transaction, "Submitting transaction");
-let provider = self.provider();
-let receipt = BlockingExecutor::execute(async move {
-Ok(provider
+let receipt = self
+.provider()
 .await?
 .send_transaction(transaction)
 .await?
 .get_receipt()
-.await?)
-})?;
+.await?;
 tracing::info!(?receipt, "Submitted tx to kitchensink");
-receipt
+Ok(receipt)
 }

 #[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
-fn trace_transaction(
+async fn trace_transaction(
 &self,
 transaction: &TransactionReceipt,
 trace_options: GethDebugTracingOptions,
 ) -> anyhow::Result<alloy::rpc::types::trace::geth::GethTrace> {
 let tx_hash = transaction.transaction_hash;
-let provider = self.provider();
-BlockingExecutor::execute(async move {
-Ok(provider
+Ok(self
+.provider()
 .await?
 .debug_trace_transaction(tx_hash, trace_options)
 .await?)
-})?
 }

 #[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
-fn state_diff(&self, transaction: &TransactionReceipt) -> anyhow::Result<DiffMode> {
+async fn state_diff(&self, transaction: &TransactionReceipt) -> anyhow::Result<DiffMode> {
 let trace_options = GethDebugTracingOptions::prestate_tracer(PreStateConfig {
 diff_mode: Some(true),
 disable_code: None,
 disable_storage: None,
 });
 match self
-.trace_transaction(transaction, trace_options)?
+.trace_transaction(transaction, trace_options)
+.await?
 .try_into_pre_state_frame()?
 {
 PreStateFrame::Diff(diff) => Ok(diff),
@@ -429,85 +429,72 @@ impl EthereumNode for KitchensinkNode {
 }

 impl ResolverApi for KitchensinkNode {
-    #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-    fn chain_id(&self) -> anyhow::Result<alloy::primitives::ChainId> {
-        let provider = self.provider();
-        BlockingExecutor::execute(async move {
-            provider.await?.get_chain_id().await.map_err(Into::into)
-        })?
+    #[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
+    async fn chain_id(&self) -> anyhow::Result<alloy::primitives::ChainId> {
+        self.provider()
+            .await?
+            .get_chain_id()
+            .await
+            .map_err(Into::into)
     }

-    #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-    fn block_gas_limit(&self, number: BlockNumberOrTag) -> anyhow::Result<u128> {
-        let provider = self.provider();
-        BlockingExecutor::execute(async move {
-            provider
+    #[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
+    async fn block_gas_limit(&self, number: BlockNumberOrTag) -> anyhow::Result<u128> {
+        self.provider()
             .await?
             .get_block_by_number(number)
             .await?
             .ok_or(anyhow::Error::msg("Blockchain has no blocks"))
-            .map(|block| block.header.gas_limit)
-        })?
+            .map(|block| block.header.gas_limit as _)
     }

-    #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-    fn block_coinbase(&self, number: BlockNumberOrTag) -> anyhow::Result<Address> {
-        let provider = self.provider();
-        BlockingExecutor::execute(async move {
-            provider
+    #[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
+    async fn block_coinbase(&self, number: BlockNumberOrTag) -> anyhow::Result<Address> {
+        self.provider()
             .await?
             .get_block_by_number(number)
             .await?
             .ok_or(anyhow::Error::msg("Blockchain has no blocks"))
             .map(|block| block.header.beneficiary)
-        })?
     }

-    #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-    fn block_difficulty(&self, number: BlockNumberOrTag) -> anyhow::Result<U256> {
-        let provider = self.provider();
-        BlockingExecutor::execute(async move {
-            provider
+    #[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
+    async fn block_difficulty(&self, number: BlockNumberOrTag) -> anyhow::Result<U256> {
+        self.provider()
             .await?
             .get_block_by_number(number)
             .await?
             .ok_or(anyhow::Error::msg("Blockchain has no blocks"))
             .map(|block| block.header.difficulty)
-        })?
     }

-    #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-    fn block_hash(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockHash> {
-        let provider = self.provider();
-        BlockingExecutor::execute(async move {
-            provider
+    #[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
+    async fn block_hash(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockHash> {
+        self.provider()
             .await?
             .get_block_by_number(number)
             .await?
             .ok_or(anyhow::Error::msg("Blockchain has no blocks"))
             .map(|block| block.header.hash)
-        })?
     }

-    #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-    fn block_timestamp(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockTimestamp> {
-        let provider = self.provider();
-        BlockingExecutor::execute(async move {
-            provider
+    #[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
+    async fn block_timestamp(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockTimestamp> {
+        self.provider()
             .await?
             .get_block_by_number(number)
             .await?
             .ok_or(anyhow::Error::msg("Blockchain has no blocks"))
             .map(|block| block.header.timestamp)
-        })?
     }

-    #[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
-    fn last_block_number(&self) -> anyhow::Result<BlockNumber> {
-        let provider = self.provider();
-        BlockingExecutor::execute(async move {
-            provider.await?.get_block_number().await.map_err(Into::into)
-        })?
+    #[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
+    async fn last_block_number(&self) -> anyhow::Result<BlockNumber> {
+        self.provider()
+            .await?
+            .get_block_number()
+            .await
+            .map_err(Into::into)
     }
 }
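Note: the pattern applied throughout the two impl blocks above is the same: instead of bridging into async through BlockingExecutor, the trait methods become async fns and await the provider chain directly. A minimal standalone sketch of the before/after shape, assuming a tokio runtime; `Provider` and `provider()` here are illustrative stand-ins, not the crate's API:

// Stand-ins for the crate's provider types; illustrative only.
struct Provider;

impl Provider {
    async fn get_chain_id(&self) -> anyhow::Result<u64> {
        Ok(420_420_420)
    }
}

async fn provider() -> anyhow::Result<Provider> {
    Ok(Provider)
}

// Before (sketch): a sync method had to block on the inner future.
// fn chain_id() -> anyhow::Result<u64> {
//     BlockingExecutor::execute(async move { provider().await?.get_chain_id().await })?
// }

// After: the method is async and chains `.await` directly.
async fn chain_id() -> anyhow::Result<u64> {
    provider().await?.get_chain_id().await
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    println!("chain id: {}", chain_id().await?);
    Ok(())
}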
@@ -1043,26 +1030,21 @@ mod tests {
     use revive_dt_config::Arguments;
     use std::path::PathBuf;
     use std::sync::{LazyLock, Mutex};
-    use temp_dir::TempDir;

     use std::fs;

     use super::*;
     use crate::{GENESIS_JSON, Node};

-    fn test_config() -> (Arguments, TempDir) {
-        let mut config = Arguments::default();
-        let temp_dir = TempDir::new().unwrap();
-
-        config.working_directory = temp_dir.path().to_path_buf().into();
-
-        config.kitchensink = PathBuf::from("substrate-node");
-        config.eth_proxy = PathBuf::from("eth-rpc");
-
-        (config, temp_dir)
+    fn test_config() -> Arguments {
+        Arguments {
+            kitchensink: PathBuf::from("substrate-node"),
+            eth_proxy: PathBuf::from("eth-rpc"),
+            ..Default::default()
+        }
     }

-    fn new_node() -> (KitchensinkNode, Arguments, TempDir) {
+    fn new_node() -> (KitchensinkNode, Arguments) {
         // Note: When we run the tests in the CI we found that if they're all
         // run in parallel then the CI is unable to start all of the nodes in
         // time and their start up times-out. Therefore, we want all of the
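Note: the rewritten test_config builds the Arguments value with struct-update syntax rather than mutating a default. A tiny self-contained illustration of the `..Default::default()` idiom; the struct here is a simplified stand-in, not the real revive_dt_config::Arguments:

use std::path::PathBuf;

#[derive(Debug, Default)]
struct Arguments {
    kitchensink: PathBuf,
    eth_proxy: PathBuf,
    number_of_nodes: usize,
}

fn test_config() -> Arguments {
    Arguments {
        kitchensink: PathBuf::from("substrate-node"),
        eth_proxy: PathBuf::from("eth-rpc"),
        // All remaining fields take their Default values.
        ..Default::default()
    }
}

fn main() {
    println!("{:?}", test_config());
}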
@@ -1081,20 +1063,20 @@ mod tests {
         static NODE_START_MUTEX: Mutex<()> = Mutex::new(());
         let _guard = NODE_START_MUTEX.lock().unwrap();

-        let (args, temp_dir) = test_config();
+        let args = test_config();
         let mut node = KitchensinkNode::new(&args);
         node.init(GENESIS_JSON)
             .expect("Failed to initialize the node")
             .spawn_process()
             .expect("Failed to spawn the node process");
-        (node, args, temp_dir)
+        (node, args)
     }

     /// A shared node that multiple tests can use. It starts up once.
     fn shared_node() -> &'static KitchensinkNode {
-        static NODE: LazyLock<(KitchensinkNode, TempDir)> = LazyLock::new(|| {
-            let (node, _, temp_dir) = new_node();
-            (node, temp_dir)
+        static NODE: LazyLock<(KitchensinkNode, Arguments)> = LazyLock::new(|| {
+            let (node, args) = new_node();
+            (node, args)
         });
         &NODE.0
     }
@@ -1102,7 +1084,7 @@ mod tests {
     #[tokio::test]
     async fn node_mines_simple_transfer_transaction_and_returns_receipt() {
         // Arrange
-        let (node, args, _temp_dir) = new_node();
+        let (node, args) = new_node();

         let provider = node.provider().await.expect("Failed to create provider");

@@ -1137,7 +1119,7 @@ mod tests {
         }
         "#;

-        let mut dummy_node = KitchensinkNode::new(&test_config().0);
+        let mut dummy_node = KitchensinkNode::new(&test_config());

         // Call `init()`
         dummy_node.init(genesis_content).expect("init failed");
@@ -1181,7 +1163,7 @@ mod tests {
         }
         "#;

-        let node = KitchensinkNode::new(&test_config().0);
+        let node = KitchensinkNode::new(&test_config());

         let result = node
             .extract_balance_from_genesis_file(&serde_json::from_str(genesis_json).unwrap())
@@ -1252,15 +1234,16 @@ mod tests {

     #[test]
     fn spawn_works() {
-        let (config, _temp_dir) = test_config();
+        let config = test_config();

         let mut node = KitchensinkNode::new(&config);

         node.spawn(GENESIS_JSON.to_string()).unwrap();
     }

     #[test]
     fn version_works() {
-        let (config, _temp_dir) = test_config();
+        let config = test_config();

         let node = KitchensinkNode::new(&config);
         let version = node.version().unwrap();
@@ -1273,7 +1256,7 @@ mod tests {

     #[test]
     fn eth_rpc_version_works() {
-        let (config, _temp_dir) = test_config();
+        let config = test_config();

         let node = KitchensinkNode::new(&config);
         let version = node.eth_rpc_version().unwrap();
@@ -1284,86 +1267,86 @@ mod tests {
         );
     }

-    #[test]
-    fn can_get_chain_id_from_node() {
+    #[tokio::test]
+    async fn can_get_chain_id_from_node() {
         // Arrange
         let node = shared_node();

         // Act
-        let chain_id = node.chain_id();
+        let chain_id = node.chain_id().await;

         // Assert
         let chain_id = chain_id.expect("Failed to get the chain id");
         assert_eq!(chain_id, 420_420_420);
     }

-    #[test]
-    fn can_get_gas_limit_from_node() {
+    #[tokio::test]
+    async fn can_get_gas_limit_from_node() {
         // Arrange
         let node = shared_node();

         // Act
-        let gas_limit = node.block_gas_limit(BlockNumberOrTag::Latest);
+        let gas_limit = node.block_gas_limit(BlockNumberOrTag::Latest).await;

         // Assert
         let _ = gas_limit.expect("Failed to get the gas limit");
     }

-    #[test]
-    fn can_get_coinbase_from_node() {
+    #[tokio::test]
+    async fn can_get_coinbase_from_node() {
         // Arrange
         let node = shared_node();

         // Act
-        let coinbase = node.block_coinbase(BlockNumberOrTag::Latest);
+        let coinbase = node.block_coinbase(BlockNumberOrTag::Latest).await;

         // Assert
         let _ = coinbase.expect("Failed to get the coinbase");
     }

-    #[test]
-    fn can_get_block_difficulty_from_node() {
+    #[tokio::test]
+    async fn can_get_block_difficulty_from_node() {
         // Arrange
         let node = shared_node();

         // Act
-        let block_difficulty = node.block_difficulty(BlockNumberOrTag::Latest);
+        let block_difficulty = node.block_difficulty(BlockNumberOrTag::Latest).await;

         // Assert
         let _ = block_difficulty.expect("Failed to get the block difficulty");
     }

-    #[test]
-    fn can_get_block_hash_from_node() {
+    #[tokio::test]
+    async fn can_get_block_hash_from_node() {
         // Arrange
         let node = shared_node();

         // Act
-        let block_hash = node.block_hash(BlockNumberOrTag::Latest);
+        let block_hash = node.block_hash(BlockNumberOrTag::Latest).await;

         // Assert
         let _ = block_hash.expect("Failed to get the block hash");
     }

-    #[test]
-    fn can_get_block_timestamp_from_node() {
+    #[tokio::test]
+    async fn can_get_block_timestamp_from_node() {
         // Arrange
         let node = shared_node();

         // Act
-        let block_timestamp = node.block_timestamp(BlockNumberOrTag::Latest);
+        let block_timestamp = node.block_timestamp(BlockNumberOrTag::Latest).await;

         // Assert
         let _ = block_timestamp.expect("Failed to get the block timestamp");
     }

-    #[test]
-    fn can_get_block_number_from_node() {
+    #[tokio::test]
+    async fn can_get_block_number_from_node() {
         // Arrange
         let node = shared_node();

         // Act
-        let block_number = node.last_block_number();
+        let block_number = node.last_block_number().await;

         // Assert
         let _ = block_number.expect("Failed to get the block number");
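Note: all of the accessor tests above undergo the same mechanical conversion: `#[test]` becomes `#[tokio::test]` so the body can await the now-async methods. A standalone sketch of the two forms, assuming tokio with the `macros` and `rt` features; the async fn is a stand-in for the node call:

async fn chain_id() -> anyhow::Result<u64> {
    Ok(420_420_420) // stand-in for node.chain_id().await
}

// `#[tokio::test]` builds a runtime per test and drives the async body.
#[tokio::test]
async fn can_get_chain_id() {
    let chain_id = chain_id().await.expect("Failed to get the chain id");
    assert_eq!(chain_id, 420_420_420);
}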
@@ -24,7 +24,7 @@ where
 {
     /// Create a new Pool. This will start as many nodes as there are workers in `config`.
     pub fn new(config: &Arguments) -> anyhow::Result<Self> {
-        let nodes = config.workers;
+        let nodes = config.number_of_nodes;
         let genesis = read_to_string(&config.genesis_file).context(format!(
             "can not read genesis file: {}",
             config.genesis_file.display()
@@ -10,9 +10,9 @@ rust-version.workspace = true
 [dependencies]
 revive-dt-config = { workspace = true }
 revive-dt-format = { workspace = true }
+revive-dt-compiler = { workspace = true }

 anyhow = { workspace = true }
 tracing = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
-revive-solc-json-interface = { workspace = true }
@@ -1,5 +1,6 @@
 //! The report analyzer enriches the raw report data.

+use revive_dt_compiler::CompilerOutput;
 use serde::{Deserialize, Serialize};

 use crate::reporter::CompilationTask;
@@ -13,41 +14,27 @@ pub struct CompilerStatistics {
     pub mean_code_size: usize,
     /// The mean size of the optimized YUL IR.
     pub mean_yul_size: usize,
-    /// Is a proxy because the YUL also containes a lot of comments.
+    /// Is a proxy because the YUL also contains a lot of comments.
     pub yul_to_bytecode_size_ratio: f32,
 }

 impl CompilerStatistics {
     /// Cumulatively update the statistics with the next compiler task.
     pub fn sample(&mut self, compilation_task: &CompilationTask) {
-        let Some(output) = &compilation_task.json_output else {
-            return;
-        };
-
-        let Some(contracts) = &output.contracts else {
+        let Some(CompilerOutput { contracts }) = &compilation_task.json_output else {
             return;
         };

         for (_solidity, contracts) in contracts.iter() {
-            for (_name, contract) in contracts.iter() {
-                let Some(evm) = &contract.evm else {
-                    continue;
-                };
-                let Some(deploy_code) = &evm.deployed_bytecode else {
-                    continue;
-                };
+            for (_name, (bytecode, _)) in contracts.iter() {

                 // The EVM bytecode can be unlinked and thus is not necessarily a decodable hex
                 // string; for our statistics this is a good enough approximation.
-                let bytecode_size = deploy_code.object.len() / 2;
+                let bytecode_size = bytecode.len() / 2;

-                let yul_size = contract
-                    .ir_optimized
-                    .as_ref()
-                    .expect("if the contract has a deploy code it should also have the opimized IR")
-                    .len();
-
-                self.update_sizes(bytecode_size, yul_size);
+                // TODO: for the time being we set the yul_size to be zero. We need to change this
+                // when we overhaul the reporting.
+                self.update_sizes(bytecode_size, 0);
             }
         }
     }
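Note: the `bytecode.len() / 2` estimate above works because hex encoding spends two characters per byte; unlinked bytecode can contain non-hex linker placeholders, which is why the diff's comment calls it an approximation. A quick standalone check:

fn approx_code_size(hex_bytecode: &str) -> usize {
    // Two hex characters per byte; any `0x` prefix or linker
    // placeholder skews the count slightly, hence "approximation".
    hex_bytecode.trim_start_matches("0x").len() / 2
}

fn main() {
    assert_eq!(approx_code_size("0x6080604052"), 5); // 10 hex chars = 5 bytes
}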
@@ -12,11 +12,11 @@ use std::{
 };

 use anyhow::Context;
+use revive_dt_compiler::{CompilerInput, CompilerOutput};
 use serde::{Deserialize, Serialize};

 use revive_dt_config::{Arguments, TestingPlatform};
 use revive_dt_format::{corpus::Corpus, mode::SolcMode};
-use revive_solc_json_interface::{SolcStandardJsonInput, SolcStandardJsonOutput};

 use crate::analyzer::CompilerStatistics;

@@ -44,9 +44,9 @@ pub struct Report {
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct CompilationTask {
     /// The observed compiler input.
-    pub json_input: SolcStandardJsonInput,
+    pub json_input: CompilerInput,
     /// The observed compiler output.
-    pub json_output: Option<SolcStandardJsonOutput>,
+    pub json_output: Option<CompilerOutput>,
     /// The observed compiler mode.
     pub mode: SolcMode,
     /// The observed compiler version.
@@ -152,15 +152,7 @@ impl Report {
         for (platform, results) in self.compiler_results.iter() {
             for result in results {
                 // ignore if there were no errors
-                if result.compilation_task.error.is_none()
-                    && result
-                        .compilation_task
-                        .json_output
-                        .as_ref()
-                        .and_then(|output| output.errors.as_ref())
-                        .map(|errors| errors.is_empty())
-                        .unwrap_or(true)
-                {
+                if result.compilation_task.error.is_none() {
                     continue;
                 }

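Note: with CompilerOutput no longer exposing a per-run error list, the success check above collapses to a single Option::is_none. For comparison, a standalone sketch of the old and new predicates; types are simplified stand-ins, field names follow the diff:

struct Output {
    errors: Option<Vec<String>>,
}

struct Task {
    error: Option<String>,
    json_output: Option<Output>,
}

// Old predicate: no task-level error AND no (or an empty) output error list.
fn succeeded_old(task: &Task) -> bool {
    task.error.is_none()
        && task
            .json_output
            .as_ref()
            .and_then(|output| output.errors.as_ref())
            .map(|errors| errors.is_empty())
            .unwrap_or(true)
}

// New predicate: only the task-level error matters.
fn succeeded_new(task: &Task) -> bool {
    task.error.is_none()
}

fn main() {
    let task = Task { error: None, json_output: None };
    assert_eq!(succeeded_old(&task), succeeded_new(&task));
}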
@@ -9,9 +9,12 @@ repository.workspace = true
 rust-version.workspace = true

 [dependencies]
+revive-dt-common = { workspace = true }
+
 anyhow = { workspace = true }
 hex = { workspace = true }
 tracing = { workspace = true }
+tokio = { workspace = true }
 reqwest = { workspace = true }
 semver = { workspace = true }
 serde = { workspace = true }
@@ -6,15 +6,17 @@ use std::{
     io::{BufWriter, Write},
     os::unix::fs::PermissionsExt,
     path::{Path, PathBuf},
-    sync::{LazyLock, Mutex},
+    sync::LazyLock,
 };

+use tokio::sync::Mutex;
+
 use crate::download::GHDownloader;

 pub const SOLC_CACHE_DIRECTORY: &str = "solc";
 pub(crate) static SOLC_CACHER: LazyLock<Mutex<HashSet<PathBuf>>> = LazyLock::new(Default::default);

-pub(crate) fn get_or_download(
+pub(crate) async fn get_or_download(
     working_directory: &Path,
     downloader: &GHDownloader,
 ) -> anyhow::Result<PathBuf> {
@@ -23,20 +25,20 @@ pub(crate) fn get_or_download(
         .join(downloader.version.to_string());
     let target_file = target_directory.join(downloader.target);

-    let mut cache = SOLC_CACHER.lock().unwrap();
+    let mut cache = SOLC_CACHER.lock().await;
     if cache.contains(&target_file) {
         tracing::debug!("using cached solc: {}", target_file.display());
         return Ok(target_file);
     }

     create_dir_all(target_directory)?;
-    download_to_file(&target_file, downloader)?;
+    download_to_file(&target_file, downloader).await?;
     cache.insert(target_file.clone());

     Ok(target_file)
 }

-fn download_to_file(path: &Path, downloader: &GHDownloader) -> anyhow::Result<()> {
+async fn download_to_file(path: &Path, downloader: &GHDownloader) -> anyhow::Result<()> {
     tracing::info!("caching file: {}", path.display());

     let Ok(file) = File::create_new(path) else {
@@ -52,7 +54,7 @@ fn download_to_file(path: &Path, downloader: &GHDownloader) -> anyhow::Result<()
     }

     let mut file = BufWriter::new(file);
-    file.write_all(&downloader.download()?)?;
+    file.write_all(&downloader.download().await?)?;
     file.flush()?;
     drop(file);

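Note: the switch from std::sync::Mutex to tokio::sync::Mutex in the cache matters because the guard now stays live across `download_to_file(...).await`: a std guard held across an await point would pin the executor thread and make the future non-Send. A minimal sketch of the safe pattern, assuming tokio; the sleep stands in for the real download:

use std::collections::HashSet;
use std::path::PathBuf;
use std::sync::LazyLock;
use std::time::Duration;

use tokio::sync::Mutex;

static CACHE: LazyLock<Mutex<HashSet<PathBuf>>> = LazyLock::new(Default::default);

async fn get_or_fetch(path: PathBuf) -> anyhow::Result<PathBuf> {
    // A tokio mutex guard may legally be held across `.await` points.
    let mut cache = CACHE.lock().await;
    if !cache.contains(&path) {
        tokio::time::sleep(Duration::from_millis(1)).await; // stand-in for the download
        cache.insert(path.clone());
    }
    Ok(path)
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let path = get_or_fetch(PathBuf::from("solc-0.8.26")).await?;
    println!("cached: {}", path.display());
    Ok(())
}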
@@ -5,6 +5,8 @@ use std::{
     sync::{LazyLock, Mutex},
 };

+use revive_dt_common::types::VersionOrRequirement;
+
 use semver::Version;
 use sha2::{Digest, Sha256};

@@ -23,12 +25,12 @@ impl List {
     ///
     /// Caches the list retrieved from the `url` into [LIST_CACHE],
     /// subsequent calls with the same `url` will return the cached list.
-    pub fn download(url: &'static str) -> anyhow::Result<Self> {
+    pub async fn download(url: &'static str) -> anyhow::Result<Self> {
         if let Some(list) = LIST_CACHE.lock().unwrap().get(url) {
             return Ok(list.clone());
         }

-        let body: List = reqwest::blocking::get(url)?.json()?;
+        let body: List = reqwest::get(url).await?.json().await?;

         LIST_CACHE.lock().unwrap().insert(url, body.clone());

@@ -52,28 +54,52 @@ impl GHDownloader {
     pub const WINDOWS_NAME: &str = "solc-windows.exe";
     pub const WASM_NAME: &str = "soljson.js";

-    fn new(version: Version, target: &'static str, list: &'static str) -> Self {
-        Self {
-            version,
-            target,
-            list,
+    async fn new(
+        version: impl Into<VersionOrRequirement>,
+        target: &'static str,
+        list: &'static str,
+    ) -> anyhow::Result<Self> {
+        let version_or_requirement = version.into();
+        match version_or_requirement {
+            VersionOrRequirement::Version(version) => Ok(Self {
+                version,
+                target,
+                list,
+            }),
+            VersionOrRequirement::Requirement(requirement) => {
+                let Some(version) = List::download(list)
+                    .await?
+                    .builds
+                    .into_iter()
+                    .map(|build| build.version)
+                    .filter(|version| requirement.matches(version))
+                    .max()
+                else {
+                    anyhow::bail!("Failed to find a version that satisfies {requirement:?}");
+                };
+                Ok(Self {
+                    version,
+                    target,
+                    list,
+                })
+            }
         }
     }

-    pub fn linux(version: Version) -> Self {
-        Self::new(version, Self::LINUX_NAME, List::LINUX_URL)
+    pub async fn linux(version: impl Into<VersionOrRequirement>) -> anyhow::Result<Self> {
+        Self::new(version, Self::LINUX_NAME, List::LINUX_URL).await
     }

-    pub fn macosx(version: Version) -> Self {
-        Self::new(version, Self::MACOSX_NAME, List::MACOSX_URL)
+    pub async fn macosx(version: impl Into<VersionOrRequirement>) -> anyhow::Result<Self> {
+        Self::new(version, Self::MACOSX_NAME, List::MACOSX_URL).await
     }

-    pub fn windows(version: Version) -> Self {
-        Self::new(version, Self::WINDOWS_NAME, List::WINDOWS_URL)
+    pub async fn windows(version: impl Into<VersionOrRequirement>) -> anyhow::Result<Self> {
+        Self::new(version, Self::WINDOWS_NAME, List::WINDOWS_URL).await
    }

-    pub fn wasm(version: Version) -> Self {
-        Self::new(version, Self::WASM_NAME, List::WASM_URL)
+    pub async fn wasm(version: impl Into<VersionOrRequirement>) -> anyhow::Result<Self> {
+        Self::new(version, Self::WASM_NAME, List::WASM_URL).await
     }

     /// Returns the download link.
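Note: GHDownloader::new now accepts anything convertible into VersionOrRequirement, so callers can pass either an exact semver::Version or a semver::VersionReq that is resolved against the release list. The enum itself lives in revive-dt-common and is not shown in this diff; the following is an assumed minimal shape plus the resolution logic in isolation:

use semver::{Version, VersionReq};

// Assumed shape of revive_dt_common::types::VersionOrRequirement (not shown in the diff).
enum VersionOrRequirement {
    Version(Version),
    Requirement(VersionReq),
}

impl From<Version> for VersionOrRequirement {
    fn from(version: Version) -> Self {
        Self::Version(version)
    }
}

impl From<VersionReq> for VersionOrRequirement {
    fn from(requirement: VersionReq) -> Self {
        Self::Requirement(requirement)
    }
}

fn main() -> anyhow::Result<()> {
    // An exact version converts directly.
    let _exact: VersionOrRequirement = Version::parse("0.8.26")?.into();

    // Resolution as in the diff: pick the newest build matching the requirement.
    let builds = ["0.7.6", "0.8.19", "0.8.26"].map(|v| Version::parse(v).unwrap());
    let requirement = VersionReq::parse("^0.8")?;
    let resolved = builds
        .into_iter()
        .filter(|version| requirement.matches(version))
        .max();
    assert_eq!(resolved, Some(Version::parse("0.8.26")?));
    Ok(())
}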
@@ -85,16 +111,17 @@ impl GHDownloader {
     ///
     /// Errors out if the download fails or the digest of the downloaded file
     /// mismatches the expected digest from the release [List].
-    pub fn download(&self) -> anyhow::Result<Vec<u8>> {
+    pub async fn download(&self) -> anyhow::Result<Vec<u8>> {
         tracing::info!("downloading solc: {self:?}");
-        let expected_digest = List::download(self.list)?
+        let expected_digest = List::download(self.list)
+            .await?
             .builds
             .iter()
             .find(|build| build.version == self.version)
             .ok_or_else(|| anyhow::anyhow!("solc v{} not found builds", self.version))
             .map(|b| b.sha256.strip_prefix("0x").unwrap_or(&b.sha256).to_string())?;

-        let file = reqwest::blocking::get(self.url())?.bytes()?.to_vec();
+        let file = reqwest::get(self.url()).await?.bytes().await?.to_vec();

         if hex::encode(Sha256::digest(&file)) != expected_digest {
             anyhow::bail!("sha256 mismatch for solc version {}", self.version);
@@ -108,27 +135,56 @@ impl GHDownloader {
 mod tests {
     use crate::{download::GHDownloader, list::List};

-    #[test]
-    fn try_get_windows() {
-        let version = List::download(List::WINDOWS_URL).unwrap().latest_release;
-        GHDownloader::windows(version).download().unwrap();
+    #[tokio::test]
+    async fn try_get_windows() {
+        let version = List::download(List::WINDOWS_URL)
+            .await
+            .unwrap()
+            .latest_release;
+        GHDownloader::windows(version)
+            .await
+            .unwrap()
+            .download()
+            .await
+            .unwrap();
     }

-    #[test]
-    fn try_get_macosx() {
-        let version = List::download(List::MACOSX_URL).unwrap().latest_release;
-        GHDownloader::macosx(version).download().unwrap();
+    #[tokio::test]
+    async fn try_get_macosx() {
+        let version = List::download(List::MACOSX_URL)
+            .await
+            .unwrap()
+            .latest_release;
+        GHDownloader::macosx(version)
+            .await
+            .unwrap()
+            .download()
+            .await
+            .unwrap();
     }

-    #[test]
-    fn try_get_linux() {
-        let version = List::download(List::LINUX_URL).unwrap().latest_release;
-        GHDownloader::linux(version).download().unwrap();
+    #[tokio::test]
+    async fn try_get_linux() {
+        let version = List::download(List::LINUX_URL)
+            .await
+            .unwrap()
+            .latest_release;
+        GHDownloader::linux(version)
+            .await
+            .unwrap()
+            .download()
+            .await
+            .unwrap();
     }

-    #[test]
-    fn try_get_wasm() {
-        let version = List::download(List::WASM_URL).unwrap().latest_release;
-        GHDownloader::wasm(version).download().unwrap();
+    #[tokio::test]
+    async fn try_get_wasm() {
+        let version = List::download(List::WASM_URL).await.unwrap().latest_release;
+        GHDownloader::wasm(version)
+            .await
+            .unwrap()
+            .download()
+            .await
+            .unwrap();
     }
 }
@@ -7,7 +7,8 @@ use std::path::{Path, PathBuf};

 use cache::get_or_download;
 use download::GHDownloader;
-use semver::Version;
+
+use revive_dt_common::types::VersionOrRequirement;

 pub mod cache;
 pub mod download;
@@ -18,22 +19,22 @@ pub mod list;
 ///
 /// Subsequent calls for the same version will use a cached artifact
 /// and not download it again.
-pub fn download_solc(
+pub async fn download_solc(
     cache_directory: &Path,
-    version: Version,
+    version: impl Into<VersionOrRequirement>,
     wasm: bool,
 ) -> anyhow::Result<PathBuf> {
     let downloader = if wasm {
-        GHDownloader::wasm(version)
+        GHDownloader::wasm(version).await
     } else if cfg!(target_os = "linux") {
-        GHDownloader::linux(version)
+        GHDownloader::linux(version).await
     } else if cfg!(target_os = "macos") {
-        GHDownloader::macosx(version)
+        GHDownloader::macosx(version).await
     } else if cfg!(target_os = "windows") {
-        GHDownloader::windows(version)
+        GHDownloader::windows(version).await
     } else {
         unimplemented!()
-    };
+    }?;

-    get_or_download(cache_directory, &downloader)
+    get_or_download(cache_directory, &downloader).await
 }
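Note: call sites of download_solc now need an async context, and can pass a version requirement instead of an exact version. A hypothetical usage sketch; the crate path revive_dt_solc_binaries and the cache location below are assumptions for illustration only:

use std::path::Path;

use semver::VersionReq;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // `download_solc` resolves `^0.8.26` against the release list and caches
    // the binary; the crate name below is assumed, not taken from the diff.
    let solc = revive_dt_solc_binaries::download_solc(
        Path::new("/tmp/solc-cache"),
        VersionReq::parse("^0.8.26")?,
        false, // native binary, not the wasm soljson
    )
    .await?;
    println!("solc available at {}", solc.display());
    Ok(())
}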