Mirror of https://github.com/pezkuwichain/revive-differential-tests.git (synced 2026-04-22 21:57:58 +00:00)

Compare commits

5 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 83c20b1be3 | |
| | 075c8235a7 | |
| | 43e0d0e592 | |
| | 2373872230 | |
| | e3723e780a | |
@@ -99,12 +99,9 @@ jobs:
       - name: Install Geth on Ubuntu
         if: matrix.os == 'ubuntu-24.04'
         run: |
-          sudo add-apt-repository -y ppa:ethereum/ethereum
           sudo apt-get update
           sudo apt-get install -y protobuf-compiler
 
-          sudo apt-get install -y solc
-
           # We were facing some issues in CI with the 1.16.* versions of geth, and specifically on
           # Ubuntu. Eventually, we found out that the last version of geth that worked in our CI was
           # version 1.15.11. Thus, this is the version that we want to use in CI. The PPA sadly does
@@ -125,22 +122,12 @@ jobs:
           wget -qO- "$URL" | sudo tar xz -C /usr/local/bin --strip-components=1
           geth --version
 
-          curl -sL https://github.com/paritytech/revive/releases/download/v0.3.0/resolc-x86_64-unknown-linux-musl -o resolc
-          chmod +x resolc
-          sudo mv resolc /usr/local/bin
-
       - name: Install Geth on macOS
         if: matrix.os == 'macos-14'
         run: |
           brew tap ethereum/ethereum
           brew install ethereum protobuf
 
-          brew install solidity
-
-          curl -sL https://github.com/paritytech/revive/releases/download/v0.3.0/resolc-universal-apple-darwin -o resolc
-          chmod +x resolc
-          sudo mv resolc /usr/local/bin
-
       - name: Machete
         uses: bnjbvr/cargo-machete@v0.7.1
 
@@ -156,8 +143,5 @@ jobs:
       - name: Check eth-rpc version
         run: eth-rpc --version
 
-      - name: Check resolc version
-        run: resolc --version
-
       - name: Test cargo workspace
         run: make test
Generated
+90 -223

The generated Cargo lockfile diff amounts to the following package-level changes (old → new):

- alloy: 1.0.22 → 1.0.20
- alloy-consensus: 1.0.22 → 1.0.9, dropping its "alloy-tx-macros" dependency
- alloy-consensus-any: 1.0.22 → 1.0.9
- alloy-contract: 1.0.22 → 1.0.9, dropping "serde_json" from its dependency list
- alloy-eips: 1.0.22 → 1.0.9
- alloy-genesis: 1.0.22 → 1.0.9, dropping "serde_with"
- alloy-json-rpc: 1.0.22 → 1.0.9, dropping "http"
- alloy-network: 1.0.22 → 1.0.9
- alloy-network-primitives: 1.0.22 → 1.0.9
- alloy-primitives: its dependency list drops "getrandom 0.3.3" and moves from "indexmap 2.10.0" to "indexmap 2.9.0" and from "rand 0.9.2" to "rand 0.9.1"
- alloy-provider: 1.0.22 → 1.0.9, dropping "http"
- alloy-pubsub: 1.0.22 → 1.0.9
- alloy-rpc-client: 1.0.22 → 1.0.9, adding "async-stream" and "tracing-futures"
- alloy-rpc-types: 1.0.22 → 1.0.9
- alloy-rpc-types-any: 1.0.22 → 1.0.9
- alloy-rpc-types-debug: 1.0.22 → 1.0.9
- alloy-rpc-types-eth: 1.0.22 → 1.0.9, dropping "serde_with"
- alloy-rpc-types-trace: 1.0.22 → 1.0.9
- alloy-serde: 1.0.22 → 1.0.9
- alloy-signer: 1.0.22 → 1.0.9
- alloy-signer-local: 1.0.22 → 1.0.9
- alloy-transport: 1.0.22 → 1.0.9
- alloy-transport-http: 1.0.22 → 1.0.9
- alloy-transport-ipc: 1.0.22 → 1.0.9
- alloy-trie: 0.9.0 → 0.8.1
- indexmap: 2.10.0 → 2.9.0 (the dependency lists at old lines 602, 2478, 4621 and 5532 follow suit)
- nybbles: 0.4.1 → 0.3.4; its dependency list swaps "cfg-if" for "const-hex" and drops "ruint"
- rand: 0.9.2 → 0.9.1 (references in other dependency lists, e.g. at old lines 339 and 4223, change accordingly)
- rustc-hash: keeps version 2.1.1 but loses its `dependencies = ["rand 0.8.5"]` block
- tokio: 1.47.0 → 1.45.1; its dependency list drops "io-uring", "slab" and "socket2 0.6.0", swaps "signal-hook-registry" for "socket2" and moves from "windows-sys 0.59.0" to "windows-sys 0.52.0"
- the dependency list at old line 2713 changes "socket2 0.5.10" to plain "socket2"
- the dependency list at old line 3980 gains "futures-channel" and "futures-util"

Packages removed entirely: alloy-tx-macros 1.0.22, foundry-compilers-artifacts 0.18.0, foundry-compilers-artifacts-solc 0.18.0, foundry-compilers-artifacts-vyper 0.18.0, foundry-compilers-core 0.18.0, io-uring 0.7.9, path-slash 0.2.1, revive-dt-common 0.1.0, signal-hook-registry 1.4.5, socket2 0.6.0 and yansi 1.0.1.

Packages added: tracing-futures 0.2.5 (depending on futures, futures-task, pin-project and tracing).

Workspace crates:

- revive-dt-compiler: drops "alloy", "alloy-primitives", "foundry-compilers-artifacts", "revive-dt-common", "serde" and "tokio" from its dependencies
- the workspace crate at old line 4070: swaps "futures" for "rayon", drops "indexmap 2.10.0", "revive-dt-common", "semver 1.0.26" and "tokio", and adds "revive-solc-json-interface" and "serde_json"
- the workspace crate at old line 4094: drops "revive-dt-common" and "tokio"
- the workspace crate at old line 4108: drops "revive-dt-common", "revive-dt-format" and "serde"
- the workspace crate at old line 4127: adds "once_cell", "tokio" and "tracing"
- revive-dt-report: swaps "revive-dt-compiler" for "revive-solc-json-interface"
- the workspace crate at old line 4149: drops "revive-dt-common" and "tokio"
+4 -10
@@ -11,7 +11,6 @@ repository = "https://github.com/paritytech/revive-differential-testing.git"
 rust-version = "1.85.0"
 
 [workspace.dependencies]
-revive-dt-common = { version = "0.1.0", path = "crates/common" }
 revive-dt-compiler = { version = "0.1.0", path = "crates/compiler" }
 revive-dt-config = { version = "0.1.0", path = "crates/config" }
 revive-dt-core = { version = "0.1.0", path = "crates/core" }
@@ -26,11 +25,10 @@ alloy-primitives = "1.2.1"
 alloy-sol-types = "1.2.1"
 anyhow = "1.0"
 clap = { version = "4", features = ["derive"] }
-foundry-compilers-artifacts = { version = "0.18.0" }
-futures = { version = "0.3.31" }
 hex = "0.4.3"
-reqwest = { version = "0.12.15", features = ["json"] }
+reqwest = { version = "0.12.15", features = ["blocking", "json"] }
 once_cell = "1.21"
+rayon = { version = "1.10" }
 semver = { version = "1.0", features = ["serde"] }
 serde = { version = "1.0", default-features = false, features = ["derive"] }
 serde_json = { version = "1.0", default-features = false, features = [
@@ -42,10 +40,8 @@ sp-core = "36.1.0"
 sp-runtime = "41.1.0"
 temp-dir = { version = "0.1.16" }
 tempfile = "3.3"
-tokio = { version = "1.47.0", default-features = false, features = [
+tokio = { version = "1", default-features = false, features = [
     "rt-multi-thread",
-    "process",
-    "rt",
 ] }
 uuid = { version = "1.8", features = ["v4"] }
 tracing = "0.1.41"
@@ -54,7 +50,6 @@ tracing-subscriber = { version = "0.3.19", default-features = false, features =
     "json",
     "env-filter",
 ] }
-indexmap = { version = "2.10.0", default-features = false }
 
 # revive compiler
 revive-solc-json-interface = { git = "https://github.com/paritytech/revive", rev = "3389865af7c3ff6f29a586d82157e8bc573c1a8e" }
@@ -62,7 +57,7 @@ revive-common = { git = "https://github.com/paritytech/revive", rev = "3389865af
 revive-differential = { git = "https://github.com/paritytech/revive", rev = "3389865af7c3ff6f29a586d82157e8bc573c1a8e" }
 
 [workspace.dependencies.alloy]
-version = "1.0.22"
+version = "1.0"
 default-features = false
 features = [
     "json-abi",
@@ -76,7 +71,6 @@ features = [
     "network",
     "serde",
     "rpc-types-eth",
-    "genesis",
 ]
 
 [profile.bench]
@@ -1,326 +0,0 @@
{
  "modes": ["Y >=0.8.9", "E", "I"],
  "cases": [
    {
      "name": "first",
      "inputs": [
        {
          "instance": "WBTC_1",
          "method": "#deployer",
          "calldata": [
            "0x40",
            "0x80",
            "4",
            "0x5742544300000000000000000000000000000000000000000000000000000000",
            "14",
            "0x5772617070656420425443000000000000000000000000000000000000000000"
          ],
          "expected": ["WBTC_1.address"]
        },
        {
          "instance": "WBTC_2",
          "method": "#deployer",
          "calldata": [
            "0x40",
            "0x80",
            "4",
            "0x5742544300000000000000000000000000000000000000000000000000000000",
            "14",
            "0x5772617070656420425443000000000000000000000000000000000000000000"
          ],
          "expected": ["WBTC_2.address"]
        },
        {
          "instance": "Mooniswap",
          "method": "#deployer",
          "calldata": [
            "0x0000000000000000000000000000000000000000000000000000000000000060",
            "0x00000000000000000000000000000000000000000000000000000000000000c0",
            "0x0000000000000000000000000000000000000000000000000000000000000100",
            "0x0000000000000000000000000000000000000000000000000000000000000002",
            "WBTC_1.address",
            "WBTC_2.address",
            "4",
            "0x5742544300000000000000000000000000000000000000000000000000000000",
            "14",
            "0x5772617070656420425443000000000000000000000000000000000000000000"
          ],
          "expected": {
            "return_data": ["Mooniswap.address"],
            "events": [
              { "topics": ["0x8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e0", "0x0000000000000000000000000000000000000000000000000000000000000000", "0xdeadbeef01000000000000000000000000000000"], "values": [] }
            ],
            "exception": false
          }
        },
        {
          "instance": "WBTC_1",
          "method": "_mint",
          "calldata": ["0xdeadbeef00000000000000000000000000000042", "1000000000"],
          "expected": {
            "return_data": [],
            "events": [
              { "topics": ["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", "0x0000000000000000000000000000000000000000000000000000000000000000", "0xdeadbeef00000000000000000000000000000042"], "values": ["1000000000"] }
            ],
            "exception": false
          }
        },
        {
          "instance": "WBTC_2",
          "method": "_mint",
          "calldata": ["0xdeadbeef00000000000000000000000000000042", "1000000000"],
          "expected": {
            "return_data": [],
            "events": [
              { "topics": ["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", "0x0000000000000000000000000000000000000000000000000000000000000000", "0xdeadbeef00000000000000000000000000000042"], "values": ["1000000000"] }
            ],
            "exception": false
          }
        },
        {
          "instance": "WBTC_1",
          "caller": "0xdeadbeef00000000000000000000000000000042",
          "method": "approve",
          "calldata": ["Mooniswap.address", "500000000"],
          "expected": {
            "return_data": ["0x0000000000000000000000000000000000000000000000000000000000000001"],
            "events": [
              { "topics": ["0x8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925", "0xdeadbeef00000000000000000000000000000042", "Mooniswap.address"], "values": ["500000000"] }
            ],
            "exception": false
          }
        },
        {
          "instance": "WBTC_2",
          "caller": "0xdeadbeef00000000000000000000000000000042",
          "method": "approve",
          "calldata": ["Mooniswap.address", "500000000"],
          "expected": {
            "return_data": ["0x0000000000000000000000000000000000000000000000000000000000000001"],
            "events": [
              { "topics": ["0x8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925", "0xdeadbeef00000000000000000000000000000042", "Mooniswap.address"], "values": ["500000000"] }
            ],
            "exception": false
          }
        },
        {
          "instance": "Mooniswap",
          "caller": "0xdeadbeef00000000000000000000000000000042",
          "method": "deposit",
          "calldata": [
            "0x0000000000000000000000000000000000000000000000000000000000000040",
            "0x00000000000000000000000000000000000000000000000000000000000000a0",
            "0x0000000000000000000000000000000000000000000000000000000000000002",
            "10000000",
            "10000000",
            "0x0000000000000000000000000000000000000000000000000000000000000002",
            "1000000",
            "1000000"
          ],
          "expected": {
            "return_data": ["10000000"],
            "events": [
              { "topics": ["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", "0x0000000000000000000000000000000000000000000000000000000000000000", "Mooniswap.address"], "values": ["1000"] },
              { "topics": ["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", "0xdeadbeef00000000000000000000000000000042", "Mooniswap.address"], "values": ["10000000"] },
              { "topics": ["0x8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925", "0xdeadbeef00000000000000000000000000000042", "Mooniswap.address"], "values": ["490000000"] },
              { "topics": ["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", "0xdeadbeef00000000000000000000000000000042", "Mooniswap.address"], "values": ["10000000"] },
              { "topics": ["0x8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925", "0xdeadbeef00000000000000000000000000000042", "Mooniswap.address"], "values": ["490000000"] },
              { "topics": ["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", "0x0000000000000000000000000000000000000000000000000000000000000000", "0xdeadbeef00000000000000000000000000000042"], "values": ["10000000"] },
              { "topics": ["0x2da466a7b24304f47e87fa2e1e5a81b9831ce54fec19055ce277ca2f39ba42c4", "0xdeadbeef00000000000000000000000000000042"], "values": ["10000000"] }
            ],
            "exception": false
          }
        },
        {
          "instance": "Mooniswap",
          "caller": "0xdeadbeef00000000000000000000000000000042",
          "method": "swap",
          "calldata": ["WBTC_1.address", "WBTC_2.address", "5000", "5000", "0"]
        }
      ],
      "expected": {
        "return_data": ["5000"],
        "events": [
          { "topics": ["0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef", "0xdeadbeef00000000000000000000000000000042", "Mooniswap.address"], "values": ["5000"] },
          { "topics": ["0x8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925", "0xdeadbeef00000000000000000000000000000042", "Mooniswap.address"], "values": ["489995000"] }
        ],
        "exception": false
      }
    }
  ],
  "contracts": {
    "Mooniswap": "Mooniswap.sol:Mooniswap",
    "WBTC_1": "ERC20/ERC20.sol:ERC20",
    "WBTC_2": "ERC20/ERC20.sol:ERC20",
    "VirtualBalance": "Mooniswap.sol:VirtualBalance",
    "Math": "math/Math.sol:Math"
  },
  "libraries": {
    "Mooniswap.sol": { "VirtualBalance": "VirtualBalance" },
    "math/Math.sol": { "Math": "Math" }
  },
  "group": "Real life"
}
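For orientation, the deleted metadata file above follows a simple shape: top-level modes, a list of cases (each with named inputs, calldata and expectations), plus contract and library maps and a group label. The sketch below shows one hypothetical way to deserialize that shape with serde; the type and field names are illustrative assumptions, not the actual types used by the test runner (for instance in revive-dt-format).

```rust
// Hypothetical sketch of the metadata shape shown above; not the project's
// real types. `expected` is kept as raw JSON because, in the file above, it is
// either a list of return values or an object with return_data/events/exception.
use std::collections::HashMap;

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Metadata {
    modes: Vec<String>,
    cases: Vec<Case>,
    contracts: HashMap<String, String>,
    libraries: HashMap<String, HashMap<String, String>>,
    group: String,
}

#[derive(Debug, Deserialize)]
struct Case {
    name: String,
    inputs: Vec<Input>,
    expected: Option<serde_json::Value>,
}

#[derive(Debug, Deserialize)]
struct Input {
    instance: String,
    method: String,
    caller: Option<String>,
    calldata: Vec<String>,
    expected: Option<serde_json::Value>,
}

fn parse(raw: &str) -> serde_json::Result<Metadata> {
    serde_json::from_str(raw)
}
```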
@@ -1,13 +0,0 @@
[package]
name = "revive-dt-common"
description = "A library containing common concepts that other crates in the workspace can rely on"
version.workspace = true
authors.workspace = true
license.workspace = true
edition.workspace = true
repository.workspace = true
rust-version.workspace = true

[dependencies]
anyhow = { workspace = true }
semver = { workspace = true }
@@ -1,22 +0,0 @@
use std::{
    fs::{read_dir, remove_dir_all, remove_file},
    path::Path,
};

use anyhow::Result;

/// This method clears the passed directory of all of the files and directories contained within
/// without deleting the directory.
pub fn clear_directory(path: impl AsRef<Path>) -> Result<()> {
    for entry in read_dir(path.as_ref())? {
        let entry = entry?;
        let entry_path = entry.path();

        if entry_path.is_file() {
            remove_file(entry_path)?
        } else {
            remove_dir_all(entry_path)?
        }
    }
    Ok(())
}
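A brief usage sketch for the helper above, assuming the crate is consumed under its package name revive-dt-common (so the function is reachable as revive_dt_common::fs::clear_directory):

```rust
use std::fs;

use anyhow::Result;
// Assumes the crate is in scope as `revive_dt_common`, matching the package
// name in the deleted Cargo.toml shown earlier.
use revive_dt_common::fs::clear_directory;

fn main() -> Result<()> {
    let work_dir = std::env::temp_dir().join("revive-dt-scratch");
    fs::create_dir_all(&work_dir)?;
    fs::write(work_dir.join("stale.json"), "{}")?;

    // Empties the directory but keeps the directory itself around.
    clear_directory(&work_dir)?;
    assert!(fs::read_dir(&work_dir)?.next().is_none());
    Ok(())
}
```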
@@ -1,3 +0,0 @@
mod clear_dir;

pub use clear_dir::*;
@@ -1,73 +0,0 @@
use std::{borrow::Cow, collections::HashSet, path::PathBuf};

/// An iterator that finds files of a certain extension in the provided directory. You can think of
/// this a glob pattern similar to: `${path}/**/*.md`
pub struct FilesWithExtensionIterator {
    /// The set of allowed extensions that that match the requirement and that should be returned
    /// when found.
    allowed_extensions: HashSet<Cow<'static, str>>,

    /// The set of directories to visit next. This iterator does BFS and so these directories will
    /// only be visited if we can't find any files in our state.
    directories_to_search: Vec<PathBuf>,

    /// The set of files matching the allowed extensions that were found. If there are entries in
    /// this vector then they will be returned when the [`Iterator::next`] method is called. If not
    /// then we visit one of the next directories to visit.
    files_matching_allowed_extensions: Vec<PathBuf>,
}

impl FilesWithExtensionIterator {
    pub fn new(root_directory: PathBuf) -> Self {
        Self {
            allowed_extensions: Default::default(),
            directories_to_search: vec![root_directory],
            files_matching_allowed_extensions: Default::default(),
        }
    }

    pub fn with_allowed_extension(
        mut self,
        allowed_extension: impl Into<Cow<'static, str>>,
    ) -> Self {
        self.allowed_extensions.insert(allowed_extension.into());
        self
    }
}

impl Iterator for FilesWithExtensionIterator {
    type Item = PathBuf;

    fn next(&mut self) -> Option<Self::Item> {
        if let Some(file_path) = self.files_matching_allowed_extensions.pop() {
            return Some(file_path);
        };

        let directory_to_search = self.directories_to_search.pop()?;

        // Read all of the entries in the directory. If we failed to read this dir's entires then we
        // elect to just ignore it and look in the next directory, we do that by calling the next
        // method again on the iterator, which is an intentional decision that we made here instead
        // of panicking.
        let Ok(dir_entries) = std::fs::read_dir(directory_to_search) else {
            return self.next();
        };

        for entry in dir_entries.flatten() {
            let entry_path = entry.path();
            if entry_path.is_dir() {
                self.directories_to_search.push(entry_path)
            } else if entry_path.is_file()
                && entry_path.extension().is_some_and(|ext| {
                    self.allowed_extensions
                        .iter()
                        .any(|allowed| ext.eq_ignore_ascii_case(allowed.as_ref()))
                })
            {
                self.files_matching_allowed_extensions.push(entry_path)
            }
        }

        self.next()
    }
}
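A brief usage sketch for the iterator above, again assuming it is consumed from revive_dt_common, where lib.rs re-exports it via the iterators module:

```rust
use std::path::PathBuf;

// Assumes the crate is in scope as `revive_dt_common`; the iterator is
// re-exported from its `iterators` module as shown above.
use revive_dt_common::iterators::FilesWithExtensionIterator;

fn main() {
    // Walk a corpus directory and collect every Solidity and JSON file,
    // however deeply nested (roughly `corpus/**/*.{sol,json}`).
    let corpus: Vec<PathBuf> = FilesWithExtensionIterator::new(PathBuf::from("corpus"))
        .with_allowed_extension("sol")
        .with_allowed_extension("json")
        .collect();

    for path in corpus {
        println!("{}", path.display());
    }
}
```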
@@ -1,3 +0,0 @@
mod files_with_extension_iterator;

pub use files_with_extension_iterator::*;
@@ -1,7 +0,0 @@
//! This crate provides common concepts, functionality, types, macros, and more that other crates in
//! the workspace can benefit from.

pub mod fs;
pub mod iterators;
pub mod macros;
pub mod types;
@@ -1,106 +0,0 @@
/// Defines wrappers around types.
///
/// For example, the macro invocation seen below:
///
/// ```rust,ignore
/// define_wrapper_type!(
///     pub struct CaseId(usize);
/// );
/// ```
///
/// Would define a wrapper type that looks like the following:
///
/// ```rust,ignore
/// pub struct CaseId(usize);
/// ```
///
/// And would also implement a number of methods on this type making it easier to use.
///
/// These wrapper types become very useful as they make the code a lot easier to read.
///
/// Take the following as an example:
///
/// ```rust,ignore
/// struct State {
///     contracts: HashMap<usize, HashMap<String, Vec<u8>>>
/// }
/// ```
///
/// In the above code it's hard to understand what the various types refer to or what to expect them
/// to contain.
///
/// With these wrapper types we're able to create code that's self-documenting in that the types
/// tell us what the code is referring to. The above code is transformed into:
///
/// ```rust,ignore
/// struct State {
///     contracts: HashMap<CaseId, HashMap<ContractName, ContractByteCode>>
/// }
/// ```
///
/// Note that we follow the same syntax for defining wrapper structs but we do not permit the use of
/// generics.
#[macro_export]
macro_rules! define_wrapper_type {
    (
        $(#[$meta: meta])*
        $vis:vis struct $ident: ident($ty: ty);
    ) => {
        $(#[$meta])*
        $vis struct $ident($ty);

        impl $ident {
            pub fn new(value: impl Into<$ty>) -> Self {
                Self(value.into())
            }

            pub fn into_inner(self) -> $ty {
                self.0
            }

            pub fn as_inner(&self) -> &$ty {
                &self.0
            }
        }

        impl AsRef<$ty> for $ident {
            fn as_ref(&self) -> &$ty {
                &self.0
            }
        }

        impl AsMut<$ty> for $ident {
            fn as_mut(&mut self) -> &mut $ty {
                &mut self.0
            }
        }

        impl std::ops::Deref for $ident {
            type Target = $ty;

            fn deref(&self) -> &Self::Target {
                &self.0
            }
        }

        impl std::ops::DerefMut for $ident {
            fn deref_mut(&mut self) -> &mut Self::Target {
                &mut self.0
            }
        }

        impl From<$ty> for $ident {
            fn from(value: $ty) -> Self {
                Self(value)
            }
        }

        impl From<$ident> for $ty {
            fn from(value: $ident) -> Self {
                value.0
            }
        }
    };
}

/// Technically not needed but this allows for the macro to be found in the `macros` module of the
/// crate in addition to being found in the root of the crate.
pub use define_wrapper_type;
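A short invocation sketch (not part of the diff), following the struct-style matcher of the macro above; the derives and the `CaseIdx` name are assumptions taken from the surrounding code.

// Hypothetical invocation of define_wrapper_type! and use of the generated methods.
define_wrapper_type!(
    /// Identifies a single case inside a metadata file.
    #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
    pub struct CaseIdx(usize);
);

fn wrapper_type_example() {
    let idx = CaseIdx::new(3usize);     // From the generated `new` constructor.
    assert_eq!(*idx.as_inner(), 3);     // Borrow the wrapped value.
    let raw: usize = idx.into_inner();  // Unwrap it again.
    assert_eq!(raw, 3);
}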
@@ -1,3 +0,0 @@
mod define_wrapper_type;

pub use define_wrapper_type::*;
@@ -1,3 +0,0 @@
mod version_or_requirement;

pub use version_or_requirement::*;
@@ -1,41 +0,0 @@
use semver::{Version, VersionReq};

#[derive(Clone, Debug)]
pub enum VersionOrRequirement {
    Version(Version),
    Requirement(VersionReq),
}

impl From<Version> for VersionOrRequirement {
    fn from(value: Version) -> Self {
        Self::Version(value)
    }
}

impl From<VersionReq> for VersionOrRequirement {
    fn from(value: VersionReq) -> Self {
        Self::Requirement(value)
    }
}

impl TryFrom<VersionOrRequirement> for Version {
    type Error = anyhow::Error;

    fn try_from(value: VersionOrRequirement) -> Result<Self, Self::Error> {
        let VersionOrRequirement::Version(version) = value else {
            anyhow::bail!("Version or requirement was not a version");
        };
        Ok(version)
    }
}

impl TryFrom<VersionOrRequirement> for VersionReq {
    type Error = anyhow::Error;

    fn try_from(value: VersionOrRequirement) -> Result<Self, Self::Error> {
        let VersionOrRequirement::Requirement(requirement) = value else {
            anyhow::bail!("Version or requirement was not a requirement");
        };
        Ok(requirement)
    }
}
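A brief sketch (not part of the diff) of how the conversions above compose; the use of anyhow::Result mirrors the TryFrom error type.

// Hypothetical round trip through VersionOrRequirement.
fn version_or_requirement_example() -> anyhow::Result<()> {
    let either: VersionOrRequirement = semver::Version::new(0, 8, 30).into();
    let version: semver::Version = either.try_into()?; // Succeeds, the variant matches.

    let requirement: VersionOrRequirement = semver::VersionReq::parse(">=0.8.0")?.into();
    assert!(semver::Version::try_from(requirement).is_err()); // Fails, it holds a requirement.

    let _ = version;
    Ok(())
}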
@@ -9,18 +9,11 @@ repository.workspace = true
|
|||||||
rust-version.workspace = true
|
rust-version.workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
|
anyhow = { workspace = true }
|
||||||
revive-solc-json-interface = { workspace = true }
|
revive-solc-json-interface = { workspace = true }
|
||||||
revive-dt-common = { workspace = true }
|
|
||||||
revive-dt-config = { workspace = true }
|
revive-dt-config = { workspace = true }
|
||||||
revive-dt-solc-binaries = { workspace = true }
|
revive-dt-solc-binaries = { workspace = true }
|
||||||
revive-common = { workspace = true }
|
revive-common = { workspace = true }
|
||||||
|
|
||||||
alloy = { workspace = true }
|
|
||||||
alloy-primitives = { workspace = true }
|
|
||||||
anyhow = { workspace = true }
|
|
||||||
foundry-compilers-artifacts = { workspace = true }
|
|
||||||
semver = { workspace = true }
|
semver = { workspace = true }
|
||||||
serde = { workspace = true }
|
|
||||||
serde_json = { workspace = true }
|
serde_json = { workspace = true }
|
||||||
tracing = { workspace = true }
|
tracing = { workspace = true }
|
||||||
tokio = { workspace = true }
|
|
||||||
|
|||||||
+93
-86
@@ -4,20 +4,20 @@
|
|||||||
//! - Polkadot revive Wasm compiler
|
//! - Polkadot revive Wasm compiler
|
||||||
|
|
||||||
use std::{
|
use std::{
|
||||||
collections::HashMap,
|
|
||||||
fs::read_to_string,
|
fs::read_to_string,
|
||||||
hash::Hash,
|
hash::Hash,
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
};
|
};
|
||||||
|
|
||||||
use alloy::json_abi::JsonAbi;
|
use revive_dt_config::Arguments;
|
||||||
use alloy_primitives::Address;
|
|
||||||
use semver::Version;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
use revive_common::EVMVersion;
|
use revive_common::EVMVersion;
|
||||||
use revive_dt_common::types::VersionOrRequirement;
|
use revive_solc_json_interface::{
|
||||||
use revive_dt_config::Arguments;
|
SolcStandardJsonInput, SolcStandardJsonInputLanguage, SolcStandardJsonInputSettings,
|
||||||
|
SolcStandardJsonInputSettingsOptimizer, SolcStandardJsonInputSettingsSelection,
|
||||||
|
SolcStandardJsonOutput,
|
||||||
|
};
|
||||||
|
use semver::Version;
|
||||||
|
|
||||||
pub mod revive_js;
|
pub mod revive_js;
|
||||||
pub mod revive_resolc;
|
pub mod revive_resolc;
|
||||||
@@ -31,44 +31,60 @@ pub trait SolidityCompiler {
|
|||||||
/// The low-level compiler interface.
|
/// The low-level compiler interface.
|
||||||
fn build(
|
fn build(
|
||||||
&self,
|
&self,
|
||||||
input: CompilerInput,
|
input: CompilerInput<Self::Options>,
|
||||||
additional_options: Self::Options,
|
) -> anyhow::Result<CompilerOutput<Self::Options>>;
|
||||||
) -> impl Future<Output = anyhow::Result<CompilerOutput>>;
|
|
||||||
|
|
||||||
fn new(solc_executable: PathBuf) -> Self;
|
fn new(solc_executable: PathBuf) -> Self;
|
||||||
|
|
||||||
fn get_compiler_executable(
|
fn get_compiler_executable(config: &Arguments, version: Version) -> anyhow::Result<PathBuf>;
|
||||||
config: &Arguments,
|
|
||||||
version: impl Into<VersionOrRequirement>,
|
|
||||||
) -> impl Future<Output = anyhow::Result<PathBuf>>;
|
|
||||||
|
|
||||||
fn version(&self) -> anyhow::Result<Version>;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The generic compilation input configuration.
|
/// The generic compilation input configuration.
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug)]
|
||||||
pub struct CompilerInput {
|
pub struct CompilerInput<T: PartialEq + Eq + Hash> {
|
||||||
pub enable_optimization: Option<bool>,
|
pub extra_options: T,
|
||||||
pub via_ir: Option<bool>,
|
pub input: SolcStandardJsonInput,
|
||||||
pub evm_version: Option<EVMVersion>,
|
|
||||||
pub allow_paths: Vec<PathBuf>,
|
|
||||||
pub base_path: Option<PathBuf>,
|
|
||||||
pub sources: HashMap<PathBuf, String>,
|
|
||||||
pub libraries: HashMap<PathBuf, HashMap<String, Address>>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The generic compilation output configuration.
|
/// The generic compilation output configuration.
|
||||||
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
|
pub struct CompilerOutput<T: PartialEq + Eq + Hash> {
|
||||||
pub struct CompilerOutput {
|
/// The solc standard JSON input.
|
||||||
/// The compiled contracts. The bytecode of the contract is kept as a string incase linking is
|
pub input: CompilerInput<T>,
|
||||||
/// required and the compiled source has placeholders.
|
/// The produced solc standard JSON output.
|
||||||
pub contracts: HashMap<PathBuf, HashMap<String, (String, JsonAbi)>>,
|
pub output: SolcStandardJsonOutput,
|
||||||
|
/// The error message in case the compiler returns abnormally.
|
||||||
|
pub error: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// A generic builder style interface for configuring the supported compiler options.
|
impl<T> PartialEq for CompilerInput<T>
|
||||||
|
where
|
||||||
|
T: PartialEq + Eq + Hash,
|
||||||
|
{
|
||||||
|
fn eq(&self, other: &Self) -> bool {
|
||||||
|
let self_input = serde_json::to_vec(&self.input).unwrap_or_default();
|
||||||
|
let other_input = serde_json::to_vec(&self.input).unwrap_or_default();
|
||||||
|
self.extra_options.eq(&other.extra_options) && self_input == other_input
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T> Eq for CompilerInput<T> where T: PartialEq + Eq + Hash {}
|
||||||
|
|
||||||
|
impl<T> Hash for CompilerInput<T>
|
||||||
|
where
|
||||||
|
T: PartialEq + Eq + Hash,
|
||||||
|
{
|
||||||
|
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
||||||
|
self.extra_options.hash(state);
|
||||||
|
state.write(&serde_json::to_vec(&self.input).unwrap_or_default());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A generic builder style interface for configuring all compiler options.
|
||||||
pub struct Compiler<T: SolidityCompiler> {
|
pub struct Compiler<T: SolidityCompiler> {
|
||||||
input: CompilerInput,
|
input: SolcStandardJsonInput,
|
||||||
additional_options: T::Options,
|
extra_options: T::Options,
|
||||||
|
allow_paths: Vec<String>,
|
||||||
|
base_path: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for Compiler<solc::Solc> {
|
impl Default for Compiler<solc::Solc> {
|
||||||
@@ -83,80 +99,71 @@ where
|
|||||||
{
|
{
|
||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
Self {
|
Self {
|
||||||
input: CompilerInput {
|
input: SolcStandardJsonInput {
|
||||||
enable_optimization: Default::default(),
|
language: SolcStandardJsonInputLanguage::Solidity,
|
||||||
via_ir: Default::default(),
|
|
||||||
evm_version: Default::default(),
|
|
||||||
allow_paths: Default::default(),
|
|
||||||
base_path: Default::default(),
|
|
||||||
sources: Default::default(),
|
sources: Default::default(),
|
||||||
libraries: Default::default(),
|
settings: SolcStandardJsonInputSettings::new(
|
||||||
|
None,
|
||||||
|
Default::default(),
|
||||||
|
None,
|
||||||
|
SolcStandardJsonInputSettingsSelection::new_required(),
|
||||||
|
SolcStandardJsonInputSettingsOptimizer::new(
|
||||||
|
false,
|
||||||
|
None,
|
||||||
|
&Version::new(0, 0, 0),
|
||||||
|
false,
|
||||||
|
),
|
||||||
|
None,
|
||||||
|
None,
|
||||||
|
),
|
||||||
},
|
},
|
||||||
additional_options: T::Options::default(),
|
extra_options: Default::default(),
|
||||||
|
allow_paths: Default::default(),
|
||||||
|
base_path: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn with_optimization(mut self, value: impl Into<Option<bool>>) -> Self {
|
pub fn solc_optimizer(mut self, enabled: bool) -> Self {
|
||||||
self.input.enable_optimization = value.into();
|
self.input.settings.optimizer.enabled = enabled;
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn with_via_ir(mut self, value: impl Into<Option<bool>>) -> Self {
|
pub fn with_source(mut self, path: &Path) -> anyhow::Result<Self> {
|
||||||
self.input.via_ir = value.into();
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_evm_version(mut self, version: impl Into<Option<EVMVersion>>) -> Self {
|
|
||||||
self.input.evm_version = version.into();
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_allow_path(mut self, path: impl AsRef<Path>) -> Self {
|
|
||||||
self.input.allow_paths.push(path.as_ref().into());
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_base_path(mut self, path: impl Into<Option<PathBuf>>) -> Self {
|
|
||||||
self.input.base_path = path.into();
|
|
||||||
self
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn with_source(mut self, path: impl AsRef<Path>) -> anyhow::Result<Self> {
|
|
||||||
self.input
|
self.input
|
||||||
.sources
|
.sources
|
||||||
.insert(path.as_ref().to_path_buf(), read_to_string(path.as_ref())?);
|
.insert(path.display().to_string(), read_to_string(path)?.into());
|
||||||
Ok(self)
|
Ok(self)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn with_library(
|
pub fn evm_version(mut self, evm_version: EVMVersion) -> Self {
|
||||||
mut self,
|
self.input.settings.evm_version = Some(evm_version);
|
||||||
path: impl AsRef<Path>,
|
|
||||||
name: impl AsRef<str>,
|
|
||||||
address: Address,
|
|
||||||
) -> Self {
|
|
||||||
self.input
|
|
||||||
.libraries
|
|
||||||
.entry(path.as_ref().to_path_buf())
|
|
||||||
.or_default()
|
|
||||||
.insert(name.as_ref().into(), address);
|
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn with_additional_options(mut self, options: impl Into<T::Options>) -> Self {
|
pub fn extra_options(mut self, extra_options: T::Options) -> Self {
|
||||||
self.additional_options = options.into();
|
self.extra_options = extra_options;
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn try_build(
|
pub fn allow_path(mut self, path: String) -> Self {
|
||||||
self,
|
self.allow_paths.push(path);
|
||||||
compiler_path: impl AsRef<Path>,
|
self
|
||||||
) -> anyhow::Result<CompilerOutput> {
|
|
||||||
T::new(compiler_path.as_ref().to_path_buf())
|
|
||||||
.build(self.input, self.additional_options)
|
|
||||||
.await
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn input(&self) -> CompilerInput {
|
pub fn base_path(mut self, base_path: String) -> Self {
|
||||||
|
self.base_path = Some(base_path);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn try_build(self, solc_path: PathBuf) -> anyhow::Result<CompilerOutput<T::Options>> {
|
||||||
|
T::new(solc_path).build(CompilerInput {
|
||||||
|
extra_options: self.extra_options,
|
||||||
|
input: self.input,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the compiler JSON input.
|
||||||
|
pub fn input(&self) -> SolcStandardJsonInput {
|
||||||
self.input.clone()
|
self.input.clone()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,27 +6,11 @@ use std::{
|
|||||||
process::{Command, Stdio},
|
process::{Command, Stdio},
|
||||||
};
|
};
|
||||||
|
|
||||||
use revive_dt_common::types::VersionOrRequirement;
|
|
||||||
use revive_dt_config::Arguments;
|
|
||||||
use revive_solc_json_interface::{
|
|
||||||
SolcStandardJsonInput, SolcStandardJsonInputLanguage, SolcStandardJsonInputSettings,
|
|
||||||
SolcStandardJsonInputSettingsOptimizer, SolcStandardJsonInputSettingsSelection,
|
|
||||||
SolcStandardJsonOutput,
|
|
||||||
};
|
|
||||||
|
|
||||||
use crate::{CompilerInput, CompilerOutput, SolidityCompiler};
|
use crate::{CompilerInput, CompilerOutput, SolidityCompiler};
|
||||||
|
use revive_dt_config::Arguments;
|
||||||
use alloy::json_abi::JsonAbi;
|
use revive_solc_json_interface::SolcStandardJsonOutput;
|
||||||
use anyhow::Context;
|
|
||||||
use semver::Version;
|
|
||||||
use tokio::{io::AsyncWriteExt, process::Command as AsyncCommand};
|
|
||||||
|
|
||||||
// TODO: I believe that we need to also pass the solc compiler to resolc so that resolc uses the
|
|
||||||
// specified solc compiler. I believe that currently we completely ignore the specified solc binary
|
|
||||||
// when invoking resolc which doesn't seem right if we're using solc as a compiler frontend.
|
|
||||||
|
|
||||||
/// A wrapper around the `resolc` binary, emitting PVM-compatible bytecode.
|
/// A wrapper around the `resolc` binary, emitting PVM-compatible bytecode.
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Resolc {
|
pub struct Resolc {
|
||||||
/// Path to the `resolc` executable
|
/// Path to the `resolc` executable
|
||||||
resolc_path: PathBuf,
|
resolc_path: PathBuf,
|
||||||
@@ -35,171 +19,63 @@ pub struct Resolc {
|
|||||||
impl SolidityCompiler for Resolc {
|
impl SolidityCompiler for Resolc {
|
||||||
type Options = Vec<String>;
|
type Options = Vec<String>;
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug", ret)]
|
fn build(
|
||||||
async fn build(
|
|
||||||
&self,
|
&self,
|
||||||
CompilerInput {
|
input: CompilerInput<Self::Options>,
|
||||||
enable_optimization,
|
) -> anyhow::Result<CompilerOutput<Self::Options>> {
|
||||||
// Ignored and not honored since this is required for the resolc compilation.
|
let mut child = Command::new(&self.resolc_path)
|
||||||
via_ir: _via_ir,
|
.arg("--standard-json")
|
||||||
evm_version,
|
.args(&input.extra_options)
|
||||||
allow_paths,
|
|
||||||
base_path,
|
|
||||||
sources,
|
|
||||||
libraries,
|
|
||||||
}: CompilerInput,
|
|
||||||
additional_options: Self::Options,
|
|
||||||
) -> anyhow::Result<CompilerOutput> {
|
|
||||||
let input = SolcStandardJsonInput {
|
|
||||||
language: SolcStandardJsonInputLanguage::Solidity,
|
|
||||||
sources: sources
|
|
||||||
.into_iter()
|
|
||||||
.map(|(path, source)| (path.display().to_string(), source.into()))
|
|
||||||
.collect(),
|
|
||||||
settings: SolcStandardJsonInputSettings {
|
|
||||||
evm_version,
|
|
||||||
libraries: Some(
|
|
||||||
libraries
|
|
||||||
.into_iter()
|
|
||||||
.map(|(source_code, libraries_map)| {
|
|
||||||
(
|
|
||||||
source_code.display().to_string(),
|
|
||||||
libraries_map
|
|
||||||
.into_iter()
|
|
||||||
.map(|(library_ident, library_address)| {
|
|
||||||
(library_ident, library_address.to_string())
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
),
|
|
||||||
remappings: None,
|
|
||||||
output_selection: Some(SolcStandardJsonInputSettingsSelection::new_required()),
|
|
||||||
via_ir: Some(true),
|
|
||||||
optimizer: SolcStandardJsonInputSettingsOptimizer::new(
|
|
||||||
enable_optimization.unwrap_or(false),
|
|
||||||
None,
|
|
||||||
&Version::new(0, 0, 0),
|
|
||||||
false,
|
|
||||||
),
|
|
||||||
metadata: None,
|
|
||||||
polkavm: None,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut command = AsyncCommand::new(&self.resolc_path);
|
|
||||||
command
|
|
||||||
.stdin(Stdio::piped())
|
.stdin(Stdio::piped())
|
||||||
.stdout(Stdio::piped())
|
.stdout(Stdio::piped())
|
||||||
.stderr(Stdio::piped())
|
.stderr(Stdio::piped())
|
||||||
.arg("--standard-json");
|
.spawn()?;
|
||||||
|
|
||||||
if let Some(ref base_path) = base_path {
|
|
||||||
command.arg("--base-path").arg(base_path);
|
|
||||||
}
|
|
||||||
if !allow_paths.is_empty() {
|
|
||||||
command.arg("--allow-paths").arg(
|
|
||||||
allow_paths
|
|
||||||
.iter()
|
|
||||||
.map(|path| path.display().to_string())
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
.join(","),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
let mut child = command.spawn()?;
|
|
||||||
|
|
||||||
let stdin_pipe = child.stdin.as_mut().expect("stdin must be piped");
|
let stdin_pipe = child.stdin.as_mut().expect("stdin must be piped");
|
||||||
let serialized_input = serde_json::to_vec(&input)?;
|
serde_json::to_writer(stdin_pipe, &input.input)?;
|
||||||
stdin_pipe.write_all(&serialized_input).await?;
|
|
||||||
|
|
||||||
let output = child.wait_with_output().await?;
|
let json_in = serde_json::to_string_pretty(&input.input)?;
|
||||||
|
|
||||||
|
let output = child.wait_with_output()?;
|
||||||
let stdout = output.stdout;
|
let stdout = output.stdout;
|
||||||
let stderr = output.stderr;
|
let stderr = output.stderr;
|
||||||
|
|
||||||
if !output.status.success() {
|
if !output.status.success() {
|
||||||
let json_in = serde_json::to_string_pretty(&input)?;
|
|
||||||
let message = String::from_utf8_lossy(&stderr);
|
let message = String::from_utf8_lossy(&stderr);
|
||||||
tracing::error!(
|
tracing::error!(
|
||||||
status = %output.status,
|
"resolc failed exit={} stderr={} JSON-in={} ",
|
||||||
message = %message,
|
output.status,
|
||||||
json_input = json_in,
|
&message,
|
||||||
"Compilation using resolc failed"
|
json_in,
|
||||||
);
|
);
|
||||||
anyhow::bail!("Compilation failed with an error: {message}");
|
return Ok(CompilerOutput {
|
||||||
|
input,
|
||||||
|
output: Default::default(),
|
||||||
|
error: Some(message.into()),
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
let parsed = serde_json::from_slice::<SolcStandardJsonOutput>(&stdout).map_err(|e| {
|
let parsed: SolcStandardJsonOutput = serde_json::from_slice(&stdout).map_err(|e| {
|
||||||
anyhow::anyhow!(
|
anyhow::anyhow!(
|
||||||
"failed to parse resolc JSON output: {e}\nstderr: {}",
|
"failed to parse resolc JSON output: {e}\nstderr: {}",
|
||||||
String::from_utf8_lossy(&stderr)
|
String::from_utf8_lossy(&stderr)
|
||||||
)
|
)
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
tracing::debug!(
|
Ok(CompilerOutput {
|
||||||
output = %serde_json::to_string(&parsed).unwrap(),
|
input,
|
||||||
"Compiled successfully"
|
output: parsed,
|
||||||
);
|
error: None,
|
||||||
|
})
|
||||||
// Detecting if the compiler output contained errors and reporting them through logs and
|
|
||||||
// errors instead of returning the compiler output that might contain errors.
|
|
||||||
for error in parsed.errors.iter().flatten() {
|
|
||||||
if error.severity == "error" {
|
|
||||||
tracing::error!(
|
|
||||||
?error,
|
|
||||||
?input,
|
|
||||||
output = %serde_json::to_string(&parsed).unwrap(),
|
|
||||||
"Encountered an error in the compilation"
|
|
||||||
);
|
|
||||||
anyhow::bail!("Encountered an error in the compilation: {error}")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let Some(contracts) = parsed.contracts else {
|
|
||||||
anyhow::bail!("Unexpected error - resolc output doesn't have a contracts section");
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut compiler_output = CompilerOutput::default();
|
|
||||||
for (source_path, contracts) in contracts.into_iter() {
|
|
||||||
let source_path = PathBuf::from(source_path).canonicalize()?;
|
|
||||||
|
|
||||||
let map = compiler_output.contracts.entry(source_path).or_default();
|
|
||||||
for (contract_name, contract_information) in contracts.into_iter() {
|
|
||||||
let bytecode = contract_information
|
|
||||||
.evm
|
|
||||||
.and_then(|evm| evm.bytecode.clone())
|
|
||||||
.context("Unexpected - Contract compiled with resolc has no bytecode")?;
|
|
||||||
let abi = contract_information
|
|
||||||
.metadata
|
|
||||||
.as_ref()
|
|
||||||
.and_then(|metadata| metadata.as_object())
|
|
||||||
.and_then(|metadata| metadata.get("solc_metadata"))
|
|
||||||
.and_then(|solc_metadata| solc_metadata.as_str())
|
|
||||||
.and_then(|metadata| serde_json::from_str::<serde_json::Value>(metadata).ok())
|
|
||||||
.and_then(|metadata| {
|
|
||||||
metadata.get("output").and_then(|output| {
|
|
||||||
output
|
|
||||||
.get("abi")
|
|
||||||
.and_then(|abi| serde_json::from_value::<JsonAbi>(abi.clone()).ok())
|
|
||||||
})
|
|
||||||
})
|
|
||||||
.context(
|
|
||||||
"Unexpected - Failed to get the ABI for a contract compiled with resolc",
|
|
||||||
)?;
|
|
||||||
map.insert(contract_name, (bytecode.object, abi));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(compiler_output)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn new(resolc_path: PathBuf) -> Self {
|
fn new(resolc_path: PathBuf) -> Self {
|
||||||
Resolc { resolc_path }
|
Resolc { resolc_path }
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn get_compiler_executable(
|
fn get_compiler_executable(
|
||||||
config: &Arguments,
|
config: &Arguments,
|
||||||
_version: impl Into<VersionOrRequirement>,
|
_version: semver::Version,
|
||||||
) -> anyhow::Result<PathBuf> {
|
) -> anyhow::Result<PathBuf> {
|
||||||
if !config.resolc.as_os_str().is_empty() {
|
if !config.resolc.as_os_str().is_empty() {
|
||||||
return Ok(config.resolc.clone());
|
return Ok(config.resolc.clone());
|
||||||
@@ -207,47 +83,4 @@ impl SolidityCompiler for Resolc {
|
|||||||
|
|
||||||
Ok(PathBuf::from("resolc"))
|
Ok(PathBuf::from("resolc"))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn version(&self) -> anyhow::Result<semver::Version> {
|
|
||||||
// Logic for parsing the resolc version from the following string:
|
|
||||||
// Solidity frontend for the revive compiler version 0.3.0+commit.b238913.llvm-18.1.8
|
|
||||||
|
|
||||||
let output = Command::new(self.resolc_path.as_path())
|
|
||||||
.arg("--version")
|
|
||||||
.stdout(Stdio::piped())
|
|
||||||
.spawn()?
|
|
||||||
.wait_with_output()?
|
|
||||||
.stdout;
|
|
||||||
let output = String::from_utf8_lossy(&output);
|
|
||||||
let version_string = output
|
|
||||||
.split("version ")
|
|
||||||
.nth(1)
|
|
||||||
.context("Version parsing failed")?
|
|
||||||
.split("+")
|
|
||||||
.next()
|
|
||||||
.context("Version parsing failed")?;
|
|
||||||
|
|
||||||
Version::parse(version_string).map_err(Into::into)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn compiler_version_can_be_obtained() {
|
|
||||||
// Arrange
|
|
||||||
let args = Arguments::default();
|
|
||||||
let path = Resolc::get_compiler_executable(&args, Version::new(0, 7, 6))
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
let compiler = Resolc::new(path);
|
|
||||||
|
|
||||||
// Act
|
|
||||||
let version = compiler.version();
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
let _ = version.expect("Failed to get version");
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
+23
-201
@@ -6,24 +6,10 @@ use std::{
|
|||||||
process::{Command, Stdio},
|
process::{Command, Stdio},
|
||||||
};
|
};
|
||||||
|
|
||||||
use revive_dt_common::types::VersionOrRequirement;
|
use crate::{CompilerInput, CompilerOutput, SolidityCompiler};
|
||||||
use revive_dt_config::Arguments;
|
use revive_dt_config::Arguments;
|
||||||
use revive_dt_solc_binaries::download_solc;
|
use revive_dt_solc_binaries::download_solc;
|
||||||
|
|
||||||
use crate::{CompilerInput, CompilerOutput, SolidityCompiler};
|
|
||||||
|
|
||||||
use anyhow::Context;
|
|
||||||
use foundry_compilers_artifacts::{
|
|
||||||
output_selection::{
|
|
||||||
BytecodeOutputSelection, ContractOutputSelection, EvmOutputSelection, OutputSelection,
|
|
||||||
},
|
|
||||||
solc::CompilerOutput as SolcOutput,
|
|
||||||
solc::*,
|
|
||||||
};
|
|
||||||
use semver::Version;
|
|
||||||
use tokio::{io::AsyncWriteExt, process::Command as AsyncCommand};
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Solc {
|
pub struct Solc {
|
||||||
solc_path: PathBuf,
|
solc_path: PathBuf,
|
||||||
}
|
}
|
||||||
@@ -31,211 +17,47 @@ pub struct Solc {
|
|||||||
impl SolidityCompiler for Solc {
|
impl SolidityCompiler for Solc {
|
||||||
type Options = ();
|
type Options = ();
|
||||||
|
|
||||||
#[tracing::instrument(level = "debug", ret)]
|
fn build(
|
||||||
async fn build(
|
|
||||||
&self,
|
&self,
|
||||||
CompilerInput {
|
input: CompilerInput<Self::Options>,
|
||||||
enable_optimization,
|
) -> anyhow::Result<CompilerOutput<Self::Options>> {
|
||||||
via_ir,
|
let mut child = Command::new(&self.solc_path)
|
||||||
evm_version,
|
|
||||||
allow_paths,
|
|
||||||
base_path,
|
|
||||||
sources,
|
|
||||||
libraries,
|
|
||||||
}: CompilerInput,
|
|
||||||
_: Self::Options,
|
|
||||||
) -> anyhow::Result<CompilerOutput> {
|
|
||||||
let input = SolcInput {
|
|
||||||
language: SolcLanguage::Solidity,
|
|
||||||
sources: Sources(
|
|
||||||
sources
|
|
||||||
.into_iter()
|
|
||||||
.map(|(source_path, source_code)| (source_path, Source::new(source_code)))
|
|
||||||
.collect(),
|
|
||||||
),
|
|
||||||
settings: Settings {
|
|
||||||
optimizer: Optimizer {
|
|
||||||
enabled: enable_optimization,
|
|
||||||
details: Some(Default::default()),
|
|
||||||
..Default::default()
|
|
||||||
},
|
|
||||||
output_selection: OutputSelection::common_output_selection(
|
|
||||||
[
|
|
||||||
ContractOutputSelection::Abi,
|
|
||||||
ContractOutputSelection::Evm(EvmOutputSelection::ByteCode(
|
|
||||||
BytecodeOutputSelection::Object,
|
|
||||||
)),
|
|
||||||
]
|
|
||||||
.into_iter()
|
|
||||||
.map(|item| item.to_string()),
|
|
||||||
),
|
|
||||||
evm_version: evm_version.map(|version| version.to_string().parse().unwrap()),
|
|
||||||
via_ir,
|
|
||||||
libraries: Libraries {
|
|
||||||
libs: libraries
|
|
||||||
.into_iter()
|
|
||||||
.map(|(file_path, libraries)| {
|
|
||||||
(
|
|
||||||
file_path,
|
|
||||||
libraries
|
|
||||||
.into_iter()
|
|
||||||
.map(|(library_name, library_address)| {
|
|
||||||
(library_name, library_address.to_string())
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
},
|
|
||||||
..Default::default()
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut command = AsyncCommand::new(&self.solc_path);
|
|
||||||
command
|
|
||||||
.stdin(Stdio::piped())
|
.stdin(Stdio::piped())
|
||||||
.stdout(Stdio::piped())
|
.stdout(Stdio::piped())
|
||||||
.stderr(Stdio::piped())
|
.stderr(Stdio::piped())
|
||||||
.arg("--standard-json");
|
.arg("--standard-json")
|
||||||
|
.spawn()?;
|
||||||
if let Some(ref base_path) = base_path {
|
|
||||||
command.arg("--base-path").arg(base_path);
|
|
||||||
}
|
|
||||||
if !allow_paths.is_empty() {
|
|
||||||
command.arg("--allow-paths").arg(
|
|
||||||
allow_paths
|
|
||||||
.iter()
|
|
||||||
.map(|path| path.display().to_string())
|
|
||||||
.collect::<Vec<_>>()
|
|
||||||
.join(","),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
let mut child = command.spawn()?;
|
|
||||||
|
|
||||||
let stdin = child.stdin.as_mut().expect("should be piped");
|
let stdin = child.stdin.as_mut().expect("should be piped");
|
||||||
let serialized_input = serde_json::to_vec(&input)?;
|
serde_json::to_writer(stdin, &input.input)?;
|
||||||
stdin.write_all(&serialized_input).await?;
|
let output = child.wait_with_output()?;
|
||||||
let output = child.wait_with_output().await?;
|
|
||||||
|
|
||||||
if !output.status.success() {
|
if !output.status.success() {
|
||||||
let json_in = serde_json::to_string_pretty(&input)?;
|
|
||||||
let message = String::from_utf8_lossy(&output.stderr);
|
let message = String::from_utf8_lossy(&output.stderr);
|
||||||
tracing::error!(
|
tracing::error!("solc failed exit={} stderr={}", output.status, &message);
|
||||||
status = %output.status,
|
return Ok(CompilerOutput {
|
||||||
message = %message,
|
input,
|
||||||
json_input = json_in,
|
output: Default::default(),
|
||||||
"Compilation using solc failed"
|
error: Some(message.into()),
|
||||||
);
|
});
|
||||||
anyhow::bail!("Compilation failed with an error: {message}");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let parsed = serde_json::from_slice::<SolcOutput>(&output.stdout).map_err(|e| {
|
Ok(CompilerOutput {
|
||||||
anyhow::anyhow!(
|
input,
|
||||||
"failed to parse resolc JSON output: {e}\nstderr: {}",
|
output: serde_json::from_slice(&output.stdout)?,
|
||||||
String::from_utf8_lossy(&output.stdout)
|
error: None,
|
||||||
)
|
})
|
||||||
})?;
|
|
||||||
|
|
||||||
// Detecting if the compiler output contained errors and reporting them through logs and
|
|
||||||
// errors instead of returning the compiler output that might contain errors.
|
|
||||||
for error in parsed.errors.iter() {
|
|
||||||
if error.severity == Severity::Error {
|
|
||||||
tracing::error!(?error, ?input, "Encountered an error in the compilation");
|
|
||||||
anyhow::bail!("Encountered an error in the compilation: {error}")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tracing::debug!(
|
|
||||||
output = %String::from_utf8_lossy(&output.stdout).to_string(),
|
|
||||||
"Compiled successfully"
|
|
||||||
);
|
|
||||||
|
|
||||||
let mut compiler_output = CompilerOutput::default();
|
|
||||||
for (contract_path, contracts) in parsed.contracts {
|
|
||||||
let map = compiler_output
|
|
||||||
.contracts
|
|
||||||
.entry(contract_path.canonicalize()?)
|
|
||||||
.or_default();
|
|
||||||
for (contract_name, contract_info) in contracts.into_iter() {
|
|
||||||
let source_code = contract_info
|
|
||||||
.evm
|
|
||||||
.and_then(|evm| evm.bytecode)
|
|
||||||
.map(|bytecode| match bytecode.object {
|
|
||||||
BytecodeObject::Bytecode(bytecode) => bytecode.to_string(),
|
|
||||||
BytecodeObject::Unlinked(unlinked) => unlinked,
|
|
||||||
})
|
|
||||||
.context("Unexpected - contract compiled with solc has no source code")?;
|
|
||||||
let abi = contract_info
|
|
||||||
.abi
|
|
||||||
.context("Unexpected - contract compiled with solc as no ABI")?;
|
|
||||||
map.insert(contract_name, (source_code, abi));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(compiler_output)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn new(solc_path: PathBuf) -> Self {
|
fn new(solc_path: PathBuf) -> Self {
|
||||||
Self { solc_path }
|
Self { solc_path }
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn get_compiler_executable(
|
fn get_compiler_executable(
|
||||||
config: &Arguments,
|
config: &Arguments,
|
||||||
version: impl Into<VersionOrRequirement>,
|
version: semver::Version,
|
||||||
) -> anyhow::Result<PathBuf> {
|
) -> anyhow::Result<PathBuf> {
|
||||||
let path = download_solc(config.directory(), version, config.wasm).await?;
|
let path = download_solc(config.directory(), version, config.wasm)?;
|
||||||
Ok(path)
|
Ok(path)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn version(&self) -> anyhow::Result<semver::Version> {
|
|
||||||
// The following is the parsing code for the version from the solc version strings which
|
|
||||||
// look like the following:
|
|
||||||
// ```
|
|
||||||
// solc, the solidity compiler commandline interface
|
|
||||||
// Version: 0.8.30+commit.73712a01.Darwin.appleclang
|
|
||||||
// ```
|
|
||||||
|
|
||||||
let child = Command::new(self.solc_path.as_path())
|
|
||||||
.arg("--version")
|
|
||||||
.stdout(Stdio::piped())
|
|
||||||
.spawn()?;
|
|
||||||
let output = child.wait_with_output()?;
|
|
||||||
let output = String::from_utf8_lossy(&output.stdout);
|
|
||||||
let version_line = output
|
|
||||||
.split("Version: ")
|
|
||||||
.nth(1)
|
|
||||||
.context("Version parsing failed")?;
|
|
||||||
let version_string = version_line
|
|
||||||
.split("+")
|
|
||||||
.next()
|
|
||||||
.context("Version parsing failed")?;
|
|
||||||
|
|
||||||
Version::parse(version_string).map_err(Into::into)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod test {
|
|
||||||
use super::*;
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn compiler_version_can_be_obtained() {
|
|
||||||
// Arrange
|
|
||||||
let args = Arguments::default();
|
|
||||||
println!("Getting compiler path");
|
|
||||||
let path = Solc::get_compiler_executable(&args, Version::new(0, 7, 6))
|
|
||||||
.await
|
|
||||||
.unwrap();
|
|
||||||
println!("Got compiler path");
|
|
||||||
let compiler = Solc::new(path);
|
|
||||||
|
|
||||||
// Act
|
|
||||||
let version = compiler.version();
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
assert_eq!(
|
|
||||||
version.expect("Failed to get version"),
|
|
||||||
Version::new(0, 7, 6)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,9 +0,0 @@
// SPDX-License-Identifier: MIT

pragma solidity >=0.6.9;

contract Callable {
    function f(uint[1] memory p1) public pure returns(uint) {
        return p1[0];
    }
}
@@ -1,13 +0,0 @@
// SPDX-License-Identifier: MIT

// Report https://linear.app/matterlabs/issue/CPR-269/call-with-calldata-variable-bug

pragma solidity >=0.6.9;

import "./callable.sol";

contract Main {
    function main(uint[1] calldata p1, Callable callable) public returns(uint) {
        return callable.f(p1);
    }
}
@@ -1,21 +0,0 @@
{ "cases": [ {
    "name": "first",
    "inputs": [
        {
            "instance": "Main",
            "method": "main",
            "calldata": [
                "1",
                "Callable.address"
            ]
        }
    ],
    "expected": [
        "1"
    ]
} ],
    "contracts": {
        "Main": "main.sol:Main",
        "Callable": "callable.sol:Callable"
    }
}
@@ -1,88 +0,0 @@
use std::path::PathBuf;

use revive_dt_compiler::{Compiler, SolidityCompiler, revive_resolc::Resolc, solc::Solc};
use revive_dt_config::Arguments;
use semver::Version;

#[tokio::test]
async fn contracts_can_be_compiled_with_solc() {
    // Arrange
    let args = Arguments::default();
    let compiler_path = Solc::get_compiler_executable(&args, Version::new(0, 8, 30))
        .await
        .unwrap();
    println!("About to assert");

    // Act
    let output = Compiler::<Solc>::new()
        .with_source("./tests/assets/array_one_element/callable.sol")
        .unwrap()
        .with_source("./tests/assets/array_one_element/main.sol")
        .unwrap()
        .try_build(compiler_path)
        .await;

    // Assert
    let output = output.expect("Failed to compile");
    assert_eq!(output.contracts.len(), 2);

    let main_file_contracts = output
        .contracts
        .get(
            &PathBuf::from("./tests/assets/array_one_element/main.sol")
                .canonicalize()
                .unwrap(),
        )
        .unwrap();
    let callable_file_contracts = output
        .contracts
        .get(
            &PathBuf::from("./tests/assets/array_one_element/callable.sol")
                .canonicalize()
                .unwrap(),
        )
        .unwrap();
    assert!(main_file_contracts.contains_key("Main"));
    assert!(callable_file_contracts.contains_key("Callable"));
}

#[tokio::test]
async fn contracts_can_be_compiled_with_resolc() {
    // Arrange
    let args = Arguments::default();
    let compiler_path = Resolc::get_compiler_executable(&args, Version::new(0, 8, 30))
        .await
        .unwrap();

    // Act
    let output = Compiler::<Resolc>::new()
        .with_source("./tests/assets/array_one_element/callable.sol")
        .unwrap()
        .with_source("./tests/assets/array_one_element/main.sol")
        .unwrap()
        .try_build(compiler_path)
        .await;

    // Assert
    let output = output.expect("Failed to compile");
    assert_eq!(output.contracts.len(), 2);

    let main_file_contracts = output
        .contracts
        .get(
            &PathBuf::from("./tests/assets/array_one_element/main.sol")
                .canonicalize()
                .unwrap(),
        )
        .unwrap();
    let callable_file_contracts = output
        .contracts
        .get(
            &PathBuf::from("./tests/assets/array_one_element/callable.sol")
                .canonicalize()
                .unwrap(),
        )
        .unwrap();
    assert!(main_file_contracts.contains_key("Main"));
    assert!(callable_file_contracts.contains_key("Callable"));
}
@@ -3,7 +3,6 @@
|
|||||||
use std::{
|
use std::{
|
||||||
fmt::Display,
|
fmt::Display,
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
sync::LazyLock,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
use alloy::{network::EthereumWallet, signers::local::PrivateKeySigner};
|
use alloy::{network::EthereumWallet, signers::local::PrivateKeySigner};
|
||||||
@@ -55,7 +54,7 @@ pub struct Arguments {
|
|||||||
pub geth: PathBuf,
|
pub geth: PathBuf,
|
||||||
|
|
||||||
/// The maximum time in milliseconds to wait for geth to start.
|
/// The maximum time in milliseconds to wait for geth to start.
|
||||||
#[arg(long = "geth-start-timeout", default_value = "5000")]
|
#[arg(long = "geth-start-timeout", default_value = "2000")]
|
||||||
pub geth_start_timeout: u64,
|
pub geth_start_timeout: u64,
|
||||||
|
|
||||||
/// The test network chain ID.
|
/// The test network chain ID.
|
||||||
@@ -74,12 +73,6 @@ pub struct Arguments {
|
|||||||
)]
|
)]
|
||||||
pub account: String,
|
pub account: String,
|
||||||
|
|
||||||
/// This argument controls which private keys the nodes should have access to and be added to
|
|
||||||
/// its wallet signers. With a value of N, private keys (0, N] will be added to the signer set
|
|
||||||
/// of the node.
|
|
||||||
#[arg(long = "private-keys-count", default_value_t = 15_000)]
|
|
||||||
pub private_keys_to_add: usize,
|
|
||||||
|
|
||||||
/// The differential testing leader node implementation.
|
/// The differential testing leader node implementation.
|
||||||
#[arg(short, long = "leader", default_value = "geth")]
|
#[arg(short, long = "leader", default_value = "geth")]
|
||||||
pub leader: TestingPlatform,
|
pub leader: TestingPlatform,
|
||||||
@@ -92,13 +85,9 @@ pub struct Arguments {
|
|||||||
#[arg(long = "compile-only")]
|
#[arg(long = "compile-only")]
|
||||||
pub compile_only: Option<TestingPlatform>,
|
pub compile_only: Option<TestingPlatform>,
|
||||||
|
|
||||||
/// Determines the amount of nodes that will be spawned for each chain.
|
/// Determines the amount of tests that are executed in parallel.
|
||||||
#[arg(long, default_value = "1")]
|
#[arg(long = "workers", default_value = "12")]
|
||||||
pub number_of_nodes: usize,
|
pub workers: usize,
|
||||||
|
|
||||||
/// Determines the amount of threads that will will be used.
|
|
||||||
#[arg(long, default_value = "12")]
|
|
||||||
pub number_of_threads: usize,
|
|
||||||
|
|
||||||
/// Extract problems back to the test corpus.
|
/// Extract problems back to the test corpus.
|
||||||
#[arg(short, long = "extract-problems")]
|
#[arg(short, long = "extract-problems")]
|
||||||
@@ -149,23 +138,14 @@ impl Arguments {
|
|||||||
|
|
||||||
impl Default for Arguments {
|
impl Default for Arguments {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
static TEMP_DIR: LazyLock<TempDir> = LazyLock::new(|| TempDir::new().unwrap());
|
Arguments::parse_from(["retester"])
|
||||||
|
|
||||||
let default = Arguments::parse_from(["retester"]);
|
|
||||||
|
|
||||||
Arguments {
|
|
||||||
temp_dir: Some(&TEMP_DIR),
|
|
||||||
..default
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The Solidity compatible node implementation.
|
/// The Solidity compatible node implementation.
|
||||||
///
|
///
|
||||||
/// This describes the solutions to be tested against on a high level.
|
/// This describes the solutions to be tested against on a high level.
|
||||||
#[derive(
|
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, ValueEnum, Serialize, Deserialize)]
|
||||||
Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, ValueEnum, Serialize, Deserialize,
|
|
||||||
)]
|
|
||||||
#[clap(rename_all = "lower")]
|
#[clap(rename_all = "lower")]
|
||||||
pub enum TestingPlatform {
|
pub enum TestingPlatform {
|
||||||
/// The go-ethereum reference full node EVM implementation.
|
/// The go-ethereum reference full node EVM implementation.
|
||||||
|
|||||||
@@ -13,7 +13,6 @@ name = "retester"
|
|||||||
path = "src/main.rs"
|
path = "src/main.rs"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
revive-dt-common = { workspace = true }
|
|
||||||
revive-dt-compiler = { workspace = true }
|
revive-dt-compiler = { workspace = true }
|
||||||
revive-dt-config = { workspace = true }
|
revive-dt-config = { workspace = true }
|
||||||
revive-dt-format = { workspace = true }
|
revive-dt-format = { workspace = true }
|
||||||
@@ -24,10 +23,9 @@ revive-dt-report = { workspace = true }
|
|||||||
alloy = { workspace = true }
|
alloy = { workspace = true }
|
||||||
anyhow = { workspace = true }
|
anyhow = { workspace = true }
|
||||||
clap = { workspace = true }
|
clap = { workspace = true }
|
||||||
futures = { workspace = true }
|
|
||||||
indexmap = { workspace = true }
|
|
||||||
tokio = { workspace = true }
|
|
||||||
tracing = { workspace = true }
|
tracing = { workspace = true }
|
||||||
tracing-subscriber = { workspace = true }
|
tracing-subscriber = { workspace = true }
|
||||||
semver = { workspace = true }
|
rayon = { workspace = true }
|
||||||
|
revive-solc-json-interface = { workspace = true }
|
||||||
|
serde_json = { workspace = true }
|
||||||
temp-dir = { workspace = true }
|
temp-dir = { workspace = true }
|
||||||
|
|||||||
+379
-583
File diff suppressed because it is too large
@@ -1,12 +1,11 @@
|
|||||||
//! The revive differential testing core library.
|
//! The revive differential testing core library.
|
||||||
//!
|
//!
|
||||||
//! This crate defines the testing configuration and
|
//! This crate defines the testing configuration and
|
||||||
//! provides a helper utility to execute tests.
|
//! provides a helper utilty to execute tests.
|
||||||
|
|
||||||
use revive_dt_compiler::{SolidityCompiler, revive_resolc, solc};
|
use revive_dt_compiler::{SolidityCompiler, revive_resolc, solc};
|
||||||
use revive_dt_config::TestingPlatform;
|
use revive_dt_config::TestingPlatform;
|
||||||
use revive_dt_format::traits::ResolverApi;
|
use revive_dt_node::{geth, kitchensink::KitchensinkNode};
|
||||||
use revive_dt_node::{Node, geth, kitchensink::KitchensinkNode};
|
|
||||||
use revive_dt_node_interaction::EthereumNode;
|
use revive_dt_node_interaction::EthereumNode;
|
||||||
|
|
||||||
pub mod driver;
|
pub mod driver;
|
||||||
@@ -15,7 +14,7 @@ pub mod driver;
|
|||||||
///
|
///
|
||||||
/// For this we need a blockchain node implementation and a compiler.
|
/// For this we need a blockchain node implementation and a compiler.
|
||||||
pub trait Platform {
|
pub trait Platform {
|
||||||
type Blockchain: EthereumNode + Node + ResolverApi;
|
type Blockchain: EthereumNode;
|
||||||
type Compiler: SolidityCompiler;
|
type Compiler: SolidityCompiler;
|
||||||
|
|
||||||
/// Returns the matching [TestingPlatform] of the [revive_dt_config::Arguments].
|
/// Returns the matching [TestingPlatform] of the [revive_dt_config::Arguments].
|
||||||
@@ -26,7 +25,7 @@ pub trait Platform {
|
|||||||
pub struct Geth;
|
pub struct Geth;
|
||||||
|
|
||||||
impl Platform for Geth {
|
impl Platform for Geth {
|
||||||
type Blockchain = geth::GethNode;
|
type Blockchain = geth::Instance;
|
||||||
type Compiler = solc::Solc;
|
type Compiler = solc::Solc;
|
||||||
|
|
||||||
fn config_id() -> TestingPlatform {
|
fn config_id() -> TestingPlatform {
|
||||||
|
|||||||
+69
-563
@@ -1,75 +1,37 @@
|
|||||||
use std::{
|
use std::{collections::HashMap, sync::LazyLock};
|
||||||
collections::HashMap,
|
|
||||||
path::Path,
|
|
||||||
sync::{Arc, LazyLock},
|
|
||||||
};
|
|
||||||
|
|
||||||
use alloy::{
|
|
||||||
json_abi::JsonAbi,
|
|
||||||
network::{Ethereum, TransactionBuilder},
|
|
||||||
primitives::Address,
|
|
||||||
rpc::types::TransactionRequest,
|
|
||||||
};
|
|
||||||
use anyhow::Context;
|
|
||||||
use clap::Parser;
|
use clap::Parser;
|
||||||
use futures::StreamExt;
|
use rayon::{ThreadPoolBuilder, prelude::*};
|
||||||
use revive_dt_common::iterators::FilesWithExtensionIterator;
|
|
||||||
use revive_dt_node_interaction::EthereumNode;
|
|
||||||
use semver::Version;
|
|
||||||
use temp_dir::TempDir;
|
|
||||||
use tokio::sync::{Mutex, RwLock};
|
|
||||||
use tracing::{Instrument, Level};
|
|
||||||
use tracing_subscriber::{EnvFilter, FmtSubscriber};
|
|
||||||
|
|
||||||
use revive_dt_compiler::SolidityCompiler;
|
|
||||||
use revive_dt_compiler::{Compiler, CompilerOutput};
|
|
||||||
use revive_dt_config::*;
|
use revive_dt_config::*;
|
||||||
use revive_dt_core::{
|
use revive_dt_core::{
|
||||||
Geth, Kitchensink, Platform,
|
Geth, Kitchensink, Platform,
|
||||||
driver::{CaseDriver, CaseState},
|
driver::{Driver, State},
|
||||||
};
|
|
||||||
use revive_dt_format::{
|
|
||||||
case::{Case, CaseIdx},
|
|
||||||
corpus::Corpus,
|
|
||||||
input::Input,
|
|
||||||
metadata::{ContractInstance, ContractPathAndIdent, Metadata, MetadataFile},
|
|
||||||
mode::SolcMode,
|
|
||||||
};
|
};
|
||||||
|
use revive_dt_format::{corpus::Corpus, metadata::MetadataFile};
|
||||||
use revive_dt_node::pool::NodePool;
|
use revive_dt_node::pool::NodePool;
|
||||||
use revive_dt_report::reporter::{Report, Span};
|
use revive_dt_report::reporter::{Report, Span};
|
||||||
|
use temp_dir::TempDir;
|
||||||
|
use tracing::Level;
|
||||||
|
use tracing_subscriber::{EnvFilter, FmtSubscriber, fmt::format::FmtSpan};
|
||||||
|
|
||||||
static TEMP_DIR: LazyLock<TempDir> = LazyLock::new(|| TempDir::new().unwrap());
|
static TEMP_DIR: LazyLock<TempDir> = LazyLock::new(|| TempDir::new().unwrap());
|
||||||
|
|
||||||
type CompilationCache<'a> = Arc<
|
|
||||||
RwLock<
|
|
||||||
HashMap<
|
|
||||||
(&'a Path, SolcMode, TestingPlatform),
|
|
||||||
Arc<Mutex<Option<Arc<(Version, CompilerOutput)>>>>,
|
|
||||||
>,
|
|
||||||
>,
|
|
||||||
>;
|
|
||||||
|
|
||||||
fn main() -> anyhow::Result<()> {
|
fn main() -> anyhow::Result<()> {
|
||||||
let args = init_cli()?;
|
let args = init_cli()?;
|
||||||
|
|
||||||
let body = async {
|
for (corpus, tests) in collect_corpora(&args)? {
|
||||||
for (corpus, tests) in collect_corpora(&args)? {
|
let span = Span::new(corpus, args.clone())?;
|
||||||
let span = Span::new(corpus, args.clone())?;
|
|
||||||
match &args.compile_only {
|
|
||||||
Some(platform) => compile_corpus(&args, &tests, platform, span).await,
|
|
||||||
None => execute_corpus(&args, &tests, span).await?,
|
|
||||||
}
|
|
||||||
Report::save()?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
};
|
|
||||||
|
|
||||||
tokio::runtime::Builder::new_multi_thread()
|
match &args.compile_only {
|
||||||
.worker_threads(args.number_of_threads)
|
Some(platform) => compile_corpus(&args, &tests, platform, span),
|
||||||
.enable_all()
|
None => execute_corpus(&args, &tests, span)?,
|
||||||
.build()
|
}
|
||||||
.expect("Failed building the Runtime")
|
|
||||||
.block_on(body)
|
Report::save()?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn init_cli() -> anyhow::Result<Arguments> {
|
fn init_cli() -> anyhow::Result<Arguments> {
|
||||||
@@ -77,7 +39,7 @@ fn init_cli() -> anyhow::Result<Arguments> {
|
|||||||
.with_thread_ids(true)
|
.with_thread_ids(true)
|
||||||
.with_thread_names(true)
|
.with_thread_names(true)
|
||||||
.with_env_filter(EnvFilter::from_default_env())
|
.with_env_filter(EnvFilter::from_default_env())
|
||||||
.with_ansi(false)
|
.with_span_events(FmtSpan::ENTER | FmtSpan::CLOSE)
|
||||||
.pretty()
|
.pretty()
|
||||||
.finish();
|
.finish();
|
||||||
tracing::subscriber::set_global_default(subscriber)?;
|
tracing::subscriber::set_global_default(subscriber)?;
|
||||||
@@ -100,6 +62,10 @@ fn init_cli() -> anyhow::Result<Arguments> {
|
|||||||
}
|
}
|
||||||
tracing::info!("workdir: {}", args.directory().display());
|
tracing::info!("workdir: {}", args.directory().display());
|
||||||
|
|
||||||
|
ThreadPoolBuilder::new()
|
||||||
|
.num_threads(args.workers)
|
||||||
|
.build_global()?;
|
||||||
|
|
||||||
Ok(args)
|
Ok(args)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -117,11 +83,7 @@ fn collect_corpora(args: &Arguments) -> anyhow::Result<HashMap<Corpus, Vec<Metad
|
|||||||
Ok(corpora)
|
Ok(corpora)
|
||||||
}
|
}
|
||||||
|
|
||||||
async fn run_driver<L, F>(
|
fn run_driver<L, F>(args: &Arguments, tests: &[MetadataFile], span: Span) -> anyhow::Result<()>
|
||||||
args: &Arguments,
|
|
||||||
tests: &[MetadataFile],
|
|
||||||
span: Span,
|
|
||||||
) -> anyhow::Result<()>
|
|
||||||
where
|
where
|
||||||
L: Platform,
|
L: Platform,
|
||||||
F: Platform,
|
F: Platform,
|
||||||
@@ -131,495 +93,56 @@ where
|
|||||||
let leader_nodes = NodePool::<L::Blockchain>::new(args)?;
|
let leader_nodes = NodePool::<L::Blockchain>::new(args)?;
|
||||||
let follower_nodes = NodePool::<F::Blockchain>::new(args)?;
|
let follower_nodes = NodePool::<F::Blockchain>::new(args)?;
|
||||||
|
|
||||||
let test_cases = tests
|
tests.par_iter().for_each(
|
||||||
.iter()
|
-            .flat_map(
-                |MetadataFile {
-                     path,
-                     content: metadata,
-                 }| {
-                    metadata
-                        .cases
-                        .iter()
-                        .enumerate()
-                        .flat_map(move |(case_idx, case)| {
-                            metadata
-                                .solc_modes()
-                                .into_iter()
-                                .map(move |solc_mode| (path, metadata, case_idx, case, solc_mode))
-                        })
-                },
-            )
-            .collect::<Vec<_>>();
-
-        let metadata_case_status = Arc::new(RwLock::new(test_cases.iter().fold(
-            HashMap::<_, HashMap<_, _>>::new(),
-            |mut map, (path, _, case_idx, case, solc_mode)| {
-                map.entry((path.to_path_buf(), solc_mode.clone()))
-                    .or_default()
-                    .insert((CaseIdx::new(*case_idx), case.name.clone()), None::<bool>);
-                map
-            },
-        )));
-
-        let status_reporter_task = {
-            let metadata_case_status = metadata_case_status.clone();
-            async move {
-                const GREEN: &str = "\x1B[32m";
-                const RED: &str = "\x1B[31m";
-                const RESET: &str = "\x1B[0m";
-
-                let mut entries_to_delete = Vec::new();
-                loop {
-                    let metadata_case_status_read = metadata_case_status.read().await;
-                    if metadata_case_status_read.is_empty() {
-                        break;
-                    }
-
-                    for ((metadata_file_path, solc_mode), case_status) in
-                        metadata_case_status_read.iter()
-                    {
-                        if case_status.values().any(|value| value.is_none()) {
-                            continue;
-                        }
-
-                        let contains_failures = case_status
-                            .values()
-                            .any(|value| value.is_some_and(|value| !value));
-
-                        if !contains_failures {
-                            eprintln!(
-                                "{}Succeeded:{} {} - {:?}",
-                                GREEN,
-                                RESET,
-                                metadata_file_path.display(),
-                                solc_mode
-                            )
-                        } else {
-                            eprintln!(
-                                "{}Failed:{} {} - {:?}",
-                                RED,
-                                RESET,
-                                metadata_file_path.display(),
-                                solc_mode
-                            )
-                        };
-
-                        let mut case_status = case_status
-                            .iter()
-                            .map(|((case_idx, case_name), case_status)| {
-                                (case_idx.into_inner(), case_name, case_status.unwrap())
-                            })
-                            .collect::<Vec<_>>();
-                        case_status.sort_by(|a, b| a.0.cmp(&b.0));
-                        for (_, case_name, case_status) in case_status.into_iter() {
-                            if case_status {
-                                eprintln!(
-                                    "{GREEN} Case Succeeded:{RESET} {}",
-                                    case_name
-                                        .as_ref()
-                                        .map(|string| string.as_str())
-                                        .unwrap_or("Unnamed case")
-                                )
-                            } else {
-                                eprintln!(
-                                    "{RED} Case Failed:{RESET} {}",
-                                    case_name
-                                        .as_ref()
-                                        .map(|string| string.as_str())
-                                        .unwrap_or("Unnamed case")
-                                )
-                            };
-                        }
-                        eprintln!();
-
-                        entries_to_delete.push((metadata_file_path.clone(), solc_mode.clone()));
-                    }
-
-                    drop(metadata_case_status_read);
-                    let mut metadata_case_status_write = metadata_case_status.write().await;
-                    for entry in entries_to_delete.drain(..) {
-                        metadata_case_status_write.remove(&entry);
-                    }
-
-                    tokio::time::sleep(std::time::Duration::from_secs(3)).await;
-                }
-            }
-        };
-
-        let compilation_cache = Arc::new(RwLock::new(HashMap::new()));
-        let driver_task = futures::stream::iter(test_cases).for_each_concurrent(
-            None,
-            |(metadata_file_path, metadata, case_idx, case, solc_mode)| {
-                let compilation_cache = compilation_cache.clone();
-                let leader_node = leader_nodes.round_robbin();
-                let follower_node = follower_nodes.round_robbin();
-                let tracing_span = tracing::span!(
-                    Level::INFO,
-                    "Running driver",
-                    metadata_file_path = %metadata_file_path.display(),
-                    case_idx = case_idx,
-                    solc_mode = ?solc_mode,
-                );
-                let metadata_case_status = metadata_case_status.clone();
-                async move {
-                    let result = handle_case_driver::<L, F>(
-                        metadata_file_path.as_path(),
-                        metadata,
-                        case_idx.into(),
-                        case,
-                        solc_mode.clone(),
-                        args,
-                        compilation_cache.clone(),
-                        leader_node,
-                        follower_node,
-                        span,
-                    )
-                    .await;
-                    let mut metadata_case_status = metadata_case_status.write().await;
-                    match result {
-                        Ok(inputs_executed) => {
-                            tracing::info!(inputs_executed, "Execution succeeded");
-                            metadata_case_status
-                                .entry((metadata_file_path.clone(), solc_mode))
-                                .or_default()
-                                .insert((CaseIdx::new(case_idx), case.name.clone()), Some(true));
-                        }
-                        Err(error) => {
-                            metadata_case_status
-                                .entry((metadata_file_path.clone(), solc_mode))
-                                .or_default()
-                                .insert((CaseIdx::new(case_idx), case.name.clone()), Some(false));
-                            tracing::info!(%error, "Execution failed")
-                        }
-                    }
-                    tracing::info!("Execution completed");
-                }
-                .instrument(tracing_span)
+            |MetadataFile {
+                 content: metadata,
+                 path: metadata_file_path,
+             }| {
+                // Starting a new tracing span for this metadata file. This allows our logs to be clear
+                // about which metadata file the logs belong to. We can add other information into this
+                // as well to be able to associate the logs with the correct metadata file and case
+                // that's being executed.
+                let tracing_span = tracing::span!(
+                    Level::INFO,
+                    "Running driver",
+                    metadata_file_path = metadata_file_path.display().to_string(),
+                );
+                let _guard = tracing_span.enter();
+
+                let mut driver = Driver::<L, F>::new(
+                    metadata,
+                    args,
+                    leader_nodes.round_robbin(),
+                    follower_nodes.round_robbin(),
+                );
+
+                match driver.execute(span) {
+                    Ok(_) => {
+                        tracing::info!(
+                            "metadata {} success",
+                            metadata.directory().as_ref().unwrap().display()
+                        );
+                    }
+                    Err(error) => {
+                        tracing::warn!(
+                            "metadata {} failure: {error:?}",
+                            metadata.file_path.as_ref().unwrap().display()
+                        );
+                    }
+                }
             },
         );
 
-        tokio::join!(status_reporter_task, driver_task);
-
     Ok(())
 }
 
-#[allow(clippy::too_many_arguments)]
-async fn handle_case_driver<'a, L, F>(
-    metadata_file_path: &'a Path,
-    metadata: &'a Metadata,
-    case_idx: CaseIdx,
-    case: &Case,
-    mode: SolcMode,
-    config: &Arguments,
-    compilation_cache: CompilationCache<'a>,
-    leader_node: &L::Blockchain,
-    follower_node: &F::Blockchain,
-    _: Span,
-) -> anyhow::Result<usize>
-where
-    L: Platform,
-    F: Platform,
-    L::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
-    F::Blockchain: revive_dt_node::Node + Send + Sync + 'static,
-{
-    let leader_pre_link_contracts = get_or_build_contracts::<L>(
-        metadata,
-        metadata_file_path,
-        mode.clone(),
-        config,
-        compilation_cache.clone(),
-        &HashMap::new(),
-    )
-    .await?;
-    let follower_pre_link_contracts = get_or_build_contracts::<F>(
-        metadata,
-        metadata_file_path,
-        mode.clone(),
-        config,
-        compilation_cache.clone(),
-        &HashMap::new(),
-    )
-    .await?;
-
-    let mut leader_deployed_libraries = HashMap::new();
-    let mut follower_deployed_libraries = HashMap::new();
-    let mut contract_sources = metadata.contract_sources()?;
-    for library_instance in metadata
-        .libraries
-        .iter()
-        .flatten()
-        .flat_map(|(_, map)| map.values())
-    {
-        let ContractPathAndIdent {
-            contract_source_path: library_source_path,
-            contract_ident: library_ident,
-        } = contract_sources
-            .remove(library_instance)
-            .context("Failed to find the contract source")?;
-
-        let (leader_code, leader_abi) = leader_pre_link_contracts
-            .1
-            .contracts
-            .get(&library_source_path)
-            .and_then(|contracts| contracts.get(library_ident.as_str()))
-            .context("Declared library was not compiled")?;
-        let (follower_code, follower_abi) = follower_pre_link_contracts
-            .1
-            .contracts
-            .get(&library_source_path)
-            .and_then(|contracts| contracts.get(library_ident.as_str()))
-            .context("Declared library was not compiled")?;
-
-        let leader_code = match alloy::hex::decode(leader_code) {
-            Ok(code) => code,
-            Err(error) => {
-                tracing::error!(
-                    ?error,
-                    contract_source_path = library_source_path.display().to_string(),
-                    contract_ident = library_ident.as_ref(),
-                    "Failed to hex-decode byte code - This could possibly mean that the bytecode requires linking"
-                );
-                anyhow::bail!("Failed to hex-decode the byte code {}", error)
-            }
-        };
-        let follower_code = match alloy::hex::decode(follower_code) {
-            Ok(code) => code,
-            Err(error) => {
-                tracing::error!(
-                    ?error,
-                    contract_source_path = library_source_path.display().to_string(),
-                    contract_ident = library_ident.as_ref(),
-                    "Failed to hex-decode byte code - This could possibly mean that the bytecode requires linking"
-                );
-                anyhow::bail!("Failed to hex-decode the byte code {}", error)
-            }
-        };
-
-        // Getting the deployer address from the cases themselves. This is to ensure that we're
-        // doing the deployments from different accounts and therefore we're not slowed down by
-        // the nonce.
-        let deployer_address = case
-            .inputs
-            .iter()
-            .map(|input| input.caller)
-            .next()
-            .unwrap_or(Input::default_caller());
-        let leader_tx = TransactionBuilder::<Ethereum>::with_deploy_code(
-            TransactionRequest::default().from(deployer_address),
-            leader_code,
-        );
-        let follower_tx = TransactionBuilder::<Ethereum>::with_deploy_code(
-            TransactionRequest::default().from(deployer_address),
-            follower_code,
-        );
-
-        let leader_receipt = match leader_node.execute_transaction(leader_tx).await {
-            Ok(receipt) => receipt,
-            Err(error) => {
-                tracing::error!(
-                    node = std::any::type_name::<L>(),
-                    ?error,
-                    "Contract deployment transaction failed."
-                );
-                return Err(error);
-            }
-        };
-        let follower_receipt = match follower_node.execute_transaction(follower_tx).await {
-            Ok(receipt) => receipt,
-            Err(error) => {
-                tracing::error!(
-                    node = std::any::type_name::<F>(),
-                    ?error,
-                    "Contract deployment transaction failed."
-                );
-                return Err(error);
-            }
-        };
-
-        let Some(leader_library_address) = leader_receipt.contract_address else {
-            tracing::error!("Contract deployment transaction didn't return an address");
-            anyhow::bail!("Contract deployment didn't return an address");
-        };
-        let Some(follower_library_address) = follower_receipt.contract_address else {
-            tracing::error!("Contract deployment transaction didn't return an address");
-            anyhow::bail!("Contract deployment didn't return an address");
-        };
-
-        leader_deployed_libraries.insert(
-            library_instance.clone(),
-            (leader_library_address, leader_abi.clone()),
-        );
-        follower_deployed_libraries.insert(
-            library_instance.clone(),
-            (follower_library_address, follower_abi.clone()),
-        );
-    }
-
-    let metadata_file_contains_libraries = metadata
-        .libraries
-        .iter()
-        .flat_map(|map| map.iter())
-        .flat_map(|(_, value)| value.iter())
-        .next()
-        .is_some();
-    let compiled_contracts_require_linking = leader_pre_link_contracts
-        .1
-        .contracts
-        .values()
-        .chain(follower_pre_link_contracts.1.contracts.values())
-        .flat_map(|value| value.values())
-        .any(|(code, _)| !code.chars().all(|char| char.is_ascii_hexdigit()));
-    let (leader_compiled_contracts, follower_compiled_contracts) =
-        if metadata_file_contains_libraries && compiled_contracts_require_linking {
-            let leader_key = (metadata_file_path, mode.clone(), L::config_id());
-            let follower_key = (metadata_file_path, mode.clone(), L::config_id());
-            {
-                let mut cache = compilation_cache.write().await;
-                cache.remove(&leader_key);
-                cache.remove(&follower_key);
-            }
-
-            let leader_post_link_contracts = get_or_build_contracts::<L>(
-                metadata,
-                metadata_file_path,
-                mode.clone(),
-                config,
-                compilation_cache.clone(),
-                &leader_deployed_libraries,
-            )
-            .await?;
-            let follower_post_link_contracts = get_or_build_contracts::<F>(
-                metadata,
-                metadata_file_path,
-                mode.clone(),
-                config,
-                compilation_cache,
-                &follower_deployed_libraries,
-            )
-            .await?;
-
-            (leader_post_link_contracts, follower_post_link_contracts)
-        } else {
-            (leader_pre_link_contracts, follower_pre_link_contracts)
-        };
-
-    let leader_state = CaseState::<L>::new(
-        leader_compiled_contracts.0.clone(),
-        leader_compiled_contracts.1.contracts.clone(),
-        leader_deployed_libraries,
-    );
-    let follower_state = CaseState::<F>::new(
-        follower_compiled_contracts.0.clone(),
-        follower_compiled_contracts.1.contracts.clone(),
-        follower_deployed_libraries,
-    );
-
-    let mut driver = CaseDriver::<L, F>::new(
-        metadata,
-        case,
-        case_idx,
-        leader_node,
-        follower_node,
-        leader_state,
-        follower_state,
-    );
-    driver.execute().await
-}
-
-async fn get_or_build_contracts<'a, P: Platform>(
-    metadata: &'a Metadata,
-    metadata_file_path: &'a Path,
-    mode: SolcMode,
-    config: &Arguments,
-    compilation_cache: CompilationCache<'a>,
-    deployed_libraries: &HashMap<ContractInstance, (Address, JsonAbi)>,
-) -> anyhow::Result<Arc<(Version, CompilerOutput)>> {
-    let key = (metadata_file_path, mode.clone(), P::config_id());
-    if let Some(compilation_artifact) = compilation_cache.read().await.get(&key).cloned() {
-        let mut compilation_artifact = compilation_artifact.lock().await;
-        match *compilation_artifact {
-            Some(ref compiled_contracts) => {
-                tracing::debug!(?key, "Compiled contracts cache hit");
-                return Ok(compiled_contracts.clone());
-            }
-            None => {
-                tracing::debug!(?key, "Compiled contracts cache miss");
-                let compiled_contracts = Arc::new(
-                    compile_contracts::<P>(metadata, &mode, config, deployed_libraries).await?,
-                );
-                *compilation_artifact = Some(compiled_contracts.clone());
-                return Ok(compiled_contracts.clone());
-            }
-        }
-    };
-
-    tracing::debug!(?key, "Compiled contracts cache miss");
-    let mutex = {
-        let mut compilation_cache = compilation_cache.write().await;
-        let mutex = Arc::new(Mutex::new(None));
-        compilation_cache.insert(key, mutex.clone());
-        mutex
-    };
-    let mut compilation_artifact = mutex.lock().await;
-    let compiled_contracts =
-        Arc::new(compile_contracts::<P>(metadata, &mode, config, deployed_libraries).await?);
-    *compilation_artifact = Some(compiled_contracts.clone());
-    Ok(compiled_contracts.clone())
-}
-
-async fn compile_contracts<P: Platform>(
-    metadata: &Metadata,
-    mode: &SolcMode,
-    config: &Arguments,
-    deployed_libraries: &HashMap<ContractInstance, (Address, JsonAbi)>,
-) -> anyhow::Result<(Version, CompilerOutput)> {
-    let compiler_version_or_requirement = mode.compiler_version_to_use(config.solc.clone());
-    let compiler_path =
-        P::Compiler::get_compiler_executable(config, compiler_version_or_requirement).await?;
-    let compiler_version = P::Compiler::new(compiler_path.clone()).version()?;
-
-    let compiler = Compiler::<P::Compiler>::new()
-        .with_allow_path(metadata.directory()?)
-        .with_optimization(mode.solc_optimize());
-    let mut compiler = metadata
-        .files_to_compile()?
-        .try_fold(compiler, |compiler, path| compiler.with_source(&path))?;
-    for (library_instance, (library_address, _)) in deployed_libraries.iter() {
-        let library_ident = &metadata
-            .contracts
-            .as_ref()
-            .and_then(|contracts| contracts.get(library_instance))
-            .expect("Impossible for library to not be found in contracts")
-            .contract_ident;
-
-        // Note the following: we need to tell solc which files require the libraries to be
-        // linked into them. We do not have access to this information and therefore we choose
-        // an easier, yet more compute intensive route, of telling solc that all of the files
-        // need to link the library and it will only perform the linking for the files that do
-        // actually need the library.
-        compiler = FilesWithExtensionIterator::new(metadata.directory()?)
-            .with_allowed_extension("sol")
-            .fold(compiler, |compiler, path| {
-                compiler.with_library(&path, library_ident.as_str(), *library_address)
-            });
-    }
-
-    let compiler_output = compiler.try_build(compiler_path).await?;
-
-    Ok((compiler_version, compiler_output))
-}
-
-async fn execute_corpus(
-    args: &Arguments,
-    tests: &[MetadataFile],
-    span: Span,
-) -> anyhow::Result<()> {
+fn execute_corpus(args: &Arguments, tests: &[MetadataFile], span: Span) -> anyhow::Result<()> {
     match (&args.leader, &args.follower) {
         (TestingPlatform::Geth, TestingPlatform::Kitchensink) => {
-            run_driver::<Geth, Kitchensink>(args, tests, span).await?
+            run_driver::<Geth, Kitchensink>(args, tests, span)?
        }
         (TestingPlatform::Geth, TestingPlatform::Geth) => {
-            run_driver::<Geth, Geth>(args, tests, span).await?
+            run_driver::<Geth, Geth>(args, tests, span)?
        }
         _ => unimplemented!(),
    }
@@ -627,41 +150,24 @@ async fn execute_corpus(
     Ok(())
 }
 
-async fn compile_corpus(
+fn compile_corpus(
     config: &Arguments,
     tests: &[MetadataFile],
     platform: &TestingPlatform,
-    _: Span,
+    span: Span,
 ) {
-    let tests = tests.iter().flat_map(|metadata| {
-        metadata
-            .solc_modes()
-            .into_iter()
-            .map(move |solc_mode| (metadata, solc_mode))
-    });
-
-    futures::stream::iter(tests)
-        .for_each_concurrent(None, |(metadata, mode)| async move {
+    tests.par_iter().for_each(|metadata| {
+        for mode in &metadata.solc_modes() {
             match platform {
                 TestingPlatform::Geth => {
-                    let _ = compile_contracts::<Geth>(
-                        &metadata.content,
-                        &mode,
-                        config,
-                        &Default::default(),
-                    )
-                    .await;
+                    let mut state = State::<Geth>::new(config, span);
+                    let _ = state.build_contracts(mode, metadata);
                 }
                 TestingPlatform::Kitchensink => {
-                    let _ = compile_contracts::<Geth>(
-                        &metadata.content,
-                        &mode,
-                        config,
-                        &Default::default(),
-                    )
-                    .await;
+                    let mut state = State::<Kitchensink>::new(config, span);
+                    let _ = state.build_contracts(mode, metadata);
                 }
-            }
-        })
-        .await;
+            };
+        }
+    });
 }
@@ -9,8 +9,6 @@ repository.workspace = true
 rust-version.workspace = true
 
 [dependencies]
-revive-dt-common = { workspace = true }
-
 alloy = { workspace = true }
 alloy-primitives = { workspace = true }
 alloy-sol-types = { workspace = true }
@@ -19,6 +17,3 @@ tracing = { workspace = true }
 semver = { workspace = true }
 serde = { workspace = true, features = ["derive"] }
 serde_json = { workspace = true }
-
-[dev-dependencies]
-tokio = { workspace = true }
@@ -1,11 +1,6 @@
 use serde::Deserialize;
 
-use revive_dt_common::macros::define_wrapper_type;
-
-use crate::{
-    input::{Expected, Input},
-    mode::Mode,
-};
+use crate::{input::Input, mode::Mode};
 
 #[derive(Debug, Default, Deserialize, Clone, Eq, PartialEq)]
 pub struct Case {
@@ -14,37 +9,4 @@ pub struct Case {
     pub modes: Option<Vec<Mode>>,
     pub inputs: Vec<Input>,
     pub group: Option<String>,
-    pub expected: Option<Expected>,
 }
-
-impl Case {
-    pub fn inputs_iterator(&self) -> impl Iterator<Item = Input> {
-        let inputs_len = self.inputs.len();
-        self.inputs
-            .clone()
-            .into_iter()
-            .enumerate()
-            .map(move |(idx, mut input)| {
-                if idx + 1 == inputs_len {
-                    if input.expected.is_none() {
-                        input.expected = self.expected.clone();
-                    }
-
-                    // TODO: What does it mean for us to have an `expected` field on the case itself
-                    // but the final input also has an expected field that doesn't match the one on
-                    // the case? What are we supposed to do with that final expected field on the
-                    // case?
-
-                    input
-                } else {
-                    input
-                }
-            })
-    }
-}
-
-define_wrapper_type!(
-    /// A wrapper type for the index of test cases found in metadata file.
-    #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
-    pub struct CaseIdx(usize);
-);

+22 -54
@@ -17,25 +17,7 @@ impl Corpus {
     /// Try to read and parse the corpus definition file at given `path`.
     pub fn try_from_path(path: &Path) -> anyhow::Result<Self> {
         let file = File::open(path)?;
-        let mut corpus: Corpus = serde_json::from_reader(file)?;
-
-        // Ensure that the path mentioned in the corpus is relative to the corpus file.
-        // Canonicalizing also helps make the path in any errors unambiguous.
-        corpus.path = path
-            .parent()
-            .ok_or_else(|| {
-                anyhow::anyhow!("Corpus path '{}' does not point to a file", path.display())
-            })?
-            .canonicalize()
-            .map_err(|error| {
-                anyhow::anyhow!(
-                    "Failed to canonicalize path to corpus '{}': {error}",
-                    path.display()
-                )
-            })?
-            .join(corpus.path);
-
-        Ok(corpus)
+        Ok(serde_json::from_reader(file)?)
     }
 
     /// Scan the corpus base directory and return all tests found.
@@ -53,47 +35,33 @@ impl Corpus {
     ///
     /// `path` is expected to be a directory.
     pub fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) {
-        if path.is_dir() {
         let dir_entry = match std::fs::read_dir(path) {
             Ok(dir_entry) => dir_entry,
             Err(error) => {
                 tracing::error!("failed to read dir '{}': {error}", path.display());
                 return;
             }
         };
 
         for entry in dir_entry {
             let entry = match entry {
                 Ok(entry) => entry,
                 Err(error) => {
                     tracing::error!("error reading dir entry: {error}");
                     continue;
                 }
             };
 
             let path = entry.path();
             if path.is_dir() {
                 collect_metadata(&path, tests);
                 continue;
             }
 
             if path.is_file() {
                 if let Some(metadata) = MetadataFile::try_from_file(&path) {
                     tests.push(metadata)
                 }
             }
         }
-        } else {
-            let Some(extension) = path.extension() else {
-                tracing::error!("Failed to get file extension");
-                return;
-            };
-            if extension.eq_ignore_ascii_case("sol") || extension.eq_ignore_ascii_case("json") {
-                if let Some(metadata) = MetadataFile::try_from_file(path) {
-                    tests.push(metadata)
-                }
-            } else {
-                tracing::error!(?extension, "Unsupported file extension");
-            }
-        }
     }
 }

+163 -984  File diff suppressed because it is too large
@@ -5,4 +5,3 @@ pub mod corpus;
 pub mod input;
 pub mod metadata;
 pub mod mode;
-pub mod traits;

+24 -210
@@ -1,15 +1,11 @@
 use std::{
     collections::BTreeMap,
-    fmt::Display,
     fs::{File, read_to_string},
     ops::Deref,
     path::{Path, PathBuf},
-    str::FromStr,
 };
 
-use serde::{Deserialize, Serialize};
-
-use revive_dt_common::{iterators::FilesWithExtensionIterator, macros::define_wrapper_type};
+use serde::Deserialize;
 
 use crate::{
     case::Case,
@@ -45,11 +41,9 @@ impl Deref for MetadataFile {
 
 #[derive(Debug, Default, Deserialize, Clone, Eq, PartialEq)]
 pub struct Metadata {
-    pub targets: Option<Vec<String>>,
     pub cases: Vec<Case>,
-    pub contracts: Option<BTreeMap<ContractInstance, ContractPathAndIdent>>,
-    // TODO: Convert into wrapper types for clarity.
-    pub libraries: Option<BTreeMap<PathBuf, BTreeMap<ContractIdent, ContractInstance>>>,
+    pub contracts: Option<BTreeMap<String, String>>,
+    pub libraries: Option<BTreeMap<String, BTreeMap<String, String>>>,
     pub ignore: Option<bool>,
     pub modes: Option<Vec<Mode>>,
     pub file_path: Option<PathBuf>,
@@ -83,35 +77,28 @@ impl Metadata {
         .to_path_buf())
     }
 
-    /// Returns the contract sources with canonicalized paths for the files
-    pub fn contract_sources(
-        &self,
-    ) -> anyhow::Result<BTreeMap<ContractInstance, ContractPathAndIdent>> {
+    /// Extract the contract sources.
+    ///
+    /// Returns a mapping of contract IDs to their source path and contract name.
+    pub fn contract_sources(&self) -> anyhow::Result<BTreeMap<String, (PathBuf, String)>> {
         let directory = self.directory()?;
         let mut sources = BTreeMap::new();
         let Some(contracts) = &self.contracts else {
             return Ok(sources);
         };
 
-        for (
-            alias,
-            ContractPathAndIdent {
-                contract_source_path,
-                contract_ident,
-            },
-        ) in contracts
-        {
-            let alias = alias.clone();
-            let absolute_path = directory.join(contract_source_path).canonicalize()?;
-            let contract_ident = contract_ident.clone();
-
-            sources.insert(
-                alias,
-                ContractPathAndIdent {
-                    contract_source_path: absolute_path,
-                    contract_ident,
-                },
-            );
-        }
+        for (id, contract) in contracts {
+            // TODO: broken if a colon is in the dir name..
+            let mut parts = contract.split(':');
+            let (Some(file_name), Some(contract_name)) = (parts.next(), parts.next()) else {
+                anyhow::bail!("metadata contains invalid contract: {contract}");
+            };
+            let file = directory.to_path_buf().join(file_name);
+            if !file.is_file() {
+                anyhow::bail!("contract {id} is not a file: {}", file.display());
+            }
+
+            sources.insert(id.clone(), (file, contract_name.to_string()));
+        }
 
         Ok(sources)
@@ -191,16 +178,12 @@ impl Metadata {
         match serde_json::from_str::<Self>(&spec) {
             Ok(mut metadata) => {
                 metadata.file_path = Some(path.to_path_buf());
-                metadata.contracts = Some(
-                    [(
-                        ContractInstance::new("Test"),
-                        ContractPathAndIdent {
-                            contract_source_path: path.to_path_buf(),
-                            contract_ident: ContractIdent::new("Test"),
-                        },
-                    )]
-                    .into(),
-                );
+                let name = path
+                    .file_name()
+                    .expect("this should be the path to a Solidity file")
+                    .to_str()
+                    .expect("the file name should be valid UTF-8k");
+                metadata.contracts = Some([(String::from("Test"), format!("{name}:Test"))].into());
                 Some(metadata)
             }
             Err(error) => {
@@ -212,173 +195,4 @@ impl Metadata {
             }
         }
     }
-
-    /// Returns an iterator over all of the solidity files that needs to be compiled for this
-    /// [`Metadata`] object
-    ///
-    /// Note: if the metadata is contained within a solidity file then this is the only file that
-    /// we wish to compile since this is a self-contained test. Otherwise, if it's a JSON file
-    /// then we need to compile all of the contracts that are in the directory since imports are
-    /// allowed in there.
-    pub fn files_to_compile(&self) -> anyhow::Result<Box<dyn Iterator<Item = PathBuf>>> {
-        let Some(ref metadata_file_path) = self.file_path else {
-            anyhow::bail!("The metadata file path is not defined");
-        };
-        if metadata_file_path
-            .extension()
-            .is_some_and(|extension| extension.eq_ignore_ascii_case("sol"))
-        {
-            Ok(Box::new(std::iter::once(metadata_file_path.clone())))
-        } else {
-            Ok(Box::new(
-                FilesWithExtensionIterator::new(self.directory()?).with_allowed_extension("sol"),
-            ))
-        }
-    }
-}
-
-define_wrapper_type!(
-    /// Represents a contract instance found a metadata file.
-    ///
-    /// Typically, this is used as the key to the "contracts" field of metadata files.
-    #[derive(
-        Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize,
-    )]
-    #[serde(transparent)]
-    pub struct ContractInstance(String);
-);
-
-define_wrapper_type!(
-    /// Represents a contract identifier found a metadata file.
-    ///
-    /// A contract identifier is the name of the contract in the source code.
-    #[derive(
-        Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize,
-    )]
-    #[serde(transparent)]
-    pub struct ContractIdent(String);
-);
-
-/// Represents an identifier used for contracts.
-///
-/// The type supports serialization from and into the following string format:
-///
-/// ```text
-/// ${path}:${contract_ident}
-/// ```
-#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
-#[serde(try_from = "String", into = "String")]
-pub struct ContractPathAndIdent {
-    /// The path of the contract source code relative to the directory containing the metadata file.
-    pub contract_source_path: PathBuf,
-
-    /// The identifier of the contract.
-    pub contract_ident: ContractIdent,
-}
-
-impl Display for ContractPathAndIdent {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(
-            f,
-            "{}:{}",
-            self.contract_source_path.display(),
-            self.contract_ident.as_ref()
-        )
-    }
-}
-
-impl FromStr for ContractPathAndIdent {
-    type Err = anyhow::Error;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        let mut splitted_string = s.split(":").peekable();
-        let mut path = None::<String>;
-        let mut identifier = None::<String>;
-        loop {
-            let Some(next_item) = splitted_string.next() else {
-                break;
-            };
-            if splitted_string.peek().is_some() {
-                match path {
-                    Some(ref mut path) => {
-                        path.push(':');
-                        path.push_str(next_item);
-                    }
-                    None => path = Some(next_item.to_owned()),
-                }
-            } else {
-                identifier = Some(next_item.to_owned())
-            }
-        }
-        match (path, identifier) {
-            (Some(path), Some(identifier)) => Ok(Self {
-                contract_source_path: PathBuf::from(path),
-                contract_ident: ContractIdent::new(identifier),
-            }),
-            (None, Some(path)) | (Some(path), None) => {
-                let Some(identifier) = path.split(".").next().map(ToOwned::to_owned) else {
-                    anyhow::bail!("Failed to find identifier");
-                };
-                Ok(Self {
-                    contract_source_path: PathBuf::from(path),
-                    contract_ident: ContractIdent::new(identifier),
-                })
-            }
-            (None, None) => anyhow::bail!("Failed to find the path and identifier"),
-        }
-    }
-}
-
-impl TryFrom<String> for ContractPathAndIdent {
-    type Error = anyhow::Error;
-
-    fn try_from(value: String) -> Result<Self, Self::Error> {
-        Self::from_str(&value)
-    }
-}
-
-impl From<ContractPathAndIdent> for String {
-    fn from(value: ContractPathAndIdent) -> Self {
-        value.to_string()
-    }
-}
-
-#[cfg(test)]
-mod test {
-    use super::*;
-
-    #[test]
-    fn contract_identifier_respects_roundtrip_property() {
-        // Arrange
-        let string = "ERC20/ERC20.sol:ERC20";
-
-        // Act
-        let identifier = ContractPathAndIdent::from_str(string);
-
-        // Assert
-        let identifier = identifier.expect("Failed to parse");
-        assert_eq!(
-            identifier.contract_source_path.display().to_string(),
-            "ERC20/ERC20.sol"
-        );
-        assert_eq!(identifier.contract_ident, "ERC20".to_owned().into());
-
-        // Act
-        let reserialized = identifier.to_string();
-
-        // Assert
-        assert_eq!(string, reserialized);
-    }
-
-    #[test]
-    fn complex_metadata_file_can_be_deserialized() {
-        // Arrange
-        const JSON: &str = include_str!("../../../assets/test_metadata.json");
-
-        // Act
-        let metadata = serde_json::from_str::<Metadata>(JSON);
-
-        // Assert
-        metadata.expect("Failed to deserialize metadata");
-    }
-}
 }
@@ -1,4 +1,3 @@
-use revive_dt_common::types::VersionOrRequirement;
 use semver::Version;
 use serde::de::Deserializer;
 use serde::{Deserialize, Serialize};
@@ -79,15 +78,6 @@ impl SolcMode {
 
         None
     }
-
-    /// Resolves the [`SolcMode`]'s solidity version requirement into a [`VersionOrRequirement`] if
-    /// the requirement is present on the object. Otherwise, the passed default version is used.
-    pub fn compiler_version_to_use(&self, default: Version) -> VersionOrRequirement {
-        match self.solc_version {
-            Some(ref requirement) => requirement.clone().into(),
-            None => default.into(),
-        }
-    }
 }
 
 impl<'de> Deserialize<'de> for Mode {
@@ -1,33 +0,0 @@
-use alloy::eips::BlockNumberOrTag;
-use alloy::primitives::{Address, BlockHash, BlockNumber, BlockTimestamp, ChainId, U256};
-use anyhow::Result;
-
-/// A trait of the interface are required to implement to be used by the resolution logic that this
-/// crate implements to go from string calldata and into the bytes calldata.
-pub trait ResolverApi {
-    /// Returns the ID of the chain that the node is on.
-    fn chain_id(&self) -> impl Future<Output = Result<ChainId>>;
-
-    // TODO: This is currently a u128 due to Kitchensink needing more than 64 bits for its gas limit
-    // when we implement the changes to the gas we need to adjust this to be a u64.
-    /// Returns the gas limit of the specified block.
-    fn block_gas_limit(&self, number: BlockNumberOrTag) -> impl Future<Output = Result<u128>>;
-
-    /// Returns the coinbase of the specified block.
-    fn block_coinbase(&self, number: BlockNumberOrTag) -> impl Future<Output = Result<Address>>;
-
-    /// Returns the difficulty of the specified block.
-    fn block_difficulty(&self, number: BlockNumberOrTag) -> impl Future<Output = Result<U256>>;
-
-    /// Returns the hash of the specified block.
-    fn block_hash(&self, number: BlockNumberOrTag) -> impl Future<Output = Result<BlockHash>>;
-
-    /// Returns the timestamp of the specified block,
-    fn block_timestamp(
-        &self,
-        number: BlockNumberOrTag,
-    ) -> impl Future<Output = Result<BlockTimestamp>>;
-
-    /// Returns the number of the last block.
-    fn last_block_number(&self) -> impl Future<Output = Result<BlockNumber>>;
-}
@@ -11,3 +11,6 @@ rust-version.workspace = true
 [dependencies]
 alloy = { workspace = true }
 anyhow = { workspace = true }
+tracing = { workspace = true }
+once_cell = { workspace = true }
+tokio = { workspace = true }
@@ -1,8 +1,14 @@
 //! This crate implements all node interactions.
 
-use alloy::rpc::types::trace::geth::{DiffMode, GethDebugTracingOptions, GethTrace};
+use alloy::primitives::Address;
+use alloy::rpc::types::trace::geth::{DiffMode, GethTrace};
 use alloy::rpc::types::{TransactionReceipt, TransactionRequest};
-use anyhow::Result;
+use tokio_runtime::TO_TOKIO;
+
+pub mod nonce;
+mod tokio_runtime;
+pub mod trace;
+pub mod transaction;
 
 /// An interface for all interactions with Ethereum compatible nodes.
 pub trait EthereumNode {
@@ -10,15 +16,14 @@ pub trait EthereumNode {
     fn execute_transaction(
         &self,
         transaction: TransactionRequest,
-    ) -> impl Future<Output = Result<TransactionReceipt>>;
+    ) -> anyhow::Result<TransactionReceipt>;
 
     /// Trace the transaction in the [TransactionReceipt] and return a [GethTrace].
-    fn trace_transaction(
-        &self,
-        receipt: &TransactionReceipt,
-        trace_options: GethDebugTracingOptions,
-    ) -> impl Future<Output = Result<GethTrace>>;
+    fn trace_transaction(&self, transaction: TransactionReceipt) -> anyhow::Result<GethTrace>;
 
     /// Returns the state diff of the transaction hash in the [TransactionReceipt].
-    fn state_diff(&self, receipt: &TransactionReceipt) -> impl Future<Output = Result<DiffMode>>;
+    fn state_diff(&self, transaction: TransactionReceipt) -> anyhow::Result<DiffMode>;
+
+    /// Returns the next available nonce for the given [Address].
+    fn fetch_add_nonce(&self, address: Address) -> anyhow::Result<u64>;
 }
@@ -0,0 +1,55 @@
+use std::pin::Pin;
+
+use alloy::{
+    primitives::Address,
+    providers::{Provider, ProviderBuilder},
+};
+use tokio::sync::oneshot;
+
+use crate::{TO_TOKIO, tokio_runtime::AsyncNodeInteraction};
+
+pub type Task = Pin<Box<dyn Future<Output = anyhow::Result<u64>> + Send>>;
+
+pub(crate) struct Nonce {
+    sender: oneshot::Sender<anyhow::Result<u64>>,
+    task: Task,
+}
+
+impl AsyncNodeInteraction for Nonce {
+    type Output = anyhow::Result<u64>;
+
+    fn split(
+        self,
+    ) -> (
+        std::pin::Pin<Box<dyn Future<Output = Self::Output> + Send>>,
+        oneshot::Sender<Self::Output>,
+    ) {
+        (self.task, self.sender)
+    }
+}
+
+/// This is like `trace_transaction`, just for nonces.
+pub fn fetch_onchain_nonce(
+    connection: String,
+    wallet: alloy::network::EthereumWallet,
+    address: Address,
+) -> anyhow::Result<u64> {
+    let sender = TO_TOKIO.lock().unwrap().nonce_sender.clone();
+
+    let (tx, rx) = oneshot::channel();
+    let task: Task = Box::pin(async move {
+        let provider = ProviderBuilder::new()
+            .wallet(wallet)
+            .connect(&connection)
+            .await?;
+        let onchain = provider.get_transaction_count(address).await?;
+        Ok(onchain)
+    });
+
+    sender
+        .blocking_send(Nonce { task, sender: tx })
+        .expect("not in async context");
+
+    rx.blocking_recv()
+        .unwrap_or_else(|err| anyhow::bail!("nonce fetch failed: {err}"))
+}
@@ -0,0 +1,87 @@
+//! The alloy crate __requires__ a tokio runtime.
+//! We contain any async rust right here.
+
+use once_cell::sync::Lazy;
+use std::pin::Pin;
+use std::sync::Mutex;
+use std::thread;
+use tokio::runtime::Runtime;
+use tokio::spawn;
+use tokio::sync::{mpsc, oneshot};
+use tokio::task::JoinError;
+
+use crate::nonce::Nonce;
+use crate::trace::Trace;
+use crate::transaction::Transaction;
+
+pub(crate) static TO_TOKIO: Lazy<Mutex<TokioRuntime>> =
+    Lazy::new(|| Mutex::new(TokioRuntime::spawn()));
+
+/// Common interface for executing async node interactions from a non-async context.
+#[allow(clippy::type_complexity)]
+pub(crate) trait AsyncNodeInteraction: Send + 'static {
+    type Output: Send;
+
+    //// Returns the task and the output sender.
+    fn split(
+        self,
+    ) -> (
+        Pin<Box<dyn Future<Output = Self::Output> + Send>>,
+        oneshot::Sender<Self::Output>,
+    );
+}
+
+pub(crate) struct TokioRuntime {
+    pub(crate) transaction_sender: mpsc::Sender<Transaction>,
+    pub(crate) trace_sender: mpsc::Sender<Trace>,
+    pub(crate) nonce_sender: mpsc::Sender<Nonce>,
+}
+
+impl TokioRuntime {
+    fn spawn() -> Self {
+        let rt = Runtime::new().expect("should be able to create the tokio runtime");
+        let (transaction_sender, transaction_receiver) = mpsc::channel::<Transaction>(1024);
+        let (trace_sender, trace_receiver) = mpsc::channel::<Trace>(1024);
+        let (nonce_sender, nonce_receiver) = mpsc::channel::<Nonce>(1024);
+
+        thread::spawn(move || {
+            rt.block_on(async move {
+                let transaction_task = spawn(interaction::<Transaction>(transaction_receiver));
+                let trace_task = spawn(interaction::<Trace>(trace_receiver));
+                let nonce_task = spawn(interaction::<Nonce>(nonce_receiver));
+
+                if let Err(error) = transaction_task.await {
+                    tracing::error!("tokio transaction task failed: {error}");
+                }
+                if let Err(error) = trace_task.await {
+                    tracing::error!("tokio trace transaction task failed: {error}");
+                }
+                if let Err(error) = nonce_task.await {
+                    tracing::error!("tokio nonce task failed: {error}");
+                }
+            });
+        });
+
+        Self {
+            transaction_sender,
+            trace_sender,
+            nonce_sender,
+        }
+    }
+}
+
+async fn interaction<T>(mut receiver: mpsc::Receiver<T>) -> Result<(), JoinError>
+where
+    T: AsyncNodeInteraction,
+{
+    while let Some(task) = receiver.recv().await {
+        spawn(async move {
+            let (task, sender) = task.split();
+            sender
+                .send(task.await)
+                .unwrap_or_else(|_| panic!("failed to send task output"));
+        });
+    }
+
+    Ok(())
+}
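The pattern above repeats for all three interaction types: each owns a boxed future plus a oneshot sender, and `split` hands both to the runtime task. A minimal sketch of what a further interaction type would have to provide, assuming it lived inside this crate (the `BlockNumber` type itself is an illustrative assumption, not part of the diff):

    use std::pin::Pin;
    use tokio::sync::oneshot;

    use crate::tokio_runtime::AsyncNodeInteraction;

    // Hypothetical fourth interaction, mirroring Nonce/Trace/Transaction above.
    pub(crate) struct BlockNumber {
        sender: oneshot::Sender<anyhow::Result<u64>>,
        task: Pin<Box<dyn Future<Output = anyhow::Result<u64>> + Send>>,
    }

    impl AsyncNodeInteraction for BlockNumber {
        type Output = anyhow::Result<u64>;

        // The runtime side only needs the future to drive and the channel on
        // which to report the result back to the blocking caller.
        fn split(
            self,
        ) -> (
            Pin<Box<dyn Future<Output = Self::Output> + Send>>,
            oneshot::Sender<Self::Output>,
        ) {
            (self.task, self.sender)
        }
    }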
@@ -0,0 +1,43 @@
+//! Trace transactions in a sync context.
+
+use std::pin::Pin;
+
+use alloy::rpc::types::trace::geth::GethTrace;
+use tokio::sync::oneshot;
+
+use crate::TO_TOKIO;
+use crate::tokio_runtime::AsyncNodeInteraction;
+
+pub type Task = Pin<Box<dyn Future<Output = anyhow::Result<GethTrace>> + Send>>;
+
+pub(crate) struct Trace {
+    sender: oneshot::Sender<anyhow::Result<GethTrace>>,
+    task: Task,
+}
+
+impl AsyncNodeInteraction for Trace {
+    type Output = anyhow::Result<GethTrace>;
+
+    fn split(
+        self,
+    ) -> (
+        std::pin::Pin<Box<dyn Future<Output = Self::Output> + Send>>,
+        oneshot::Sender<Self::Output>,
+    ) {
+        (self.task, self.sender)
+    }
+}
+
+/// Execute some [Task] that return a [GethTrace] result.
+pub fn trace_transaction(task: Task) -> anyhow::Result<GethTrace> {
+    let task_sender = TO_TOKIO.lock().unwrap().trace_sender.clone();
+    let (sender, receiver) = oneshot::channel();
+
+    task_sender
+        .blocking_send(Trace { task, sender })
+        .expect("we are not calling this from an async context");
+
+    receiver
+        .blocking_recv()
+        .unwrap_or_else(|error| anyhow::bail!("no trace received: {error}"))
+}
@@ -0,0 +1,46 @@
+//! Execute transactions in a sync context.
+
+use std::pin::Pin;
+
+use alloy::rpc::types::TransactionReceipt;
+use tokio::sync::oneshot;
+
+use crate::TO_TOKIO;
+use crate::tokio_runtime::AsyncNodeInteraction;
+
+pub type Task = Pin<Box<dyn Future<Output = anyhow::Result<TransactionReceipt>> + Send>>;
+
+pub(crate) struct Transaction {
+    receipt_sender: oneshot::Sender<anyhow::Result<TransactionReceipt>>,
+    task: Task,
+}
+
+impl AsyncNodeInteraction for Transaction {
+    type Output = anyhow::Result<TransactionReceipt>;
+
+    fn split(
+        self,
+    ) -> (
+        Pin<Box<dyn Future<Output = Self::Output> + Send>>,
+        oneshot::Sender<Self::Output>,
+    ) {
+        (self.task, self.receipt_sender)
+    }
+}
+
+/// Execute some [Task] that returns a [TransactionReceipt].
+pub fn execute_transaction(task: Task) -> anyhow::Result<TransactionReceipt> {
+    let request_sender = TO_TOKIO.lock().unwrap().transaction_sender.clone();
+    let (receipt_sender, receipt_receiver) = oneshot::channel();
+
+    request_sender
+        .blocking_send(Transaction {
+            receipt_sender,
+            task,
+        })
+        .expect("we are not calling this from an async context");
+
+    receipt_receiver
+        .blocking_recv()
+        .unwrap_or_else(|error| anyhow::bail!("no receipt received: {error}"))
+}
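Taken together with the runtime module, a synchronous node implementation can wrap any async provider call into a `Task` and block on the result. A rough usage sketch, assuming `Task` and `execute_transaction` from the module above are in scope; the provider construction mirrors the nonce module, while the `send_transaction`/`get_receipt` step is an assumption about the alloy API rather than something this diff pins down:

    use alloy::providers::{Provider, ProviderBuilder};
    use alloy::rpc::types::{TransactionReceipt, TransactionRequest};

    fn submit(
        connection: String,
        wallet: alloy::network::EthereumWallet,
        request: TransactionRequest,
    ) -> anyhow::Result<TransactionReceipt> {
        // Box the async work; the contained tokio runtime drives it to completion.
        let task: Task = Box::pin(async move {
            let provider = ProviderBuilder::new()
                .wallet(wallet)
                .connect(&connection)
                .await?;
            let receipt = provider
                .send_transaction(request)
                .await?
                .get_receipt()
                .await?;
            Ok(receipt)
        });
        // Blocks the calling (non-async) thread until the oneshot reply arrives.
        execute_transaction(task)
    }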
@@ -14,12 +14,9 @@ alloy = { workspace = true }
 tracing = { workspace = true }
 tokio = { workspace = true }
 
-revive-dt-common = { workspace = true }
-revive-dt-config = { workspace = true }
-revive-dt-format = { workspace = true }
 revive-dt-node-interaction = { workspace = true }
+revive-dt-config = { workspace = true }
 
-serde = { workspace = true }
 serde_json = { workspace = true }
 
 sp-core = { workspace = true }
@@ -27,4 +24,3 @@ sp-runtime = { workspace = true }
 
 [dev-dependencies]
 temp-dir = { workspace = true }
-tokio = { workspace = true }
@@ -1,78 +0,0 @@
-use alloy::{
-    network::{Network, TransactionBuilder},
-    providers::{
-        Provider, SendableTx,
-        fillers::{GasFiller, TxFiller},
-    },
-    transports::TransportResult,
-};
-
-#[derive(Clone, Debug)]
-pub struct FallbackGasFiller {
-    inner: GasFiller,
-    default_gas_limit: u64,
-    default_max_fee_per_gas: u128,
-    default_priority_fee: u128,
-}
-
-impl FallbackGasFiller {
-    pub fn new(
-        default_gas_limit: u64,
-        default_max_fee_per_gas: u128,
-        default_priority_fee: u128,
-    ) -> Self {
-        Self {
-            inner: GasFiller,
-            default_gas_limit,
-            default_max_fee_per_gas,
-            default_priority_fee,
-        }
-    }
-}
-
-impl<N> TxFiller<N> for FallbackGasFiller
-where
-    N: Network,
-{
-    type Fillable = Option<<GasFiller as TxFiller<N>>::Fillable>;
-
-    fn status(
-        &self,
-        tx: &<N as Network>::TransactionRequest,
-    ) -> alloy::providers::fillers::FillerControlFlow {
-        <GasFiller as TxFiller<N>>::status(&self.inner, tx)
-    }
-
-    fn fill_sync(&self, _: &mut alloy::providers::SendableTx<N>) {}
-
-    async fn prepare<P: Provider<N>>(
-        &self,
-        provider: &P,
-        tx: &<N as Network>::TransactionRequest,
-    ) -> TransportResult<Self::Fillable> {
-        // Try to fetch GasFiller’s “fillable” (gas_price, base_fee, estimate_gas, …)
-        // If it errors (i.e. tx would revert under eth_estimateGas), swallow it.
-        match self.inner.prepare(provider, tx).await {
-            Ok(fill) => Ok(Some(fill)),
-            Err(_) => Ok(None),
-        }
-    }
-
-    async fn fill(
-        &self,
-        fillable: Self::Fillable,
-        mut tx: alloy::providers::SendableTx<N>,
-    ) -> TransportResult<SendableTx<N>> {
-        if let Some(fill) = fillable {
-            // our inner GasFiller succeeded — use it
-            self.inner.fill(fill, tx).await
-        } else {
-            if let Some(builder) = tx.as_mut_builder() {
-                builder.set_gas_limit(self.default_gas_limit);
-                builder.set_max_fee_per_gas(self.default_max_fee_per_gas);
-                builder.set_max_priority_fee_per_gas(self.default_priority_fee);
-            }
-            Ok(tx)
-        }
-    }
-}
@@ -1,5 +0,0 @@
-/// This constant defines how much Wei accounts are pre-seeded with in genesis.
-///
-/// Note: After changing this number, check that the tests for kitchensink work as we encountered
-/// some issues with different values of the initial balance on Kitchensink.
-pub const INITIAL_BALANCE: u128 = 10u128.pow(37);

+133 -334
@@ -1,37 +1,35 @@
 //! The go-ethereum node implementation.
 
 use std::{
+    collections::HashMap,
     fs::{File, OpenOptions, create_dir_all, remove_dir_all},
     io::{BufRead, BufReader, Read, Write},
     path::PathBuf,
     process::{Child, Command, Stdio},
-    sync::atomic::{AtomicU32, Ordering},
+    sync::{
+        Mutex,
+        atomic::{AtomicU32, Ordering},
+    },
     time::{Duration, Instant},
 };
 
 use alloy::{
-    eips::BlockNumberOrTag,
-    genesis::{Genesis, GenesisAccount},
-    network::{Ethereum, EthereumWallet, NetworkWallet},
-    primitives::{Address, BlockHash, BlockNumber, BlockTimestamp, FixedBytes, U256},
-    providers::{
-        Provider, ProviderBuilder,
-        ext::DebugApi,
-        fillers::{CachedNonceManager, ChainIdFiller, FillProvider, NonceFiller, TxFiller},
-    },
+    network::EthereumWallet,
+    primitives::Address,
+    providers::{Provider, ProviderBuilder, ext::DebugApi},
     rpc::types::{
         TransactionReceipt, TransactionRequest,
         trace::geth::{DiffMode, GethDebugTracingOptions, PreStateConfig, PreStateFrame},
     },
-    signers::local::PrivateKeySigner,
 };
-use revive_dt_common::fs::clear_directory;
 use revive_dt_config::Arguments;
-use revive_dt_format::traits::ResolverApi;
-use revive_dt_node_interaction::EthereumNode;
+use revive_dt_node_interaction::{
+    EthereumNode, nonce::fetch_onchain_nonce, trace::trace_transaction,
+    transaction::execute_transaction,
+};
 use tracing::Level;
 
-use crate::{Node, common::FallbackGasFiller, constants::INITIAL_BALANCE};
+use crate::Node;
 
 static NODE_COUNT: AtomicU32 = AtomicU32::new(0);
@@ -43,7 +41,7 @@ static NODE_COUNT: AtomicU32 = AtomicU32::new(0);
 ///
 /// Prunes the child process and the base directory on drop.
 #[derive(Debug)]
-pub struct GethNode {
+pub struct Instance {
     connection_string: String,
     base_directory: PathBuf,
     data_directory: PathBuf,
@@ -54,7 +52,7 @@ pub struct GethNode {
     network_id: u64,
     start_timeout: u64,
     wallet: EthereumWallet,
-    nonce_manager: CachedNonceManager,
+    nonces: Mutex<HashMap<Address, u64>>,
     /// This vector stores [`File`] objects that we use for logging which we want to flush when the
     /// node object is dropped. We do not store them in a structured fashion at the moment (in
     /// separate fields) as the logic that we need to apply to them is all the same regardless of
@@ -62,7 +60,7 @@ pub struct GethNode {
|
|||||||
logs_file_to_flush: Vec<File>,
|
logs_file_to_flush: Vec<File>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl GethNode {
|
impl Instance {
|
||||||
const BASE_DIRECTORY: &str = "geth";
|
const BASE_DIRECTORY: &str = "geth";
|
||||||
const DATA_DIRECTORY: &str = "data";
|
const DATA_DIRECTORY: &str = "data";
|
||||||
const LOGS_DIRECTORY: &str = "logs";
|
const LOGS_DIRECTORY: &str = "logs";
|
||||||
@@ -76,30 +74,14 @@ impl GethNode {
|
|||||||
const GETH_STDOUT_LOG_FILE_NAME: &str = "node_stdout.log";
|
const GETH_STDOUT_LOG_FILE_NAME: &str = "node_stdout.log";
|
||||||
const GETH_STDERR_LOG_FILE_NAME: &str = "node_stderr.log";
|
const GETH_STDERR_LOG_FILE_NAME: &str = "node_stderr.log";
|
||||||
|
|
||||||
const TRANSACTION_INDEXING_ERROR: &str = "transaction indexing is in progress";
|
|
||||||
|
|
||||||
/// Create the node directory and call `geth init` to configure the genesis.
|
/// Create the node directory and call `geth init` to configure the genesis.
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
||||||
fn init(&mut self, genesis: String) -> anyhow::Result<&mut Self> {
|
fn init(&mut self, genesis: String) -> anyhow::Result<&mut Self> {
|
||||||
let _ = clear_directory(&self.base_directory);
|
|
||||||
let _ = clear_directory(&self.logs_directory);
|
|
||||||
|
|
||||||
create_dir_all(&self.base_directory)?;
|
create_dir_all(&self.base_directory)?;
|
||||||
create_dir_all(&self.logs_directory)?;
|
create_dir_all(&self.logs_directory)?;
|
||||||
|
|
||||||
let mut genesis = serde_json::from_str::<Genesis>(&genesis)?;
|
|
||||||
for signer_address in
|
|
||||||
<EthereumWallet as NetworkWallet<Ethereum>>::signer_addresses(&self.wallet)
|
|
||||||
{
|
|
||||||
// Note, the use of the entry API here means that we only modify the entries for any
|
|
||||||
// account that is not in the `alloc` field of the genesis state.
|
|
||||||
genesis
|
|
||||||
.alloc
|
|
||||||
.entry(signer_address)
|
|
||||||
.or_insert(GenesisAccount::default().with_balance(U256::from(INITIAL_BALANCE)));
|
|
||||||
}
|
|
||||||
let genesis_path = self.base_directory.join(Self::GENESIS_JSON_FILE);
|
let genesis_path = self.base_directory.join(Self::GENESIS_JSON_FILE);
|
||||||
serde_json::to_writer(File::create(&genesis_path)?, &genesis)?;
|
File::create(&genesis_path)?.write_all(genesis.as_bytes())?;
|
||||||
|
|
||||||
let mut child = Command::new(&self.geth)
|
let mut child = Command::new(&self.geth)
|
||||||
.arg("init")
|
.arg("init")
|
||||||
@@ -155,10 +137,6 @@ impl GethNode {
|
|||||||
.arg("--nodiscover")
|
.arg("--nodiscover")
|
||||||
.arg("--maxpeers")
|
.arg("--maxpeers")
|
||||||
.arg("0")
|
.arg("0")
|
||||||
.arg("--txlookuplimit")
|
|
||||||
.arg("0")
|
|
||||||
.arg("--cache.blocklogs")
|
|
||||||
.arg("512")
|
|
||||||
.stderr(stderr_logs_file.try_clone()?)
|
.stderr(stderr_logs_file.try_clone()?)
|
||||||
.stdout(stdout_logs_file.try_clone()?)
|
.stdout(stdout_logs_file.try_clone()?)
|
||||||
.spawn()?
|
.spawn()?
|
||||||
@@ -216,228 +194,157 @@ impl GethNode {
|
|||||||
fn geth_stderr_log_file_path(&self) -> PathBuf {
|
fn geth_stderr_log_file_path(&self) -> PathBuf {
|
||||||
self.logs_directory.join(Self::GETH_STDERR_LOG_FILE_NAME)
|
self.logs_directory.join(Self::GETH_STDERR_LOG_FILE_NAME)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn provider(
|
|
||||||
&self,
|
|
||||||
) -> impl Future<
|
|
||||||
Output = anyhow::Result<
|
|
||||||
FillProvider<impl TxFiller<Ethereum>, impl Provider<Ethereum>, Ethereum>,
|
|
||||||
>,
|
|
||||||
> + 'static {
|
|
||||||
let connection_string = self.connection_string();
|
|
||||||
let wallet = self.wallet.clone();
|
|
||||||
|
|
||||||
// Note: We would like all providers to make use of the same nonce manager so that we have
|
|
||||||
// monotonically increasing nonces that are cached. The cached nonce manager uses Arc's in
|
|
||||||
// its implementation and therefore it means that when we clone it then it still references
|
|
||||||
// the same state.
|
|
||||||
let nonce_manager = self.nonce_manager.clone();
|
|
||||||
|
|
||||||
Box::pin(async move {
|
|
||||||
ProviderBuilder::new()
|
|
||||||
.disable_recommended_fillers()
|
|
||||||
.filler(FallbackGasFiller::new(500_000_000, 500_000_000, 1))
|
|
||||||
.filler(ChainIdFiller::default())
|
|
||||||
.filler(NonceFiller::new(nonce_manager))
|
|
||||||
.wallet(wallet)
|
|
||||||
.connect(&connection_string)
|
|
||||||
.await
|
|
||||||
.map_err(Into::into)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl EthereumNode for GethNode {
|
impl EthereumNode for Instance {
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
||||||
async fn execute_transaction(
|
fn execute_transaction(
|
||||||
&self,
|
&self,
|
||||||
transaction: TransactionRequest,
|
transaction: TransactionRequest,
|
||||||
) -> anyhow::Result<alloy::rpc::types::TransactionReceipt> {
|
) -> anyhow::Result<alloy::rpc::types::TransactionReceipt> {
|
||||||
let outer_span = tracing::debug_span!("Submitting transaction", ?transaction);
|
let connection_string = self.connection_string();
|
||||||
let _outer_guard = outer_span.enter();
|
let wallet = self.wallet.clone();
|
||||||
|
|
||||||
let provider = self.provider().await?;
|
execute_transaction(Box::pin(async move {
|
||||||
|
let outer_span = tracing::debug_span!("Submitting transaction", ?transaction,);
|
||||||
|
let _outer_guard = outer_span.enter();
|
||||||
|
|
||||||
let pending_transaction = provider.send_transaction(transaction).await?;
|
let provider = ProviderBuilder::new()
|
||||||
let transaction_hash = pending_transaction.tx_hash();
|
.wallet(wallet)
|
||||||
|
.connect(&connection_string)
|
||||||
|
.await?;
|
||||||
|
|
||||||
let span = tracing::info_span!("Awaiting transaction receipt", ?transaction_hash);
|
let pending_transaction = provider.send_transaction(transaction).await?;
|
||||||
let _guard = span.enter();
|
let transaction_hash = pending_transaction.tx_hash();
|
||||||
|
|
||||||
// The following is a fix for the "transaction indexing is in progress" error that we
|
let span = tracing::info_span!("Awaiting transaction receipt", ?transaction_hash);
|
||||||
// used to get. You can find more information on this in the following GH issue in geth
|
let _guard = span.enter();
|
||||||
// https://github.com/ethereum/go-ethereum/issues/28877. To summarize what's going on,
|
|
||||||
// before we can get the receipt of the transaction it needs to have been indexed by the
|
|
||||||
// node's indexer. Just because the transaction has been confirmed it doesn't mean that
|
|
||||||
// it has been indexed. When we call alloy's `get_receipt` it checks if the transaction
|
|
||||||
// was confirmed. If it has been, then it will call `eth_getTransactionReceipt` method
|
|
||||||
// which _might_ return the above error if the tx has not yet been indexed yet. So, we
|
|
||||||
// need to implement a retry mechanism for the receipt to keep retrying to get it until
|
|
||||||
// it eventually works, but we only do that if the error we get back is the "transaction
|
|
||||||
// indexing is in progress" error or if the receipt is None.
|
|
||||||
//
|
|
||||||
// Getting the transaction indexed and taking a receipt can take a long time especially
|
|
||||||
// when a lot of transactions are being submitted to the node. Thus, while initially we
|
|
||||||
// only allowed for 60 seconds of waiting with a 1 second delay in polling, we need to
|
|
||||||
// allow for a larger wait time. Therefore, in here we allow for 5 minutes of waiting
|
|
||||||
// with exponential backoff each time we attempt to get the receipt and find that it's
|
|
||||||
// not available.
|
|
||||||
let mut retries = 0;
|
|
||||||
let mut total_wait_duration = Duration::from_secs(0);
|
|
||||||
let max_allowed_wait_duration = Duration::from_secs(5 * 60);
|
|
||||||
loop {
|
|
||||||
if total_wait_duration >= max_allowed_wait_duration {
|
|
||||||
tracing::error!(
|
|
||||||
?total_wait_duration,
|
|
||||||
?max_allowed_wait_duration,
|
|
||||||
retry_count = retries,
|
|
||||||
"Failed to get receipt after polling for it"
|
|
||||||
);
|
|
||||||
anyhow::bail!(
|
|
||||||
"Polled for receipt for {total_wait_duration:?} but failed to get it"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
match provider.get_transaction_receipt(*transaction_hash).await {
|
// The following is a fix for the "transaction indexing is in progress" error that we
|
||||||
Ok(Some(receipt)) => {
|
// used to get. You can find more information on this in the following GH issue in geth
|
||||||
tracing::info!(?total_wait_duration, "Found receipt");
|
// https://github.com/ethereum/go-ethereum/issues/28877. To summarize what's going on,
|
||||||
break Ok(receipt);
|
// before we can get the receipt of the transaction it needs to have been indexed by the
|
||||||
}
|
// node's indexer. Just because the transaction has been confirmed it doesn't mean that
|
||||||
Ok(None) => {}
|
// it has been indexed. When we call alloy's `get_receipt` it checks if the transaction
|
||||||
Err(error) => {
|
// was confirmed. If it has been, then it will call `eth_getTransactionReceipt` method
|
||||||
let error_string = error.to_string();
|
// which _might_ return the above error if the tx has not yet been indexed yet. So, we
|
||||||
if !error_string.contains(Self::TRANSACTION_INDEXING_ERROR) {
|
// need to implement a retry mechanism for the receipt to keep retrying to get it until
|
||||||
break Err(error.into());
|
// it eventually works, but we only do that if the error we get back is the "transaction
|
||||||
|
// indexing is in progress" error or if the receipt is None.
|
||||||
|
//
|
||||||
|
// At the moment we do not allow for the 60 seconds to be modified and we take it as
|
||||||
|
// being an implementation detail that's invisible to anything outside of this module.
|
||||||
|
//
|
||||||
|
// We allow a total of 60 retries for getting the receipt with one second between each
|
||||||
|
// retry and the next which means that we allow for a total of 60 seconds of waiting
|
||||||
|
// before we consider that we're unable to get the transaction receipt.
|
||||||
|
let mut retries = 0;
|
||||||
|
loop {
|
||||||
|
match provider.get_transaction_receipt(*transaction_hash).await {
|
||||||
|
Ok(Some(receipt)) => {
|
||||||
|
tracing::info!("Obtained the transaction receipt");
|
||||||
|
break Ok(receipt);
|
||||||
|
}
|
||||||
|
Ok(None) => {
|
||||||
|
if retries == 60 {
|
||||||
|
tracing::error!(
|
||||||
|
"Polled for transaction receipt for 60 seconds but failed to get it"
|
||||||
|
);
|
||||||
|
break Err(anyhow::anyhow!("Failed to get the transaction receipt"));
|
||||||
|
} else {
|
||||||
|
tracing::trace!(
|
||||||
|
retries,
|
||||||
|
"Sleeping for 1 second and trying to get the receipt again"
|
||||||
|
);
|
||||||
|
retries += 1;
|
||||||
|
tokio::time::sleep(std::time::Duration::from_secs(1)).await;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(error) => {
|
||||||
|
let error_string = error.to_string();
|
||||||
|
if error_string.contains("transaction indexing is in progress") {
|
||||||
|
if retries == 60 {
|
||||||
|
tracing::error!(
|
||||||
|
"Polled for transaction receipt for 60 seconds but failed to get it"
|
||||||
|
);
|
||||||
|
break Err(error.into());
|
||||||
|
} else {
|
||||||
|
tracing::trace!(
|
||||||
|
retries,
|
||||||
|
"Sleeping for 1 second and trying to get the receipt again"
|
||||||
|
);
|
||||||
|
retries += 1;
|
||||||
|
tokio::time::sleep(std::time::Duration::from_secs(1)).await;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
break Err(error.into());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
}
|
||||||
|
}))
|
||||||
let next_wait_duration = Duration::from_secs(2u64.pow(retries))
|
|
||||||
.min(max_allowed_wait_duration - total_wait_duration);
|
|
||||||
total_wait_duration += next_wait_duration;
|
|
||||||
retries += 1;
|
|
||||||
|
|
||||||
tokio::time::sleep(next_wait_duration).await;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
||||||
async fn trace_transaction(
|
fn trace_transaction(
|
||||||
&self,
|
&self,
|
||||||
transaction: &TransactionReceipt,
|
transaction: TransactionReceipt,
|
||||||
trace_options: GethDebugTracingOptions,
|
|
||||||
) -> anyhow::Result<alloy::rpc::types::trace::geth::GethTrace> {
|
) -> anyhow::Result<alloy::rpc::types::trace::geth::GethTrace> {
|
||||||
let tx_hash = transaction.transaction_hash;
|
let connection_string = self.connection_string();
|
||||||
Ok(self
|
|
||||||
.provider()
|
|
||||||
.await?
|
|
||||||
.debug_trace_transaction(tx_hash, trace_options)
|
|
||||||
.await?)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
|
||||||
async fn state_diff(&self, transaction: &TransactionReceipt) -> anyhow::Result<DiffMode> {
|
|
||||||
let trace_options = GethDebugTracingOptions::prestate_tracer(PreStateConfig {
|
let trace_options = GethDebugTracingOptions::prestate_tracer(PreStateConfig {
|
||||||
diff_mode: Some(true),
|
diff_mode: Some(true),
|
||||||
disable_code: None,
|
disable_code: None,
|
||||||
disable_storage: None,
|
disable_storage: None,
|
||||||
});
|
});
|
||||||
|
let wallet = self.wallet.clone();
|
||||||
|
|
||||||
|
trace_transaction(Box::pin(async move {
|
||||||
|
Ok(ProviderBuilder::new()
|
||||||
|
.wallet(wallet)
|
||||||
|
.connect(&connection_string)
|
||||||
|
.await?
|
||||||
|
.debug_trace_transaction(transaction.transaction_hash, trace_options)
|
||||||
|
.await?)
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
||||||
|
fn state_diff(
|
||||||
|
&self,
|
||||||
|
transaction: alloy::rpc::types::TransactionReceipt,
|
||||||
|
) -> anyhow::Result<DiffMode> {
|
||||||
match self
|
match self
|
||||||
.trace_transaction(transaction, trace_options)
|
.trace_transaction(transaction)?
|
||||||
.await?
|
|
||||||
.try_into_pre_state_frame()?
|
.try_into_pre_state_frame()?
|
||||||
{
|
{
|
||||||
PreStateFrame::Diff(diff) => Ok(diff),
|
PreStateFrame::Diff(diff) => Ok(diff),
|
||||||
_ => anyhow::bail!("expected a diff mode trace"),
|
_ => anyhow::bail!("expected a diff mode trace"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
impl ResolverApi for GethNode {
|
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
|
||||||
async fn chain_id(&self) -> anyhow::Result<alloy::primitives::ChainId> {
|
|
||||||
self.provider()
|
|
||||||
.await?
|
|
||||||
.get_chain_id()
|
|
||||||
.await
|
|
||||||
.map_err(Into::into)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
||||||
async fn block_gas_limit(&self, number: BlockNumberOrTag) -> anyhow::Result<u128> {
|
fn fetch_add_nonce(&self, address: Address) -> anyhow::Result<u64> {
|
||||||
self.provider()
|
let connection_string = self.connection_string.clone();
|
||||||
.await?
|
let wallet = self.wallet.clone();
|
||||||
.get_block_by_number(number)
|
|
||||||
.await?
|
|
||||||
.ok_or(anyhow::Error::msg("Blockchain has no blocks"))
|
|
||||||
.map(|block| block.header.gas_limit as _)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
let onchain_nonce = fetch_onchain_nonce(connection_string, wallet, address)?;
|
||||||
async fn block_coinbase(&self, number: BlockNumberOrTag) -> anyhow::Result<Address> {
|
|
||||||
self.provider()
|
|
||||||
.await?
|
|
||||||
.get_block_by_number(number)
|
|
||||||
.await?
|
|
||||||
.ok_or(anyhow::Error::msg("Blockchain has no blocks"))
|
|
||||||
.map(|block| block.header.beneficiary)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
let mut nonces = self.nonces.lock().unwrap();
|
||||||
async fn block_difficulty(&self, number: BlockNumberOrTag) -> anyhow::Result<U256> {
|
let current = nonces.entry(address).or_insert(onchain_nonce);
|
||||||
self.provider()
|
let value = *current;
|
||||||
.await?
|
*current += 1;
|
||||||
.get_block_by_number(number)
|
Ok(value)
|
||||||
.await?
|
|
||||||
.ok_or(anyhow::Error::msg("Blockchain has no blocks"))
|
|
||||||
.map(|block| block.header.difficulty)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
|
||||||
async fn block_hash(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockHash> {
|
|
||||||
self.provider()
|
|
||||||
.await?
|
|
||||||
.get_block_by_number(number)
|
|
||||||
.await?
|
|
||||||
.ok_or(anyhow::Error::msg("Blockchain has no blocks"))
|
|
||||||
.map(|block| block.header.hash)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
|
||||||
async fn block_timestamp(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockTimestamp> {
|
|
||||||
self.provider()
|
|
||||||
.await?
|
|
||||||
.get_block_by_number(number)
|
|
||||||
.await?
|
|
||||||
.ok_or(anyhow::Error::msg("Blockchain has no blocks"))
|
|
||||||
.map(|block| block.header.timestamp)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
|
||||||
async fn last_block_number(&self) -> anyhow::Result<BlockNumber> {
|
|
||||||
self.provider()
|
|
||||||
.await?
|
|
||||||
.get_block_number()
|
|
||||||
.await
|
|
||||||
.map_err(Into::into)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Node for GethNode {
|
impl Node for Instance {
|
||||||
fn new(config: &Arguments) -> Self {
|
fn new(config: &Arguments) -> Self {
|
||||||
let geth_directory = config.directory().join(Self::BASE_DIRECTORY);
|
let geth_directory = config.directory().join(Self::BASE_DIRECTORY);
|
||||||
let id = NODE_COUNT.fetch_add(1, Ordering::SeqCst);
|
let id = NODE_COUNT.fetch_add(1, Ordering::SeqCst);
|
||||||
let base_directory = geth_directory.join(id.to_string());
|
let base_directory = geth_directory.join(id.to_string());
|
||||||
|
|
||||||
let mut wallet = config.wallet();
|
|
||||||
for signer in (1..=config.private_keys_to_add)
|
|
||||||
.map(|id| U256::from(id))
|
|
||||||
.map(|id| id.to_be_bytes::<32>())
|
|
||||||
.map(|id| PrivateKeySigner::from_bytes(&FixedBytes(id)).unwrap())
|
|
||||||
{
|
|
||||||
wallet.register_signer(signer);
|
|
||||||
}
|
|
||||||
|
|
||||||
Self {
|
Self {
|
||||||
connection_string: base_directory.join(Self::IPC_FILE).display().to_string(),
|
connection_string: base_directory.join(Self::IPC_FILE).display().to_string(),
|
||||||
data_directory: base_directory.join(Self::DATA_DIRECTORY),
|
data_directory: base_directory.join(Self::DATA_DIRECTORY),
|
||||||
@@ -448,11 +355,11 @@ impl Node for GethNode {
|
|||||||
handle: None,
|
handle: None,
|
||||||
network_id: config.network_id,
|
network_id: config.network_id,
|
||||||
start_timeout: config.geth_start_timeout,
|
start_timeout: config.geth_start_timeout,
|
||||||
wallet,
|
wallet: config.wallet(),
|
||||||
|
nonces: Mutex::new(HashMap::new()),
|
||||||
// We know that we only need to be storing 2 files so we can specify that when creating
|
// We know that we only need to be storing 2 files so we can specify that when creating
|
||||||
// the vector. It's the stdout and stderr of the geth node.
|
// the vector. It's the stdout and stderr of the geth node.
|
||||||
logs_file_to_flush: Vec::with_capacity(2),
|
logs_file_to_flush: Vec::with_capacity(2),
|
||||||
nonce_manager: Default::default(),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -501,17 +408,9 @@ impl Node for GethNode {
|
|||||||
.stdout;
|
.stdout;
|
||||||
Ok(String::from_utf8_lossy(&output).into())
|
Ok(String::from_utf8_lossy(&output).into())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
|
||||||
fn matches_target(&self, targets: Option<&[String]>) -> bool {
|
|
||||||
match targets {
|
|
||||||
None => true,
|
|
||||||
Some(targets) => targets.iter().any(|str| str.as_str() == "evm"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Drop for GethNode {
|
impl Drop for Instance {
|
||||||
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))]
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
self.shutdown().expect("Failed to shutdown")
|
self.shutdown().expect("Failed to shutdown")
|
||||||
@@ -521,12 +420,11 @@ impl Drop for GethNode {
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use revive_dt_config::Arguments;
|
use revive_dt_config::Arguments;
|
||||||
|
|
||||||
use temp_dir::TempDir;
|
use temp_dir::TempDir;
|
||||||
|
|
||||||
use crate::{GENESIS_JSON, Node};
|
use crate::{GENESIS_JSON, Node};
|
||||||
|
|
||||||
use super::*;
|
use super::Instance;
|
||||||
|
|
||||||
fn test_config() -> (Arguments, TempDir) {
|
fn test_config() -> (Arguments, TempDir) {
|
||||||
let mut config = Arguments::default();
|
let mut config = Arguments::default();
|
||||||
@@ -536,125 +434,26 @@ mod tests {
|
|||||||
(config, temp_dir)
|
(config, temp_dir)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn new_node() -> (GethNode, TempDir) {
|
|
||||||
let (args, temp_dir) = test_config();
|
|
||||||
let mut node = GethNode::new(&args);
|
|
||||||
node.init(GENESIS_JSON.to_owned())
|
|
||||||
.expect("Failed to initialize the node")
|
|
||||||
.spawn_process()
|
|
||||||
.expect("Failed to spawn the node process");
|
|
||||||
(node, temp_dir)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn init_works() {
|
fn init_works() {
|
||||||
GethNode::new(&test_config().0)
|
Instance::new(&test_config().0)
|
||||||
.init(GENESIS_JSON.to_string())
|
.init(GENESIS_JSON.to_string())
|
||||||
.unwrap();
|
.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn spawn_works() {
|
fn spawn_works() {
|
||||||
GethNode::new(&test_config().0)
|
Instance::new(&test_config().0)
|
||||||
.spawn(GENESIS_JSON.to_string())
|
.spawn(GENESIS_JSON.to_string())
|
||||||
.unwrap();
|
.unwrap();
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn version_works() {
|
fn version_works() {
|
||||||
let version = GethNode::new(&test_config().0).version().unwrap();
|
let version = Instance::new(&test_config().0).version().unwrap();
|
||||||
assert!(
|
assert!(
|
||||||
version.starts_with("geth version"),
|
version.starts_with("geth version"),
|
||||||
"expected version string, got: '{version}'"
|
"expected version string, got: '{version}'"
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn can_get_chain_id_from_node() {
|
|
||||||
// Arrange
|
|
||||||
let (node, _temp_dir) = new_node();
|
|
||||||
|
|
||||||
// Act
|
|
||||||
let chain_id = node.chain_id().await;
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
let chain_id = chain_id.expect("Failed to get the chain id");
|
|
||||||
assert_eq!(chain_id, 420_420_420);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn can_get_gas_limit_from_node() {
|
|
||||||
// Arrange
|
|
||||||
let (node, _temp_dir) = new_node();
|
|
||||||
|
|
||||||
// Act
|
|
||||||
let gas_limit = node.block_gas_limit(BlockNumberOrTag::Latest).await;
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
let gas_limit = gas_limit.expect("Failed to get the gas limit");
|
|
||||||
assert_eq!(gas_limit, u32::MAX as u128)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn can_get_coinbase_from_node() {
|
|
||||||
// Arrange
|
|
||||||
let (node, _temp_dir) = new_node();
|
|
||||||
|
|
||||||
// Act
|
|
||||||
let coinbase = node.block_coinbase(BlockNumberOrTag::Latest).await;
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
let coinbase = coinbase.expect("Failed to get the coinbase");
|
|
||||||
assert_eq!(coinbase, Address::new([0xFF; 20]))
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn can_get_block_difficulty_from_node() {
|
|
||||||
// Arrange
|
|
||||||
let (node, _temp_dir) = new_node();
|
|
||||||
|
|
||||||
// Act
|
|
||||||
let block_difficulty = node.block_difficulty(BlockNumberOrTag::Latest).await;
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
let block_difficulty = block_difficulty.expect("Failed to get the block difficulty");
|
|
||||||
assert_eq!(block_difficulty, U256::ZERO)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn can_get_block_hash_from_node() {
|
|
||||||
// Arrange
|
|
||||||
let (node, _temp_dir) = new_node();
|
|
||||||
|
|
||||||
// Act
|
|
||||||
let block_hash = node.block_hash(BlockNumberOrTag::Latest).await;
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
let _ = block_hash.expect("Failed to get the block hash");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn can_get_block_timestamp_from_node() {
|
|
||||||
// Arrange
|
|
||||||
let (node, _temp_dir) = new_node();
|
|
||||||
|
|
||||||
// Act
|
|
||||||
let block_timestamp = node.block_timestamp(BlockNumberOrTag::Latest).await;
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
let _ = block_timestamp.expect("Failed to get the block timestamp");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[tokio::test]
|
|
||||||
async fn can_get_block_number_from_node() {
|
|
||||||
// Arrange
|
|
||||||
let (node, _temp_dir) = new_node();
|
|
||||||
|
|
||||||
// Act
|
|
||||||
let block_number = node.last_block_number().await;
|
|
||||||
|
|
||||||
// Assert
|
|
||||||
let block_number = block_number.expect("Failed to get the block number");
|
|
||||||
assert_eq!(block_number, 0)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
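The removed comment block above explains why receipts have to be polled: geth can confirm a transaction before its indexer has caught up (go-ethereum issue 28877), so eth_getTransactionReceipt may transiently fail or return nothing. A minimal sketch of that polling strategy, assuming a tokio runtime and a placeholder fetch closure standing in for the actual receipt lookup:

use std::time::Duration;

// Poll `fetch` until it yields a value, backing off exponentially and giving
// up once a total wait of five minutes has elapsed. A real implementation
// would additionally distinguish the "transaction indexing is in progress"
// error from other failures.
async fn poll_with_backoff<T, F, Fut>(mut fetch: F) -> Option<T>
where
    F: FnMut() -> Fut,
    Fut: std::future::Future<Output = Option<T>>,
{
    let max_wait = Duration::from_secs(5 * 60);
    let mut waited = Duration::ZERO;
    let mut retries = 0u32;
    loop {
        if let Some(value) = fetch().await {
            return Some(value);
        }
        if waited >= max_wait {
            return None;
        }
        // Exponential backoff: 1s, 2s, 4s, ... capped by the remaining budget.
        let delay = Duration::from_secs(1u64 << retries.min(8)).min(max_wait - waited);
        waited += delay;
        retries += 1;
        tokio::time::sleep(delay).await;
    }
}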
+115 -796
File diff suppressed because it is too large
@@ -3,8 +3,6 @@
 use revive_dt_config::Arguments;
 use revive_dt_node_interaction::EthereumNode;

-pub mod common;
-pub mod constants;
 pub mod geth;
 pub mod kitchensink;
 pub mod pool;
@@ -32,8 +30,4 @@ pub trait Node: EthereumNode {

     /// Returns the node version.
     fn version(&self) -> anyhow::Result<String>;
-
-    /// Given a list of targets from the metadata file, this function determines if the metadata
-    /// file can be ran on this node or not.
-    fn matches_target(&self, targets: Option<&[String]>) -> bool;
 }
@@ -24,7 +24,7 @@ where
 {
     /// Create a new Pool. This will start as many nodes as there are workers in `config`.
     pub fn new(config: &Arguments) -> anyhow::Result<Self> {
-        let nodes = config.number_of_nodes;
+        let nodes = config.workers;
         let genesis = read_to_string(&config.genesis_file).context(format!(
             "can not read genesis file: {}",
             config.genesis_file.display()
@@ -10,9 +10,9 @@ rust-version.workspace = true
 [dependencies]
 revive-dt-config = { workspace = true }
 revive-dt-format = { workspace = true }
-revive-dt-compiler = { workspace = true }

 anyhow = { workspace = true }
 tracing = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
+revive-solc-json-interface = { workspace = true }
@@ -1,6 +1,5 @@
 //! The report analyzer enriches the raw report data.

-use revive_dt_compiler::CompilerOutput;
 use serde::{Deserialize, Serialize};

 use crate::reporter::CompilationTask;
@@ -14,27 +13,41 @@ pub struct CompilerStatistics {
     pub mean_code_size: usize,
     /// The mean size of the optimized YUL IR.
     pub mean_yul_size: usize,
-    /// Is a proxy because the YUL also contains a lot of comments.
+    /// Is a proxy because the YUL also containes a lot of comments.
     pub yul_to_bytecode_size_ratio: f32,
 }

 impl CompilerStatistics {
     /// Cumulatively update the statistics with the next compiler task.
     pub fn sample(&mut self, compilation_task: &CompilationTask) {
-        let Some(CompilerOutput { contracts }) = &compilation_task.json_output else {
+        let Some(output) = &compilation_task.json_output else {
+            return;
+        };
+
+        let Some(contracts) = &output.contracts else {
             return;
         };

         for (_solidity, contracts) in contracts.iter() {
-            for (_name, (bytecode, _)) in contracts.iter() {
+            for (_name, contract) in contracts.iter() {
+                let Some(evm) = &contract.evm else {
+                    continue;
+                };
+                let Some(deploy_code) = &evm.deployed_bytecode else {
+                    continue;
+                };
+
                 // The EVM bytecode can be unlinked and thus is not necessarily a decodable hex
                 // string; for our statistics this is a good enough approximation.
-                let bytecode_size = bytecode.len() / 2;
+                let bytecode_size = deploy_code.object.len() / 2;

-                // TODO: for the time being we set the yul_size to be zero. We need to change this
-                // when we overhaul the reporting.
+                let yul_size = contract
+                    .ir_optimized
+                    .as_ref()
+                    .expect("if the contract has a deploy code it should also have the opimized IR")
+                    .len();

-                self.update_sizes(bytecode_size, 0);
+                self.update_sizes(bytecode_size, yul_size);
             }
         }
     }
 }
@@ -12,11 +12,11 @@ use std::{
 };

 use anyhow::Context;
-use revive_dt_compiler::{CompilerInput, CompilerOutput};
 use serde::{Deserialize, Serialize};

 use revive_dt_config::{Arguments, TestingPlatform};
 use revive_dt_format::{corpus::Corpus, mode::SolcMode};
+use revive_solc_json_interface::{SolcStandardJsonInput, SolcStandardJsonOutput};

 use crate::analyzer::CompilerStatistics;

@@ -44,9 +44,9 @@ pub struct Report {
 #[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct CompilationTask {
     /// The observed compiler input.
-    pub json_input: CompilerInput,
+    pub json_input: SolcStandardJsonInput,
     /// The observed compiler output.
-    pub json_output: Option<CompilerOutput>,
+    pub json_output: Option<SolcStandardJsonOutput>,
     /// The observed compiler mode.
     pub mode: SolcMode,
     /// The observed compiler version.
@@ -152,7 +152,15 @@ impl Report {
         for (platform, results) in self.compiler_results.iter() {
             for result in results {
                 // ignore if there were no errors
-                if result.compilation_task.error.is_none() {
+                if result.compilation_task.error.is_none()
+                    && result
+                        .compilation_task
+                        .json_output
+                        .as_ref()
+                        .and_then(|output| output.errors.as_ref())
+                        .map(|errors| errors.is_empty())
+                        .unwrap_or(true)
+                {
                     continue;
                 }

@@ -9,12 +9,9 @@ repository.workspace = true
 rust-version.workspace = true

 [dependencies]
-revive-dt-common = { workspace = true }
-
 anyhow = { workspace = true }
 hex = { workspace = true }
 tracing = { workspace = true }
-tokio = { workspace = true }
 reqwest = { workspace = true }
 semver = { workspace = true }
 serde = { workspace = true }
@@ -6,17 +6,15 @@ use std::{
     io::{BufWriter, Write},
     os::unix::fs::PermissionsExt,
     path::{Path, PathBuf},
-    sync::LazyLock,
+    sync::{LazyLock, Mutex},
 };

-use tokio::sync::Mutex;
-
 use crate::download::GHDownloader;

 pub const SOLC_CACHE_DIRECTORY: &str = "solc";
 pub(crate) static SOLC_CACHER: LazyLock<Mutex<HashSet<PathBuf>>> = LazyLock::new(Default::default);

-pub(crate) async fn get_or_download(
+pub(crate) fn get_or_download(
     working_directory: &Path,
     downloader: &GHDownloader,
 ) -> anyhow::Result<PathBuf> {
@@ -25,20 +23,20 @@ pub(crate) async fn get_or_download(
         .join(downloader.version.to_string());
     let target_file = target_directory.join(downloader.target);

-    let mut cache = SOLC_CACHER.lock().await;
+    let mut cache = SOLC_CACHER.lock().unwrap();
     if cache.contains(&target_file) {
         tracing::debug!("using cached solc: {}", target_file.display());
         return Ok(target_file);
     }

     create_dir_all(target_directory)?;
-    download_to_file(&target_file, downloader).await?;
+    download_to_file(&target_file, downloader)?;
     cache.insert(target_file.clone());

     Ok(target_file)
 }

-async fn download_to_file(path: &Path, downloader: &GHDownloader) -> anyhow::Result<()> {
+fn download_to_file(path: &Path, downloader: &GHDownloader) -> anyhow::Result<()> {
     tracing::info!("caching file: {}", path.display());

     let Ok(file) = File::create_new(path) else {
@@ -54,7 +52,7 @@ async fn download_to_file(path: &Path, downloader: &GHDownloader) -> anyhow::Res
     }

     let mut file = BufWriter::new(file);
-    file.write_all(&downloader.download().await?)?;
+    file.write_all(&downloader.download()?)?;
     file.flush()?;
     drop(file);

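The SOLC_CACHER set above is the crate's guard against downloading the same solc binary twice within one process. A minimal sketch of the same idea, with a hypothetical ensure_cached helper standing in for get_or_download:

use std::{
    collections::HashSet,
    path::PathBuf,
    sync::{LazyLock, Mutex},
};

// Process-wide record of binaries that have already been written to disk.
static CACHED: LazyLock<Mutex<HashSet<PathBuf>>> = LazyLock::new(Default::default);

// Only runs the (hypothetical) `fetch` closure the first time a path is seen.
fn ensure_cached(path: PathBuf, fetch: impl FnOnce(&PathBuf)) -> PathBuf {
    let mut cache = CACHED.lock().unwrap();
    if !cache.contains(&path) {
        fetch(&path);
        cache.insert(path.clone());
    }
    path
}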
@@ -5,8 +5,6 @@ use std::{
     sync::{LazyLock, Mutex},
 };

-use revive_dt_common::types::VersionOrRequirement;
-
 use semver::Version;
 use sha2::{Digest, Sha256};

@@ -25,12 +23,12 @@ impl List {
     ///
     /// Caches the list retrieved from the `url` into [LIST_CACHE],
     /// subsequent calls with the same `url` will return the cached list.
-    pub async fn download(url: &'static str) -> anyhow::Result<Self> {
+    pub fn download(url: &'static str) -> anyhow::Result<Self> {
         if let Some(list) = LIST_CACHE.lock().unwrap().get(url) {
             return Ok(list.clone());
         }

-        let body: List = reqwest::get(url).await?.json().await?;
+        let body: List = reqwest::blocking::get(url)?.json()?;

         LIST_CACHE.lock().unwrap().insert(url, body.clone());

@@ -54,52 +52,28 @@ impl GHDownloader {
     pub const WINDOWS_NAME: &str = "solc-windows.exe";
     pub const WASM_NAME: &str = "soljson.js";

-    async fn new(
-        version: impl Into<VersionOrRequirement>,
-        target: &'static str,
-        list: &'static str,
-    ) -> anyhow::Result<Self> {
-        let version_or_requirement = version.into();
-        match version_or_requirement {
-            VersionOrRequirement::Version(version) => Ok(Self {
-                version,
-                target,
-                list,
-            }),
-            VersionOrRequirement::Requirement(requirement) => {
-                let Some(version) = List::download(list)
-                    .await?
-                    .builds
-                    .into_iter()
-                    .map(|build| build.version)
-                    .filter(|version| requirement.matches(version))
-                    .max()
-                else {
-                    anyhow::bail!("Failed to find a version that satisfies {requirement:?}");
-                };
-                Ok(Self {
-                    version,
-                    target,
-                    list,
-                })
-            }
+    fn new(version: Version, target: &'static str, list: &'static str) -> Self {
+        Self {
+            version,
+            target,
+            list,
         }
     }

-    pub async fn linux(version: impl Into<VersionOrRequirement>) -> anyhow::Result<Self> {
-        Self::new(version, Self::LINUX_NAME, List::LINUX_URL).await
+    pub fn linux(version: Version) -> Self {
+        Self::new(version, Self::LINUX_NAME, List::LINUX_URL)
     }

-    pub async fn macosx(version: impl Into<VersionOrRequirement>) -> anyhow::Result<Self> {
-        Self::new(version, Self::MACOSX_NAME, List::MACOSX_URL).await
+    pub fn macosx(version: Version) -> Self {
+        Self::new(version, Self::MACOSX_NAME, List::MACOSX_URL)
     }

-    pub async fn windows(version: impl Into<VersionOrRequirement>) -> anyhow::Result<Self> {
-        Self::new(version, Self::WINDOWS_NAME, List::WINDOWS_URL).await
+    pub fn windows(version: Version) -> Self {
+        Self::new(version, Self::WINDOWS_NAME, List::WINDOWS_URL)
     }

-    pub async fn wasm(version: impl Into<VersionOrRequirement>) -> anyhow::Result<Self> {
-        Self::new(version, Self::WASM_NAME, List::WASM_URL).await
+    pub fn wasm(version: Version) -> Self {
+        Self::new(version, Self::WASM_NAME, List::WASM_URL)
     }

     /// Returns the download link.
@@ -111,17 +85,16 @@ impl GHDownloader {
     ///
     /// Errors out if the download fails or the digest of the downloaded file
     /// mismatches the expected digest from the release [List].
-    pub async fn download(&self) -> anyhow::Result<Vec<u8>> {
+    pub fn download(&self) -> anyhow::Result<Vec<u8>> {
         tracing::info!("downloading solc: {self:?}");
-        let expected_digest = List::download(self.list)
-            .await?
+        let expected_digest = List::download(self.list)?
             .builds
             .iter()
             .find(|build| build.version == self.version)
             .ok_or_else(|| anyhow::anyhow!("solc v{} not found builds", self.version))
             .map(|b| b.sha256.strip_prefix("0x").unwrap_or(&b.sha256).to_string())?;

-        let file = reqwest::get(self.url()).await?.bytes().await?.to_vec();
+        let file = reqwest::blocking::get(self.url())?.bytes()?.to_vec();

         if hex::encode(Sha256::digest(&file)) != expected_digest {
             anyhow::bail!("sha256 mismatch for solc version {}", self.version);
@@ -135,56 +108,39 @@ impl GHDownloader {
 mod tests {
     use crate::{download::GHDownloader, list::List};

-    #[tokio::test]
-    async fn try_get_windows() {
+    #[test]
+    fn try_get_windows() {
         let version = List::download(List::WINDOWS_URL)
-            .await
             .unwrap()
-            .latest_release;
-        GHDownloader::windows(version)
-            .await
-            .unwrap()
-            .download()
-            .await
-            .unwrap();
+            .latest_release
+            .into();
+        GHDownloader::windows(version).download().unwrap();
     }

-    #[tokio::test]
-    async fn try_get_macosx() { [...] }
+    #[test]
+    fn try_get_macosx() { [...] }

-    #[tokio::test]
-    async fn try_get_linux() { [...] }
+    #[test]
+    fn try_get_linux() { [...] }

-    #[tokio::test]
-    async fn try_get_wasm() {
-        let version = List::download(List::WASM_URL).await.unwrap().latest_release;
-        GHDownloader::wasm(version)
-            .await
-            .unwrap()
-            .download()
-            .await
-            .unwrap();
+    #[test]
+    fn try_get_wasm() {
+        let version = List::download(List::WASM_URL)
+            .unwrap()
+            .latest_release
+            .into();
+        GHDownloader::wasm(version).download().unwrap();
     }
 }
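The download path above verifies every fetched solc binary against the digest published in the release list. A small sketch of that check using the same sha2 and hex crates (the function name is illustrative):

use sha2::{Digest, Sha256};

// Hash the downloaded bytes and compare against the published hex digest,
// accepting it with or without a leading "0x" prefix.
fn verify_sha256(bytes: &[u8], expected_hex: &str) -> bool {
    let expected = expected_hex.strip_prefix("0x").unwrap_or(expected_hex);
    hex::encode(Sha256::digest(bytes)) == expected
}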
@@ -7,8 +7,7 @@ use std::path::{Path, PathBuf};

 use cache::get_or_download;
 use download::GHDownloader;
-use revive_dt_common::types::VersionOrRequirement;
+use semver::Version;

 pub mod cache;
 pub mod download;
@@ -19,22 +18,22 @@ pub mod list;
 ///
 /// Subsequent calls for the same version will use a cached artifact
 /// and not download it again.
-pub async fn download_solc(
+pub fn download_solc(
     cache_directory: &Path,
-    version: impl Into<VersionOrRequirement>,
+    version: Version,
     wasm: bool,
 ) -> anyhow::Result<PathBuf> {
     let downloader = if wasm {
-        GHDownloader::wasm(version).await
+        GHDownloader::wasm(version)
     } else if cfg!(target_os = "linux") {
-        GHDownloader::linux(version).await
+        GHDownloader::linux(version)
     } else if cfg!(target_os = "macos") {
-        GHDownloader::macosx(version).await
+        GHDownloader::macosx(version)
     } else if cfg!(target_os = "windows") {
-        GHDownloader::windows(version).await
+        GHDownloader::windows(version)
     } else {
         unimplemented!()
-    }?;
+    };

-    get_or_download(cache_directory, &downloader).await
+    get_or_download(cache_directory, &downloader)
 }
+5 -1
@@ -33,5 +33,9 @@
     "mixhash": "0x0000000000000000000000000000000000000000000000000000000000000000",
     "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
     "timestamp": "0x00",
-    "alloc": {}
+    "alloc": {
+        "90F8bf6A479f320ead074411a4B0e7944Ea8c9C1": {
+            "balance": "1000000000000000000"
+        }
+    }
 }
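The alloc entry above prefunds the well-known dev account with 10^18 wei, i.e. exactly 1 ether. The removed GethNode::init achieved the same thing programmatically; a sketch of that approach using alloy's genesis types (the helper name is illustrative):

use alloy::{
    genesis::{Genesis, GenesisAccount},
    primitives::{Address, U256},
};

// Insert a funded account into the genesis `alloc`, leaving any existing
// entry for that address untouched (the entry API only fills in gaps).
fn prefund(mut genesis: Genesis, account: Address) -> Genesis {
    genesis
        .alloc
        .entry(account)
        .or_insert(GenesisAccount::default().with_balance(U256::from(10u128.pow(18))));
    genesis
}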