Compare commits

...

17 Commits

Author SHA1 Message Date
Omar Abdulla ad3d580df9 Make the code even more concurrent 2025-08-18 18:39:03 +03:00
Omar Abdulla d45b8da8e3 Format 2025-08-18 17:27:26 +03:00
Omar Abdulla 79ce4a239c Fix tests 2025-08-18 17:23:30 +03:00
Omar Abdulla 609ececea6 Better logging and fix concurrency issues 2025-08-18 16:16:33 +03:00
Omar Abdulla fb3959d345 Allow for auto display impl in declare wrapper type macro 2025-08-18 09:41:25 +03:00
Omar Abdulla 84026f9aee Cache the compiler versions 2025-08-18 06:34:26 +03:00
Omar Abdulla a7ce202a6b Merge remote-tracking branch 'origin/main' into feature/fix-os-fd-errors 2025-08-16 22:48:53 +03:00
Omar c58551803d Allow multiple files in corpus (#144) 2025-08-16 16:04:17 +00:00
Omar 185edcfad9 Cached compiler artifacts (#143)
* WIP compilation cache

* Implement a persistent compilation cache

* Correct the key and value encoding for the cache
2025-08-16 16:04:13 +00:00
James Wilson 09d56f5177 Redo how we parse and use modes (#125)
* WIP redo how we parse and use modes

* test expanding, too

* WIP integrate new Mode/ParsedMode into rest of code

* First pass integrated new mode bits

* fmt

* clippy

* Remove mode we no longer support from test metadata

* Address nits

* Add ability for compiler to opt out if it can't work with some Mode/version

* Elide viaIR input if compiler does not support it

* Improve test output a little; string modes and list ignored tests

* Move Mode to common crate

* constants.mod, and Display for CaseIdx to use it

* fmt

* Rename ModePipeline::E/Y

* Re-arrange Mode things; ParsedMode in format and Mode etc in common

* Move compile check to prepare_tests

* Remove now-unused deps

* clippy nits

* Update fallback tx weights to avoid out of gas errors

* Update kitchensink weights too and fmt

* Bump default geth timeout to 10s

* 30s timeout

* Improve geth stdout logging on failure

* fix line logging

* remove --networkid and arg, back to 5s timeout for geth
2025-08-16 11:38:17 +00:00
Omar Abdulla e19e0a4e7a Fix the OS FD error 2025-08-15 16:16:36 +03:00
Omar a59e287fa1 Add a cached fs abstraction (#141) 2025-08-14 15:21:05 +00:00
Omar f2045db0e9 Add compiler directives to metadata (#139) 2025-08-14 07:38:56 +00:00
Omar 5a11f44673 Misc features/improvements (#138)
* Implement various needed features and improvements

* Reorder the metadata struct

* Format comments
2025-08-13 13:50:06 +00:00
James Wilson 46aea0890d Split reporter and case runner, use channels to pass test reports (#137)
* Use channels to send data to reporting thread and avoid hangs / mutex / duration. Limit max concurrent tasks to avoid too many open files

* More appropriate name for dirver/reporter task fns

* Back to parallelise individual cases, report individual cases, address grumbles

* newline before 'Failures' title in report
2025-08-13 13:10:26 +00:00
Omar 9b40c9b9e3 Add an EVM version filter (#136)
* Add an EVM version filter

* Update naming
2025-08-12 10:19:59 +00:00
Omar f67a9bf643 Refactor/ignore null values (#135)
* Skip serialization of null values

* Add support for comments in various steps
2025-08-12 08:55:21 +00:00
42 changed files with 2889 additions and 1270 deletions
+2
View File
@@ -7,3 +7,5 @@ node_modules
 # We do not want to commit any log files that we produce from running the code locally so this is
 # added to the .gitignore file.
 *.log
+profile.json.gz
+Generated
+609 -9
View File
@@ -659,7 +659,7 @@ checksum = "4f317d20f047b3de4d9728c556e2e9a92c9a507702d2016424cd8be13a74ca5e"
dependencies = [ dependencies = [
"alloy-json-rpc", "alloy-json-rpc",
"alloy-primitives", "alloy-primitives",
"base64", "base64 0.22.1",
"derive_more 2.0.1", "derive_more 2.0.1",
"futures", "futures",
"futures-utils-wasm", "futures-utils-wasm",
@@ -1189,6 +1189,151 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "async-channel"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35"
dependencies = [
"concurrent-queue",
"event-listener 2.5.3",
"futures-core",
]
[[package]]
name = "async-channel"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2"
dependencies = [
"concurrent-queue",
"event-listener-strategy",
"futures-core",
"pin-project-lite",
]
[[package]]
name = "async-executor"
version = "1.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb812ffb58524bdd10860d7d974e2f01cc0950c2438a74ee5ec2e2280c6c4ffa"
dependencies = [
"async-task",
"concurrent-queue",
"fastrand",
"futures-lite",
"pin-project-lite",
"slab",
]
[[package]]
name = "async-global-executor"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c"
dependencies = [
"async-channel 2.5.0",
"async-executor",
"async-io",
"async-lock",
"blocking",
"futures-lite",
"once_cell",
]
[[package]]
name = "async-io"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1237c0ae75a0f3765f58910ff9cdd0a12eeb39ab2f4c7de23262f337f0aacbb3"
dependencies = [
"async-lock",
"cfg-if",
"concurrent-queue",
"futures-io",
"futures-lite",
"parking",
"polling",
"rustix",
"slab",
"tracing",
"windows-sys 0.59.0",
]
[[package]]
name = "async-lock"
version = "3.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc"
dependencies = [
"event-listener 5.4.1",
"event-listener-strategy",
"pin-project-lite",
]
[[package]]
name = "async-process"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65daa13722ad51e6ab1a1b9c01299142bc75135b337923cfa10e79bbbd669f00"
dependencies = [
"async-channel 2.5.0",
"async-io",
"async-lock",
"async-signal",
"async-task",
"blocking",
"cfg-if",
"event-listener 5.4.1",
"futures-lite",
"rustix",
]
[[package]]
name = "async-signal"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7605a4e50d4b06df3898d5a70bf5fde51ed9059b0434b73105193bc27acce0d"
dependencies = [
"async-io",
"async-lock",
"atomic-waker",
"cfg-if",
"futures-core",
"futures-io",
"rustix",
"signal-hook-registry",
"slab",
"windows-sys 0.59.0",
]
[[package]]
name = "async-std"
version = "1.13.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c8e079a4ab67ae52b7403632e4618815d6db36d2a010cfe41b02c1b1578f93b"
dependencies = [
"async-channel 1.9.0",
"async-global-executor",
"async-io",
"async-lock",
"async-process",
"crossbeam-utils",
"futures-channel",
"futures-core",
"futures-io",
"futures-lite",
"gloo-timers",
"kv-log-macro",
"log",
"memchr",
"once_cell",
"pin-project-lite",
"pin-utils",
"slab",
"wasm-bindgen-futures",
]
[[package]] [[package]]
name = "async-stream" name = "async-stream"
version = "0.3.6" version = "0.3.6"
@@ -1211,6 +1356,12 @@ dependencies = [
"syn 2.0.101", "syn 2.0.101",
] ]
[[package]]
name = "async-task"
version = "4.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de"
[[package]] [[package]]
name = "async-trait" name = "async-trait"
version = "0.1.88" version = "0.1.88"
@@ -1266,6 +1417,12 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]] [[package]]
name = "base64" name = "base64"
version = "0.22.1" version = "0.22.1"
@@ -1404,6 +1561,19 @@ dependencies = [
"generic-array", "generic-array",
] ]
[[package]]
name = "blocking"
version = "1.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21"
dependencies = [
"async-channel 2.5.0",
"async-task",
"futures-io",
"futures-lite",
"piper",
]
[[package]] [[package]]
name = "blst" name = "blst"
version = "0.3.14" version = "0.3.14"
@@ -1437,6 +1607,29 @@ dependencies = [
"tinyvec", "tinyvec",
] ]
[[package]]
name = "bson"
version = "2.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7969a9ba84b0ff843813e7249eed1678d9b6607ce5a3b8f0a47af3fcf7978e6e"
dependencies = [
"ahash",
"base64 0.22.1",
"bitvec",
"getrandom 0.2.16",
"getrandom 0.3.3",
"hex",
"indexmap 2.10.0",
"js-sys",
"once_cell",
"rand 0.9.2",
"serde",
"serde_bytes",
"serde_json",
"time",
"uuid",
]
[[package]] [[package]]
name = "bumpalo" name = "bumpalo"
version = "3.17.0" version = "3.17.0"
@@ -1479,6 +1672,32 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "cacache"
version = "13.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c5063741c7b2e260bbede781cf4679632dd90e2718e99f7715e46824b65670b"
dependencies = [
"async-std",
"digest 0.10.7",
"either",
"futures",
"hex",
"libc",
"memmap2",
"miette",
"reflink-copy",
"serde",
"serde_derive",
"serde_json",
"sha1",
"sha2 0.10.9",
"ssri",
"tempfile",
"thiserror 1.0.69",
"walkdir",
]
[[package]] [[package]]
name = "cc" name = "cc"
version = "1.2.25" version = "1.2.25"
@@ -1559,6 +1778,15 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2382f75942f4b3be3690fe4f86365e9c853c1587d6ee58212cebf6e2a9ccd101" checksum = "2382f75942f4b3be3690fe4f86365e9c853c1587d6ee58212cebf6e2a9ccd101"
[[package]]
name = "concurrent-queue"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973"
dependencies = [
"crossbeam-utils",
]
[[package]] [[package]]
name = "const-hex" name = "const-hex"
version = "1.14.1" version = "1.14.1"
@@ -1644,6 +1872,15 @@ version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
[[package]]
name = "crossbeam-channel"
version = "0.5.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
dependencies = [
"crossbeam-utils",
]
[[package]] [[package]]
name = "crossbeam-deque" name = "crossbeam-deque"
version = "0.8.6" version = "0.8.6"
@@ -2111,6 +2348,33 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "event-listener"
version = "2.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0"
[[package]]
name = "event-listener"
version = "5.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab"
dependencies = [
"concurrent-queue",
"parking",
"pin-project-lite",
]
[[package]]
name = "event-listener-strategy"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93"
dependencies = [
"event-listener 5.4.1",
"pin-project-lite",
]
[[package]] [[package]]
name = "expander" name = "expander"
version = "2.2.1" version = "2.2.1"
@@ -2352,6 +2616,19 @@ version = "0.3.31"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6"
[[package]]
name = "futures-lite"
version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad"
dependencies = [
"fastrand",
"futures-core",
"futures-io",
"parking",
"pin-project-lite",
]
[[package]] [[package]]
name = "futures-macro" name = "futures-macro"
version = "0.3.31" version = "0.3.31"
@@ -2399,6 +2676,20 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9"
[[package]]
name = "generator"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d18470a76cb7f8ff746cf1f7470914f900252ec36bbc40b569d74b1258446827"
dependencies = [
"cc",
"cfg-if",
"libc",
"log",
"rustversion",
"windows",
]
[[package]] [[package]]
name = "generic-array" name = "generic-array"
version = "0.14.7" version = "0.14.7"
@@ -2417,8 +2708,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"js-sys",
"libc", "libc",
"wasi 0.11.0+wasi-snapshot-preview1", "wasi 0.11.0+wasi-snapshot-preview1",
"wasm-bindgen",
] ]
[[package]] [[package]]
@@ -2428,9 +2721,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"js-sys",
"libc", "libc",
"r-efi", "r-efi",
"wasi 0.14.2+wasi-0.2.4", "wasi 0.14.2+wasi-0.2.4",
"wasm-bindgen",
] ]
[[package]] [[package]]
@@ -2455,6 +2750,18 @@ version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2"
[[package]]
name = "gloo-timers"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994"
dependencies = [
"futures-channel",
"futures-core",
"js-sys",
"wasm-bindgen",
]
[[package]] [[package]]
name = "group" name = "group"
version = "0.13.0" version = "0.13.0"
@@ -2549,6 +2856,12 @@ version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"
[[package]]
name = "hermit-abi"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c"
[[package]] [[package]]
name = "hex" name = "hex"
version = "0.4.3" version = "0.4.3"
@@ -2701,7 +3014,7 @@ version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1c293b6b3d21eca78250dc7dbebd6b9210ec5530e038cbfe0661b5c47ab06e8" checksum = "b1c293b6b3d21eca78250dc7dbebd6b9210ec5530e038cbfe0661b5c47ab06e8"
dependencies = [ dependencies = [
"base64", "base64 0.22.1",
"bytes", "bytes",
"futures-channel", "futures-channel",
"futures-core", "futures-core",
@@ -3071,6 +3384,15 @@ dependencies = [
"sha3-asm", "sha3-asm",
] ]
[[package]]
name = "kv-log-macro"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f"
dependencies = [
"log",
]
[[package]] [[package]]
name = "lazy_static" name = "lazy_static"
version = "1.5.0" version = "1.5.0"
@@ -3096,7 +3418,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e79019718125edc905a079a70cfa5f3820bc76139fc91d6f9abc27ea2a887139" checksum = "e79019718125edc905a079a70cfa5f3820bc76139fc91d6f9abc27ea2a887139"
dependencies = [ dependencies = [
"arrayref", "arrayref",
"base64", "base64 0.22.1",
"digest 0.9.0", "digest 0.9.0",
"hmac-drbg", "hmac-drbg",
"libsecp256k1-core", "libsecp256k1-core",
@@ -3164,6 +3486,22 @@ name = "log"
version = "0.4.27" version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
dependencies = [
"value-bag",
]
[[package]]
name = "loom"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
dependencies = [
"cfg-if",
"generator",
"scoped-tls",
"tracing",
"tracing-subscriber",
]
[[package]] [[package]]
name = "lru" name = "lru"
@@ -3200,6 +3538,15 @@ version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "memmap2"
version = "0.5.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327"
dependencies = [
"libc",
]
[[package]] [[package]]
name = "memory-db" name = "memory-db"
version = "0.32.0" version = "0.32.0"
@@ -3221,6 +3568,29 @@ dependencies = [
"zeroize", "zeroize",
] ]
[[package]]
name = "miette"
version = "5.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59bb584eaeeab6bd0226ccf3509a69d7936d148cf3d036ad350abe35e8c6856e"
dependencies = [
"miette-derive",
"once_cell",
"thiserror 1.0.69",
"unicode-width",
]
[[package]]
name = "miette-derive"
version = "5.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.101",
]
[[package]] [[package]]
name = "mime" name = "mime"
version = "0.3.17" version = "0.3.17"
@@ -3247,6 +3617,25 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "moka"
version = "0.12.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9321642ca94a4282428e6ea4af8cc2ca4eac48ac7a6a4ea8f33f76d0ce70926"
dependencies = [
"crossbeam-channel",
"crossbeam-epoch",
"crossbeam-utils",
"loom",
"parking_lot",
"portable-atomic",
"rustc_version 0.4.1",
"smallvec",
"tagptr",
"thiserror 1.0.69",
"uuid",
]
[[package]] [[package]]
name = "native-tls" name = "native-tls"
version = "0.2.14" version = "0.2.14"
@@ -3331,7 +3720,7 @@ version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
dependencies = [ dependencies = [
"hermit-abi", "hermit-abi 0.3.9",
"libc", "libc",
] ]
@@ -3488,6 +3877,12 @@ dependencies = [
"syn 2.0.101", "syn 2.0.101",
] ]
[[package]]
name = "parking"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba"
[[package]] [[package]]
name = "parking_lot" name = "parking_lot"
version = "0.12.4" version = "0.12.4"
@@ -3593,6 +3988,17 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
[[package]]
name = "piper"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066"
dependencies = [
"atomic-waker",
"fastrand",
"futures-io",
]
[[package]] [[package]]
name = "pkcs8" name = "pkcs8"
version = "0.10.2" version = "0.10.2"
@@ -3646,6 +4052,27 @@ dependencies = [
"syn 2.0.101", "syn 2.0.101",
] ]
[[package]]
name = "polling"
version = "3.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b53a684391ad002dd6a596ceb6c74fd004fdce75f4be2e3f615068abbea5fd50"
dependencies = [
"cfg-if",
"concurrent-queue",
"hermit-abi 0.5.2",
"pin-project-lite",
"rustix",
"tracing",
"windows-sys 0.59.0",
]
[[package]]
name = "portable-atomic"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
[[package]] [[package]]
name = "potential_utf" name = "potential_utf"
version = "0.1.2" version = "0.1.2"
@@ -3927,6 +4354,18 @@ dependencies = [
"syn 2.0.101", "syn 2.0.101",
] ]
[[package]]
name = "reflink-copy"
version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78c81d000a2c524133cc00d2f92f019d399e57906c3b7119271a2495354fe895"
dependencies = [
"cfg-if",
"libc",
"rustix",
"windows",
]
[[package]] [[package]]
name = "regex" name = "regex"
version = "1.11.1" version = "1.11.1"
@@ -3977,7 +4416,7 @@ version = "0.12.18"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e98ff6b0dbbe4d5a37318f433d4fc82babd21631f194d370409ceb2e40b2f0b5" checksum = "e98ff6b0dbbe4d5a37318f433d4fc82babd21631f194d370409ceb2e40b2f0b5"
dependencies = [ dependencies = [
"base64", "base64 0.22.1",
"bytes", "bytes",
"encoding_rs", "encoding_rs",
"futures-core", "futures-core",
@@ -4029,7 +4468,10 @@ name = "revive-dt-common"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"moka",
"once_cell",
"semver 1.0.26", "semver 1.0.26",
"serde",
"tokio", "tokio",
] ]
@@ -4040,6 +4482,7 @@ dependencies = [
"alloy", "alloy",
"alloy-primitives", "alloy-primitives",
"anyhow", "anyhow",
"dashmap",
"foundry-compilers-artifacts", "foundry-compilers-artifacts",
"revive-common", "revive-common",
"revive-dt-common", "revive-dt-common",
@@ -4070,9 +4513,12 @@ version = "0.1.0"
dependencies = [ dependencies = [
"alloy", "alloy",
"anyhow", "anyhow",
"bson",
"cacache",
"clap", "clap",
"futures", "futures",
"indexmap 2.10.0", "indexmap 2.10.0",
"once_cell",
"revive-dt-common", "revive-dt-common",
"revive-dt-compiler", "revive-dt-compiler",
"revive-dt-config", "revive-dt-config",
@@ -4081,9 +4527,13 @@ dependencies = [
"revive-dt-node-interaction", "revive-dt-node-interaction",
"revive-dt-report", "revive-dt-report",
"semver 1.0.26", "semver 1.0.26",
"serde",
"serde_json",
"temp-dir", "temp-dir",
"tempfile",
"tokio", "tokio",
"tracing", "tracing",
"tracing-appender",
"tracing-subscriber", "tracing-subscriber",
] ]
@@ -4095,6 +4545,9 @@ dependencies = [
"alloy-primitives", "alloy-primitives",
"alloy-sol-types", "alloy-sol-types",
"anyhow", "anyhow",
"futures",
"regex",
"revive-common",
"revive-dt-common", "revive-dt-common",
"semver 1.0.26", "semver 1.0.26",
"serde", "serde",
@@ -4109,6 +4562,7 @@ version = "0.1.0"
dependencies = [ dependencies = [
"alloy", "alloy",
"anyhow", "anyhow",
"revive-common",
"revive-dt-common", "revive-dt-common",
"revive-dt-config", "revive-dt-config",
"revive-dt-format", "revive-dt-format",
@@ -4135,12 +4589,12 @@ name = "revive-dt-report"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"revive-dt-common",
"revive-dt-compiler", "revive-dt-compiler",
"revive-dt-config", "revive-dt-config",
"revive-dt-format", "revive-dt-format",
"serde", "serde",
"serde_json", "serde_json",
"tracing",
] ]
[[package]] [[package]]
@@ -4421,6 +4875,12 @@ dependencies = [
"zeroize", "zeroize",
] ]
[[package]]
name = "scoped-tls"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
[[package]] [[package]]
name = "scopeguard" name = "scopeguard"
version = "1.2.0" version = "1.2.0"
@@ -4575,6 +5035,7 @@ version = "1.0.140"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373"
dependencies = [ dependencies = [
"indexmap 2.10.0",
"itoa", "itoa",
"memchr", "memchr",
"ryu", "ryu",
@@ -4618,7 +5079,7 @@ version = "3.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa" checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa"
dependencies = [ dependencies = [
"base64", "base64 0.22.1",
"chrono", "chrono",
"hex", "hex",
"indexmap 1.9.3", "indexmap 1.9.3",
@@ -4652,6 +5113,28 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "sha-1"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c"
dependencies = [
"cfg-if",
"cpufeatures",
"digest 0.10.7",
]
[[package]]
name = "sha1"
version = "0.10.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
dependencies = [
"cfg-if",
"cpufeatures",
"digest 0.10.7",
]
[[package]] [[package]]
name = "sha2" name = "sha2"
version = "0.9.9" version = "0.9.9"
@@ -5126,6 +5609,23 @@ dependencies = [
"unicode-xid", "unicode-xid",
] ]
[[package]]
name = "ssri"
version = "9.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da7a2b3c2bc9693bcb40870c4e9b5bf0d79f9cb46273321bf855ec513e919082"
dependencies = [
"base64 0.21.7",
"digest 0.10.7",
"hex",
"miette",
"serde",
"sha-1",
"sha2 0.10.9",
"thiserror 1.0.69",
"xxhash-rust",
]
[[package]] [[package]]
name = "stable_deref_trait" name = "stable_deref_trait"
version = "1.2.0" version = "1.2.0"
@@ -5273,6 +5773,12 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "tagptr"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417"
[[package]] [[package]]
name = "tap" name = "tap"
version = "1.0.1" version = "1.0.1"
@@ -5604,6 +6110,18 @@ dependencies = [
"tracing-core", "tracing-core",
] ]
[[package]]
name = "tracing-appender"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf"
dependencies = [
"crossbeam-channel",
"thiserror 1.0.69",
"time",
"tracing-subscriber",
]
[[package]] [[package]]
name = "tracing-attributes" name = "tracing-attributes"
version = "0.1.28" version = "0.1.28"
@@ -5770,6 +6288,12 @@ dependencies = [
"tinyvec", "tinyvec",
] ]
[[package]]
name = "unicode-width"
version = "0.1.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af"
[[package]] [[package]]
name = "unicode-xid" name = "unicode-xid"
version = "0.2.6" version = "0.2.6"
@@ -5805,12 +6329,30 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "uuid"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f33196643e165781c20a5ead5582283a7dacbb87855d867fbc2df3f81eddc1be"
dependencies = [
"getrandom 0.3.3",
"js-sys",
"serde",
"wasm-bindgen",
]
[[package]] [[package]]
name = "valuable" name = "valuable"
version = "0.1.1" version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
[[package]]
name = "value-bag"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "943ce29a8a743eb10d6082545d861b24f9d1b160b7d741e0f2cdf726bec909c5"
[[package]] [[package]]
name = "vcpkg" name = "vcpkg"
version = "0.2.15" version = "0.2.15"
@@ -6066,6 +6608,28 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows"
version = "0.61.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893"
dependencies = [
"windows-collections",
"windows-core",
"windows-future",
"windows-link",
"windows-numerics",
]
[[package]]
name = "windows-collections"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8"
dependencies = [
"windows-core",
]
[[package]] [[package]]
name = "windows-core" name = "windows-core"
version = "0.61.2" version = "0.61.2"
@@ -6079,6 +6643,17 @@ dependencies = [
"windows-strings 0.4.2", "windows-strings 0.4.2",
] ]
[[package]]
name = "windows-future"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e"
dependencies = [
"windows-core",
"windows-link",
"windows-threading",
]
[[package]] [[package]]
name = "windows-implement" name = "windows-implement"
version = "0.60.0" version = "0.60.0"
@@ -6103,9 +6678,19 @@ dependencies = [
[[package]] [[package]]
name = "windows-link" name = "windows-link"
version = "0.1.1" version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
[[package]]
name = "windows-numerics"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1"
dependencies = [
"windows-core",
"windows-link",
]
[[package]] [[package]]
name = "windows-registry" name = "windows-registry"
@@ -6195,6 +6780,15 @@ dependencies = [
"windows_x86_64_msvc 0.53.0", "windows_x86_64_msvc 0.53.0",
] ]
[[package]]
name = "windows-threading"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6"
dependencies = [
"windows-link",
]
[[package]] [[package]]
name = "windows_aarch64_gnullvm" name = "windows_aarch64_gnullvm"
version = "0.52.6" version = "0.52.6"
@@ -6324,6 +6918,12 @@ dependencies = [
"tap", "tap",
] ]
[[package]]
name = "xxhash-rust"
version = "0.8.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3"
[[package]] [[package]]
name = "yansi" name = "yansi"
version = "1.0.1" version = "1.0.1"
+12 -2
View File
@@ -8,7 +8,7 @@ authors = ["Parity Technologies <admin@parity.io>"]
 license = "MIT/Apache-2.0"
 edition = "2024"
 repository = "https://github.com/paritytech/revive-differential-testing.git"
-rust-version = "1.85.0"
+rust-version = "1.87.0"

 [workspace.dependencies]
 revive-dt-common = { version = "0.1.0", path = "crates/common" }
@@ -25,10 +25,15 @@ revive-dt-solc-binaries = { version = "0.1.0", path = "crates/solc-binaries" }
 alloy-primitives = "1.2.1"
 alloy-sol-types = "1.2.1"
 anyhow = "1.0"
+bson = { version = "2.15.0" }
+cacache = { version = "13.1.0" }
 clap = { version = "4", features = ["derive"] }
+dashmap = { version = "6.1.0" }
 foundry-compilers-artifacts = { version = "0.18.0" }
 futures = { version = "0.3.31" }
 hex = "0.4.3"
+regex = "1"
+moka = "0.12.10"
 reqwest = { version = "0.12.15", features = ["json"] }
 once_cell = "1.21"
 semver = { version = "1.0", features = ["serde"] }
@@ -36,19 +41,22 @@ serde = { version = "1.0", default-features = false, features = ["derive"] }
 serde_json = { version = "1.0", default-features = false, features = [
     "arbitrary_precision",
     "std",
+    "unbounded_depth",
 ] }
 sha2 = { version = "0.10.9" }
 sp-core = "36.1.0"
 sp-runtime = "41.1.0"
 temp-dir = { version = "0.1.16" }
 tempfile = "3.3"
+thiserror = "2"
 tokio = { version = "1.47.0", default-features = false, features = [
     "rt-multi-thread",
     "process",
     "rt",
 ] }
 uuid = { version = "1.8", features = ["v4"] }
-tracing = "0.1.41"
+tracing = { version = "0.1.41" }
+tracing-appender = { version = "0.2.3" }
 tracing-subscriber = { version = "0.3.19", default-features = false, features = [
     "fmt",
     "json",
@@ -83,3 +91,5 @@ features = [
 inherits = "release"
 lto = true
 codegen-units = 1
+
+[workspace.lints.clippy]
+1 -2
View File
@@ -1,8 +1,7 @@
 {
     "modes": [
         "Y >=0.8.9",
-        "E",
-        "I"
+        "E"
     ],
     "cases": [
         {
+1
View File
@@ -0,0 +1 @@
+6
View File
@@ -10,5 +10,11 @@ rust-version.workspace = true

 [dependencies]
 anyhow = { workspace = true }
+moka = { workspace = true, features = ["sync"] }
+once_cell = { workspace = true }
 semver = { workspace = true }
+serde = { workspace = true }
 tokio = { workspace = true, default-features = false, features = ["time"] }
+
+[lints]
+workspace = true
+49
View File
@@ -0,0 +1,49 @@
//! This module implements a cached file system allowing for results to be stored in-memory rather
//! than being queried from the file system again.
use std::fs;
use std::io::{Error, Result};
use std::path::{Path, PathBuf};
use moka::sync::Cache;
use once_cell::sync::Lazy;
pub fn read(path: impl AsRef<Path>) -> Result<Vec<u8>> {
static READ_CACHE: Lazy<Cache<PathBuf, Vec<u8>>> = Lazy::new(|| Cache::new(10_000));
let path = path.as_ref().canonicalize()?;
match READ_CACHE.get(path.as_path()) {
Some(content) => Ok(content),
None => {
let content = fs::read(path.as_path())?;
READ_CACHE.insert(path, content.clone());
Ok(content)
}
}
}
pub fn read_to_string(path: impl AsRef<Path>) -> Result<String> {
let content = read(path)?;
String::from_utf8(content).map_err(|_| {
Error::new(
std::io::ErrorKind::InvalidData,
"The contents of the file are not valid UTF8",
)
})
}
pub fn read_dir(path: impl AsRef<Path>) -> Result<Box<dyn Iterator<Item = Result<PathBuf>>>> {
static READ_DIR_CACHE: Lazy<Cache<PathBuf, Vec<PathBuf>>> = Lazy::new(|| Cache::new(10_000));
let path = path.as_ref().canonicalize()?;
match READ_DIR_CACHE.get(path.as_path()) {
Some(entries) => Ok(Box::new(entries.into_iter().map(Ok)) as Box<_>),
None => {
let entries = fs::read_dir(path.as_path())?
.flat_map(|maybe_entry| maybe_entry.map(|entry| entry.path()))
.collect();
READ_DIR_CACHE.insert(path.clone(), entries);
Ok(read_dir(path).unwrap())
}
}
}
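The net effect is that the first read of a canonicalized path goes to disk and every later read of the same path is answered from the in-memory moka cache. A minimal usage sketch of the new helpers (the corpus paths are hypothetical; any existing file and directory behave the same way):

use revive_dt_common::cached_fs;

fn main() -> std::io::Result<()> {
    // First call hits the real file system; repeated calls for the same canonical path
    // are served from the in-memory cache.
    let bytes = cached_fs::read("tests/corpus/metadata.json")?;
    let text = cached_fs::read_to_string("tests/corpus/metadata.json")?;
    assert_eq!(bytes.len(), text.len());

    // read_dir yields the cached entry paths of the directory.
    for entry in cached_fs::read_dir("tests/corpus")? {
        println!("{}", entry?.display());
    }
    Ok(())
}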
@@ -19,6 +19,11 @@ pub struct FilesWithExtensionIterator {
     /// this vector then they will be returned when the [`Iterator::next`] method is called. If not
     /// then we visit one of the next directories to visit.
     files_matching_allowed_extensions: Vec<PathBuf>,
+    /// This option controls if the cached file system should be used or not. This could be
+    /// better for certain cases where the entries in the directories do not change and therefore
+    /// caching can be used.
+    use_cached_fs: bool,
 }

 impl FilesWithExtensionIterator {
@@ -27,6 +32,7 @@ impl FilesWithExtensionIterator {
             allowed_extensions: Default::default(),
             directories_to_search: vec![root_directory.as_ref().to_path_buf()],
             files_matching_allowed_extensions: Default::default(),
+            use_cached_fs: Default::default(),
         }
     }

@@ -37,6 +43,11 @@ impl FilesWithExtensionIterator {
         self.allowed_extensions.insert(allowed_extension.into());
         self
     }
+
+    pub fn with_use_cached_fs(mut self, use_cached_fs: bool) -> Self {
+        self.use_cached_fs = use_cached_fs;
+        self
+    }
 }

 impl Iterator for FilesWithExtensionIterator {
@@ -49,16 +60,19 @@
         let directory_to_search = self.directories_to_search.pop()?;

-        // Read all of the entries in the directory. If we failed to read this dir's entires then we
-        // elect to just ignore it and look in the next directory, we do that by calling the next
-        // method again on the iterator, which is an intentional decision that we made here instead
-        // of panicking.
-        let Ok(dir_entries) = std::fs::read_dir(directory_to_search) else {
-            return self.next();
+        let iterator = if self.use_cached_fs {
+            let Ok(dir_entries) = crate::cached_fs::read_dir(directory_to_search.as_path()) else {
+                return self.next();
+            };
+            Box::new(dir_entries) as Box<dyn Iterator<Item = std::io::Result<PathBuf>>>
+        } else {
+            let Ok(dir_entries) = std::fs::read_dir(directory_to_search) else {
+                return self.next();
+            };
+            Box::new(dir_entries.map(|maybe_entry| maybe_entry.map(|entry| entry.path()))) as Box<_>
         };

-        for entry in dir_entries.flatten() {
-            let entry_path = entry.path();
+        for entry_path in iterator.flatten() {
             if entry_path.is_dir() {
                 self.directories_to_search.push(entry_path)
             } else if entry_path.is_file()
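A hedged sketch of how the iterator could be driven with the new flag; only with_use_cached_fs is taken verbatim from the diff above, while the module path, the constructor and the with_allowed_extension name are assumptions about the surrounding API:

// Module path and builder names other than `with_use_cached_fs` are assumed here.
use revive_dt_common::iterators::FilesWithExtensionIterator;

fn main() {
    let sol_files: Vec<_> = FilesWithExtensionIterator::new("tests/corpus")
        .with_allowed_extension("sol")
        // Opt into the cached file system when the directory tree will not change mid-run.
        .with_use_cached_fs(true)
        .collect();
    println!("found {} Solidity files", sol_files.len());
}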
+1
View File
@@ -1,6 +1,7 @@
 //! This crate provides common concepts, functionality, types, macros, and more that other crates in
 //! the workspace can benefit from.

+pub mod cached_fs;
 pub mod fs;
 pub mod futures;
 pub mod iterators;
@@ -1,3 +1,14 @@
+#[macro_export]
+macro_rules! impl_for_wrapper {
+    (Display, $ident: ident) => {
+        impl std::fmt::Display for $ident {
+            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+                std::fmt::Display::fmt(&self.0, f)
+            }
+        }
+    };
+}
+
 /// Defines wrappers around types.
 ///
 /// For example, the macro invocation seen below:
@@ -42,7 +53,13 @@
 macro_rules! define_wrapper_type {
     (
         $(#[$meta: meta])*
-        $vis:vis struct $ident: ident($ty: ty);
+        $vis:vis struct $ident: ident($ty: ty)
+        $(
+            impl $($trait_ident: ident),*
+        )?
+        ;
     ) => {
         $(#[$meta])*
         $vis struct $ident($ty);
@@ -98,9 +115,15 @@ macro_rules! define_wrapper_type {
                 value.0
             }
         }
+
+        $(
+            $(
+                $crate::macros::impl_for_wrapper!($trait_ident, $ident);
+            )*
+        )?
     };
 }

 /// Technically not needed but this allows for the macro to be found in the `macros` module of the
 /// crate in addition to being found in the root of the crate.
-pub use define_wrapper_type;
+pub use {define_wrapper_type, impl_for_wrapper};
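The optional impl clause expands into one impl_for_wrapper! call per listed trait, so wrapper types can now pick up a pass-through Display for free. A sketch of an invocation using the CaseIdx wrapper named in the commit messages above (the exact crate-side module layout is assumed):

use revive_dt_common::define_wrapper_type;

define_wrapper_type! {
    /// Index of a test case within a metadata file.
    pub struct CaseIdx(usize) impl Display;
}

fn main() {
    // Constructed directly since the wrapper is defined in this module.
    let idx = CaseIdx(3);
    // Uses the Display impl generated via `impl_for_wrapper!(Display, CaseIdx)`.
    assert_eq!(idx.to_string(), "3");
}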
+2
View File
@@ -1,3 +1,5 @@
+mod mode;
 mod version_or_requirement;
+pub use mode::*;
 pub use version_or_requirement::*;
+167
View File
@@ -0,0 +1,167 @@
use crate::types::VersionOrRequirement;
use semver::Version;
use serde::{Deserialize, Serialize};
use std::fmt::Display;
use std::str::FromStr;
/// This represents a mode that a given test should be run with, if possible.
///
/// We obtain this by taking a [`ParsedMode`], which may be looser or more strict
/// in its requirements, and then expanding it out into a list of [`Mode`]s.
///
/// Use [`ParsedMode::to_test_modes()`] to do this.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Mode {
pub pipeline: ModePipeline,
pub optimize_setting: ModeOptimizerSetting,
pub version: Option<semver::VersionReq>,
}
impl Display for Mode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.pipeline.fmt(f)?;
f.write_str(" ")?;
self.optimize_setting.fmt(f)?;
if let Some(version) = &self.version {
f.write_str(" ")?;
version.fmt(f)?;
}
Ok(())
}
}
impl Mode {
/// Return all of the available mode combinations.
pub fn all() -> impl Iterator<Item = Mode> {
ModePipeline::test_cases().flat_map(|pipeline| {
ModeOptimizerSetting::test_cases().map(move |optimize_setting| Mode {
pipeline,
optimize_setting,
version: None,
})
})
}
/// Resolves the [`Mode`]'s solidity version requirement into a [`VersionOrRequirement`] if
/// the requirement is present on the object. Otherwise, the passed default version is used.
pub fn compiler_version_to_use(&self, default: Version) -> VersionOrRequirement {
match self.version {
Some(ref requirement) => requirement.clone().into(),
None => default.into(),
}
}
}
/// What do we want the compiler to do?
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ModePipeline {
/// Compile Solidity code via Yul IR
ViaYulIR,
/// Compile Solidity direct to assembly
ViaEVMAssembly,
}
impl FromStr for ModePipeline {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
// via Yul IR
"Y" => Ok(ModePipeline::ViaYulIR),
// Don't go via Yul IR
"E" => Ok(ModePipeline::ViaEVMAssembly),
// Anything else that we see isn't a mode at all
_ => Err(anyhow::anyhow!(
"Unsupported pipeline '{s}': expected 'Y' or 'E'"
)),
}
}
}
impl Display for ModePipeline {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ModePipeline::ViaYulIR => f.write_str("Y"),
ModePipeline::ViaEVMAssembly => f.write_str("E"),
}
}
}
impl ModePipeline {
/// Should we go via Yul IR?
pub fn via_yul_ir(&self) -> bool {
matches!(self, ModePipeline::ViaYulIR)
}
/// An iterator over the available pipelines that we'd like to test,
/// when an explicit pipeline was not specified.
pub fn test_cases() -> impl Iterator<Item = ModePipeline> + Clone {
[ModePipeline::ViaYulIR, ModePipeline::ViaEVMAssembly].into_iter()
}
}
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum ModeOptimizerSetting {
/// 0 / -: Don't apply any optimizations
M0,
/// 1: Apply less than default optimizations
M1,
/// 2: Apply the default optimizations
M2,
/// 3 / +: Apply aggressive optimizations
M3,
/// s: Optimize for size
Ms,
/// z: Aggressively optimize for size
Mz,
}
impl FromStr for ModeOptimizerSetting {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"M0" => Ok(ModeOptimizerSetting::M0),
"M1" => Ok(ModeOptimizerSetting::M1),
"M2" => Ok(ModeOptimizerSetting::M2),
"M3" => Ok(ModeOptimizerSetting::M3),
"Ms" => Ok(ModeOptimizerSetting::Ms),
"Mz" => Ok(ModeOptimizerSetting::Mz),
_ => Err(anyhow::anyhow!(
"Unsupported optimizer setting '{s}': expected 'M0', 'M1', 'M2', 'M3', 'Ms' or 'Mz'"
)),
}
}
}
impl Display for ModeOptimizerSetting {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ModeOptimizerSetting::M0 => f.write_str("M0"),
ModeOptimizerSetting::M1 => f.write_str("M1"),
ModeOptimizerSetting::M2 => f.write_str("M2"),
ModeOptimizerSetting::M3 => f.write_str("M3"),
ModeOptimizerSetting::Ms => f.write_str("Ms"),
ModeOptimizerSetting::Mz => f.write_str("Mz"),
}
}
}
impl ModeOptimizerSetting {
/// An iterator over the available optimizer settings that we'd like to test,
/// when an explicit optimizer setting was not specified.
pub fn test_cases() -> impl Iterator<Item = ModeOptimizerSetting> + Clone {
[
// No optimizations:
ModeOptimizerSetting::M0,
// Aggressive optimizations:
ModeOptimizerSetting::M3,
]
.into_iter()
}
/// Are any optimizations enabled?
pub fn optimizations_enabled(&self) -> bool {
!matches!(self, ModeOptimizerSetting::M0)
}
}
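A short sketch of how these pieces compose; the imports assume the revive_dt_common::types re-export added in this change, and the string forms come straight from the Display impls above:

use std::str::FromStr;

use revive_dt_common::types::{Mode, ModeOptimizerSetting, ModePipeline};

fn main() -> anyhow::Result<()> {
    // The default matrix is {Y, E} x {M0, M3} with no version requirement attached.
    for mode in Mode::all() {
        println!("{mode}"); // "Y M0", "Y M3", "E M0", "E M3"
    }

    // Assemble a specific mode the way a parsed metadata entry might be narrowed down.
    let mode = Mode {
        pipeline: ModePipeline::from_str("Y")?,
        optimize_setting: ModeOptimizerSetting::from_str("M3")?,
        version: Some(semver::VersionReq::parse(">=0.8.9")?),
    };
    assert_eq!(mode.to_string(), "Y M3 >=0.8.9");
    Ok(())
}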
+4
View File
@@ -18,9 +18,13 @@ revive-common = { workspace = true }
 alloy = { workspace = true }
 alloy-primitives = { workspace = true }
 anyhow = { workspace = true }
+dashmap = { workspace = true }
 foundry-compilers-artifacts = { workspace = true }
 semver = { workspace = true }
 serde = { workspace = true }
 serde_json = { workspace = true }
 tracing = { workspace = true }
 tokio = { workspace = true }
+
+[lints]
+workspace = true
+4
View File
@@ -0,0 +1,4 @@
use semver::Version;
/// This is the first version of solc that supports the `--via-ir` flag / "viaIR" input JSON.
pub const SOLC_VERSION_SUPPORTING_VIA_YUL_IR: Version = Version::new(0, 8, 13);
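A sketch of a unit test that could sit in a #[cfg(test)] module next to this constant (the test itself is not part of the change):

use semver::Version;

use super::SOLC_VERSION_SUPPORTING_VIA_YUL_IR;

#[test]
fn via_ir_cutoff_is_0_8_13() {
    // 0.8.12 predates the viaIR pipeline; 0.8.13 is the first release that accepts it.
    assert!(Version::new(0, 8, 12) < SOLC_VERSION_SUPPORTING_VIA_YUL_IR);
    assert!(Version::new(0, 8, 13) >= SOLC_VERSION_SUPPORTING_VIA_YUL_IR);
}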
+52 -10
View File
@@ -3,9 +3,10 @@
 //! - Polkadot revive resolc compiler
 //! - Polkadot revive Wasm compiler

+mod constants;
+
 use std::{
     collections::HashMap,
-    fs::read_to_string,
     hash::Hash,
     path::{Path, PathBuf},
 };
@@ -16,9 +17,13 @@ use semver::Version;
 use serde::{Deserialize, Serialize};

 use revive_common::EVMVersion;
+use revive_dt_common::cached_fs::read_to_string;
 use revive_dt_common::types::VersionOrRequirement;
 use revive_dt_config::Arguments;

+// Re-export this as it's a part of the compiler interface.
+pub use revive_dt_common::types::{Mode, ModeOptimizerSetting, ModePipeline};
+
 pub mod revive_js;
 pub mod revive_resolc;
 pub mod solc;
@@ -42,19 +47,27 @@ pub trait SolidityCompiler {
         version: impl Into<VersionOrRequirement>,
     ) -> impl Future<Output = anyhow::Result<PathBuf>>;

-    fn version(&self) -> anyhow::Result<Version>;
+    fn version(&self) -> impl Future<Output = anyhow::Result<Version>>;
+
+    /// Does the compiler support the provided mode and version settings?
+    fn supports_mode(
+        compiler_version: &Version,
+        optimize_setting: ModeOptimizerSetting,
+        pipeline: ModePipeline,
+    ) -> bool;
 }

 /// The generic compilation input configuration.
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct CompilerInput {
-    pub enable_optimization: Option<bool>,
-    pub via_ir: Option<bool>,
+    pub pipeline: Option<ModePipeline>,
+    pub optimization: Option<ModeOptimizerSetting>,
     pub evm_version: Option<EVMVersion>,
     pub allow_paths: Vec<PathBuf>,
     pub base_path: Option<PathBuf>,
     pub sources: HashMap<PathBuf, String>,
     pub libraries: HashMap<PathBuf, HashMap<String, Address>>,
+    pub revert_string_handling: Option<RevertString>,
 }

 /// The generic compilation output configuration.
@@ -84,25 +97,26 @@ where
     pub fn new() -> Self {
         Self {
             input: CompilerInput {
-                enable_optimization: Default::default(),
-                via_ir: Default::default(),
+                pipeline: Default::default(),
+                optimization: Default::default(),
                 evm_version: Default::default(),
                 allow_paths: Default::default(),
                 base_path: Default::default(),
                 sources: Default::default(),
                 libraries: Default::default(),
+                revert_string_handling: Default::default(),
             },
             additional_options: T::Options::default(),
         }
     }

-    pub fn with_optimization(mut self, value: impl Into<Option<bool>>) -> Self {
-        self.input.enable_optimization = value.into();
+    pub fn with_optimization(mut self, value: impl Into<Option<ModeOptimizerSetting>>) -> Self {
+        self.input.optimization = value.into();
         self
     }

-    pub fn with_via_ir(mut self, value: impl Into<Option<bool>>) -> Self {
-        self.input.via_ir = value.into();
+    pub fn with_pipeline(mut self, value: impl Into<Option<ModePipeline>>) -> Self {
+        self.input.pipeline = value.into();
         self
     }
@@ -142,11 +156,27 @@ where
         self
     }

+    pub fn with_revert_string_handling(
+        mut self,
+        revert_string_handling: impl Into<Option<RevertString>>,
+    ) -> Self {
+        self.input.revert_string_handling = revert_string_handling.into();
+        self
+    }
+
     pub fn with_additional_options(mut self, options: impl Into<T::Options>) -> Self {
         self.additional_options = options.into();
         self
     }

+    pub fn then(self, callback: impl FnOnce(Self) -> Self) -> Self {
+        callback(self)
+    }
+
+    pub fn try_then<E>(self, callback: impl FnOnce(Self) -> Result<Self, E>) -> Result<Self, E> {
+        callback(self)
+    }
+
     pub async fn try_build(
         self,
         compiler_path: impl AsRef<Path>,
@@ -160,3 +190,15 @@ where
         self.input.clone()
     }
 }
+
+/// Defines how the compiler should handle revert strings.
+#[derive(
+    Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Serialize, Deserialize,
+)]
+pub enum RevertString {
+    #[default]
+    Default,
+    Debug,
+    Strip,
+    VerboseDebug,
+}
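Putting the reshaped input together, here is a hedged sketch of a CompilerInput assembled by hand; in the test runner this normally comes out of the builder's with_* methods, and the Counter source and path are placeholders:

use std::collections::HashMap;

use revive_dt_compiler::{CompilerInput, ModeOptimizerSetting, ModePipeline, RevertString};

fn main() {
    let input = CompilerInput {
        pipeline: Some(ModePipeline::ViaYulIR),
        optimization: Some(ModeOptimizerSetting::M3),
        evm_version: None,
        allow_paths: Vec::new(),
        base_path: None,
        sources: HashMap::from([(
            "Counter.sol".into(),
            "contract Counter { uint256 public n; }".to_owned(),
        )]),
        libraries: HashMap::new(),
        revert_string_handling: Some(RevertString::Strip),
    };
    // The Y pipeline implies compiling via Yul IR.
    assert!(input.pipeline.unwrap().via_yul_ir());
}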
+62 -24
View File
@@ -4,8 +4,10 @@
use std::{ use std::{
path::PathBuf, path::PathBuf,
process::{Command, Stdio}, process::{Command, Stdio},
sync::LazyLock,
}; };
use dashmap::DashMap;
use revive_dt_common::types::VersionOrRequirement; use revive_dt_common::types::VersionOrRequirement;
use revive_dt_config::Arguments; use revive_dt_config::Arguments;
use revive_solc_json_interface::{ use revive_solc_json_interface::{
@@ -14,7 +16,8 @@ use revive_solc_json_interface::{
SolcStandardJsonOutput, SolcStandardJsonOutput,
}; };
use crate::{CompilerInput, CompilerOutput, SolidityCompiler}; use super::constants::SOLC_VERSION_SUPPORTING_VIA_YUL_IR;
use crate::{CompilerInput, CompilerOutput, ModeOptimizerSetting, ModePipeline, SolidityCompiler};
use alloy::json_abi::JsonAbi; use alloy::json_abi::JsonAbi;
use anyhow::Context; use anyhow::Context;
@@ -39,17 +42,25 @@ impl SolidityCompiler for Resolc {
async fn build( async fn build(
&self, &self,
CompilerInput { CompilerInput {
enable_optimization, pipeline,
// Ignored and not honored since this is required for the resolc compilation. optimization,
via_ir: _via_ir,
evm_version, evm_version,
allow_paths, allow_paths,
base_path, base_path,
sources, sources,
libraries, libraries,
// TODO: this is currently not being handled since there is no way to pass it into
// resolc. So, we need to go back to this later once it's supported.
revert_string_handling: _,
}: CompilerInput, }: CompilerInput,
additional_options: Self::Options, additional_options: Self::Options,
) -> anyhow::Result<CompilerOutput> { ) -> anyhow::Result<CompilerOutput> {
if !matches!(pipeline, None | Some(ModePipeline::ViaYulIR)) {
anyhow::bail!(
"Resolc only supports the Y (via Yul IR) pipeline, but the provided pipeline is {pipeline:?}"
);
}
let input = SolcStandardJsonInput { let input = SolcStandardJsonInput {
language: SolcStandardJsonInputLanguage::Solidity, language: SolcStandardJsonInputLanguage::Solidity,
sources: sources sources: sources
@@ -78,7 +89,9 @@ impl SolidityCompiler for Resolc {
output_selection: Some(SolcStandardJsonInputSettingsSelection::new_required()), output_selection: Some(SolcStandardJsonInputSettingsSelection::new_required()),
via_ir: Some(true), via_ir: Some(true),
optimizer: SolcStandardJsonInputSettingsOptimizer::new( optimizer: SolcStandardJsonInputSettingsOptimizer::new(
enable_optimization.unwrap_or(false), optimization
.unwrap_or(ModeOptimizerSetting::M0)
.optimizations_enabled(),
None, None,
&Version::new(0, 0, 0), &Version::new(0, 0, 0),
false, false,
@@ -208,26 +221,51 @@ impl SolidityCompiler for Resolc {
Ok(PathBuf::from("resolc")) Ok(PathBuf::from("resolc"))
} }
fn version(&self) -> anyhow::Result<semver::Version> { async fn version(&self) -> anyhow::Result<semver::Version> {
// Logic for parsing the resolc version from the following string: /// This is a cache of the path of the compiler to the version number of the compiler. We
// Solidity frontend for the revive compiler version 0.3.0+commit.b238913.llvm-18.1.8 /// choose to cache the version in this way rather than through a field on the struct since
/// compiler objects are being created all the time from the path and the compiler object is
/// not reused over time.
static VERSION_CACHE: LazyLock<DashMap<PathBuf, Version>> = LazyLock::new(Default::default);
let output = Command::new(self.resolc_path.as_path()) match VERSION_CACHE.entry(self.resolc_path.clone()) {
.arg("--version") dashmap::Entry::Occupied(occupied_entry) => Ok(occupied_entry.get().clone()),
.stdout(Stdio::piped()) dashmap::Entry::Vacant(vacant_entry) => {
.spawn()? let output = Command::new(self.resolc_path.as_path())
.wait_with_output()? .arg("--version")
.stdout; .stdout(Stdio::piped())
let output = String::from_utf8_lossy(&output); .spawn()?
let version_string = output .wait_with_output()?
.split("version ") .stdout;
.nth(1)
.context("Version parsing failed")?
.split("+")
.next()
.context("Version parsing failed")?;
Version::parse(version_string).map_err(Into::into) let output = String::from_utf8_lossy(&output);
let version_string = output
.split("version ")
.nth(1)
.context("Version parsing failed")?
.split("+")
.next()
.context("Version parsing failed")?;
let version = Version::parse(version_string)?;
vacant_entry.insert(version.clone());
Ok(version)
}
}
}
fn supports_mode(
compiler_version: &Version,
_optimize_setting: ModeOptimizerSetting,
pipeline: ModePipeline,
) -> bool {
// We only support the Y (i.e. compile via Yul IR) mode here, which also means that we can
// only use solc version 0.8.13 and above. We must always compile via Yul IR as resolc
// needs this to translate to LLVM IR and then RISCV.
pipeline == ModePipeline::ViaYulIR
&& compiler_version >= &SOLC_VERSION_SUPPORTING_VIA_YUL_IR
} }
} }
@@ -245,7 +283,7 @@ mod test {
let compiler = Resolc::new(path); let compiler = Resolc::new(path);
// Act // Act
let version = compiler.version(); let version = compiler.version().await;
// Assert // Assert
let _ = version.expect("Failed to get version"); let _ = version.expect("Failed to get version");
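The version lookup above memoizes the parsed version in a process-wide `LazyLock<DashMap<PathBuf, Version>>` keyed by the compiler path, so repeatedly constructed compiler objects do not shell out to `--version` every time. A minimal sketch of that pattern, assuming the `dashmap`, `semver`, and `anyhow` crates; `probe_version` is a hypothetical stand-in for spawning the compiler and parsing its output:

```rust
use std::{
    path::{Path, PathBuf},
    sync::LazyLock,
};

use dashmap::DashMap;
use semver::Version;

// One cache per process, keyed by the compiler executable's path.
static VERSION_CACHE: LazyLock<DashMap<PathBuf, Version>> = LazyLock::new(Default::default);

fn probe_version(_path: &Path) -> anyhow::Result<Version> {
    // Hypothetical: spawn `<compiler> --version` and parse the reported version.
    Ok(Version::new(0, 3, 0))
}

fn cached_version(path: PathBuf) -> anyhow::Result<Version> {
    // Fast path: the version for this path has already been probed.
    if let Some(entry) = VERSION_CACHE.get(&path) {
        return Ok(entry.value().clone());
    }
    // Slow path: probe once and remember the result for subsequent callers.
    let version = probe_version(&path)?;
    VERSION_CACHE.insert(path, version.clone());
    Ok(version)
}
```

The worst case with this shape is that two tasks race past the lookup and both probe the compiler once, which is harmless for an idempotent `--version` call.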
+78 -32
View File
@@ -4,13 +4,16 @@
use std::{ use std::{
path::PathBuf, path::PathBuf,
process::{Command, Stdio}, process::{Command, Stdio},
sync::LazyLock,
}; };
use dashmap::DashMap;
use revive_dt_common::types::VersionOrRequirement; use revive_dt_common::types::VersionOrRequirement;
use revive_dt_config::Arguments; use revive_dt_config::Arguments;
use revive_dt_solc_binaries::download_solc; use revive_dt_solc_binaries::download_solc;
use crate::{CompilerInput, CompilerOutput, SolidityCompiler}; use super::constants::SOLC_VERSION_SUPPORTING_VIA_YUL_IR;
use crate::{CompilerInput, CompilerOutput, ModeOptimizerSetting, ModePipeline, SolidityCompiler};
use anyhow::Context; use anyhow::Context;
use foundry_compilers_artifacts::{ use foundry_compilers_artifacts::{
@@ -35,16 +38,28 @@ impl SolidityCompiler for Solc {
async fn build( async fn build(
&self, &self,
CompilerInput { CompilerInput {
enable_optimization, pipeline,
via_ir, optimization,
evm_version, evm_version,
allow_paths, allow_paths,
base_path, base_path,
sources, sources,
libraries, libraries,
revert_string_handling,
}: CompilerInput, }: CompilerInput,
_: Self::Options, _: Self::Options,
) -> anyhow::Result<CompilerOutput> { ) -> anyhow::Result<CompilerOutput> {
let compiler_supports_via_ir = self.version().await? >= SOLC_VERSION_SUPPORTING_VIA_YUL_IR;
// Be careful to entirely omit the viaIR field if the compiler does not support it,
// as it will error if you provide fields it does not know about. Because
// `supports_mode` is called prior to instantiating a compiler, we should never
// ask for something which is invalid.
let via_ir = match (pipeline, compiler_supports_via_ir) {
(pipeline, true) => pipeline.map(|p| p.via_yul_ir()),
(_pipeline, false) => None,
};
let input = SolcInput { let input = SolcInput {
language: SolcLanguage::Solidity, language: SolcLanguage::Solidity,
sources: Sources( sources: Sources(
@@ -55,7 +70,7 @@ impl SolidityCompiler for Solc {
), ),
settings: Settings { settings: Settings {
optimizer: Optimizer { optimizer: Optimizer {
enabled: enable_optimization, enabled: optimization.map(|o| o.optimizations_enabled()),
details: Some(Default::default()), details: Some(Default::default()),
..Default::default() ..Default::default()
}, },
@@ -87,6 +102,15 @@ impl SolidityCompiler for Solc {
}) })
.collect(), .collect(),
}, },
debug: revert_string_handling.map(|revert_string_handling| DebuggingSettings {
revert_strings: match revert_string_handling {
crate::RevertString::Default => Some(RevertStrings::Default),
crate::RevertString::Debug => Some(RevertStrings::Debug),
crate::RevertString::Strip => Some(RevertStrings::Strip),
crate::RevertString::VerboseDebug => Some(RevertStrings::VerboseDebug),
},
debug_info: Default::default(),
}),
..Default::default() ..Default::default()
}, },
}; };
@@ -187,30 +211,56 @@ impl SolidityCompiler for Solc {
Ok(path) Ok(path)
} }
fn version(&self) -> anyhow::Result<semver::Version> { async fn version(&self) -> anyhow::Result<semver::Version> {
// The following is the parsing code for the version from the solc version strings which /// This is a cache mapping the compiler path to the compiler's version number. We
// look like the following: /// choose to cache the version in this way rather than through a field on the struct since
// ``` /// compiler objects are being created all the time from the path and the compiler object is
// solc, the solidity compiler commandline interface /// not reused over time.
// Version: 0.8.30+commit.73712a01.Darwin.appleclang static VERSION_CACHE: LazyLock<DashMap<PathBuf, Version>> = LazyLock::new(Default::default);
// ```
let child = Command::new(self.solc_path.as_path()) match VERSION_CACHE.entry(self.solc_path.clone()) {
.arg("--version") dashmap::Entry::Occupied(occupied_entry) => Ok(occupied_entry.get().clone()),
.stdout(Stdio::piped()) dashmap::Entry::Vacant(vacant_entry) => {
.spawn()?; // The following is the parsing code for the version from the solc version strings
let output = child.wait_with_output()?; // which look like the following:
let output = String::from_utf8_lossy(&output.stdout); // ```
let version_line = output // solc, the solidity compiler commandline interface
.split("Version: ") // Version: 0.8.30+commit.73712a01.Darwin.appleclang
.nth(1) // ```
.context("Version parsing failed")?; let child = Command::new(self.solc_path.as_path())
let version_string = version_line .arg("--version")
.split("+") .stdout(Stdio::piped())
.next() .spawn()?;
.context("Version parsing failed")?; let output = child.wait_with_output()?;
let output = String::from_utf8_lossy(&output.stdout);
let version_line = output
.split("Version: ")
.nth(1)
.context("Version parsing failed")?;
let version_string = version_line
.split("+")
.next()
.context("Version parsing failed")?;
Version::parse(version_string).map_err(Into::into) let version = Version::parse(version_string)?;
vacant_entry.insert(version.clone());
Ok(version)
}
}
}
fn supports_mode(
compiler_version: &Version,
_optimize_setting: ModeOptimizerSetting,
pipeline: ModePipeline,
) -> bool {
// solc 0.8.13 and above supports --via-ir, and less than that does not. Thus, we support mode E
// (i.e. no Yul IR) in either case, but only support Y (via Yul IR) if the compiler is new enough.
pipeline == ModePipeline::ViaEVMAssembly
|| (pipeline == ModePipeline::ViaYulIR
&& compiler_version >= &SOLC_VERSION_SUPPORTING_VIA_YUL_IR)
} }
} }
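The `supports_mode` change above gates the Yul IR pipeline on the compiler version: EVM-assembly compilation is always allowed, while `--via-ir` requires solc 0.8.13 or newer. A rough sketch of the same check, assuming the `semver` crate; `Pipeline` here is an illustrative enum rather than the repository's type:

```rust
use semver::Version;

#[derive(Clone, Copy, PartialEq, Eq)]
enum Pipeline {
    ViaEvmAssembly,
    ViaYulIr,
}

fn supports_pipeline(compiler_version: &Version, pipeline: Pipeline) -> bool {
    match pipeline {
        // Compiling via EVM assembly works on every solc release.
        Pipeline::ViaEvmAssembly => true,
        // `--via-ir` only exists from solc 0.8.13 onwards, so older compilers opt out.
        Pipeline::ViaYulIr => *compiler_version >= Version::new(0, 8, 13),
    }
}
```

Because this check runs before a compiler is instantiated, the build path can then safely omit the `viaIR` field for older compilers instead of sending a setting they would reject.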
@@ -222,15 +272,13 @@ mod test {
async fn compiler_version_can_be_obtained() { async fn compiler_version_can_be_obtained() {
// Arrange // Arrange
let args = Arguments::default(); let args = Arguments::default();
println!("Getting compiler path");
let path = Solc::get_compiler_executable(&args, Version::new(0, 7, 6)) let path = Solc::get_compiler_executable(&args, Version::new(0, 7, 6))
.await .await
.unwrap(); .unwrap();
println!("Got compiler path");
let compiler = Solc::new(path); let compiler = Solc::new(path);
// Act // Act
let version = compiler.version(); let version = compiler.version().await;
// Assert // Assert
assert_eq!( assert_eq!(
@@ -243,15 +291,13 @@ mod test {
async fn compiler_version_can_be_obtained1() { async fn compiler_version_can_be_obtained1() {
// Arrange // Arrange
let args = Arguments::default(); let args = Arguments::default();
println!("Getting compiler path");
let path = Solc::get_compiler_executable(&args, Version::new(0, 4, 21)) let path = Solc::get_compiler_executable(&args, Version::new(0, 4, 21))
.await .await
.unwrap(); .unwrap();
println!("Got compiler path");
let compiler = Solc::new(path); let compiler = Solc::new(path);
// Act // Act
let version = compiler.version(); let version = compiler.version().await;
// Assert // Assert
assert_eq!( assert_eq!(
-1
View File
@@ -11,7 +11,6 @@ async fn contracts_can_be_compiled_with_solc() {
let compiler_path = Solc::get_compiler_executable(&args, Version::new(0, 8, 30)) let compiler_path = Solc::get_compiler_executable(&args, Version::new(0, 8, 30))
.await .await
.unwrap(); .unwrap();
println!("About to assert");
// Act // Act
let output = Compiler::<Solc>::new() let output = Compiler::<Solc>::new()
+2
View File
@@ -15,3 +15,5 @@ semver = { workspace = true }
temp-dir = { workspace = true } temp-dir = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
[lints]
workspace = true
+22 -6
View File
@@ -58,10 +58,6 @@ pub struct Arguments {
#[arg(long = "geth-start-timeout", default_value = "5000")] #[arg(long = "geth-start-timeout", default_value = "5000")]
pub geth_start_timeout: u64, pub geth_start_timeout: u64,
/// The test network chain ID.
#[arg(short, long = "network-id", default_value = "420420420")]
pub network_id: u64,
/// Configure nodes according to this genesis.json file. /// Configure nodes according to this genesis.json file.
#[arg(long = "genesis", default_value = "genesis.json")] #[arg(long = "genesis", default_value = "genesis.json")]
pub genesis_file: PathBuf, pub genesis_file: PathBuf,
@@ -96,10 +92,19 @@ pub struct Arguments {
#[arg(long, default_value = "1")] #[arg(long, default_value = "1")]
pub number_of_nodes: usize, pub number_of_nodes: usize,
/// Determines the number of threads that will be used. /// Determines the number of tokio worker threads that will be used.
#[arg(long, default_value = "12")] #[arg(
long,
default_value_t = std::thread::available_parallelism()
.map(|n| n.get())
.unwrap_or(1)
)]
pub number_of_threads: usize, pub number_of_threads: usize,
/// Determines the number of concurrent tasks that will be spawned to run tests. Defaults to 20 x the number of nodes.
#[arg(long)]
pub number_concurrent_tasks: Option<usize>,
/// Extract problems back to the test corpus. /// Extract problems back to the test corpus.
#[arg(short, long = "extract-problems")] #[arg(short, long = "extract-problems")]
pub extract_problems: bool, pub extract_problems: bool,
@@ -115,6 +120,10 @@ pub struct Arguments {
/// By default it uses `eth-rpc` binary found in `$PATH`. /// By default it uses `eth-rpc` binary found in `$PATH`.
#[arg(short = 'p', long = "eth_proxy", default_value = "eth-rpc")] #[arg(short = 'p', long = "eth_proxy", default_value = "eth-rpc")]
pub eth_proxy: PathBuf, pub eth_proxy: PathBuf,
/// Controls whether the compilation cache should be invalidated.
#[arg(short, long)]
pub invalidate_compilation_cache: bool,
} }
impl Arguments { impl Arguments {
@@ -134,6 +143,13 @@ impl Arguments {
panic!("should have a workdir configured") panic!("should have a workdir configured")
} }
/// Return the number of concurrent tasks to run. This is provided via the
/// `--number-concurrent-tasks` argument, and otherwise defaults to --number-of-nodes * 20.
pub fn number_of_concurrent_tasks(&self) -> usize {
self.number_concurrent_tasks
.unwrap_or(20 * self.number_of_nodes)
}
/// Try to parse `self.account` into a [PrivateKeySigner], /// Try to parse `self.account` into a [PrivateKeySigner],
/// panicking on error. /// panicking on error.
pub fn wallet(&self) -> EthereumWallet { pub fn wallet(&self) -> EthereumWallet {
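The argument changes above replace a hard-coded thread count with the machine's available parallelism and add an optional concurrency cap that is derived from the node count when omitted. A trimmed-down sketch of that shape, assuming clap's derive API; the field and flag names are illustrative:

```rust
use clap::Parser;

#[derive(Parser, Debug)]
struct Cli {
    /// Number of tokio worker threads; defaults to the machine's available parallelism.
    #[arg(
        long,
        default_value_t = std::thread::available_parallelism().map(|n| n.get()).unwrap_or(1)
    )]
    number_of_threads: usize,

    /// Maximum number of concurrent test tasks; derived from the node count when omitted.
    #[arg(long)]
    number_concurrent_tasks: Option<usize>,

    /// How many nodes to run.
    #[arg(long, default_value = "1")]
    number_of_nodes: usize,
}

impl Cli {
    fn number_of_concurrent_tasks(&self) -> usize {
        // Fall back to a multiple of the node count when no explicit cap was given.
        self.number_concurrent_tasks
            .unwrap_or(20 * self.number_of_nodes)
    }
}

fn main() {
    let cli = Cli::parse();
    println!("running with {} concurrent tasks", cli.number_of_concurrent_tasks());
}
```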
+10
View File
@@ -23,11 +23,21 @@ revive-dt-report = { workspace = true }
alloy = { workspace = true } alloy = { workspace = true }
anyhow = { workspace = true } anyhow = { workspace = true }
bson = { workspace = true }
cacache = { workspace = true }
clap = { workspace = true } clap = { workspace = true }
futures = { workspace = true } futures = { workspace = true }
indexmap = { workspace = true } indexmap = { workspace = true }
once_cell = { workspace = true }
tokio = { workspace = true } tokio = { workspace = true }
tracing = { workspace = true } tracing = { workspace = true }
tracing-appender = { workspace = true }
tracing-subscriber = { workspace = true } tracing-subscriber = { workspace = true }
semver = { workspace = true } semver = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
temp-dir = { workspace = true } temp-dir = { workspace = true }
tempfile = { workspace = true }
[lints]
workspace = true
+262
View File
@@ -0,0 +1,262 @@
//! A wrapper around the compiler which allows for caching of compilation artifacts so that they can
//! be reused between runs.
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::Arc,
};
use futures::FutureExt;
use revive_dt_common::iterators::FilesWithExtensionIterator;
use revive_dt_compiler::{Compiler, CompilerOutput, Mode, SolidityCompiler};
use revive_dt_config::Arguments;
use revive_dt_format::metadata::{ContractIdent, ContractInstance, Metadata};
use alloy::{hex::ToHexExt, json_abi::JsonAbi, primitives::Address};
use anyhow::{Error, Result};
use once_cell::sync::Lazy;
use semver::Version;
use serde::{Deserialize, Serialize};
use tokio::sync::{Mutex, RwLock};
use tracing::{Instrument, debug, debug_span, instrument};
use crate::Platform;
pub struct CachedCompiler(ArtifactsCache);
impl CachedCompiler {
pub async fn new(path: impl AsRef<Path>, invalidate_cache: bool) -> Result<Self> {
let mut cache = ArtifactsCache::new(path);
if invalidate_cache {
cache = cache.with_invalidated_cache().await?;
}
Ok(Self(cache))
}
/// Compiles or gets the compilation artifacts from the cache.
#[instrument(
level = "debug",
skip_all,
fields(
metadata_file_path = %metadata_file_path.as_ref().display(),
%mode,
platform = P::config_id().to_string()
),
err
)]
pub async fn compile_contracts<P: Platform>(
&self,
metadata: &Metadata,
metadata_file_path: impl AsRef<Path>,
mode: &Mode,
config: &Arguments,
deployed_libraries: Option<&HashMap<ContractInstance, (ContractIdent, Address, JsonAbi)>>,
) -> Result<(CompilerOutput, Version)> {
static CACHE_KEY_LOCK: Lazy<RwLock<HashMap<CacheKey, Arc<Mutex<()>>>>> =
Lazy::new(Default::default);
let compiler_version_or_requirement = mode.compiler_version_to_use(config.solc.clone());
let compiler_path = <P::Compiler as SolidityCompiler>::get_compiler_executable(
config,
compiler_version_or_requirement,
)
.await?;
let compiler_version = <P::Compiler as SolidityCompiler>::new(compiler_path.clone())
.version()
.await?;
let cache_key = CacheKey {
platform_key: P::config_id().to_string(),
compiler_version: compiler_version.clone(),
metadata_file_path: metadata_file_path.as_ref().to_path_buf(),
solc_mode: mode.clone(),
};
let compilation_callback = || {
async move {
compile_contracts::<P>(
metadata.directory()?,
compiler_path,
metadata.files_to_compile()?,
mode,
deployed_libraries,
)
.map(|compilation_result| compilation_result.map(CacheValue::new))
.await
}
.instrument(debug_span!(
"Running compilation for the cache key",
cache_key.platform_key = %cache_key.platform_key,
cache_key.compiler_version = %cache_key.compiler_version,
cache_key.metadata_file_path = %cache_key.metadata_file_path.display(),
cache_key.solc_mode = %cache_key.solc_mode,
))
};
let compiled_contracts = match deployed_libraries {
// If deployed libraries have been specified then we will re-compile the contract as it
// means that linking is required in this case.
Some(_) => {
debug!("Deployed libraries defined, recompilation must take place");
debug!("Cache miss");
compilation_callback().await?.compiler_output
}
// If no deployed libraries are specified then we can follow the cached flow and attempt
// to lookup the compilation artifacts in the cache.
None => {
debug!("Deployed libraries undefined, attempting to make use of cache");
// Lock this specific cache key so that we do not end up in an inconsistent state. When
// multiple cases come in asking for the same compilation artifacts, only one of them
// should trigger a compilation on a cache miss. Hence, the lock here.
let read_guard = CACHE_KEY_LOCK.read().await;
let mutex = match read_guard.get(&cache_key).cloned() {
Some(value) => value,
None => {
drop(read_guard);
CACHE_KEY_LOCK
.write()
.await
.entry(cache_key.clone())
.or_default()
.clone()
}
};
let _guard = mutex.lock().await;
self.0
.get_or_insert_with(&cache_key, compilation_callback)
.await
.map(|value| value.compiler_output)?
}
};
Ok((compiled_contracts, compiler_version))
}
}
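`compile_contracts` serializes cache fills per cache key: a global map holds one async mutex per key, so several test cases asking for the same artifacts wait on a single compilation instead of each triggering their own. A minimal sketch of that double-checked pattern, assuming `tokio` and `once_cell`; the key type and the computed value are illustrative:

```rust
use std::{collections::HashMap, sync::Arc};

use once_cell::sync::Lazy;
use tokio::sync::{Mutex, RwLock};

static KEY_LOCKS: Lazy<RwLock<HashMap<String, Arc<Mutex<()>>>>> = Lazy::new(Default::default);

async fn lock_for_key(key: &str) -> Arc<Mutex<()>> {
    // Fast path: the per-key mutex already exists.
    if let Some(lock) = KEY_LOCKS.read().await.get(key).cloned() {
        return lock;
    }
    // Slow path: take the write lock and insert the per-key mutex if still absent.
    KEY_LOCKS
        .write()
        .await
        .entry(key.to_owned())
        .or_default()
        .clone()
}

async fn get_or_compute(key: &str) -> String {
    let lock = lock_for_key(key).await;
    let _guard = lock.lock().await;
    // Only one task per key reaches this point at a time, so a cache miss here
    // triggers a single expensive computation rather than one per caller.
    format!("artifacts for {key}")
}
```

One trade-off of this shape is that the map of per-key mutexes only grows; that is acceptable for a bounded key space such as (platform, compiler version, metadata file, mode).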
async fn compile_contracts<P: Platform>(
metadata_directory: impl AsRef<Path>,
compiler_path: impl AsRef<Path>,
mut files_to_compile: impl Iterator<Item = PathBuf>,
mode: &Mode,
deployed_libraries: Option<&HashMap<ContractInstance, (ContractIdent, Address, JsonAbi)>>,
) -> Result<CompilerOutput> {
let all_sources_in_dir = FilesWithExtensionIterator::new(metadata_directory.as_ref())
.with_allowed_extension("sol")
.with_use_cached_fs(true)
.collect::<Vec<_>>();
Compiler::<P::Compiler>::new()
.with_allow_path(metadata_directory)
// Handling the modes
.with_optimization(mode.optimize_setting)
.with_pipeline(mode.pipeline)
// Adding the contract sources to the compiler.
.try_then(|compiler| {
files_to_compile.try_fold(compiler, |compiler, path| compiler.with_source(path))
})?
// Adding the deployed libraries to the compiler.
.then(|compiler| {
deployed_libraries
.iter()
.flat_map(|value| value.iter())
.map(|(instance, (ident, address, abi))| (instance, ident, address, abi))
.flat_map(|(_, ident, address, _)| {
all_sources_in_dir
.iter()
.map(move |path| (ident, address, path))
})
.fold(compiler, |compiler, (ident, address, path)| {
compiler.with_library(path, ident.as_str(), *address)
})
})
.try_build(compiler_path)
.await
}
struct ArtifactsCache {
path: PathBuf,
}
impl ArtifactsCache {
pub fn new(path: impl AsRef<Path>) -> Self {
Self {
path: path.as_ref().to_path_buf(),
}
}
#[instrument(level = "debug", skip_all, err)]
pub async fn with_invalidated_cache(self) -> Result<Self> {
cacache::clear(self.path.as_path())
.await
.map_err(Into::<Error>::into)?;
Ok(self)
}
#[instrument(level = "debug", skip_all, err)]
pub async fn insert(&self, key: &CacheKey, value: &CacheValue) -> Result<()> {
let key = bson::to_vec(key)?;
let value = bson::to_vec(value)?;
cacache::write(self.path.as_path(), key.encode_hex(), value).await?;
Ok(())
}
pub async fn get(&self, key: &CacheKey) -> Option<CacheValue> {
let key = bson::to_vec(key).ok()?;
let value = cacache::read(self.path.as_path(), key.encode_hex())
.await
.ok()?;
let value = bson::from_slice::<CacheValue>(&value).ok()?;
Some(value)
}
#[instrument(level = "debug", skip_all, err)]
pub async fn get_or_insert_with(
&self,
key: &CacheKey,
callback: impl AsyncFnOnce() -> Result<CacheValue>,
) -> Result<CacheValue> {
match self.get(key).await {
Some(value) => {
debug!("Cache hit");
Ok(value)
}
None => {
debug!("Cache miss");
let value = callback().await?;
self.insert(key, &value).await?;
Ok(value)
}
}
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
struct CacheKey {
/// The platform name that this artifact was compiled for. For example, this could be EVM or
/// PVM.
platform_key: String,
/// The version of the compiler that was used to compile the artifacts.
compiler_version: Version,
/// The path of the metadata file that the compilation artifacts are for.
metadata_file_path: PathBuf,
/// The mode that the compilation artifacts were compiled with.
solc_mode: Mode,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
struct CacheValue {
/// The compiler output from the compilation run.
compiler_output: CompilerOutput,
}
impl CacheValue {
pub fn new(compiler_output: CompilerOutput) -> Self {
Self { compiler_output }
}
}
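`ArtifactsCache` persists compiler output on disk: the key struct is BSON-encoded and hex-encoded into a stable string, and the BSON-encoded value is stored under it via `cacache`. A rough sketch of such a get-or-insert helper, assuming the `cacache`, `bson`, `hex`, `serde`, and `anyhow` crates (the repository uses alloy's hex encoding instead of the `hex` crate); the key and value types here are placeholders:

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Clone)]
struct Key {
    platform: String,
    compiler_version: String,
}

#[derive(Serialize, Deserialize, Clone)]
struct Value {
    artifact: Vec<u8>,
}

async fn get_or_insert(
    cache_dir: &str,
    key: &Key,
    compute: impl std::future::Future<Output = anyhow::Result<Value>>,
) -> anyhow::Result<Value> {
    // The key is serialized to BSON and hex-encoded so it becomes a stable string.
    let key_string = hex::encode(bson::to_vec(key)?);

    // Cache hit: decode the stored BSON back into the value type.
    if let Ok(bytes) = cacache::read(cache_dir, &key_string).await {
        return Ok(bson::from_slice(&bytes)?);
    }

    // Cache miss: run the expensive computation once and persist the result.
    let value = compute.await?;
    cacache::write(cache_dir, &key_string, bson::to_vec(&value)?).await?;
    Ok(value)
}
```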
+108 -164
View File
@@ -11,31 +11,29 @@ use alloy::network::{Ethereum, TransactionBuilder};
use alloy::primitives::U256; use alloy::primitives::U256;
use alloy::rpc::types::TransactionReceipt; use alloy::rpc::types::TransactionReceipt;
use alloy::rpc::types::trace::geth::{ use alloy::rpc::types::trace::geth::{
CallFrame, GethDebugBuiltInTracerType, GethDebugTracerType, GethDebugTracingOptions, GethTrace, CallFrame, GethDebugBuiltInTracerType, GethDebugTracerConfig, GethDebugTracerType,
PreStateConfig, GethDebugTracingOptions, GethTrace, PreStateConfig,
}; };
use alloy::{ use alloy::{
primitives::Address, primitives::Address,
rpc::types::{ rpc::types::{TransactionRequest, trace::geth::DiffMode},
TransactionRequest,
trace::geth::{AccountState, DiffMode},
},
}; };
use anyhow::Context; use anyhow::Context;
use futures::TryStreamExt;
use indexmap::IndexMap; use indexmap::IndexMap;
use revive_dt_format::traits::{ResolutionContext, ResolverApi}; use revive_dt_format::traits::{ResolutionContext, ResolverApi};
use semver::Version; use semver::Version;
use revive_dt_format::case::{Case, CaseIdx}; use revive_dt_format::case::Case;
use revive_dt_format::input::{ use revive_dt_format::input::{
BalanceAssertion, Calldata, EtherValue, Expected, ExpectedOutput, Input, Method, BalanceAssertion, Calldata, EtherValue, Expected, ExpectedOutput, Input, Method, StepIdx,
StorageEmptyAssertion, StorageEmptyAssertion,
}; };
use revive_dt_format::metadata::{ContractInstance, ContractPathAndIdent}; use revive_dt_format::metadata::{ContractIdent, ContractInstance, ContractPathAndIdent};
use revive_dt_format::{input::Step, metadata::Metadata}; use revive_dt_format::{input::Step, metadata::Metadata};
use revive_dt_node::Node;
use revive_dt_node_interaction::EthereumNode; use revive_dt_node_interaction::EthereumNode;
use tracing::Instrument; use tokio::try_join;
use tracing::{Instrument, info, info_span, instrument};
use crate::Platform; use crate::Platform;
@@ -44,7 +42,7 @@ pub struct CaseState<T: Platform> {
compiled_contracts: HashMap<PathBuf, HashMap<String, (String, JsonAbi)>>, compiled_contracts: HashMap<PathBuf, HashMap<String, (String, JsonAbi)>>,
/// This map stores the contracts deployments for this case. /// This map stores the contracts deployments for this case.
deployed_contracts: HashMap<ContractInstance, (Address, JsonAbi)>, deployed_contracts: HashMap<ContractInstance, (ContractIdent, Address, JsonAbi)>,
/// This map stores the variables used for each one of the cases contained in the metadata /// This map stores the variables used for each one of the cases contained in the metadata
/// file. /// file.
@@ -63,7 +61,7 @@ where
pub fn new( pub fn new(
compiler_version: Version, compiler_version: Version,
compiled_contracts: HashMap<PathBuf, HashMap<String, (String, JsonAbi)>>, compiled_contracts: HashMap<PathBuf, HashMap<String, (String, JsonAbi)>>,
deployed_contracts: HashMap<ContractInstance, (Address, JsonAbi)>, deployed_contracts: HashMap<ContractInstance, (ContractIdent, Address, JsonAbi)>,
) -> Self { ) -> Self {
Self { Self {
compiled_contracts, compiled_contracts,
@@ -77,38 +75,38 @@ where
pub async fn handle_step( pub async fn handle_step(
&mut self, &mut self,
metadata: &Metadata, metadata: &Metadata,
case_idx: CaseIdx,
step: &Step, step: &Step,
node: &T::Blockchain, node: &T::Blockchain,
) -> anyhow::Result<StepOutput> { ) -> anyhow::Result<StepOutput> {
match step { match step {
Step::FunctionCall(input) => { Step::FunctionCall(input) => {
let (receipt, geth_trace, diff_mode) = let (receipt, geth_trace, diff_mode) =
self.handle_input(metadata, case_idx, input, node).await?; self.handle_input(metadata, input, node).await?;
Ok(StepOutput::FunctionCall(receipt, geth_trace, diff_mode)) Ok(StepOutput::FunctionCall(receipt, geth_trace, diff_mode))
} }
Step::BalanceAssertion(balance_assertion) => { Step::BalanceAssertion(balance_assertion) => {
self.handle_balance_assertion(metadata, case_idx, balance_assertion, node) self.handle_balance_assertion(metadata, balance_assertion, node)
.await?; .await?;
Ok(StepOutput::BalanceAssertion) Ok(StepOutput::BalanceAssertion)
} }
Step::StorageEmptyAssertion(storage_empty) => { Step::StorageEmptyAssertion(storage_empty) => {
self.handle_storage_empty(metadata, case_idx, storage_empty, node) self.handle_storage_empty(metadata, storage_empty, node)
.await?; .await?;
Ok(StepOutput::StorageEmptyAssertion) Ok(StepOutput::StorageEmptyAssertion)
} }
} }
.inspect(|_| info!("Step Succeeded"))
} }
#[instrument(level = "info", name = "Handling Input", skip_all)]
pub async fn handle_input( pub async fn handle_input(
&mut self, &mut self,
metadata: &Metadata, metadata: &Metadata,
case_idx: CaseIdx,
input: &Input, input: &Input,
node: &T::Blockchain, node: &T::Blockchain,
) -> anyhow::Result<(TransactionReceipt, GethTrace, DiffMode)> { ) -> anyhow::Result<(TransactionReceipt, GethTrace, DiffMode)> {
let deployment_receipts = self let deployment_receipts = self
.handle_input_contract_deployment(metadata, case_idx, input, node) .handle_input_contract_deployment(metadata, input, node)
.await?; .await?;
let execution_receipt = self let execution_receipt = self
.handle_input_execution(input, deployment_receipts, node) .handle_input_execution(input, deployment_receipts, node)
@@ -117,16 +115,17 @@ where
.handle_input_call_frame_tracing(&execution_receipt, node) .handle_input_call_frame_tracing(&execution_receipt, node)
.await?; .await?;
self.handle_input_variable_assignment(input, &tracing_result)?; self.handle_input_variable_assignment(input, &tracing_result)?;
self.handle_input_expectations(input, &execution_receipt, node, &tracing_result) let (_, (geth_trace, diff_mode)) = try_join!(
.await?; self.handle_input_expectations(input, &execution_receipt, node, &tracing_result),
self.handle_input_diff(case_idx, execution_receipt, node) self.handle_input_diff(&execution_receipt, node)
.await )?;
Ok((execution_receipt, geth_trace, diff_mode))
} }
#[instrument(level = "info", name = "Handling Balance Assertion", skip_all)]
pub async fn handle_balance_assertion( pub async fn handle_balance_assertion(
&mut self, &mut self,
metadata: &Metadata, metadata: &Metadata,
_: CaseIdx,
balance_assertion: &BalanceAssertion, balance_assertion: &BalanceAssertion,
node: &T::Blockchain, node: &T::Blockchain,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
@@ -137,10 +136,10 @@ where
Ok(()) Ok(())
} }
#[instrument(level = "info", name = "Handling Storage Assertion", skip_all)]
pub async fn handle_storage_empty( pub async fn handle_storage_empty(
&mut self, &mut self,
metadata: &Metadata, metadata: &Metadata,
_: CaseIdx,
storage_empty: &StorageEmptyAssertion, storage_empty: &StorageEmptyAssertion,
node: &T::Blockchain, node: &T::Blockchain,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
@@ -152,20 +151,13 @@ where
} }
/// Handles the contract deployment for a given input performing it if it needs to be performed. /// Handles the contract deployment for a given input performing it if it needs to be performed.
#[instrument(level = "info", skip_all)]
async fn handle_input_contract_deployment( async fn handle_input_contract_deployment(
&mut self, &mut self,
metadata: &Metadata, metadata: &Metadata,
case_idx: CaseIdx,
input: &Input, input: &Input,
node: &T::Blockchain, node: &T::Blockchain,
) -> anyhow::Result<HashMap<ContractInstance, TransactionReceipt>> { ) -> anyhow::Result<HashMap<ContractInstance, TransactionReceipt>> {
let span = tracing::debug_span!(
"Handling contract deployment",
?case_idx,
instance = ?input.instance
);
let _guard = span.enter();
let mut instances_we_must_deploy = IndexMap::<ContractInstance, bool>::new(); let mut instances_we_must_deploy = IndexMap::<ContractInstance, bool>::new();
for instance in input.find_all_contract_instances().into_iter() { for instance in input.find_all_contract_instances().into_iter() {
if !self.deployed_contracts.contains_key(&instance) { if !self.deployed_contracts.contains_key(&instance) {
@@ -177,11 +169,6 @@ where
instances_we_must_deploy.insert(input.instance.clone(), true); instances_we_must_deploy.insert(input.instance.clone(), true);
} }
tracing::debug!(
instances_to_deploy = instances_we_must_deploy.len(),
"Computed the number of required deployments for input"
);
let mut receipts = HashMap::new(); let mut receipts = HashMap::new();
for (instance, deploy_with_constructor_arguments) in instances_we_must_deploy.into_iter() { for (instance, deploy_with_constructor_arguments) in instances_we_must_deploy.into_iter() {
let calldata = deploy_with_constructor_arguments.then_some(&input.calldata); let calldata = deploy_with_constructor_arguments.then_some(&input.calldata);
@@ -208,6 +195,7 @@ where
} }
/// Handles the execution of the input in terms of the calls that need to be made. /// Handles the execution of the input in terms of the calls that need to be made.
#[instrument(level = "info", skip_all)]
async fn handle_input_execution( async fn handle_input_execution(
&mut self, &mut self,
input: &Input, input: &Input,
@@ -225,33 +213,21 @@ where
.legacy_transaction(node, self.default_resolution_context()) .legacy_transaction(node, self.default_resolution_context())
.await .await
{ {
Ok(tx) => { Ok(tx) => tx,
tracing::debug!("Legacy transaction data: {tx:#?}");
tx
}
Err(err) => { Err(err) => {
tracing::error!("Failed to construct legacy transaction: {err:?}");
return Err(err); return Err(err);
} }
}; };
tracing::trace!("Executing transaction for input: {input:?}");
match node.execute_transaction(tx).await { match node.execute_transaction(tx).await {
Ok(receipt) => Ok(receipt), Ok(receipt) => Ok(receipt),
Err(err) => { Err(err) => Err(err),
tracing::error!(
"Failed to execute transaction when executing the contract: {}, {:?}",
&*input.instance,
err
);
Err(err)
}
} }
} }
} }
} }
#[instrument(level = "info", skip_all)]
async fn handle_input_call_frame_tracing( async fn handle_input_call_frame_tracing(
&self, &self,
execution_receipt: &TransactionReceipt, execution_receipt: &TransactionReceipt,
@@ -263,6 +239,14 @@ where
tracer: Some(GethDebugTracerType::BuiltInTracer( tracer: Some(GethDebugTracerType::BuiltInTracer(
GethDebugBuiltInTracerType::CallTracer, GethDebugBuiltInTracerType::CallTracer,
)), )),
tracer_config: GethDebugTracerConfig(serde_json::json! {{
"onlyTopCall": true,
"withLog": false,
"withStorage": false,
"withMemory": false,
"withStack": false,
"withReturnData": true
}}),
..Default::default() ..Default::default()
}, },
) )
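The tracer configuration added above restricts the call tracer to the top-level frame and its return data, which keeps the returned trace small while still letting the expectation checks read the return or revert payload. A sketch of building those options with alloy's geth trace types, assuming the relevant `alloy` rpc-types features are enabled:

```rust
use alloy::rpc::types::trace::geth::{
    GethDebugBuiltInTracerType, GethDebugTracerConfig, GethDebugTracerType,
    GethDebugTracingOptions,
};

fn call_tracer_options() -> GethDebugTracingOptions {
    GethDebugTracingOptions {
        tracer: Some(GethDebugTracerType::BuiltInTracer(
            GethDebugBuiltInTracerType::CallTracer,
        )),
        // Only the top-level call frame and its return data are requested, which keeps
        // the trace payload small for simple assertions.
        tracer_config: GethDebugTracerConfig(serde_json::json!({
            "onlyTopCall": true,
            "withReturnData": true
        })),
        ..Default::default()
    }
}
```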
@@ -274,6 +258,7 @@ where
}) })
} }
#[instrument(level = "info", skip_all)]
fn handle_input_variable_assignment( fn handle_input_variable_assignment(
&mut self, &mut self,
input: &Input, input: &Input,
@@ -304,16 +289,14 @@ where
Ok(()) Ok(())
} }
#[instrument(level = "info", skip_all)]
async fn handle_input_expectations( async fn handle_input_expectations(
&mut self, &self,
input: &Input, input: &Input,
execution_receipt: &TransactionReceipt, execution_receipt: &TransactionReceipt,
resolver: &impl ResolverApi, resolver: &impl ResolverApi,
tracing_result: &CallFrame, tracing_result: &CallFrame,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let span = tracing::info_span!("Handling input expectations");
let _guard = span.enter();
// Resolving the `input.expected` into a series of expectations that we can then assert on. // Resolving the `input.expected` into a series of expectations that we can then assert on.
let mut expectations = match input { let mut expectations = match input {
Input { Input {
@@ -342,24 +325,25 @@ where
} }
} }
for expectation in expectations.iter() { futures::stream::iter(expectations.into_iter().map(Ok))
self.handle_input_expectation_item( .try_for_each_concurrent(None, |expectation| async move {
execution_receipt, self.handle_input_expectation_item(
resolver, execution_receipt,
expectation, resolver,
tracing_result, expectation,
) tracing_result,
.await?; )
} .await
})
Ok(()) .await
} }
#[instrument(level = "info", skip_all)]
async fn handle_input_expectation_item( async fn handle_input_expectation_item(
&mut self, &self,
execution_receipt: &TransactionReceipt, execution_receipt: &TransactionReceipt,
resolver: &impl ResolverApi, resolver: &impl ResolverApi,
expectation: &ExpectedOutput, expectation: ExpectedOutput,
tracing_result: &CallFrame, tracing_result: &CallFrame,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
if let Some(ref version_requirement) = expectation.compiler_version { if let Some(ref version_requirement) = expectation.compiler_version {
@@ -497,15 +481,12 @@ where
Ok(()) Ok(())
} }
#[instrument(level = "info", skip_all)]
async fn handle_input_diff( async fn handle_input_diff(
&mut self, &self,
_: CaseIdx, execution_receipt: &TransactionReceipt,
execution_receipt: TransactionReceipt,
node: &T::Blockchain, node: &T::Blockchain,
) -> anyhow::Result<(TransactionReceipt, GethTrace, DiffMode)> { ) -> anyhow::Result<(GethTrace, DiffMode)> {
let span = tracing::info_span!("Handling input diff");
let _guard = span.enter();
let trace_options = GethDebugTracingOptions::prestate_tracer(PreStateConfig { let trace_options = GethDebugTracingOptions::prestate_tracer(PreStateConfig {
diff_mode: Some(true), diff_mode: Some(true),
disable_code: None, disable_code: None,
@@ -513,13 +494,14 @@ where
}); });
let trace = node let trace = node
.trace_transaction(&execution_receipt, trace_options) .trace_transaction(execution_receipt, trace_options)
.await?; .await?;
let diff = node.state_diff(&execution_receipt).await?; let diff = node.state_diff(execution_receipt).await?;
Ok((execution_receipt, trace, diff)) Ok((trace, diff))
} }
#[instrument(level = "info", skip_all)]
pub async fn handle_balance_assertion_contract_deployment( pub async fn handle_balance_assertion_contract_deployment(
&mut self, &mut self,
metadata: &Metadata, metadata: &Metadata,
@@ -528,7 +510,7 @@ where
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let Some(instance) = balance_assertion let Some(instance) = balance_assertion
.address .address
.strip_prefix(".address") .strip_suffix(".address")
.map(ContractInstance::new) .map(ContractInstance::new)
else { else {
return Ok(()); return Ok(());
@@ -545,11 +527,13 @@ where
Ok(()) Ok(())
} }
#[instrument(level = "info", skip_all)]
pub async fn handle_balance_assertion_execution( pub async fn handle_balance_assertion_execution(
&mut self, &mut self,
BalanceAssertion { BalanceAssertion {
address: address_string, address: address_string,
expected_balance: amount, expected_balance: amount,
..
}: &BalanceAssertion, }: &BalanceAssertion,
node: &T::Blockchain, node: &T::Blockchain,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
@@ -579,6 +563,7 @@ where
Ok(()) Ok(())
} }
#[instrument(level = "info", skip_all)]
pub async fn handle_storage_empty_assertion_contract_deployment( pub async fn handle_storage_empty_assertion_contract_deployment(
&mut self, &mut self,
metadata: &Metadata, metadata: &Metadata,
@@ -587,7 +572,7 @@ where
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let Some(instance) = storage_empty_assertion let Some(instance) = storage_empty_assertion
.address .address
.strip_prefix(".address") .strip_suffix(".address")
.map(ContractInstance::new) .map(ContractInstance::new)
else { else {
return Ok(()); return Ok(());
@@ -604,11 +589,13 @@ where
Ok(()) Ok(())
} }
#[instrument(level = "info", skip_all)]
pub async fn handle_storage_empty_assertion_execution( pub async fn handle_storage_empty_assertion_execution(
&mut self, &mut self,
StorageEmptyAssertion { StorageEmptyAssertion {
address: address_string, address: address_string,
is_storage_empty, is_storage_empty,
..
}: &StorageEmptyAssertion, }: &StorageEmptyAssertion,
node: &T::Blockchain, node: &T::Blockchain,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
@@ -655,7 +642,7 @@ where
value: Option<EtherValue>, value: Option<EtherValue>,
node: &T::Blockchain, node: &T::Blockchain,
) -> anyhow::Result<(Address, JsonAbi, Option<TransactionReceipt>)> { ) -> anyhow::Result<(Address, JsonAbi, Option<TransactionReceipt>)> {
if let Some((address, abi)) = self.deployed_contracts.get(contract_instance) { if let Some((_, address, abi)) = self.deployed_contracts.get(contract_instance) {
return Ok((*address, abi.clone(), None)); return Ok((*address, abi.clone(), None));
} }
@@ -664,7 +651,6 @@ where
contract_ident, contract_ident,
}) = metadata.contract_sources()?.remove(contract_instance) }) = metadata.contract_sources()?.remove(contract_instance)
else { else {
tracing::error!("Contract source not found for instance");
anyhow::bail!( anyhow::bail!(
"Contract source not found for instance {:?}", "Contract source not found for instance {:?}",
contract_instance contract_instance
@@ -677,11 +663,6 @@ where
.and_then(|source_file_contracts| source_file_contracts.get(contract_ident.as_ref())) .and_then(|source_file_contracts| source_file_contracts.get(contract_ident.as_ref()))
.cloned() .cloned()
else { else {
tracing::error!(
contract_source_path = contract_source_path.display().to_string(),
contract_ident = contract_ident.as_ref(),
"Failed to find information for contract"
);
anyhow::bail!( anyhow::bail!(
"Failed to find information for contract {:?}", "Failed to find information for contract {:?}",
contract_instance contract_instance
@@ -730,7 +711,6 @@ where
}; };
let Some(address) = receipt.contract_address else { let Some(address) = receipt.contract_address else {
tracing::error!("Contract deployment transaction didn't return an address");
anyhow::bail!("Contract deployment didn't return an address"); anyhow::bail!("Contract deployment didn't return an address");
}; };
tracing::info!( tracing::info!(
@@ -739,8 +719,10 @@ where
"Deployed contract" "Deployed contract"
); );
self.deployed_contracts self.deployed_contracts.insert(
.insert(contract_instance.clone(), (address, abi.clone())); contract_instance.clone(),
(contract_ident, address, abi.clone()),
);
Ok((address, abi, Some(receipt))) Ok((address, abi, Some(receipt)))
} }
@@ -755,7 +737,6 @@ where
pub struct CaseDriver<'a, Leader: Platform, Follower: Platform> { pub struct CaseDriver<'a, Leader: Platform, Follower: Platform> {
metadata: &'a Metadata, metadata: &'a Metadata,
case: &'a Case, case: &'a Case,
case_idx: CaseIdx,
leader_node: &'a Leader::Blockchain, leader_node: &'a Leader::Blockchain,
follower_node: &'a Follower::Blockchain, follower_node: &'a Follower::Blockchain,
leader_state: CaseState<Leader>, leader_state: CaseState<Leader>,
@@ -771,7 +752,6 @@ where
pub fn new( pub fn new(
metadata: &'a Metadata, metadata: &'a Metadata,
case: &'a Case, case: &'a Case,
case_idx: impl Into<CaseIdx>,
leader_node: &'a L::Blockchain, leader_node: &'a L::Blockchain,
follower_node: &'a F::Blockchain, follower_node: &'a F::Blockchain,
leader_state: CaseState<L>, leader_state: CaseState<L>,
@@ -780,7 +760,6 @@ where
Self { Self {
metadata, metadata,
case, case,
case_idx: case_idx.into(),
leader_node, leader_node,
follower_node, follower_node,
leader_state, leader_state,
@@ -788,79 +767,44 @@ where
} }
} }
pub fn trace_diff_mode(label: &str, diff: &DiffMode) { #[instrument(level = "info", name = "Executing Case", skip_all)]
tracing::trace!("{label} - PRE STATE:");
for (addr, state) in &diff.pre {
Self::trace_account_state(" [pre]", addr, state);
}
tracing::trace!("{label} - POST STATE:");
for (addr, state) in &diff.post {
Self::trace_account_state(" [post]", addr, state);
}
}
fn trace_account_state(prefix: &str, addr: &Address, state: &AccountState) {
tracing::trace!("{prefix} 0x{addr:x}");
if let Some(balance) = &state.balance {
tracing::trace!("{prefix} balance: {balance}");
}
if let Some(nonce) = &state.nonce {
tracing::trace!("{prefix} nonce: {nonce}");
}
if let Some(code) = &state.code {
tracing::trace!("{prefix} code: {code}");
}
}
pub async fn execute(&mut self) -> anyhow::Result<usize> { pub async fn execute(&mut self) -> anyhow::Result<usize> {
if !self
.leader_node
.matches_target(self.metadata.targets.as_deref())
|| !self
.follower_node
.matches_target(self.metadata.targets.as_deref())
{
tracing::warn!(
targets = ?self.metadata.targets,
"Either the leader or follower node do not support the targets of the file"
);
return Ok(0);
}
let mut steps_executed = 0; let mut steps_executed = 0;
for (step_idx, step) in self.case.steps_iterator().enumerate() { for (step_idx, step) in self
let tracing_span = tracing::info_span!("Handling input", step_idx); .case
.steps_iterator()
.enumerate()
.map(|(idx, v)| (StepIdx::new(idx), v))
{
let (leader_step_output, follower_step_output) = try_join!(
self.leader_state
.handle_step(self.metadata, &step, self.leader_node)
.instrument(info_span!(
"Handling Step",
%step_idx,
target = "Leader",
)),
self.follower_state
.handle_step(self.metadata, &step, self.follower_node)
.instrument(info_span!(
"Handling Step",
%step_idx,
target = "Follower",
))
)?;
let leader_step_output = self
.leader_state
.handle_step(self.metadata, self.case_idx, &step, self.leader_node)
.instrument(tracing_span.clone())
.await?;
let follower_step_output = self
.follower_state
.handle_step(self.metadata, self.case_idx, &step, self.follower_node)
.instrument(tracing_span)
.await?;
match (leader_step_output, follower_step_output) { match (leader_step_output, follower_step_output) {
( (StepOutput::FunctionCall(..), StepOutput::FunctionCall(..)) => {
StepOutput::FunctionCall(leader_receipt, _, leader_diff), // TODO: We need to actually work out how/if we will compare the diff between
StepOutput::FunctionCall(follower_receipt, _, follower_diff), // the leader and the follower. The diffs are almost guaranteed to be different
) => { // from leader and follower and therefore without an actual strategy for this
if leader_diff == follower_diff { // we have something that's guaranteed to fail. Even a simple call to some
tracing::debug!("State diffs match between leader and follower."); // contract will produce two non-equal diffs because on the leader the contract
} else { // has address X and on the follower it has address Y. On the leader contract X
tracing::debug!("State diffs mismatch between leader and follower."); // contains address A in the state and on the follower it contains address B. So
Self::trace_diff_mode("Leader", &leader_diff); // this isn't exactly a straightforward thing to do and I'm not even sure that
Self::trace_diff_mode("Follower", &follower_diff); // it's possible to do. Once we have an actual strategy for doing the diffs we
} // will implement it here. Until then, this remains empty.
if leader_receipt.logs() != follower_receipt.logs() {
tracing::debug!("Log/event mismatch between leader and follower.");
tracing::trace!("Leader logs: {:?}", leader_receipt.logs());
tracing::trace!("Follower logs: {:?}", follower_receipt.logs());
}
} }
(StepOutput::BalanceAssertion, StepOutput::BalanceAssertion) => {} (StepOutput::BalanceAssertion, StepOutput::BalanceAssertion) => {}
(StepOutput::StorageEmptyAssertion, StepOutput::StorageEmptyAssertion) => {} (StepOutput::StorageEmptyAssertion, StepOutput::StorageEmptyAssertion) => {}
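The driver now executes each step against the leader and the follower concurrently with `try_join!`, so one slow node no longer doubles the wall-clock time of a case and the first error aborts both sides. A minimal sketch of that shape, assuming `tokio` and `anyhow`; `run_step` is a hypothetical stand-in for `handle_step`:

```rust
use tokio::try_join;

async fn run_step(target: &str, step_idx: usize) -> anyhow::Result<String> {
    // Hypothetical work per node; a real implementation would submit transactions
    // and collect receipts here.
    Ok(format!("{target} finished step {step_idx}"))
}

async fn run_both(step_idx: usize) -> anyhow::Result<(String, String)> {
    // Both futures make progress at the same time, and the pair fails as soon as
    // either side returns an error.
    try_join!(run_step("leader", step_idx), run_step("follower", step_idx))
}
```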
+497 -503
View File
File diff suppressed because it is too large Load Diff
+7
View File
@@ -11,10 +11,14 @@ rust-version.workspace = true
[dependencies] [dependencies]
revive-dt-common = { workspace = true } revive-dt-common = { workspace = true }
revive-common = { workspace = true }
alloy = { workspace = true } alloy = { workspace = true }
alloy-primitives = { workspace = true } alloy-primitives = { workspace = true }
alloy-sol-types = { workspace = true } alloy-sol-types = { workspace = true }
anyhow = { workspace = true } anyhow = { workspace = true }
futures = { workspace = true }
regex = { workspace = true }
tracing = { workspace = true } tracing = { workspace = true }
semver = { workspace = true } semver = { workspace = true }
serde = { workspace = true, features = ["derive"] } serde = { workspace = true, features = ["derive"] }
@@ -22,3 +26,6 @@ serde_json = { workspace = true }
[dev-dependencies] [dev-dependencies]
tokio = { workspace = true } tokio = { workspace = true }
[lints]
workspace = true
+23 -4
View File
@@ -1,21 +1,33 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use revive_dt_common::macros::define_wrapper_type; use revive_dt_common::{macros::define_wrapper_type, types::Mode};
use crate::{ use crate::{
input::{Expected, Step}, input::{Expected, Step},
mode::Mode, mode::ParsedMode,
}; };
#[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq)] #[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq)]
pub struct Case { pub struct Case {
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>, pub name: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub comment: Option<String>, pub comment: Option<String>,
pub modes: Option<Vec<Mode>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub modes: Option<Vec<ParsedMode>>,
#[serde(rename = "inputs")] #[serde(rename = "inputs")]
pub steps: Vec<Step>, pub steps: Vec<Step>,
#[serde(skip_serializing_if = "Option::is_none")]
pub group: Option<String>, pub group: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub expected: Option<Expected>, pub expected: Option<Expected>,
#[serde(skip_serializing_if = "Option::is_none")]
pub ignore: Option<bool>, pub ignore: Option<bool>,
} }
@@ -48,10 +60,17 @@ impl Case {
} }
}) })
} }
pub fn solc_modes(&self) -> Vec<Mode> {
match &self.modes {
Some(modes) => ParsedMode::many_to_modes(modes.iter()).collect(),
None => Mode::all().collect(),
}
}
} }
define_wrapper_type!( define_wrapper_type!(
/// A wrapper type for the index of test cases found in metadata file. /// A wrapper type for the index of test cases found in metadata file.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct CaseIdx(usize); pub struct CaseIdx(usize) impl Display;
); );
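The `skip_serializing_if = "Option::is_none"` attributes above mean that optional case fields are simply omitted when metadata is written back out, rather than serialized as `null`. A small sketch of the effect, assuming `serde` and `serde_json`; the struct is a trimmed-down illustration, not the full `Case` type:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, Default, Serialize, Deserialize)]
struct Case {
    #[serde(skip_serializing_if = "Option::is_none")]
    name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    comment: Option<String>,
    #[serde(rename = "inputs")]
    steps: Vec<String>,
}

fn main() -> anyhow::Result<()> {
    let case = Case {
        name: Some("transfer".to_owned()),
        ..Default::default()
    };
    // `comment` is absent from the output instead of appearing as `"comment": null`.
    println!("{}", serde_json::to_string_pretty(&case)?);
    Ok(())
}
```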
+102 -72
View File
@@ -3,97 +3,127 @@ use std::{
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
use revive_dt_common::iterators::FilesWithExtensionIterator;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::{debug, info};
use crate::metadata::MetadataFile; use crate::metadata::{Metadata, MetadataFile};
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq, Hash)] #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
pub struct Corpus { #[serde(untagged)]
pub name: String, pub enum Corpus {
pub path: PathBuf, SinglePath { name: String, path: PathBuf },
MultiplePaths { name: String, paths: Vec<PathBuf> },
} }
impl Corpus { impl Corpus {
/// Try to read and parse the corpus definition file at given `path`. pub fn try_from_path(file_path: impl AsRef<Path>) -> anyhow::Result<Self> {
pub fn try_from_path(path: &Path) -> anyhow::Result<Self> { let mut corpus = File::open(file_path.as_ref())
let file = File::open(path)?; .map_err(anyhow::Error::from)
let mut corpus: Corpus = serde_json::from_reader(file)?; .and_then(|file| serde_json::from_reader::<_, Corpus>(file).map_err(Into::into))?;
// Ensure that the path mentioned in the corpus is relative to the corpus file. for path in corpus.paths_iter_mut() {
// Canonicalizing also helps make the path in any errors unambiguous. *path = file_path
corpus.path = path .as_ref()
.parent() .parent()
.ok_or_else(|| { .ok_or_else(|| {
anyhow::anyhow!("Corpus path '{}' does not point to a file", path.display()) anyhow::anyhow!("Corpus path '{}' does not point to a file", path.display())
})? })?
.canonicalize() .canonicalize()
.map_err(|error| { .map_err(|error| {
anyhow::anyhow!( anyhow::anyhow!(
"Failed to canonicalize path to corpus '{}': {error}", "Failed to canonicalize path to corpus '{}': {error}",
path.display() path.display()
) )
})? })?
.join(corpus.path); .join(path.as_path())
}
Ok(corpus) Ok(corpus)
} }
/// Scan the corpus base directory and return all tests found.
pub fn enumerate_tests(&self) -> Vec<MetadataFile> { pub fn enumerate_tests(&self) -> Vec<MetadataFile> {
let mut tests = Vec::new(); let mut tests = self
collect_metadata(&self.path, &mut tests); .paths_iter()
.flat_map(|root_path| {
if !root_path.is_dir() {
Box::new(std::iter::once(root_path.to_path_buf()))
as Box<dyn Iterator<Item = _>>
} else {
Box::new(
FilesWithExtensionIterator::new(root_path)
.with_use_cached_fs(true)
.with_allowed_extension("sol")
.with_allowed_extension("json"),
)
}
.map(move |metadata_file_path| (root_path, metadata_file_path))
})
.filter_map(|(root_path, metadata_file_path)| {
Metadata::try_from_file(&metadata_file_path)
.or_else(|| {
debug!(
discovered_from = %root_path.display(),
metadata_file_path = %metadata_file_path.display(),
"Skipping file since it doesn't contain valid metadata"
);
None
})
.map(|metadata| MetadataFile {
metadata_file_path,
corpus_file_path: root_path.to_path_buf(),
content: metadata,
})
.inspect(|metadata_file| {
debug!(
metadata_file_path = %metadata_file.relative_path().display(),
"Loaded metadata file"
)
})
})
.collect::<Vec<_>>();
tests.sort_by(|a, b| a.metadata_file_path.cmp(&b.metadata_file_path));
tests.dedup_by(|a, b| a.metadata_file_path == b.metadata_file_path);
info!(
len = tests.len(),
corpus_name = self.name(),
"Found tests in Corpus"
);
tests tests
} }
}
/// Recursively walks `path` and parses any JSON or Solidity file into a test pub fn name(&self) -> &str {
/// definition [Metadata]. match self {
/// Corpus::SinglePath { name, .. } | Corpus::MultiplePaths { name, .. } => name.as_str(),
/// Found tests are inserted into `tests`. }
/// }
/// `path` is expected to be a directory.
pub fn collect_metadata(path: &Path, tests: &mut Vec<MetadataFile>) { pub fn paths_iter(&self) -> impl Iterator<Item = &Path> {
if path.is_dir() { match self {
let dir_entry = match std::fs::read_dir(path) { Corpus::SinglePath { path, .. } => {
Ok(dir_entry) => dir_entry, Box::new(std::iter::once(path.as_path())) as Box<dyn Iterator<Item = _>>
Err(error) => {
tracing::error!("failed to read dir '{}': {error}", path.display());
return;
} }
}; Corpus::MultiplePaths { paths, .. } => {
Box::new(paths.iter().map(|path| path.as_path())) as Box<dyn Iterator<Item = _>>
for entry in dir_entry {
let entry = match entry {
Ok(entry) => entry,
Err(error) => {
tracing::error!("error reading dir entry: {error}");
continue;
}
};
let path = entry.path();
if path.is_dir() {
collect_metadata(&path, tests);
continue;
}
if path.is_file() {
if let Some(metadata) = MetadataFile::try_from_file(&path) {
tests.push(metadata)
}
} }
} }
} else { }
let Some(extension) = path.extension() else {
tracing::error!("Failed to get file extension"); pub fn paths_iter_mut(&mut self) -> impl Iterator<Item = &mut PathBuf> {
return; match self {
}; Corpus::SinglePath { path, .. } => {
if extension.eq_ignore_ascii_case("sol") || extension.eq_ignore_ascii_case("json") { Box::new(std::iter::once(path)) as Box<dyn Iterator<Item = _>>
if let Some(metadata) = MetadataFile::try_from_file(path) {
tests.push(metadata)
} }
} else { Corpus::MultiplePaths { paths, .. } => {
tracing::error!(?extension, "Unsupported file extension"); Box::new(paths.iter_mut()) as Box<dyn Iterator<Item = _>>
}
}
}
pub fn path_count(&self) -> usize {
match self {
Corpus::SinglePath { .. } => 1,
Corpus::MultiplePaths { paths, .. } => paths.len(),
} }
} }
} }
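The corpus definition is now an untagged enum, so a corpus file may declare either a single `path` or a `paths` array and both deserialize into the same type. A sketch of that serde shape, assuming `serde` and `serde_json`:

```rust
use std::path::PathBuf;

use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
enum Corpus {
    SinglePath { name: String, path: PathBuf },
    MultiplePaths { name: String, paths: Vec<PathBuf> },
}

fn main() -> anyhow::Result<()> {
    // Both spellings parse into the same type.
    let single: Corpus = serde_json::from_str(r#"{ "name": "solidity", "path": "tests" }"#)?;
    let multi: Corpus = serde_json::from_str(r#"{ "name": "solidity", "paths": ["a", "b"] }"#)?;
    println!("{single:?} {multi:?}");
    Ok(())
}
```

With `untagged`, serde tries the variants in declaration order, so a document carrying a `paths` array fails to match `SinglePath` and falls through to `MultiplePaths`.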
+103 -77
View File
@@ -2,17 +2,19 @@ use std::collections::HashMap;
use alloy::{ use alloy::{
eips::BlockNumberOrTag, eips::BlockNumberOrTag,
hex::ToHexExt, json_abi::Function,
network::TransactionBuilder, network::TransactionBuilder,
primitives::{Address, Bytes, U256}, primitives::{Address, Bytes, U256},
rpc::types::TransactionRequest, rpc::types::TransactionRequest,
}; };
use alloy_primitives::{FixedBytes, utils::parse_units}; use alloy_primitives::{FixedBytes, utils::parse_units};
use anyhow::Context; use anyhow::Context;
use futures::{FutureExt, StreamExt, TryFutureExt, TryStreamExt, stream};
use semver::VersionReq; use semver::VersionReq;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use revive_dt_common::macros::define_wrapper_type; use revive_dt_common::macros::define_wrapper_type;
use tracing::{Instrument, info_span, instrument};
use crate::traits::ResolverApi; use crate::traits::ResolverApi;
use crate::{metadata::ContractInstance, traits::ResolutionContext}; use crate::{metadata::ContractInstance, traits::ResolutionContext};
@@ -32,24 +34,46 @@ pub enum Step {
StorageEmptyAssertion(Box<StorageEmptyAssertion>), StorageEmptyAssertion(Box<StorageEmptyAssertion>),
} }
define_wrapper_type!(
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct StepIdx(usize) impl Display;
);
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)] #[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
pub struct Input { pub struct Input {
#[serde(default = "Input::default_caller")] #[serde(default = "Input::default_caller")]
pub caller: Address, pub caller: Address,
#[serde(skip_serializing_if = "Option::is_none")]
pub comment: Option<String>, pub comment: Option<String>,
#[serde(default = "Input::default_instance")] #[serde(default = "Input::default_instance")]
pub instance: ContractInstance, pub instance: ContractInstance,
pub method: Method, pub method: Method,
#[serde(default)] #[serde(default)]
pub calldata: Calldata, pub calldata: Calldata,
#[serde(skip_serializing_if = "Option::is_none")]
pub expected: Option<Expected>, pub expected: Option<Expected>,
#[serde(skip_serializing_if = "Option::is_none")]
pub value: Option<EtherValue>, pub value: Option<EtherValue>,
#[serde(skip_serializing_if = "Option::is_none")]
pub storage: Option<HashMap<String, Calldata>>, pub storage: Option<HashMap<String, Calldata>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub variable_assignments: Option<VariableAssignments>, pub variable_assignments: Option<VariableAssignments>,
} }
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)] #[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
pub struct BalanceAssertion { pub struct BalanceAssertion {
/// An optional comment on the balance assertion.
#[serde(skip_serializing_if = "Option::is_none")]
pub comment: Option<String>,
/// The address that the balance assertion should be done on. /// The address that the balance assertion should be done on.
/// ///
/// This is a string which will be resolved into an address when being processed. Therefore, /// This is a string which will be resolved into an address when being processed. Therefore,
@@ -64,6 +88,10 @@ pub struct BalanceAssertion {
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)] #[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
pub struct StorageEmptyAssertion { pub struct StorageEmptyAssertion {
/// An optional comment on the storage empty assertion.
#[serde(skip_serializing_if = "Option::is_none")]
pub comment: Option<String>,
/// The address that the balance assertion should be done on. /// The address that the balance assertion should be done on.
/// ///
/// This is a string which will be resolved into an address when being processed. Therefore, /// This is a string which will be resolved into an address when being processed. Therefore,
@@ -86,8 +114,11 @@ pub enum Expected {
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)] #[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
pub struct ExpectedOutput { pub struct ExpectedOutput {
#[serde(skip_serializing_if = "Option::is_none")]
pub compiler_version: Option<VersionReq>, pub compiler_version: Option<VersionReq>,
#[serde(skip_serializing_if = "Option::is_none")]
pub return_data: Option<Calldata>, pub return_data: Option<Calldata>,
#[serde(skip_serializing_if = "Option::is_none")]
pub events: Option<Vec<Event>>, pub events: Option<Vec<Event>>,
#[serde(default)] #[serde(default)]
pub exception: bool, pub exception: bool,
@@ -95,6 +126,7 @@ pub struct ExpectedOutput {
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)] #[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
pub struct Event { pub struct Event {
#[serde(skip_serializing_if = "Option::is_none")]
pub address: Option<String>, pub address: Option<String>,
pub topics: Vec<String>, pub topics: Vec<String>,
pub values: Calldata, pub values: Calldata,
@@ -162,7 +194,7 @@ define_wrapper_type! {
/// This represents an item in the [`Calldata::Compound`] variant. /// This represents an item in the [`Calldata::Compound`] variant.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
#[serde(transparent)] #[serde(transparent)]
pub struct CalldataItem(String); pub struct CalldataItem(String) impl Display;
} }
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
@@ -207,7 +239,7 @@ pub enum Method {
define_wrapper_type!( define_wrapper_type!(
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct EtherValue(U256); pub struct EtherValue(U256) impl Display;
); );
#[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)] #[derive(Clone, Debug, Default, Serialize, Deserialize, Eq, PartialEq)]
@@ -242,36 +274,33 @@ impl Input {
} }
Method::FunctionName(ref function_name) => { Method::FunctionName(ref function_name) => {
let Some(abi) = context.deployed_contract_abi(&self.instance) else { let Some(abi) = context.deployed_contract_abi(&self.instance) else {
tracing::error!(
contract_name = self.instance.as_ref(),
"Attempted to lookup ABI of contract but it wasn't found"
);
anyhow::bail!("ABI for instance '{}' not found", self.instance.as_ref()); anyhow::bail!("ABI for instance '{}' not found", self.instance.as_ref());
}; };
tracing::trace!("ABI found for instance: {}", &self.instance.as_ref());
// We follow the same logic that's implemented in the matter-labs-tester where they resolve // We follow the same logic that's implemented in the matter-labs-tester where they resolve
// the function name into a function selector and they assume that the function doesn't have // the function name into a function selector and they assume that the function doesn't have
// any existing overloads. // any existing overloads.
// Overloads are handled by providing the full function signature in the "function
// name".
// https://github.com/matter-labs/era-compiler-tester/blob/1dfa7d07cba0734ca97e24704f12dd57f6990c2c/compiler_tester/src/test/case/input/mod.rs#L158-L190 // https://github.com/matter-labs/era-compiler-tester/blob/1dfa7d07cba0734ca97e24704f12dd57f6990c2c/compiler_tester/src/test/case/input/mod.rs#L158-L190
let function = abi let selector = if function_name.contains('(') && function_name.contains(')') {
.functions() Function::parse(function_name)
.find(|function| function.signature().starts_with(function_name)) .context(
.ok_or_else(|| { "Failed to parse the provided function name into a function signature",
anyhow::anyhow!( )?
"Function with name {:?} not found in ABI for the instance {:?}", .selector()
function_name, } else {
&self.instance abi.functions()
) .find(|function| function.signature().starts_with(function_name))
})?; .ok_or_else(|| {
anyhow::anyhow!(
tracing::trace!("Functions found for instance: {}", self.instance.as_ref()); "Function with name {:?} not found in ABI for the instance {:?}",
function_name,
tracing::trace!( &self.instance
"Starting encoding ABI's parameters for instance: {}", )
self.instance.as_ref() })?
); .selector()
};
// Allocating a vector that we will be using for the calldata. The vector size will be: // Allocating a vector that we will be using for the calldata. The vector size will be:
// 4 bytes for the function selector. // 4 bytes for the function selector.
@@ -280,7 +309,7 @@ impl Input {
// We're using indices in the following code in order to avoid the need for us to allocate // We're using indices in the following code in order to avoid the need for us to allocate
// a new buffer for each one of the resolved arguments. // a new buffer for each one of the resolved arguments.
let mut calldata = Vec::<u8>::with_capacity(4 + self.calldata.size_requirement()); let mut calldata = Vec::<u8>::with_capacity(4 + self.calldata.size_requirement());
calldata.extend(function.selector().0); calldata.extend(selector.0);
self.calldata self.calldata
.calldata_into_slice(&mut calldata, resolver, context) .calldata_into_slice(&mut calldata, resolver, context)
.await?; .await?;
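A minimal sketch of the selector resolution and calldata layout implemented above, assuming alloy's `json_abi::Function` (the same parser the new code calls) is in scope; the signature used is purely illustrative:

```rust
use alloy::json_abi::Function;
use alloy::primitives::U256;
use anyhow::Context;

fn encode_call() -> anyhow::Result<Vec<u8>> {
    // A full signature resolves overloads unambiguously, mirroring the branch above.
    let function = Function::parse("transfer(address,uint256)")
        .context("Failed to parse the function signature")?;
    let selector = function.selector();

    // Calldata layout: 4 selector bytes followed by one 32-byte word per argument.
    let mut calldata = Vec::with_capacity(4 + 2 * 32);
    calldata.extend(selector.0);
    calldata.extend(U256::ZERO.to_be_bytes::<32>()); // address argument, left-padded
    calldata.extend(U256::from(1u64).to_be_bytes::<32>()); // amount argument
    Ok(calldata)
}
```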
@@ -399,17 +428,18 @@ impl Calldata {
buffer.extend_from_slice(bytes); buffer.extend_from_slice(bytes);
} }
Calldata::Compound(items) => { Calldata::Compound(items) => {
for (arg_idx, arg) in items.iter().enumerate() { let resolved = stream::iter(items.iter().enumerate())
match arg.resolve(resolver, context).await { .map(|(arg_idx, arg)| async move {
Ok(resolved) => { arg.resolve(resolver, context)
buffer.extend(resolved.to_be_bytes::<32>()); .instrument(info_span!("Resolving argument", %arg, arg_idx))
} .map_ok(|value| value.to_be_bytes::<32>())
Err(error) => { .await
tracing::error!(?arg, arg_idx, ?error, "Failed to resolve argument"); })
return Err(error); .buffered(0xFF)
} .try_collect::<Vec<_>>()
}; .await?;
}
buffer.extend(resolved.into_iter().flatten());
} }
}; };
Ok(()) Ok(())
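The sequential per-argument loop is replaced with a buffered stream, so up to 255 arguments resolve concurrently while their order is preserved. A self-contained sketch of the same pattern with a stand-in async step:

```rust
use futures::{StreamExt, TryStreamExt, stream};

async fn resolve_all(items: Vec<u32>) -> anyhow::Result<Vec<u32>> {
    stream::iter(items)
        .map(|item| async move {
            // Stand-in for `arg.resolve(resolver, context)`; any fallible async work fits here.
            Ok::<_, anyhow::Error>(item * 2)
        })
        .buffered(0xFF) // at most 255 resolutions in flight, results kept in order
        .try_collect()
        .await
}
```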
@@ -432,36 +462,37 @@ impl Calldata {
match self { match self {
Calldata::Single(calldata) => Ok(calldata == other), Calldata::Single(calldata) => Ok(calldata == other),
Calldata::Compound(items) => { Calldata::Compound(items) => {
// Chunking the "other" calldata into 32 byte chunks since each stream::iter(items.iter().zip(other.chunks(32)))
// one of the items in the compound calldata represents 32 bytes .map(|(this, other)| async move {
for (this, other) in items.iter().zip(other.chunks(32)) { // The matterlabs format supports wildcards and therefore we
// The matterlabs format supports wildcards and therefore we // also need to support them.
// also need to support them. if this.as_ref() == "*" {
if this.as_ref() == "*" { return Ok::<_, anyhow::Error>(true);
continue; }
}
let other = if other.len() < 32 { let other = if other.len() < 32 {
let mut vec = other.to_vec(); let mut vec = other.to_vec();
vec.resize(32, 0); vec.resize(32, 0);
std::borrow::Cow::Owned(vec) std::borrow::Cow::Owned(vec)
} else { } else {
std::borrow::Cow::Borrowed(other) std::borrow::Cow::Borrowed(other)
}; };
let this = this.resolve(resolver, context).await?; let this = this.resolve(resolver, context).await?;
let other = U256::from_be_slice(&other); let other = U256::from_be_slice(&other);
if this != other { Ok(this == other)
return Ok(false); })
} .buffered(0xFF)
} .all(|v| async move { v.is_ok_and(|v| v) })
Ok(true) .map(Ok)
.await
} }
} }
} }
} }
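The comparison above applies two rules per 32-byte word: a literal `"*"` in the expected calldata matches anything, and a trailing chunk shorter than 32 bytes is right-padded with zeros before being compared. A small sketch of just those two rules (the resolved expected value is passed in directly here):

```rust
use alloy::primitives::U256;

fn word_matches(expected: &str, resolved_expected: U256, actual_chunk: &[u8]) -> bool {
    // The matter-labs format supports wildcards, which match any word.
    if expected == "*" {
        return true;
    }
    // Right-pad a short trailing chunk with zeros, as the code above does.
    // `actual_chunk` is assumed to come from `.chunks(32)`, so it is at most 32 bytes.
    let mut word = [0u8; 32];
    word[..actual_chunk.len()].copy_from_slice(actual_chunk);
    resolved_expected == U256::from_be_slice(&word)
}
```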
impl CalldataItem { impl CalldataItem {
#[instrument(level = "info", skip_all, err)]
async fn resolve( async fn resolve(
&self, &self,
resolver: &impl ResolverApi, resolver: &impl ResolverApi,
@@ -512,14 +543,7 @@ impl CalldataItem {
match stack.as_slice() { match stack.as_slice() {
// Empty stack means that we got an empty compound calldata which we resolve to zero. // Empty stack means that we got an empty compound calldata which we resolve to zero.
[] => Ok(U256::ZERO), [] => Ok(U256::ZERO),
[CalldataToken::Item(item)] => { [CalldataToken::Item(item)] => Ok(*item),
tracing::debug!(
original = self.0,
resolved = item.to_be_bytes::<32>().encode_hex(),
"Resolved a Calldata item"
);
Ok(*item)
}
_ => Err(anyhow::anyhow!( _ => Err(anyhow::anyhow!(
"Invalid calldata arithmetic operation - Invalid stack" "Invalid calldata arithmetic operation - Invalid stack"
)), )),
@@ -703,12 +727,14 @@ impl<'de> Deserialize<'de> for EtherValue {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*;
use alloy::{eips::BlockNumberOrTag, json_abi::JsonAbi}; use alloy::{eips::BlockNumberOrTag, json_abi::JsonAbi};
use alloy_primitives::{BlockHash, BlockNumber, BlockTimestamp, ChainId, TxHash, address}; use alloy_primitives::{BlockHash, BlockNumber, BlockTimestamp, ChainId, TxHash, address};
use alloy_sol_types::SolValue; use alloy_sol_types::SolValue;
use std::collections::HashMap; use std::collections::HashMap;
use super::*;
use crate::metadata::ContractIdent;
struct MockResolver; struct MockResolver;
impl ResolverApi for MockResolver { impl ResolverApi for MockResolver {
@@ -782,11 +808,11 @@ mod tests {
let mut contracts = HashMap::new(); let mut contracts = HashMap::new();
contracts.insert( contracts.insert(
ContractInstance::new("Contract"), ContractInstance::new("Contract"),
(Address::ZERO, parsed_abi), (ContractIdent::new("Contract"), Address::ZERO, parsed_abi),
); );
let resolver = MockResolver; let resolver = MockResolver;
let context = ResolutionContext::new_from_parts(&contracts, None, None, None); let context = ResolutionContext::default().with_deployed_contracts(&contracts);
let encoded = input.encoded_input(&resolver, context).await.unwrap(); let encoded = input.encoded_input(&resolver, context).await.unwrap();
assert!(encoded.0.starts_with(&selector)); assert!(encoded.0.starts_with(&selector));
@@ -826,11 +852,11 @@ mod tests {
let mut contracts = HashMap::new(); let mut contracts = HashMap::new();
contracts.insert( contracts.insert(
ContractInstance::new("Contract"), ContractInstance::new("Contract"),
(Address::ZERO, parsed_abi), (ContractIdent::new("Contract"), Address::ZERO, parsed_abi),
); );
let resolver = MockResolver; let resolver = MockResolver;
let context = ResolutionContext::new_from_parts(&contracts, None, None, None); let context = ResolutionContext::default().with_deployed_contracts(&contracts);
let encoded = input.encoded_input(&resolver, context).await.unwrap(); let encoded = input.encoded_input(&resolver, context).await.unwrap();
assert!(encoded.0.starts_with(&selector)); assert!(encoded.0.starts_with(&selector));
@@ -873,11 +899,11 @@ mod tests {
let mut contracts = HashMap::new(); let mut contracts = HashMap::new();
contracts.insert( contracts.insert(
ContractInstance::new("Contract"), ContractInstance::new("Contract"),
(Address::ZERO, parsed_abi), (ContractIdent::new("Contract"), Address::ZERO, parsed_abi),
); );
let resolver = MockResolver; let resolver = MockResolver;
let context = ResolutionContext::new_from_parts(&contracts, None, None, None); let context = ResolutionContext::default().with_deployed_contracts(&contracts);
let encoded = input.encoded_input(&resolver, context).await.unwrap(); let encoded = input.encoded_input(&resolver, context).await.unwrap();
assert!(encoded.0.starts_with(&selector)); assert!(encoded.0.starts_with(&selector));
@@ -891,10 +917,10 @@ mod tests {
async fn resolve_calldata_item( async fn resolve_calldata_item(
input: &str, input: &str,
deployed_contracts: &HashMap<ContractInstance, (Address, JsonAbi)>, deployed_contracts: &HashMap<ContractInstance, (ContractIdent, Address, JsonAbi)>,
resolver: &impl ResolverApi, resolver: &impl ResolverApi,
) -> anyhow::Result<U256> { ) -> anyhow::Result<U256> {
let context = ResolutionContext::new_from_parts(deployed_contracts, None, None, None); let context = ResolutionContext::default().with_deployed_contracts(deployed_contracts);
CalldataItem::new(input).resolve(resolver, context).await CalldataItem::new(input).resolve(resolver, context).await
} }
+225 -62
@@ -1,7 +1,8 @@
use std::{ use std::{
cmp::Ordering,
collections::BTreeMap, collections::BTreeMap,
fmt::Display, fmt::Display,
fs::{File, read_to_string}, fs::File,
ops::Deref, ops::Deref,
path::{Path, PathBuf}, path::{Path, PathBuf},
str::FromStr, str::FromStr,
@@ -9,12 +10,14 @@ use std::{
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use revive_dt_common::{iterators::FilesWithExtensionIterator, macros::define_wrapper_type}; use revive_common::EVMVersion;
use revive_dt_common::{
use crate::{ cached_fs::read_to_string, iterators::FilesWithExtensionIterator, macros::define_wrapper_type,
case::Case, types::Mode,
mode::{Mode, SolcMode},
}; };
use tracing::error;
use crate::{case::Case, mode::ParsedMode};
pub const METADATA_FILE_EXTENSION: &str = "json"; pub const METADATA_FILE_EXTENSION: &str = "json";
pub const SOLIDITY_CASE_FILE_EXTENSION: &str = "sol"; pub const SOLIDITY_CASE_FILE_EXTENSION: &str = "sol";
@@ -22,16 +25,26 @@ pub const SOLIDITY_CASE_COMMENT_MARKER: &str = "//!";
#[derive(Debug, Default, Deserialize, Clone, Eq, PartialEq)] #[derive(Debug, Default, Deserialize, Clone, Eq, PartialEq)]
pub struct MetadataFile { pub struct MetadataFile {
pub path: PathBuf, /// The path of the metadata file. This will either be a JSON or Solidity file.
pub metadata_file_path: PathBuf,
/// This is the path contained within the corpus file. This could either be the path of some directory
/// or the actual metadata file path.
pub corpus_file_path: PathBuf,
/// The metadata contained within the file.
pub content: Metadata, pub content: Metadata,
} }
impl MetadataFile { impl MetadataFile {
pub fn try_from_file(path: &Path) -> Option<Self> { pub fn relative_path(&self) -> &Path {
Metadata::try_from_file(path).map(|metadata| Self { if self.corpus_file_path.is_file() {
path: path.to_owned(), &self.corpus_file_path
content: metadata, } else {
}) self.metadata_file_path
.strip_prefix(&self.corpus_file_path)
.unwrap()
}
} }
} }
@@ -45,31 +58,49 @@ impl Deref for MetadataFile {
#[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq)] #[derive(Debug, Default, Serialize, Deserialize, Clone, Eq, PartialEq)]
pub struct Metadata { pub struct Metadata {
pub targets: Option<Vec<String>>, /// A comment on the test case that's added for human-readability.
pub cases: Vec<Case>, #[serde(skip_serializing_if = "Option::is_none")]
pub contracts: Option<BTreeMap<ContractInstance, ContractPathAndIdent>>, pub comment: Option<String>,
pub libraries: Option<BTreeMap<PathBuf, BTreeMap<ContractIdent, ContractInstance>>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub ignore: Option<bool>, pub ignore: Option<bool>,
pub modes: Option<Vec<Mode>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub targets: Option<Vec<String>>,
pub cases: Vec<Case>,
#[serde(skip_serializing_if = "Option::is_none")]
pub contracts: Option<BTreeMap<ContractInstance, ContractPathAndIdent>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub libraries: Option<BTreeMap<PathBuf, BTreeMap<ContractIdent, ContractInstance>>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub modes: Option<Vec<ParsedMode>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub file_path: Option<PathBuf>, pub file_path: Option<PathBuf>,
/// This field specifies an EVM version requirement that the test case has; the test will only
/// be run if the EVM version of the node matches the requirement specified here.
#[serde(skip_serializing_if = "Option::is_none")]
pub required_evm_version: Option<EvmVersionRequirement>,
/// A set of compilation directives that will be passed to the compiler whenever the contracts for
/// the test are being compiled. Note that this differs from the [`Mode`]s in that a [`Mode`] is
/// just a filter for when a test can run whereas this is an instruction to the compiler.
#[serde(skip_serializing_if = "Option::is_none")]
pub compiler_directives: Option<CompilationDirectives>,
} }
impl Metadata { impl Metadata {
/// Returns the solc modes of this metadata, inserting a default mode if not present. /// Returns the modes that we should test from this metadata.
pub fn solc_modes(&self) -> Vec<SolcMode> { pub fn solc_modes(&self) -> Vec<Mode> {
self.modes match &self.modes {
.to_owned() Some(modes) => ParsedMode::many_to_modes(modes.iter()).collect(),
.unwrap_or_else(|| vec![Mode::Solidity(Default::default())]) None => Mode::all().collect(),
.iter() }
.filter_map(|mode| match mode {
Mode::Solidity(solc_mode) => Some(solc_mode),
Mode::Unknown(mode) => {
tracing::debug!("compiler: ignoring unknown mode '{mode}'");
None
}
})
.cloned()
.collect()
} }
/// Returns the base directory of this metadata. /// Returns the base directory of this metadata.
@@ -125,10 +156,7 @@ impl Metadata {
pub fn try_from_file(path: &Path) -> Option<Self> { pub fn try_from_file(path: &Path) -> Option<Self> {
assert!(path.is_file(), "not a file: {}", path.display()); assert!(path.is_file(), "not a file: {}", path.display());
let Some(file_extension) = path.extension() else { let file_extension = path.extension()?;
tracing::debug!("skipping corpus file: {}", path.display());
return None;
};
if file_extension == METADATA_FILE_EXTENSION { if file_extension == METADATA_FILE_EXTENSION {
return Self::try_from_json(path); return Self::try_from_json(path);
@@ -138,18 +166,12 @@ impl Metadata {
return Self::try_from_solidity(path); return Self::try_from_solidity(path);
} }
tracing::debug!("ignoring invalid corpus file: {}", path.display());
None None
} }
fn try_from_json(path: &Path) -> Option<Self> { fn try_from_json(path: &Path) -> Option<Self> {
let file = File::open(path) let file = File::open(path)
.inspect_err(|error| { .inspect_err(|err| error!(path = %path.display(), %err, "Failed to open file"))
tracing::error!(
"opening JSON test metadata file '{}' error: {error}",
path.display()
);
})
.ok()?; .ok()?;
match serde_json::from_reader::<_, Metadata>(file) { match serde_json::from_reader::<_, Metadata>(file) {
@@ -157,11 +179,8 @@ impl Metadata {
metadata.file_path = Some(path.to_path_buf()); metadata.file_path = Some(path.to_path_buf());
Some(metadata) Some(metadata)
} }
Err(error) => { Err(err) => {
tracing::error!( error!(path = %path.display(), %err, "Deserialization of metadata failed");
"parsing JSON test metadata file '{}' error: {error}",
path.display()
);
None None
} }
} }
@@ -169,12 +188,7 @@ impl Metadata {
fn try_from_solidity(path: &Path) -> Option<Self> { fn try_from_solidity(path: &Path) -> Option<Self> {
let spec = read_to_string(path) let spec = read_to_string(path)
.inspect_err(|error| { .inspect_err(|err| error!(path = %path.display(), %err, "Failed to read file content"))
tracing::error!(
"opening JSON test metadata file '{}' error: {error}",
path.display()
);
})
.ok()? .ok()?
.lines() .lines()
.filter_map(|line| line.strip_prefix(SOLIDITY_CASE_COMMENT_MARKER)) .filter_map(|line| line.strip_prefix(SOLIDITY_CASE_COMMENT_MARKER))
@@ -202,11 +216,8 @@ impl Metadata {
); );
Some(metadata) Some(metadata)
} }
Err(error) => { Err(err) => {
tracing::error!( error!(path = %path.display(), %err, "Failed to deserialize metadata");
"parsing Solidity test metadata file '{}' error: '{error}' from data: {spec}",
path.display()
);
None None
} }
} }
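For Solidity corpus files, the metadata is embedded as `//!`-prefixed comment lines, which are stripped of the marker and deserialized as JSON. A sketch of that extraction step (the joining of the stripped lines is illustrative; the exact concatenation is not shown in this hunk):

```rust
fn extract_embedded_metadata(solidity_source: &str) -> String {
    solidity_source
        .lines()
        // Keep only the `//!` marker lines and drop the marker itself.
        .filter_map(|line| line.strip_prefix("//!"))
        .collect::<Vec<_>>()
        .join("\n")
}
```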
@@ -230,7 +241,9 @@ impl Metadata {
Ok(Box::new(std::iter::once(metadata_file_path.clone()))) Ok(Box::new(std::iter::once(metadata_file_path.clone())))
} else { } else {
Ok(Box::new( Ok(Box::new(
FilesWithExtensionIterator::new(self.directory()?).with_allowed_extension("sol"), FilesWithExtensionIterator::new(self.directory()?)
.with_allowed_extension("sol")
.with_use_cached_fs(true),
)) ))
} }
} }
@@ -244,7 +257,7 @@ define_wrapper_type!(
Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize,
)] )]
#[serde(transparent)] #[serde(transparent)]
pub struct ContractInstance(String); pub struct ContractInstance(String) impl Display;
); );
define_wrapper_type!( define_wrapper_type!(
@@ -255,7 +268,7 @@ define_wrapper_type!(
Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize,
)] )]
#[serde(transparent)] #[serde(transparent)]
pub struct ContractIdent(String); pub struct ContractIdent(String) impl Display;
); );
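The `impl Display` suffix added to these `define_wrapper_type!` invocations opts in to an automatic `Display` implementation for the wrapper (see the "Allow for auto display impl in declare wrapper type macro" commit). The real macro lives in `revive_dt_common::macros`; a plausible hand-written equivalent of what that option expands to might look like:

```rust
use std::fmt;

pub struct WrapperSketch(String);

impl fmt::Display for WrapperSketch {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the wrapped value's own Display implementation.
        self.0.fmt(f)
    }
}
```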
/// Represents an identifier used for contracts. /// Represents an identifier used for contracts.
@@ -342,6 +355,156 @@ impl From<ContractPathAndIdent> for String {
} }
} }
/// An EVM version requirement that the test case has. This gets serialized and
/// deserialized from and into [`String`].
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
#[serde(try_from = "String", into = "String")]
pub struct EvmVersionRequirement {
ordering: Ordering,
or_equal: bool,
evm_version: EVMVersion,
}
impl EvmVersionRequirement {
pub fn new_greater_than_or_equals(version: EVMVersion) -> Self {
Self {
ordering: Ordering::Greater,
or_equal: true,
evm_version: version,
}
}
pub fn new_greater_than(version: EVMVersion) -> Self {
Self {
ordering: Ordering::Greater,
or_equal: false,
evm_version: version,
}
}
pub fn new_equals(version: EVMVersion) -> Self {
Self {
ordering: Ordering::Equal,
or_equal: false,
evm_version: version,
}
}
pub fn new_less_than(version: EVMVersion) -> Self {
Self {
ordering: Ordering::Less,
or_equal: false,
evm_version: version,
}
}
pub fn new_less_than_or_equals(version: EVMVersion) -> Self {
Self {
ordering: Ordering::Less,
or_equal: true,
evm_version: version,
}
}
pub fn matches(&self, other: &EVMVersion) -> bool {
let ordering = other.cmp(&self.evm_version);
ordering == self.ordering || (self.or_equal && matches!(ordering, Ordering::Equal))
}
}
impl Display for EvmVersionRequirement {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let Self {
ordering,
or_equal,
evm_version,
} = self;
match ordering {
Ordering::Less => write!(f, "<")?,
Ordering::Equal => write!(f, "=")?,
Ordering::Greater => write!(f, ">")?,
}
if *or_equal && !matches!(ordering, Ordering::Equal) {
write!(f, "=")?;
}
write!(f, "{evm_version}")
}
}
impl FromStr for EvmVersionRequirement {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.as_bytes() {
[b'>', b'=', remaining @ ..] => Ok(Self {
ordering: Ordering::Greater,
or_equal: true,
evm_version: str::from_utf8(remaining)?.try_into()?,
}),
[b'>', remaining @ ..] => Ok(Self {
ordering: Ordering::Greater,
or_equal: false,
evm_version: str::from_utf8(remaining)?.try_into()?,
}),
[b'<', b'=', remaining @ ..] => Ok(Self {
ordering: Ordering::Less,
or_equal: true,
evm_version: str::from_utf8(remaining)?.try_into()?,
}),
[b'<', remaining @ ..] => Ok(Self {
ordering: Ordering::Less,
or_equal: false,
evm_version: str::from_utf8(remaining)?.try_into()?,
}),
[b'=', remaining @ ..] => Ok(Self {
ordering: Ordering::Equal,
or_equal: false,
evm_version: str::from_utf8(remaining)?.try_into()?,
}),
_ => anyhow::bail!("Invalid EVM version requirement {s}"),
}
}
}
impl TryFrom<String> for EvmVersionRequirement {
type Error = anyhow::Error;
fn try_from(value: String) -> Result<Self, Self::Error> {
value.parse()
}
}
impl From<EvmVersionRequirement> for String {
fn from(value: EvmVersionRequirement) -> Self {
value.to_string()
}
}
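A hypothetical usage sketch for `EvmVersionRequirement`: it assumes that `revive_common::EVMVersion` parses from the lowercase fork name (`TryFrom<&str>`), which the `FromStr` implementation above relies on; the exact version string form accepted is an assumption, not confirmed here.

```rust
use revive_common::EVMVersion;

fn example() -> anyhow::Result<()> {
    // ">=<version>" means the node's EVM version must be at least this fork.
    let requirement: EvmVersionRequirement = ">=cancun".parse()?;
    assert!(requirement.matches(&EVMVersion::Cancun));
    Ok(())
}
```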
/// A set of compilation directives that will be passed to the compiler whenever the contracts for
/// the test are being compiled. Note that this differs from the [`Mode`]s in that a [`Mode`] is
/// just a filter for when a test can run whereas this is an instruction to the compiler.
#[derive(
Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Serialize, Deserialize,
)]
pub struct CompilationDirectives {
/// Defines how the revert strings should be handled.
pub revert_string_handling: Option<RevertString>,
}
/// Defines how the compiler should handle revert strings.
#[derive(
Clone, Debug, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Serialize, Deserialize,
)]
#[serde(rename_all = "camelCase")]
pub enum RevertString {
#[default]
Default,
Debug,
Strip,
VerboseDebug,
}
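Given the serde attributes above, a compiler directives block in a metadata file carries a snake_case field with a camelCase variant value. A small illustrative round-trip (the JSON snippet is an example, not taken from the corpus):

```rust
fn example() -> anyhow::Result<()> {
    let json = r#"{ "revert_string_handling": "verboseDebug" }"#;
    let directives: CompilationDirectives = serde_json::from_str(json)?;
    assert_eq!(
        directives.revert_string_handling,
        Some(RevertString::VerboseDebug)
    );
    Ok(())
}
```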
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::*; use super::*;
+249 -110
@@ -1,123 +1,262 @@
use revive_dt_common::types::VersionOrRequirement; use regex::Regex;
use semver::Version; use revive_dt_common::types::{Mode, ModeOptimizerSetting, ModePipeline};
use serde::de::Deserializer;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashSet;
use std::fmt::Display;
use std::str::FromStr;
use std::sync::LazyLock;
/// Specifies the compilation mode of the test artifact. /// This represents a mode that has been parsed from test metadata.
#[derive(Hash, Debug, Clone, Eq, PartialEq)] ///
pub enum Mode { /// Mode strings can take the following form (in pseudo-regex):
Solidity(SolcMode), ///
Unknown(String), /// ```text
/// [YEILV][+-]? (M[0123sz])? <semver>?
/// ```
///
/// We can parse valid mode strings into [`ParsedMode`] using [`ParsedMode::from_str`].
#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
#[serde(try_from = "String", into = "String")]
pub struct ParsedMode {
pub pipeline: Option<ModePipeline>,
pub optimize_flag: Option<bool>,
pub optimize_setting: Option<ModeOptimizerSetting>,
pub version: Option<semver::VersionReq>,
} }
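A short sketch of parsing one mode string into the fields above, following the grammar in the doc comment; only fields whose types appear in this diff are asserted on:

```rust
use std::str::FromStr;

fn example() -> anyhow::Result<()> {
    let mode = ParsedMode::from_str("Y+ M3 >=0.8.0")?;
    assert_eq!(mode.optimize_flag, Some(true));
    assert_eq!(mode.optimize_setting, Some(ModeOptimizerSetting::M3));
    assert!(mode.version.is_some());
    Ok(())
}
```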
/// Specify Solidity specific compiler options. impl FromStr for ParsedMode {
#[derive(Hash, Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)] type Err = anyhow::Error;
pub struct SolcMode { fn from_str(s: &str) -> Result<Self, Self::Err> {
pub solc_version: Option<semver::VersionReq>, static REGEX: LazyLock<Regex> = LazyLock::new(|| {
solc_optimize: Option<bool>, Regex::new(r"(?x)
pub llvm_optimizer_settings: Vec<String>, ^
mode_string: String, (?:(?P<pipeline>[YEILV])(?P<optimize_flag>[+-])?)? # Pipeline to use eg Y, E+, E-
} \s*
(?P<optimize_setting>M[a-zA-Z0-9])? # Optimize setting eg M0, Ms, Mz
\s*
(?P<version>[>=<]*\d+(?:\.\d+)*)? # Optional semver version eg >=0.8.0, 0.7, <0.8
$
").unwrap()
});
impl SolcMode { let Some(caps) = REGEX.captures(s) else {
/// Try to parse a mode string into a solc mode. anyhow::bail!("Cannot parse mode '{s}' from string");
/// Returns `None` if the string wasn't a solc YUL mode string.
///
/// The mode string is expected to start with the `Y` ID (YUL ID),
/// optionally followed by `+` or `-` for the solc optimizer settings.
///
/// Options can be separated by a whitespace contain the following
/// - A solc `SemVer version requirement` string
/// - One or more `-OX` where X is a supposed to be an LLVM opt mode
pub fn parse_from_mode_string(mode_string: &str) -> Option<Self> {
let mut result = Self {
mode_string: mode_string.to_string(),
..Default::default()
}; };
let mut parts = mode_string.trim().split(" "); let pipeline = match caps.name("pipeline") {
Some(m) => Some(ModePipeline::from_str(m.as_str())?),
match parts.next()? { None => None,
"Y" => {}
"Y+" => result.solc_optimize = Some(true),
"Y-" => result.solc_optimize = Some(false),
_ => return None,
}
for part in parts {
if let Ok(solc_version) = semver::VersionReq::parse(part) {
result.solc_version = Some(solc_version);
continue;
}
if let Some(level) = part.strip_prefix("-O") {
result.llvm_optimizer_settings.push(level.to_string());
continue;
}
panic!("the YUL mode string {mode_string} failed to parse, invalid part: {part}")
}
Some(result)
}
/// Returns whether to enable the solc optimizer.
pub fn solc_optimize(&self) -> bool {
self.solc_optimize.unwrap_or(true)
}
/// Calculate the latest matching solc patch version. Returns:
/// - `latest_supported` if no version request was specified.
/// - A matching version with the same minor version as `latest_supported`, if any.
/// - `None` if no minor version of the `latest_supported` version matches.
pub fn last_patch_version(&self, latest_supported: &Version) -> Option<Version> {
let Some(version_req) = self.solc_version.as_ref() else {
return Some(latest_supported.to_owned());
}; };
// lgtm let optimize_flag = caps.name("optimize_flag").map(|m| m.as_str() == "+");
for patch in (0..latest_supported.patch + 1).rev() {
let version = Version::new(0, latest_supported.minor, patch);
if version_req.matches(&version) {
return Some(version);
}
}
None let optimize_setting = match caps.name("optimize_setting") {
} Some(m) => Some(ModeOptimizerSetting::from_str(m.as_str())?),
None => None,
/// Resolves the [`SolcMode`]'s solidity version requirement into a [`VersionOrRequirement`] if
/// the requirement is present on the object. Otherwise, the passed default version is used.
pub fn compiler_version_to_use(&self, default: Version) -> VersionOrRequirement {
match self.solc_version {
Some(ref requirement) => requirement.clone().into(),
None => default.into(),
}
}
}
impl<'de> Deserialize<'de> for Mode {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let mode_string = String::deserialize(deserializer)?;
if let Some(solc_mode) = SolcMode::parse_from_mode_string(&mode_string) {
return Ok(Self::Solidity(solc_mode));
}
Ok(Self::Unknown(mode_string))
}
}
impl Serialize for Mode {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let string = match self {
Mode::Solidity(solc_mode) => &solc_mode.mode_string,
Mode::Unknown(string) => string,
}; };
string.serialize(serializer)
let version = match caps.name("version") {
Some(m) => Some(semver::VersionReq::parse(m.as_str()).map_err(|e| {
anyhow::anyhow!("Cannot parse the version requirement '{}': {e}", m.as_str())
})?),
None => None,
};
Ok(ParsedMode {
pipeline,
optimize_flag,
optimize_setting,
version,
})
}
}
impl Display for ParsedMode {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut has_written = false;
if let Some(pipeline) = self.pipeline {
pipeline.fmt(f)?;
if let Some(optimize_flag) = self.optimize_flag {
f.write_str(if optimize_flag { "+" } else { "-" })?;
}
has_written = true;
}
if let Some(optimize_setting) = self.optimize_setting {
if has_written {
f.write_str(" ")?;
}
optimize_setting.fmt(f)?;
has_written = true;
}
if let Some(version) = &self.version {
if has_written {
f.write_str(" ")?;
}
version.fmt(f)?;
}
Ok(())
}
}
impl From<ParsedMode> for String {
fn from(parsed_mode: ParsedMode) -> Self {
parsed_mode.to_string()
}
}
impl TryFrom<String> for ParsedMode {
type Error = anyhow::Error;
fn try_from(value: String) -> Result<Self, Self::Error> {
ParsedMode::from_str(&value)
}
}
impl ParsedMode {
/// This takes a [`ParsedMode`] and expands it into a list of [`Mode`]s that we should try.
pub fn to_modes(&self) -> impl Iterator<Item = Mode> {
let pipeline_iter = self.pipeline.as_ref().map_or_else(
|| EitherIter::A(ModePipeline::test_cases()),
|p| EitherIter::B(std::iter::once(*p)),
);
let optimize_flag_setting = self.optimize_flag.map(|flag| {
if flag {
ModeOptimizerSetting::M3
} else {
ModeOptimizerSetting::M0
}
});
let optimize_flag_iter = match optimize_flag_setting {
Some(setting) => EitherIter::A(std::iter::once(setting)),
None => EitherIter::B(ModeOptimizerSetting::test_cases()),
};
let optimize_settings_iter = self.optimize_setting.as_ref().map_or_else(
|| EitherIter::A(optimize_flag_iter),
|s| EitherIter::B(std::iter::once(*s)),
);
pipeline_iter.flat_map(move |pipeline| {
optimize_settings_iter
.clone()
.map(move |optimize_setting| Mode {
pipeline,
optimize_setting,
version: self.version.clone(),
})
})
}
/// Return a set of [`Mode`]s that correspond to the given [`ParsedMode`]s.
/// This avoids any duplicate entries.
pub fn many_to_modes<'a>(
parsed: impl Iterator<Item = &'a ParsedMode>,
) -> impl Iterator<Item = Mode> {
let modes: HashSet<_> = parsed.flat_map(|p| p.to_modes()).collect();
modes.into_iter()
}
}
/// An iterator that could be either of two iterators.
#[derive(Clone, Debug)]
enum EitherIter<A, B> {
A(A),
B(B),
}
impl<A, B> Iterator for EitherIter<A, B>
where
A: Iterator,
B: Iterator<Item = A::Item>,
{
type Item = A::Item;
fn next(&mut self) -> Option<Self::Item> {
match self {
EitherIter::A(iter) => iter.next(),
EitherIter::B(iter) => iter.next(),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parsed_mode_from_str() {
let strings = vec![
("Mz", "Mz"),
("Y", "Y"),
("Y+", "Y+"),
("Y-", "Y-"),
("E", "E"),
("E+", "E+"),
("E-", "E-"),
("Y M0", "Y M0"),
("Y M1", "Y M1"),
("Y M2", "Y M2"),
("Y M3", "Y M3"),
("Y Ms", "Y Ms"),
("Y Mz", "Y Mz"),
("E M0", "E M0"),
("E M1", "E M1"),
("E M2", "E M2"),
("E M3", "E M3"),
("E Ms", "E Ms"),
("E Mz", "E Mz"),
// When stringifying semver again, 0.8.0 becomes ^0.8.0 (same meaning)
("Y 0.8.0", "Y ^0.8.0"),
("E+ 0.8.0", "E+ ^0.8.0"),
("Y M3 >=0.8.0", "Y M3 >=0.8.0"),
("E Mz <0.7.0", "E Mz <0.7.0"),
// We can parse +- _and_ M1/M2 but the latter takes priority.
("Y+ M1 0.8.0", "Y+ M1 ^0.8.0"),
("E- M2 0.7.0", "E- M2 ^0.7.0"),
// We don't see this in the wild but it is parsed.
("<=0.8", "<=0.8"),
];
for (actual, expected) in strings {
let parsed = ParsedMode::from_str(actual)
.unwrap_or_else(|_| panic!("Failed to parse mode string '{actual}'"));
assert_eq!(
expected,
parsed.to_string(),
"Mode string '{actual}' did not parse to '{expected}': got '{parsed}'"
);
}
}
#[test]
fn test_parsed_mode_to_test_modes() {
let strings = vec![
("Mz", vec!["Y Mz", "E Mz"]),
("Y", vec!["Y M0", "Y M3"]),
("E", vec!["E M0", "E M3"]),
("Y+", vec!["Y M3"]),
("Y-", vec!["Y M0"]),
("Y <=0.8", vec!["Y M0 <=0.8", "Y M3 <=0.8"]),
(
"<=0.8",
vec!["Y M0 <=0.8", "Y M3 <=0.8", "E M0 <=0.8", "E M3 <=0.8"],
),
];
for (actual, expected) in strings {
let parsed = ParsedMode::from_str(actual)
.unwrap_or_else(|_| panic!("Failed to parse mode string '{actual}'"));
let expected_set: HashSet<_> = expected.into_iter().map(|s| s.to_owned()).collect();
let actual_set: HashSet<_> = parsed.to_modes().map(|m| m.to_string()).collect();
assert_eq!(
expected_set, actual_set,
"Mode string '{actual}' did not expand to '{expected_set:?}': got '{actual_set:?}'"
);
}
} }
} }
+14 -7
@@ -6,7 +6,7 @@ use alloy::primitives::{Address, BlockHash, BlockNumber, BlockTimestamp, ChainId
use alloy_primitives::TxHash; use alloy_primitives::TxHash;
use anyhow::Result; use anyhow::Result;
use crate::metadata::ContractInstance; use crate::metadata::{ContractIdent, ContractInstance};
/// A trait of the interface that resolvers are required to implement to be used by the resolution logic that this /// crate implements to go from string calldata into the bytes calldata.
/// crate implements to go from string calldata and into the bytes calldata. /// crate implements to go from string calldata and into the bytes calldata.
@@ -48,7 +48,7 @@ pub trait ResolverApi {
/// Contextual information required by the code that's performing the resolution. /// Contextual information required by the code that's performing the resolution.
pub struct ResolutionContext<'a> { pub struct ResolutionContext<'a> {
/// When provided the contracts provided here will be used for resolutions. /// When provided the contracts provided here will be used for resolutions.
deployed_contracts: Option<&'a HashMap<ContractInstance, (Address, JsonAbi)>>, deployed_contracts: Option<&'a HashMap<ContractInstance, (ContractIdent, Address, JsonAbi)>>,
/// When provided the variables in here will be used for performing resolutions. /// When provided the variables in here will be used for performing resolutions.
variables: Option<&'a HashMap<String, U256>>, variables: Option<&'a HashMap<String, U256>>,
@@ -66,7 +66,9 @@ impl<'a> ResolutionContext<'a> {
} }
pub fn new_from_parts( pub fn new_from_parts(
deployed_contracts: impl Into<Option<&'a HashMap<ContractInstance, (Address, JsonAbi)>>>, deployed_contracts: impl Into<
Option<&'a HashMap<ContractInstance, (ContractIdent, Address, JsonAbi)>>,
>,
variables: impl Into<Option<&'a HashMap<String, U256>>>, variables: impl Into<Option<&'a HashMap<String, U256>>>,
block_number: impl Into<Option<&'a BlockNumber>>, block_number: impl Into<Option<&'a BlockNumber>>,
transaction_hash: impl Into<Option<&'a TxHash>>, transaction_hash: impl Into<Option<&'a TxHash>>,
@@ -81,7 +83,9 @@ impl<'a> ResolutionContext<'a> {
pub fn with_deployed_contracts( pub fn with_deployed_contracts(
mut self, mut self,
deployed_contracts: impl Into<Option<&'a HashMap<ContractInstance, (Address, JsonAbi)>>>, deployed_contracts: impl Into<
Option<&'a HashMap<ContractInstance, (ContractIdent, Address, JsonAbi)>>,
>,
) -> Self { ) -> Self {
self.deployed_contracts = deployed_contracts.into(); self.deployed_contracts = deployed_contracts.into();
self self
@@ -122,17 +126,20 @@ impl<'a> ResolutionContext<'a> {
} }
} }
pub fn deployed_contract(&self, instance: &ContractInstance) -> Option<&(Address, JsonAbi)> { pub fn deployed_contract(
&self,
instance: &ContractInstance,
) -> Option<&(ContractIdent, Address, JsonAbi)> {
self.deployed_contracts self.deployed_contracts
.and_then(|deployed_contracts| deployed_contracts.get(instance)) .and_then(|deployed_contracts| deployed_contracts.get(instance))
} }
pub fn deployed_contract_address(&self, instance: &ContractInstance) -> Option<&Address> { pub fn deployed_contract_address(&self, instance: &ContractInstance) -> Option<&Address> {
self.deployed_contract(instance).map(|(a, _)| a) self.deployed_contract(instance).map(|(_, a, _)| a)
} }
pub fn deployed_contract_abi(&self, instance: &ContractInstance) -> Option<&JsonAbi> { pub fn deployed_contract_abi(&self, instance: &ContractInstance) -> Option<&JsonAbi> {
self.deployed_contract(instance).map(|(_, a)| a) self.deployed_contract(instance).map(|(_, _, a)| a)
} }
pub fn variable(&self, name: impl AsRef<str>) -> Option<&U256> { pub fn variable(&self, name: impl AsRef<str>) -> Option<&U256> {
+3
@@ -11,3 +11,6 @@ rust-version.workspace = true
[dependencies] [dependencies]
alloy = { workspace = true } alloy = { workspace = true }
anyhow = { workspace = true } anyhow = { workspace = true }
[lints]
workspace = true
+4
@@ -14,6 +14,7 @@ alloy = { workspace = true }
tracing = { workspace = true } tracing = { workspace = true }
tokio = { workspace = true } tokio = { workspace = true }
revive-common = { workspace = true }
revive-dt-common = { workspace = true } revive-dt-common = { workspace = true }
revive-dt-config = { workspace = true } revive-dt-config = { workspace = true }
revive-dt-format = { workspace = true } revive-dt-format = { workspace = true }
@@ -28,3 +29,6 @@ sp-runtime = { workspace = true }
[dev-dependencies] [dev-dependencies]
temp-dir = { workspace = true } temp-dir = { workspace = true }
tokio = { workspace = true } tokio = { workspace = true }
[lints]
workspace = true
+95 -82
@@ -32,9 +32,13 @@ use alloy::{
signers::local::PrivateKeySigner, signers::local::PrivateKeySigner,
}; };
use anyhow::Context; use anyhow::Context;
use tracing::{Instrument, Level}; use revive_common::EVMVersion;
use tracing::{Instrument, instrument};
use revive_dt_common::{fs::clear_directory, futures::poll}; use revive_dt_common::{
fs::clear_directory,
futures::{PollingWaitBehavior, poll},
};
use revive_dt_config::Arguments; use revive_dt_config::Arguments;
use revive_dt_format::traits::ResolverApi; use revive_dt_format::traits::ResolverApi;
use revive_dt_node_interaction::EthereumNode; use revive_dt_node_interaction::EthereumNode;
@@ -51,6 +55,7 @@ static NODE_COUNT: AtomicU32 = AtomicU32::new(0);
/// ///
/// Prunes the child process and the base directory on drop. /// Prunes the child process and the base directory on drop.
#[derive(Debug)] #[derive(Debug)]
#[allow(clippy::type_complexity)]
pub struct GethNode { pub struct GethNode {
connection_string: String, connection_string: String,
base_directory: PathBuf, base_directory: PathBuf,
@@ -59,10 +64,10 @@ pub struct GethNode {
geth: PathBuf, geth: PathBuf,
id: u32, id: u32,
handle: Option<Child>, handle: Option<Child>,
network_id: u64,
start_timeout: u64, start_timeout: u64,
wallet: EthereumWallet, wallet: Arc<EthereumWallet>,
nonce_manager: CachedNonceManager, nonce_manager: CachedNonceManager,
chain_id_filler: ChainIdFiller,
/// This vector stores [`File`] objects that we use for logging which we want to flush when the /// This vector stores [`File`] objects that we use for logging which we want to flush when the
/// node object is dropped. We do not store them in a structured fashion at the moment (in /// node object is dropped. We do not store them in a structured fashion at the moment (in
/// separate fields) as the logic that we need to apply to them is all the same regardless of /// separate fields) as the logic that we need to apply to them is all the same regardless of
@@ -91,7 +96,7 @@ impl GethNode {
const TRACE_POLLING_DURATION: Duration = Duration::from_secs(60); const TRACE_POLLING_DURATION: Duration = Duration::from_secs(60);
/// Create the node directory and call `geth init` to configure the genesis. /// Create the node directory and call `geth init` to configure the genesis.
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
fn init(&mut self, genesis: String) -> anyhow::Result<&mut Self> { fn init(&mut self, genesis: String) -> anyhow::Result<&mut Self> {
let _ = clear_directory(&self.base_directory); let _ = clear_directory(&self.base_directory);
let _ = clear_directory(&self.logs_directory); let _ = clear_directory(&self.logs_directory);
@@ -141,7 +146,7 @@ impl GethNode {
/// Spawn the go-ethereum node child process. /// Spawn the go-ethereum node child process.
/// ///
/// [Instance::init] must be called prior. /// [Instance::init] must be called prior.
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
fn spawn_process(&mut self) -> anyhow::Result<&mut Self> { fn spawn_process(&mut self) -> anyhow::Result<&mut Self> {
// This is the `OpenOptions` that we wish to use for all of the log files that we will be // This is the `OpenOptions` that we wish to use for all of the log files that we will be
// opening in this method. We need to construct it in this way to: // opening in this method. We need to construct it in this way to:
@@ -164,8 +169,6 @@ impl GethNode {
.arg(&self.data_directory) .arg(&self.data_directory)
.arg("--ipcpath") .arg("--ipcpath")
.arg(&self.connection_string) .arg(&self.connection_string)
.arg("--networkid")
.arg(self.network_id.to_string())
.arg("--nodiscover") .arg("--nodiscover")
.arg("--maxpeers") .arg("--maxpeers")
.arg("0") .arg("0")
@@ -199,7 +202,7 @@ impl GethNode {
/// Wait for the go-ethereum node child process to get ready. /// Wait for the go-ethereum node child process to get ready.
/// ///
/// [Instance::spawn_process] must be called prior. /// [Instance::spawn_process] must be called prior.
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
fn wait_ready(&mut self) -> anyhow::Result<&mut Self> { fn wait_ready(&mut self) -> anyhow::Result<&mut Self> {
let start_time = Instant::now(); let start_time = Instant::now();
@@ -212,6 +215,7 @@ impl GethNode {
let maximum_wait_time = Duration::from_millis(self.start_timeout); let maximum_wait_time = Duration::from_millis(self.start_timeout);
let mut stderr = BufReader::new(logs_file).lines(); let mut stderr = BufReader::new(logs_file).lines();
let mut lines = vec![];
loop { loop {
if let Some(Ok(line)) = stderr.next() { if let Some(Ok(line)) = stderr.next() {
if line.contains(Self::ERROR_MARKER) { if line.contains(Self::ERROR_MARKER) {
@@ -220,86 +224,87 @@ impl GethNode {
if line.contains(Self::READY_MARKER) { if line.contains(Self::READY_MARKER) {
return Ok(self); return Ok(self);
} }
lines.push(line);
} }
if Instant::now().duration_since(start_time) > maximum_wait_time { if Instant::now().duration_since(start_time) > maximum_wait_time {
anyhow::bail!("Timeout in starting geth"); anyhow::bail!(
"Timeout in starting geth: took longer than {}ms. stdout:\n\n{}\n",
self.start_timeout,
lines.join("\n")
);
} }
} }
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id), level = Level::TRACE)] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
fn geth_stdout_log_file_path(&self) -> PathBuf { fn geth_stdout_log_file_path(&self) -> PathBuf {
self.logs_directory.join(Self::GETH_STDOUT_LOG_FILE_NAME) self.logs_directory.join(Self::GETH_STDOUT_LOG_FILE_NAME)
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id), level = Level::TRACE)] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
fn geth_stderr_log_file_path(&self) -> PathBuf { fn geth_stderr_log_file_path(&self) -> PathBuf {
self.logs_directory.join(Self::GETH_STDERR_LOG_FILE_NAME) self.logs_directory.join(Self::GETH_STDERR_LOG_FILE_NAME)
} }
fn provider( async fn provider(
&self, &self,
) -> impl Future< ) -> anyhow::Result<FillProvider<impl TxFiller<Ethereum>, impl Provider<Ethereum>, Ethereum>>
Output = anyhow::Result< {
FillProvider<impl TxFiller<Ethereum>, impl Provider<Ethereum>, Ethereum>, ProviderBuilder::new()
>, .disable_recommended_fillers()
> + 'static { .filler(FallbackGasFiller::new(
let connection_string = self.connection_string(); 25_000_000,
let wallet = self.wallet.clone(); 1_000_000_000,
1_000_000_000,
// Note: We would like all providers to make use of the same nonce manager so that we have ))
// monotonically increasing nonces that are cached. The cached nonce manager uses Arc's in .filler(self.chain_id_filler.clone())
// its implementation and therefore it means that when we clone it then it still references .filler(NonceFiller::new(self.nonce_manager.clone()))
// the same state. .wallet(self.wallet.clone())
let nonce_manager = self.nonce_manager.clone(); .connect(&self.connection_string)
.await
Box::pin(async move { .map_err(Into::into)
ProviderBuilder::new()
.disable_recommended_fillers()
.filler(FallbackGasFiller::new(500_000_000, 500_000_000, 1))
.filler(ChainIdFiller::default())
.filler(NonceFiller::new(nonce_manager))
.wallet(wallet)
.connect(&connection_string)
.await
.map_err(Into::into)
})
} }
} }
impl EthereumNode for GethNode { impl EthereumNode for GethNode {
#[tracing::instrument(level = "info", skip_all, fields(geth_node_id = self.id))] #[instrument(
level = "info",
skip_all,
fields(geth_node_id = self.id, connection_string = self.connection_string),
err,
)]
async fn execute_transaction( async fn execute_transaction(
&self, &self,
transaction: TransactionRequest, transaction: TransactionRequest,
) -> anyhow::Result<alloy::rpc::types::TransactionReceipt> { ) -> anyhow::Result<alloy::rpc::types::TransactionReceipt> {
let span = tracing::debug_span!("Submitting transaction", ?transaction); let provider = self.provider().await?;
let _guard = span.enter();
let provider = Arc::new(self.provider().await?); let pending_transaction = provider.send_transaction(transaction).await.inspect_err(
let transaction_hash = *provider.send_transaction(transaction).await?.tx_hash(); |err| tracing::error!(%err, "Encountered an error when submitting the transaction"),
)?;
let transaction_hash = *pending_transaction.tx_hash();
// The following is a fix for the "transaction indexing is in progress" error that we // The following is a fix for the "transaction indexing is in progress" error that we used
// used to get. You can find more information on this in the following GH issue in geth // to get. You can find more information on this in the following GH issue in geth
// https://github.com/ethereum/go-ethereum/issues/28877. To summarize what's going on, // https://github.com/ethereum/go-ethereum/issues/28877. To summarize what's going on,
// before we can get the receipt of the transaction it needs to have been indexed by the // before we can get the receipt of the transaction it needs to have been indexed by the
// node's indexer. Just because the transaction has been confirmed it doesn't mean that // node's indexer. Just because the transaction has been confirmed it doesn't mean that it
// it has been indexed. When we call alloy's `get_receipt` it checks if the transaction // has been indexed. When we call alloy's `get_receipt` it checks if the transaction was
// was confirmed. If it has been, then it will call `eth_getTransactionReceipt` method // confirmed. If it has been, then it will call `eth_getTransactionReceipt` method which
// which _might_ return the above error if the tx has not yet been indexed yet. So, we // _might_ return the above error if the tx has not yet been indexed yet. So, we need to
// need to implement a retry mechanism for the receipt to keep retrying to get it until // implement a retry mechanism for the receipt to keep retrying to get it until it
// it eventually works, but we only do that if the error we get back is the "transaction // eventually works, but we only do that if the error we get back is the "transaction
// indexing is in progress" error or if the receipt is None. // indexing is in progress" error or if the receipt is None.
// //
// Getting the transaction indexed and taking a receipt can take a long time especially // Getting the transaction indexed and taking a receipt can take a long time especially when
// when a lot of transactions are being submitted to the node. Thus, while initially we // a lot of transactions are being submitted to the node. Thus, while initially we only
// only allowed for 60 seconds of waiting with a 1 second delay in polling, we need to // allowed for 60 seconds of waiting with a 1 second delay in polling, we need to allow for
// allow for a larger wait time. Therefore, in here we allow for 5 minutes of waiting // a larger wait time. Therefore, in here we allow for 5 minutes of waiting with exponential
// with exponential backoff each time we attempt to get the receipt and find that it's // backoff each time we attempt to get the receipt and find that it's not available.
// not available. let provider = Arc::new(provider);
poll( poll(
Self::RECEIPT_POLLING_DURATION, Self::RECEIPT_POLLING_DURATION,
Default::default(), PollingWaitBehavior::Constant(Duration::from_millis(200)),
move || { move || {
let provider = provider.clone(); let provider = provider.clone();
async move { async move {
@@ -324,7 +329,7 @@ impl EthereumNode for GethNode {
.await .await
} }
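The receipt retry described above uses the crate's `poll` helper with a constant 200ms wait. An illustrative, standalone version of the same retry pattern (this is not the signature of `revive_dt_common::futures::poll`, just the idea behind it):

```rust
use std::time::{Duration, Instant};

async fn wait_for<T, F, Fut>(deadline: Duration, mut attempt: F) -> anyhow::Result<T>
where
    F: FnMut() -> Fut,
    Fut: std::future::Future<Output = anyhow::Result<Option<T>>>,
{
    let start = Instant::now();
    loop {
        // `Ok(None)` means "not indexed yet, try again"; errors propagate immediately.
        if let Some(value) = attempt().await? {
            return Ok(value);
        }
        if start.elapsed() > deadline {
            anyhow::bail!("timed out waiting for the transaction receipt");
        }
        tokio::time::sleep(Duration::from_millis(200)).await;
    }
}
```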
#[tracing::instrument(level = "info", skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
async fn trace_transaction( async fn trace_transaction(
&self, &self,
transaction: &TransactionReceipt, transaction: &TransactionReceipt,
@@ -333,7 +338,7 @@ impl EthereumNode for GethNode {
let provider = Arc::new(self.provider().await?); let provider = Arc::new(self.provider().await?);
poll( poll(
Self::TRACE_POLLING_DURATION, Self::TRACE_POLLING_DURATION,
Default::default(), PollingWaitBehavior::Constant(Duration::from_millis(200)),
move || { move || {
let provider = provider.clone(); let provider = provider.clone();
let trace_options = trace_options.clone(); let trace_options = trace_options.clone();
@@ -357,7 +362,7 @@ impl EthereumNode for GethNode {
.await .await
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
async fn state_diff(&self, transaction: &TransactionReceipt) -> anyhow::Result<DiffMode> { async fn state_diff(&self, transaction: &TransactionReceipt) -> anyhow::Result<DiffMode> {
let trace_options = GethDebugTracingOptions::prestate_tracer(PreStateConfig { let trace_options = GethDebugTracingOptions::prestate_tracer(PreStateConfig {
diff_mode: Some(true), diff_mode: Some(true),
@@ -374,7 +379,7 @@ impl EthereumNode for GethNode {
} }
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
async fn balance_of(&self, address: Address) -> anyhow::Result<U256> { async fn balance_of(&self, address: Address) -> anyhow::Result<U256> {
self.provider() self.provider()
.await? .await?
@@ -383,7 +388,7 @@ impl EthereumNode for GethNode {
.map_err(Into::into) .map_err(Into::into)
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
async fn latest_state_proof( async fn latest_state_proof(
&self, &self,
address: Address, address: Address,
@@ -399,7 +404,7 @@ impl EthereumNode for GethNode {
} }
impl ResolverApi for GethNode { impl ResolverApi for GethNode {
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
async fn chain_id(&self) -> anyhow::Result<alloy::primitives::ChainId> { async fn chain_id(&self) -> anyhow::Result<alloy::primitives::ChainId> {
self.provider() self.provider()
.await? .await?
@@ -408,7 +413,7 @@ impl ResolverApi for GethNode {
.map_err(Into::into) .map_err(Into::into)
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
async fn transaction_gas_price(&self, tx_hash: &TxHash) -> anyhow::Result<u128> { async fn transaction_gas_price(&self, tx_hash: &TxHash) -> anyhow::Result<u128> {
self.provider() self.provider()
.await? .await?
@@ -418,7 +423,7 @@ impl ResolverApi for GethNode {
.map(|receipt| receipt.effective_gas_price) .map(|receipt| receipt.effective_gas_price)
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
async fn block_gas_limit(&self, number: BlockNumberOrTag) -> anyhow::Result<u128> { async fn block_gas_limit(&self, number: BlockNumberOrTag) -> anyhow::Result<u128> {
self.provider() self.provider()
.await? .await?
@@ -428,7 +433,7 @@ impl ResolverApi for GethNode {
.map(|block| block.header.gas_limit as _) .map(|block| block.header.gas_limit as _)
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
async fn block_coinbase(&self, number: BlockNumberOrTag) -> anyhow::Result<Address> { async fn block_coinbase(&self, number: BlockNumberOrTag) -> anyhow::Result<Address> {
self.provider() self.provider()
.await? .await?
@@ -438,7 +443,7 @@ impl ResolverApi for GethNode {
.map(|block| block.header.beneficiary) .map(|block| block.header.beneficiary)
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
async fn block_difficulty(&self, number: BlockNumberOrTag) -> anyhow::Result<U256> { async fn block_difficulty(&self, number: BlockNumberOrTag) -> anyhow::Result<U256> {
self.provider() self.provider()
.await? .await?
@@ -448,7 +453,7 @@ impl ResolverApi for GethNode {
.map(|block| U256::from_be_bytes(block.header.mix_hash.0)) .map(|block| U256::from_be_bytes(block.header.mix_hash.0))
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
async fn block_base_fee(&self, number: BlockNumberOrTag) -> anyhow::Result<u64> { async fn block_base_fee(&self, number: BlockNumberOrTag) -> anyhow::Result<u64> {
self.provider() self.provider()
.await? .await?
@@ -463,7 +468,7 @@ impl ResolverApi for GethNode {
}) })
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
async fn block_hash(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockHash> { async fn block_hash(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockHash> {
self.provider() self.provider()
.await? .await?
@@ -473,7 +478,7 @@ impl ResolverApi for GethNode {
.map(|block| block.header.hash) .map(|block| block.header.hash)
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
async fn block_timestamp(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockTimestamp> { async fn block_timestamp(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockTimestamp> {
self.provider() self.provider()
.await? .await?
@@ -483,7 +488,7 @@ impl ResolverApi for GethNode {
.map(|block| block.header.timestamp) .map(|block| block.header.timestamp)
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
async fn last_block_number(&self) -> anyhow::Result<BlockNumber> { async fn last_block_number(&self) -> anyhow::Result<BlockNumber> {
self.provider() self.provider()
.await? .await?
@@ -516,22 +521,27 @@ impl Node for GethNode {
geth: config.geth.clone(), geth: config.geth.clone(),
id, id,
handle: None, handle: None,
network_id: config.network_id,
start_timeout: config.geth_start_timeout, start_timeout: config.geth_start_timeout,
wallet, wallet: Arc::new(wallet),
chain_id_filler: Default::default(),
nonce_manager: Default::default(),
// We know that we only need to be storing 2 files so we can specify that when creating // We know that we only need to be storing 2 files so we can specify that when creating
// the vector. It's the stdout and stderr of the geth node. // the vector. It's the stdout and stderr of the geth node.
logs_file_to_flush: Vec::with_capacity(2), logs_file_to_flush: Vec::with_capacity(2),
nonce_manager: Default::default(),
} }
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
fn id(&self) -> usize {
self.id as _
}
#[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
fn connection_string(&self) -> String { fn connection_string(&self) -> String {
self.connection_string.clone() self.connection_string.clone()
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
fn shutdown(&mut self) -> anyhow::Result<()> { fn shutdown(&mut self) -> anyhow::Result<()> {
// Terminate the processes in a graceful manner to allow for the output to be flushed. // Terminate the processes in a graceful manner to allow for the output to be flushed.
if let Some(mut child) = self.handle.take() { if let Some(mut child) = self.handle.take() {
@@ -553,13 +563,13 @@ impl Node for GethNode {
Ok(()) Ok(())
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
fn spawn(&mut self, genesis: String) -> anyhow::Result<()> { fn spawn(&mut self, genesis: String) -> anyhow::Result<()> {
self.init(genesis)?.spawn_process()?; self.init(genesis)?.spawn_process()?;
Ok(()) Ok(())
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
fn version(&self) -> anyhow::Result<String> { fn version(&self) -> anyhow::Result<String> {
let output = Command::new(&self.geth) let output = Command::new(&self.geth)
.arg("--version") .arg("--version")
@@ -572,17 +582,20 @@ impl Node for GethNode {
Ok(String::from_utf8_lossy(&output).into()) Ok(String::from_utf8_lossy(&output).into())
} }
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] fn matches_target(targets: Option<&[String]>) -> bool {
fn matches_target(&self, targets: Option<&[String]>) -> bool {
match targets { match targets {
None => true, None => true,
Some(targets) => targets.iter().any(|str| str.as_str() == "evm"), Some(targets) => targets.iter().any(|str| str.as_str() == "evm"),
} }
} }
fn evm_version() -> EVMVersion {
EVMVersion::Cancun
}
} }
impl Drop for GethNode { impl Drop for GethNode {
#[tracing::instrument(skip_all, fields(geth_node_id = self.id))] #[instrument(level = "info", skip_all, fields(geth_node_id = self.id))]
fn drop(&mut self) { fn drop(&mut self) {
self.shutdown().expect("Failed to shutdown") self.shutdown().expect("Failed to shutdown")
} }
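The attribute change repeated throughout this file, from `#[tracing::instrument(skip_all, ...)]` to `#[instrument(level = "info", skip_all, ...)]`, only switches to the imported macro name and spells out the span level (INFO is already the default). A minimal sketch of the new form, assuming the `tracing` crate; the function, its arguments, and the URL are illustrative only:

```rust
use tracing::instrument;

// Same INFO-level span as before: `skip_all` keeps the arguments out of the
// span, and `fields(...)` records only what we explicitly name.
#[instrument(level = "info", skip_all, fields(geth_node_id = id))]
fn connection_string(id: usize, url: &str) -> String {
    url.to_string()
}

fn main() {
    // Without a subscriber installed the span is simply discarded, so this
    // compiles and runs on its own.
    let _ = connection_string(0, "http://127.0.0.1:8545");
}
```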
+39 -74
@@ -3,7 +3,10 @@ use std::{
io::{BufRead, Write}, io::{BufRead, Write},
path::{Path, PathBuf}, path::{Path, PathBuf},
process::{Child, Command, Stdio}, process::{Child, Command, Stdio},
sync::atomic::{AtomicU32, Ordering}, sync::{
Arc,
atomic::{AtomicU32, Ordering},
},
time::Duration, time::Duration,
}; };
@@ -32,13 +35,13 @@ use alloy::{
signers::local::PrivateKeySigner, signers::local::PrivateKeySigner,
}; };
use anyhow::Context; use anyhow::Context;
use revive_common::EVMVersion;
use revive_dt_common::fs::clear_directory; use revive_dt_common::fs::clear_directory;
use revive_dt_format::traits::ResolverApi; use revive_dt_format::traits::ResolverApi;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::{Value as JsonValue, json}; use serde_json::{Value as JsonValue, json};
use sp_core::crypto::Ss58Codec; use sp_core::crypto::Ss58Codec;
use sp_runtime::AccountId32; use sp_runtime::AccountId32;
use tracing::Level;
use revive_dt_config::Arguments; use revive_dt_config::Arguments;
use revive_dt_node_interaction::EthereumNode; use revive_dt_node_interaction::EthereumNode;
@@ -53,12 +56,13 @@ pub struct KitchensinkNode {
substrate_binary: PathBuf, substrate_binary: PathBuf,
eth_proxy_binary: PathBuf, eth_proxy_binary: PathBuf,
rpc_url: String, rpc_url: String,
wallet: EthereumWallet,
base_directory: PathBuf, base_directory: PathBuf,
logs_directory: PathBuf, logs_directory: PathBuf,
process_substrate: Option<Child>, process_substrate: Option<Child>,
process_proxy: Option<Child>, process_proxy: Option<Child>,
wallet: Arc<EthereumWallet>,
nonce_manager: CachedNonceManager, nonce_manager: CachedNonceManager,
chain_id_filler: ChainIdFiller,
/// This vector stores [`File`] objects that we use for logging which we want to flush when the /// This vector stores [`File`] objects that we use for logging which we want to flush when the
/// node object is dropped. We do not store them in a structured fashion at the moment (in /// node object is dropped. We do not store them in a structured fashion at the moment (in
/// separate fields) as the logic that we need to apply to them is all the same regardless of /// separate fields) as the logic that we need to apply to them is all the same regardless of
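A minimal sketch of the flush-on-drop pattern this field enables, assuming a plain `Vec<File>`; the `LoggingNode` type is a placeholder, not one of the crate's node types:

```rust
use std::{fs::File, io::Write};

// Placeholder node: the log files can live in one unstructured Vec because,
// on drop, every file gets exactly the same treatment.
struct LoggingNode {
    logs_file_to_flush: Vec<File>,
}

impl Drop for LoggingNode {
    fn drop(&mut self) {
        for file in self.logs_file_to_flush.iter_mut() {
            // Best-effort flush: errors during teardown are ignored.
            let _ = file.flush();
        }
    }
}

fn main() {
    let node = LoggingNode { logs_file_to_flush: Vec::new() };
    drop(node); // flushes (here: nothing) before the files are closed
}
```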
@@ -86,7 +90,6 @@ impl KitchensinkNode {
const PROXY_STDOUT_LOG_FILE_NAME: &str = "proxy_stdout.log"; const PROXY_STDOUT_LOG_FILE_NAME: &str = "proxy_stdout.log";
const PROXY_STDERR_LOG_FILE_NAME: &str = "proxy_stderr.log"; const PROXY_STDERR_LOG_FILE_NAME: &str = "proxy_stderr.log";
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
fn init(&mut self, genesis: &str) -> anyhow::Result<&mut Self> { fn init(&mut self, genesis: &str) -> anyhow::Result<&mut Self> {
let _ = clear_directory(&self.base_directory); let _ = clear_directory(&self.base_directory);
let _ = clear_directory(&self.logs_directory); let _ = clear_directory(&self.logs_directory);
@@ -159,7 +162,6 @@ impl KitchensinkNode {
Ok(self) Ok(self)
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
fn spawn_process(&mut self) -> anyhow::Result<()> { fn spawn_process(&mut self) -> anyhow::Result<()> {
let substrate_rpc_port = Self::BASE_SUBSTRATE_RPC_PORT + self.id as u16; let substrate_rpc_port = Self::BASE_SUBSTRATE_RPC_PORT + self.id as u16;
let proxy_rpc_port = Self::BASE_PROXY_RPC_PORT + self.id as u16; let proxy_rpc_port = Self::BASE_PROXY_RPC_PORT + self.id as u16;
@@ -213,10 +215,6 @@ impl KitchensinkNode {
Self::SUBSTRATE_READY_MARKER, Self::SUBSTRATE_READY_MARKER,
Duration::from_secs(60), Duration::from_secs(60),
) { ) {
tracing::error!(
?error,
"Failed to start substrate, shutting down gracefully"
);
self.shutdown()?; self.shutdown()?;
return Err(error); return Err(error);
}; };
@@ -242,7 +240,6 @@ impl KitchensinkNode {
Self::ETH_PROXY_READY_MARKER, Self::ETH_PROXY_READY_MARKER,
Duration::from_secs(60), Duration::from_secs(60),
) { ) {
tracing::error!(?error, "Failed to start proxy, shutting down gracefully");
self.shutdown()?; self.shutdown()?;
return Err(error); return Err(error);
}; };
@@ -257,7 +254,6 @@ impl KitchensinkNode {
Ok(()) Ok(())
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
fn extract_balance_from_genesis_file( fn extract_balance_from_genesis_file(
&self, &self,
genesis: &Genesis, genesis: &Genesis,
@@ -306,7 +302,6 @@ impl KitchensinkNode {
} }
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
pub fn eth_rpc_version(&self) -> anyhow::Result<String> { pub fn eth_rpc_version(&self) -> anyhow::Result<String> {
let output = Command::new(&self.eth_proxy_binary) let output = Command::new(&self.eth_proxy_binary)
.arg("--version") .arg("--version")
@@ -319,74 +314,55 @@ impl KitchensinkNode {
Ok(String::from_utf8_lossy(&output).trim().to_string()) Ok(String::from_utf8_lossy(&output).trim().to_string())
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id), level = Level::TRACE)]
fn kitchensink_stdout_log_file_path(&self) -> PathBuf { fn kitchensink_stdout_log_file_path(&self) -> PathBuf {
self.logs_directory self.logs_directory
.join(Self::KITCHENSINK_STDOUT_LOG_FILE_NAME) .join(Self::KITCHENSINK_STDOUT_LOG_FILE_NAME)
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id), level = Level::TRACE)]
fn kitchensink_stderr_log_file_path(&self) -> PathBuf { fn kitchensink_stderr_log_file_path(&self) -> PathBuf {
self.logs_directory self.logs_directory
.join(Self::KITCHENSINK_STDERR_LOG_FILE_NAME) .join(Self::KITCHENSINK_STDERR_LOG_FILE_NAME)
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id), level = Level::TRACE)]
fn proxy_stdout_log_file_path(&self) -> PathBuf { fn proxy_stdout_log_file_path(&self) -> PathBuf {
self.logs_directory.join(Self::PROXY_STDOUT_LOG_FILE_NAME) self.logs_directory.join(Self::PROXY_STDOUT_LOG_FILE_NAME)
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id), level = Level::TRACE)]
fn proxy_stderr_log_file_path(&self) -> PathBuf { fn proxy_stderr_log_file_path(&self) -> PathBuf {
self.logs_directory.join(Self::PROXY_STDERR_LOG_FILE_NAME) self.logs_directory.join(Self::PROXY_STDERR_LOG_FILE_NAME)
} }
fn provider( async fn provider(
&self, &self,
) -> impl Future< ) -> anyhow::Result<
Output = anyhow::Result< FillProvider<
FillProvider< impl TxFiller<KitchenSinkNetwork>,
impl TxFiller<KitchenSinkNetwork>, impl Provider<KitchenSinkNetwork>,
impl Provider<KitchenSinkNetwork>, KitchenSinkNetwork,
KitchenSinkNetwork,
>,
>, >,
> + 'static { > {
let connection_string = self.connection_string(); ProviderBuilder::new()
let wallet = self.wallet.clone(); .disable_recommended_fillers()
.network::<KitchenSinkNetwork>()
// Note: We would like all providers to make use of the same nonce manager so that we have .filler(FallbackGasFiller::new(
// monotonically increasing nonces that are cached. The cached nonce manager uses Arc's in 25_000_000,
// its implementation and therefore it means that when we clone it then it still references 1_000_000_000,
// the same state. 1_000_000_000,
let nonce_manager = self.nonce_manager.clone(); ))
.filler(self.chain_id_filler.clone())
Box::pin(async move { .filler(NonceFiller::new(self.nonce_manager.clone()))
ProviderBuilder::new() .wallet(self.wallet.clone())
.disable_recommended_fillers() .connect(&self.rpc_url)
.network::<KitchenSinkNetwork>() .await
.filler(FallbackGasFiller::new( .map_err(Into::into)
30_000_000,
200_000_000_000,
3_000_000_000,
))
.filler(ChainIdFiller::default())
.filler(NonceFiller::new(nonce_manager))
.wallet(wallet)
.connect(&connection_string)
.await
.map_err(Into::into)
})
} }
} }
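The note above relies on clones of the cached nonce manager sharing one underlying state, which is what makes the nonces monotonically increasing across providers. A self-contained sketch of that idea using a hypothetical `SharedNonces` type rather than alloy's actual `CachedNonceManager`:

```rust
use std::sync::{Arc, Mutex};

// Hypothetical stand-in for a cached nonce manager: cloning shares the same
// counter because it lives behind an Arc.
#[derive(Clone, Default)]
struct SharedNonces {
    next: Arc<Mutex<u64>>,
}

impl SharedNonces {
    fn next_nonce(&self) -> u64 {
        let mut n = self.next.lock().unwrap();
        let current = *n;
        *n += 1;
        current
    }
}

fn main() {
    let manager = SharedNonces::default();
    let clone = manager.clone(); // still points at the same counter
    assert_eq!(manager.next_nonce(), 0);
    assert_eq!(clone.next_nonce(), 1); // monotonically increasing across clones
}
```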
impl EthereumNode for KitchensinkNode { impl EthereumNode for KitchensinkNode {
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn execute_transaction( async fn execute_transaction(
&self, &self,
transaction: alloy::rpc::types::TransactionRequest, transaction: alloy::rpc::types::TransactionRequest,
) -> anyhow::Result<TransactionReceipt> { ) -> anyhow::Result<TransactionReceipt> {
tracing::debug!(?transaction, "Submitting transaction");
let receipt = self let receipt = self
.provider() .provider()
.await? .await?
@@ -394,11 +370,9 @@ impl EthereumNode for KitchensinkNode {
.await? .await?
.get_receipt() .get_receipt()
.await?; .await?;
tracing::info!(?receipt, "Submitted tx to kitchensink");
Ok(receipt) Ok(receipt)
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn trace_transaction( async fn trace_transaction(
&self, &self,
transaction: &TransactionReceipt, transaction: &TransactionReceipt,
@@ -412,7 +386,6 @@ impl EthereumNode for KitchensinkNode {
.await?) .await?)
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn state_diff(&self, transaction: &TransactionReceipt) -> anyhow::Result<DiffMode> { async fn state_diff(&self, transaction: &TransactionReceipt) -> anyhow::Result<DiffMode> {
let trace_options = GethDebugTracingOptions::prestate_tracer(PreStateConfig { let trace_options = GethDebugTracingOptions::prestate_tracer(PreStateConfig {
diff_mode: Some(true), diff_mode: Some(true),
@@ -429,7 +402,6 @@ impl EthereumNode for KitchensinkNode {
} }
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn balance_of(&self, address: Address) -> anyhow::Result<U256> { async fn balance_of(&self, address: Address) -> anyhow::Result<U256> {
self.provider() self.provider()
.await? .await?
@@ -438,7 +410,6 @@ impl EthereumNode for KitchensinkNode {
.map_err(Into::into) .map_err(Into::into)
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn latest_state_proof( async fn latest_state_proof(
&self, &self,
address: Address, address: Address,
@@ -454,7 +425,6 @@ impl EthereumNode for KitchensinkNode {
} }
impl ResolverApi for KitchensinkNode { impl ResolverApi for KitchensinkNode {
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn chain_id(&self) -> anyhow::Result<alloy::primitives::ChainId> { async fn chain_id(&self) -> anyhow::Result<alloy::primitives::ChainId> {
self.provider() self.provider()
.await? .await?
@@ -463,7 +433,6 @@ impl ResolverApi for KitchensinkNode {
.map_err(Into::into) .map_err(Into::into)
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn transaction_gas_price(&self, tx_hash: &TxHash) -> anyhow::Result<u128> { async fn transaction_gas_price(&self, tx_hash: &TxHash) -> anyhow::Result<u128> {
self.provider() self.provider()
.await? .await?
@@ -473,7 +442,6 @@ impl ResolverApi for KitchensinkNode {
.map(|receipt| receipt.effective_gas_price) .map(|receipt| receipt.effective_gas_price)
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn block_gas_limit(&self, number: BlockNumberOrTag) -> anyhow::Result<u128> { async fn block_gas_limit(&self, number: BlockNumberOrTag) -> anyhow::Result<u128> {
self.provider() self.provider()
.await? .await?
@@ -483,7 +451,6 @@ impl ResolverApi for KitchensinkNode {
.map(|block| block.header.gas_limit as _) .map(|block| block.header.gas_limit as _)
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn block_coinbase(&self, number: BlockNumberOrTag) -> anyhow::Result<Address> { async fn block_coinbase(&self, number: BlockNumberOrTag) -> anyhow::Result<Address> {
self.provider() self.provider()
.await? .await?
@@ -493,7 +460,6 @@ impl ResolverApi for KitchensinkNode {
.map(|block| block.header.beneficiary) .map(|block| block.header.beneficiary)
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn block_difficulty(&self, number: BlockNumberOrTag) -> anyhow::Result<U256> { async fn block_difficulty(&self, number: BlockNumberOrTag) -> anyhow::Result<U256> {
self.provider() self.provider()
.await? .await?
@@ -503,7 +469,6 @@ impl ResolverApi for KitchensinkNode {
.map(|block| U256::from_be_bytes(block.header.mix_hash.0)) .map(|block| U256::from_be_bytes(block.header.mix_hash.0))
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn block_base_fee(&self, number: BlockNumberOrTag) -> anyhow::Result<u64> { async fn block_base_fee(&self, number: BlockNumberOrTag) -> anyhow::Result<u64> {
self.provider() self.provider()
.await? .await?
@@ -518,7 +483,6 @@ impl ResolverApi for KitchensinkNode {
}) })
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn block_hash(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockHash> { async fn block_hash(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockHash> {
self.provider() self.provider()
.await? .await?
@@ -528,7 +492,6 @@ impl ResolverApi for KitchensinkNode {
.map(|block| block.header.hash) .map(|block| block.header.hash)
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn block_timestamp(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockTimestamp> { async fn block_timestamp(&self, number: BlockNumberOrTag) -> anyhow::Result<BlockTimestamp> {
self.provider() self.provider()
.await? .await?
@@ -538,7 +501,6 @@ impl ResolverApi for KitchensinkNode {
.map(|block| block.header.timestamp) .map(|block| block.header.timestamp)
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
async fn last_block_number(&self) -> anyhow::Result<BlockNumber> { async fn last_block_number(&self) -> anyhow::Result<BlockNumber> {
self.provider() self.provider()
.await? .await?
@@ -569,11 +531,12 @@ impl Node for KitchensinkNode {
substrate_binary: config.kitchensink.clone(), substrate_binary: config.kitchensink.clone(),
eth_proxy_binary: config.eth_proxy.clone(), eth_proxy_binary: config.eth_proxy.clone(),
rpc_url: String::new(), rpc_url: String::new(),
wallet,
base_directory, base_directory,
logs_directory, logs_directory,
process_substrate: None, process_substrate: None,
process_proxy: None, process_proxy: None,
wallet: Arc::new(wallet),
chain_id_filler: Default::default(),
nonce_manager: Default::default(), nonce_manager: Default::default(),
// We know that we only need to store 4 files, so we can specify that when creating // We know that we only need to store 4 files, so we can specify that when creating
// the vector. They are the stdout and stderr of the substrate-node and the eth-rpc. // the vector. They are the stdout and stderr of the substrate-node and the eth-rpc.
@@ -581,12 +544,14 @@ impl Node for KitchensinkNode {
} }
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))] fn id(&self) -> usize {
self.id as _
}
fn connection_string(&self) -> String { fn connection_string(&self) -> String {
self.rpc_url.clone() self.rpc_url.clone()
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
fn shutdown(&mut self) -> anyhow::Result<()> { fn shutdown(&mut self) -> anyhow::Result<()> {
// Terminate the processes in a graceful manner to allow for the output to be flushed. // Terminate the processes in a graceful manner to allow for the output to be flushed.
if let Some(mut child) = self.process_proxy.take() { if let Some(mut child) = self.process_proxy.take() {
@@ -613,12 +578,10 @@ impl Node for KitchensinkNode {
Ok(()) Ok(())
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
fn spawn(&mut self, genesis: String) -> anyhow::Result<()> { fn spawn(&mut self, genesis: String) -> anyhow::Result<()> {
self.init(&genesis)?.spawn_process() self.init(&genesis)?.spawn_process()
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
fn version(&self) -> anyhow::Result<String> { fn version(&self) -> anyhow::Result<String> {
let output = Command::new(&self.substrate_binary) let output = Command::new(&self.substrate_binary)
.arg("--version") .arg("--version")
@@ -631,17 +594,19 @@ impl Node for KitchensinkNode {
Ok(String::from_utf8_lossy(&output).into()) Ok(String::from_utf8_lossy(&output).into())
} }
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))] fn matches_target(targets: Option<&[String]>) -> bool {
fn matches_target(&self, targets: Option<&[String]>) -> bool {
match targets { match targets {
None => true, None => true,
Some(targets) => targets.iter().any(|str| str.as_str() == "pvm"), Some(targets) => targets.iter().any(|str| str.as_str() == "pvm"),
} }
} }
fn evm_version() -> EVMVersion {
EVMVersion::Cancun
}
} }
impl Drop for KitchensinkNode { impl Drop for KitchensinkNode {
#[tracing::instrument(skip_all, fields(kitchensink_node_id = self.id))]
fn drop(&mut self) { fn drop(&mut self) {
self.shutdown().expect("Failed to shutdown") self.shutdown().expect("Failed to shutdown")
} }
+8 -1
@@ -1,5 +1,6 @@
//! This crate implements the testing nodes. //! This crate implements the testing nodes.
use revive_common::EVMVersion;
use revive_dt_config::Arguments; use revive_dt_config::Arguments;
use revive_dt_node_interaction::EthereumNode; use revive_dt_node_interaction::EthereumNode;
@@ -17,6 +18,9 @@ pub trait Node: EthereumNode {
/// Create a new uninitialized instance. /// Create a new uninitialized instance.
fn new(config: &Arguments) -> Self; fn new(config: &Arguments) -> Self;
/// Returns the identifier of the node.
fn id(&self) -> usize;
/// Spawns a node configured according to the genesis json. /// Spawns a node configured according to the genesis json.
/// ///
/// Blocking until it's ready to accept transactions. /// Blocking until it's ready to accept transactions.
@@ -35,5 +39,8 @@ pub trait Node: EthereumNode {
/// Given a list of targets from the metadata file, this function determines if the metadata /// Given a list of targets from the metadata file, this function determines if the metadata
/// file can be run on this node or not. /// file can be run on this node or not.
fn matches_target(&self, targets: Option<&[String]>) -> bool; fn matches_target(targets: Option<&[String]>) -> bool;
/// Returns the EVM version of the node.
fn evm_version() -> EVMVersion;
} }
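Target matching is now an associated function, and the rule it encodes is the one visible in the geth and kitchensink impls above: no target list means the test runs everywhere, otherwise the node's platform string must appear in the list. A standalone sketch of that rule; the free function and its `platform` parameter are illustrative only:

```rust
// Sketch of the target-matching rule, with the platform string passed in
// explicitly instead of being fixed per node type.
fn matches_target(platform: &str, targets: Option<&[String]>) -> bool {
    match targets {
        None => true,
        Some(targets) => targets.iter().any(|t| t.as_str() == platform),
    }
}

fn main() {
    let only_evm = vec!["evm".to_string()];
    assert!(matches_target("evm", None));
    assert!(matches_target("evm", Some(only_evm.as_slice())));
    assert!(!matches_target("pvm", Some(only_evm.as_slice())));
}
```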
+2 -2
@@ -1,11 +1,12 @@
//! This crate implements concurrent handling of testing nodes. //! This crate implements concurrent handling of testing nodes.
use std::{ use std::{
fs::read_to_string,
sync::atomic::{AtomicUsize, Ordering}, sync::atomic::{AtomicUsize, Ordering},
thread, thread,
}; };
use revive_dt_common::cached_fs::read_to_string;
use anyhow::Context; use anyhow::Context;
use revive_dt_config::Arguments; use revive_dt_config::Arguments;
@@ -62,7 +63,6 @@ where
fn spawn_node<T: Node + Send>(args: &Arguments, genesis: String) -> anyhow::Result<T> { fn spawn_node<T: Node + Send>(args: &Arguments, genesis: String) -> anyhow::Result<T> {
let mut node = T::new(args); let mut node = T::new(args);
tracing::info!("starting node: {}", node.connection_string());
node.spawn(genesis)?; node.spawn(genesis)?;
Ok(node) Ok(node)
} }
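This file swaps `std::fs::read_to_string` for the cached variant from `revive_dt_common::cached_fs`. A minimal sketch of what such a cache could look like, assuming a process-wide map behind a mutex; the crate's real implementation may differ:

```rust
use std::{
    collections::HashMap,
    fs, io,
    path::{Path, PathBuf},
    sync::{Mutex, OnceLock},
};

// Process-wide cache of file contents keyed by path.
static CACHE: OnceLock<Mutex<HashMap<PathBuf, String>>> = OnceLock::new();

// Shaped like `std::fs::read_to_string`, but each file is read from disk at
// most once per process.
pub fn read_to_string(path: impl AsRef<Path>) -> io::Result<String> {
    let path = path.as_ref().to_path_buf();
    let cache = CACHE.get_or_init(|| Mutex::new(HashMap::new()));
    if let Some(contents) = cache.lock().unwrap().get(&path) {
        return Ok(contents.clone());
    }
    let contents = fs::read_to_string(&path)?;
    cache.lock().unwrap().insert(path, contents.clone());
    Ok(contents)
}
```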
+4 -1
@@ -8,11 +8,14 @@ repository.workspace = true
rust-version.workspace = true rust-version.workspace = true
[dependencies] [dependencies]
revive-dt-common = { workspace = true }
revive-dt-config = { workspace = true } revive-dt-config = { workspace = true }
revive-dt-format = { workspace = true } revive-dt-format = { workspace = true }
revive-dt-compiler = { workspace = true } revive-dt-compiler = { workspace = true }
anyhow = { workspace = true } anyhow = { workspace = true }
tracing = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
serde_json = { workspace = true } serde_json = { workspace = true }
[lints]
workspace = true
+9 -10
@@ -12,18 +12,19 @@ use std::{
}; };
use anyhow::Context; use anyhow::Context;
use revive_dt_compiler::{CompilerInput, CompilerOutput}; use serde::Serialize;
use serde::{Deserialize, Serialize};
use revive_dt_common::types::Mode;
use revive_dt_compiler::{CompilerInput, CompilerOutput};
use revive_dt_config::{Arguments, TestingPlatform}; use revive_dt_config::{Arguments, TestingPlatform};
use revive_dt_format::{corpus::Corpus, mode::SolcMode}; use revive_dt_format::corpus::Corpus;
use crate::analyzer::CompilerStatistics; use crate::analyzer::CompilerStatistics;
pub(crate) static REPORTER: OnceLock<Mutex<Report>> = OnceLock::new(); pub(crate) static REPORTER: OnceLock<Mutex<Report>> = OnceLock::new();
/// The `Report` data structure stores all relevant information required for generating reports. /// The `Report` data structure stores all relevant information required for generating reports.
#[derive(Clone, Debug, Default, Serialize, Deserialize)] #[derive(Clone, Debug, Default, Serialize)]
pub struct Report { pub struct Report {
/// The configuration used during the test. /// The configuration used during the test.
pub config: Arguments, pub config: Arguments,
@@ -41,14 +42,14 @@ pub struct Report {
} }
/// Contains a compiled contract. /// Contains a compiled contract.
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize)]
pub struct CompilationTask { pub struct CompilationTask {
/// The observed compiler input. /// The observed compiler input.
pub json_input: CompilerInput, pub json_input: CompilerInput,
/// The observed compiler output. /// The observed compiler output.
pub json_output: Option<CompilerOutput>, pub json_output: Option<CompilerOutput>,
/// The observed compiler mode. /// The observed compiler mode.
pub mode: SolcMode, pub mode: Mode,
/// The observed compiler version. /// The observed compiler version.
pub compiler_version: String, pub compiler_version: String,
/// The observed error, if any. /// The observed error, if any.
@@ -56,7 +57,7 @@ pub struct CompilationTask {
} }
/// Represents a report about a compilation task. /// Represents a report about a compilation task.
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize)]
pub struct CompilationResult { pub struct CompilationResult {
/// The observed compilation task. /// The observed compilation task.
pub compilation_task: CompilationTask, pub compilation_task: CompilationTask,
@@ -65,7 +66,7 @@ pub struct CompilationResult {
} }
/// The [Span] struct indicates the context of what is being reported. /// The [Span] struct indicates the context of what is being reported.
#[derive(Clone, Copy, Debug, Serialize, Deserialize)] #[derive(Clone, Copy, Debug, Serialize)]
pub struct Span { pub struct Span {
/// The corpus index this belongs to. /// The corpus index this belongs to.
corpus: usize, corpus: usize,
@@ -184,8 +185,6 @@ impl Report {
let file = File::create(&path).context(path.display().to_string())?; let file = File::create(&path).context(path.display().to_string())?;
serde_json::to_writer_pretty(file, &self)?; serde_json::to_writer_pretty(file, &self)?;
tracing::info!("report written to: {}", path.display());
Ok(()) Ok(())
} }
} }
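The report is persisted with `serde_json::to_writer_pretty`, as shown above. A hedged sketch of that write path with a stand-in `ToyReport` type; the real `Report` carries the configuration and all collected results:

```rust
use std::{fs::File, path::Path};

use anyhow::Context;
use serde::Serialize;

// Stand-in for the real report type; it only needs `Serialize` to be written.
#[derive(Serialize)]
struct ToyReport {
    passed: usize,
    failed: usize,
}

fn write_report(path: &Path, report: &ToyReport) -> anyhow::Result<()> {
    // Attach the path to any I/O error so failures are easy to locate.
    let file = File::create(path).context(path.display().to_string())?;
    serde_json::to_writer_pretty(file, report)?;
    Ok(())
}

fn main() -> anyhow::Result<()> {
    write_report(Path::new("report.json"), &ToyReport { passed: 10, failed: 0 })
}
```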
+3
@@ -19,3 +19,6 @@ reqwest = { workspace = true }
semver = { workspace = true } semver = { workspace = true }
serde = { workspace = true } serde = { workspace = true }
sha2 = { workspace = true } sha2 = { workspace = true }
[lints]
workspace = true
-3
@@ -39,10 +39,7 @@ pub(crate) async fn get_or_download(
} }
async fn download_to_file(path: &Path, downloader: &SolcDownloader) -> anyhow::Result<()> { async fn download_to_file(path: &Path, downloader: &SolcDownloader) -> anyhow::Result<()> {
tracing::info!("caching file: {}", path.display());
let Ok(file) = File::create_new(path) else { let Ok(file) = File::create_new(path) else {
tracing::debug!("cache file already exists: {}", path.display());
return Ok(()); return Ok(());
}; };
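The early return above leans on `File::create_new`, which fails when the file already exists, so a previously cached artifact short-circuits the download. A sketch of that idiom with a hypothetical `fetch_bytes` closure standing in for the actual downloader:

```rust
use std::{fs::File, io::Write, path::Path};

// If the cache file already exists we keep it; otherwise we create it (only
// when absent) and fill it with freshly downloaded bytes.
fn get_or_download(
    path: &Path,
    fetch_bytes: impl Fn() -> anyhow::Result<Vec<u8>>,
) -> anyhow::Result<()> {
    let Ok(mut file) = File::create_new(path) else {
        // Already cached: nothing to do.
        return Ok(());
    };
    file.write_all(&fetch_bytes()?)?;
    Ok(())
}
```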
-1
@@ -107,7 +107,6 @@ impl SolcDownloader {
/// Errors out if the download fails or the digest of the downloaded file /// Errors out if the download fails or the digest of the downloaded file
/// mismatches the expected digest from the release [List]. /// mismatches the expected digest from the release [List].
pub async fn download(&self) -> anyhow::Result<Vec<u8>> { pub async fn download(&self) -> anyhow::Result<Vec<u8>> {
tracing::info!("downloading solc: {self:?}");
let builds = List::download(self.list).await?.builds; let builds = List::download(self.list).await?.builds;
let build = builds let build = builds
.iter() .iter()