From 94ec34c4d5fe7e0726082c0abbfc814a30ea6c65 Mon Sep 17 00:00:00 2001 From: xermicus Date: Sat, 27 Sep 2025 20:52:22 +0200 Subject: [PATCH] Separate compilation and linker phases (#376) Separate between compilation and linker phases to allow deploy time linking and back-porting era compiler changes to fix #91. Unlinked contract binaries (caused by missing libraries or missing factory dependencies in turn) are emitted as raw ELF object. Few drive by fixes: - #98 - A compiler panic on missing libraries definitions. - Fixes some incosistent type forwarding in JSON output (empty string vs. null object). - Remove the unused fallback for size optimization setting. - Remove the broken `--lvm-ir` mode. - CI workflow fixes. --------- Signed-off-by: Cyrill Leutwiler Signed-off-by: xermicus Signed-off-by: xermicus --- .github/workflows/release.yml | 2 +- .github/workflows/test.yml | 1 + CHANGELOG.md | 9 + Cargo.lock | 1141 ++++++++++------- Cargo.toml | 13 +- clippy.toml | 1 + crates/common/Cargo.toml | 4 +- crates/common/src/contract_identifier.rs | 33 + crates/common/src/extension.rs | 2 +- crates/common/src/keccak256.rs | 68 + crates/common/src/lib.rs | 8 + crates/common/src/metadata.rs | 42 + crates/common/src/object.rs | 61 + crates/common/src/utils.rs | 32 +- crates/integration/src/cases.rs | 40 +- crates/linker/Cargo.toml | 10 +- crates/linker/src/elf.rs | 114 ++ crates/linker/src/lib.rs | 78 +- crates/linker/src/pvm.rs | 10 + crates/llvm-context/Cargo.toml | 3 +- .../llvm-context/src/debug_config/ir_type.rs | 19 +- crates/llvm-context/src/debug_config/mod.rs | 45 +- crates/llvm-context/src/lib.rs | 17 +- crates/llvm-context/src/optimizer/mod.rs | 4 +- .../src/optimizer/settings/mod.rs | 37 +- crates/llvm-context/src/polkavm/const/mod.rs | 6 +- .../src/polkavm/context/argument.rs | 8 +- .../llvm-context/src/polkavm/context/build.rs | 20 +- .../src/polkavm/context/debug_info.rs | 4 +- .../src/polkavm/context/function/mod.rs | 14 +- 
.../context/function/runtime/arithmetics.rs | 99 +- .../context/function/runtime/deploy_code.rs | 29 +- .../polkavm/context/function/runtime/entry.rs | 38 +- .../context/function/runtime/revive.rs | 45 +- .../context/function/runtime/runtime_code.rs | 29 +- .../polkavm/context/function/runtime/sbrk.rs | 26 +- .../src/polkavm/context/global.rs | 11 +- .../llvm-context/src/polkavm/context/mod.rs | 190 +-- .../src/polkavm/context/pointer/heap.rs | 56 +- .../src/polkavm/context/pointer/mod.rs | 29 +- .../src/polkavm/context/pointer/storage.rs | 97 +- .../src/polkavm/context/runtime.rs | 20 +- .../src/polkavm/context/solidity_data.rs | 6 +- .../llvm-context/src/polkavm/context/tests.rs | 11 +- .../src/polkavm/context/yul_data.rs | 59 +- .../src/polkavm/evm/arithmetic.rs | 80 +- .../llvm-context/src/polkavm/evm/bitwise.rs | 86 +- crates/llvm-context/src/polkavm/evm/call.rs | 56 +- .../llvm-context/src/polkavm/evm/calldata.rs | 28 +- .../src/polkavm/evm/comparison.rs | 10 +- .../llvm-context/src/polkavm/evm/context.rs | 113 +- crates/llvm-context/src/polkavm/evm/create.rs | 112 +- crates/llvm-context/src/polkavm/evm/crypto.rs | 10 +- .../llvm-context/src/polkavm/evm/ether_gas.rs | 37 +- crates/llvm-context/src/polkavm/evm/event.rs | 91 +- .../llvm-context/src/polkavm/evm/ext_code.rs | 23 +- .../llvm-context/src/polkavm/evm/immutable.rs | 74 +- crates/llvm-context/src/polkavm/evm/math.rs | 37 +- crates/llvm-context/src/polkavm/evm/memory.rs | 40 +- crates/llvm-context/src/polkavm/evm/return.rs | 31 +- .../src/polkavm/evm/return_data.rs | 19 +- .../llvm-context/src/polkavm/evm/storage.rs | 49 +- crates/llvm-context/src/polkavm/mod.rs | 188 +-- crates/llvm-context/src/target_machine/mod.rs | 4 +- crates/resolc/Cargo.toml | 10 +- crates/resolc/src/build/contract.rs | 195 ++- crates/resolc/src/build/mod.rs | 358 +++++- crates/resolc/src/const.rs | 8 +- crates/resolc/src/lib.rs | 483 ++++--- crates/resolc/src/linker.rs | 96 ++ crates/resolc/src/missing_libraries.rs | 23 +- 
crates/resolc/src/process/input.rs | 42 +- crates/resolc/src/process/mod.rs | 75 +- crates/resolc/src/process/native_process.rs | 128 +- crates/resolc/src/process/worker_process.rs | 75 +- .../resolc/src/project/contract/ir/llvm_ir.rs | 21 - crates/resolc/src/project/contract/ir/mod.rs | 50 +- crates/resolc/src/project/contract/ir/yul.rs | 38 +- .../resolc/src/project/contract/metadata.rs | 13 +- crates/resolc/src/project/contract/mod.rs | 187 +-- crates/resolc/src/project/mod.rs | 445 +++---- crates/resolc/src/resolc/arguments.rs | 262 ++-- crates/resolc/src/resolc/main.rs | 241 ++-- crates/resolc/src/solc/mod.rs | 58 +- crates/resolc/src/solc/solc_compiler.rs | 179 +-- crates/resolc/src/solc/soljson_compiler.rs | 46 +- crates/resolc/src/solc/version.rs | 30 +- crates/resolc/src/test_utils.rs | 709 +++++----- crates/resolc/src/tests/cli/asm.rs | 7 +- crates/resolc/src/tests/cli/combined_json.rs | 48 +- crates/resolc/src/tests/cli/linker.rs | 64 + crates/resolc/src/tests/cli/llvm_arguments.rs | 15 + crates/resolc/src/tests/cli/mod.rs | 6 +- crates/resolc/src/tests/cli/optimization.rs | 47 +- crates/resolc/src/tests/cli/output_dir.rs | 77 +- crates/resolc/src/tests/cli/standard_json.rs | 17 +- crates/resolc/src/tests/cli/usage.rs | 13 +- crates/resolc/src/tests/cli/utils.rs | 38 +- crates/resolc/src/tests/cli/yul.rs | 54 +- .../contracts => data}/solidity/contract.sol | 0 .../src/tests/data/solidity/dependency.sol | 33 + .../standard_json/solidity_contracts.json | 0 .../tests/data/standard_json/yul_solc.json | 29 + .../data/standard_json/yul_solc_urls.json | 31 + crates/resolc/src/tests/data/yul/Test.yul | 16 + .../{cli/contracts => data}/yul/contract.yul | 0 .../{cli/contracts => data}/yul/memset.yul | 0 crates/resolc/src/tests/messages.rs | 251 ---- crates/resolc/src/tests/mod.rs | 13 +- crates/resolc/src/tests/remappings.rs | 50 - crates/resolc/src/tests/runtime_code.rs | 33 - .../tests/{ => unit}/factory_dependency.rs | 42 +- .../src/tests/{ => 
unit}/ir_artifacts.rs | 23 +- .../resolc/src/tests/{ => unit}/libraries.rs | 37 +- crates/resolc/src/tests/unit/messages.rs | 116 ++ crates/resolc/src/tests/unit/mod.rs | 11 + .../resolc/src/tests/{ => unit}/optimizer.rs | 57 +- crates/resolc/src/tests/unit/remappings.rs | 40 + crates/resolc/src/tests/unit/runtime_code.rs | 30 + crates/resolc/src/tests/unit/standard_json.rs | 53 + .../tests/{ => unit}/unsupported_opcodes.rs | 93 +- crates/solc-json-interface/Cargo.toml | 3 +- .../src/combined_json/contract.rs | 84 +- .../src/combined_json/mod.rs | 75 +- .../src/combined_json/selector.rs | 115 ++ crates/solc-json-interface/src/lib.rs | 15 +- .../src/standard_json/input/mod.rs | 235 ++-- .../standard_json/input/settings/libraries.rs | 107 ++ .../src/standard_json/input/settings/mod.rs | 100 +- .../input/settings/optimizer/details.rs | 3 +- .../input/settings/optimizer/mod.rs | 34 +- .../input/settings/optimizer/yul_details.rs | 2 +- .../input/settings/selection/file/flag.rs | 42 +- .../input/settings/selection/file/mod.rs | 120 +- .../input/settings/selection/mod.rs | 98 +- .../standard_json/input/settings/warning.rs | 93 ++ .../src/standard_json/input/source.rs | 98 +- .../standard_json/output/contract/evm/mod.rs | 10 +- .../src/standard_json/output/contract/mod.rs | 65 +- .../output/error/error_handler.rs | 74 ++ .../output/error/mapped_location.rs | 119 ++ .../src/standard_json/output/error/mod.rs | 172 ++- .../output/error/source_location.rs | 39 +- .../src/standard_json/output/mod.rs | 194 ++- .../src/standard_json/output/source.rs | 209 ++- crates/solc-json-interface/src/warning.rs | 48 - crates/yul/Cargo.toml | 2 +- crates/yul/src/lexer/mod.rs | 6 +- crates/yul/src/lexer/token/mod.rs | 6 +- crates/yul/src/parser/mod.rs | 8 +- crates/yul/src/parser/statement/assignment.rs | 20 +- crates/yul/src/parser/statement/block.rs | 17 +- crates/yul/src/parser/statement/code.rs | 14 +- .../statement/expression/function_call/mod.rs | 172 ++- 
.../expression/function_call/verbatim.rs | 13 +- .../parser/statement/expression/literal.rs | 59 +- .../src/parser/statement/expression/mod.rs | 32 +- crates/yul/src/parser/statement/for_loop.rs | 14 +- .../parser/statement/function_definition.rs | 50 +- .../src/parser/statement/if_conditional.rs | 14 +- crates/yul/src/parser/statement/mod.rs | 32 +- crates/yul/src/parser/statement/object.rs | 19 +- .../yul/src/parser/statement/switch/case.rs | 4 +- crates/yul/src/parser/statement/switch/mod.rs | 20 +- .../parser/statement/variable_declaration.rs | 22 +- crates/yul/src/parser/type.rs | 16 +- js/resolc/package.json | 2 +- js/resolc/src/index.ts | 1 - package-lock.json | 2 +- 169 files changed, 6288 insertions(+), 5206 deletions(-) create mode 100644 clippy.toml create mode 100644 crates/common/src/contract_identifier.rs create mode 100644 crates/common/src/keccak256.rs create mode 100644 crates/common/src/metadata.rs create mode 100644 crates/common/src/object.rs create mode 100644 crates/linker/src/elf.rs create mode 100644 crates/linker/src/pvm.rs create mode 100644 crates/resolc/src/linker.rs delete mode 100644 crates/resolc/src/project/contract/ir/llvm_ir.rs create mode 100644 crates/resolc/src/tests/cli/linker.rs create mode 100644 crates/resolc/src/tests/cli/llvm_arguments.rs rename crates/resolc/src/tests/{cli/contracts => data}/solidity/contract.sol (100%) create mode 100644 crates/resolc/src/tests/data/solidity/dependency.sol rename crates/resolc/src/tests/{cli/contracts => data}/standard_json/solidity_contracts.json (100%) create mode 100644 crates/resolc/src/tests/data/standard_json/yul_solc.json create mode 100644 crates/resolc/src/tests/data/standard_json/yul_solc_urls.json create mode 100644 crates/resolc/src/tests/data/yul/Test.yul rename crates/resolc/src/tests/{cli/contracts => data}/yul/contract.yul (100%) rename crates/resolc/src/tests/{cli/contracts => data}/yul/memset.yul (100%) delete mode 100644 crates/resolc/src/tests/messages.rs delete mode 
100644 crates/resolc/src/tests/remappings.rs delete mode 100644 crates/resolc/src/tests/runtime_code.rs rename crates/resolc/src/tests/{ => unit}/factory_dependency.rs (62%) rename crates/resolc/src/tests/{ => unit}/ir_artifacts.rs (53%) rename crates/resolc/src/tests/{ => unit}/libraries.rs (62%) create mode 100644 crates/resolc/src/tests/unit/messages.rs create mode 100644 crates/resolc/src/tests/unit/mod.rs rename crates/resolc/src/tests/{ => unit}/optimizer.rs (77%) create mode 100644 crates/resolc/src/tests/unit/remappings.rs create mode 100644 crates/resolc/src/tests/unit/runtime_code.rs create mode 100644 crates/resolc/src/tests/unit/standard_json.rs rename crates/resolc/src/tests/{ => unit}/unsupported_opcodes.rs (63%) create mode 100644 crates/solc-json-interface/src/combined_json/selector.rs create mode 100644 crates/solc-json-interface/src/standard_json/input/settings/libraries.rs create mode 100644 crates/solc-json-interface/src/standard_json/input/settings/warning.rs create mode 100644 crates/solc-json-interface/src/standard_json/output/error/error_handler.rs create mode 100644 crates/solc-json-interface/src/standard_json/output/error/mapped_location.rs delete mode 100644 crates/solc-json-interface/src/warning.rs diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4c33506..a322a7f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -132,7 +132,7 @@ jobs: run: | result=$(./resolc-${{ matrix.target }} --bin crates/integration/contracts/flipper.sol) echo $result - if [[ $result == *'0x50564d'* ]]; then exit 0; else exit 1; fi + if [[ $result == *'50564d'* ]]; then exit 0; else exit 1; fi - uses: actions/upload-artifact@v4 with: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ffe4ce3..3306e29 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -26,6 +26,7 @@ jobs: with: # without this it will override our rust flags rustflags: "" + components: 
rustfmt, clippy - name: Install Solc uses: ./.github/actions/get-solc diff --git a/CHANGELOG.md b/CHANGELOG.md index f36ee7f..3c6df20 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,15 +12,24 @@ This is a development pre-release. Supported `polkadot-sdk` rev: `2503.0.1` +### Changed +- Remove the broken `--llvm-ir` mode. +- Remove the unused fallback for size optimization setting. +- Unlinked contract binaries are emitted as raw ELF objects. + ### Added - Line debug information per YUL builtin and for `if` statements. - Column numbers in debug information. - Support for the YUL optimizer details in the standard json input definition. - The `revive-explorer` compiler utility. - `revive-yul`: The AST visitor interface. +- The `--link` deploy time linking mode. ### Fixed - The debug info source file matches the YUL path in `--debug-output-dir`, allowing tools to display the source line. +- Incosistent type forwarding in JSON output (empty string vs. null object). +- The solc automatic import resolution. +- Compiler panic on missing libraries definition. 
## v0.3.0 diff --git a/Cargo.lock b/Cargo.lock index d7522f0..42f688a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -32,9 +32,9 @@ dependencies = [ [[package]] name = "adler2" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aead" @@ -135,11 +135,11 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "741bdd7499908b3aa0b159bba11e71c8cddd009a2c2eb7a06e825f1ec87900a5" dependencies = [ - "alloy-primitives 1.1.2", + "alloy-primitives 1.3.1", "alloy-rlp", "crc", "serde", - "thiserror 2.0.12", + "thiserror 2.0.16", ] [[package]] @@ -148,7 +148,7 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b82752a889170df67bbb36d42ca63c531eb16274f0d7299ae2a680facba17bd" dependencies = [ - "alloy-primitives 1.1.2", + "alloy-primitives 1.3.1", "alloy-rlp", "serde", ] @@ -159,22 +159,22 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d4769c6ffddca380b0070d71c8b7f30bed375543fe76bb2f74ec0acf4b7cd16" dependencies = [ - "alloy-primitives 1.1.2", + "alloy-primitives 1.3.1", "alloy-rlp", "serde", - "thiserror 2.0.12", + "thiserror 2.0.16", ] [[package]] name = "alloy-eips" -version = "1.0.9" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3056872f6da48046913e76edb5ddced272861f6032f09461aea1a2497be5ae5d" +checksum = "7473a19f02b25f8e1e8c69d35f02c07245694d11bd91bfe00e9190ac106b3838" dependencies = [ "alloy-eip2124", "alloy-eip2930", "alloy-eip7702", - "alloy-primitives 1.1.2", + "alloy-primitives 1.3.1", "alloy-rlp", "alloy-serde", "auto_impl", @@ -187,15 +187,16 @@ dependencies = [ [[package]] name = "alloy-genesis" -version = "1.0.9" +version = "1.0.25" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c98fb40f07997529235cc474de814cd7bd9de561e101716289095696c0e4639d" +checksum = "17b2c29f25098bfa4cd3d9ec7806e1506716931e188c7c0843284123831c2cf1" dependencies = [ "alloy-eips", - "alloy-primitives 1.1.2", + "alloy-primitives 1.3.1", "alloy-serde", "alloy-trie", "serde", + "serde_with", ] [[package]] @@ -212,12 +213,12 @@ dependencies = [ [[package]] name = "alloy-json-abi" -version = "1.1.2" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ccaa79753d7bf15f06399ea76922afbfaf8d18bebed9e8fc452984b4a90dcc9" +checksum = "125a1c373261b252e53e04d6e92c37d881833afc1315fceab53fd46045695640" dependencies = [ - "alloy-primitives 1.1.2", - "alloy-sol-type-parser 1.1.2", + "alloy-primitives 1.3.1", + "alloy-sol-type-parser 1.3.1", "serde", "serde_json", ] @@ -234,8 +235,8 @@ dependencies = [ "const-hex", "derive_more 2.0.1", "foldhash", - "hashbrown 0.15.3", - "indexmap 2.9.0", + "hashbrown 0.15.5", + "indexmap 2.10.0", "itoa", "k256", "keccak-asm", @@ -251,9 +252,9 @@ dependencies = [ [[package]] name = "alloy-primitives" -version = "1.1.2" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18c35fc4b03ace65001676358ffbbaefe2a2b27ee50fe777c345082c7c888be8" +checksum = "bc9485c56de23438127a731a6b4c87803d49faf1a7068dcd1d8768aca3a9edb9" dependencies = [ "alloy-rlp", "bytes", @@ -261,14 +262,14 @@ dependencies = [ "const-hex", "derive_more 2.0.1", "foldhash", - "hashbrown 0.15.3", - "indexmap 2.9.0", + "hashbrown 0.15.5", + "indexmap 2.10.0", "itoa", "k256", "keccak-asm", "paste", "proptest", - "rand 0.9.1", + "rand 0.9.2", "ruint", "rustc-hash 2.1.1", "serde", @@ -295,16 +296,16 @@ checksum = "64b728d511962dda67c1bc7ea7c03736ec275ed2cf4c35d9585298ac9ccf3b73" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "alloy-serde" -version = "1.0.9" +version = "1.0.25" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "730e8f2edf2fc224cabd1c25d090e1655fa6137b2e409f92e5eec735903f1507" +checksum = "30be84f45d4f687b00efaba1e6290cbf53ccc8f6b8fbb54e4c2f9d2a0474ce95" dependencies = [ - "alloy-primitives 1.1.2", + "alloy-primitives 1.3.1", "serde", "serde_json", ] @@ -320,21 +321,21 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "alloy-sol-macro" -version = "1.1.2" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8612e0658964d616344f199ab251a49d48113992d81b92dab93ed855faa66383" +checksum = "d20d867dcf42019d4779519a1ceb55eba8d7f3d0e4f0a89bcba82b8f9eb01e48" dependencies = [ - "alloy-sol-macro-expander 1.1.2", - "alloy-sol-macro-input 1.1.2", + "alloy-sol-macro-expander 1.3.1", + "alloy-sol-macro-input 1.3.1", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -346,30 +347,30 @@ dependencies = [ "alloy-sol-macro-input 0.8.25", "const-hex", "heck", - "indexmap 2.9.0", + "indexmap 2.10.0", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "syn-solidity 0.8.25", "tiny-keccak", ] [[package]] name = "alloy-sol-macro-expander" -version = "1.1.2" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a384edac7283bc4c010a355fb648082860c04b826bb7a814c45263c8f304c74" +checksum = "b74e91b0b553c115d14bd0ed41898309356dc85d0e3d4b9014c4e7715e48c8ad" dependencies = [ - "alloy-sol-macro-input 1.1.2", + "alloy-sol-macro-input 1.3.1", "const-hex", "heck", - "indexmap 2.9.0", + "indexmap 2.10.0", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.101", - "syn-solidity 1.1.2", + "syn 2.0.106", + "syn-solidity 1.3.1", "tiny-keccak", ] @@ -385,15 +386,15 @@ dependencies = [ "macro-string", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "syn-solidity 0.8.25", ] [[package]] name = 
"alloy-sol-macro-input" -version = "1.1.2" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd588c2d516da7deb421b8c166dc60b7ae31bca5beea29ab6621fcfa53d6ca5" +checksum = "84194d31220803f5f62d0a00f583fd3a062b36382e2bea446f1af96727754565" dependencies = [ "const-hex", "dunce", @@ -401,8 +402,8 @@ dependencies = [ "macro-string", "proc-macro2", "quote", - "syn 2.0.101", - "syn-solidity 1.1.2", + "syn 2.0.106", + "syn-solidity 1.3.1", ] [[package]] @@ -417,9 +418,9 @@ dependencies = [ [[package]] name = "alloy-sol-type-parser" -version = "1.1.2" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e86ddeb70792c7ceaad23e57d52250107ebbb86733e52f4a25d8dc1abc931837" +checksum = "fe8c27b3cf6b2bb8361904732f955bc7c05e00be5f469cec7e2280b6167f3ff0" dependencies = [ "serde", "winnow", @@ -440,23 +441,23 @@ dependencies = [ [[package]] name = "alloy-sol-types" -version = "1.1.2" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "584cb97bfc5746cb9dcc4def77da11694b5d6d7339be91b7480a6a68dc129387" +checksum = "f5383d34ea00079e6dd89c652bcbdb764db160cef84e6250926961a0b2295d04" dependencies = [ - "alloy-json-abi 1.1.2", - "alloy-primitives 1.1.2", - "alloy-sol-macro 1.1.2", + "alloy-json-abi 1.3.1", + "alloy-primitives 1.3.1", + "alloy-sol-macro 1.3.1", "serde", ] [[package]] name = "alloy-trie" -version = "0.8.1" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "983d99aa81f586cef9dae38443245e585840fcf0fc58b09aee0b1f27aed1d500" +checksum = "e3412d52bb97c6c6cc27ccc28d4e6e8cf605469101193b50b0bd5813b1f990b5" dependencies = [ - "alloy-primitives 1.1.2", + "alloy-primitives 1.3.1", "alloy-rlp", "arrayvec", "derive_more 2.0.1", @@ -466,6 +467,21 @@ dependencies = [ "tracing", ] +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "anes" version = "0.1.6" @@ -474,9 +490,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.6.18" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192" dependencies = [ "anstyle", "anstyle-parse", @@ -489,44 +505,44 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" +checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" [[package]] name = "anstyle-parse" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.2" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] name = "anstyle-wincon" -version = "3.0.8" +version = "3.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6680de5231bd6ee4c6191b8a1325daa282b415391ec9d3a37bd34f2060dc73fa" +checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a" dependencies = [ "anstyle", "once_cell_polyfill", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] name = "anyhow" -version = "1.0.98" +version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100" [[package]] name = "approx" @@ -548,7 +564,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -668,7 +684,7 @@ dependencies = [ "ark-std 0.5.0", "educe", "fnv", - "hashbrown 0.15.3", + "hashbrown 0.15.5", "itertools 0.13.0", "num-bigint", "num-integer", @@ -823,7 +839,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" dependencies = [ "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -861,7 +877,7 @@ dependencies = [ "num-traits", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -902,7 +918,7 @@ dependencies = [ "ark-std 0.5.0", "educe", "fnv", - "hashbrown 0.15.3", + "hashbrown 0.15.5", ] [[package]] @@ -973,7 +989,7 @@ checksum = "213888f660fddcca0d257e88e54ac05bca01885f258ccdf695bafd77031bb69d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -1151,15 +1167,21 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.88" +version = "0.1.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] +[[package]] +name = "atomic-waker" +version = 
"1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + [[package]] name = "auto_impl" version = "1.3.0" @@ -1168,14 +1190,14 @@ checksum = "ffdcb70bdbc4d478427380519163274ac86e52916e10f0a8889adf0f96d3fee7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "autocfg" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "backtrace" @@ -1218,9 +1240,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" -version = "1.7.3" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89e25b6adfb930f02d1981565a6e5d9c547ac15a96606256d3b59040e5cd4ca3" +checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" [[package]] name = "binary-merkle-tree" @@ -1261,9 +1283,9 @@ dependencies = [ [[package]] name = "bip39" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33415e24172c1b7d6066f6d999545375ab8e1d95421d6784bdfff9496f292387" +checksum = "43d193de1f7487df1914d3a568b772458861d33f9c54249612cc2893d6915054" dependencies = [ "bitcoin_hashes 0.13.0", "serde", @@ -1325,9 +1347,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.1" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" +checksum = "6a65b545ab31d687cff52899d4890855fec459eb6afe0da6417b8a18da87aa29" [[package]] name = "bitvec" @@ -1395,9 +1417,9 @@ dependencies = [ [[package]] name = "blst" 
-version = "0.3.14" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47c79a94619fade3c0b887670333513a67ac28a6a7e653eb260bf0d4103db38d" +checksum = "4fd49896f12ac9b6dcd7a5998466b9b58263a695a3dd1ecc1aaca2e12a90b080" dependencies = [ "cc", "glob", @@ -1701,9 +1723,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.17.0" +version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "byte-slice-cast" @@ -1713,9 +1735,9 @@ checksum = "7575182f7272186991736b70173b0ea045398f984bf5ebbb3804736ce1330c9d" [[package]] name = "bytemuck" -version = "1.23.0" +version = "1.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9134a6ef01ce4b366b50689c94f82c14bc72bc5d0386829828a2e2752ef7958c" +checksum = "3995eaeebcdf32f91f980d360f78732ddc061097ab4e39991ae7a6ace9194677" [[package]] name = "byteorder" @@ -1749,9 +1771,9 @@ dependencies = [ [[package]] name = "camino" -version = "1.1.10" +version = "1.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0da45bc31171d8d6960122e222a67740df867c1dd53b4d51caa297084c185cab" +checksum = "5d07aa9a93b00c76f71bc35d598bed923f6d4f3a9ca5c24b7737ae1a292841c0" dependencies = [ "serde", ] @@ -1787,9 +1809,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.2.25" +version = "1.2.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0fc897dc1e865cc67c0e05a836d9d3f1df3cbe442aa4a9473b18e12624a4951" +checksum = "3ee0f8803222ba5a7e2777dd72ca451868909b1ac410621b676adf07280e9b5f" dependencies = [ "jobserver", "libc", @@ -1807,9 +1829,22 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" + +[[package]] +name = "chrono" +version = "0.4.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "num-traits", + "serde", + "windows-link", +] [[package]] name = "ciborium" @@ -1851,9 +1886,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.39" +version = "4.5.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd60e63e9be68e5fb56422e397cf9baddded06dae1d2e523401542383bc72a9f" +checksum = "1fc0e74a703892159f5ae7d3aac52c8e6c392f5ae5f359c70b5881d60aaac318" dependencies = [ "clap_builder", "clap_derive", @@ -1861,9 +1896,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.39" +version = "4.5.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89cc6392a1f72bbeb820d71f32108f61fdaf18bc526e1d23954168a67759ef51" +checksum = "b3e7f4214277f3c7aa526a59dd3fbe306a370daee1f8b7b8c987069cd8e888a8" dependencies = [ "anstream", "anstyle", @@ -1873,27 +1908,27 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.32" +version = "4.5.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09176aae279615badda0765c0c0b3f6ed53f4709118af73cf4655d85d1530cd7" +checksum = "14cb31bb0a7d536caef2639baa7fad459e15c3144efefa6dbd1c84562c4739f6" dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "clap_lex" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" +checksum = 
"b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" [[package]] name = "colorchoice" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" [[package]] name = "common-path" @@ -1903,9 +1938,9 @@ checksum = "2382f75942f4b3be3690fe4f86365e9c853c1587d6ee58212cebf6e2a9ccd101" [[package]] name = "const-hex" -version = "1.14.1" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83e22e0ed40b96a48d3db274f72fd365bd78f67af39b6bbd47e8a15e1c6207ff" +checksum = "dccd746bf9b1038c0507b7cec21eb2b11222db96a2902c96e8c185d6d20fb9c4" dependencies = [ "cfg-if", "cpufeatures", @@ -2121,9 +2156,9 @@ checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc32fast" -version = "1.4.2" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" dependencies = [ "cfg-if", ] @@ -2188,9 +2223,9 @@ checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crunchy" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43da5946c66ffcc7745f48db692ffbb10a83bfe0afd96235c5c2a4fb23994929" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" [[package]] name = "crypto-bigint" @@ -2330,7 +2365,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -2579,7 +2614,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ 
-2603,7 +2638,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -2614,7 +2649,7 @@ checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ "darling_core", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -2624,6 +2659,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" dependencies = [ "const-oid", + "pem-rfc7468", "zeroize", ] @@ -2634,6 +2670,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" dependencies = [ "powerfmt", + "serde", ] [[package]] @@ -2655,18 +2692,18 @@ checksum = "d65d7ce8132b7c0e54497a4d9a55a1c2a0912a0d786cf894472ba818fba45762" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "derive-where" -version = "1.4.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e73f2692d4bd3cac41dca28934a39894200c9fabf49586d77d0e5954af1d7902" +checksum = "ef941ded77d15ca19b40374869ac6000af1c9f2a4c0f3d4c70926287e6364a8f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -2679,7 +2716,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version 0.4.1", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -2708,7 +2745,7 @@ checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "unicode-xid", ] @@ -2720,7 +2757,7 @@ checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "unicode-xid", ] @@ -2801,7 +2838,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 
2.0.106", ] [[package]] @@ -2831,9 +2868,9 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.101", + "syn 2.0.106", "termcolor", - "toml 0.8.22", + "toml 0.8.23", "walkdir", ] @@ -2880,14 +2917,14 @@ checksum = "7e8671d54058979a37a26f3511fbf8d198ba1aa35ffb202c42587d918d77213a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "dyn-clone" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c7a8fb8a9fbf66c1f703fe16184d10ca0ee9d23be5b4436400408ba54a95005" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" [[package]] name = "ecdsa" @@ -2916,9 +2953,9 @@ dependencies = [ [[package]] name = "ed25519-dalek" -version = "2.1.1" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" +checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9" dependencies = [ "curve25519-dalek", "ed25519", @@ -2930,16 +2967,17 @@ dependencies = [ [[package]] name = "ed25519-zebra" -version = "4.0.3" +version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d9ce6874da5d4415896cd45ffbc4d1cfc0c4f9c079427bd870742c30f2f65a9" +checksum = "0017d969298eec91e3db7a2985a8cab4df6341d86e6f3a6f5878b13fb7846bc9" dependencies = [ "curve25519-dalek", "ed25519", - "hashbrown 0.14.5", - "hex", + "hashbrown 0.15.5", + "pkcs8", "rand_core 0.6.4", "sha2 0.10.9", + "subtle", "zeroize", ] @@ -2952,7 +2990,7 @@ dependencies = [ "enum-ordinalize", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -3001,27 +3039,27 @@ checksum = "0d28318a75d4aead5c4db25382e8ef717932d0346600cacae6357eb5941bc5ff" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "enumflags2" -version = "0.7.11" +version = "0.7.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba2f4b465f5318854c6f8dd686ede6c0a9dc67d4b1ac241cf0eb51521a309147" +checksum = "1027f7680c853e056ebcec683615fb6fbbc07dbaa13b4d5d9442b146ded4ecef" dependencies = [ "enumflags2_derive", ] [[package]] name = "enumflags2_derive" -version = "0.7.11" +version = "0.7.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc4caf64a58d7a6d65ab00639b046ff54399a39f5f2554728895ace4b297cd79" +checksum = "67c78a4d8fdf9953a5c9d458f9efe940fd97a0cab0941c075a813ac594733827" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -3032,7 +3070,7 @@ checksum = "2f9ed6b3789237c8a0c1c505af1c7eb2c560df6186f01b098c3a1064ea532f38" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -3087,12 +3125,12 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.12" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cea14ef9355e3beab063703aa9dab15afd25f0667c341310c1e5274bb1d0da18" +checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" dependencies = [ "libc", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] @@ -3148,7 +3186,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -3269,9 +3307,9 @@ dependencies = [ [[package]] name = "flate2" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ced92e76e966ca2fd84c8f7aa01a4aea65b0eb6648d72f7c8f3e2764a67fece" +checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" dependencies = [ "crc32fast", "miniz_oxide", @@ -3315,9 +3353,9 @@ dependencies = [ [[package]] name = "frame-benchmarking" -version = "40.2.0" +version = "40.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "4a9e5fcdb30bb83b2d97d7e718127230e0fbbad82b9c32dedf63971f08709def" +checksum = "e223b9cbb4e6d3f742b33c104037155c91315e97fe495406ba946f9823b432f0" dependencies = [ "frame-support", "frame-support-procedural", @@ -3376,7 +3414,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -3515,7 +3553,7 @@ dependencies = [ "proc-macro2", "quote", "sp-crypto-hashing", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -3528,7 +3566,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -3539,14 +3577,14 @@ checksum = "ed971c6435503a099bdac99fe4c5bea08981709e5b5a0a8535a1856f48561191" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "frame-system" -version = "40.1.0" +version = "40.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfc20d95c35bad22eb8b8d7ef91197a439483458237b176e621d9210f2fbff15" +checksum = "d1e700f225f5cfe5d89f564ab23b6c609c144228d4d9871956ef209b20c9df98" dependencies = [ "cfg-if", "docify", @@ -3564,9 +3602,9 @@ dependencies = [ [[package]] name = "frame-system-benchmarking" -version = "40.0.0" +version = "40.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dcf84c561e598ef31078af449398d87211867611ebc7068ba1364fba4c7e653" +checksum = "e71232838b3b442b49601fc4634d175e552fc954ffebe303d8455963eb3bd5c1" dependencies = [ "frame-benchmarking", "frame-support", @@ -3678,7 +3716,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -3745,7 +3783,7 @@ checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi 0.11.1+wasi-snapshot-preview1", ] [[package]] @@ -3807,7 +3845,7 @@ version = "0.20.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "2deb07a133b1520dc1a5690e9bd08950108873d7ed5de38dcc74d3b5ebffa110" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", "libc", "libgit2-sys", "log", @@ -3816,9 +3854,9 @@ dependencies = [ [[package]] name = "glob" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "globset" @@ -3839,7 +3877,7 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bf760ebf69878d9fd8f110c89703d90ce35095324d1f1edcb595c63945ee757" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", "ignore", "walkdir", ] @@ -3907,11 +3945,12 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.15.3" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84b26c544d002229e640969970a2e74021aadf6e2f96372b9c58eff97de08eb3" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ "allocator-api2", + "equivalent", "foldhash", "serde", ] @@ -3930,9 +3969,9 @@ checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hermit-abi" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f154ce46856750ed433c8649605bf7ed2de3bc35fd9d2a9f30cddd873c80cb08" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" [[package]] name = "hex" @@ -4050,19 +4089,31 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b112acc8b3adf4b107a8ec20977da0273a8c386765a3ec0229bd500a1443f9f" [[package]] -name = "hyper" -version = "1.6.0" +name = "humantime-serde" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +checksum = "57a3db5ea5923d99402c94e9feb261dc5ee9b4efa158b0315f788cf549cc200c" dependencies = [ + "humantime", + "serde", +] + +[[package]] +name = "hyper" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" +dependencies = [ + "atomic-waker", "bytes", "futures-channel", - "futures-util", + "futures-core", "http", "http-body", "httparse", "itoa", "pin-project-lite", + "pin-utils", "smallvec", "tokio", "want", @@ -4086,9 +4137,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.13" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c293b6b3d21eca78250dc7dbebd6b9210ec5530e038cbfe0661b5c47ab06e8" +checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e" dependencies = [ "base64 0.22.1", "bytes", @@ -4108,6 +4159,30 @@ dependencies = [ "tracing", ] +[[package]] +name = "iana-time-zone" +version = "0.1.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + [[package]] name = "iced-x86" version = "1.21.0" @@ -4301,7 +4376,7 @@ checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -4336,12 +4411,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.9.0" +version = "2.10.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" +checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" dependencies = [ "equivalent", - "hashbrown 0.15.3", + "hashbrown 0.15.5", "serde", ] @@ -4374,7 +4449,7 @@ checksum = "f365c8de536236cfdebd0ba2130de22acefed18b1fb99c32783b3840aec5fb46" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -4406,6 +4481,17 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "io-uring" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4" +dependencies = [ + "bitflags 2.9.2", + "cfg-if", + "libc", +] + [[package]] name = "ipnet" version = "2.11.0" @@ -4428,7 +4514,7 @@ version = "0.4.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9" dependencies = [ - "hermit-abi 0.5.1", + "hermit-abi 0.5.2", "libc", "windows-sys 0.59.0", ] @@ -4555,15 +4641,15 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.172" +version = "0.2.175" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d750af042f7ef4f724306de029d18836c26c1765a54a6a3f094cbd23a7267ffa" +checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" [[package]] name = "libgit2-sys" -version = "0.18.1+1.9.0" +version = "0.18.2+1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1dcb20f84ffcdd825c7a311ae347cce604a6f084a767dec4a4929829645290e" +checksum = "1c42fe03df2bd3c53a3a9c7317ad91d80c81cd1fb0caec8d7cc4cd2bfa10c222" dependencies = [ "cc", "libc", @@ -4579,9 +4665,9 @@ checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" [[package]] name = "libmimalloc-sys" -version = "0.1.42" +version = "0.1.43" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec9d6fac27761dabcd4ee73571cdb06b7022dc99089acbe5435691edffaac0f4" +checksum = "bf88cd67e9de251c1781dbe2f641a1a3ad66eaae831b8a2c38fbdc5ddae16d4d" dependencies = [ "cc", "libc", @@ -4589,11 +4675,11 @@ dependencies = [ [[package]] name = "libredox" -version = "0.1.3" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", "libc", "redox_syscall", ] @@ -4747,7 +4833,7 @@ checksum = "1b27834086c65ec3f9387b096d66e99f221cf081c2b738042aa252bcd41204e3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -4759,7 +4845,7 @@ dependencies = [ "macro_magic_core", "macro_magic_macros", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -4773,7 +4859,7 @@ dependencies = [ "macro_magic_core_macros", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -4784,7 +4870,7 @@ checksum = "b02abfe41815b5bd98dbd4260173db2c116dda171dc0fe7838cb206333b83308" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -4795,7 +4881,7 @@ checksum = "73ea28ee64b88876bf45277ed9a5817c1817df061a74f2b988971a12570e5869" dependencies = [ "macro_magic_core", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -4819,9 +4905,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" [[package]] name = "memfd" @@ -4864,24 +4950,18 @@ dependencies = [ [[package]] name = "mimalloc" -version = "0.1.46" +version = "0.1.47" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "995942f432bbb4822a7e9c3faa87a695185b0d09273ba85f097b54f4e458f2af" +checksum = "b1791cbe101e95af5764f06f20f6760521f7158f69dbf9d6baf941ee1bf6bc40" dependencies = [ "libmimalloc-sys", ] -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - [[package]] name = "miniz_oxide" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3be647b768db090acb35d5ec5db2b0e1f1de11133ca123b9eacf5137868f892a" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", ] @@ -4893,7 +4973,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" dependencies = [ "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi 0.11.1+wasi-snapshot-preview1", "windows-sys 0.59.0", ] @@ -4941,6 +5021,15 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" +[[package]] +name = "normpath" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8911957c4b1549ac0dc74e30db9c8b0e66ddcd6d7acc33098f4c63a64a6d7ed" +dependencies = [ + "windows-sys 0.59.0", +] + [[package]] name = "nu-ansi-term" version = "0.46.0" @@ -4998,7 +5087,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -5058,19 +5147,20 @@ version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" dependencies = [ - "hermit-abi 0.5.1", + "hermit-abi 0.5.2", "libc", ] 
[[package]] name = "nybbles" -version = "0.3.4" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8983bb634df7248924ee0c4c3a749609b5abcb082c28fffe3254b3eb3602b307" +checksum = "63cb50036b1ad148038105af40aaa70ff24d8a14fbc44ae5c914e1348533d12e" dependencies = [ "alloy-rlp", - "const-hex", + "cfg-if", "proptest", + "ruint", "serde", "smallvec", ] @@ -5126,7 +5216,7 @@ version = "0.10.73" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", "cfg-if", "foreign-types", "libc", @@ -5143,7 +5233,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -5454,9 +5544,9 @@ dependencies = [ [[package]] name = "pallet-balances" -version = "41.1.0" +version = "41.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dcd7bf033312c976e0c044a80b4cd8b88471d7371baae6fea67b3f42eba288b" +checksum = "58e04ed6c01cd829731ec7bcec0de4e49cd806195ca2448a1887c5493efd8262" dependencies = [ "docify", "frame-benchmarking", @@ -5620,9 +5710,9 @@ dependencies = [ [[package]] name = "pallet-broker" -version = "0.19.1" +version = "0.19.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f227cf4ee9025e9387547e37300bd00c1c19e786eb23276268af7dc710915ce3" +checksum = "47c26e061a2b40adc3ef186de6fb619f993bea265643b5ef41e98c578784ed6e" dependencies = [ "bitvec", "frame-benchmarking", @@ -5780,7 +5870,7 @@ checksum = "e35aaa3d7f1dba4ea7b74d7015e6068b753d1f7f63b39a4ce6377de1bc51b476" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -6017,9 +6107,9 @@ dependencies = [ [[package]] name = "pallet-im-online" -version = "39.1.0" +version = "39.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cadfed668f67c5c483a40cd24ee7d0453bb53eb41aa393898f471e837724df48" +checksum = "03cf47659cfc7af8584c3aac5c120e92fcffd6629c1a4de2cd7688dcc7019e36" dependencies = [ "frame-benchmarking", "frame-support", @@ -6135,9 +6225,9 @@ dependencies = [ [[package]] name = "pallet-migrations" -version = "10.1.0" +version = "10.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290a3db17ac6eb9bc965a37eb689b35403f47930b4097626b7b8d07f651caf33" +checksum = "ca04828f98e3ee22d1429c7395cb2073010e8eb9c59cb7ecf237b49d4bd38cb6" dependencies = [ "docify", "frame-benchmarking", @@ -6486,9 +6576,9 @@ dependencies = [ [[package]] name = "pallet-revive" -version = "0.5.0" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "895fe6f50f621a69132697b8b43d29d1db4d9ff445eec410bf1fc98cd7e9412c" +checksum = "e2c0b3638bc9d9e680fa9c2dfef14c87d267ad796c012ea80af62acd8cec83c5" dependencies = [ "alloy-core", "derive_more 0.99.20", @@ -6499,6 +6589,7 @@ dependencies = [ "frame-support", "frame-system", "hex-literal", + "humantime-serde", "impl-trait-for-tuples", "log", "num-bigint", @@ -6533,9 +6624,9 @@ dependencies = [ [[package]] name = "pallet-revive-fixtures" -version = "0.3.0" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc1df19ca809f036d6ddf1632039e9db312f92dbe8f9390e6722ad808cd95377" +checksum = "3694218bf3227428b79eef643da05e8256af67b6fcac932b0d254da909a0882f" dependencies = [ "anyhow", "cargo_metadata", @@ -6543,14 +6634,14 @@ dependencies = [ "polkavm-linker 0.21.0", "sp-core", "sp-io", - "toml 0.8.22", + "toml 0.8.23", ] [[package]] name = "pallet-revive-mock-network" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78dc337fa4265c93849f7a00d56ae19c6b5f4f78140b03ea752ef6f176507aaf" +checksum = 
"ce198787c5c07bf424971953d431b68feec771196303315fefe3d00ac5b90656" dependencies = [ "frame-support", "frame-system", @@ -6584,7 +6675,7 @@ checksum = "63c2dc2fc6961da23fefc54689ce81a8e006f6988bc465dcc9ab9db905d31766" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -6810,9 +6901,9 @@ dependencies = [ [[package]] name = "pallet-state-trie-migration" -version = "44.1.0" +version = "45.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7954fe634d7fb20902d04815aa2fb87e4d47736158e83cefd6abd6ea9938bab1" +checksum = "c14f4480104e8aaf9fd3dd6d813be9e49f8f13b0e1e6c063173d20c5d54ab64b" dependencies = [ "frame-benchmarking", "frame-support", @@ -7059,15 +7150,18 @@ dependencies = [ [[package]] name = "pallet-xcm" -version = "19.1.2" +version = "19.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca27d506282f4c9cd2cac6fb0d199edd89d366635f04801319e7145912547a68" +checksum = "a632dce7b57ffe08fd14ece4b27f4115a675d913a069b5d96f57fdbc07f02990" dependencies = [ "bounded-collections", "frame-benchmarking", "frame-support", "frame-system", + "hex-literal", "pallet-balances", + "pallet-revive", + "pallet-timestamp", "parity-scale-codec", "scale-info", "serde", @@ -7144,9 +7238,9 @@ dependencies = [ [[package]] name = "parachains-common" -version = "21.0.0" +version = "21.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b68844f03979cb0c8b208306047f3b1134b59c74c1fdc9b7f2d8a591ba69b956" +checksum = "97b32bf9e055a2ec4aab91bd936c901a17caf8c82e35cc9bf2e97840bfa5e6d5" dependencies = [ "cumulus-primitives-core", "cumulus-primitives-utility", @@ -7243,7 +7337,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -7309,6 +7403,15 @@ dependencies = [ "password-hash", ] +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + [[package]] name = "percent-encoding" version = "2.3.1" @@ -7317,12 +7420,12 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.8.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "198db74531d58c70a361c42201efde7e2591e976d518caf7662a47dc5720e7b6" +checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323" dependencies = [ "memchr", - "thiserror 2.0.12", + "thiserror 2.0.16", "ucd-trie", ] @@ -7423,9 +7526,9 @@ dependencies = [ [[package]] name = "polkadot-primitives" -version = "18.1.0" +version = "18.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d46b3d45e295d975a9be6128212b29e0efd05f26cdde4a45115424a1f6bad0dd" +checksum = "7eadd5ca22e2ded7a12a484a6e0962ed86c379ce4bb83fbd82843b6459a20cef" dependencies = [ "bitvec", "hex-literal", @@ -7452,9 +7555,9 @@ dependencies = [ [[package]] name = "polkadot-runtime-common" -version = "19.1.0" +version = "19.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eccd922c8032004e38c1a6cab86f304949d04e61e270c982b06a02132d53bf58" +checksum = "768b3b70c32202f5e2fa1c673b01dcdb46c726b0d66d865d9638035fd2ecccfc" dependencies = [ "bitvec", "frame-benchmarking", @@ -7515,9 +7618,9 @@ dependencies = [ [[package]] name = "polkadot-runtime-parachains" -version = "19.1.0" +version = "19.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a4c580cf509b6b7d4f2b556e31da04e528c69acfaeec28d5ac7f02b4dc0fa9" +checksum = "a76fc49c2a9a7d13fbd1d97d448366ad5b7197d7684ec9013271d99c015d13d6" dependencies = [ "bitflags 1.3.2", "bitvec", @@ -7571,9 +7674,9 @@ dependencies = [ [[package]] name = "polkadot-sdk" -version = "2503.0.1" +version = "2503.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb1fe16dffcf985e6054c16a20469c19398c54b10b2f444819b0a5fa90eb1e1b" +checksum = "2cee03fbcacc7726e5f6239b246f245e9d9761a686c6fa8b45ad47666c637989" dependencies = [ "asset-test-utils", "assets-common", @@ -7954,7 +8057,7 @@ dependencies = [ "polkavm-common 0.18.0", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -7966,7 +8069,7 @@ dependencies = [ "polkavm-common 0.21.0", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -7976,7 +8079,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c16669ddc7433e34c1007d31080b80901e3e8e523cb9d4b441c3910cf9294b" dependencies = [ "polkavm-derive-impl 0.18.1", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -7986,7 +8089,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36837f6b7edfd6f4498f8d25d81da16cf03bd6992c3e56f3d477dfc90f4fefca" dependencies = [ "polkavm-derive-impl 0.21.0", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -8128,12 +8231,12 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.33" +version = "0.2.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dee91521343f4c5c6a63edd65e54f31f5c92fe8978c40a4282f8372194c6a7d" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" dependencies = [ "proc-macro2", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -8214,7 +8317,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -8225,31 +8328,31 @@ checksum = "75eea531cfcd120e0851a3f8aed42c4841f78c889eefafd96339c72677ae42c3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "proc-macro2" -version = "1.0.95" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" dependencies = [ "unicode-ident", ] [[package]] name = "proptest" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14cae93065090804185d3b75f0bf93b8eeda30c7a9b4a33d3bdb3988d6229e50" +checksum = "6fcdab19deb5195a31cf7726a210015ff1496ba1464fd42cb4f537b8b01b471f" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.9.1", + "bitflags 2.9.2", "lazy_static", "num-traits", - "rand 0.8.5", - "rand_chacha 0.3.1", + "rand 0.9.2", + "rand_chacha 0.9.0", "rand_xorshift", "regex-syntax 0.8.5", "rusty-fork", @@ -8283,9 +8386,9 @@ dependencies = [ [[package]] name = "r-efi" -version = "5.2.0" +version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "radium" @@ -8307,9 +8410,9 @@ dependencies = [ [[package]] name = "rand" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" dependencies = [ "rand_chacha 0.9.0", "rand_core 0.9.3", @@ -8357,11 +8460,11 @@ dependencies = [ [[package]] name = "rand_xorshift" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" +checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" dependencies = [ - "rand_core 0.6.4", + "rand_core 0.9.3", ] [[package]] @@ -8372,9 +8475,9 @@ checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" [[package]] name = "rayon" -version = 
"1.10.0" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" dependencies = [ "either", "rayon-core", @@ -8382,9 +8485,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.12.1" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" dependencies = [ "crossbeam-deque", "crossbeam-utils", @@ -8392,11 +8495,11 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.12" +version = "0.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "928fca9cf2aa042393a8325b9ead81d2f0df4cb12e1e24cef072922ccd99c5af" +checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", ] [[package]] @@ -8427,7 +8530,7 @@ checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -8507,9 +8610,9 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "reqwest" -version = "0.12.19" +version = "0.12.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2f8e5513d63f2e5b386eb5106dc67eaf3f84e95258e210489136b8b92ad6119" +checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb" dependencies = [ "base64 0.22.1", "bytes", @@ -8520,12 +8623,9 @@ dependencies = [ "hyper", "hyper-tls", "hyper-util", - "ipnet", "js-sys", "log", - "mime", "native-tls", - "once_cell", "percent-encoding", "pin-project-lite", "rustls-pki-types", @@ -8546,7 +8646,7 @@ dependencies = [ [[package]] name = "resolc" -version = 
"0.3.0" +version = "0.4.0" dependencies = [ "anyhow", "clap", @@ -8555,6 +8655,7 @@ dependencies = [ "inkwell", "libc", "mimalloc", + "normpath", "once_cell", "path-slash", "rayon", @@ -8565,7 +8666,7 @@ dependencies = [ "semver 1.0.26", "serde", "serde_json", - "sha3", + "tempfile", "which", ] @@ -8573,7 +8674,7 @@ dependencies = [ name = "revive-benchmarks" version = "0.1.0" dependencies = [ - "alloy-primitives 1.1.2", + "alloy-primitives 1.3.1", "criterion", "hex", "revive-differential", @@ -8594,12 +8695,14 @@ dependencies = [ [[package]] name = "revive-common" -version = "0.1.0" +version = "0.2.0" dependencies = [ "anyhow", + "hex", "serde", "serde_json", "serde_stacker", + "sha3", ] [[package]] @@ -8607,7 +8710,7 @@ name = "revive-differential" version = "0.1.0" dependencies = [ "alloy-genesis", - "alloy-primitives 1.1.2", + "alloy-primitives 1.3.1", "alloy-serde", "hex", "serde", @@ -8629,8 +8732,8 @@ dependencies = [ name = "revive-integration" version = "0.1.1" dependencies = [ - "alloy-primitives 1.1.2", - "alloy-sol-types 1.1.2", + "alloy-primitives 1.3.1", + "alloy-sol-types 1.3.1", "hex", "rayon", "resolc", @@ -8643,7 +8746,7 @@ dependencies = [ [[package]] name = "revive-linker" -version = "0.1.0" +version = "0.2.0" dependencies = [ "anyhow", "libc", @@ -8671,13 +8774,13 @@ dependencies = [ "regex", "serde", "tar", - "toml 0.8.22", + "toml 0.8.23", "which", ] [[package]] name = "revive-llvm-context" -version = "0.3.0" +version = "0.4.0" dependencies = [ "anyhow", "hex", @@ -8694,20 +8797,19 @@ dependencies = [ "revive-stdlib", "semver 1.0.26", "serde", - "sha3", ] [[package]] name = "revive-runner" version = "0.1.0" dependencies = [ - "alloy-primitives 1.1.2", + "alloy-primitives 1.3.1", "anyhow", "clap", "env_logger 0.11.8", "hex", "parity-scale-codec", - "polkadot-sdk 2503.0.1", + "polkadot-sdk 2503.2.0", "resolc", "revive-differential", "revive-llvm-context", @@ -8728,9 +8830,10 @@ dependencies = [ [[package]] name = "revive-solc-json-interface" 
-version = "0.2.0" +version = "0.3.0" dependencies = [ "anyhow", + "hex", "rayon", "revive-common", "semver 1.0.26", @@ -8748,7 +8851,7 @@ dependencies = [ [[package]] name = "revive-yul" -version = "0.2.1" +version = "0.3.0" dependencies = [ "anyhow", "inkwell", @@ -8757,7 +8860,7 @@ dependencies = [ "revive-common", "revive-llvm-context", "serde", - "thiserror 2.0.12", + "thiserror 2.0.16", ] [[package]] @@ -8818,9 +8921,9 @@ dependencies = [ [[package]] name = "ruint" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11256b5fe8c68f56ac6f39ef0720e592f33d2367a4782740d9c9142e889c7fb4" +checksum = "9ecb38f82477f20c5c3d62ef52d7c4e536e38ea9b73fb570a20c5cae0e14bcf6" dependencies = [ "alloy-rlp", "ark-ff 0.3.0", @@ -8835,7 +8938,7 @@ dependencies = [ "primitive-types 0.12.2", "proptest", "rand 0.8.5", - "rand 0.9.1", + "rand 0.9.2", "rlp 0.5.2", "ruint-macro", "serde", @@ -8851,9 +8954,9 @@ checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18" [[package]] name = "rustc-demangle" -version = "0.1.24" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" [[package]] name = "rustc-hash" @@ -8920,7 +9023,7 @@ version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", "errno", "libc", "linux-raw-sys 0.4.15", @@ -8929,15 +9032,15 @@ dependencies = [ [[package]] name = "rustix" -version = "1.0.7" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" +checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" 
dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", "errno", "libc", "linux-raw-sys 0.9.4", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] @@ -8951,9 +9054,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.21" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] name = "rusty-fork" @@ -9124,7 +9227,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -9152,7 +9255,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -9178,7 +9281,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -9216,6 +9319,30 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "schemars" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "schemars" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + [[package]] name = "schnellru" version = "0.2.4" @@ -9229,9 +9356,9 @@ dependencies = [ [[package]] name = "schnorrkel" -version = "0.11.4" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de18f6d8ba0aad7045f5feae07ec29899c1112584a38509a84ad7b04451eaa0" +checksum = "6e9fcb6c2e176e86ec703e22560d99d65a5ee9056ae45a08e13e84ebf796296f" dependencies = [ "aead", "arrayref", @@ -9285,7 +9412,7 @@ version = "0.27.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "25996b82292a7a57ed3508f052cfff8640d38d32018784acd714758b43da9c8f" dependencies = [ - "secp256k1-sys 0.8.1", + "secp256k1-sys 0.8.2", ] [[package]] @@ -9310,9 +9437,9 @@ dependencies = [ [[package]] name = "secp256k1-sys" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70a129b9e9efbfb223753b9163c4ab3b13cff7fd9c7f010fbac25ab4099fa07e" +checksum = "4473013577ec77b4ee3668179ef1186df3146e2cf2d927bd200974c6fe60fd99" dependencies = [ "cc", ] @@ -9359,7 +9486,7 @@ version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", "core-foundation", "core-foundation-sys", "libc", @@ -9444,14 +9571,14 @@ checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "serde_json" -version = "1.0.140" +version = "1.0.143" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +checksum = "d401abef1d108fbd9cbaebc3e46611f4b1021f714a0597a71f41ee463f5f4a5a" dependencies = [ "itoa", "memchr", @@ -9461,9 +9588,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.8" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" dependencies = [ "serde", ] @@ -9490,6 +9617,38 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_with" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5" 
+dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.10.0", + "schemars 0.9.0", + "schemars 1.0.4", + "serde", + "serde_derive", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "serdect" version = "0.2.0" @@ -9601,12 +9760,9 @@ checksum = "620a1d43d70e142b1d46a929af51d44f383db9c7a2ec122de2cd992ccfcf3c18" [[package]] name = "slab" -version = "0.4.9" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" [[package]] name = "slice-group-by" @@ -9628,9 +9784,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.15.0" +version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8917285742e9f3e1683f0a9c4e6b57960b7314d0b08d30d1ecd426713ee2eee9" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "snowbridge-core" @@ -9660,12 +9816,12 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.10" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -9703,7 +9859,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -9916,9 +10072,9 @@ dependencies = [ [[package]] name = 
"sp-crypto-ec-utils" -version = "0.15.0" +version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4d64a8c21ef31aeea66fee00502268001981d24ea61b1befc424dd61003c426" +checksum = "6523d4eb8a9b1d1ca6eb4cd4510784cc754ac5a3d5d655a80f17beff87457da7" dependencies = [ "ark-bls12-377", "ark-bls12-377-ext", @@ -9957,7 +10113,7 @@ checksum = "b85d0f1f1e44bd8617eb2a48203ee854981229e3e79e6f468c7175d5fd37489b" dependencies = [ "quote", "sp-crypto-hashing", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -9968,7 +10124,7 @@ checksum = "48d09fa0a5f7299fb81ee25ae3853d26200f7a348148aed6de76be905c007dbe" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -10206,7 +10362,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -10404,7 +10560,7 @@ dependencies = [ "proc-macro-warning", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -10548,9 +10704,9 @@ dependencies = [ [[package]] name = "staging-xcm-executor" -version = "19.1.2" +version = "19.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af9bc315e8c7018fcfe0371ce4b7e726fb699e37b2acc3e5effb87a7d131a3ff" +checksum = "604ccc5e603cc6ec323928b1ef95897d97f495f5a7f4355953f0d51f48a4f567" dependencies = [ "environmental", "frame-benchmarking", @@ -10609,7 +10765,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -10729,9 +10885,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.101" +version = "2.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce2b7fc941b3a24138a0a7cf8e858bfc6a992e7978a068a5c760deb0ed43caf" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" dependencies = [ "proc-macro2", "quote", @@ -10747,19 +10903,19 @@ dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.101", + "syn 
2.0.106", ] [[package]] name = "syn-solidity" -version = "1.1.2" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5d879005cc1b5ba4e18665be9e9501d9da3a9b95f625497c4cb7ee082b532e" +checksum = "a0b198d366dbec045acfcd97295eb653a7a2b40e4dc764ef1e79aafcad439d3c" dependencies = [ "paste", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -10779,7 +10935,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -10807,15 +10963,15 @@ checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" [[package]] name = "tempfile" -version = "3.20.0" +version = "3.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" +checksum = "15b61f8f20e3a6f7e0649d825294eaf317edce30f82cf6026e7e4cb9222a7d1e" dependencies = [ "fastrand", "getrandom 0.3.3", "once_cell", - "rustix 1.0.7", - "windows-sys 0.59.0", + "rustix 1.0.8", + "windows-sys 0.60.2", ] [[package]] @@ -10860,11 +11016,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.12" +version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" dependencies = [ - "thiserror-impl 2.0.12", + "thiserror-impl 2.0.16", ] [[package]] @@ -10875,28 +11031,27 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "thiserror-impl" -version = "2.0.12" +version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +checksum = 
"6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "thread_local" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" dependencies = [ "cfg-if", - "once_cell", ] [[package]] @@ -10970,9 +11125,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" dependencies = [ "tinyvec_macros", ] @@ -10985,17 +11140,19 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.45.1" +version = "1.47.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779" +checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" dependencies = [ "backtrace", "bytes", + "io-uring", "libc", "mio", "pin-project-lite", + "slab", "socket2", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -11019,9 +11176,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.22" +version = "0.8.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05ae329d1f08c4d17a59bed7ff5b5a769d062e64a62d34a3261b219e62cd5aae" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" dependencies = [ "serde", "serde_spanned", @@ -11031,20 +11188,20 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.9" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3da5db5a963e24bc68be8b17b6fa82814bb22ee8660f192bb182771d498f09a3" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.22.26" +version = "0.22.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ - "indexmap 2.9.0", + "indexmap 2.10.0", "serde", "serde_spanned", "toml_datetime", @@ -11054,9 +11211,9 @@ dependencies = [ [[package]] name = "toml_write" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfb942dfe1d8e29a7ee7fcbde5bd2b9a25fb89aa70caea2eba3bee836ff41076" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" [[package]] name = "tower" @@ -11079,7 +11236,7 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", "bytes", "futures-util", "http", @@ -11117,20 +11274,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.28" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] name = "tracing-core" -version = "0.1.33" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" dependencies = [ "once_cell", "valuable", @@ -11429,9 +11586,9 @@ 
dependencies = [ [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasi" @@ -11464,7 +11621,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "wasm-bindgen-shared", ] @@ -11499,7 +11656,7 @@ checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11811,15 +11968,15 @@ checksum = "24d643ce3fd3e5b54854602a080f34fb10ab75e0b813ee32d00ca2b44fa74762" dependencies = [ "either", "env_home", - "rustix 1.0.7", + "rustix 1.0.8", "winsafe", ] [[package]] name = "wide" -version = "0.7.32" +version = "0.7.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41b5576b9a81633f3e8df296ce0063042a73507636cbe956c61133dd7034ab22" +checksum = "0ce5da8ecb62bcd8ec8b7ea19f69a51275e91299be594ea5cc6ef7819e16cd03" dependencies = [ "bytemuck", "safe_arch", @@ -11843,11 +12000,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +checksum = "0978bf7171b3d90bac376700cb56d606feb40f251a475a5d6634613564460b22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] @@ -11856,6 +12013,65 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-core" +version = "0.61.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "windows-interface" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + +[[package]] +name = "windows-result" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link", +] + [[package]] name = "windows-sys" version = "0.45.0" @@ -11876,20 +12092,20 @@ dependencies = [ [[package]] name = "windows-sys" -version = "0.52.0" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets 0.52.6", ] [[package]] name = "windows-sys" -version = "0.59.0" +version = 
"0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ - "windows-targets 0.52.6", + "windows-targets 0.53.3", ] [[package]] @@ -11931,13 +12147,30 @@ dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", + "windows_i686_gnullvm 0.52.6", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] +[[package]] +name = "windows-targets" +version = "0.53.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -11956,6 +12189,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + [[package]] name = "windows_aarch64_msvc" version = "0.42.2" @@ -11974,6 +12213,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + [[package]] name = "windows_i686_gnu" version = "0.42.2" @@ -11992,12 +12237,24 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" +[[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + [[package]] name = "windows_i686_msvc" version = "0.42.2" @@ -12016,6 +12273,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" +[[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + [[package]] name = "windows_x86_64_gnu" version = "0.42.2" @@ -12034,6 +12297,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" @@ -12052,6 +12321,12 @@ version = "0.52.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + [[package]] name = "windows_x86_64_msvc" version = "0.42.2" @@ -12071,10 +12346,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] -name = "winnow" -version = "0.7.10" +name = "windows_x86_64_msvc" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06928c8748d81b05c9be96aad92e1b6ff01833332f281e8cfca3be4b35fc9ec" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" + +[[package]] +name = "winnow" +version = "0.7.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" dependencies = [ "memchr", ] @@ -12091,7 +12372,7 @@ version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.2", ] [[package]] @@ -12123,12 +12404,12 @@ dependencies = [ [[package]] name = "xattr" -version = "1.5.0" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d65cbf2f12c15564212d48f4e3dfb87923d25d611f2aed18f4cb23f0413d89e" +checksum = "af3a19837351dc82ba89f8a125e22a3c475f05aba604acc023d62b2739ae2909" dependencies = [ "libc", - "rustix 1.0.7", + "rustix 1.0.8", ] [[package]] @@ -12140,7 +12421,7 @@ dependencies = [ "Inflector", "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -12200,28 +12481,28 @@ checksum = 
"38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "synstructure", ] [[package]] name = "zerocopy" -version = "0.8.25" +version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1702d9583232ddb9174e01bb7c15a2ab8fb1bc6f227aa1233858c351a3ba0cb" +checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.25" +version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28a6e20d751156648aa063f3800b706ee209a32c0b4d9f24be3d980b01be55ef" +checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -12241,7 +12522,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", "synstructure", ] @@ -12262,7 +12543,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] @@ -12278,9 +12559,9 @@ dependencies = [ [[package]] name = "zerovec" -version = "0.11.2" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428" +checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" dependencies = [ "yoke", "zerofrom", @@ -12295,7 +12576,7 @@ checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.101", + "syn 2.0.106", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 5877089..bc699f5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,22 +14,22 @@ repository = "https://github.com/paritytech/revive" 
rust-version = "1.85.0" [workspace.dependencies] -resolc = { version = "0.3.0", path = "crates/resolc" } +resolc = { version = "0.4.0", path = "crates/resolc", default-features = false } revive-benchmarks = { version = "0.1.0", path = "crates/benchmarks" } revive-builtins = { version = "0.1.0", path = "crates/builtins" } -revive-common = { version = "0.1.0", path = "crates/common" } +revive-common = { version = "0.2.0", path = "crates/common" } revive-differential = { version = "0.1.0", path = "crates/differential" } revive-explorer = { version = "0.1.0", path = "crates/explore" } revive-integration = { version = "0.1.1", path = "crates/integration" } -revive-linker = { version = "0.1.0", path = "crates/linker" } +revive-linker = { version = "0.2.0", path = "crates/linker" } lld-sys = { version = "0.1.0", path = "crates/lld-sys" } -revive-llvm-context = { version = "0.3.0", path = "crates/llvm-context" } +revive-llvm-context = { version = "0.4.0", path = "crates/llvm-context" } revive-runtime-api = { version = "0.2.0", path = "crates/runtime-api" } revive-runner = { version = "0.1.0", path = "crates/runner" } -revive-solc-json-interface = { version = "0.2.0", path = "crates/solc-json-interface" } +revive-solc-json-interface = { version = "0.3.0", path = "crates/solc-json-interface", default-features = false } revive-stdlib = { version = "0.1.1", path = "crates/stdlib" } revive-build-utils = { version = "0.1.0", path = "crates/build-utils" } -revive-yul = { version = "0.2.1", path = "crates/yul" } +revive-yul = { version = "0.3.0", path = "crates/yul" } hex = "0.4.3" cc = "1.2" @@ -71,6 +71,7 @@ tar = "0.4" toml = "0.8" assert_cmd = "2.0" assert_fs = "1.1" +normpath = "1.3" # polkadot-sdk and friends codec = { version = "3.7.5", default-features = false, package = "parity-scale-codec" } diff --git a/clippy.toml b/clippy.toml new file mode 100644 index 0000000..4e1d44c --- /dev/null +++ b/clippy.toml @@ -0,0 +1 @@ +large-error-threshold = 192 diff --git 
a/crates/common/Cargo.toml b/crates/common/Cargo.toml index c0bfb46..89d767d 100644 --- a/crates/common/Cargo.toml +++ b/crates/common/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "revive-common" -version.workspace = true +version = "0.2.0" license.workspace = true edition.workspace = true repository.workspace = true @@ -15,6 +15,8 @@ doctest = false [dependencies] anyhow = { workspace = true } +hex = { workspace = true } +sha3 = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true, features = [ "arbitrary_precision", "unbounded_depth" ] } serde_stacker = { workspace = true } diff --git a/crates/common/src/contract_identifier.rs b/crates/common/src/contract_identifier.rs new file mode 100644 index 0000000..615e9b1 --- /dev/null +++ b/crates/common/src/contract_identifier.rs @@ -0,0 +1,33 @@ +//! The contract identifier helper library. + +use serde::{Deserialize, Serialize}; + +/// This structure simplifies passing the contract identifiers through the compilation pipeline. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ContractIdentifier { + /// The absolute file path. + pub path: String, + /// The contract name. + /// Is set for Solidity contracts only. Otherwise it would be equal to the file name. + pub name: Option, + /// The full contract identifier. + /// For Solidity, The format is `:`. + /// For other languages, ``. + pub full_path: String, +} + +impl ContractIdentifier { + /// A shortcut constructor. 
+ pub fn new(path: String, name: Option) -> Self { + let full_path = match name { + Some(ref name) => format!("{path}:{name}"), + None => path.clone(), + }; + + Self { + path, + name, + full_path, + } + } +} diff --git a/crates/common/src/extension.rs b/crates/common/src/extension.rs index 562acbb..a9969c7 100644 --- a/crates/common/src/extension.rs +++ b/crates/common/src/extension.rs @@ -37,4 +37,4 @@ pub static EXTENSION_POLKAVM_ASSEMBLY: &str = "pvmasm"; pub static EXTENSION_POLKAVM_BINARY: &str = "pvm"; /// The ELF shared object file extension. -pub static EXTENSION_SHARED_OBJECT: &str = "so"; +pub static EXTENSION_OBJECT: &str = "o"; diff --git a/crates/common/src/keccak256.rs b/crates/common/src/keccak256.rs new file mode 100644 index 0000000..03389a3 --- /dev/null +++ b/crates/common/src/keccak256.rs @@ -0,0 +1,68 @@ +//! Keccak-256 hash utilities. + +use serde::{Deserialize, Serialize}; +use sha3::digest::FixedOutput; +use sha3::Digest; + +pub const DIGEST_BYTES: usize = 32; + +/// Keccak-256 hash utilities. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct Keccak256 { + /// Binary representation. + bytes: [u8; DIGEST_BYTES], + /// Hexadecimal string representation. + string: String, +} + +impl Keccak256 { + /// Computes the `keccak256` hash for `preimage`. + pub fn from_slice(preimage: &[u8]) -> Self { + let bytes = sha3::Keccak256::digest(preimage).into(); + let string = format!("0x{}", hex::encode(bytes)); + Self { bytes, string } + } + + /// Computes the `keccak256` hash for an array of `preimages`. + pub fn from_slices>(preimages: &[R]) -> Self { + let mut hasher = sha3::Keccak256::new(); + for preimage in preimages.iter() { + hasher.update(preimage); + } + let bytes: [u8; DIGEST_BYTES] = hasher.finalize_fixed().into(); + let string = format!("0x{}", hex::encode(bytes)); + Self { bytes, string } + } + + /// Returns a reference to the 32-byte SHA-3 hash. 
+ pub fn as_bytes(&self) -> &[u8] { + self.bytes.as_slice() + } + + /// Returns a reference to the hexadecimal string representation. + pub fn as_str(&self) -> &str { + self.string.as_str() + } + + /// Extracts the binary representation. + pub fn to_vec(&self) -> Vec { + self.bytes.to_vec() + } +} + +impl std::fmt::Display for Keccak256 { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +#[cfg(test)] +mod tests { + #[test] + fn hash_and_stringify_works() { + assert_eq!( + super::Keccak256::from_slices(&["foo".as_bytes(), "bar".as_bytes(),]).as_str(), + "0x38d18acb67d25c8bb9942764b62f18e17054f66a817bd4295423adf9ed98873e" + ); + } +} diff --git a/crates/common/src/lib.rs b/crates/common/src/lib.rs index b9fd621..3081632 100644 --- a/crates/common/src/lib.rs +++ b/crates/common/src/lib.rs @@ -3,9 +3,13 @@ pub(crate) mod base; pub(crate) mod bit_length; pub(crate) mod byte_length; +pub(crate) mod contract_identifier; pub(crate) mod evm_version; pub(crate) mod exit_code; pub(crate) mod extension; +pub(crate) mod keccak256; +pub(crate) mod metadata; +pub(crate) mod object; pub(crate) mod utils; pub use self::base::*; @@ -14,4 +18,8 @@ pub use self::byte_length::*; pub use self::evm_version::EVMVersion; pub use self::exit_code::*; pub use self::extension::*; +pub use self::keccak256::*; +pub use self::metadata::*; +pub use self::object::*; pub use self::utils::*; +pub use contract_identifier::*; diff --git a/crates/common/src/metadata.rs b/crates/common/src/metadata.rs new file mode 100644 index 0000000..b62f96e --- /dev/null +++ b/crates/common/src/metadata.rs @@ -0,0 +1,42 @@ +//! The metadata hash type. + +use std::str::FromStr; + +use serde::{Deserialize, Serialize}; + +/// The metadata hash type. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub enum MetadataHash { + /// Do not include bytecode hash. + #[serde(rename = "none")] + None, + /// Include the `ipfs` hash. 
+ #[serde(rename = "ipfs")] + IPFS, + /// Include the `keccak256`` hash. + #[serde(rename = "keccak256")] + Keccak256, +} + +impl FromStr for MetadataHash { + type Err = anyhow::Error; + + fn from_str(string: &str) -> Result { + match string { + "none" => Ok(Self::None), + "ipfs" => Ok(Self::IPFS), + "keccak256" => Ok(Self::Keccak256), + string => anyhow::bail!("unknown bytecode hash mode: `{string}`"), + } + } +} + +impl std::fmt::Display for MetadataHash { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + Self::None => write!(f, "none"), + Self::IPFS => write!(f, "ipfs"), + Self::Keccak256 => write!(f, "keccak256"), + } + } +} diff --git a/crates/common/src/object.rs b/crates/common/src/object.rs new file mode 100644 index 0000000..ff2f24e --- /dev/null +++ b/crates/common/src/object.rs @@ -0,0 +1,61 @@ +//! The revive binary object helper module. + +use std::str::FromStr; + +use serde::{Deserialize, Serialize}; + +/// The binary object format. +/// +/// Unlinked contracts are stored in a different object format +/// than final (linked) contract blobs. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub enum ObjectFormat { + /// The unlinked ELF object format. + ELF, + /// The fully linked PVM format. + PVM, +} + +impl ObjectFormat { + pub const PVM_MAGIC: [u8; 4] = [b'P', b'V', b'M', b'\0']; + pub const ELF_MAGIC: [u8; 4] = [0x7f, b'E', b'L', b'F']; +} + +impl FromStr for ObjectFormat { + type Err = anyhow::Error; + + fn from_str(value: &str) -> Result { + match value { + "ELF" => Ok(Self::ELF), + "PVM" => Ok(Self::PVM), + _ => anyhow::bail!( + "Unknown object format: {value}. 
Supported formats: {}, {}", + Self::ELF.to_string(), + Self::PVM.to_string() + ), + } + } +} + +impl TryFrom<&[u8]> for ObjectFormat { + type Error = &'static str; + + fn try_from(value: &[u8]) -> Result { + if value.starts_with(&Self::PVM_MAGIC) { + return Ok(Self::PVM); + } + if value.starts_with(&Self::ELF_MAGIC) { + return Ok(Self::ELF); + } + Err("expected a contract object") + } +} + +impl std::fmt::Display for ObjectFormat { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::ELF => write!(f, "ELF"), + Self::PVM => write!(f, "PVM"), + } + } +} diff --git a/crates/common/src/utils.rs b/crates/common/src/utils.rs index 1778e9d..63b7afd 100644 --- a/crates/common/src/utils.rs +++ b/crates/common/src/utils.rs @@ -1,25 +1,45 @@ //! The compiler common utils. /// Deserializes a `serde_json` object from slice with the recursion limit disabled. +/// /// Must be used for all JSON I/O to avoid crashes due to the aforementioned limit. pub fn deserialize_from_slice(input: &[u8]) -> anyhow::Result where O: serde::de::DeserializeOwned, { - let mut deserializer = serde_json::Deserializer::from_slice(input); - deserializer.disable_recursion_limit(); - let deserializer = serde_stacker::Deserializer::new(&mut deserializer); - let result = O::deserialize(deserializer)?; - Ok(result) + let deserializer = serde_json::Deserializer::from_slice(input); + deserialize(deserializer) } /// Deserializes a `serde_json` object from string with the recursion limit disabled. +/// /// Must be used for all JSON I/O to avoid crashes due to the aforementioned limit. pub fn deserialize_from_str(input: &str) -> anyhow::Result where O: serde::de::DeserializeOwned, { - let mut deserializer = serde_json::Deserializer::from_str(input); + let deserializer = serde_json::Deserializer::from_str(input); + deserialize(deserializer) +} + +/// Deserializes a `serde_json` object from reader with the recursion limit disabled. 
+/// +/// Must be used for all JSON I/O to avoid crashes due to the aforementioned limit. +pub fn deserialize_from_reader(reader: R) -> anyhow::Result +where + R: std::io::Read, + O: serde::de::DeserializeOwned, +{ + let deserializer = serde_json::Deserializer::from_reader(reader); + deserialize(deserializer) +} + +/// Runs the generic deserializer. +pub fn deserialize<'de, R, O>(mut deserializer: serde_json::Deserializer) -> anyhow::Result +where + R: serde_json::de::Read<'de>, + O: serde::de::DeserializeOwned, +{ deserializer.disable_recursion_limit(); let deserializer = serde_stacker::Deserializer::new(&mut deserializer); let result = O::deserialize(deserializer)?; diff --git a/crates/integration/src/cases.rs b/crates/integration/src/cases.rs index df1b9a6..4d4ed8a 100644 --- a/crates/integration/src/cases.rs +++ b/crates/integration/src/cases.rs @@ -12,6 +12,26 @@ pub struct Contract { pub calldata: Vec, } +impl Contract { + pub fn build(calldata: Vec, name: &'static str, code: &str) -> Self { + Self { + name, + evm_runtime: compile_evm_bin_runtime(name, code), + pvm_runtime: compile_blob(name, code), + calldata, + } + } + + pub fn build_size_opt(calldata: Vec, name: &'static str, code: &str) -> Self { + Self { + name, + evm_runtime: compile_evm_bin_runtime(name, code), + pvm_runtime: compile_blob_with_options(name, code, true, OptimizerSettings::size()), + calldata, + } + } +} + macro_rules! case { // Arguments: // 1. 
The file name, expect to live under "../contracts/" @@ -261,26 +281,6 @@ sol!( case!("AddressPredictor.sol", Predicted, constructorCall, predicted_constructor, salt: U256); case!("AddressPredictor.sol", AddressPredictor, constructorCall, address_predictor_constructor, salt: U256, bytecode: Bytes); -impl Contract { - pub fn build(calldata: Vec, name: &'static str, code: &str) -> Self { - Self { - name, - evm_runtime: compile_evm_bin_runtime(name, code), - pvm_runtime: compile_blob(name, code), - calldata, - } - } - - pub fn build_size_opt(calldata: Vec, name: &'static str, code: &str) -> Self { - Self { - name, - evm_runtime: compile_evm_bin_runtime(name, code), - pvm_runtime: compile_blob_with_options(name, code, true, OptimizerSettings::size()), - calldata, - } - } -} - #[cfg(test)] mod tests { use rayon::iter::{IntoParallelIterator, ParallelIterator}; diff --git a/crates/linker/Cargo.toml b/crates/linker/Cargo.toml index 1157e4a..b02bc9b 100644 --- a/crates/linker/Cargo.toml +++ b/crates/linker/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "revive-linker" -version.workspace = true +version = "0.2.0" license.workspace = true edition.workspace = true repository.workspace = true @@ -8,10 +8,10 @@ authors.workspace = true description = "revive compiler linker utils" [dependencies] -tempfile = { workspace = true } -polkavm-linker = { workspace = true } -libc = { workspace = true } anyhow = { workspace = true } +libc = { workspace = true } +polkavm-linker = { workspace = true } +tempfile = { workspace = true } -revive-builtins = { workspace = true } lld-sys = { workspace = true } +revive-builtins = { workspace = true } diff --git a/crates/linker/src/elf.rs b/crates/linker/src/elf.rs new file mode 100644 index 0000000..afa3e46 --- /dev/null +++ b/crates/linker/src/elf.rs @@ -0,0 +1,114 @@ +//! The revive ELF object linker library. 
+ +use std::{ffi::CString, fs, path::PathBuf, sync::Mutex}; + +use lld_sys::LLDELFLink; +use tempfile::TempDir; + +use revive_builtins::COMPILER_RT; + +static GUARD: Mutex<()> = Mutex::new(()); + +/// The revive ELF object linker. +pub struct ElfLinker { + temporary_directory: TempDir, + output_path: PathBuf, + object_path: PathBuf, + symbols_path: PathBuf, + linker_script_path: PathBuf, +} + +impl ElfLinker { + const LINKER_SCRIPT: &str = r#" +SECTIONS { + .text : { KEEP(*(.text.polkavm_export)) *(.text .text.*) } +}"#; + + const BUILTINS_ARCHIVE_FILE: &str = "libclang_rt.builtins-riscv64.a"; + const BUILTINS_LIB_NAME: &str = "clang_rt.builtins-riscv64"; + + /// The setup routine prepares a temporary working directory. + pub fn setup() -> anyhow::Result { + let temporary_directory = TempDir::new()?; + let object_path = temporary_directory.path().join("obj.o"); + let output_path = temporary_directory.path().join("out.o"); + let symbols_path = temporary_directory.path().join("sym.o"); + let linker_script_path = temporary_directory.path().join("linker.ld"); + + fs::write(&linker_script_path, Self::LINKER_SCRIPT) + .map_err(|message| anyhow::anyhow!("{message} {linker_script_path:?}",))?; + + let compiler_rt_path = temporary_directory.path().join(Self::BUILTINS_ARCHIVE_FILE); + fs::write(&compiler_rt_path, COMPILER_RT) + .map_err(|message| anyhow::anyhow!("{message} {compiler_rt_path:?}"))?; + + Ok(Self { + temporary_directory, + output_path, + object_path, + symbols_path, + linker_script_path, + }) + } + + /// Link `input` with `symbols` and the `compiler_rt` via `LLD`. 
+ pub fn link>(self, input: T, symbols: T) -> anyhow::Result> { + fs::write(&self.object_path, input) + .map_err(|message| anyhow::anyhow!("{message} {:?}", self.object_path))?; + + fs::write(&self.symbols_path, symbols) + .map_err(|message| anyhow::anyhow!("{message} {:?}", self.symbols_path))?; + + if lld(self + .create_arguments() + .into_iter() + .map(|v| v.to_string()) + .collect()) + { + return Err(anyhow::anyhow!("ld.lld failed")); + } + + Ok(fs::read(&self.output_path)?) + } + + /// The argument creation helper function. + fn create_arguments(&self) -> Vec { + [ + "ld.lld", + "--error-limit=0", + "--relocatable", + "--emit-relocs", + "--no-relax", + "--unique", + "--gc-sections", + self.linker_script_path.to_str().expect("should be utf8"), + "-o", + self.output_path.to_str().expect("should be utf8"), + self.object_path.to_str().expect("should be utf8"), + self.symbols_path.to_str().expect("should be utf8"), + "--library-path", + self.temporary_directory + .path() + .to_str() + .expect("should be utf8"), + "--library", + Self::BUILTINS_LIB_NAME, + ] + .iter() + .map(ToString::to_string) + .collect() + } +} + +/// The thread-safe LLD helper function. +fn lld(arguments: Vec) -> bool { + let c_strings = arguments + .into_iter() + .map(|arg| CString::new(arg).expect("ld.lld args should not contain null bytes")) + .collect::>(); + + let args: Vec<*const libc::c_char> = c_strings.iter().map(|arg| arg.as_ptr()).collect(); + + let _lock = GUARD.lock().expect("ICE: linker mutex should not poison"); + unsafe { LLDELFLink(args.as_ptr(), args.len()) == 0 } +} diff --git a/crates/linker/src/lib.rs b/crates/linker/src/lib.rs index 9ded026..57ea55a 100644 --- a/crates/linker/src/lib.rs +++ b/crates/linker/src/lib.rs @@ -1,76 +1,4 @@ -use std::{env, ffi::CString, fs}; +//! The revive ELF object to PVM blob linker library. 
-use lld_sys::LLDELFLink; -use revive_builtins::COMPILER_RT; - -const LINKER_SCRIPT: &str = r#" -SECTIONS { - .text : { KEEP(*(.text.polkavm_export)) *(.text .text.*) } -}"#; - -const BUILTINS_ARCHIVE_FILE: &str = "libclang_rt.builtins-riscv64.a"; -const BUILTINS_LIB_NAME: &str = "clang_rt.builtins-riscv64"; - -fn invoke_lld(cmd_args: &[&str]) -> bool { - let c_strings = cmd_args - .iter() - .map(|arg| CString::new(*arg).expect("ld.lld args should not contain null bytes")) - .collect::>(); - - let args: Vec<*const libc::c_char> = c_strings.iter().map(|arg| arg.as_ptr()).collect(); - - unsafe { LLDELFLink(args.as_ptr(), args.len()) == 0 } -} - -pub fn polkavm_linker>(code: T, strip_binary: bool) -> anyhow::Result> { - let mut config = polkavm_linker::Config::default(); - config.set_strip(strip_binary); - config.set_optimize(true); - - polkavm_linker::program_from_elf(config, code.as_ref()) - .map_err(|reason| anyhow::anyhow!("polkavm linker failed: {}", reason)) -} - -pub fn link>(input: T) -> anyhow::Result> { - let dir = tempfile::tempdir().expect("failed to create temp directory for linking"); - let output_path = dir.path().join("out.so"); - let object_path = dir.path().join("out.o"); - let linker_script_path = dir.path().join("linker.ld"); - let compiler_rt_path = dir.path().join(BUILTINS_ARCHIVE_FILE); - - fs::write(&object_path, input).map_err(|msg| anyhow::anyhow!("{msg} {object_path:?}"))?; - - if env::var("PVM_LINKER_DUMP_OBJ").is_ok() { - fs::copy(&object_path, "/tmp/out.o")?; - } - - fs::write(&linker_script_path, LINKER_SCRIPT) - .map_err(|msg| anyhow::anyhow!("{msg} {linker_script_path:?}"))?; - - fs::write(&compiler_rt_path, COMPILER_RT) - .map_err(|msg| anyhow::anyhow!("{msg} {compiler_rt_path:?}"))?; - - let ld_args = [ - "ld.lld", - "--error-limit=0", - "--relocatable", - "--emit-relocs", - "--no-relax", - "--unique", - "--gc-sections", - "--library-path", - dir.path().to_str().expect("should be utf8"), - "--library", - BUILTINS_LIB_NAME, - 
linker_script_path.to_str().expect("should be utf8"), - object_path.to_str().expect("should be utf8"), - "-o", - output_path.to_str().expect("should be utf8"), - ]; - - if invoke_lld(&ld_args) { - return Err(anyhow::anyhow!("ld.lld failed")); - } - - Ok(fs::read(&output_path)?) -} +pub mod elf; +pub mod pvm; diff --git a/crates/linker/src/pvm.rs b/crates/linker/src/pvm.rs new file mode 100644 index 0000000..5b6141b --- /dev/null +++ b/crates/linker/src/pvm.rs @@ -0,0 +1,10 @@ +//! The revive PVM blob linker library. + +pub fn polkavm_linker>(code: T, strip_binary: bool) -> anyhow::Result> { + let mut config = polkavm_linker::Config::default(); + config.set_strip(strip_binary); + config.set_optimize(true); + + polkavm_linker::program_from_elf(config, code.as_ref()) + .map_err(|reason| anyhow::anyhow!("polkavm linker failed: {}", reason)) +} diff --git a/crates/llvm-context/Cargo.toml b/crates/llvm-context/Cargo.toml index 2fad8e7..963be73 100644 --- a/crates/llvm-context/Cargo.toml +++ b/crates/llvm-context/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "revive-llvm-context" -version = "0.3.0" +version = "0.4.0" license.workspace = true edition.workspace = true repository.workspace = true @@ -20,7 +20,6 @@ itertools = { workspace = true } serde = { workspace = true, features = ["derive"] } num = { workspace = true } hex = { workspace = true } -sha3 = { workspace = true } inkwell = { workspace = true } libc = { workspace = true } polkavm-disassembler = { workspace = true } diff --git a/crates/llvm-context/src/debug_config/ir_type.rs b/crates/llvm-context/src/debug_config/ir_type.rs index 195fc95..19a6768 100644 --- a/crates/llvm-context/src/debug_config/ir_type.rs +++ b/crates/llvm-context/src/debug_config/ir_type.rs @@ -1,5 +1,9 @@ //! The debug IR type. +use revive_common::{ + EXTENSION_LLVM_SOURCE, EXTENSION_OBJECT, EXTENSION_POLKAVM_ASSEMBLY, EXTENSION_YUL, +}; + /// The debug IR type. 
#[allow(clippy::upper_case_acronyms)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -11,22 +15,17 @@ pub enum IRType { /// Whether to dump the assembly code. Assembly, /// Whether to dump the ELF shared object - SO, - /// Whether to jump JSON - #[cfg(debug_assertions)] - JSON, + Object, } impl IRType { /// Returns the file extension for the specified IR. pub fn file_extension(&self) -> &'static str { match self { - Self::Yul => revive_common::EXTENSION_YUL, - Self::LLVM => revive_common::EXTENSION_LLVM_SOURCE, - Self::Assembly => revive_common::EXTENSION_POLKAVM_ASSEMBLY, - #[cfg(debug_assertions)] - Self::JSON => revive_common::EXTENSION_JSON, - Self::SO => revive_common::EXTENSION_SHARED_OBJECT, + Self::Yul => EXTENSION_YUL, + Self::LLVM => EXTENSION_LLVM_SOURCE, + Self::Assembly => EXTENSION_POLKAVM_ASSEMBLY, + Self::Object => EXTENSION_OBJECT, } } } diff --git a/crates/llvm-context/src/debug_config/mod.rs b/crates/llvm-context/src/debug_config/mod.rs index af32a65..cfcd8db 100644 --- a/crates/llvm-context/src/debug_config/mod.rs +++ b/crates/llvm-context/src/debug_config/mod.rs @@ -1,8 +1,5 @@ //! The debug configuration. -pub mod ir_type; - -use std::path::Path; use std::path::PathBuf; use serde::Deserialize; @@ -10,6 +7,8 @@ use serde::Serialize; use self::ir_type::IRType; +pub mod ir_type; + /// The debug configuration. #[derive(Debug, Default, Serialize, Deserialize, Clone)] pub struct DebugConfig { @@ -18,13 +17,7 @@ pub struct DebugConfig { /// Whether debug info should be emitted. pub emit_debug_info: bool, /// The YUL debug output file path. - /// - /// Is expected to be configured when running in YUL mode. pub contract_path: Option, - /// The YUL input file path. - /// - /// Is expected to be configured when not running in YUL mode. - pub yul_path: Option, } impl DebugConfig { @@ -34,29 +27,15 @@ impl DebugConfig { output_directory, emit_debug_info, contract_path: None, - yul_path: None, } } - /// Set the current YUL path. 
- pub fn set_yul_path(&mut self, yul_path: &Path) { - self.yul_path = yul_path.to_path_buf().into(); - } - /// Set the current contract path. pub fn set_contract_path(&mut self, contract_path: &str) { self.contract_path = self.yul_source_path(contract_path); } - /// Returns with the following precedence: - /// 1. The YUL source path if it was configured. - /// 2. The source YUL path from the debug output dir if it was configured. - /// 3. `None` if there is no debug output directory. pub fn yul_source_path(&self, contract_path: &str) -> Option { - if let Some(path) = self.yul_path.as_ref() { - return Some(path.clone()); - } - self.output_directory.as_ref().map(|output_directory| { let mut file_path = output_directory.to_owned(); let full_file_name = Self::full_file_name(contract_path, None, IRType::Yul); @@ -128,7 +107,7 @@ impl DebugConfig { pub fn dump_object(&self, contract_path: &str, code: &[u8]) -> anyhow::Result<()> { if let Some(output_directory) = self.output_directory.as_ref() { let mut file_path = output_directory.to_owned(); - let full_file_name = Self::full_file_name(contract_path, None, IRType::SO); + let full_file_name = Self::full_file_name(contract_path, None, IRType::Object); file_path.push(full_file_name); std::fs::write(file_path, code)?; } @@ -136,24 +115,6 @@ impl DebugConfig { Ok(()) } - /// Dumps the stage output as a json file suitable for use with --recursive-process - #[cfg(debug_assertions)] - pub fn dump_stage_output( - &self, - contract_path: &str, - contract_suffix: Option<&str>, - stage_json: &Vec, - ) -> anyhow::Result<()> { - if let Some(output_directory) = self.output_directory.as_ref() { - let mut file_path = output_directory.to_owned(); - let full_file_name = Self::full_file_name(contract_path, contract_suffix, IRType::JSON); - file_path.push(full_file_name); - std::fs::write(file_path, stage_json)?; - } - - Ok(()) - } - /// Creates a full file name, given the contract full path, suffix, and extension. 
fn full_file_name(contract_path: &str, suffix: Option<&str>, ir_type: IRType) -> String { let mut full_file_name = contract_path.replace('/', "_").replace(':', "."); diff --git a/crates/llvm-context/src/lib.rs b/crates/llvm-context/src/lib.rs index c7a9ae7..db83d44 100644 --- a/crates/llvm-context/src/lib.rs +++ b/crates/llvm-context/src/lib.rs @@ -1,5 +1,7 @@ //! The LLVM context library. +#![allow(clippy::too_many_arguments)] + use std::ffi::CString; use std::sync::OnceLock; @@ -8,7 +10,7 @@ pub use self::debug_config::DebugConfig; pub use self::optimizer::settings::size_level::SizeLevel as OptimizerSettingsSizeLevel; pub use self::optimizer::settings::Settings as OptimizerSettings; pub use self::optimizer::Optimizer; -pub use self::polkavm::build_assembly_text as polkavm_build_assembly_text; +pub use self::polkavm::build as polkavm_build; pub use self::polkavm::context::address_space::AddressSpace as PolkaVMAddressSpace; pub use self::polkavm::context::argument::Argument as PolkaVMArgument; pub use self::polkavm::context::attribute::Attribute as PolkaVMAttribute; @@ -46,6 +48,7 @@ pub use self::polkavm::context::r#loop::Loop as PolkaVMLoop; pub use self::polkavm::context::solidity_data::SolidityData as PolkaVMContextSolidityData; pub use self::polkavm::context::yul_data::YulData as PolkaVMContextYulData; pub use self::polkavm::context::Context as PolkaVMContext; +pub use self::polkavm::disassemble as polkavm_disassemble; pub use self::polkavm::evm::arithmetic as polkavm_evm_arithmetic; pub use self::polkavm::evm::bitwise as polkavm_evm_bitwise; pub use self::polkavm::evm::call as polkavm_evm_call; @@ -66,13 +69,13 @@ pub use self::polkavm::evm::memory as polkavm_evm_memory; pub use self::polkavm::evm::r#return as polkavm_evm_return; pub use self::polkavm::evm::return_data as polkavm_evm_return_data; pub use self::polkavm::evm::storage as polkavm_evm_storage; +pub use self::polkavm::hash as polkavm_hash; +pub use self::polkavm::link as polkavm_link; pub use 
self::polkavm::r#const as polkavm_const; -pub use self::polkavm::Dependency as PolkaVMDependency; -pub use self::polkavm::DummyDependency as PolkaVMDummyDependency; pub use self::polkavm::DummyLLVMWritable as PolkaVMDummyLLVMWritable; pub use self::polkavm::WriteLLVM as PolkaVMWriteLLVM; -pub use self::target_machine::target::Target; -pub use self::target_machine::TargetMachine; +pub use self::target_machine::target::Target as PolkaVMTarget; +pub use self::target_machine::TargetMachine as PolkaVMTargetMachine; pub(crate) mod debug_config; pub(crate) mod optimizer; @@ -86,7 +89,7 @@ static DID_INITIALIZE: OnceLock<()> = OnceLock::new(); /// This is a no-op if called subsequentially. /// /// `llvm_arguments` are passed as-is to the LLVM CL options parser. -pub fn initialize_llvm(target: Target, name: &str, llvm_arguments: &[String]) { +pub fn initialize_llvm(target: PolkaVMTarget, name: &str, llvm_arguments: &[String]) { let Ok(_) = DID_INITIALIZE.set(()) else { return; // Tests don't go through a recursive process }; @@ -109,6 +112,6 @@ pub fn initialize_llvm(target: Target, name: &str, llvm_arguments: &[String]) { inkwell::support::enable_llvm_pretty_stack_trace(); match target { - Target::PVM => inkwell::targets::Target::initialize_riscv(&Default::default()), + PolkaVMTarget::PVM => inkwell::targets::Target::initialize_riscv(&Default::default()), } } diff --git a/crates/llvm-context/src/optimizer/mod.rs b/crates/llvm-context/src/optimizer/mod.rs index 1395aed..b02aaac 100644 --- a/crates/llvm-context/src/optimizer/mod.rs +++ b/crates/llvm-context/src/optimizer/mod.rs @@ -1,7 +1,5 @@ //! The LLVM optimizing tools. -pub mod settings; - use serde::Deserialize; use serde::Serialize; @@ -9,6 +7,8 @@ use crate::target_machine::TargetMachine; use self::settings::Settings; +pub mod settings; + /// The LLVM optimizing tools. 
#[derive(Debug, Serialize, Deserialize)] pub struct Optimizer { diff --git a/crates/llvm-context/src/optimizer/settings/mod.rs b/crates/llvm-context/src/optimizer/settings/mod.rs index 3237ec9..6ef642f 100644 --- a/crates/llvm-context/src/optimizer/settings/mod.rs +++ b/crates/llvm-context/src/optimizer/settings/mod.rs @@ -1,8 +1,5 @@ //! The LLVM optimizer settings. -pub mod size_level; - -use revive_solc_json_interface::SolcStandardJsonInputSettingsOptimizer; use serde::Deserialize; use serde::Serialize; @@ -10,6 +7,8 @@ use itertools::Itertools; use self::size_level::SizeLevel; +pub mod size_level; + /// The LLVM optimizer and code-gen settings. #[derive(Debug, Serialize, Deserialize, Clone, Eq)] pub struct Settings { @@ -20,9 +19,6 @@ pub struct Settings { /// The back-end optimization level. pub level_back_end: inkwell::OptimizationLevel, - /// Fallback to optimizing for size if the bytecode is too large. - pub is_fallback_to_size_enabled: bool, - /// Whether the LLVM `verify each` option is enabled. pub is_verify_each_enabled: bool, /// Whether the LLVM `debug logging` option is enabled. @@ -41,8 +37,6 @@ impl Settings { level_middle_end_size, level_back_end, - is_fallback_to_size_enabled: false, - is_verify_each_enabled: false, is_debug_logging_enabled: false, } @@ -62,8 +56,6 @@ impl Settings { level_middle_end_size, level_back_end, - is_fallback_to_size_enabled: false, - is_verify_each_enabled, is_debug_logging_enabled, } @@ -197,16 +189,6 @@ impl Settings { combinations } - - /// Sets the fallback to optimizing for size if the bytecode is too large. - pub fn enable_fallback_to_size(&mut self) { - self.is_fallback_to_size_enabled = true; - } - - /// Whether the fallback to optimizing for size is enabled. 
- pub fn is_fallback_to_size_enabled(&self) -> bool { - self.is_fallback_to_size_enabled - } } impl PartialEq for Settings { @@ -227,18 +209,3 @@ impl std::fmt::Display for Settings { ) } } - -impl TryFrom<&SolcStandardJsonInputSettingsOptimizer> for Settings { - type Error = anyhow::Error; - - fn try_from(value: &SolcStandardJsonInputSettingsOptimizer) -> Result { - let mut result = match value.mode { - Some(mode) => Self::try_from_cli(mode)?, - None => Self::size(), - }; - if value.fallback_to_optimizing_for_size.unwrap_or_default() { - result.enable_fallback_to_size(); - } - Ok(result) - } -} diff --git a/crates/llvm-context/src/polkavm/const/mod.rs b/crates/llvm-context/src/polkavm/const/mod.rs index 6f36793..5c18023 100644 --- a/crates/llvm-context/src/polkavm/const/mod.rs +++ b/crates/llvm-context/src/polkavm/const/mod.rs @@ -1,10 +1,12 @@ //! The LLVM context constants. +use revive_common::{BIT_LENGTH_X32, BYTE_LENGTH_WORD}; + /// The LLVM framework version. pub const LLVM_VERSION: semver::Version = semver::Version::new(18, 1, 4); /// The pointer width sized type. -pub static XLEN: usize = revive_common::BIT_LENGTH_X32; +pub static XLEN: usize = BIT_LENGTH_X32; /// The calldata size global variable name. pub static GLOBAL_CALLDATA_SIZE: &str = "calldatasize"; @@ -20,4 +22,4 @@ pub static GLOBAL_ADDRESS_SPILL_BUFFER: &str = "address_spill_buffer"; /// The deployer call header size that consists of: /// - bytecode hash (32 bytes) -pub const DEPLOYER_CALL_HEADER_SIZE: usize = revive_common::BYTE_LENGTH_WORD; +pub const DEPLOYER_CALL_HEADER_SIZE: usize = BYTE_LENGTH_WORD; diff --git a/crates/llvm-context/src/polkavm/context/argument.rs b/crates/llvm-context/src/polkavm/context/argument.rs index 8dfd18b..5f63254 100644 --- a/crates/llvm-context/src/polkavm/context/argument.rs +++ b/crates/llvm-context/src/polkavm/context/argument.rs @@ -66,9 +66,9 @@ impl<'ctx> Argument<'ctx> { /// Access the underlying value. 
/// /// Will emit a stack load if `self` is a pointer argument. - pub fn access( + pub fn access( &self, - context: &crate::polkavm::context::Context<'ctx, D>, + context: &crate::polkavm::context::Context<'ctx>, ) -> anyhow::Result> { match &self.value { Value::Register(value) => Ok(*value), @@ -79,9 +79,9 @@ impl<'ctx> Argument<'ctx> { /// Access the underlying value. /// /// Will emit a stack load if `self` is a pointer argument. - pub fn as_pointer( + pub fn as_pointer( &self, - context: &crate::polkavm::context::Context<'ctx, D>, + context: &crate::polkavm::context::Context<'ctx>, ) -> anyhow::Result> { match &self.value { Value::Register(value) => { diff --git a/crates/llvm-context/src/polkavm/context/build.rs b/crates/llvm-context/src/polkavm/context/build.rs index 5083539..fcb0bd4 100644 --- a/crates/llvm-context/src/polkavm/context/build.rs +++ b/crates/llvm-context/src/polkavm/context/build.rs @@ -2,6 +2,7 @@ use std::collections::BTreeMap; +use revive_common::BYTE_LENGTH_WORD; use serde::Deserialize; use serde::Serialize; @@ -9,30 +10,25 @@ use serde::Serialize; #[derive(Debug, Serialize, Deserialize)] pub struct Build { /// The PolkaVM text assembly. - pub assembly_text: String, + pub assembly_text: Option, /// The metadata hash. - pub metadata_hash: Option<[u8; revive_common::BYTE_LENGTH_WORD]>, + pub metadata_hash: Option<[u8; BYTE_LENGTH_WORD]>, /// The PolkaVM binary bytecode. pub bytecode: Vec, - /// The PolkaVM bytecode hash. - pub bytecode_hash: String, + /// The PolkaVM bytecode hash. Unlinked builds don't have a hash yet. + pub bytecode_hash: Option<[u8; BYTE_LENGTH_WORD]>, /// The hash-to-full-path mapping of the contract factory dependencies. pub factory_dependencies: BTreeMap, } impl Build { /// A shortcut constructor. 
- pub fn new( - assembly_text: String, - metadata_hash: Option<[u8; revive_common::BYTE_LENGTH_WORD]>, - bytecode: Vec, - bytecode_hash: String, - ) -> Self { + pub fn new(metadata_hash: Option<[u8; BYTE_LENGTH_WORD]>, bytecode: Vec) -> Self { Self { - assembly_text, + assembly_text: None, metadata_hash, bytecode, - bytecode_hash, + bytecode_hash: None, factory_dependencies: BTreeMap::new(), } } diff --git a/crates/llvm-context/src/polkavm/context/debug_info.rs b/crates/llvm-context/src/polkavm/context/debug_info.rs index 595b788..d322281 100644 --- a/crates/llvm-context/src/polkavm/context/debug_info.rs +++ b/crates/llvm-context/src/polkavm/context/debug_info.rs @@ -2,6 +2,8 @@ use std::cell::RefCell; +use revive_common::BIT_LENGTH_WORD; + use inkwell::debug_info::AsDIScope; use inkwell::debug_info::DIScope; @@ -164,7 +166,7 @@ impl<'ctx> DebugInfo<'ctx> { &self, flags: Option, ) -> anyhow::Result> { - self.create_primitive_type(revive_common::BIT_LENGTH_WORD, flags) + self.create_primitive_type(BIT_LENGTH_WORD, flags) } /// Return the DIBuilder. diff --git a/crates/llvm-context/src/polkavm/context/function/mod.rs b/crates/llvm-context/src/polkavm/context/function/mod.rs index 15a3222..e7bba5b 100644 --- a/crates/llvm-context/src/polkavm/context/function/mod.rs +++ b/crates/llvm-context/src/polkavm/context/function/mod.rs @@ -1,12 +1,5 @@ //! The LLVM IR generator function. -pub mod declaration; -pub mod intrinsics; -pub mod llvm_runtime; -pub mod r#return; -pub mod runtime; -pub mod yul_data; - use std::collections::HashMap; use inkwell::debug_info::AsDIScope; @@ -20,6 +13,13 @@ use self::declaration::Declaration; use self::r#return::Return; use self::yul_data::YulData; +pub mod declaration; +pub mod intrinsics; +pub mod llvm_runtime; +pub mod r#return; +pub mod runtime; +pub mod yul_data; + /// The LLVM IR generator function. 
#[derive(Debug)] pub struct Function<'ctx> { diff --git a/crates/llvm-context/src/polkavm/context/function/runtime/arithmetics.rs b/crates/llvm-context/src/polkavm/context/function/runtime/arithmetics.rs index ba9395f..5f45968 100644 --- a/crates/llvm-context/src/polkavm/context/function/runtime/arithmetics.rs +++ b/crates/llvm-context/src/polkavm/context/function/runtime/arithmetics.rs @@ -1,22 +1,19 @@ //! Translates the arithmetic operations. use inkwell::values::BasicValue; +use revive_common::BIT_LENGTH_WORD; use crate::polkavm::context::runtime::RuntimeFunction; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; use crate::polkavm::WriteLLVM; /// Implements the division operator according to the EVM specification. pub struct Division; -impl RuntimeFunction for Division -where - D: Dependency + Clone, -{ +impl RuntimeFunction for Division { const NAME: &'static str = "__revive_division"; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context.word_type().fn_type( &[context.word_type().into(), context.word_type().into()], false, @@ -25,7 +22,7 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { let operand_1 = Self::paramater(context, 0).into_int_value(); let operand_2 = Self::paramater(context, 1).into_int_value(); @@ -39,29 +36,23 @@ where } } -impl WriteLLVM for Division -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for Division { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } /// Implements the signed 
division operator according to the EVM specification. pub struct SignedDivision; -impl RuntimeFunction for SignedDivision -where - D: Dependency + Clone, -{ +impl RuntimeFunction for SignedDivision { const NAME: &'static str = "__revive_signed_division"; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context.word_type().fn_type( &[context.word_type().into(), context.word_type().into()], false, @@ -70,7 +61,7 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { let operand_1 = Self::paramater(context, 0).into_int_value(); let operand_2 = Self::paramater(context, 1).into_int_value(); @@ -96,9 +87,7 @@ where context.set_basic_block(block_overflow); let max_uint = context.builder().build_int_z_extend( - context - .integer_type(revive_common::BIT_LENGTH_WORD - 1) - .const_all_ones(), + context.integer_type(BIT_LENGTH_WORD - 1).const_all_ones(), context.word_type(), "max_uint", )?; @@ -121,29 +110,23 @@ where } } -impl WriteLLVM for SignedDivision -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for SignedDivision { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } /// Implements the remainder operator according to the EVM specification. 
pub struct Remainder; -impl RuntimeFunction for Remainder -where - D: Dependency + Clone, -{ +impl RuntimeFunction for Remainder { const NAME: &'static str = "__revive_remainder"; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context.word_type().fn_type( &[context.word_type().into(), context.word_type().into()], false, @@ -152,7 +135,7 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { let operand_1 = Self::paramater(context, 0).into_int_value(); let operand_2 = Self::paramater(context, 1).into_int_value(); @@ -166,29 +149,23 @@ where } } -impl WriteLLVM for Remainder -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for Remainder { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } /// Implements the signed remainder operator according to the EVM specification. 
pub struct SignedRemainder; -impl RuntimeFunction for SignedRemainder -where - D: Dependency + Clone, -{ +impl RuntimeFunction for SignedRemainder { const NAME: &'static str = "__revive_signed_remainder"; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context.word_type().fn_type( &[context.word_type().into(), context.word_type().into()], false, @@ -197,7 +174,7 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { let operand_1 = Self::paramater(context, 0).into_int_value(); let operand_2 = Self::paramater(context, 1).into_int_value(); @@ -211,16 +188,13 @@ where } } -impl WriteLLVM for SignedRemainder -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for SignedRemainder { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } @@ -231,13 +205,12 @@ where /// /// The result is either the calculated quotient or zero, /// selected at runtime. 
-fn wrapped_division<'ctx, D, F, T>( - context: &Context<'ctx, D>, +fn wrapped_division<'ctx, F, T>( + context: &Context<'ctx>, denominator: inkwell::values::IntValue<'ctx>, f: F, ) -> anyhow::Result> where - D: Dependency + Clone, F: FnOnce() -> anyhow::Result, T: inkwell::values::IntMathValue<'ctx>, { diff --git a/crates/llvm-context/src/polkavm/context/function/runtime/deploy_code.rs b/crates/llvm-context/src/polkavm/context/function/runtime/deploy_code.rs index e961040..92b569f 100644 --- a/crates/llvm-context/src/polkavm/context/function/runtime/deploy_code.rs +++ b/crates/llvm-context/src/polkavm/context/function/runtime/deploy_code.rs @@ -1,47 +1,36 @@ //! The deploy code function. -use std::marker::PhantomData; - use crate::polkavm::context::code_type::CodeType; use crate::polkavm::context::function::runtime; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; use crate::polkavm::WriteLLVM; /// The deploy code function. /// Is a special function that is only used by the front-end generated code. #[derive(Debug)] -pub struct DeployCode +pub struct DeployCode where - B: WriteLLVM, - D: Dependency + Clone, + B: WriteLLVM, { /// The deploy code AST representation. inner: B, - /// The `D` phantom data. - _pd: PhantomData, } -impl DeployCode +impl DeployCode where - B: WriteLLVM, - D: Dependency + Clone, + B: WriteLLVM, { /// A shortcut constructor. 
pub fn new(inner: B) -> Self { - Self { - inner, - _pd: PhantomData, - } + Self { inner } } } -impl WriteLLVM for DeployCode +impl WriteLLVM for DeployCode where - B: WriteLLVM, - D: Dependency + Clone, + B: WriteLLVM, { - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { let function_type = context.function_type::(vec![], 0); context.add_function( runtime::FUNCTION_DEPLOY_CODE, @@ -54,7 +43,7 @@ where self.inner.declare(context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { context.set_current_function(runtime::FUNCTION_DEPLOY_CODE, None)?; context.set_basic_block(context.current_function().borrow().entry_block()); diff --git a/crates/llvm-context/src/polkavm/context/function/runtime/entry.rs b/crates/llvm-context/src/polkavm/context/function/runtime/entry.rs index 0724d98..8c5827a 100644 --- a/crates/llvm-context/src/polkavm/context/function/runtime/entry.rs +++ b/crates/llvm-context/src/polkavm/context/function/runtime/entry.rs @@ -1,12 +1,16 @@ //! The entry function. use inkwell::types::BasicType; +use revive_common::BIT_LENGTH_ETH_ADDRESS; +use revive_runtime_api::immutable_data::{ + GLOBAL_IMMUTABLE_DATA_POINTER, GLOBAL_IMMUTABLE_DATA_SIZE, +}; +use revive_runtime_api::polkavm_imports::CALL_DATA_SIZE; use revive_solc_json_interface::PolkaVMDefaultHeapMemorySize; use crate::polkavm::context::address_space::AddressSpace; use crate::polkavm::context::function::runtime; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; use crate::polkavm::WriteLLVM; /// The entry function. @@ -21,10 +25,7 @@ impl Entry { /// Initializes the global variables. /// The pointers are not initialized, because it's not possible to create a null pointer. 
- pub fn initialize_globals(context: &mut Context) -> anyhow::Result<()> - where - D: Dependency + Clone, - { + pub fn initialize_globals(context: &mut Context) -> anyhow::Result<()> { context.set_global( crate::polkavm::GLOBAL_CALLDATA_SIZE, context.xlen_type(), @@ -52,7 +53,7 @@ impl Entry { heap_memory_type.const_zero(), ); - let address_type = context.integer_type(revive_common::BIT_LENGTH_ETH_ADDRESS); + let address_type = context.integer_type(BIT_LENGTH_ETH_ADDRESS); context.set_global( crate::polkavm::GLOBAL_ADDRESS_SPILL_BUFFER, address_type, @@ -64,16 +65,13 @@ impl Entry { } /// Populate the calldata size global value. - pub fn load_calldata_size(context: &mut Context) -> anyhow::Result<()> - where - D: Dependency + Clone, - { + pub fn load_calldata_size(context: &mut Context) -> anyhow::Result<()> { let call_data_size_pointer = context .get_global(crate::polkavm::GLOBAL_CALLDATA_SIZE)? .value .as_pointer_value(); let call_data_size_value = context - .build_runtime_call(revive_runtime_api::polkavm_imports::CALL_DATA_SIZE, &[]) + .build_runtime_call(CALL_DATA_SIZE, &[]) .expect("the call_data_size syscall method should return a value") .into_int_value(); let call_data_size_value = context.builder().build_int_truncate( @@ -90,10 +88,7 @@ impl Entry { /// Calls the deploy code if the first function argument was `1`. /// Calls the runtime code otherwise. 
- pub fn leave_entry(context: &mut Context) -> anyhow::Result<()> - where - D: Dependency + Clone, - { + pub fn leave_entry(context: &mut Context) -> anyhow::Result<()> { context.set_debug_location(0, 0, None)?; let is_deploy = context @@ -133,11 +128,8 @@ impl Entry { } } -impl WriteLLVM for Entry -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { +impl WriteLLVM for Entry { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { let entry_arguments = vec![context.bool_type().as_basic_type_enum()]; let entry_function_type = context.function_type(entry_arguments, 0); context.add_function( @@ -149,13 +141,13 @@ where )?; context.declare_global( - revive_runtime_api::immutable_data::GLOBAL_IMMUTABLE_DATA_POINTER, + GLOBAL_IMMUTABLE_DATA_POINTER, context.word_type().array_type(0), AddressSpace::Stack, ); context.declare_global( - revive_runtime_api::immutable_data::GLOBAL_IMMUTABLE_DATA_SIZE, + GLOBAL_IMMUTABLE_DATA_SIZE, context.xlen_type(), AddressSpace::Stack, ); @@ -166,7 +158,7 @@ where /// Instead of a single entrypoint, the runtime expects two exports: `call ` and `deploy`. /// `call` and `deploy` directly call `entry`, signaling a deploy if the first arg is `1`. /// The `entry` function loads calldata, sets globals and calls the runtime or deploy code. 
- fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { let entry = context .get_function(runtime::FUNCTION_ENTRY) .expect("the entry function should already be declared") diff --git a/crates/llvm-context/src/polkavm/context/function/runtime/revive.rs b/crates/llvm-context/src/polkavm/context/function/runtime/revive.rs index 294186c..90b930b 100644 --- a/crates/llvm-context/src/polkavm/context/function/runtime/revive.rs +++ b/crates/llvm-context/src/polkavm/context/function/runtime/revive.rs @@ -5,7 +5,6 @@ use inkwell::values::BasicValue; use crate::polkavm::context::function::Attribute; use crate::polkavm::context::runtime::RuntimeFunction; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; use crate::polkavm::WriteLLVM; /// Pointers are represented as opaque 256 bit integer values in EVM. @@ -15,10 +14,7 @@ use crate::polkavm::WriteLLVM; /// (but wrong) pointers when truncated. pub struct WordToPointer; -impl RuntimeFunction for WordToPointer -where - D: Dependency + Clone, -{ +impl RuntimeFunction for WordToPointer { const NAME: &'static str = "__revive_int_truncate"; const ATTRIBUTES: &'static [Attribute] = &[ @@ -27,7 +23,7 @@ where Attribute::AlwaysInline, ]; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context .xlen_type() .fn_type(&[context.word_type().into()], false) @@ -35,7 +31,7 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { let value = Self::paramater(context, 0).into_int_value(); let truncated = @@ -67,26 +63,20 @@ where } } -impl WriteLLVM for WordToPointer -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for WordToPointer { + fn declare(&mut self, 
context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } /// The revive runtime exit function. pub struct Exit; -impl RuntimeFunction for Exit -where - D: Dependency + Clone, -{ +impl RuntimeFunction for Exit { const NAME: &'static str = "__revive_exit"; const ATTRIBUTES: &'static [Attribute] = &[ @@ -95,7 +85,7 @@ where Attribute::AlwaysInline, ]; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context.void_type().fn_type( &[ context.xlen_type().into(), @@ -108,7 +98,7 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { let flags = Self::paramater(context, 0).into_int_value(); let offset = Self::paramater(context, 1).into_int_value(); @@ -133,15 +123,12 @@ where } } -impl WriteLLVM for Exit -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for Exit { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } diff --git a/crates/llvm-context/src/polkavm/context/function/runtime/runtime_code.rs b/crates/llvm-context/src/polkavm/context/function/runtime/runtime_code.rs index 1718392..a3abb74 100644 --- a/crates/llvm-context/src/polkavm/context/function/runtime/runtime_code.rs +++ b/crates/llvm-context/src/polkavm/context/function/runtime/runtime_code.rs @@ -1,47 +1,36 @@ //! The runtime code function. 
-use std::marker::PhantomData; - use crate::polkavm::context::code_type::CodeType; use crate::polkavm::context::function::runtime; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; use crate::polkavm::WriteLLVM; /// The runtime code function. /// Is a special function that is only used by the front-end generated code. #[derive(Debug)] -pub struct RuntimeCode +pub struct RuntimeCode where - B: WriteLLVM, - D: Dependency + Clone, + B: WriteLLVM, { /// The runtime code AST representation. inner: B, - /// The `D` phantom data. - _pd: PhantomData, } -impl RuntimeCode +impl RuntimeCode where - B: WriteLLVM, - D: Dependency + Clone, + B: WriteLLVM, { /// A shortcut constructor. pub fn new(inner: B) -> Self { - Self { - inner, - _pd: PhantomData, - } + Self { inner } } } -impl WriteLLVM for RuntimeCode +impl WriteLLVM for RuntimeCode where - B: WriteLLVM, - D: Dependency + Clone, + B: WriteLLVM, { - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { let function_type = context.function_type::(vec![], 0); context.add_function( runtime::FUNCTION_RUNTIME_CODE, @@ -54,7 +43,7 @@ where self.inner.declare(context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { context.set_current_function(runtime::FUNCTION_RUNTIME_CODE, None)?; context.set_basic_block(context.current_function().borrow().entry_block()); diff --git a/crates/llvm-context/src/polkavm/context/function/runtime/sbrk.rs b/crates/llvm-context/src/polkavm/context/function/runtime/sbrk.rs index 1ea12b8..da8a8d6 100644 --- a/crates/llvm-context/src/polkavm/context/function/runtime/sbrk.rs +++ b/crates/llvm-context/src/polkavm/context/function/runtime/sbrk.rs @@ -1,11 +1,11 @@ //! Emulates the linear EVM heap memory via a simulated `sbrk` system call. 
use inkwell::values::BasicValue; +use revive_common::BYTE_LENGTH_WORD; use crate::polkavm::context::attribute::Attribute; use crate::polkavm::context::runtime::RuntimeFunction; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; use crate::polkavm::WriteLLVM; /// Simulates the `sbrk` system call, reproducing the semantics of the EVM heap memory. @@ -24,10 +24,7 @@ use crate::polkavm::WriteLLVM; /// - Maintains the total memory size (`msize`) in global heap size value. pub struct Sbrk; -impl RuntimeFunction for Sbrk -where - D: Dependency + Clone, -{ +impl RuntimeFunction for Sbrk { const NAME: &'static str = "__sbrk_internal"; const ATTRIBUTES: &'static [Attribute] = &[ @@ -36,7 +33,7 @@ where Attribute::WillReturn, ]; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context.llvm().ptr_type(Default::default()).fn_type( &[context.xlen_type().into(), context.xlen_type().into()], false, @@ -45,7 +42,7 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { let offset = Self::paramater(context, 0).into_int_value(); let size = Self::paramater(context, 1).into_int_value(); @@ -71,7 +68,7 @@ where context.set_basic_block(offset_in_bounds_block); let mask = context .xlen_type() - .const_int(revive_common::BYTE_LENGTH_WORD as u64 - 1, false); + .const_int(BYTE_LENGTH_WORD as u64 - 1, false); let total_size = context .builder() .build_int_add(offset, size, "total_size")?; @@ -130,15 +127,12 @@ where } } -impl WriteLLVM for Sbrk -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for Sbrk { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, 
context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } diff --git a/crates/llvm-context/src/polkavm/context/global.rs b/crates/llvm-context/src/polkavm/context/global.rs index 27864d7..315cbf6 100644 --- a/crates/llvm-context/src/polkavm/context/global.rs +++ b/crates/llvm-context/src/polkavm/context/global.rs @@ -5,7 +5,6 @@ use inkwell::values::BasicValue; use crate::polkavm::context::address_space::AddressSpace; use crate::polkavm::context::Context; -use crate::PolkaVMDependency; /// The LLVM global value. #[derive(Debug, Clone, Copy)] @@ -18,15 +17,14 @@ pub struct Global<'ctx> { impl<'ctx> Global<'ctx> { /// A shortcut constructor. - pub fn new( - context: &mut Context<'ctx, D>, + pub fn new( + context: &mut Context<'ctx>, r#type: T, address_space: AddressSpace, initializer: V, name: &str, ) -> Self where - D: PolkaVMDependency + Clone, T: BasicType<'ctx>, V: BasicValue<'ctx>, { @@ -53,14 +51,13 @@ impl<'ctx> Global<'ctx> { } /// Construct an external global. - pub fn declare( - context: &mut Context<'ctx, D>, + pub fn declare( + context: &mut Context<'ctx>, r#type: T, address_space: AddressSpace, name: &str, ) -> Self where - D: PolkaVMDependency + Clone, T: BasicType<'ctx>, { let r#type = r#type.as_basic_type_enum(); diff --git a/crates/llvm-context/src/polkavm/context/mod.rs b/crates/llvm-context/src/polkavm/context/mod.rs index 5a48ddc..99c7d4b 100644 --- a/crates/llvm-context/src/polkavm/context/mod.rs +++ b/crates/llvm-context/src/polkavm/context/mod.rs @@ -1,22 +1,5 @@ //! The LLVM IR generator context. 
-pub mod address_space; -pub mod argument; -pub mod attribute; -pub mod build; -pub mod code_type; -pub mod debug_info; -pub mod function; -pub mod global; -pub mod r#loop; -pub mod pointer; -pub mod runtime; -pub mod solidity_data; -pub mod yul_data; - -#[cfg(test)] -mod tests; - use std::cell::RefCell; use std::collections::HashMap; use std::rc::Rc; @@ -32,7 +15,6 @@ use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory; use crate::optimizer::settings::Settings as OptimizerSettings; use crate::optimizer::Optimizer; use crate::polkavm::DebugConfig; -use crate::polkavm::Dependency; use crate::target_machine::target::Target; use crate::target_machine::TargetMachine; use crate::PolkaVMLoadHeapWordFunction; @@ -58,13 +40,27 @@ use self::runtime::RuntimeFunction; use self::solidity_data::SolidityData; use self::yul_data::YulData; +pub mod address_space; +pub mod argument; +pub mod attribute; +pub mod build; +pub mod code_type; +pub mod debug_info; +pub mod function; +pub mod global; +pub mod r#loop; +pub mod pointer; +pub mod runtime; +pub mod solidity_data; +pub mod yul_data; + +#[cfg(test)] +mod tests; + /// The LLVM IR generator context. /// It is a not-so-big god-like object glueing all the compilers' complexity and act as an adapter /// and a superstructure over the inner `inkwell` LLVM context. -pub struct Context<'ctx, D> -where - D: Dependency + Clone, -{ +pub struct Context<'ctx> { /// The inner LLVM context. llvm: &'ctx inkwell::context::Context, /// The inner LLVM context builder. @@ -87,17 +83,9 @@ where current_function: Option>>>, /// The loop context stack. loop_stack: Vec>, - /// The extra LLVM arguments that were used during target initialization. - llvm_arguments: &'ctx [String], /// The PVM memory configuration. memory_config: SolcStandardJsonInputSettingsPolkaVMMemory, - /// The project dependency manager. It can be any entity implementing the trait. 
- /// The manager is used to get information about contracts and their dependencies during - /// the multi-threaded compilation process. - dependency_manager: Option, - /// Whether to append the metadata hash at the end of bytecode. - include_metadata_hash: bool, /// The debug info of the current module. debug_info: Option>, /// The debug configuration telling whether to dump the needed IRs. @@ -109,10 +97,7 @@ where yul_data: Option, } -impl<'ctx, D> Context<'ctx, D> -where - D: Dependency + Clone, -{ +impl<'ctx> Context<'ctx> { /// The functions hashmap default capacity. const FUNCTIONS_HASHMAP_INITIAL_CAPACITY: usize = 64; @@ -221,15 +206,11 @@ where } /// Initializes a new LLVM context. - #[allow(clippy::too_many_arguments)] pub fn new( llvm: &'ctx inkwell::context::Context, module: inkwell::module::Module<'ctx>, optimizer: Optimizer, - dependency_manager: Option, - include_metadata_hash: bool, debug_config: DebugConfig, - llvm_arguments: &'ctx [String], memory_config: SolcStandardJsonInputSettingsPolkaVMMemory, ) -> Self { Self::set_data_layout(llvm, &module); @@ -264,12 +245,8 @@ where functions: HashMap::with_capacity(Self::FUNCTIONS_HASHMAP_INITIAL_CAPACITY), current_function: None, loop_stack: Vec::with_capacity(Self::LOOP_STACK_INITIAL_CAPACITY), - llvm_arguments, memory_config, - dependency_manager, - include_metadata_hash, - debug_info, debug_config, @@ -280,12 +257,10 @@ where /// Builds the LLVM IR module, returning the build artifacts. 
pub fn build( - mut self, + self, contract_path: &str, - metadata_hash: Option<[u8; revive_common::BYTE_LENGTH_WORD]>, + metadata_hash: Option, ) -> anyhow::Result { - let module_clone = self.module.clone(); - self.link_polkavm_exports(contract_path)?; self.link_immutable_data(contract_path)?; @@ -334,33 +309,16 @@ where ) })?; - let shared_object = revive_linker::link(buffer.as_slice())?; + let object = buffer.as_slice().to_vec(); - self.debug_config - .dump_object(contract_path, &shared_object)?; + self.debug_config.dump_object(contract_path, &object)?; - let polkavm_bytecode = - revive_linker::polkavm_linker(shared_object, !self.debug_config().emit_debug_info)?; - - let build = match crate::polkavm::build_assembly_text( - contract_path, - &polkavm_bytecode, - metadata_hash, - self.debug_config(), - ) { - Ok(build) => build, - Err(_error) - if self.optimizer.settings() != &OptimizerSettings::size() - && self.optimizer.settings().is_fallback_to_size_enabled() => - { - self.optimizer = Optimizer::new(OptimizerSettings::size()); - self.module = module_clone; - self.build(contract_path, metadata_hash)? - } - Err(error) => Err(error)?, - }; - - Ok(build) + crate::polkavm::build( + &object, + metadata_hash + .as_ref() + .map(|hash| hash.as_bytes().try_into().unwrap()), + ) } /// Verifies the current LLVM IR module. @@ -437,11 +395,15 @@ where } } - /// Declare an external global. + /// Declare an external global. This is an idempotent method. pub fn declare_global(&mut self, name: &str, r#type: T, address_space: AddressSpace) where T: BasicType<'ctx> + Clone + Copy, { + if self.globals.contains_key(name) { + return; + } + let global = Global::declare(self, r#type, address_space, name); self.globals.insert(name.to_owned(), global); } @@ -650,54 +612,6 @@ where .expect("The current context is not in a loop") } - /// Compiles a contract dependency, if the dependency manager is set. 
- pub fn compile_dependency(&mut self, name: &str) -> anyhow::Result { - self.dependency_manager - .to_owned() - .ok_or_else(|| anyhow::anyhow!("The dependency manager is unset")) - .and_then(|manager| { - Dependency::compile( - manager, - name, - self.optimizer.settings().to_owned(), - self.include_metadata_hash, - self.debug_config.clone(), - self.llvm_arguments, - self.memory_config, - ) - }) - } - - /// Gets a full contract_path from the dependency manager. - pub fn resolve_path(&self, identifier: &str) -> anyhow::Result { - self.dependency_manager - .to_owned() - .ok_or_else(|| anyhow::anyhow!("The dependency manager is unset")) - .and_then(|manager| { - let full_path = manager.resolve_path(identifier)?; - Ok(full_path) - }) - } - - /// Gets a deployed library address from the dependency manager. - pub fn resolve_library(&self, path: &str) -> anyhow::Result> { - self.dependency_manager - .to_owned() - .ok_or_else(|| anyhow::anyhow!("The dependency manager is unset")) - .and_then(|manager| { - let address = manager.resolve_library(path)?; - let address = self.word_const_str_hex(address.as_str()); - Ok(address) - }) - } - - /// Extracts the dependency manager. - pub fn take_dependency_manager(&mut self) -> D { - self.dependency_manager - .take() - .expect("The dependency manager is unset") - } - /// Returns the debug info. pub fn debug_info(&self) -> Option<&DebugInfo<'ctx>> { self.debug_info.as_ref() @@ -808,9 +722,9 @@ where ) -> anyhow::Result> { match pointer.address_space { AddressSpace::Heap => { - let name = >::NAME; + let name = ::NAME; let declaration = - >::declaration(self); + ::declaration(self); let arguments = [self .builder() .build_ptr_to_int(pointer.value, self.xlen_type(), "offset_ptrtoint")? 
@@ -846,7 +760,7 @@ where match pointer.address_space { AddressSpace::Heap => { let declaration = - >::declaration(self); + ::declaration(self); let arguments = [ pointer.to_int(self).as_basic_value_enum(), value.as_basic_value_enum(), @@ -966,10 +880,7 @@ where pub fn build_runtime_call_to_getter( &self, import: &'static str, - ) -> anyhow::Result> - where - D: Dependency + Clone, - { + ) -> anyhow::Result> { let pointer = self.build_alloca_at_entry(self.word_type(), &format!("{import}_output")); self.build_runtime_call(import, &[pointer.to_int(self).into()]); self.build_load(pointer, import) @@ -1064,7 +975,7 @@ where length: inkwell::values::IntValue<'ctx>, ) -> anyhow::Result<()> { self.build_call( - >::declaration(self), + ::declaration(self), &[flags.into(), offset.into(), length.into()], "exit", ); @@ -1088,14 +999,14 @@ where Ok(self .build_call( - >::declaration(self), + ::declaration(self), &[value.into()], "word_to_pointer", ) .unwrap_or_else(|| { panic!( "revive runtime function {} should return a value", - >::NAME, + ::NAME, ) }) .into_int_value()) @@ -1111,7 +1022,7 @@ where size: inkwell::values::IntValue<'ctx>, ) -> anyhow::Result> { let call_site_value = self.builder().build_call( - >::declaration(self).function_value(), + ::declaration(self).function_value(), &[offset.into(), size.into()], "alloc_start", )?; @@ -1133,7 +1044,7 @@ where .unwrap_or_else(|| { panic!( "revive runtime function {} should return a value", - >::NAME, + ::NAME, ) }) .into_pointer_value()) @@ -1433,19 +1344,8 @@ where /// Returns the Yul data reference. /// # Panics /// If the Yul data has not been initialized. - pub fn yul(&self) -> &YulData { - self.yul_data - .as_ref() - .expect("The Yul data must have been initialized") - } - - /// Returns the Yul data mutable reference. - /// # Panics - /// If the Yul data has not been initialized. 
- pub fn yul_mut(&mut self) -> &mut YulData { - self.yul_data - .as_mut() - .expect("The Yul data must have been initialized") + pub fn yul(&self) -> Option<&YulData> { + self.yul_data.as_ref() } /// Returns the current number of immutables values in the contract. diff --git a/crates/llvm-context/src/polkavm/context/pointer/heap.rs b/crates/llvm-context/src/polkavm/context/pointer/heap.rs index a143e89..8054951 100644 --- a/crates/llvm-context/src/polkavm/context/pointer/heap.rs +++ b/crates/llvm-context/src/polkavm/context/pointer/heap.rs @@ -2,21 +2,20 @@ use inkwell::values::BasicValueEnum; +use revive_common::BYTE_LENGTH_BYTE; +use revive_common::BYTE_LENGTH_WORD; + use crate::polkavm::context::runtime::RuntimeFunction; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; use crate::polkavm::WriteLLVM; /// Load a word size value from a heap pointer. pub struct LoadWord; -impl RuntimeFunction for LoadWord -where - D: Dependency + Clone, -{ +impl RuntimeFunction for LoadWord { const NAME: &'static str = "__revive_load_heap_word"; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context .word_type() .fn_type(&[context.xlen_type().into()], false) @@ -24,12 +23,12 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { let offset = Self::paramater(context, 0).into_int_value(); let length = context .xlen_type() - .const_int(revive_common::BYTE_LENGTH_WORD as u64, false); + .const_int(BYTE_LENGTH_WORD as u64, false); let pointer = context.build_heap_gep(offset, length)?; let value = context .builder() @@ -38,7 +37,7 @@ where .basic_block() .get_last_instruction() .expect("Always exists") - .set_alignment(revive_common::BYTE_LENGTH_BYTE as u32) + .set_alignment(BYTE_LENGTH_BYTE as u32) .expect("Alignment is valid"); let swapped_value = 
context.build_byte_swap(value)?; @@ -46,29 +45,23 @@ where } } -impl WriteLLVM for LoadWord -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for LoadWord { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } /// Store a word size value through a heap pointer. pub struct StoreWord; -impl RuntimeFunction for StoreWord -where - D: Dependency + Clone, -{ +impl RuntimeFunction for StoreWord { const NAME: &'static str = "__revive_store_heap_word"; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context.void_type().fn_type( &[context.xlen_type().into(), context.word_type().into()], false, @@ -77,12 +70,12 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { let offset = Self::paramater(context, 0).into_int_value(); let length = context .xlen_type() - .const_int(revive_common::BYTE_LENGTH_WORD as u64, false); + .const_int(BYTE_LENGTH_WORD as u64, false); let pointer = context.build_heap_gep(offset, length)?; let value = context.build_byte_swap(Self::paramater(context, 1))?; @@ -90,21 +83,18 @@ where context .builder() .build_store(pointer.value, value)? 
- .set_alignment(revive_common::BYTE_LENGTH_BYTE as u32) + .set_alignment(BYTE_LENGTH_BYTE as u32) .expect("Alignment is valid"); Ok(None) } } -impl WriteLLVM for StoreWord -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for StoreWord { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } diff --git a/crates/llvm-context/src/polkavm/context/pointer/mod.rs b/crates/llvm-context/src/polkavm/context/pointer/mod.rs index afd1e5e..988ccf4 100644 --- a/crates/llvm-context/src/polkavm/context/pointer/mod.rs +++ b/crates/llvm-context/src/polkavm/context/pointer/mod.rs @@ -5,7 +5,6 @@ use inkwell::types::BasicType; use crate::polkavm::context::address_space::AddressSpace; use crate::polkavm::context::global::Global; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; pub mod heap; pub mod storage; @@ -39,13 +38,10 @@ impl<'ctx> Pointer<'ctx> { } /// Wraps a 256-bit primitive type pointer. - pub fn new_stack_field( - context: &Context<'ctx, D>, + pub fn new_stack_field( + context: &Context<'ctx>, value: inkwell::values::PointerValue<'ctx>, - ) -> Self - where - D: Dependency + Clone, - { + ) -> Self { Self { r#type: context.word_type().as_basic_type_enum(), address_space: AddressSpace::Stack, @@ -54,15 +50,14 @@ impl<'ctx> Pointer<'ctx> { } /// Creates a new pointer with the specified `offset`. 
- pub fn new_with_offset( - context: &Context<'ctx, D>, + pub fn new_with_offset( + context: &Context<'ctx>, address_space: AddressSpace, r#type: T, offset: inkwell::values::IntValue<'ctx>, name: &str, ) -> Self where - D: Dependency + Clone, T: BasicType<'ctx>, { assert_ne!( @@ -92,25 +87,19 @@ impl<'ctx> Pointer<'ctx> { } /// Cast this pointer to a register sized integer value. - pub fn to_int(&self, context: &Context<'ctx, D>) -> inkwell::values::IntValue<'ctx> - where - D: Dependency + Clone, - { + pub fn to_int(&self, context: &Context<'ctx>) -> inkwell::values::IntValue<'ctx> { context .builder() .build_ptr_to_int(self.value, context.xlen_type(), "ptr_to_xlen") .expect("we should be positioned") } - pub fn address_space_cast( + pub fn address_space_cast( self, - context: &Context<'ctx, D>, + context: &Context<'ctx>, address_space: AddressSpace, name: &str, - ) -> anyhow::Result - where - D: Dependency + Clone, - { + ) -> anyhow::Result { let value = context.builder().build_address_space_cast( self.value, context.llvm().ptr_type(address_space.into()), diff --git a/crates/llvm-context/src/polkavm/context/pointer/storage.rs b/crates/llvm-context/src/polkavm/context/pointer/storage.rs index 01ea8a5..36a45ed 100644 --- a/crates/llvm-context/src/polkavm/context/pointer/storage.rs +++ b/crates/llvm-context/src/polkavm/context/pointer/storage.rs @@ -4,19 +4,15 @@ use inkwell::values::BasicValueEnum; use crate::polkavm::context::runtime::RuntimeFunction; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; use crate::polkavm::WriteLLVM; /// Load a word size value from a storage pointer. 
pub struct LoadWord; -impl RuntimeFunction for LoadWord -where - D: Dependency + Clone, -{ +impl RuntimeFunction for LoadWord { const NAME: &'static str = "__revive_load_storage_word"; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context .word_type() .fn_type(&[context.llvm().ptr_type(Default::default()).into()], false) @@ -24,7 +20,7 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { Ok(Some(emit_load( context, @@ -34,29 +30,23 @@ where } } -impl WriteLLVM for LoadWord -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for LoadWord { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } /// Load a word size value from a transient storage pointer. 
pub struct LoadTransientWord; -impl RuntimeFunction for LoadTransientWord -where - D: Dependency + Clone, -{ +impl RuntimeFunction for LoadTransientWord { const NAME: &'static str = "__revive_load_transient_storage_word"; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context .word_type() .fn_type(&[context.llvm().ptr_type(Default::default()).into()], false) @@ -64,35 +54,29 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { Ok(Some(emit_load(context, Self::paramater(context, 0), true)?)) } } -impl WriteLLVM for LoadTransientWord -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for LoadTransientWord { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } /// Store a word size value through a storage pointer. 
pub struct StoreWord; -impl RuntimeFunction for StoreWord -where - D: Dependency + Clone, -{ +impl RuntimeFunction for StoreWord { const NAME: &'static str = "__revive_store_storage_word"; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context.void_type().fn_type( &[ context.llvm().ptr_type(Default::default()).into(), @@ -104,7 +88,7 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { emit_store( context, @@ -117,29 +101,23 @@ where } } -impl WriteLLVM for StoreWord -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for StoreWord { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } /// Store a word size value through a transient storage pointer. 
pub struct StoreTransientWord; -impl RuntimeFunction for StoreTransientWord -where - D: Dependency + Clone, -{ +impl RuntimeFunction for StoreTransientWord { const NAME: &'static str = "__revive_store_transient_storage_word"; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context.void_type().fn_type( &[ context.llvm().ptr_type(Default::default()).into(), @@ -151,7 +129,7 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { emit_store( context, @@ -164,21 +142,18 @@ where } } -impl WriteLLVM for StoreTransientWord -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for StoreTransientWord { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } -fn emit_load<'ctx, D: Dependency + Clone>( - context: &mut Context<'ctx, D>, +fn emit_load<'ctx>( + context: &mut Context<'ctx>, key: BasicValueEnum<'ctx>, transient: bool, ) -> anyhow::Result> { @@ -229,8 +204,8 @@ fn emit_load<'ctx, D: Dependency + Clone>( }) } -fn emit_store<'ctx, D: Dependency + Clone>( - context: &mut Context<'ctx, D>, +fn emit_store<'ctx>( + context: &mut Context<'ctx>, key: BasicValueEnum<'ctx>, value: BasicValueEnum<'ctx>, transient: bool, diff --git a/crates/llvm-context/src/polkavm/context/runtime.rs b/crates/llvm-context/src/polkavm/context/runtime.rs index 315a92a..d8bf6c8 100644 --- a/crates/llvm-context/src/polkavm/context/runtime.rs +++ b/crates/llvm-context/src/polkavm/context/runtime.rs @@ -8,14 +8,10 @@ use crate::polkavm::context::function::declaration::Declaration; use 
crate::polkavm::context::function::Function; use crate::polkavm::context::Attribute; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; /// The revive runtime function interface simplifies declaring runtime functions /// and code emitting by providing helpful default implementations. -pub trait RuntimeFunction -where - D: Dependency + Clone, -{ +pub trait RuntimeFunction { /// The function name. const NAME: &'static str; @@ -26,10 +22,10 @@ where ]; /// The function type. - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx>; + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx>; /// Declare the function. - fn declare(&self, context: &mut Context) -> anyhow::Result<()> { + fn declare(&self, context: &mut Context) -> anyhow::Result<()> { let function = context.add_function( Self::NAME, Self::r#type(context), @@ -54,7 +50,7 @@ where } /// Get the function declaration. - fn declaration<'ctx>(context: &Context<'ctx, D>) -> Declaration<'ctx> { + fn declaration<'ctx>(context: &Context<'ctx>) -> Declaration<'ctx> { context .get_function(Self::NAME) .unwrap_or_else(|| panic!("runtime function {} should be declared", Self::NAME)) @@ -63,7 +59,7 @@ where } /// Emit the function. - fn emit(&self, context: &mut Context) -> anyhow::Result<()> { + fn emit(&self, context: &mut Context) -> anyhow::Result<()> { context.set_current_function(Self::NAME, None)?; context.set_basic_block(context.current_function().borrow().entry_block()); @@ -78,13 +74,13 @@ where /// Emit the function body. fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>>; /// Emit the function return instructions. fn emit_epilogue<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, return_value: Option>, ) { let return_block = context.current_function().borrow().return_block(); @@ -98,7 +94,7 @@ where /// Get the nth function paramater. 
fn paramater<'ctx>( - context: &Context<'ctx, D>, + context: &Context<'ctx>, index: usize, ) -> inkwell::values::BasicValueEnum<'ctx> { let name = Self::NAME; diff --git a/crates/llvm-context/src/polkavm/context/solidity_data.rs b/crates/llvm-context/src/polkavm/context/solidity_data.rs index 467c262..7c6ef50 100644 --- a/crates/llvm-context/src/polkavm/context/solidity_data.rs +++ b/crates/llvm-context/src/polkavm/context/solidity_data.rs @@ -2,6 +2,8 @@ use std::collections::BTreeMap; +use revive_common::BYTE_LENGTH_WORD; + /// The LLVM IR generator Solidity data. /// Describes some data that is only relevant to Solidity. #[derive(Debug, Default)] @@ -19,14 +21,14 @@ impl SolidityData { /// Returns the current size of immutable values in the contract. pub fn immutables_size(&self) -> usize { - self.immutables.len() * revive_common::BYTE_LENGTH_WORD + self.immutables.len() * BYTE_LENGTH_WORD } /// Allocates memory for an immutable value in the auxiliary heap. /// If the identifier is already known, just returns its offset. 
pub fn allocate_immutable(&mut self, identifier: &str) -> usize { let number_of_elements = self.immutables.len(); - let new_offset = number_of_elements * revive_common::BYTE_LENGTH_WORD; + let new_offset = number_of_elements * BYTE_LENGTH_WORD; *self .immutables .entry(identifier.to_owned()) diff --git a/crates/llvm-context/src/polkavm/context/tests.rs b/crates/llvm-context/src/polkavm/context/tests.rs index 817d7ea..1ad4c39 100644 --- a/crates/llvm-context/src/polkavm/context/tests.rs +++ b/crates/llvm-context/src/polkavm/context/tests.rs @@ -4,24 +4,21 @@ use crate::optimizer::settings::Settings as OptimizerSettings; use crate::optimizer::Optimizer; use crate::polkavm::context::attribute::Attribute; use crate::polkavm::context::Context; -use crate::polkavm::DummyDependency; +use crate::PolkaVMTarget; pub fn create_context( llvm: &inkwell::context::Context, optimizer_settings: OptimizerSettings, -) -> Context<'_, DummyDependency> { - crate::initialize_llvm(crate::Target::PVM, "resolc", Default::default()); +) -> Context<'_> { + crate::initialize_llvm(PolkaVMTarget::PVM, "resolc", Default::default()); let module = llvm.create_module("test"); let optimizer = Optimizer::new(optimizer_settings); - Context::::new( + Context::new( llvm, module, optimizer, - None, - true, - Default::default(), Default::default(), Default::default(), ) diff --git a/crates/llvm-context/src/polkavm/context/yul_data.rs b/crates/llvm-context/src/polkavm/context/yul_data.rs index 74da649..dd2090d 100644 --- a/crates/llvm-context/src/polkavm/context/yul_data.rs +++ b/crates/llvm-context/src/polkavm/context/yul_data.rs @@ -2,60 +2,25 @@ use std::collections::BTreeMap; -use num::Zero; - /// The LLVM IR generator Yul data. -/// Describes some data that is only relevant to Yul. +/// +/// Contains data that is only relevant to Yul. #[derive(Debug, Default)] pub struct YulData { - /// The list of constant arrays in the code section. 
- /// It is a temporary storage used until the finalization method is called. - const_arrays: BTreeMap>, + /// Mapping from Yul object identifiers to full contract paths. + identifier_paths: BTreeMap, } impl YulData { - /// Declares a temporary constant array representation. - pub fn const_array_declare(&mut self, index: u8, size: u16) -> anyhow::Result<()> { - if self.const_arrays.contains_key(&index) { - anyhow::bail!( - "The constant array with index {} is already declared", - index - ); - } - - self.const_arrays - .insert(index, vec![num::BigUint::zero(); size as usize]); - - Ok(()) + /// A shorthand constructor. + pub fn new(identifier_paths: BTreeMap) -> Self { + Self { identifier_paths } } - /// Sets a value in the constant array representation. - pub fn const_array_set( - &mut self, - index: u8, - offset: u16, - value: num::BigUint, - ) -> anyhow::Result<()> { - let array = self.const_arrays.get_mut(&index).ok_or_else(|| { - anyhow::anyhow!("The constant array with index {} is not declared", index) - })?; - if offset >= array.len() as u16 { - anyhow::bail!( - "The constant array with index {} has size {} but the offset is {}", - index, - array.len(), - offset, - ); - } - array[offset as usize] = value; - - Ok(()) - } - - /// Finalizes the constant array declaration. - pub fn const_array_take(&mut self, index: u8) -> anyhow::Result> { - self.const_arrays.remove(&index).ok_or_else(|| { - anyhow::anyhow!("The constant array with index {} is not declared", index) - }) + /// Resolves the full contract path by the Yul object identifier. 
+ pub fn resolve_path(&self, identifier: &str) -> Option<&str> { + self.identifier_paths + .get(identifier) + .map(|path| path.as_str()) } } diff --git a/crates/llvm-context/src/polkavm/evm/arithmetic.rs b/crates/llvm-context/src/polkavm/evm/arithmetic.rs index 143cf1c..b4e71fc 100644 --- a/crates/llvm-context/src/polkavm/evm/arithmetic.rs +++ b/crates/llvm-context/src/polkavm/evm/arithmetic.rs @@ -4,21 +4,17 @@ use inkwell::values::BasicValue; use crate::polkavm::context::runtime::RuntimeFunction; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; use crate::PolkaVMDivisionFunction; use crate::PolkaVMRemainderFunction; use crate::PolkaVMSignedDivisionFunction; use crate::PolkaVMSignedRemainderFunction; /// Translates the arithmetic addition. -pub fn addition<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn addition<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { Ok(context .builder() .build_int_add(operand_1, operand_2, "addition_result")? @@ -26,14 +22,11 @@ where } /// Translates the arithmetic subtraction. -pub fn subtraction<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn subtraction<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { Ok(context .builder() .build_int_sub(operand_1, operand_2, "subtraction_result")? @@ -41,14 +34,11 @@ where } /// Translates the arithmetic multiplication. 
-pub fn multiplication<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn multiplication<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { Ok(context .builder() .build_int_mul(operand_1, operand_2, "multiplication_result")? @@ -56,32 +46,26 @@ where } /// Translates the arithmetic division. -pub fn division<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn division<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ - let name = >::NAME; - let declaration = >::declaration(context); +) -> anyhow::Result> { + let name = ::NAME; + let declaration = ::declaration(context); Ok(context .build_call(declaration, &[operand_1.into(), operand_2.into()], "div") .unwrap_or_else(|| panic!("revive runtime function {name} should return a value",))) } /// Translates the arithmetic remainder. -pub fn remainder<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn remainder<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ - let name = >::NAME; - let declaration = >::declaration(context); +) -> anyhow::Result> { + let name = ::NAME; + let declaration = ::declaration(context); Ok(context .build_call(declaration, &[operand_1.into(), operand_2.into()], "rem") .unwrap_or_else(|| panic!("revive runtime function {name} should return a value",))) @@ -91,32 +75,26 @@ where /// Two differences between the EVM and LLVM IR: /// 1. In case of division by zero, 0 is returned. /// 2. In case of overflow, the first argument is returned. 
-pub fn division_signed<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn division_signed<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ - let name = >::NAME; - let declaration = >::declaration(context); +) -> anyhow::Result> { + let name = ::NAME; + let declaration = ::declaration(context); Ok(context .build_call(declaration, &[operand_1.into(), operand_2.into()], "sdiv") .unwrap_or_else(|| panic!("revive runtime function {name} should return a value",))) } /// Translates the signed arithmetic remainder. -pub fn remainder_signed<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn remainder_signed<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ - let name = >::NAME; - let declaration = >::declaration(context); +) -> anyhow::Result> { + let name = ::NAME; + let declaration = ::declaration(context); Ok(context .build_call(declaration, &[operand_1.into(), operand_2.into()], "srem") .unwrap_or_else(|| panic!("revive runtime function {name} should return a value",))) diff --git a/crates/llvm-context/src/polkavm/evm/bitwise.rs b/crates/llvm-context/src/polkavm/evm/bitwise.rs index b4a0217..d1db287 100644 --- a/crates/llvm-context/src/polkavm/evm/bitwise.rs +++ b/crates/llvm-context/src/polkavm/evm/bitwise.rs @@ -2,18 +2,17 @@ use inkwell::values::BasicValue; +use revive_common::BIT_LENGTH_BYTE; +use revive_common::BIT_LENGTH_WORD; + use crate::polkavm::context::Context; -use crate::polkavm::Dependency; /// Translates the bitwise OR. 
-pub fn or<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn or<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { Ok(context .builder() .build_or(operand_1, operand_2, "or_result")? @@ -21,14 +20,11 @@ where } /// Translates the bitwise XOR. -pub fn xor<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn xor<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { Ok(context .builder() .build_xor(operand_1, operand_2, "xor_result")? @@ -36,14 +32,11 @@ where } /// Translates the bitwise AND. -pub fn and<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn and<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { Ok(context .builder() .build_and(operand_1, operand_2, "and_result")? @@ -51,14 +44,11 @@ where } /// Translates the bitwise shift left. 
-pub fn shift_left<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn shift_left<'ctx>( + context: &mut Context<'ctx>, shift: inkwell::values::IntValue<'ctx>, value: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let overflow_block = context.append_basic_block("shift_left_overflow"); let non_overflow_block = context.append_basic_block("shift_left_non_overflow"); let join_block = context.append_basic_block("shift_left_join"); @@ -66,7 +56,7 @@ where let condition_is_overflow = context.builder().build_int_compare( inkwell::IntPredicate::UGT, shift, - context.word_const((revive_common::BIT_LENGTH_WORD - 1) as u64), + context.word_const((BIT_LENGTH_WORD - 1) as u64), "shift_left_is_overflow", )?; context.build_conditional_branch(condition_is_overflow, overflow_block, non_overflow_block)?; @@ -93,14 +83,11 @@ where } /// Translates the bitwise shift right. -pub fn shift_right<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn shift_right<'ctx>( + context: &mut Context<'ctx>, shift: inkwell::values::IntValue<'ctx>, value: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let overflow_block = context.append_basic_block("shift_right_overflow"); let non_overflow_block = context.append_basic_block("shift_right_non_overflow"); let join_block = context.append_basic_block("shift_right_join"); @@ -108,7 +95,7 @@ where let condition_is_overflow = context.builder().build_int_compare( inkwell::IntPredicate::UGT, shift, - context.word_const((revive_common::BIT_LENGTH_WORD - 1) as u64), + context.word_const((BIT_LENGTH_WORD - 1) as u64), "shift_right_is_overflow", )?; context.build_conditional_branch(condition_is_overflow, overflow_block, non_overflow_block)?; @@ -137,14 +124,11 @@ where } /// Translates the arithmetic bitwise shift right. 
-pub fn shift_right_arithmetic<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn shift_right_arithmetic<'ctx>( + context: &mut Context<'ctx>, shift: inkwell::values::IntValue<'ctx>, value: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let overflow_block = context.append_basic_block("shift_right_arithmetic_overflow"); let overflow_positive_block = context.append_basic_block("shift_right_arithmetic_overflow_positive"); @@ -156,7 +140,7 @@ where let condition_is_overflow = context.builder().build_int_compare( inkwell::IntPredicate::UGT, shift, - context.word_const((revive_common::BIT_LENGTH_WORD - 1) as u64), + context.word_const((BIT_LENGTH_WORD - 1) as u64), "shift_right_arithmetic_is_overflow", )?; context.build_conditional_branch(condition_is_overflow, overflow_block, non_overflow_block)?; @@ -164,7 +148,7 @@ where context.set_basic_block(overflow_block); let sign_bit = context.builder().build_right_shift( value, - context.word_const((revive_common::BIT_LENGTH_WORD - 1) as u64), + context.word_const((BIT_LENGTH_WORD - 1) as u64), false, "shift_right_arithmetic_sign_bit", )?; @@ -217,14 +201,11 @@ where /// Because this opcode returns zero on overflows, the index `operand_1` /// is checked for overflow. On overflow, the mask will be all zeros, /// resulting in a branchless implementation. 
-pub fn byte<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn byte<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { const MAX_INDEX_BYTES: u64 = 31; let is_overflow_bit = context.builder().build_int_compare( @@ -254,16 +235,13 @@ where .build_int_truncate(operand_1, context.byte_type(), "index_truncated")?; let index_in_bits = context.builder().build_int_mul( index_truncated, - context - .byte_type() - .const_int(revive_common::BIT_LENGTH_BYTE as u64, false), + context.byte_type().const_int(BIT_LENGTH_BYTE as u64, false), "index_in_bits", )?; let index_from_most_significant_bit = context.builder().build_int_sub( - context.byte_type().const_int( - MAX_INDEX_BYTES * revive_common::BIT_LENGTH_BYTE as u64, - false, - ), + context + .byte_type() + .const_int(MAX_INDEX_BYTES * BIT_LENGTH_BYTE as u64, false), index_in_bits, "index_from_msb", )?; diff --git a/crates/llvm-context/src/polkavm/evm/call.rs b/crates/llvm-context/src/polkavm/evm/call.rs index e5d1c5f..bcdca6b 100644 --- a/crates/llvm-context/src/polkavm/evm/call.rs +++ b/crates/llvm-context/src/polkavm/evm/call.rs @@ -2,18 +2,15 @@ use inkwell::values::BasicValue; -use crate::polkavm::context::argument::Argument; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; const STATIC_CALL_FLAG: u32 = 0b0001_0000; const REENTRANT_CALL_FLAG: u32 = 0b0000_1000; const SOLIDITY_TRANSFER_GAS_STIPEND_THRESHOLD: u64 = 2300; /// Translates a contract call. 
-#[allow(clippy::too_many_arguments)] -pub fn call<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn call<'ctx>( + context: &mut Context<'ctx>, gas: inkwell::values::IntValue<'ctx>, address: inkwell::values::IntValue<'ctx>, value: Option>, @@ -23,10 +20,7 @@ pub fn call<'ctx, D>( output_length: inkwell::values::IntValue<'ctx>, _constants: Vec>, static_call: bool, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let address_pointer = context.build_address_argument_store(address)?; let value = value.unwrap_or_else(|| context.word_const(0)); @@ -115,9 +109,8 @@ where .as_basic_value_enum()) } -#[allow(clippy::too_many_arguments)] -pub fn delegate_call<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn delegate_call<'ctx>( + context: &mut Context<'ctx>, _gas: inkwell::values::IntValue<'ctx>, address: inkwell::values::IntValue<'ctx>, input_offset: inkwell::values::IntValue<'ctx>, @@ -125,10 +118,7 @@ pub fn delegate_call<'ctx, D>( output_offset: inkwell::values::IntValue<'ctx>, output_length: inkwell::values::IntValue<'ctx>, _constants: Vec>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let address_pointer = context.build_address_argument_store(address)?; let input_offset = context.safe_truncate_int_to_xlen(input_offset)?; @@ -199,21 +189,16 @@ where } /// Translates the Yul `linkersymbol` instruction. -pub fn linker_symbol<'ctx, D>( - context: &mut Context<'ctx, D>, - mut arguments: [Argument<'ctx>; 1], -) -> anyhow::Result> -where - D: Dependency + Clone, -{ - let path = arguments[0] - .original - .take() - .ok_or_else(|| anyhow::anyhow!("Linker symbol literal is missing"))?; - - Ok(context - .resolve_library(path.as_str())? 
- .as_basic_value_enum()) +pub fn linker_symbol<'ctx>( + context: &mut Context<'ctx>, + path: &str, +) -> anyhow::Result> { + context.declare_global( + path, + context.integer_type(revive_common::BIT_LENGTH_ETH_ADDRESS), + Default::default(), + ); + context.build_load_address(context.get_global(path)?.into()) } /// The Solidity `address.transfer` and `address.send` call detection heuristic. @@ -236,18 +221,15 @@ where /// /// # Returns /// The call flags xlen `IntValue` and the deposit limit word `IntValue`. -fn call_reentrancy_heuristic<'ctx, D>( - context: &mut Context<'ctx, D>, +fn call_reentrancy_heuristic<'ctx>( + context: &mut Context<'ctx>, gas: inkwell::values::IntValue<'ctx>, input_length: inkwell::values::IntValue<'ctx>, output_length: inkwell::values::IntValue<'ctx>, ) -> anyhow::Result<( inkwell::values::IntValue<'ctx>, inkwell::values::IntValue<'ctx>, -)> -where - D: Dependency + Clone, -{ +)> { // Branch-free SSA implementation: First derive the heuristic boolean (int1) value. let input_length_or_output_length = context diff --git a/crates/llvm-context/src/polkavm/evm/calldata.rs b/crates/llvm-context/src/polkavm/evm/calldata.rs index d926b29..1ac093e 100644 --- a/crates/llvm-context/src/polkavm/evm/calldata.rs +++ b/crates/llvm-context/src/polkavm/evm/calldata.rs @@ -1,16 +1,12 @@ //! Translates the calldata instructions. use crate::polkavm::context::Context; -use crate::polkavm::Dependency; /// Translates the calldata load. -pub fn load<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn load<'ctx>( + context: &mut Context<'ctx>, offset: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let output_pointer = context.build_alloca_at_entry(context.word_type(), "call_data_output"); let offset = context.safe_truncate_int_to_xlen(offset)?; @@ -23,12 +19,9 @@ where } /// Translates the calldata size. 
-pub fn size<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn size<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { let value = context.get_global_value(crate::polkavm::GLOBAL_CALLDATA_SIZE)?; Ok(context .builder() @@ -41,15 +34,12 @@ where } /// Translates the calldata copy. -pub fn copy<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn copy<'ctx>( + context: &mut Context<'ctx>, destination_offset: inkwell::values::IntValue<'ctx>, source_offset: inkwell::values::IntValue<'ctx>, size: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result<()> -where - D: Dependency + Clone, -{ +) -> anyhow::Result<()> { let source_offset = context.safe_truncate_int_to_xlen(source_offset)?; let size = context.safe_truncate_int_to_xlen(size)?; let destination_offset = context.safe_truncate_int_to_xlen(destination_offset)?; diff --git a/crates/llvm-context/src/polkavm/evm/comparison.rs b/crates/llvm-context/src/polkavm/evm/comparison.rs index 287ac7f..869add7 100644 --- a/crates/llvm-context/src/polkavm/evm/comparison.rs +++ b/crates/llvm-context/src/polkavm/evm/comparison.rs @@ -3,19 +3,15 @@ use inkwell::values::BasicValue; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; /// Translates the comparison operations. /// There is not difference between the EVM and LLVM IR behaviors. 
-pub fn compare<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn compare<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, operation: inkwell::IntPredicate, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let result = context.builder().build_int_compare( operation, operand_1, diff --git a/crates/llvm-context/src/polkavm/evm/context.rs b/crates/llvm-context/src/polkavm/evm/context.rs index 2e293d1..f1f057a 100644 --- a/crates/llvm-context/src/polkavm/evm/context.rs +++ b/crates/llvm-context/src/polkavm/evm/context.rs @@ -2,17 +2,15 @@ use inkwell::values::BasicValue; +use revive_common::BIT_LENGTH_ETH_ADDRESS; + use crate::polkavm::context::pointer::Pointer; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; /// Translates the `gas_limit` instruction. -pub fn gas_limit<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn gas_limit<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { let gas_limit_value = context .build_runtime_call(revive_runtime_api::polkavm_imports::GAS_LIMIT, &[]) .expect("the gas_limit syscall method should return a value") @@ -25,12 +23,9 @@ where } /// Translates the `gas_price` instruction. -pub fn gas_price<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn gas_price<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { let gas_price_value = context .build_runtime_call(revive_runtime_api::polkavm_imports::GAS_PRICE, &[]) .expect("the gas_price syscall method should return a value") @@ -43,13 +38,10 @@ where } /// Translates the `tx.origin` instruction. 
-pub fn origin<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ - let address_type = context.integer_type(revive_common::BIT_LENGTH_ETH_ADDRESS); +pub fn origin<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { + let address_type = context.integer_type(BIT_LENGTH_ETH_ADDRESS); let address_pointer: Pointer<'_> = context .get_global(crate::polkavm::GLOBAL_ADDRESS_SPILL_BUFFER)? .into(); @@ -62,43 +54,31 @@ where } /// Translates the `chain_id` instruction. -pub fn chain_id<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn chain_id<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { context.build_runtime_call_to_getter(revive_runtime_api::polkavm_imports::CHAIN_ID) } /// Translates the `block_number` instruction. -pub fn block_number<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn block_number<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { context.build_runtime_call_to_getter(revive_runtime_api::polkavm_imports::BLOCK_NUMBER) } /// Translates the `block_timestamp` instruction. -pub fn block_timestamp<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn block_timestamp<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { context.build_runtime_call_to_getter(revive_runtime_api::polkavm_imports::NOW) } /// Translates the `block_hash` instruction. 
-pub fn block_hash<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn block_hash<'ctx>( + context: &mut Context<'ctx>, index: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let output_pointer = context.build_alloca_at_entry(context.word_type(), "blockhash_out_ptr"); let index_pointer = context.build_alloca_at_entry(context.word_type(), "blockhash_index_ptr"); context.build_store(index_pointer, index)?; @@ -114,22 +94,16 @@ where } /// Translates the `difficulty` instruction. -pub fn difficulty<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn difficulty<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { Ok(context.word_const(2500000000000000).as_basic_value_enum()) } /// Translates the `coinbase` instruction. -pub fn coinbase<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn coinbase<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { let pointer: Pointer<'_> = context .get_global(crate::polkavm::GLOBAL_ADDRESS_SPILL_BUFFER)? .into(); @@ -141,22 +115,16 @@ where } /// Translates the `basefee` instruction. -pub fn basefee<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn basefee<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { context.build_runtime_call_to_getter(revive_runtime_api::polkavm_imports::BASE_FEE) } /// Translates the `address` instruction. -pub fn address<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn address<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { let pointer: Pointer<'_> = context .get_global(crate::polkavm::GLOBAL_ADDRESS_SPILL_BUFFER)? .into(); @@ -168,12 +136,9 @@ where } /// Translates the `caller` instruction. 
-pub fn caller<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn caller<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { let pointer: Pointer<'_> = context .get_global(crate::polkavm::GLOBAL_ADDRESS_SPILL_BUFFER)? .into(); diff --git a/crates/llvm-context/src/polkavm/evm/create.rs b/crates/llvm-context/src/polkavm/evm/create.rs index 7d859d6..25fee18 100644 --- a/crates/llvm-context/src/polkavm/evm/create.rs +++ b/crates/llvm-context/src/polkavm/evm/create.rs @@ -3,24 +3,22 @@ use inkwell::values::BasicValue; use num::Zero; +use revive_common::BIT_LENGTH_ETH_ADDRESS; + use crate::polkavm::context::argument::Argument; use crate::polkavm::context::code_type::CodeType; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; /// Translates the contract `create` and `create2` instruction. /// /// A `salt` value of `None` is equivalent to `create1`. -pub fn create<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn create<'ctx>( + context: &mut Context<'ctx>, value: inkwell::values::IntValue<'ctx>, input_offset: inkwell::values::IntValue<'ctx>, input_length: inkwell::values::IntValue<'ctx>, salt: Option>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let input_offset = context.safe_truncate_int_to_xlen(input_offset)?; let input_length = context.safe_truncate_int_to_xlen(input_length)?; @@ -40,7 +38,7 @@ where }; let address_pointer = context.build_alloca_at_entry( - context.integer_type(revive_common::BIT_LENGTH_ETH_ADDRESS), + context.integer_type(BIT_LENGTH_ETH_ADDRESS), "address_pointer", ); context.build_store(address_pointer, context.word_const(0))?; @@ -96,77 +94,83 @@ where /// Translates the contract hash instruction, which is actually used to set the hash of the contract /// being created, or other related auxiliary data. /// Represents `dataoffset` in Yul and `PUSH [$]` in the EVM legacy assembly. 
-pub fn contract_hash<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn contract_hash<'ctx>( + context: &mut Context<'ctx>, identifier: String, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let code_type = context .code_type() .ok_or_else(|| anyhow::anyhow!("The contract code part type is undefined"))?; let parent = context.module().get_name().to_str().expect("Always valid"); - let contract_path = - context - .resolve_path(identifier.as_str()) - .map_err(|error| match code_type { - CodeType::Runtime if identifier.ends_with("_deployed") => { - anyhow::anyhow!("type({}).runtimeCode is not supported", identifier) - } - _ => error, - })?; - if contract_path.as_str() == parent { - return Ok(Argument::value(context.word_const(0).as_basic_value_enum()) - .with_constant(num::BigUint::zero())); - } else if identifier.ends_with("_deployed") && code_type == CodeType::Runtime { - anyhow::bail!("type({}).runtimeCode is not supported", identifier); + let full_path = match context.yul() { + Some(yul_data) => yul_data + .resolve_path( + identifier + .strip_suffix("_deployed") + .unwrap_or(identifier.as_str()), + ) + .expect("Always exists") + .to_owned(), + None => identifier.clone(), + }; + + match code_type { + CodeType::Deploy if full_path == parent => { + return Ok(Argument::value(context.word_const(0).as_basic_value_enum()) + .with_constant(num::BigUint::zero())); + } + CodeType::Runtime if context.yul().is_some() && identifier.ends_with("_deployed") => { + anyhow::bail!("type({identifier}).runtimeCode is not supported"); + } + _ => {} } - let hash_string = context.compile_dependency(identifier.as_str())?; - let hash_value = context - .word_const_str_hex(hash_string.as_str()) - .as_basic_value_enum(); - Ok(Argument::value(hash_value).with_original(hash_string)) + context.declare_global(&full_path, context.word_type(), Default::default()); + context + .build_load(context.get_global(&full_path)?.into(), &full_path) + .map(Argument::value) } 
/// Translates the deploy call header size instruction. the header consists of /// the hash of the bytecode of the contract whose instance is being created. /// Represents `datasize` in Yul and `PUSH #[$]` in the EVM legacy assembly. -pub fn header_size<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn header_size<'ctx>( + context: &mut Context<'ctx>, identifier: String, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let code_type = context .code_type() .ok_or_else(|| anyhow::anyhow!("The contract code part type is undefined"))?; let parent = context.module().get_name().to_str().expect("Always valid"); - let contract_path = - context - .resolve_path(identifier.as_str()) - .map_err(|error| match code_type { - CodeType::Runtime if identifier.ends_with("_deployed") => { - anyhow::anyhow!("type({}).runtimeCode is not supported", identifier) - } - _ => error, - })?; - if contract_path.as_str() == parent { - return Ok(Argument::value(context.word_const(0).as_basic_value_enum()) - .with_constant(num::BigUint::zero())); - } else if identifier.ends_with("_deployed") && code_type == CodeType::Runtime { - anyhow::bail!("type({}).runtimeCode is not supported", identifier); + let full_path = match context.yul() { + Some(yul_data) => yul_data + .resolve_path( + identifier + .strip_suffix("_deployed") + .unwrap_or(identifier.as_str()), + ) + .unwrap_or_else(|| panic!("ICE: {identifier} not found {yul_data:?}")), + None => identifier.as_str(), + }; + + match code_type { + CodeType::Deploy if full_path == parent => { + return Ok(Argument::value(context.word_const(0).as_basic_value_enum()) + .with_constant(num::BigUint::zero())); + } + CodeType::Runtime if context.yul().is_some() && identifier.ends_with("_deployed") => { + anyhow::bail!("type({identifier}).runtimeCode is not supported"); + } + _ => {} } - let size_bigint = num::BigUint::from(crate::polkavm::DEPLOYER_CALL_HEADER_SIZE); let size_value = context 
.word_const(crate::polkavm::DEPLOYER_CALL_HEADER_SIZE as u64) .as_basic_value_enum(); + let size_bigint = num::BigUint::from(crate::polkavm::DEPLOYER_CALL_HEADER_SIZE); Ok(Argument::value(size_value).with_constant(size_bigint)) } diff --git a/crates/llvm-context/src/polkavm/evm/crypto.rs b/crates/llvm-context/src/polkavm/evm/crypto.rs index c3a11e7..3f28762 100644 --- a/crates/llvm-context/src/polkavm/evm/crypto.rs +++ b/crates/llvm-context/src/polkavm/evm/crypto.rs @@ -1,17 +1,13 @@ //! Translates the cryptographic operations. use crate::polkavm::context::Context; -use crate::polkavm::Dependency; /// Translates the `sha3` instruction. -pub fn sha3<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn sha3<'ctx>( + context: &mut Context<'ctx>, offset: inkwell::values::IntValue<'ctx>, length: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let offset_casted = context.safe_truncate_int_to_xlen(offset)?; let length_casted = context.safe_truncate_int_to_xlen(length)?; let input_pointer = context.build_heap_gep(offset_casted, length_casted)?; diff --git a/crates/llvm-context/src/polkavm/evm/ether_gas.rs b/crates/llvm-context/src/polkavm/evm/ether_gas.rs index bfa76e9..ab15264 100644 --- a/crates/llvm-context/src/polkavm/evm/ether_gas.rs +++ b/crates/llvm-context/src/polkavm/evm/ether_gas.rs @@ -1,15 +1,11 @@ //! Translates the value and balance operations. use crate::polkavm::context::Context; -use crate::polkavm::Dependency; /// Translates the `gas` instruction. -pub fn gas<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn gas<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { let ref_time_left_value = context .build_runtime_call(revive_runtime_api::polkavm_imports::REF_TIME_LEFT, &[]) .expect("the ref_time_left syscall method should return a value") @@ -22,12 +18,9 @@ where } /// Translates the `value` instruction. 
-pub fn value<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn value<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { let output_pointer = context.build_alloca_at_entry(context.value_type(), "value_transferred"); context.build_store(output_pointer, context.word_const(0))?; context.build_runtime_call( @@ -38,13 +31,10 @@ where } /// Translates the `balance` instructions. -pub fn balance<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn balance<'ctx>( + context: &mut Context<'ctx>, address: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let address_pointer = context.build_address_argument_store(address)?; let balance_pointer = context.build_alloca_at_entry(context.word_type(), "balance_pointer"); let balance = context.builder().build_ptr_to_int( @@ -62,12 +52,9 @@ where } /// Translates the `selfbalance` instructions. -pub fn self_balance<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn self_balance<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { let balance_pointer = context.build_alloca_at_entry(context.word_type(), "balance_pointer"); let balance = context.builder().build_ptr_to_int( balance_pointer.value, diff --git a/crates/llvm-context/src/polkavm/evm/event.rs b/crates/llvm-context/src/polkavm/evm/event.rs index ba7fc9e..52af693 100644 --- a/crates/llvm-context/src/polkavm/evm/event.rs +++ b/crates/llvm-context/src/polkavm/evm/event.rs @@ -1,19 +1,16 @@ //! Translates a log or event call. use inkwell::values::BasicValue; +use revive_common::BYTE_LENGTH_WORD; use crate::polkavm::context::runtime::RuntimeFunction; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; use crate::polkavm::WriteLLVM; /// A function for emitting EVM event logs from contract code. 
pub struct EventLog; -impl RuntimeFunction for EventLog -where - D: Dependency + Clone, -{ +impl RuntimeFunction for EventLog { const NAME: &'static str = match N { 0 => "__revive_log_0", 1 => "__revive_log_1", @@ -23,7 +20,7 @@ where _ => unreachable!(), }; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { let mut parameter_types = vec![context.xlen_type().into(), context.xlen_type().into()]; parameter_types.extend_from_slice(&[context.word_type().into(); N]); context.void_type().fn_type(¶meter_types, false) @@ -31,7 +28,7 @@ where fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { let input_offset = Self::paramater(context, 0).into_int_value(); let input_length = Self::paramater(context, 1).into_int_value(); @@ -49,7 +46,7 @@ where input_length.as_basic_value_enum(), ] } else { - let topics_buffer_size = N * revive_common::BYTE_LENGTH_WORD; + let topics_buffer_size = N * BYTE_LENGTH_WORD; let topics_buffer_pointer = context.build_alloca_at_entry( context.byte_type().array_type(topics_buffer_size as u32), "topics_buffer", @@ -59,7 +56,7 @@ where let topic = Self::paramater(context, n + 2); let topic_buffer_offset = context .xlen_type() - .const_int((n * revive_common::BYTE_LENGTH_WORD) as u64, false); + .const_int((n * BYTE_LENGTH_WORD) as u64, false); context.build_store( context.build_gep( topics_buffer_pointer, @@ -98,82 +95,64 @@ where } } -impl WriteLLVM for EventLog<0> -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for EventLog<0> { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> 
anyhow::Result<()> { + ::emit(&self, context) } } -impl WriteLLVM for EventLog<1> -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for EventLog<1> { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } -impl WriteLLVM for EventLog<2> -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for EventLog<2> { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } -impl WriteLLVM for EventLog<3> -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for EventLog<3> { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } -impl WriteLLVM for EventLog<4> -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for EventLog<4> { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + 
::emit(&self, context) } } /// Translates a log or event call. -pub fn log<'ctx, D, const N: usize>( - context: &mut Context<'ctx, D>, +pub fn log<'ctx, const N: usize>( + context: &mut Context<'ctx>, input_offset: inkwell::values::IntValue<'ctx>, input_length: inkwell::values::IntValue<'ctx>, topics: [inkwell::values::BasicValueEnum<'ctx>; N], -) -> anyhow::Result<()> -where - D: Dependency + Clone, -{ - let declaration = as RuntimeFunction>::declaration(context); +) -> anyhow::Result<()> { + let declaration = as RuntimeFunction>::declaration(context); let mut arguments = vec![ context.safe_truncate_int_to_xlen(input_offset)?.into(), context.safe_truncate_int_to_xlen(input_length)?.into(), diff --git a/crates/llvm-context/src/polkavm/evm/ext_code.rs b/crates/llvm-context/src/polkavm/evm/ext_code.rs index 8af5915..126e2f5 100644 --- a/crates/llvm-context/src/polkavm/evm/ext_code.rs +++ b/crates/llvm-context/src/polkavm/evm/ext_code.rs @@ -1,17 +1,15 @@ //! Translates the external code operations. +use revive_common::BIT_LENGTH_ETH_ADDRESS; + use crate::polkavm::context::Context; -use crate::polkavm::Dependency; /// Translates the `extcodesize` instruction if `address` is `Some`. /// Otherwise, translates the `codesize` instruction. -pub fn size<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn size<'ctx>( + context: &mut Context<'ctx>, address: Option>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let address = match address { Some(address) => address, None => super::context::address(context)?.into_int_value(), @@ -33,14 +31,11 @@ where } /// Translates the `extcodehash` instruction. 
-pub fn hash<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn hash<'ctx>( + context: &mut Context<'ctx>, address: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ - let address_type = context.integer_type(revive_common::BIT_LENGTH_ETH_ADDRESS); +) -> anyhow::Result> { + let address_type = context.integer_type(BIT_LENGTH_ETH_ADDRESS); let address_pointer = context.build_alloca_at_entry(address_type, "address_pointer"); let address_truncated = context diff --git a/crates/llvm-context/src/polkavm/evm/immutable.rs b/crates/llvm-context/src/polkavm/evm/immutable.rs index 60f2827..2c336d3 100644 --- a/crates/llvm-context/src/polkavm/evm/immutable.rs +++ b/crates/llvm-context/src/polkavm/evm/immutable.rs @@ -7,7 +7,6 @@ use crate::polkavm::context::code_type::CodeType; use crate::polkavm::context::pointer::Pointer; use crate::polkavm::context::runtime::RuntimeFunction; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; use crate::polkavm::WriteLLVM; /// A function for requesting the immutable data from the runtime. @@ -20,19 +19,16 @@ use crate::polkavm::WriteLLVM; /// However, this is a one time assertion, hence worth it. pub struct Load; -impl RuntimeFunction for Load -where - D: Dependency + Clone, -{ +impl RuntimeFunction for Load { const NAME: &'static str = "__revive_load_immutable_data"; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context.void_type().fn_type(Default::default(), false) } fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { let immutable_data_size_pointer = context .get_global(revive_runtime_api::immutable_data::GLOBAL_IMMUTABLE_DATA_SIZE)? 
@@ -109,35 +105,29 @@ where } } -impl WriteLLVM for Load -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for Load { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } /// Store the immutable data from the constructor code. pub struct Store; -impl RuntimeFunction for Store -where - D: Dependency + Clone, -{ +impl RuntimeFunction for Store { const NAME: &'static str = "__revive_store_immutable_data"; - fn r#type<'ctx>(context: &Context<'ctx, D>) -> inkwell::types::FunctionType<'ctx> { + fn r#type<'ctx>(context: &Context<'ctx>) -> inkwell::types::FunctionType<'ctx> { context.void_type().fn_type(Default::default(), false) } fn emit_body<'ctx>( &self, - context: &mut Context<'ctx, D>, + context: &mut Context<'ctx>, ) -> anyhow::Result>> { let immutable_data_size_pointer = context .get_global(revive_runtime_api::immutable_data::GLOBAL_IMMUTABLE_DATA_SIZE)? @@ -192,16 +182,13 @@ where } } -impl WriteLLVM for Store -where - D: Dependency + Clone, -{ - fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { - >::declare(self, context) +impl WriteLLVM for Store { + fn declare(&mut self, context: &mut Context) -> anyhow::Result<()> { + ::declare(self, context) } - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { - >::emit(&self, context) + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()> { + ::emit(&self, context) } } @@ -210,20 +197,17 @@ where /// In deploy code the values are read from the stack. /// /// In runtime code they are loaded lazily with the `get_immutable_data` syscall. 
-pub fn load<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn load<'ctx>( + context: &mut Context<'ctx>, index: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { match context.code_type() { None => { anyhow::bail!("Immutables are not available if the contract part is undefined"); } Some(CodeType::Deploy) => load_from_memory(context, index), Some(CodeType::Runtime) => { - let name = >::NAME; + let name = ::NAME; context.build_call( context .get_function(name) @@ -244,14 +228,11 @@ where /// being prepared for storing them using the `set_immutable_data` syscall. /// /// Ignored in the runtime code. -pub fn store<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn store<'ctx>( + context: &mut Context<'ctx>, index: inkwell::values::IntValue<'ctx>, value: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result<()> -where - D: Dependency + Clone, -{ +) -> anyhow::Result<()> { match context.code_type() { None => { anyhow::bail!("Immutables are not available if the contract part is undefined"); @@ -279,13 +260,10 @@ where } } -pub fn load_from_memory<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn load_from_memory<'ctx>( + context: &mut Context<'ctx>, index: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let immutable_data_pointer = context .get_global(revive_runtime_api::immutable_data::GLOBAL_IMMUTABLE_DATA_POINTER)? .value diff --git a/crates/llvm-context/src/polkavm/evm/math.rs b/crates/llvm-context/src/polkavm/evm/math.rs index 68f59ef..ed2bf19 100644 --- a/crates/llvm-context/src/polkavm/evm/math.rs +++ b/crates/llvm-context/src/polkavm/evm/math.rs @@ -3,18 +3,14 @@ use inkwell::values::BasicValue; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; /// Translates the `addmod` instruction. 
-pub fn add_mod<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn add_mod<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, modulo: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { Ok(context .build_call( context.llvm_runtime().add_mod, @@ -29,15 +25,12 @@ where } /// Translates the `mulmod` instruction. -pub fn mul_mod<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn mul_mod<'ctx>( + context: &mut Context<'ctx>, operand_1: inkwell::values::IntValue<'ctx>, operand_2: inkwell::values::IntValue<'ctx>, modulo: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { Ok(context .build_call( context.llvm_runtime().mul_mod, @@ -52,14 +45,11 @@ where } /// Translates the `exp` instruction. -pub fn exponent<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn exponent<'ctx>( + context: &mut Context<'ctx>, value: inkwell::values::IntValue<'ctx>, exponent: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { Ok(context .build_call( context.llvm_runtime().exp, @@ -70,14 +60,11 @@ where } /// Translates the `signextend` instruction. -pub fn sign_extend<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn sign_extend<'ctx>( + context: &mut Context<'ctx>, bytes: inkwell::values::IntValue<'ctx>, value: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { Ok(context .build_call( context.llvm_runtime().sign_extend, diff --git a/crates/llvm-context/src/polkavm/evm/memory.rs b/crates/llvm-context/src/polkavm/evm/memory.rs index 9182f08..f21eaf7 100644 --- a/crates/llvm-context/src/polkavm/evm/memory.rs +++ b/crates/llvm-context/src/polkavm/evm/memory.rs @@ -1,19 +1,16 @@ //! Translates the heap memory operations. 
use inkwell::values::BasicValue; +use revive_common::BYTE_LENGTH_BYTE; use crate::polkavm::context::address_space::AddressSpace; use crate::polkavm::context::pointer::Pointer; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; /// Translates the `msize` instruction. -pub fn msize<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn msize<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { Ok(context .builder() .build_int_z_extend( @@ -26,13 +23,10 @@ where /// Translates the `mload` instruction. /// Uses the main heap. -pub fn load<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn load<'ctx>( + context: &mut Context<'ctx>, offset: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +) -> anyhow::Result> { let pointer = Pointer::new_with_offset( context, AddressSpace::Heap, @@ -45,14 +39,11 @@ where /// Translates the `mstore` instruction. /// Uses the main heap. -pub fn store<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn store<'ctx>( + context: &mut Context<'ctx>, offset: inkwell::values::IntValue<'ctx>, value: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result<()> -where - D: Dependency + Clone, -{ +) -> anyhow::Result<()> { let pointer = Pointer::new_with_offset( context, AddressSpace::Heap, @@ -66,14 +57,11 @@ where /// Translates the `mstore8` instruction. /// Uses the main heap. -pub fn store_byte<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn store_byte<'ctx>( + context: &mut Context<'ctx>, offset: inkwell::values::IntValue<'ctx>, value: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result<()> -where - D: Dependency + Clone, -{ +) -> anyhow::Result<()> { let byte_type = context.byte_type(); let value = context .builder() @@ -92,7 +80,7 @@ where context .builder() .build_store(pointer, value)? 
- .set_alignment(revive_common::BYTE_LENGTH_BYTE as u32) + .set_alignment(BYTE_LENGTH_BYTE as u32) .expect("Alignment is valid"); Ok(()) } diff --git a/crates/llvm-context/src/polkavm/evm/return.rs b/crates/llvm-context/src/polkavm/evm/return.rs index 717ce81..3ad665d 100644 --- a/crates/llvm-context/src/polkavm/evm/return.rs +++ b/crates/llvm-context/src/polkavm/evm/return.rs @@ -4,22 +4,18 @@ use crate::polkavm::context::code_type::CodeType; use crate::polkavm::context::runtime::RuntimeFunction; use crate::polkavm::context::Context; use crate::polkavm::evm::immutable::Store; -use crate::polkavm::Dependency; /// Translates the `return` instruction. -pub fn r#return<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn r#return<'ctx>( + context: &mut Context<'ctx>, offset: inkwell::values::IntValue<'ctx>, length: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result<()> -where - D: Dependency + Clone, -{ +) -> anyhow::Result<()> { match context.code_type() { None => anyhow::bail!("Return is not available if the contract part is undefined"), Some(CodeType::Deploy) => { context.build_call( - >::declaration(context), + ::declaration(context), Default::default(), "store_immutable_data", ); @@ -35,14 +31,11 @@ where } /// Translates the `revert` instruction. -pub fn revert<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn revert<'ctx>( + context: &mut Context<'ctx>, offset: inkwell::values::IntValue<'ctx>, length: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result<()> -where - D: Dependency + Clone, -{ +) -> anyhow::Result<()> { context.build_exit( context.integer_const(crate::polkavm::XLEN, 1), offset, @@ -52,19 +45,13 @@ where /// Translates the `stop` instruction. /// Is the same as `return(0, 0)`. -pub fn stop(context: &mut Context) -> anyhow::Result<()> -where - D: Dependency + Clone, -{ +pub fn stop(context: &mut Context) -> anyhow::Result<()> { r#return(context, context.word_const(0), context.word_const(0)) } /// Translates the `invalid` instruction. 
/// Burns all gas using an out-of-bounds memory store, causing a panic. -pub fn invalid(context: &mut Context) -> anyhow::Result<()> -where - D: Dependency + Clone, -{ +pub fn invalid(context: &mut Context) -> anyhow::Result<()> { crate::polkavm::evm::memory::store( context, context.word_type().const_all_ones(), diff --git a/crates/llvm-context/src/polkavm/evm/return_data.rs b/crates/llvm-context/src/polkavm/evm/return_data.rs index 213abc6..2596bed 100644 --- a/crates/llvm-context/src/polkavm/evm/return_data.rs +++ b/crates/llvm-context/src/polkavm/evm/return_data.rs @@ -1,15 +1,11 @@ //! Translates the return data instructions. use crate::polkavm::context::Context; -use crate::polkavm::Dependency; /// Translates the return data size. -pub fn size<'ctx, D>( - context: &mut Context<'ctx, D>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ +pub fn size<'ctx>( + context: &mut Context<'ctx>, +) -> anyhow::Result> { let return_data_size_value = context .build_runtime_call(revive_runtime_api::polkavm_imports::RETURNDATASIZE, &[]) .expect("the return_data_size syscall method should return a value") @@ -29,15 +25,12 @@ where /// - Destination, offset or size exceed the VM register size (XLEN) /// - `source_offset + size` overflows (in XLEN) /// - `source_offset + size` is beyond `RETURNDATASIZE` -pub fn copy<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn copy<'ctx>( + context: &mut Context<'ctx>, destination_offset: inkwell::values::IntValue<'ctx>, source_offset: inkwell::values::IntValue<'ctx>, size: inkwell::values::IntValue<'ctx>, -) -> anyhow::Result<()> -where - D: Dependency + Clone, -{ +) -> anyhow::Result<()> { let source_offset = context.safe_truncate_int_to_xlen(source_offset)?; let destination_offset = context.safe_truncate_int_to_xlen(destination_offset)?; let size = context.safe_truncate_int_to_xlen(size)?; diff --git a/crates/llvm-context/src/polkavm/evm/storage.rs b/crates/llvm-context/src/polkavm/evm/storage.rs index a934f64..9f658d3 100644 
--- a/crates/llvm-context/src/polkavm/evm/storage.rs +++ b/crates/llvm-context/src/polkavm/evm/storage.rs @@ -2,7 +2,6 @@ use crate::polkavm::context::runtime::RuntimeFunction; use crate::polkavm::context::Context; -use crate::polkavm::Dependency; use crate::PolkaVMArgument; use crate::PolkaVMLoadStorageWordFunction; use crate::PolkaVMLoadTransientStorageWordFunction; @@ -10,15 +9,12 @@ use crate::PolkaVMStoreStorageWordFunction; use crate::PolkaVMStoreTransientStorageWordFunction; /// Translates the storage load. -pub fn load<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn load<'ctx>( + context: &mut Context<'ctx>, position: &PolkaVMArgument<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ - let name = >::NAME; - let declaration = >::declaration(context); +) -> anyhow::Result> { + let name = ::NAME; + let declaration = ::declaration(context); let arguments = [position.as_pointer(context)?.value.into()]; Ok(context .build_call(declaration, &arguments, "storage_load") @@ -26,15 +22,12 @@ where } /// Translates the storage store. -pub fn store<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn store<'ctx>( + context: &mut Context<'ctx>, position: &PolkaVMArgument<'ctx>, value: &PolkaVMArgument<'ctx>, -) -> anyhow::Result<()> -where - D: Dependency + Clone, -{ - let declaration = >::declaration(context); +) -> anyhow::Result<()> { + let declaration = ::declaration(context); let arguments = [ position.as_pointer(context)?.value.into(), value.as_pointer(context)?.value.into(), @@ -44,33 +37,27 @@ where } /// Translates the transient storage load. 
-pub fn transient_load<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn transient_load<'ctx>( + context: &mut Context<'ctx>, position: &PolkaVMArgument<'ctx>, -) -> anyhow::Result> -where - D: Dependency + Clone, -{ - let name = >::NAME; +) -> anyhow::Result> { + let name = ::NAME; let arguments = [position.as_pointer(context)?.value.into()]; let declaration = - >::declaration(context); + ::declaration(context); Ok(context .build_call(declaration, &arguments, "transient_storage_load") .unwrap_or_else(|| panic!("runtime function {name} should return a value"))) } /// Translates the transient storage store. -pub fn transient_store<'ctx, D>( - context: &mut Context<'ctx, D>, +pub fn transient_store<'ctx>( + context: &mut Context<'ctx>, position: &PolkaVMArgument<'ctx>, value: &PolkaVMArgument<'ctx>, -) -> anyhow::Result<()> -where - D: Dependency + Clone, -{ +) -> anyhow::Result<()> { let declaration = - >::declaration(context); + ::declaration(context); let arguments = [ position.as_pointer(context)?.value.into(), value.as_pointer(context)?.value.into(), diff --git a/crates/llvm-context/src/polkavm/mod.rs b/crates/llvm-context/src/polkavm/mod.rs index 9403ae1..28b5ffe 100644 --- a/crates/llvm-context/src/polkavm/mod.rs +++ b/crates/llvm-context/src/polkavm/mod.rs @@ -1,30 +1,43 @@ //! The LLVM context library. 
+use std::collections::BTreeMap; + +use crate::debug_config::DebugConfig; +use crate::optimizer::settings::Settings as OptimizerSettings; +use crate::{PolkaVMTarget, PolkaVMTargetMachine}; + +use anyhow::Context as AnyhowContext; +use polkavm_common::program::ProgramBlob; +use polkavm_disassembler::{Disassembler, DisassemblyFormat}; +use revive_common::{ + Keccak256, ObjectFormat, BIT_LENGTH_ETH_ADDRESS, BIT_LENGTH_WORD, BYTE_LENGTH_ETH_ADDRESS, + BYTE_LENGTH_WORD, +}; +use revive_linker::elf::ElfLinker; +use revive_linker::pvm::polkavm_linker; + +use self::context::build::Build; +use self::context::Context; +pub use self::r#const::*; + pub mod r#const; pub mod context; pub mod evm; -pub use self::r#const::*; +/// Get a [Build] from contract bytecode and its auxilliary data. +pub fn build( + bytecode: &[u8], + metadata_hash: Option<[u8; BYTE_LENGTH_WORD]>, +) -> anyhow::Result { + Ok(Build::new(metadata_hash, bytecode.to_owned())) +} -use crate::debug_config::DebugConfig; -use crate::optimizer::settings::Settings as OptimizerSettings; - -use anyhow::Context as AnyhowContext; -use polkavm_common::program::ProgramBlob; -use polkavm_disassembler::{Disassembler, DisassemblyFormat}; -use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory; -use sha3::Digest; - -use self::context::build::Build; -use self::context::Context; - -/// Builds PolkaVM assembly text. -pub fn build_assembly_text( +/// Disassembles the PolkaVM blob into assembly text representation. 
+pub fn disassemble( contract_path: &str, bytecode: &[u8], - metadata_hash: Option<[u8; revive_common::BYTE_LENGTH_WORD]>, debug_config: &DebugConfig, -) -> anyhow::Result { +) -> anyhow::Result { let program_blob = ProgramBlob::parse(bytecode.into()) .map_err(anyhow::Error::msg) .with_context(|| format!("Failed to parse program blob for contract: {contract_path}"))?; @@ -45,86 +58,97 @@ pub fn build_assembly_text( debug_config.dump_assembly(contract_path, &assembly_text)?; - Ok(Build::new( - assembly_text.to_owned(), - metadata_hash, - bytecode.to_owned(), - hex::encode(sha3::Keccak256::digest(bytecode)), - )) + Ok(assembly_text) } +/// Computes the PVM bytecode hash. +pub fn hash(bytecode_buffer: &[u8]) -> [u8; BYTE_LENGTH_WORD] { + Keccak256::from_slice(bytecode_buffer) + .as_bytes() + .try_into() + .expect("the bytecode hash should be word sized") +} + +/// Links the `bytecode` with `linker_symbols` and `factory_dependencies`. +pub fn link( + bytecode: &[u8], + linker_symbols: &BTreeMap, + factory_dependencies: &BTreeMap, + strip_binary: bool, +) -> anyhow::Result<(Vec, ObjectFormat)> { + Ok(match ObjectFormat::try_from(bytecode) { + Ok(format @ ObjectFormat::PVM) => (bytecode.to_vec(), format), + Ok(ObjectFormat::ELF) => { + let symbols = build_symbols(linker_symbols, factory_dependencies)?; + let bytecode_linked = ElfLinker::setup()?.link(bytecode, symbols.as_slice())?; + polkavm_linker(&bytecode_linked, strip_binary) + .map(|pvm| (pvm, ObjectFormat::PVM)) + .unwrap_or_else(|_| (bytecode.to_vec(), ObjectFormat::ELF)) + } + Err(error) => panic!("ICE: linker: {error}"), + }) +} + +/// The returned module defines given `linker_symbols` and `factory_dependencies` global values. 
+pub fn build_symbols( + linker_symbols: &BTreeMap, + factory_dependencies: &BTreeMap, +) -> anyhow::Result { + let context = inkwell::context::Context::create(); + let module = context.create_module("symbols"); + let word_type = context.custom_width_int_type(BIT_LENGTH_WORD as u32); + let address_type = context.custom_width_int_type(BIT_LENGTH_ETH_ADDRESS as u32); + + for (name, value) in linker_symbols { + let global_value = module.add_global(address_type, Default::default(), name); + global_value.set_linkage(inkwell::module::Linkage::External); + global_value.set_initializer( + &address_type + .const_int_from_string( + hex::encode(value).as_str(), + inkwell::types::StringRadix::Hexadecimal, + ) + .expect("should be valid"), + ); + } + + for (name, value) in factory_dependencies { + let global_value = module.add_global(word_type, Default::default(), name); + global_value.set_linkage(inkwell::module::Linkage::External); + global_value.set_initializer( + &word_type + .const_int_from_string( + hex::encode(value).as_str(), + inkwell::types::StringRadix::Hexadecimal, + ) + .expect("should be valid"), + ); + } + + Ok( + PolkaVMTargetMachine::new(PolkaVMTarget::PVM, &OptimizerSettings::none())? + .write_to_memory_buffer(&module) + .expect("ICE: the symbols module should be valid"), + ) +} /// Implemented by items which are translated into LLVM IR. -pub trait WriteLLVM -where - D: Dependency + Clone, -{ +pub trait WriteLLVM { /// Declares the entity in the LLVM IR. /// Is usually performed in order to use the item before defining it. - fn declare(&mut self, _context: &mut Context) -> anyhow::Result<()> { + fn declare(&mut self, _context: &mut Context) -> anyhow::Result<()> { Ok(()) } /// Translates the entity into LLVM IR. - fn into_llvm(self, context: &mut Context) -> anyhow::Result<()>; + fn into_llvm(self, context: &mut Context) -> anyhow::Result<()>; } /// The dummy LLVM writable entity. 
#[derive(Debug, Default, Clone)] pub struct DummyLLVMWritable {} -impl WriteLLVM for DummyLLVMWritable -where - D: Dependency + Clone, -{ - fn into_llvm(self, _context: &mut Context) -> anyhow::Result<()> { +impl WriteLLVM for DummyLLVMWritable { + fn into_llvm(self, _context: &mut Context) -> anyhow::Result<()> { Ok(()) } } - -/// Implemented by items managing project dependencies. -pub trait Dependency { - /// Compiles a project dependency. - fn compile( - dependency: Self, - path: &str, - optimizer_settings: OptimizerSettings, - include_metadata_hash: bool, - debug_config: DebugConfig, - llvm_arguments: &[String], - memory_config: SolcStandardJsonInputSettingsPolkaVMMemory, - ) -> anyhow::Result; - - /// Resolves a full contract path. - fn resolve_path(&self, identifier: &str) -> anyhow::Result; - - /// Resolves a library address. - fn resolve_library(&self, path: &str) -> anyhow::Result; -} - -/// The dummy dependency entity. -#[derive(Debug, Default, Clone)] -pub struct DummyDependency {} - -impl Dependency for DummyDependency { - fn compile( - _dependency: Self, - _path: &str, - _optimizer_settings: OptimizerSettings, - _include_metadata_hash: bool, - _debug_config: DebugConfig, - _llvm_arguments: &[String], - _memory_config: SolcStandardJsonInputSettingsPolkaVMMemory, - ) -> anyhow::Result { - Ok(String::new()) - } - - /// Resolves a full contract path. - fn resolve_path(&self, _identifier: &str) -> anyhow::Result { - Ok(String::new()) - } - - /// Resolves a library address. - fn resolve_library(&self, _path: &str) -> anyhow::Result { - Ok(String::new()) - } -} diff --git a/crates/llvm-context/src/target_machine/mod.rs b/crates/llvm-context/src/target_machine/mod.rs index f7837a3..0200f19 100644 --- a/crates/llvm-context/src/target_machine/mod.rs +++ b/crates/llvm-context/src/target_machine/mod.rs @@ -1,12 +1,12 @@ //! The LLVM target machine. 
-pub mod target; - use crate::optimizer::settings::size_level::SizeLevel as OptimizerSettingsSizeLevel; use crate::optimizer::settings::Settings as OptimizerSettings; use self::target::Target; +pub mod target; + /// The LLVM target machine. #[derive(Debug)] pub struct TargetMachine { diff --git a/crates/resolc/Cargo.toml b/crates/resolc/Cargo.toml index 73591cd..6a5025f 100644 --- a/crates/resolc/Cargo.toml +++ b/crates/resolc/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "resolc" -version = "0.3.0" +version = "0.4.0" license.workspace = true edition.workspace = true repository.workspace = true @@ -28,8 +28,8 @@ rayon = { workspace = true, optional = true } semver = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } -sha3 = { workspace = true } which = { workspace = true } +normpath = { workspace = true } revive-common = { workspace = true } revive-llvm-context = { workspace = true } @@ -46,6 +46,10 @@ inkwell = { workspace = true, features = ["target-riscv", "llvm18-1-no-llvm-link [build-dependencies] git2 = { workspace = true, default-features = false } +[dev-dependencies] +tempfile = { workspace = true } + [features] -parallel = ["rayon"] +parallel = ["rayon", "revive-solc-json-interface/parallel"] default = ["parallel"] + diff --git a/crates/resolc/src/build/contract.rs b/crates/resolc/src/build/contract.rs index 721f501..3a75d9d 100644 --- a/crates/resolc/src/build/contract.rs +++ b/crates/resolc/src/build/contract.rs @@ -1,10 +1,19 @@ //! The Solidity contract build. 
-use std::collections::HashSet; +use std::collections::BTreeMap; +use std::collections::BTreeSet; use std::fs::File; use std::io::Write; use std::path::Path; +use std::path::PathBuf; +use revive_common::ContractIdentifier; +use revive_common::ObjectFormat; +use revive_common::BYTE_LENGTH_WORD; +use revive_common::EXTENSION_JSON; +use revive_common::EXTENSION_POLKAVM_ASSEMBLY; +use revive_common::EXTENSION_POLKAVM_BINARY; +use revive_llvm_context::PolkaVMBuild; use revive_solc_json_interface::CombinedJsonContract; use revive_solc_json_interface::SolcStandardJsonOutputContract; use serde::Deserialize; @@ -13,92 +22,137 @@ use serde::Serialize; /// The Solidity contract build. #[derive(Debug, Serialize, Deserialize)] pub struct Contract { - /// The contract path. - pub path: String, - /// The auxiliary identifier. Used to identify Yul objects. - pub identifier: String, + /// The contract identifier. + pub identifier: ContractIdentifier, /// The LLVM module build. - pub build: revive_llvm_context::PolkaVMBuild, + pub build: PolkaVMBuild, /// The metadata JSON. pub metadata_json: serde_json::Value, - /// The factory dependencies. - pub factory_dependencies: HashSet, + /// The unlinked missing libraries. + pub missing_libraries: BTreeSet, + /// The unresolved factory dependencies. + pub factory_dependencies: BTreeSet, + /// The resolved factory dependencies. + pub factory_dependencies_resolved: BTreeMap<[u8; BYTE_LENGTH_WORD], String>, + /// The binary object format. + pub object_format: ObjectFormat, } impl Contract { /// A shortcut constructor. 
pub fn new( - path: String, - identifier: String, - build: revive_llvm_context::PolkaVMBuild, + identifier: ContractIdentifier, + build: PolkaVMBuild, metadata_json: serde_json::Value, - factory_dependencies: HashSet, + missing_libraries: BTreeSet, + factory_dependencies: BTreeSet, + object_format: ObjectFormat, ) -> Self { Self { - path, identifier, build, metadata_json, + missing_libraries, factory_dependencies, + factory_dependencies_resolved: BTreeMap::new(), + object_format, } } + /// Writes the contract text assembly and bytecode to terminal. + pub fn write_to_terminal( + self, + path: String, + output_metadata: bool, + output_assembly: bool, + output_binary: bool, + ) -> anyhow::Result<()> { + writeln!(std::io::stdout(), "\n======= {path} =======")?; + if output_assembly { + writeln!( + std::io::stdout(), + "Assembly:\n{}", + self.build.assembly_text.unwrap_or_default(), + )?; + } + if output_metadata { + writeln!(std::io::stdout(), "Metadata:\n{}", self.metadata_json)?; + } + if output_binary { + writeln!( + std::io::stdout(), + "Binary:\n{}", + hex::encode(self.build.bytecode) + )?; + } + + Ok(()) + } + /// Writes the contract text assembly and bytecode to files. 
pub fn write_to_directory( self, path: &Path, + output_metadata: bool, output_assembly: bool, output_binary: bool, overwrite: bool, ) -> anyhow::Result<()> { - let file_name = Self::short_path(self.path.as_str()); - - if output_assembly { - let file_name = format!( - "{}.{}", - file_name, - revive_common::EXTENSION_POLKAVM_ASSEMBLY - ); - let mut file_path = path.to_owned(); - file_path.push(file_name); + let file_path = PathBuf::from(self.identifier.path); + let file_name = file_path + .file_name() + .expect("Always exists") + .to_str() + .expect("Always valid"); + let output_path = path.to_owned(); + std::fs::create_dir_all(output_path.as_path())?; + if output_metadata { + let file_path = output_path.join(format!( + "{file_name}:{}.{EXTENSION_JSON}", + self.identifier.name.as_deref().unwrap_or(file_name), + )); if file_path.exists() && !overwrite { anyhow::bail!( "Refusing to overwrite an existing file {file_path:?} (use --overwrite to force)." ); - } else { - let assembly_text = self.build.assembly_text; - - File::create(&file_path) - .map_err(|error| { - anyhow::anyhow!("File {:?} creating error: {}", file_path, error) - })? - .write_all(assembly_text.as_bytes()) - .map_err(|error| { - anyhow::anyhow!("File {:?} writing error: {}", file_path, error) - })?; } + std::fs::write( + file_path.as_path(), + self.metadata_json.to_string().as_bytes(), + ) + .map_err(|error| anyhow::anyhow!("File {file_path:?} writing: {error}"))?; + } + if output_assembly { + let file_path = output_path.join(format!( + "{file_name}:{}.{EXTENSION_POLKAVM_ASSEMBLY}", + self.identifier.name.as_deref().unwrap_or(file_name), + )); + if file_path.exists() && !overwrite { + anyhow::bail!( + "Refusing to overwrite an existing file {file_path:?} (use --overwrite to force)." + ); + } + File::create(&file_path) + .map_err(|error| anyhow::anyhow!("File {file_path:?} creating error: {error}"))? 
+ .write_all(self.build.assembly_text.unwrap_or_default().as_bytes()) + .map_err(|error| anyhow::anyhow!("File {file_path:?} writing error: {error}"))?; } if output_binary { - let file_name = format!("{}.{}", file_name, revive_common::EXTENSION_POLKAVM_BINARY); - let mut file_path = path.to_owned(); - file_path.push(file_name); - + let file_path = output_path.join(format!( + "{file_name}:{}.{EXTENSION_POLKAVM_BINARY}", + self.identifier.name.as_deref().unwrap_or(file_name), + )); if file_path.exists() && !overwrite { anyhow::bail!( "Refusing to overwrite an existing file {file_path:?} (use --overwrite to force)." ); - } else { - File::create(&file_path) - .map_err(|error| { - anyhow::anyhow!("File {:?} creating error: {}", file_path, error) - })? - .write_all(self.build.bytecode.as_slice()) - .map_err(|error| { - anyhow::anyhow!("File {:?} writing error: {}", file_path, error) - })?; } + File::create(&file_path) + .map_err(|error| anyhow::anyhow!("File {file_path:?} creating error: {error}"))? 
+ .write_all(self.build.bytecode.as_slice()) + .map_err(|error| anyhow::anyhow!("File {file_path:?} writing error: {error}"))?; } Ok(()) @@ -109,20 +163,30 @@ impl Contract { self, combined_json_contract: &mut CombinedJsonContract, ) -> anyhow::Result<()> { + let hexadecimal_bytecode = hex::encode(self.build.bytecode); + if let Some(metadata) = combined_json_contract.metadata.as_mut() { *metadata = self.metadata_json.to_string(); } - if let Some(asm) = combined_json_contract.asm.as_mut() { - *asm = serde_json::Value::String(self.build.assembly_text); - } - let hexadecimal_bytecode = hex::encode(self.build.bytecode); + combined_json_contract.assembly = self.build.assembly_text; combined_json_contract.bin = Some(hexadecimal_bytecode); combined_json_contract .bin_runtime .clone_from(&combined_json_contract.bin); - combined_json_contract.factory_deps = Some(self.build.factory_dependencies); + combined_json_contract + .missing_libraries + .extend(self.missing_libraries); + combined_json_contract + .factory_deps_unlinked + .extend(self.factory_dependencies); + combined_json_contract.factory_deps.extend( + self.factory_dependencies_resolved + .into_iter() + .map(|(hash, path)| (hex::encode(hash), path)), + ); + combined_json_contract.object_format = Some(self.object_format); Ok(()) } @@ -132,16 +196,27 @@ impl Contract { self, standard_json_contract: &mut SolcStandardJsonOutputContract, ) -> anyhow::Result<()> { - standard_json_contract.metadata = Some(self.metadata_json); - - let assembly_text = self.build.assembly_text; let bytecode = hex::encode(self.build.bytecode.as_slice()); - if let Some(evm) = standard_json_contract.evm.as_mut() { - evm.modify(assembly_text, bytecode); - } + let assembly_text = self.build.assembly_text.unwrap_or_default(); - standard_json_contract.factory_dependencies = Some(self.build.factory_dependencies); - standard_json_contract.hash = Some(self.build.bytecode_hash); + standard_json_contract.metadata = self.metadata_json; + 
standard_json_contract + .evm + .get_or_insert_with(Default::default) + .modify(assembly_text, bytecode); + standard_json_contract.hash = self.build.bytecode_hash.map(hex::encode); + standard_json_contract + .missing_libraries + .extend(self.missing_libraries); + standard_json_contract + .factory_dependencies_unlinked + .extend(self.factory_dependencies); + standard_json_contract.factory_dependencies.extend( + self.factory_dependencies_resolved + .into_iter() + .map(|(hash, path)| (hex::encode(hash), path)), + ); + standard_json_contract.object_format = Some(self.object_format); Ok(()) } diff --git a/crates/resolc/src/build/mod.rs b/crates/resolc/src/build/mod.rs index 853a48f..e6953d7 100644 --- a/crates/resolc/src/build/mod.rs +++ b/crates/resolc/src/build/mod.rs @@ -1,94 +1,352 @@ //! The Solidity project build. +use std::collections::BTreeMap; +use std::io::Write; +use std::path::Path; +use std::path::PathBuf; + +use normpath::PathExt; + +use revive_common::ObjectFormat; +use revive_common::BYTE_LENGTH_ETH_ADDRESS; +use revive_llvm_context::polkavm_disassemble; +use revive_llvm_context::polkavm_hash; +use revive_llvm_context::polkavm_link; +use revive_llvm_context::DebugConfig; +use revive_solc_json_interface::combined_json::CombinedJson; +use revive_solc_json_interface::CombinedJsonContract; +use revive_solc_json_interface::SolcStandardJsonOutput; +use revive_solc_json_interface::SolcStandardJsonOutputContract; +use revive_solc_json_interface::SolcStandardJsonOutputError; +use revive_solc_json_interface::SolcStandardJsonOutputErrorHandler; + +use crate::build::contract::Contract; +use crate::solc::version::Version as SolcVersion; + pub mod contract; -use std::collections::BTreeMap; -use std::path::Path; - -use revive_solc_json_interface::combined_json::CombinedJson; -use revive_solc_json_interface::SolcStandardJsonOutput; - -use crate::solc::version::Version as SolcVersion; -use crate::ResolcVersion; - -use self::contract::Contract; - -/// The Solidity project 
build. +/// The Solidity project PVM build. #[derive(Debug, Default)] pub struct Build { /// The contract data, - pub contracts: BTreeMap, + pub results: BTreeMap>, + /// The additional message to output (added by the revive compiler). + pub messages: Vec, } impl Build { - /// Writes all contracts to the specified directory. - pub fn write_to_directory( - self, - output_directory: &Path, + /// A shorthand constructor. + /// + /// Note: Takes the supplied `messages`, leaving an empty vec. + pub fn new( + results: BTreeMap>, + messages: &mut Vec, + ) -> Self { + Self { + results, + messages: std::mem::take(messages), + } + } + + /// Links the PVM build. + pub fn link( + mut self, + linker_symbols: BTreeMap, + debug_config: &DebugConfig, + ) -> Self { + let mut contracts: BTreeMap = self + .results + .into_iter() + .map(|(path, result)| (path, result.expect("Cannot link a project with errors"))) + .collect(); + + loop { + let mut linkage_data = BTreeMap::new(); + for (path, contract) in contracts + .iter() + .filter(|(_path, contract)| contract.object_format == ObjectFormat::ELF) + { + match polkavm_link( + &contract.build.bytecode, + &linker_symbols, + &contract + .factory_dependencies + .iter() + .filter_map(|dependency| { + let bytecode_hash = contracts + .get(dependency) + .as_ref()? + .build + .bytecode_hash + .as_ref()? 
+ .to_owned(); + Some((dependency.to_owned(), bytecode_hash)) + }) + .collect(), + !debug_config.emit_debug_info, + ) { + Ok((memory_buffer_linked, ObjectFormat::PVM)) => { + let bytecode_hash = polkavm_hash(&memory_buffer_linked); + let assembly_text = + polkavm_disassemble(path, &memory_buffer_linked, debug_config) + .unwrap_or_else(|error| { + panic!("ICE: The PVM disassembler failed: {error}") + }); + linkage_data.insert( + path.to_owned(), + (memory_buffer_linked, bytecode_hash, assembly_text), + ); + } + Ok((_memory_buffer_linked, ObjectFormat::ELF)) => {} + Err(error) => self + .messages + .push(SolcStandardJsonOutputError::new_error(error, None, None)), + } + } + if linkage_data.is_empty() { + break; + } + + for (path, (memory_buffer_linked, bytecode_hash, assembly_text)) in + linkage_data.into_iter() + { + let contract = contracts.get(path.as_str()).expect("Always exists"); + let factory_dependencies_resolved = contract + .factory_dependencies + .iter() + .filter_map(|dependency| { + Some(( + contracts + .get(dependency) + .as_ref()? + .build + .bytecode_hash + .as_ref()? + .to_owned(), + dependency.to_owned(), + )) + }) + .collect(); + let contract = contracts.get_mut(path.as_str()).expect("Always exists"); + contract.build.bytecode = memory_buffer_linked.as_slice().to_vec(); + contract.build.bytecode_hash = Some(bytecode_hash); + contract.build.assembly_text = Some(assembly_text); + contract.factory_dependencies_resolved = factory_dependencies_resolved; + contract.object_format = ObjectFormat::PVM; + } + } + + let results = contracts + .into_iter() + .map(|(path, contract)| { + if contract.object_format == ObjectFormat::ELF { + self.messages.push(SolcStandardJsonOutputError::new_warning( + format!("{path} is unlinked. Consider providing missing libraries."), + None, + None, + )); + } + (path, Ok(contract)) + }) + .collect(); + Self::new(results, &mut self.messages) + } + + /// Writes all contracts to the terminal. 
+ pub fn write_to_terminal( + mut self, + output_metadata: bool, output_assembly: bool, output_binary: bool, - overwrite: bool, ) -> anyhow::Result<()> { - for (_path, contract) in self.contracts.into_iter() { - contract.write_to_directory( - output_directory, + self.take_and_write_warnings(); + self.exit_on_error(); + + if !output_metadata && !output_assembly && !output_binary { + writeln!( + std::io::stderr(), + "Compiler run successful. No output requested. Use flags --metadata, --asm, --bin." + )?; + return Ok(()); + } + + for (path, build) in self.results.into_iter() { + build.expect("Always valid").write_to_terminal( + path, + output_metadata, output_assembly, output_binary, - overwrite, )?; } Ok(()) } - /// Writes all contracts assembly and bytecode to the combined JSON. - pub fn write_to_combined_json(self, combined_json: &mut CombinedJson) -> anyhow::Result<()> { - for (path, contract) in self.contracts.into_iter() { - let combined_json_contract = combined_json - .contracts - .iter_mut() - .find_map(|(json_path, contract)| { - if path.ends_with(json_path) { - Some(contract) - } else { - None - } - }) - .ok_or_else(|| anyhow::anyhow!("Contract `{}` not found in the project", path))?; + /// Writes all contracts to the specified directory. + pub fn write_to_directory( + mut self, + output_directory: &Path, + output_metadata: bool, + output_assembly: bool, + output_binary: bool, + overwrite: bool, + ) -> anyhow::Result<()> { + self.take_and_write_warnings(); + self.exit_on_error(); - contract.write_to_combined_json(combined_json_contract)?; + std::fs::create_dir_all(output_directory)?; + + for build in self.results.into_values() { + build.expect("Always valid").write_to_directory( + output_directory, + output_metadata, + output_assembly, + output_binary, + overwrite, + )?; } - combined_json.revive_version = Some(ResolcVersion::default().long); + writeln!( + std::io::stderr(), + "Compiler run successful. 
Artifact(s) can be found in directory {output_directory:?}." + )?; + Ok(()) + } + + /// Writes all contracts assembly and bytecode to the combined JSON. + pub fn write_to_combined_json( + mut self, + combined_json: &mut CombinedJson, + ) -> anyhow::Result<()> { + self.take_and_write_warnings(); + self.exit_on_error(); + + for result in self.results.into_values() { + let build = result.expect("Exits on an error above"); + let identifier = build.identifier.clone(); + + let combined_json_contract = + match combined_json + .contracts + .iter_mut() + .find_map(|(json_path, contract)| { + if Self::normalize_full_path(identifier.full_path.as_str()) + .ends_with(Self::normalize_full_path(json_path).as_str()) + { + Some(contract) + } else { + None + } + }) { + Some(contract) => contract, + None => { + combined_json.contracts.insert( + identifier.full_path.clone(), + CombinedJsonContract::default(), + ); + combined_json + .contracts + .get_mut(identifier.full_path.as_str()) + .expect("Always exists") + } + }; + + build.write_to_combined_json(combined_json_contract)?; + } Ok(()) } /// Writes all contracts assembly and bytecode to the standard JSON. 
pub fn write_to_standard_json( - mut self, + self, standard_json: &mut SolcStandardJsonOutput, solc_version: &SolcVersion, ) -> anyhow::Result<()> { - let contracts = match standard_json.contracts.as_mut() { - Some(contracts) => contracts, - None => return Ok(()), - }; + let mut errors = Vec::with_capacity(self.results.len()); + for result in self.results.into_values() { + let build = match result { + Ok(build) => build, + Err(error) => { + errors.push(error); + continue; + } + }; + let identifier = build.identifier.clone(); - for (path, contracts) in contracts.iter_mut() { - for (name, contract) in contracts.iter_mut() { - let full_name = format!("{path}:{name}"); - - if let Some(contract_data) = self.contracts.remove(full_name.as_str()) { - contract_data.write_to_standard_json(contract)?; + match standard_json + .contracts + .get_mut(identifier.path.as_str()) + .and_then(|contracts| { + contracts.get_mut( + identifier + .name + .as_deref() + .unwrap_or(identifier.path.as_str()), + ) + }) { + Some(contract) => { + build.write_to_standard_json(contract)?; + } + None => { + let contracts = standard_json + .contracts + .entry(identifier.path.clone()) + .or_default(); + let mut contract = SolcStandardJsonOutputContract::default(); + build.write_to_standard_json(&mut contract)?; + contracts.insert(identifier.name.unwrap_or(identifier.path), contract); } } } + standard_json.errors.extend(errors); standard_json.version = Some(solc_version.default.to_string()); standard_json.long_version = Some(solc_version.long.to_owned()); - standard_json.revive_version = Some(ResolcVersion::default().long); Ok(()) } + + /// Normalizes the full contract path. + /// + /// # Panics + /// If the path does not contain a colon. 
+ fn normalize_full_path(path: &str) -> String { + let mut iterator = path.split(':'); + let path = iterator.next().expect("Always exists"); + let name = iterator.next().expect("Always exists"); + + let mut full_path = PathBuf::from(path) + .normalize() + .expect("Path normalization error") + .as_os_str() + .to_string_lossy() + .into_owned(); + full_path.push(':'); + full_path.push_str(name); + full_path + } +} + +impl SolcStandardJsonOutputErrorHandler for Build { + fn errors(&self) -> Vec<&SolcStandardJsonOutputError> { + let mut errors: Vec<&SolcStandardJsonOutputError> = self + .results + .values() + .filter_map(|build| build.as_ref().err()) + .collect(); + errors.extend(self.messages.iter().filter(|message| message.is_error())); + errors + } + + fn take_warnings(&mut self) -> Vec { + let warnings = self + .messages + .iter() + .filter(|message| message.is_warning()) + .cloned() + .collect(); + self.messages.retain(|message| !message.is_warning()); + warnings + } } diff --git a/crates/resolc/src/const.rs b/crates/resolc/src/const.rs index 519584a..219e562 100644 --- a/crates/resolc/src/const.rs +++ b/crates/resolc/src/const.rs @@ -1,5 +1,7 @@ //! Solidity to PolkaVM compiler constants. +use revive_common::BYTE_LENGTH_WORD; + /// The default executable name. pub static DEFAULT_EXECUTABLE_NAME: &str = "resolc"; @@ -7,10 +9,10 @@ pub static DEFAULT_EXECUTABLE_NAME: &str = "resolc"; pub const OFFSET_SCRATCH_SPACE: usize = 0; /// The memory pointer offset. -pub const OFFSET_MEMORY_POINTER: usize = 2 * revive_common::BYTE_LENGTH_WORD; +pub const OFFSET_MEMORY_POINTER: usize = 2 * BYTE_LENGTH_WORD; /// The empty slot offset. -pub const OFFSET_EMPTY_SLOT: usize = 3 * revive_common::BYTE_LENGTH_WORD; +pub const OFFSET_EMPTY_SLOT: usize = 3 * BYTE_LENGTH_WORD; /// The non-reserved memory offset. 
-pub const OFFSET_NON_RESERVED: usize = 4 * revive_common::BYTE_LENGTH_WORD; +pub const OFFSET_NON_RESERVED: usize = 4 * BYTE_LENGTH_WORD; diff --git a/crates/resolc/src/lib.rs b/crates/resolc/src/lib.rs index 698e8b9..8a9f389 100644 --- a/crates/resolc/src/lib.rs +++ b/crates/resolc/src/lib.rs @@ -1,12 +1,35 @@ //! Solidity to PolkaVM compiler library. -pub(crate) mod build; -pub(crate) mod r#const; -pub(crate) mod missing_libraries; -pub(crate) mod process; -pub(crate) mod project; -pub(crate) mod solc; -pub(crate) mod version; +#![allow(clippy::too_many_arguments)] + +use std::collections::BTreeMap; +use std::collections::BTreeSet; +use std::collections::HashSet; +use std::io::Write; +use std::path::PathBuf; + +#[cfg(feature = "parallel")] +use rayon::iter::IntoParallelIterator; +#[cfg(feature = "parallel")] +use rayon::iter::ParallelIterator; +use revive_common::EVMVersion; +use revive_common::MetadataHash; +use revive_common::EXIT_CODE_SUCCESS; +use revive_llvm_context::DebugConfig; +use revive_llvm_context::OptimizerSettings; +use revive_solc_json_interface::CombinedJsonSelector; +use revive_solc_json_interface::ResolcWarning; +use revive_solc_json_interface::SolcStandardJsonInput; +use revive_solc_json_interface::SolcStandardJsonInputLanguage; +use revive_solc_json_interface::SolcStandardJsonInputSettingsLibraries; +use revive_solc_json_interface::SolcStandardJsonInputSettingsOptimizer; +use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVM; +use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory; +use revive_solc_json_interface::SolcStandardJsonInputSettingsSelection; +use revive_solc_json_interface::SolcStandardJsonOutputError; +use revive_solc_json_interface::SolcStandardJsonOutputErrorHandler; + +use crate::linker::Output; pub use self::build::contract::Contract as ContractBuild; pub use self::build::Build; @@ -31,118 +54,76 @@ pub use self::solc::FIRST_SUPPORTED_VERSION as SolcFirstSupportedVersion; pub use 
self::solc::LAST_SUPPORTED_VERSION as SolcLastSupportedVersion; pub use self::version::Version as ResolcVersion; +pub(crate) mod build; +pub(crate) mod r#const; +pub(crate) mod linker; +pub(crate) mod missing_libraries; +pub(crate) mod process; +pub(crate) mod project; +pub(crate) mod solc; #[cfg(not(target_os = "emscripten"))] pub mod test_utils; pub mod tests; +pub(crate) mod version; -use std::collections::BTreeSet; -use std::io::Write; -use std::path::PathBuf; - -use revive_solc_json_interface::standard_json::input::settings::metadata_hash::MetadataHash; -use revive_solc_json_interface::ResolcWarning; -use revive_solc_json_interface::SolcStandardJsonInput; -use revive_solc_json_interface::SolcStandardJsonInputLanguage; -use revive_solc_json_interface::SolcStandardJsonInputSettingsOptimizer; -use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVM; -use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory; -use revive_solc_json_interface::SolcStandardJsonInputSettingsSelection; +/// The rayon worker stack size. +pub const RAYON_WORKER_STACK_SIZE: usize = 64 * 1024 * 1024; /// Runs the Yul mode. 
pub fn yul( + solc: &T, input_files: &[PathBuf], - solc: &mut T, - optimizer_settings: revive_llvm_context::OptimizerSettings, - include_metadata_hash: bool, - mut debug_config: revive_llvm_context::DebugConfig, + libraries: &[String], + metadata_hash: MetadataHash, + messages: &mut Vec, + optimizer_settings: OptimizerSettings, + debug_config: DebugConfig, llvm_arguments: &[String], memory_config: SolcStandardJsonInputSettingsPolkaVMMemory, ) -> anyhow::Result { - let path = match input_files.len() { - 1 => input_files.first().expect("Always exists"), - 0 => anyhow::bail!("The input file is missing"), - length => anyhow::bail!( - "Only one input file is allowed in the Yul mode, but found {}", - length, - ), - }; + let libraries = SolcStandardJsonInputSettingsLibraries::try_from(libraries)?; + solc.validate_yul_paths(input_files, libraries.clone(), messages)?; - if solc.version()?.default != solc::LAST_SUPPORTED_VERSION { - anyhow::bail!( - "The Yul mode is only supported with the most recent version of the Solidity compiler: {}", - solc::LAST_SUPPORTED_VERSION, - ); - } - - let solc_validator = Some(&*solc); - let project = Project::try_from_yul_path(path, solc_validator)?; - - debug_config.set_yul_path(path); - let build = project.compile( + let linker_symbols = libraries.as_linker_symbols()?; + let project = Project::try_from_yul_paths(input_files, None, libraries, &debug_config)?; + let mut build = project.compile( + messages, optimizer_settings, - include_metadata_hash, - debug_config, - llvm_arguments, - memory_config, - )?; - - Ok(build) -} - -/// Runs the LLVM IR mode. 
-pub fn llvm_ir( - input_files: &[PathBuf], - optimizer_settings: revive_llvm_context::OptimizerSettings, - include_metadata_hash: bool, - debug_config: revive_llvm_context::DebugConfig, - llvm_arguments: &[String], - memory_config: SolcStandardJsonInputSettingsPolkaVMMemory, -) -> anyhow::Result { - let path = match input_files.len() { - 1 => input_files.first().expect("Always exists"), - 0 => anyhow::bail!("The input file is missing"), - length => anyhow::bail!( - "Only one input file is allowed in the LLVM IR mode, but found {}", - length, - ), - }; - - let project = Project::try_from_llvm_ir_path(path)?; - - let build = project.compile( - optimizer_settings, - include_metadata_hash, - debug_config, + metadata_hash, + &debug_config, llvm_arguments, memory_config, )?; + build.take_and_write_warnings(); + build.check_errors()?; + let mut build = build.link(linker_symbols, &debug_config); + build.take_and_write_warnings(); + build.check_errors()?; Ok(build) } /// Runs the standard output mode. 
-#[allow(clippy::too_many_arguments)] pub fn standard_output( + solc: &T, input_files: &[PathBuf], - libraries: Vec, - solc: &mut T, - evm_version: Option, + libraries: &[String], + metadata_hash: MetadataHash, + messages: &mut Vec, + evm_version: Option, solc_optimizer_enabled: bool, - optimizer_settings: revive_llvm_context::OptimizerSettings, - include_metadata_hash: bool, + optimizer_settings: OptimizerSettings, base_path: Option, include_paths: Vec, allow_paths: Option, - remappings: Option>, - suppressed_warnings: Option>, - debug_config: revive_llvm_context::DebugConfig, - llvm_arguments: &[String], + remappings: BTreeSet, + suppressed_warnings: Vec, + debug_config: DebugConfig, + llvm_arguments: Vec, memory_config: SolcStandardJsonInputSettingsPolkaVMMemory, ) -> anyhow::Result { let solc_version = solc.version()?; - - let solc_input = SolcStandardJsonInput::try_from_paths( - SolcStandardJsonInputLanguage::Solidity, + let mut solc_input = SolcStandardJsonInput::try_from_solidity_paths( evm_version, input_files, libraries, @@ -150,162 +131,211 @@ pub fn standard_output( SolcStandardJsonInputSettingsSelection::new_required(), SolcStandardJsonInputSettingsOptimizer::new( solc_optimizer_enabled, - None, - &solc_version.default, - optimizer_settings.is_fallback_to_size_enabled(), + SolcStandardJsonInputSettingsOptimizer::default_mode(), + Default::default(), ), - None, + Default::default(), suppressed_warnings, - Some(SolcStandardJsonInputSettingsPolkaVM::new( + SolcStandardJsonInputSettingsPolkaVM::new( Some(memory_config), debug_config.emit_debug_info, - )), + ), + llvm_arguments, + false, )?; + let mut solc_output = solc.standard_json( + &mut solc_input, + messages, + base_path, + include_paths, + allow_paths, + )?; + solc_output.take_and_write_warnings(); + solc_output.check_errors()?; - let source_code_files = solc_input - .sources - .iter() - .map(|(path, source)| (path.to_owned(), source.content.to_owned())) - .collect(); - - let libraries = 
solc_input.settings.libraries.clone().unwrap_or_default(); - let solc_output = solc.standard_json(solc_input, base_path, include_paths, allow_paths)?; - - if let Some(errors) = solc_output.errors.as_deref() { - let mut has_errors = false; - - for error in errors.iter() { - if error.severity.as_str() == "error" { - has_errors = true; - } - - writeln!(std::io::stderr(), "{error}")?; - } - - if has_errors { - anyhow::bail!("Error(s) found. Compilation aborted"); - } - } + let linker_symbols = solc_input.settings.libraries.as_linker_symbols()?; let project = Project::try_from_standard_json_output( - &solc_output, - source_code_files, - libraries, + &mut solc_output, + solc_input.settings.libraries, &solc_version, &debug_config, )?; + solc_output.take_and_write_warnings(); + solc_output.check_errors()?; - let build = project.compile( + let mut build = project.compile( + messages, optimizer_settings, - include_metadata_hash, - debug_config, - llvm_arguments, + metadata_hash, + &debug_config, + &solc_input.settings.llvm_arguments, memory_config, )?; + build.take_and_write_warnings(); + build.check_errors()?; + + let mut build = build.link(linker_symbols, &debug_config); + build.take_and_write_warnings(); + build.check_errors()?; Ok(build) } /// Runs the standard JSON mode. 
-#[allow(clippy::too_many_arguments)] pub fn standard_json( - solc: &mut T, - detect_missing_libraries: bool, + solc: &T, + metadata_hash: MetadataHash, + messages: &mut Vec, + json_path: Option, base_path: Option, include_paths: Vec, allow_paths: Option, - mut debug_config: revive_llvm_context::DebugConfig, - llvm_arguments: &[String], + mut debug_config: DebugConfig, + detect_missing_libraries: bool, ) -> anyhow::Result<()> { let solc_version = solc.version()?; - - let solc_input = SolcStandardJsonInput::try_from_stdin()?; - let source_code_files = solc_input - .sources - .iter() - .map(|(path, source)| (path.to_owned(), source.content.to_owned())) - .collect(); - - let optimizer_settings = - revive_llvm_context::OptimizerSettings::try_from(&solc_input.settings.optimizer)?; - - let polkavm_settings = solc_input.settings.polkavm.unwrap_or_default(); - debug_config.emit_debug_info = polkavm_settings.debug_information.unwrap_or_default(); - - let include_metadata_hash = match solc_input.settings.metadata { - Some(ref metadata) => metadata.bytecode_hash != Some(MetadataHash::None), - None => true, - }; - - let libraries = solc_input.settings.libraries.clone().unwrap_or_default(); - let mut solc_output = solc.standard_json(solc_input, base_path, include_paths, allow_paths)?; - - if let Some(errors) = solc_output.errors.as_deref() { - for error in errors.iter() { - if error.severity.as_str() == "error" { - serde_json::to_writer(std::io::stdout(), &solc_output)?; - std::process::exit(0); - } - } - } - - let project = Project::try_from_standard_json_output( - &solc_output, - source_code_files, - libraries, - &solc_version, - &debug_config, + let mut solc_input = SolcStandardJsonInput::try_from(json_path.as_deref())?; + let language = solc_input.language; + let prune_output = solc_input.settings.selection_to_prune(); + let deployed_libraries = solc_input.settings.libraries.as_paths(); + let linker_symbols = solc_input.settings.libraries.as_linker_symbols()?; + let 
optimizer_settings = OptimizerSettings::try_from_cli(solc_input.settings.optimizer.mode)?; + let detect_missing_libraries = + solc_input.settings.detect_missing_libraries || detect_missing_libraries; + debug_config.emit_debug_info = solc_input + .settings + .polkavm + .debug_information + .unwrap_or(false); + solc_input.extend_selection(SolcStandardJsonInputSettingsSelection::new_required()); + let mut solc_output = solc.standard_json( + &mut solc_input, + messages, + base_path, + include_paths, + allow_paths, )?; - if detect_missing_libraries { - let missing_libraries = project.get_missing_libraries(); - missing_libraries.write_to_standard_json(&mut solc_output, &solc_version)?; - } else { - let build = project.compile( - optimizer_settings, - include_metadata_hash, - debug_config, - llvm_arguments, - polkavm_settings - .memory_config - .unwrap_or_else(SolcStandardJsonInputSettingsPolkaVMMemory::default), - )?; - build.write_to_standard_json(&mut solc_output, &solc_version)?; + let (mut solc_output, project) = match language { + SolcStandardJsonInputLanguage::Solidity => { + let project = Project::try_from_standard_json_output( + &mut solc_output, + solc_input.settings.libraries, + &solc_version, + &debug_config, + )?; + (solc_output, project) + } + SolcStandardJsonInputLanguage::Yul => { + let mut solc_output = solc.validate_yul_standard_json(&mut solc_input, messages)?; + if solc_output.has_errors() { + solc_output.write_and_exit(prune_output); + } + let project = Project::try_from_yul_sources( + solc_input.sources, + solc_input.settings.libraries, + Some(&mut solc_output), + &debug_config, + )?; + + (solc_output, project) + } + }; + + if solc_output.has_errors() { + solc_output.write_and_exit(prune_output); } - serde_json::to_writer(std::io::stdout(), &solc_output)?; - std::process::exit(0); + + if detect_missing_libraries { + let missing_libraries = project.get_missing_libraries(&deployed_libraries); + missing_libraries.write_to_standard_json(&mut solc_output, 
&solc_version); + solc_output.write_and_exit(prune_output); + } + + let build = project.compile( + messages, + optimizer_settings, + metadata_hash, + &debug_config, + &solc_input.settings.llvm_arguments, + solc_input + .settings + .polkavm + .memory_config + .unwrap_or_default(), + )?; + if build.has_errors() { + build.write_to_standard_json(&mut solc_output, &solc_version)?; + solc_output.write_and_exit(prune_output); + } + + let build = build.link(linker_symbols, &debug_config); + build.write_to_standard_json(&mut solc_output, &solc_version)?; + solc_output.write_and_exit(prune_output); } /// Runs the combined JSON mode. -#[allow(clippy::too_many_arguments)] pub fn combined_json( + solc: &T, + paths: &[PathBuf], + libraries: &[String], + metadata_hash: MetadataHash, + messages: &mut Vec, + evm_version: Option, format: String, - input_files: &[PathBuf], - libraries: Vec, - solc: &mut T, - evm_version: Option, solc_optimizer_enabled: bool, - optimizer_settings: revive_llvm_context::OptimizerSettings, - include_metadata_hash: bool, + optimizer_settings: OptimizerSettings, base_path: Option, include_paths: Vec, allow_paths: Option, - remappings: Option>, - suppressed_warnings: Option>, - debug_config: revive_llvm_context::DebugConfig, + remappings: BTreeSet, + suppressed_warnings: Vec, + debug_config: DebugConfig, output_directory: Option, overwrite: bool, - llvm_arguments: &[String], + llvm_arguments: Vec, memory_config: SolcStandardJsonInputSettingsPolkaVMMemory, ) -> anyhow::Result<()> { - let build = standard_output( - input_files, - libraries, + let selectors = CombinedJsonSelector::from_cli(format.as_str()) + .into_iter() + .filter_map(|result| match result { + Ok(selector) => Some(selector), + Err(error) => { + messages.push(SolcStandardJsonOutputError::new_error( + error.to_string(), + None, + None, + )); + None + } + }) + .collect::>(); + if !selectors.contains(&CombinedJsonSelector::Bytecode) { + messages.push(SolcStandardJsonOutputError::new_warning( + 
"Bytecode is always emitted even if the selector is not provided.".to_string(), + None, + None, + )); + } + if selectors.contains(&CombinedJsonSelector::BytecodeRuntime) { + messages.push(SolcStandardJsonOutputError::new_warning( + format!("The `{}` selector does not make sense for the PVM target, since there is only one bytecode segment.", CombinedJsonSelector::BytecodeRuntime), + None, + None, + )); + } + + let mut combined_json = solc.combined_json(paths, selectors)?; + standard_output( solc, + paths, + libraries, + metadata_hash, + messages, evm_version, solc_optimizer_enabled, optimizer_settings, - include_metadata_hash, base_path, include_paths, allow_paths, @@ -314,24 +344,57 @@ pub fn combined_json( debug_config, llvm_arguments, memory_config, - )?; - - let mut combined_json = solc.combined_json(input_files, format.as_str())?; - build.write_to_combined_json(&mut combined_json)?; + )? + .write_to_combined_json(&mut combined_json)?; match output_directory { Some(output_directory) => { std::fs::create_dir_all(output_directory.as_path())?; - combined_json.write_to_directory(output_directory.as_path(), overwrite)?; - } - None => { + writeln!( - std::io::stdout(), - "{}", - serde_json::to_string(&combined_json).expect("Always valid") + std::io::stderr(), + "Compiler run successful. Artifact(s) can be found in directory {output_directory:?}." )?; } + None => { + serde_json::to_writer(std::io::stdout(), &combined_json)?; + } } - std::process::exit(0); + std::process::exit(EXIT_CODE_SUCCESS); +} + +/// Links unlinked bytecode files. 
+pub fn link(paths: Vec, libraries: Vec) -> anyhow::Result<()> {
+    #[cfg(feature = "parallel")]
+    let iter = paths.into_par_iter();
+    #[cfg(not(feature = "parallel"))]
+    let iter = paths.into_iter();
+
+    let bytecodes = iter
+        .map(|path| {
+            let bytecode = std::fs::read(path.as_str())?;
+            Ok((path, bytecode))
+        })
+        .collect::>>>()?;
+
+    let output = Output::try_from(&bytecodes, &libraries)?;
+
+    #[cfg(feature = "parallel")]
+    let iter = output.linked.into_par_iter();
+    #[cfg(not(feature = "parallel"))]
+    let iter = output.linked.into_iter();
+
+    iter.map(|(path, bytecode)| {
+        std::fs::write(path, bytecode)?;
+        Ok(())
+    })
+    .collect::>()?;
+
+    for (path, _) in output.unlinked {
+        println!("Warning: file '{path}' still unresolved");
+    }
+    println!("Linking completed");
+
+    std::process::exit(EXIT_CODE_SUCCESS);
}
diff --git a/crates/resolc/src/linker.rs b/crates/resolc/src/linker.rs
new file mode 100644
index 0000000..0cd847a
--- /dev/null
+++ b/crates/resolc/src/linker.rs
@@ -0,0 +1,96 @@
+//! The Solidity to PolkaVM compiler deploy time linking library.
+//!
+//! # Deploy time linking
+//!
+//! At compile time, factory dependencies and library addresses
+//! are declared but not necessarily defined.
+//!
+//! `resolc` will emit raw ELF objects for any contract requiring
+//! deploy time linking using the `--link` flag.
+//!
+//! # Internals
+//!
+//! After all contracts have been built successfully, the compiler
+//! tries to link the resulting raw ELF object files into PVM blobs.
+//! This fails if any library address symbols are unknown at compile
+//! time (which is better known in Solidity as the so-called "deploy
+//! time linking" feature). Since factory dependency symbols can be
+//! resolved only after the final PVM blob linking step, missing
+//! libraries may further lead to unresolved factory dependencies.
+ +use std::collections::BTreeMap; + +use revive_common::{ObjectFormat, EXTENSION_POLKAVM_BINARY}; +use revive_llvm_context::{polkavm_hash, polkavm_link}; +use revive_solc_json_interface::SolcStandardJsonInputSettingsLibraries; + +/// The Solidity to PolkaVM compiler deploy time linking outputs. +pub struct Output { + /// The linked objects. + pub linked: BTreeMap>, + /// The unlinked objects. + pub unlinked: Vec<(String, Vec)>, +} + +impl Output { + /// Try linking given `libraries` into given `bytecodes`. + /// + /// Bytecodes failing to fully resolve end up in [Output::unlinked]. + pub fn try_from( + bytecodes: &BTreeMap>, + libraries: &[String], + ) -> anyhow::Result { + let linker_symbols = + SolcStandardJsonInputSettingsLibraries::try_from(libraries)?.as_linker_symbols()?; + + let mut linked = BTreeMap::default(); + let mut unlinked = Vec::default(); + let mut factory_dependencies = BTreeMap::default(); + + for (path, bytecode) in bytecodes { + match ObjectFormat::try_from(bytecode.as_slice()) { + Ok(ObjectFormat::ELF) => unlinked.push((path.clone(), bytecode.clone())), + Ok(ObjectFormat::PVM) => { + factory_dependencies + .insert(factory_dependency_symbol(path), polkavm_hash(bytecode)); + } + Err(error) => anyhow::bail!("{path}: {error}"), + } + } + + loop { + let mut linked_counter = 0; + let mut remaining_objects = Vec::new(); + for (path, bytecode_buffer) in unlinked.drain(..) 
{ + let (linked_bytecode, object_format) = polkavm_link( + &bytecode_buffer, + &linker_symbols, + &factory_dependencies, + true, + )?; + match object_format { + ObjectFormat::ELF => remaining_objects.push((path, linked_bytecode)), + ObjectFormat::PVM => { + factory_dependencies.insert( + factory_dependency_symbol(&path), + polkavm_hash(&linked_bytecode), + ); + linked.insert(path, linked_bytecode); + linked_counter += 1; + } + } + } + unlinked = remaining_objects; + if linked_counter == 0 { + break; + } + } + + Ok(Self { linked, unlinked }) + } +} + +fn factory_dependency_symbol(path: &str) -> String { + path.trim_end_matches(&format!(".{EXTENSION_POLKAVM_BINARY}")) + .to_string() +} diff --git a/crates/resolc/src/missing_libraries.rs b/crates/resolc/src/missing_libraries.rs index dc5b990..4fab579 100644 --- a/crates/resolc/src/missing_libraries.rs +++ b/crates/resolc/src/missing_libraries.rs @@ -1,22 +1,21 @@ //! The missing Solidity libraries. use std::collections::BTreeMap; -use std::collections::HashSet; +use std::collections::BTreeSet; use revive_solc_json_interface::SolcStandardJsonOutput; use crate::solc::version::Version as SolcVersion; -use crate::ResolcVersion; /// The missing Solidity libraries. pub struct MissingLibraries { /// The missing libraries. - pub contract_libraries: BTreeMap>, + pub contract_libraries: BTreeMap>, } impl MissingLibraries { /// A shortcut constructor. 
- pub fn new(contract_libraries: BTreeMap>) -> Self { + pub fn new(contract_libraries: BTreeMap>) -> Self { Self { contract_libraries } } @@ -25,27 +24,19 @@ impl MissingLibraries { mut self, standard_json: &mut SolcStandardJsonOutput, solc_version: &SolcVersion, - ) -> anyhow::Result<()> { - let contracts = match standard_json.contracts.as_mut() { - Some(contracts) => contracts, - None => return Ok(()), - }; - - for (path, contracts) in contracts.iter_mut() { - for (name, contract) in contracts.iter_mut() { + ) { + for (path, file) in standard_json.contracts.iter_mut() { + for (name, contract) in file.iter_mut() { let full_name = format!("{path}:{name}"); let missing_libraries = self.contract_libraries.remove(full_name.as_str()); if let Some(missing_libraries) = missing_libraries { - contract.missing_libraries = Some(missing_libraries); + contract.missing_libraries = missing_libraries; } } } standard_json.version = Some(solc_version.default.to_string()); standard_json.long_version = Some(solc_version.long.to_owned()); - standard_json.revive_version = Some(ResolcVersion::default().long); - - Ok(()) } } diff --git a/crates/resolc/src/process/input.rs b/crates/resolc/src/process/input.rs index fc91296..9a2b78a 100644 --- a/crates/resolc/src/process/input.rs +++ b/crates/resolc/src/process/input.rs @@ -1,51 +1,69 @@ //! Process for compiling a single compilation unit. //! The input data. +use std::collections::BTreeMap; +use std::collections::BTreeSet; + +use revive_common::MetadataHash; +use revive_llvm_context::DebugConfig; +use revive_llvm_context::OptimizerSettings; use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory; use serde::Deserialize; use serde::Serialize; use crate::project::contract::Contract; -use crate::project::Project; +use crate::SolcVersion; /// The input data. #[derive(Debug, Serialize, Deserialize)] pub struct Input { /// The contract representation. pub contract: Contract, - /// The project representation. 
- pub project: Project, + /// The `solc` compiler version. + pub solc_version: Option, /// Whether to append the metadata hash. - pub include_metadata_hash: bool, + pub metadata_hash: MetadataHash, /// The optimizer settings. - pub optimizer_settings: revive_llvm_context::OptimizerSettings, + pub optimizer_settings: OptimizerSettings, /// The debug output config. - pub debug_config: revive_llvm_context::DebugConfig, + pub debug_config: DebugConfig, /// The extra LLVM arguments give used for manual control. pub llvm_arguments: Vec, /// The PVM memory configuration. pub memory_config: SolcStandardJsonInputSettingsPolkaVMMemory, + /// Missing unlinked libraries. + pub missing_libraries: BTreeSet, + /// Factory dependencies. + pub factory_dependencies: BTreeSet, + /// The mapping of auxiliary identifiers, e.g. Yul object names, to full contract paths. + pub identifier_paths: BTreeMap, } impl Input { /// A shortcut constructor. pub fn new( contract: Contract, - project: Project, - include_metadata_hash: bool, - optimizer_settings: revive_llvm_context::OptimizerSettings, - debug_config: revive_llvm_context::DebugConfig, + solc_version: Option, + metadata_hash: MetadataHash, + optimizer_settings: OptimizerSettings, + debug_config: DebugConfig, llvm_arguments: Vec, memory_config: SolcStandardJsonInputSettingsPolkaVMMemory, + missing_libraries: BTreeSet, + factory_dependencies: BTreeSet, + identifier_paths: BTreeMap, ) -> Self { Self { contract, - project, - include_metadata_hash, + solc_version, + metadata_hash, optimizer_settings, debug_config, llvm_arguments, memory_config, + missing_libraries, + factory_dependencies, + identifier_paths, } } } diff --git a/crates/resolc/src/process/mod.rs b/crates/resolc/src/process/mod.rs index a98f822..cb85487 100644 --- a/crates/resolc/src/process/mod.rs +++ b/crates/resolc/src/process/mod.rs @@ -1,5 +1,12 @@ //! Process for compiling a single compilation unit. 
+use revive_solc_json_interface::SolcStandardJsonOutputError; +use serde::de::DeserializeOwned; +use serde::Serialize; + +use self::input::Input; +use self::output::Output; + pub mod input; #[cfg(not(target_os = "emscripten"))] pub mod native_process; @@ -7,71 +14,13 @@ pub mod output; #[cfg(target_os = "emscripten")] pub mod worker_process; -use std::io::{Read, Write}; - -use self::input::Input; -use self::output::Output; - pub trait Process { /// Read input from `stdin`, compile a contract, and write the output to `stdout`. - fn run(input_file: Option<&mut std::fs::File>) -> anyhow::Result<()> { - let mut stdin = std::io::stdin(); - let mut stdout = std::io::stdout(); - let mut stderr = std::io::stderr(); - - let mut buffer = Vec::with_capacity(16384); - match input_file { - Some(ins) => { - if let Err(error) = ins.read_to_end(&mut buffer) { - anyhow::bail!("Failed to read recursive process input file: {:?}", error); - } - } - None => { - if let Err(error) = stdin.read_to_end(&mut buffer) { - anyhow::bail!( - "Failed to read recursive process input from stdin: {:?}", - error - ) - } - } - } - - let input: Input = revive_common::deserialize_from_slice(buffer.as_slice())?; - - revive_llvm_context::initialize_llvm( - revive_llvm_context::Target::PVM, - crate::DEFAULT_EXECUTABLE_NAME, - &input.llvm_arguments, - ); - - let result = input.contract.compile( - input.project, - input.optimizer_settings, - input.include_metadata_hash, - input.debug_config, - &input.llvm_arguments, - input.memory_config, - ); - - match result { - Ok(build) => { - let output = Output::new(build); - let json = serde_json::to_vec(&output).expect("Always valid"); - stdout - .write_all(json.as_slice()) - .expect("Stdout writing error"); - Ok(()) - } - Err(error) => { - let message = error.to_string(); - stderr - .write_all(message.as_bytes()) - .expect("Stderr writing error"); - Err(error) - } - } - } + fn run(input: Input) -> anyhow::Result<()>; /// Runs this process recursively to compile a 
single contract. - fn call(input: Input) -> anyhow::Result; + fn call( + path: &str, + input: I, + ) -> Result; } diff --git a/crates/resolc/src/process/native_process.rs b/crates/resolc/src/process/native_process.rs index e0ff374..d99e064 100644 --- a/crates/resolc/src/process/native_process.rs +++ b/crates/resolc/src/process/native_process.rs @@ -5,6 +5,12 @@ use std::path::PathBuf; use std::process::Command; use once_cell::sync::OnceCell; +use revive_common::deserialize_from_slice; +use revive_common::EXIT_CODE_SUCCESS; +use revive_solc_json_interface::standard_json::output::error::source_location::SourceLocation; +use revive_solc_json_interface::SolcStandardJsonOutputError; +use serde::de::DeserializeOwned; +use serde::Serialize; use super::Input; use super::Output; @@ -16,61 +22,95 @@ pub static EXECUTABLE: OnceCell = OnceCell::new(); pub struct NativeProcess; impl Process for NativeProcess { - fn call(input: Input) -> anyhow::Result { - let input_json = serde_json::to_vec(&input).expect("Always valid"); + fn run(input: Input) -> anyhow::Result<()> { + let source_location = SourceLocation::new(input.contract.identifier.path.to_owned()); - let executable = match EXECUTABLE.get() { - Some(executable) => executable.to_owned(), - None => std::env::current_exe()?, - }; + let result = std::thread::Builder::new() + .stack_size(crate::RAYON_WORKER_STACK_SIZE) + .spawn(move || { + input + .contract + .compile( + input.solc_version, + input.optimizer_settings, + input.metadata_hash, + input.debug_config, + &input.llvm_arguments, + input.memory_config, + input.missing_libraries, + input.factory_dependencies, + input.identifier_paths, + ) + .map(Output::new) + .map_err(|error| { + SolcStandardJsonOutputError::new_error(error, Some(source_location), None) + }) + }) + .expect("Threading error") + .join() + .expect("Threading error"); + serde_json::to_writer(std::io::stdout(), &result) + .map_err(|error| anyhow::anyhow!("Stdout writing error: {error}"))?; + + Ok(()) + } + + 
fn call(path: &str, input: I) -> Result + where + I: Serialize, + O: DeserializeOwned, + { + let executable = EXECUTABLE + .get() + .cloned() + .unwrap_or_else(|| std::env::current_exe().expect("Should have an executable")); let mut command = Command::new(executable.as_path()); command.stdin(std::process::Stdio::piped()); command.stdout(std::process::Stdio::piped()); command.stderr(std::process::Stdio::piped()); command.arg("--recursive-process"); - let process = command.spawn().map_err(|error| { - anyhow::anyhow!("{:?} subprocess spawning error: {:?}", executable, error) - })?; + command.arg(path); - #[cfg(debug_assertions)] - input - .debug_config - .dump_stage_output(&input.contract.path, Some("stage"), &input_json) - .map_err(|error| { - anyhow::anyhow!( - "{:?} failed to log the recursive process output: {:?}", - executable, - error, - ) - })?; - - process + let mut process = command + .spawn() + .unwrap_or_else(|error| panic!("{executable:?} subprocess spawning: {error:?}")); + let stdin = process .stdin - .as_ref() - .ok_or_else(|| anyhow::anyhow!("{:?} stdin getting error", executable))? 
- .write_all(input_json.as_slice()) - .map_err(|error| { - anyhow::anyhow!("{:?} stdin writing error: {:?}", executable, error) - })?; - let output = process.wait_with_output().map_err(|error| { - anyhow::anyhow!("{:?} subprocess output error: {:?}", executable, error) - })?; - if !output.status.success() { - anyhow::bail!( - "{}", - String::from_utf8_lossy(output.stderr.as_slice()).to_string(), + .as_mut() + .unwrap_or_else(|| panic!("{executable:?} subprocess stdin getting error")); + let stdin_input = serde_json::to_vec(&input).expect("Always valid"); + stdin + .write_all(stdin_input.as_slice()) + .unwrap_or_else(|error| panic!("{executable:?} subprocess stdin writing: {error:?}")); + + let result = process + .wait_with_output() + .unwrap_or_else(|error| panic!("{executable:?} subprocess output reading: {error:?}")); + + if result.status.code() != Some(EXIT_CODE_SUCCESS) { + let message = format!( + "{executable:?} subprocess failed with exit code {:?}:\n{}\n{}", + result.status.code(), + String::from_utf8_lossy(result.stdout.as_slice()), + String::from_utf8_lossy(result.stderr.as_slice()), ); + return Err(SolcStandardJsonOutputError::new_error( + message, + Some(SourceLocation::new(path.to_owned())), + None, + )); } - let output: Output = revive_common::deserialize_from_slice(output.stdout.as_slice()) - .map_err(|error| { - anyhow::anyhow!( - "{:?} subprocess output parsing error: {}", - executable, - error, - ) - })?; - Ok(output) + match deserialize_from_slice(result.stdout.as_slice()) { + Ok(output) => output, + Err(error) => { + panic!( + "{executable:?} subprocess stdout parsing error: {error:?}\n{}\n{}", + String::from_utf8_lossy(result.stdout.as_slice()), + String::from_utf8_lossy(result.stderr.as_slice()), + ); + } + } } } diff --git a/crates/resolc/src/process/worker_process.rs b/crates/resolc/src/process/worker_process.rs index 00a332b..faca552 100644 --- a/crates/resolc/src/process/worker_process.rs +++ b/crates/resolc/src/process/worker_process.rs 
@@ -2,13 +2,18 @@ use std::ffi::{c_char, c_void, CStr, CString}; +use serde::de::DeserializeOwned; +use serde::Deserialize; +use serde::Serialize; + +use revive_common::deserialize_from_slice; +use revive_solc_json_interface::standard_json::output::error::source_location::SourceLocation; +use revive_solc_json_interface::SolcStandardJsonOutputError; + use super::Input; use super::Output; use super::Process; -use anyhow::Context; -use serde::Deserialize; - #[derive(Deserialize)] struct Error { message: String, @@ -29,10 +34,40 @@ enum Response { pub struct WorkerProcess; impl Process for WorkerProcess { - fn call(input: Input) -> anyhow::Result { + fn run(input: Input) -> anyhow::Result<()> { + let source_location = SourceLocation::new(input.contract.identifier.path.to_owned()); + + let result = input + .contract + .compile( + None, + input.optimizer_settings, + input.metadata_hash, + input.debug_config, + &input.llvm_arguments, + input.memory_config, + input.missing_libraries, + input.factory_dependencies, + input.identifier_paths, + ) + .map(Output::new) + .map_err(|error| { + SolcStandardJsonOutputError::new_error(error, Some(source_location), None) + }); + + serde_json::to_writer(std::io::stdout(), &result) + .map_err(|error| anyhow::anyhow!("Stdout writing error: {error}"))?; + + Ok(()) + } + + fn call(_path: &str, input: I) -> Result + where + I: Serialize, + O: DeserializeOwned, + { let input_json = serde_json::to_vec(&input).expect("Always valid"); let input_str = String::from_utf8(input_json).expect("Input shall be valid"); - // Prepare the input string for the Emscripten function let input_cstring = CString::new(input_str).expect("CString allocation failed"); // Call the Emscripten function @@ -40,26 +75,20 @@ impl Process for WorkerProcess { unsafe { resolc_compile(input_cstring.as_ptr(), input_cstring.as_bytes().len()) }; // Convert the output pointer back to a Rust string - let output_str = unsafe { - CStr::from_ptr(output_ptr) - .to_str() - 
.with_context(|| "Failed to convert C string to Rust string") - .map(str::to_owned) - }; + let output_str = unsafe { CStr::from_ptr(output_ptr).to_str().map(str::to_owned) }; unsafe { libc::free(output_ptr as *mut c_void) }; - let output_str = output_str?; - let response: Response = serde_json::from_str(&output_str) - .map_err(|error| anyhow::anyhow!("Worker output parsing error: {}", error,))?; - match response { - Response::Success(out) => { - let output: Output = revive_common::deserialize_from_slice(out.data.as_bytes()) - .map_err(|error| { - anyhow::anyhow!("resolc.js subprocess output parsing error: {}", error,) - })?; - Ok(output) - } - Response::Error(err) => anyhow::bail!("Worker error: {}", err.message,), + let output_str = output_str.unwrap_or_else(|error| panic!("resolc.js output: {error:?}")); + let response = serde_json::from_str(&output_str) + .unwrap_or_else(|error| panic!("Worker output parsing error: {error}")); + match response { + Response::Success(out) => match deserialize_from_slice(out.data.as_bytes()) { + Ok(output) => output, + Err(error) => { + panic!("resolc.js subprocess output parsing error: {error}") + } + }, + Response::Error(err) => panic!("Worker error: {}", err.message), } } } diff --git a/crates/resolc/src/project/contract/ir/llvm_ir.rs b/crates/resolc/src/project/contract/ir/llvm_ir.rs deleted file mode 100644 index 0549213..0000000 --- a/crates/resolc/src/project/contract/ir/llvm_ir.rs +++ /dev/null @@ -1,21 +0,0 @@ -//! The contract LLVM IR source code. - -use serde::Deserialize; -use serde::Serialize; - -/// The contract LLVM IR source code. -#[derive(Debug, Serialize, Deserialize, Clone)] -#[allow(clippy::upper_case_acronyms)] -pub struct LLVMIR { - /// The LLVM IR file path. - pub path: String, - /// The LLVM IR source code. - pub source: String, -} - -impl LLVMIR { - /// A shortcut constructor. 
- pub fn new(path: String, source: String) -> Self { - Self { path, source } - } -} diff --git a/crates/resolc/src/project/contract/ir/mod.rs b/crates/resolc/src/project/contract/ir/mod.rs index 0084823..7c358f1 100644 --- a/crates/resolc/src/project/contract/ir/mod.rs +++ b/crates/resolc/src/project/contract/ir/mod.rs @@ -1,66 +1,40 @@ //! The contract source code. -pub mod llvm_ir; -pub mod yul; - -use std::collections::HashSet; +use std::collections::BTreeSet; use serde::Deserialize; use serde::Serialize; -use revive_yul::parser::statement::object::Object; - -use self::llvm_ir::LLVMIR; use self::yul::Yul; +pub mod yul; + /// The contract source code. #[derive(Debug, Serialize, Deserialize, Clone)] #[allow(clippy::upper_case_acronyms)] pub enum IR { /// The Yul source code. Yul(Yul), - /// The LLVM IR source code. - LLVMIR(LLVMIR), } impl IR { - /// A shortcut constructor. - pub fn new_yul(source_code: String, object: Object) -> Self { - Self::Yul(Yul::new(source_code, object)) - } - - /// A shortcut constructor. - pub fn new_llvm_ir(path: String, source: String) -> Self { - Self::LLVMIR(LLVMIR::new(path, source)) + /// Drains the list of factory dependencies. + pub fn drain_factory_dependencies(&mut self) -> BTreeSet { + match self { + IR::Yul(ref mut yul) => yul.object.factory_dependencies.drain().collect(), + } } /// Get the list of missing deployable libraries. 
- pub fn get_missing_libraries(&self) -> HashSet { + pub fn get_missing_libraries(&self) -> BTreeSet { match self { Self::Yul(inner) => inner.get_missing_libraries(), - Self::LLVMIR(_inner) => HashSet::new(), } } } -impl revive_llvm_context::PolkaVMWriteLLVM for IR -where - D: revive_llvm_context::PolkaVMDependency + Clone, -{ - fn declare( - &mut self, - context: &mut revive_llvm_context::PolkaVMContext, - ) -> anyhow::Result<()> { - match self { - Self::Yul(inner) => inner.declare(context), - Self::LLVMIR(_inner) => Ok(()), - } - } - - fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> { - match self { - Self::Yul(inner) => inner.into_llvm(context), - Self::LLVMIR(_inner) => Ok(()), - } +impl From for IR { + fn from(inner: Yul) -> Self { + Self::Yul(inner) } } diff --git a/crates/resolc/src/project/contract/ir/yul.rs b/crates/resolc/src/project/contract/ir/yul.rs index cc879a0..b1b0af6 100644 --- a/crates/resolc/src/project/contract/ir/yul.rs +++ b/crates/resolc/src/project/contract/ir/yul.rs @@ -1,48 +1,46 @@ //! The contract Yul source code. -use std::collections::HashSet; +use std::collections::BTreeSet; +use revive_yul::lexer::Lexer; use serde::Deserialize; use serde::Serialize; use revive_yul::parser::statement::object::Object; -/// The contract Yul source code. +/// he contract Yul source code. #[derive(Debug, Serialize, Deserialize, Clone)] pub struct Yul { - /// The Yul source code. - pub source_code: String, /// The Yul AST object. pub object: Object, } impl Yul { - /// A shortcut constructor. - pub fn new(source_code: String, object: Object) -> Self { - Self { - source_code, - object, - } + /// Transforms the `solc` standard JSON output contract into a Yul object. 
+ pub fn try_from_source(source_code: &str) -> anyhow::Result> { + if source_code.is_empty() { + return Ok(None); + }; + + let mut lexer = Lexer::new(source_code.to_owned()); + let object = Object::parse(&mut lexer, None) + .map_err(|error| anyhow::anyhow!("Yul parsing: {error:?}"))?; + + Ok(Some(Self { object })) } /// Get the list of missing deployable libraries. - pub fn get_missing_libraries(&self) -> HashSet { + pub fn get_missing_libraries(&self) -> BTreeSet { self.object.get_missing_libraries() } } -impl revive_llvm_context::PolkaVMWriteLLVM for Yul -where - D: revive_llvm_context::PolkaVMDependency + Clone, -{ - fn declare( - &mut self, - context: &mut revive_llvm_context::PolkaVMContext, - ) -> anyhow::Result<()> { +impl revive_llvm_context::PolkaVMWriteLLVM for Yul { + fn declare(&mut self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> { self.object.declare(context) } - fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> { + fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> { self.object.into_llvm(context) } } diff --git a/crates/resolc/src/project/contract/metadata.rs b/crates/resolc/src/project/contract/metadata.rs index 41b0b81..745f15c 100644 --- a/crates/resolc/src/project/contract/metadata.rs +++ b/crates/resolc/src/project/contract/metadata.rs @@ -1,5 +1,6 @@ //! The Solidity contract metadata. +use revive_llvm_context::OptimizerSettings; use serde::Serialize; use crate::ResolcVersion; @@ -11,13 +12,11 @@ pub struct Metadata { /// The `solc` metadata. pub solc_metadata: serde_json::Value, /// The `solc` version. - pub solc_version: String, + pub solc_version: Option, /// The pallet revive edition. - pub revive_pallet_version: Option, - /// The PolkaVM compiler version. pub revive_version: String, /// The PolkaVM compiler optimizer settings. 
- pub optimizer_settings: revive_llvm_context::OptimizerSettings, + pub optimizer_settings: OptimizerSettings, /// The extra LLVM arguments give used for manual control. pub llvm_arguments: Vec, } @@ -26,15 +25,13 @@ impl Metadata { /// A shortcut constructor. pub fn new( solc_metadata: serde_json::Value, - solc_version: String, - revive_pallet_version: Option, - optimizer_settings: revive_llvm_context::OptimizerSettings, + solc_version: Option, + optimizer_settings: OptimizerSettings, llvm_arguments: Vec, ) -> Self { Self { solc_metadata, solc_version, - revive_pallet_version, revive_version: ResolcVersion::default().long, optimizer_settings, llvm_arguments, diff --git a/crates/resolc/src/project/contract/mod.rs b/crates/resolc/src/project/contract/mod.rs index c179afc..e428daf 100644 --- a/crates/resolc/src/project/contract/mod.rs +++ b/crates/resolc/src/project/contract/mod.rs @@ -1,29 +1,38 @@ //! The contract data. -pub mod ir; -pub mod metadata; - -use std::collections::HashSet; +use std::collections::BTreeMap; +use std::collections::BTreeSet; +use revive_common::ContractIdentifier; +use revive_common::Keccak256; +use revive_common::MetadataHash; +use revive_common::ObjectFormat; +use revive_llvm_context::DebugConfig; +use revive_llvm_context::Optimizer; +use revive_llvm_context::OptimizerSettings; +use revive_llvm_context::PolkaVMContext; +use revive_llvm_context::PolkaVMContextSolidityData; +use revive_llvm_context::PolkaVMContextYulData; use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory; use serde::Deserialize; use serde::Serialize; -use sha3::Digest; use revive_llvm_context::PolkaVMWriteLLVM; use crate::build::contract::Contract as ContractBuild; -use crate::project::Project; use crate::solc::version::Version as SolcVersion; use self::ir::IR; use self::metadata::Metadata; +pub mod ir; +pub mod metadata; + /// The contract data. #[derive(Debug, Serialize, Deserialize, Clone)] pub struct Contract { /// The absolute file path. 
- pub path: String, + pub identifier: ContractIdentifier, /// The IR source code data. pub ir: IR, /// The metadata JSON. @@ -32,22 +41,9 @@ pub struct Contract { impl Contract { /// A shortcut constructor. - pub fn new( - path: String, - source_hash: [u8; revive_common::BYTE_LENGTH_WORD], - source_version: SolcVersion, - ir: IR, - metadata_json: Option, - ) -> Self { - let metadata_json = metadata_json.unwrap_or_else(|| { - serde_json::json!({ - "source_hash": hex::encode(source_hash.as_slice()), - "source_version": serde_json::to_value(&source_version).expect("Always valid"), - }) - }); - + pub fn new(identifier: ContractIdentifier, ir: IR, metadata_json: serde_json::Value) -> Self { Self { - path, + identifier, ir, metadata_json, } @@ -56,136 +52,77 @@ impl Contract { /// Returns the contract identifier, which is: /// - the Yul object identifier for Yul /// - the module name for LLVM IR - pub fn identifier(&self) -> &str { + pub fn object_identifier(&self) -> &str { match self.ir { IR::Yul(ref yul) => yul.object.identifier.as_str(), - IR::LLVMIR(ref llvm_ir) => llvm_ir.path.as_str(), - } - } - - /// Extract factory dependencies. - pub fn drain_factory_dependencies(&mut self) -> HashSet { - match self.ir { - IR::Yul(ref mut yul) => yul.object.factory_dependencies.drain().collect(), - IR::LLVMIR(_) => HashSet::new(), } } /// Compiles the specified contract, setting its build artifacts. 
pub fn compile( - mut self, - project: Project, - optimizer_settings: revive_llvm_context::OptimizerSettings, - include_metadata_hash: bool, - mut debug_config: revive_llvm_context::DebugConfig, + self, + solc_version: Option, + optimizer_settings: OptimizerSettings, + metadata_hash: MetadataHash, + mut debug_config: DebugConfig, llvm_arguments: &[String], memory_config: SolcStandardJsonInputSettingsPolkaVMMemory, + missing_libraries: BTreeSet, + factory_dependencies: BTreeSet, + identifier_paths: BTreeMap, ) -> anyhow::Result { let llvm = inkwell::context::Context::create(); - let optimizer = revive_llvm_context::Optimizer::new(optimizer_settings); - - let version = project.version.clone(); - let identifier = self.identifier().to_owned(); - + let optimizer = Optimizer::new(optimizer_settings); let metadata = Metadata::new( - self.metadata_json.take(), - version.long.clone(), - version.l2_revision.clone(), + self.metadata_json, + solc_version + .as_ref() + .map(|version| version.default.to_owned()), optimizer.settings().to_owned(), - llvm_arguments.to_vec(), + llvm_arguments.to_owned(), ); let metadata_json = serde_json::to_value(&metadata).expect("Always valid"); - let metadata_hash: Option<[u8; revive_common::BYTE_LENGTH_WORD]> = if include_metadata_hash - { - let metadata_string = serde_json::to_string(&metadata).expect("Always valid"); - Some(sha3::Keccak256::digest(metadata_string.as_bytes()).into()) - } else { - None + let metadata_json_bytes = serde_json::to_vec(&metadata_json).expect("Always valid"); + let metadata_bytes = match metadata_hash { + MetadataHash::Keccak256 => Keccak256::from_slice(&metadata_json_bytes).into(), + MetadataHash::IPFS => todo!("IPFS hash isn't supported yet"), + MetadataHash::None => None, }; + debug_config.set_contract_path(&self.identifier.full_path); - let module = match self.ir { - IR::LLVMIR(ref llvm_ir) => { - // Create the output module - let memory_buffer = - 
inkwell::memory_buffer::MemoryBuffer::create_from_memory_range_copy( - llvm_ir.source.as_bytes(), - self.path.as_str(), - ); - llvm.create_module_from_ir(memory_buffer) - .map_err(|error| anyhow::anyhow!(error.to_string()))? + let build = match self.ir { + IR::Yul(mut yul) => { + let module = llvm.create_module(self.identifier.full_path.as_str()); + let mut context = + PolkaVMContext::new(&llvm, module, optimizer, debug_config, memory_config); + context.set_solidity_data(PolkaVMContextSolidityData::default()); + let yul_data = PolkaVMContextYulData::new(identifier_paths); + context.set_yul_data(yul_data); + + yul.declare(&mut context)?; + yul.into_llvm(&mut context) + .map_err(|error| anyhow::anyhow!("LLVM IR generator: {error}"))?; + + context.build(self.identifier.full_path.as_str(), metadata_bytes)? } - _ => llvm.create_module(self.path.as_str()), }; - debug_config.set_contract_path(&self.path); - let mut context = revive_llvm_context::PolkaVMContext::new( - &llvm, - module, - optimizer, - Some(project), - include_metadata_hash, - debug_config, - llvm_arguments, - memory_config, - ); - context.set_solidity_data(revive_llvm_context::PolkaVMContextSolidityData::default()); - match self.ir { - IR::Yul(_) => { - context.set_yul_data(Default::default()); - } - IR::LLVMIR(_) => {} - } - - let factory_dependencies = self.drain_factory_dependencies(); - - self.ir.declare(&mut context).map_err(|error| { - anyhow::anyhow!( - "The contract `{}` LLVM IR generator declaration pass error: {}", - self.path, - error - ) - })?; - self.ir.into_llvm(&mut context).map_err(|error| { - anyhow::anyhow!( - "The contract `{}` LLVM IR generator definition pass error: {}", - self.path, - error - ) - })?; - - if let Some(debug_info) = context.debug_info() { - debug_info.finalize_module() - } - - let build = context.build(self.path.as_str(), metadata_hash)?; - Ok(ContractBuild::new( - self.path, - identifier, + self.identifier, build, metadata_json, + missing_libraries, 
factory_dependencies, + ObjectFormat::ELF, )) } /// Get the list of missing deployable libraries. - pub fn get_missing_libraries(&self) -> HashSet { - self.ir.get_missing_libraries() - } -} - -impl PolkaVMWriteLLVM for Contract -where - D: revive_llvm_context::PolkaVMDependency + Clone, -{ - fn declare( - &mut self, - context: &mut revive_llvm_context::PolkaVMContext, - ) -> anyhow::Result<()> { - self.ir.declare(context) - } - - fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> { - self.ir.into_llvm(context) + pub fn get_missing_libraries(&self, deployed_libraries: &BTreeSet) -> BTreeSet { + self.ir + .get_missing_libraries() + .into_iter() + .filter(|library| !deployed_libraries.contains(library)) + .collect::>() } } diff --git a/crates/resolc/src/project/mod.rs b/crates/resolc/src/project/mod.rs index d639e7a..4a914b6 100644 --- a/crates/resolc/src/project/mod.rs +++ b/crates/resolc/src/project/mod.rs @@ -3,54 +3,59 @@ pub mod contract; use std::collections::BTreeMap; -use std::collections::HashMap; -use std::collections::HashSet; -use std::path::Path; +use std::collections::BTreeSet; +use std::path::PathBuf; #[cfg(feature = "parallel")] use rayon::iter::{IntoParallelIterator, ParallelIterator}; +use revive_common::Keccak256; +use revive_common::MetadataHash; +use revive_llvm_context::DebugConfig; +use revive_llvm_context::OptimizerSettings; +use revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory; +use revive_solc_json_interface::SolcStandardJsonInputSource; +use revive_solc_json_interface::SolcStandardJsonOutputError; use serde::Deserialize; use serde::Serialize; -use sha3::Digest; +use revive_common::ContractIdentifier; +use revive_solc_json_interface::SolcStandardJsonInputSettingsLibraries; use revive_solc_json_interface::SolcStandardJsonOutput; -use revive_yul::lexer::Lexer; -use revive_yul::parser::statement::object::Object; use crate::build::contract::Contract as ContractBuild; use 
crate::build::Build; use crate::missing_libraries::MissingLibraries; use crate::process::input::Input as ProcessInput; use crate::process::Process; +use crate::project::contract::ir::yul::Yul; use crate::project::contract::ir::IR; +use crate::project::contract::Contract; use crate::solc::version::Version as SolcVersion; -use crate::solc::Compiler; - -use self::contract::Contract; +use crate::ProcessOutput; /// The processes input data. #[derive(Debug, Serialize, Deserialize, Clone)] pub struct Project { /// The source code version. - pub version: SolcVersion, + pub version: Option, /// The project contracts, pub contracts: BTreeMap, /// The mapping of auxiliary identifiers, e.g. Yul object names, to full contract paths. pub identifier_paths: BTreeMap, /// The library addresses. - pub libraries: BTreeMap>, + pub libraries: SolcStandardJsonInputSettingsLibraries, } impl Project { /// A shortcut constructor. pub fn new( - version: SolcVersion, + version: Option, contracts: BTreeMap, - libraries: BTreeMap>, + libraries: SolcStandardJsonInputSettingsLibraries, ) -> Self { let mut identifier_paths = BTreeMap::new(); for (path, contract) in contracts.iter() { - identifier_paths.insert(contract.identifier().to_owned(), path.to_owned()); + identifier_paths.insert(contract.object_identifier().to_owned(), path.to_owned()); } Self { @@ -64,319 +69,195 @@ impl Project { /// Compiles all contracts, returning their build artifacts. 
pub fn compile( self, - optimizer_settings: revive_llvm_context::OptimizerSettings, - include_metadata_hash: bool, - debug_config: revive_llvm_context::DebugConfig, + messages: &mut Vec, + optimizer_settings: OptimizerSettings, + metadata_hash: MetadataHash, + debug_config: &DebugConfig, llvm_arguments: &[String], - memory_config: revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory, + memory_config: SolcStandardJsonInputSettingsPolkaVMMemory, ) -> anyhow::Result { - let project = self.clone(); + let deployed_libraries = self.libraries.as_paths(); + #[cfg(feature = "parallel")] let iter = self.contracts.into_par_iter(); #[cfg(not(feature = "parallel"))] let iter = self.contracts.into_iter(); - let results: BTreeMap> = iter - .map(|(full_path, contract)| { - let process_input = ProcessInput::new( + let results = iter + .map(|(path, mut contract)| { + let factory_dependencies = contract + .ir + .drain_factory_dependencies() + .iter() + .map(|identifier| { + self.identifier_paths + .get(identifier) + .cloned() + .expect("Always exists") + }) + .collect(); + let missing_libraries = contract.get_missing_libraries(&deployed_libraries); + let input = ProcessInput::new( contract, - project.clone(), - include_metadata_hash, + self.version.clone(), + metadata_hash, optimizer_settings.clone(), debug_config.clone(), - llvm_arguments.to_vec(), + llvm_arguments.to_owned(), memory_config, + missing_libraries, + factory_dependencies, + self.identifier_paths.clone(), ); - let process_output = { + let result: Result = { #[cfg(target_os = "emscripten")] { - crate::WorkerProcess::call(process_input) + crate::WorkerProcess::call(path.as_str(), input) } #[cfg(not(target_os = "emscripten"))] { - crate::NativeProcess::call(process_input) + crate::NativeProcess::call(path.as_str(), input) } }; - (full_path, process_output.map(|output| output.build)) + let result = result.map(|output| output.build); + (path, result) }) - .collect(); - - let mut build = Build::default(); - 
let mut hashes = HashMap::with_capacity(results.len()); - for (path, result) in results.iter() { - match result { - Ok(contract) => { - hashes.insert(path.to_owned(), contract.build.bytecode_hash.to_owned()); - } - Err(error) => { - anyhow::bail!("Contract `{}` compiling error: {:?}", path, error); - } - } - } - for (path, result) in results.into_iter() { - match result { - Ok(mut contract) => { - for dependency in contract.factory_dependencies.drain() { - let dependency_path = project - .identifier_paths - .get(dependency.as_str()) - .cloned() - .unwrap_or_else(|| { - panic!("Dependency `{dependency}` full path not found") - }); - let hash = match hashes.get(dependency_path.as_str()) { - Some(hash) => hash.to_owned(), - None => anyhow::bail!( - "Dependency contract `{}` not found in the project", - dependency_path - ), - }; - contract - .build - .factory_dependencies - .insert(hash, dependency_path); - } - - build.contracts.insert(path, contract); - } - Err(error) => { - anyhow::bail!("Contract `{}` compiling error: {:?}", path, error); - } - } - } - - Ok(build) + .collect::>>(); + Ok(Build::new(results, messages)) } /// Get the list of missing deployable libraries. 
- pub fn get_missing_libraries(&self) -> MissingLibraries { - let deployed_libraries = self - .libraries + pub fn get_missing_libraries(&self, deployed_libraries: &BTreeSet) -> MissingLibraries { + let missing_libraries = self + .contracts .iter() - .flat_map(|(file, names)| { - names - .keys() - .map(|name| format!("{file}:{name}")) - .collect::>() + .map(|(path, contract)| { + ( + path.to_owned(), + contract.get_missing_libraries(deployed_libraries), + ) }) - .collect::>(); - - let mut missing_deployable_libraries = BTreeMap::new(); - for (contract_path, contract) in self.contracts.iter() { - let missing_libraries = contract - .get_missing_libraries() - .into_iter() - .filter(|library| !deployed_libraries.contains(library)) - .collect::>(); - missing_deployable_libraries.insert(contract_path.to_owned(), missing_libraries); - } - MissingLibraries::new(missing_deployable_libraries) + .collect(); + MissingLibraries::new(missing_libraries) } /// Parses the Yul source code file and returns the source data. - pub fn try_from_yul_path( - path: &Path, - solc_validator: Option<&T>, + pub fn try_from_yul_paths( + paths: &[PathBuf], + solc_output: Option<&mut SolcStandardJsonOutput>, + libraries: SolcStandardJsonInputSettingsLibraries, + debug_config: &DebugConfig, ) -> anyhow::Result { - let source_code = std::fs::read_to_string(path) - .map_err(|error| anyhow::anyhow!("Yul file {:?} reading error: {}", path, error))?; - Self::try_from_yul_string(path, source_code.as_str(), solc_validator) + let sources = paths + .iter() + .map(|path| { + let source = SolcStandardJsonInputSource::from(path.as_path()); + (path.to_string_lossy().to_string(), source) + }) + .collect::>(); + Self::try_from_yul_sources(sources, libraries, solc_output, debug_config) } /// Parses the test Yul source code string and returns the source data. - /// Only for integration testing purposes. 
- pub fn try_from_yul_string( - path: &Path, - source_code: &str, - solc_validator: Option<&T>, + pub fn try_from_yul_sources( + sources: BTreeMap, + libraries: SolcStandardJsonInputSettingsLibraries, + mut solc_output: Option<&mut SolcStandardJsonOutput>, + debug_config: &DebugConfig, ) -> anyhow::Result { - if let Some(solc) = solc_validator { - solc.validate_yul(path)?; + #[cfg(feature = "parallel")] + let iter = sources.into_par_iter(); + #[cfg(not(feature = "parallel"))] + let iter = sources.into_iter(); + + let results = iter + .filter_map(|(path, mut source)| { + let source_code = match source.try_resolve() { + Ok(()) => source.take_content().expect("Always exists"), + Err(error) => return Some((path, Err(error))), + }; + let ir = match Yul::try_from_source(&source_code) { + Ok(ir) => ir?, + Err(error) => return Some((path, Err(error))), + }; + let object_identifier = ir.object.identifier.clone(); + let name = ContractIdentifier::new(path.clone(), Some(object_identifier)); + let full_path = name.full_path.clone(); + if let Err(error) = debug_config.dump_yul(&name.full_path, &source_code) { + return Some((full_path.clone(), Err(error))); + } + let source_metadata = serde_json::json!({ + "source_hash": Keccak256::from_slice(source_code.as_bytes()).to_string() + }); + let contract = Contract::new(name, ir.into(), source_metadata); + Some((full_path, Ok(contract))) + }) + .collect::>>(); + + let mut contracts = BTreeMap::new(); + for (path, result) in results.into_iter() { + match result { + Ok(contract) => { + contracts.insert(path, contract); + } + Err(error) => match solc_output { + Some(ref mut solc_output) => solc_output.push_error(Some(path), error), + None => anyhow::bail!(error), + }, + } } - - let source_version = SolcVersion::new_simple(crate::solc::LAST_SUPPORTED_VERSION); - let path = path.to_string_lossy().to_string(); - let source_hash = sha3::Keccak256::digest(source_code.as_bytes()).into(); - - let mut lexer = Lexer::new(source_code.to_owned()); 
- let object = Object::parse(&mut lexer, None) - .map_err(|error| anyhow::anyhow!("Yul object `{}` parsing error: {}", path, error))?; - - let mut project_contracts = BTreeMap::new(); - project_contracts.insert( - path.to_owned(), - Contract::new( - path, - source_hash, - source_version.clone(), - IR::new_yul(source_code.to_owned(), object), - None, - ), - ); - - Ok(Self::new( - source_version, - project_contracts, - BTreeMap::new(), - )) - } - - /// Parses the LLVM IR source code file and returns the source data. - pub fn try_from_llvm_ir_path(path: &Path) -> anyhow::Result { - let source_code = std::fs::read_to_string(path) - .map_err(|error| anyhow::anyhow!("LLVM IR file {:?} reading error: {}", path, error))?; - let source_hash = sha3::Keccak256::digest(source_code.as_bytes()).into(); - - let source_version = - SolcVersion::new_simple(revive_llvm_context::polkavm_const::LLVM_VERSION); - let path = path.to_string_lossy().to_string(); - - let mut project_contracts = BTreeMap::new(); - project_contracts.insert( - path.clone(), - Contract::new( - path.clone(), - source_hash, - source_version.clone(), - IR::new_llvm_ir(path, source_code), - None, - ), - ); - - Ok(Self::new( - source_version, - project_contracts, - BTreeMap::new(), - )) + Ok(Self::new(None, contracts, libraries)) } /// Converts the `solc` JSON output into a convenient project. 
pub fn try_from_standard_json_output( - output: &SolcStandardJsonOutput, - source_code_files: BTreeMap, - libraries: BTreeMap>, + solc_output: &mut SolcStandardJsonOutput, + libraries: SolcStandardJsonInputSettingsLibraries, solc_version: &SolcVersion, - debug_config: &revive_llvm_context::DebugConfig, + debug_config: &DebugConfig, ) -> anyhow::Result { - let files = match output.contracts.as_ref() { - Some(files) => files, - None => match &output.errors { - Some(errors) if errors.iter().any(|e| e.severity == "error") => { - anyhow::bail!(serde_json::to_string_pretty(errors).expect("Always valid")); - } - _ => &BTreeMap::new(), - }, - }; - let mut project_contracts = BTreeMap::new(); - - for (path, contracts) in files.iter() { - for (name, contract) in contracts.iter() { - let full_path = format!("{path}:{name}"); - - let ir_optimized = match contract.ir_optimized.to_owned() { - Some(ir_optimized) => ir_optimized, - None => continue, - }; - if ir_optimized.is_empty() { - continue; - } - - debug_config.dump_yul(full_path.as_str(), ir_optimized.as_str())?; - - let mut lexer = Lexer::new(ir_optimized.to_owned()); - let object = Object::parse(&mut lexer, None).map_err(|error| { - anyhow::anyhow!("Contract `{}` parsing error: {:?}", full_path, error) - })?; - - let source = IR::new_yul(ir_optimized.to_owned(), object); - - let source_code = source_code_files - .get(path.as_str()) - .ok_or_else(|| anyhow::anyhow!("Source code for path `{}` not found", path))?; - let source_hash = sha3::Keccak256::digest(source_code.as_bytes()).into(); - - let project_contract = Contract::new( - full_path.clone(), - source_hash, - solc_version.to_owned(), - source, - contract.metadata.to_owned(), - ); - project_contracts.insert(full_path, project_contract); + let mut input_contracts = Vec::with_capacity(solc_output.contracts.len()); + for (path, file) in solc_output.contracts.iter() { + for (name, contract) in file.iter() { + let name = ContractIdentifier::new((*path).to_owned(), 
Some((*name).to_owned())); + input_contracts.push((name, contract)); } } + #[cfg(feature = "parallel")] + let iter = input_contracts.into_par_iter(); + #[cfg(not(feature = "parallel"))] + let iter = input_contracts.into_iter(); + + let results = iter + .filter_map(|(name, contract)| { + let ir = match Yul::try_from_source(&contract.ir_optimized) + .map(|yul| yul.map(IR::from)) + { + Ok(ir) => ir?, + Err(error) => return Some((name.full_path, Err(error))), + }; + if let Err(error) = debug_config.dump_yul(&name.full_path, &contract.ir_optimized) { + return Some((name.full_path, Err(error))); + } + let contract = Contract::new(name.clone(), ir, contract.metadata.clone()); + Some((name.full_path, Ok(contract))) + }) + .collect::>>(); + + let mut contracts = BTreeMap::new(); + for (path, result) in results.into_iter() { + match result { + Ok(contract) => { + contracts.insert(path, contract); + } + Err(error) => solc_output.push_error(Some(path), error), + } + } Ok(Project::new( - solc_version.to_owned(), - project_contracts, + Some(solc_version.clone()), + contracts, libraries, )) } } - -impl revive_llvm_context::PolkaVMDependency for Project { - fn compile( - project: Self, - identifier: &str, - optimizer_settings: revive_llvm_context::OptimizerSettings, - include_metadata_hash: bool, - debug_config: revive_llvm_context::DebugConfig, - llvm_arguments: &[String], - memory_config: revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory, - ) -> anyhow::Result { - let contract_path = project.resolve_path(identifier)?; - let contract = project - .contracts - .get(contract_path.as_str()) - .cloned() - .ok_or_else(|| { - anyhow::anyhow!( - "Dependency contract `{}` not found in the project", - contract_path - ) - })?; - - contract - .compile( - project, - optimizer_settings, - include_metadata_hash, - debug_config, - llvm_arguments, - memory_config, - ) - .map_err(|error| { - anyhow::anyhow!( - "Dependency contract `{}` compiling error: {}", - identifier, - 
error - ) - }) - .map(|contract| contract.build.bytecode_hash) - } - - fn resolve_path(&self, identifier: &str) -> anyhow::Result { - self.identifier_paths - .get(identifier.strip_suffix("_deployed").unwrap_or(identifier)) - .cloned() - .ok_or_else(|| { - anyhow::anyhow!( - "Contract with identifier `{}` not found in the project", - identifier - ) - }) - } - - fn resolve_library(&self, path: &str) -> anyhow::Result { - for (file_path, contracts) in self.libraries.iter() { - for (contract_name, address) in contracts.iter() { - let key = format!("{file_path}:{contract_name}"); - if key.as_str() == path { - return Ok(address["0x".len()..].to_owned()); - } - } - } - - anyhow::bail!("Library `{}` not found in the project", path); - } -} diff --git a/crates/resolc/src/resolc/arguments.rs b/crates/resolc/src/resolc/arguments.rs index 5964606..58fc161 100644 --- a/crates/resolc/src/resolc/arguments.rs +++ b/crates/resolc/src/resolc/arguments.rs @@ -6,6 +6,8 @@ use std::path::PathBuf; use clap::Parser; use path_slash::PathExt; +use revive_common::MetadataHash; +use revive_solc_json_interface::SolcStandardJsonOutputError; /// Compiles the provided Solidity input files (or use the standard input if no files /// are given or "-" is specified as a file name). Outputs the components based on the @@ -58,10 +60,6 @@ pub struct Arguments { #[arg(short = 'O', long = "optimization")] pub optimization: Option, - /// Try to recompile with -Oz if the bytecode is too large. - #[arg(long = "fallback-Oz")] - pub fallback_to_optimizing_for_size: bool, - /// Disable the `solc` optimizer. /// Use it if your project uses the `MSIZE` instruction, or in other cases. /// Beware that it will prevent libraries from being inlined. @@ -92,7 +90,7 @@ pub struct Arguments { /// Switch to standard JSON input/output mode. Read from stdin, write the result to stdout. /// This is the default used by the Hardhat plugin. 
#[arg(long = "standard-json")] - pub standard_json: bool, + pub standard_json: Option>, /// Switch to missing deployable libraries detection mode. /// Only available for standard JSON input/output mode. @@ -106,17 +104,20 @@ pub struct Arguments { #[arg(long = "yul")] pub yul: bool, - /// Switch to LLVM IR mode. - /// Only one input LLVM IR file is allowed. - /// Cannot be used with combined and standard JSON modes. - /// Use this mode at your own risk, as LLVM IR input validation is not implemented. - #[arg(long = "llvm-ir")] - pub llvm_ir: bool, + /// Switch to linker mode, ignoring all options apart from `--libraries` and modify binaries in place. + /// + /// Unlinked contract binaries (caused by missing libraries or missing factory dependencies in turn) + /// are emitted as raw ELF objects. Use this mode to link them into PVM blobs. + /// + /// NOTE: Contracts must be present in the input files with the EXACT SAME directory structure as their source code, + /// otherwise this may fail to resolve factory dependencies. + #[arg(long)] + pub link: bool, - /// Set metadata hash mode. - /// The only supported value is `none` that disables appending the metadata hash. - /// Is enabled by default. - #[arg(long = "metadata-hash")] + /// Set the metadata hash type. + /// Available types: `none`, `ipfs`, `keccak256`. + /// The default is `keccak256`. + #[arg(long)] pub metadata_hash: Option, /// Output PolkaVM assembly of the contracts. @@ -127,6 +128,10 @@ pub struct Arguments { #[arg(long = "bin")] pub output_binary: bool, + /// Output metadata of the compiled project. + #[arg(long = "metadata")] + pub output_metadata: bool, + /// Suppress specified warnings. /// Available arguments: `ecrecover`, `sendtransfer`, `extcodesize`, `txorigin`, `blocktimestamp`, `blocknumber`, `blockhash`. #[arg(long = "suppress-warnings")] @@ -202,155 +207,206 @@ pub struct Arguments { impl Arguments { /// Validates the arguments. 
- pub fn validate(&self) -> anyhow::Result<()> { + pub fn validate(&self) -> Vec { + let mut messages = Vec::new(); + if self.version && std::env::args().count() > 2 { - anyhow::bail!("No other options are allowed while getting the compiler version."); + messages.push(SolcStandardJsonOutputError::new_error( + "No other options are allowed while getting the compiler version.", + None, + None, + )); } if self.supported_solc_versions && std::env::args().count() > 2 { - anyhow::bail!( - "No other options are allowed while getting the supported `solc` versions." - ); + messages.push(SolcStandardJsonOutputError::new_error( + "No other options are allowed while getting the supported `solc` version.", + None, + None, + )); } - #[cfg(debug_assertions)] - if self.recursive_process_input.is_some() && !self.recursive_process { - anyhow::bail!("--process-input can be only used when --recursive-process is given"); + if self.metadata_hash == Some(MetadataHash::IPFS.to_string()) { + messages.push(SolcStandardJsonOutputError::new_error( + "`IPFS` metadata hash type is not supported. 
Please use `keccak256` instead.", + None, + None, + )); } - #[cfg(debug_assertions)] - if self.recursive_process - && ((self.recursive_process_input.is_none() && std::env::args().count() > 2) - || (self.recursive_process_input.is_some() && std::env::args().count() > 4)) - { - anyhow::bail!("No other options are allowed in recursive mode."); - } - - #[cfg(not(debug_assertions))] - if self.recursive_process && std::env::args().count() > 2 { - anyhow::bail!("No other options are allowed in recursive mode."); - } - - let modes_count = [ + let modes = [ self.yul, - self.llvm_ir, self.combined_json.is_some(), - self.standard_json, + self.standard_json.is_some(), + self.link, ] .iter() .filter(|&&x| x) .count(); - if modes_count > 1 { - anyhow::bail!("Only one modes is allowed at the same time: Yul, LLVM IR, PolkaVM assembly, combined JSON, standard JSON."); + let acceptable_count = 1 + self.standard_json.is_some() as usize; + if modes > acceptable_count { + messages.push(SolcStandardJsonOutputError::new_error( + "Only one modes is allowed at the same time: Yul, LLVM IR, PolkaVM assembly, combined JSON, standard JSON.",None,None)); } - if self.yul || self.llvm_ir { + if self.yul && !self.libraries.is_empty() { + messages.push(SolcStandardJsonOutputError::new_error( + "Libraries are not supported in Yul and linker modes.", + None, + None, + )); + } + + if self.yul || self.link { if self.base_path.is_some() { - anyhow::bail!( - "`base-path` is not used in Yul, LLVM IR and PolkaVM assembly modes." - ); + messages.push(SolcStandardJsonOutputError::new_error( + "`base-path` is not used in Yul and linker modes.", + None, + None, + )); } if !self.include_paths.is_empty() { - anyhow::bail!( - "`include-paths` is not used in Yul, LLVM IR and PolkaVM assembly modes." 
- ); + messages.push(SolcStandardJsonOutputError::new_error( + "`include-paths` is not used in Yul and linker modes.", + None, + None, + )); } if self.allow_paths.is_some() { - anyhow::bail!( - "`allow-paths` is not used in Yul, LLVM IR and PolkaVM assembly modes." - ); + messages.push(SolcStandardJsonOutputError::new_error( + "`allow-paths` is not used in Yul and linker modes.", + None, + None, + )); } - if !self.libraries.is_empty() { - anyhow::bail!( - "Libraries are not supported in Yul, LLVM IR and PolkaVM assembly modes." - ); - } - if self.evm_version.is_some() { - anyhow::bail!( - "`evm-version` is not used in Yul, LLVM IR and PolkaVM assembly modes." - ); + messages.push(SolcStandardJsonOutputError::new_error( + "`evm-version` is not used in Yul and linker modes.", + None, + None, + )); } - if self.disable_solc_optimizer { - anyhow::bail!("Disabling the solc optimizer is not supported in Yul, LLVM IR and PolkaVM assembly modes."); + messages.push(SolcStandardJsonOutputError::new_error( + "Disabling the solc optimizer is not supported in Yul and linker modes.", + None, + None, + )); } } - if self.llvm_ir && self.solc.is_some() { - anyhow::bail!("`solc` is not used in LLVM IR and PolkaVM assembly modes."); - } - if self.combined_json.is_some() && (self.output_assembly || self.output_binary) { - anyhow::bail!( - "Cannot output assembly or binary outside of JSON in combined JSON mode." - ); + messages.push(SolcStandardJsonOutputError::new_error( + "Cannot output assembly or binary outside of JSON in combined JSON mode.", + None, + None, + )); } - if self.standard_json { + if self.standard_json.is_some() { if self.output_assembly || self.output_binary { - anyhow::bail!( - "Cannot output assembly or binary outside of JSON in standard JSON mode." 
- ); + messages.push(SolcStandardJsonOutputError::new_error( + "Cannot output assembly or binary outside of JSON in standard JSON mode.", + None, + None, + )); } if !self.inputs.is_empty() { - anyhow::bail!("Input files must be passed via standard JSON input."); + messages.push(SolcStandardJsonOutputError::new_error( + "Input files must be passed via standard JSON input.", + None, + None, + )); } if !self.libraries.is_empty() { - anyhow::bail!("Libraries must be passed via standard JSON input."); + messages.push(SolcStandardJsonOutputError::new_error( + "Libraries must be passed via standard JSON input.", + None, + None, + )); } if self.evm_version.is_some() { - anyhow::bail!("EVM version must be passed via standard JSON input."); + messages.push(SolcStandardJsonOutputError::new_error( + "EVM version must be passed via standard JSON input.", + None, + None, + )); } if self.output_directory.is_some() { - anyhow::bail!("Output directory cannot be used in standard JSON mode."); + messages.push(SolcStandardJsonOutputError::new_error( + "Output directory cannot be used in standard JSON mode.", + None, + None, + )); } if self.overwrite { - anyhow::bail!("Overwriting flag cannot be used in standard JSON mode."); + messages.push(SolcStandardJsonOutputError::new_error( + "Overwriting flag cannot be used in standard JSON mode.", + None, + None, + )); } if self.disable_solc_optimizer { - anyhow::bail!( - "Disabling the solc optimizer must specified in standard JSON input settings." - ); + messages.push(SolcStandardJsonOutputError::new_error( + "Disabling the solc optimizer must specified in standard JSON input settings.", + None, + None, + )); } if self.optimization.is_some() { - anyhow::bail!("LLVM optimizations must specified in standard JSON input settings."); - } - if self.fallback_to_optimizing_for_size { - anyhow::bail!( - "Falling back to -Oz must specified in standard JSON input settings." 
- ); + messages.push(SolcStandardJsonOutputError::new_error( + "LLVM optimizations must specified in standard JSON input settings.", + None, + None, + )); } if self.metadata_hash.is_some() { - anyhow::bail!("Metadata hash mode must specified in standard JSON input settings."); + messages.push(SolcStandardJsonOutputError::new_error( + "Metadata hash mode must specified in standard JSON input settings.", + None, + None, + )); } if self.heap_size.is_some() { - anyhow::bail!( - "Heap size must be specified in standard JSON input polkavm memory settings." - ); + messages.push(SolcStandardJsonOutputError::new_error( + "Heap size must be specified in standard JSON input polkavm memory settings.", + None, + None, + )); } if self.stack_size.is_some() { - anyhow::bail!( - "Stack size must be specified in standard JSON input polkavm memory settings." - ); + messages.push(SolcStandardJsonOutputError::new_error( + "Stack size must be specified in standard JSON input polkavm memory settings.", + None, + None, + )); } if self.emit_source_debug_info { - anyhow::bail!( - "Debug info must be requested in standard JSON input polkavm settings." - ); + messages.push(SolcStandardJsonOutputError::new_error( + "Debug info must be requested in standard JSON input polkavm settings.", + None, + None, + )); + } + if !self.llvm_arguments.is_empty() { + messages.push(SolcStandardJsonOutputError::new_error( + "LLVM arguments must be configured in standard JSON input polkavm settings.", + None, + None, + )); } } - Ok(()) + messages } /// Returns remappings from input paths. 
pub fn split_input_files_and_remappings( &self, - ) -> anyhow::Result<(Vec, Option>)> { + ) -> anyhow::Result<(Vec, BTreeSet)> { let mut input_files = Vec::with_capacity(self.inputs.len()); let mut remappings = BTreeSet::new(); @@ -367,7 +423,7 @@ impl Arguments { } if parts.len() != 2 { anyhow::bail!( - "Invalid remapping `{}`: expected two parts separated by '='", + "Invalid remapping `{}`: expected two parts separated by '='.", input ); } @@ -379,12 +435,6 @@ impl Arguments { } } - let remappings = if remappings.is_empty() { - None - } else { - Some(remappings) - }; - Ok((input_files, remappings)) } diff --git a/crates/resolc/src/resolc/main.rs b/crates/resolc/src/resolc/main.rs index ec3817f..114621c 100644 --- a/crates/resolc/src/resolc/main.rs +++ b/crates/resolc/src/resolc/main.rs @@ -1,36 +1,84 @@ //! Solidity to PolkaVM compiler binary. -pub mod arguments; - -use std::io::Write; use std::str::FromStr; +use std::{io::Write, path::PathBuf}; +use clap::error::ErrorKind; use resolc::Process; +use revive_common::{ + deserialize_from_str, EVMVersion, MetadataHash, EXIT_CODE_FAILURE, EXIT_CODE_SUCCESS, +}; +use revive_llvm_context::{initialize_llvm, DebugConfig, OptimizerSettings, PolkaVMTarget}; +use revive_solc_json_interface::{ + ResolcWarning, SolcStandardJsonInputSettingsPolkaVMMemory, + SolcStandardJsonInputSettingsSelection, SolcStandardJsonOutput, SolcStandardJsonOutputError, +}; use self::arguments::Arguments; -#[cfg(feature = "parallel")] -/// The rayon worker stack size. 
-const RAYON_WORKER_STACK_SIZE: usize = 16 * 1024 * 1024; +pub mod arguments; #[cfg(target_env = "musl")] #[global_allocator] static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; fn main() -> anyhow::Result<()> { - std::process::exit(match main_inner() { - Ok(()) => revive_common::EXIT_CODE_SUCCESS, - Err(error) => { - writeln!(std::io::stderr(), "{error}")?; - revive_common::EXIT_CODE_FAILURE + let arguments = ::try_parse().inspect_err(|error| { + if let ErrorKind::DisplayHelp = error.kind() { + let _ = error.print(); + std::process::exit(EXIT_CODE_SUCCESS); } - }) + })?; + + let is_standard_json = arguments.standard_json.is_some(); + let mut messages = arguments.validate(); + if messages.iter().all(|error| error.severity != "error") { + if !is_standard_json { + std::io::stderr() + .write_all( + messages + .drain(..) + .map(|error| error.to_string()) + .collect::>() + .join("\n") + .as_bytes(), + ) + .expect("Stderr writing error"); + } + if let Err(error) = main_inner(arguments, &mut messages) { + messages.push(SolcStandardJsonOutputError::new_error(error, None, None)); + } + } + + if is_standard_json { + let output = SolcStandardJsonOutput::new_with_messages(messages); + output.write_and_exit(SolcStandardJsonInputSettingsSelection::default()); + } + + std::io::stderr() + .write_all( + messages + .iter() + .map(|error| error.to_string()) + .collect::>() + .join("\n") + .as_bytes(), + ) + .expect("Stderr writing error"); + + std::process::exit( + if messages.iter().any(SolcStandardJsonOutputError::is_error) { + EXIT_CODE_FAILURE + } else { + EXIT_CODE_SUCCESS + }, + ); } -fn main_inner() -> anyhow::Result<()> { - let arguments = ::try_parse()?; - arguments.validate()?; - +fn main_inner( + arguments: Arguments, + messages: &mut Vec, +) -> anyhow::Result<()> { if arguments.version { writeln!( std::io::stdout(), @@ -53,59 +101,60 @@ fn main_inner() -> anyhow::Result<()> { #[cfg(feature = "parallel")] rayon::ThreadPoolBuilder::new() - 
.stack_size(RAYON_WORKER_STACK_SIZE) + .stack_size(resolc::RAYON_WORKER_STACK_SIZE) .build_global() .expect("Thread pool configuration failure"); if arguments.recursive_process { - #[cfg(debug_assertions)] - if let Some(fname) = arguments.recursive_process_input { - let mut infile = std::fs::File::open(fname)?; - #[cfg(target_os = "emscripten")] - { - return resolc::WorkerProcess::run(Some(&mut infile)); - } - #[cfg(not(target_os = "emscripten"))] - { - return resolc::NativeProcess::run(Some(&mut infile)); - } - } + let input_json = std::io::read_to_string(std::io::stdin()) + .map_err(|error| anyhow::anyhow!("Stdin reading error: {error}"))?; + let input: resolc::ProcessInput = deserialize_from_str(input_json.as_str()) + .map_err(|error| anyhow::anyhow!("Stdin parsing error: {error}"))?; + + initialize_llvm( + PolkaVMTarget::PVM, + resolc::DEFAULT_EXECUTABLE_NAME, + &input.llvm_arguments, + ); + #[cfg(target_os = "emscripten")] { - return resolc::WorkerProcess::run(None); + return resolc::WorkerProcess::run(input); } #[cfg(not(target_os = "emscripten"))] { - return resolc::NativeProcess::run(None); + return resolc::NativeProcess::run(input); } } + initialize_llvm( + PolkaVMTarget::PVM, + resolc::DEFAULT_EXECUTABLE_NAME, + &arguments.llvm_arguments, + ); + let debug_config = match arguments.debug_output_directory { Some(ref debug_output_directory) => { std::fs::create_dir_all(debug_output_directory.as_path())?; - revive_llvm_context::DebugConfig::new( + DebugConfig::new( Some(debug_output_directory.to_owned()), arguments.emit_source_debug_info, ) } - None => revive_llvm_context::DebugConfig::new(None, arguments.emit_source_debug_info), + None => DebugConfig::new(None, arguments.emit_source_debug_info), }; let (input_files, remappings) = arguments.split_input_files_and_remappings()?; - let suppressed_warnings = match arguments.suppress_warnings { - Some(warnings) => Some(revive_solc_json_interface::ResolcWarning::try_from_strings( - warnings.as_slice(), - )?), - None 
=> None, - }; + let suppressed_warnings = ResolcWarning::try_from_strings( + arguments.suppress_warnings.unwrap_or_default().as_slice(), + )?; - let mut solc = { + let solc = { #[cfg(target_os = "emscripten")] { - resolc::SoljsonCompiler + resolc::SoljsonCompiler {} } - #[cfg(not(target_os = "emscripten"))] { resolc::SolcCompiler::new( @@ -117,76 +166,61 @@ fn main_inner() -> anyhow::Result<()> { }; let evm_version = match arguments.evm_version { - Some(evm_version) => Some(revive_common::EVMVersion::try_from(evm_version.as_str())?), + Some(evm_version) => Some(EVMVersion::try_from(evm_version.as_str())?), None => None, }; let mut optimizer_settings = match arguments.optimization { - Some(mode) => revive_llvm_context::OptimizerSettings::try_from_cli(mode)?, - None => revive_llvm_context::OptimizerSettings::size(), + Some(mode) => OptimizerSettings::try_from_cli(mode)?, + None => OptimizerSettings::size(), }; - if arguments.fallback_to_optimizing_for_size { - optimizer_settings.enable_fallback_to_size(); - } optimizer_settings.is_verify_each_enabled = arguments.llvm_verify_each; optimizer_settings.is_debug_logging_enabled = arguments.llvm_debug_logging; - let include_metadata_hash = match arguments.metadata_hash { - Some(metadata_hash) => { - let metadata = - revive_solc_json_interface::SolcStandardJsonInputSettingsMetadataHash::from_str( - metadata_hash.as_str(), - )?; - metadata != revive_solc_json_interface::SolcStandardJsonInputSettingsMetadataHash::None - } - None => true, + let metadata_hash = match arguments.metadata_hash { + Some(ref hash_type) => MetadataHash::from_str(hash_type.as_str())?, + None => MetadataHash::Keccak256, }; - let memory_config = revive_solc_json_interface::SolcStandardJsonInputSettingsPolkaVMMemory::new( - arguments.heap_size, - arguments.stack_size, - ); + let memory_config = + SolcStandardJsonInputSettingsPolkaVMMemory::new(arguments.heap_size, arguments.stack_size); let build = if arguments.yul { resolc::yul( + &solc, 
input_files.as_slice(), - &mut solc, + arguments.libraries.as_slice(), + metadata_hash, + messages, optimizer_settings, - include_metadata_hash, debug_config, &arguments.llvm_arguments, memory_config, ) - } else if arguments.llvm_ir { - resolc::llvm_ir( - input_files.as_slice(), - optimizer_settings, - include_metadata_hash, - debug_config, - &arguments.llvm_arguments, - memory_config, - ) - } else if arguments.standard_json { + } else if let Some(standard_json) = arguments.standard_json { resolc::standard_json( - &mut solc, - arguments.detect_missing_libraries, + &solc, + metadata_hash, + messages, + standard_json.map(PathBuf::from), arguments.base_path, arguments.include_paths, arguments.allow_paths, debug_config, - &arguments.llvm_arguments, + arguments.detect_missing_libraries, )?; return Ok(()); } else if let Some(format) = arguments.combined_json { resolc::combined_json( - format, + &solc, input_files.as_slice(), - arguments.libraries, - &mut solc, + arguments.libraries.as_slice(), + metadata_hash, + messages, evm_version, + format, !arguments.disable_solc_optimizer, optimizer_settings, - include_metadata_hash, arguments.base_path, arguments.include_paths, arguments.allow_paths, @@ -195,67 +229,46 @@ fn main_inner() -> anyhow::Result<()> { debug_config, arguments.output_directory, arguments.overwrite, - &arguments.llvm_arguments, + arguments.llvm_arguments, memory_config, )?; return Ok(()); + } else if arguments.link { + return resolc::link(arguments.inputs, arguments.libraries); } else { resolc::standard_output( + &solc, input_files.as_slice(), - arguments.libraries, - &mut solc, + arguments.libraries.as_slice(), + metadata_hash, + messages, evm_version, !arguments.disable_solc_optimizer, optimizer_settings, - include_metadata_hash, arguments.base_path, arguments.include_paths, arguments.allow_paths, remappings, suppressed_warnings, debug_config, - &arguments.llvm_arguments, + arguments.llvm_arguments, memory_config, ) }?; if let Some(output_directory) = 
arguments.output_directory { - std::fs::create_dir_all(&output_directory)?; - build.write_to_directory( &output_directory, + arguments.output_metadata, arguments.output_assembly, arguments.output_binary, arguments.overwrite, )?; - - writeln!( - std::io::stderr(), - "Compiler run successful. Artifact(s) can be found in directory {output_directory:?}." - )?; - } else if arguments.output_assembly || arguments.output_binary { - for (path, contract) in build.contracts.into_iter() { - if arguments.output_assembly { - let assembly_text = contract.build.assembly_text; - - writeln!( - std::io::stdout(), - "Contract `{path}` assembly:\n\n{assembly_text}" - )?; - } - if arguments.output_binary { - writeln!( - std::io::stdout(), - "Contract `{}` bytecode: 0x{}", - path, - hex::encode(contract.build.bytecode) - )?; - } - } } else { - writeln!( - std::io::stderr(), - "Compiler run successful. No output requested. Use --asm and --bin flags." + build.write_to_terminal( + arguments.output_metadata, + arguments.output_assembly, + arguments.output_binary, )?; } diff --git a/crates/resolc/src/solc/mod.rs b/crates/resolc/src/solc/mod.rs index f54d940..b27a982 100644 --- a/crates/resolc/src/solc/mod.rs +++ b/crates/resolc/src/solc/mod.rs @@ -1,35 +1,37 @@ //! The Solidity compiler. 
+use std::collections::HashSet; +use std::path::PathBuf; + +use revive_solc_json_interface::combined_json::CombinedJson; +use revive_solc_json_interface::CombinedJsonSelector; +use revive_solc_json_interface::SolcStandardJsonInput; +use revive_solc_json_interface::SolcStandardJsonInputSettingsLibraries; +use revive_solc_json_interface::SolcStandardJsonInputSettingsSelection; +use revive_solc_json_interface::SolcStandardJsonOutput; +use revive_solc_json_interface::SolcStandardJsonOutputError; + +use self::version::Version; + #[cfg(not(target_os = "emscripten"))] pub mod solc_compiler; #[cfg(target_os = "emscripten")] pub mod soljson_compiler; pub mod version; -use std::path::Path; -use std::path::PathBuf; - -use revive_solc_json_interface::combined_json::CombinedJson; -use revive_solc_json_interface::SolcStandardJsonInput; -use revive_solc_json_interface::SolcStandardJsonOutput; - -use self::version::Version; - /// The first version of `solc` with the support of standard JSON interface. pub const FIRST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 8, 0); /// The last supported version of `solc`. pub const LAST_SUPPORTED_VERSION: semver::Version = semver::Version::new(0, 8, 30); -/// `--include-path` was introduced in solc `0.8.8` -pub const FIRST_INCLUDE_PATH_VERSION: semver::Version = semver::Version::new(0, 8, 8); - /// The Solidity compiler. pub trait Compiler { /// Compiles the Solidity `--standard-json` input into Yul IR. fn standard_json( - &mut self, - input: SolcStandardJsonInput, + &self, + input: &mut SolcStandardJsonInput, + messages: &mut Vec, base_path: Option, include_paths: Vec, allow_paths: Option, @@ -39,12 +41,32 @@ pub trait Compiler { fn combined_json( &self, paths: &[PathBuf], - combined_json_argument: &str, + selectors: HashSet, ) -> anyhow::Result; - /// The `solc` Yul validator. - fn validate_yul(&self, path: &Path) -> anyhow::Result<()>; + /// Validates the Yul project as paths and libraries. 
+ fn validate_yul_paths( + &self, + paths: &[PathBuf], + libraries: SolcStandardJsonInputSettingsLibraries, + messages: &mut Vec, + ) -> anyhow::Result { + let mut solc_input = + SolcStandardJsonInput::from_yul_paths(paths, libraries, Default::default(), vec![]); + self.validate_yul_standard_json(&mut solc_input, messages) + } + + /// Validates the Yul project as standard JSON input. + fn validate_yul_standard_json( + &self, + solc_input: &mut SolcStandardJsonInput, + messages: &mut Vec, + ) -> anyhow::Result { + solc_input.extend_selection(SolcStandardJsonInputSettingsSelection::new_yul_validation()); + let solc_output = self.standard_json(solc_input, messages, None, vec![], None)?; + Ok(solc_output) + } /// The `solc --version` mini-parser. - fn version(&mut self) -> anyhow::Result; + fn version(&self) -> anyhow::Result; } diff --git a/crates/resolc/src/solc/solc_compiler.rs b/crates/resolc/src/solc/solc_compiler.rs index 39ad2c7..951041e 100644 --- a/crates/resolc/src/solc/solc_compiler.rs +++ b/crates/resolc/src/solc/solc_compiler.rs @@ -1,12 +1,15 @@ //! The Solidity compiler solc interface. +use std::collections::HashSet; use std::io::Write; -use std::path::Path; use std::path::PathBuf; +use revive_common::deserialize_from_slice; use revive_solc_json_interface::combined_json::CombinedJson; +use revive_solc_json_interface::CombinedJsonSelector; use revive_solc_json_interface::SolcStandardJsonInput; use revive_solc_json_interface::SolcStandardJsonOutput; +use revive_solc_json_interface::SolcStandardJsonOutputError; use crate::solc::version::Version; @@ -39,8 +42,9 @@ impl SolcCompiler { impl Compiler for SolcCompiler { /// Compiles the Solidity `--standard-json` input into Yul IR. 
fn standard_json( - &mut self, - mut input: SolcStandardJsonInput, + &self, + input: &mut SolcStandardJsonInput, + messages: &mut Vec, base_path: Option, include_paths: Vec, allow_paths: Option, @@ -63,10 +67,6 @@ impl Compiler for SolcCompiler { command.arg(allow_paths); } - input.normalize(); - - let suppressed_warnings = input.suppressed_warnings.take().unwrap_or_default(); - let input_json = serde_json::to_vec(&input).expect("Always valid"); let process = command.spawn().map_err(|error| { @@ -92,22 +92,32 @@ impl Compiler for SolcCompiler { ); } - let mut output: SolcStandardJsonOutput = - revive_common::deserialize_from_slice(output.stdout.as_slice()).map_err(|error| { - anyhow::anyhow!( - "{} subprocess output parsing error: {}\n{}", - self.executable, - error, - revive_common::deserialize_from_slice::( - output.stdout.as_slice() - ) + let mut output: SolcStandardJsonOutput = deserialize_from_slice(output.stdout.as_slice()) + .map_err(|error| { + anyhow::anyhow!( + "{} subprocess output parsing error: {}\n{}", + self.executable, + error, + deserialize_from_slice::(output.stdout.as_slice()) .map(|json| serde_json::to_string_pretty(&json).expect("Always valid")) .unwrap_or_else( |_| String::from_utf8_lossy(output.stdout.as_slice()).to_string() ), - ) - })?; - output.preprocess_ast(suppressed_warnings.as_slice())?; + ) + })?; + output + .errors + .retain(|error| match error.error_code.as_deref() { + Some(code) => !SolcStandardJsonOutputError::IGNORED_WARNING_CODES.contains(&code), + None => true, + }); + output.errors.append(messages); + + let mut suppressed_warnings = input.suppressed_warnings.clone(); + suppressed_warnings.extend_from_slice(input.settings.suppressed_warnings.as_slice()); + + input.resolve_sources(); + output.preprocess_ast(&input.sources, &suppressed_warnings)?; Ok(output) } @@ -116,104 +126,58 @@ impl Compiler for SolcCompiler { fn combined_json( &self, paths: &[PathBuf], - combined_json_argument: &str, + mut selectors: HashSet, ) -> 
anyhow::Result { - let mut command = std::process::Command::new(self.executable.as_str()); + selectors.retain(|selector| selector.is_source_solc()); + if selectors.is_empty() { + let version = &self.version()?.default; + return Ok(CombinedJson::new(version.to_owned(), None)); + } + + let executable = self.executable.to_owned(); + + let mut command = std::process::Command::new(executable.as_str()); + command.stdout(std::process::Stdio::piped()); + command.stderr(std::process::Stdio::piped()); command.args(paths); - - let mut combined_json_flags = Vec::new(); - let mut combined_json_fake_flag_pushed = false; - let mut filtered_flags = Vec::with_capacity(3); - for flag in combined_json_argument.split(',') { - match flag { - flag @ "asm" | flag @ "bin" | flag @ "bin-runtime" => filtered_flags.push(flag), - flag => combined_json_flags.push(flag), - } - } - if combined_json_flags.is_empty() { - combined_json_flags.push("ast"); - combined_json_fake_flag_pushed = true; - } command.arg("--combined-json"); - command.arg(combined_json_flags.join(",")); + command.arg( + selectors + .into_iter() + .map(|selector| selector.to_string()) + .collect::>() + .join(","), + ); - let output = command.output().map_err(|error| { - anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error) + let process = command + .spawn() + .map_err(|error| anyhow::anyhow!("{executable} subprocess spawning: {error:?}"))?; + + let result = process.wait_with_output().map_err(|error| { + anyhow::anyhow!("{} subprocess output reading: {error:?}", self.executable) })?; - if !output.status.success() { - writeln!( - std::io::stdout(), - "{}", - String::from_utf8_lossy(output.stdout.as_slice()) - )?; - writeln!( - std::io::stdout(), - "{}", - String::from_utf8_lossy(output.stderr.as_slice()) - )?; + + if !result.status.success() { anyhow::bail!( - "{} error: {}", + "{} subprocess failed with exit code {:?}:\n{}\n{}", self.executable, - String::from_utf8_lossy(output.stdout.as_slice()).to_string() + 
result.status.code(), + String::from_utf8_lossy(result.stdout.as_slice()), + String::from_utf8_lossy(result.stderr.as_slice()), ); } - let mut combined_json: CombinedJson = - revive_common::deserialize_from_slice(output.stdout.as_slice()).map_err(|error| { - anyhow::anyhow!( - "{} subprocess output parsing error: {}\n{}", - self.executable, - error, - revive_common::deserialize_from_slice::( - output.stdout.as_slice() - ) - .map(|json| serde_json::to_string_pretty(&json).expect("Always valid")) - .unwrap_or_else( - |_| String::from_utf8_lossy(output.stdout.as_slice()).to_string() - ), - ) - })?; - for filtered_flag in filtered_flags.into_iter() { - for (_path, contract) in combined_json.contracts.iter_mut() { - match filtered_flag { - "asm" => contract.asm = Some(serde_json::Value::Null), - "bin" => contract.bin = Some("".to_owned()), - "bin-runtime" => contract.bin_runtime = Some("".to_owned()), - _ => continue, - } - } - } - if combined_json_fake_flag_pushed { - combined_json.source_list = None; - combined_json.sources = None; - } - combined_json.remove_evm(); - - Ok(combined_json) - } - - /// The `solc` Yul validator. - fn validate_yul(&self, path: &Path) -> anyhow::Result<()> { - let mut command = std::process::Command::new(self.executable.as_str()); - command.arg("--strict-assembly"); - command.arg(path); - - let output = command.output().map_err(|error| { - anyhow::anyhow!("{} subprocess error: {:?}", self.executable, error) - })?; - if !output.status.success() { - anyhow::bail!( - "{} error: {}", + deserialize_from_slice::(result.stdout.as_slice()).map_err(|error| { + anyhow::anyhow!( + "{} subprocess stdout parsing: {error:?} (stderr: {})", self.executable, - String::from_utf8_lossy(output.stderr.as_slice()).to_string() - ); - } - - Ok(()) + String::from_utf8_lossy(result.stderr.as_slice()), + ) + }) } /// The `solc --version` mini-parser. 
- fn version(&mut self) -> anyhow::Result { + fn version(&self) -> anyhow::Result { let mut command = std::process::Command::new(self.executable.as_str()); command.arg("--version"); let output = command.output().map_err(|error| { @@ -252,13 +216,6 @@ impl Compiler for SolcCompiler { .parse() .map_err(|error| anyhow::anyhow!("{} version parsing: {}", self.executable, error))?; - let l2_revision: Option = stdout - .lines() - .nth(2) - .and_then(|line| line.split(' ').nth(1)) - .and_then(|line| line.split('-').nth(1)) - .and_then(|version| version.parse().ok()); - - Ok(Version::new(long, default, l2_revision)) + Version::new(long, default).validate() } } diff --git a/crates/resolc/src/solc/soljson_compiler.rs b/crates/resolc/src/solc/soljson_compiler.rs index 39afd76..0d0a626 100644 --- a/crates/resolc/src/solc/soljson_compiler.rs +++ b/crates/resolc/src/solc/soljson_compiler.rs @@ -1,11 +1,14 @@ //! The Solidity compiler solJson interface. -use std::path::Path; +use std::collections::HashSet; use std::path::PathBuf; +use revive_common::deserialize_from_slice; use revive_solc_json_interface::combined_json::CombinedJson; +use revive_solc_json_interface::CombinedJsonSelector; use revive_solc_json_interface::SolcStandardJsonInput; use revive_solc_json_interface::SolcStandardJsonOutput; +use revive_solc_json_interface::SolcStandardJsonOutputError; use crate::solc::version::Version; use anyhow::Context; @@ -24,8 +27,9 @@ pub struct SoljsonCompiler; impl Compiler for SoljsonCompiler { /// Compiles the Solidity `--standard-json` input into Yul IR. 
fn standard_json( - &mut self, - mut input: SolcStandardJsonInput, + &self, + input: &mut SolcStandardJsonInput, + messages: &mut Vec, base_path: Option, include_paths: Vec, allow_paths: Option, @@ -40,23 +44,31 @@ impl Compiler for SoljsonCompiler { anyhow::bail!("configuring allow paths is not supported with solJson") } - input.normalize(); - - let suppressed_warnings = input.suppressed_warnings.take().unwrap_or_default(); - let input_json = serde_json::to_string(&input).expect("Always valid"); let out = Self::compile_standard_json(input_json)?; let mut output: SolcStandardJsonOutput = - revive_common::deserialize_from_slice(out.as_bytes()).map_err(|error| { + deserialize_from_slice(out.as_bytes()).map_err(|error| { anyhow::anyhow!( "Soljson output parsing error: {}\n{}", error, - revive_common::deserialize_from_slice::(out.as_bytes()) + deserialize_from_slice::(out.as_bytes()) .map(|json| serde_json::to_string_pretty(&json).expect("Always valid")) .unwrap_or_else(|_| String::from_utf8_lossy(out.as_bytes()).to_string()), ) })?; - output.preprocess_ast(suppressed_warnings.as_slice())?; + output + .errors + .retain(|error| match error.error_code.as_deref() { + Some(code) => !SolcStandardJsonOutputError::IGNORED_WARNING_CODES.contains(&code), + None => true, + }); + output.errors.append(messages); + + let mut suppressed_warnings = input.suppressed_warnings.clone(); + suppressed_warnings.extend_from_slice(input.settings.suppressed_warnings.as_slice()); + + input.resolve_sources(); + output.preprocess_ast(&input.sources, &suppressed_warnings)?; Ok(output) } @@ -64,16 +76,12 @@ impl Compiler for SoljsonCompiler { fn combined_json( &self, _paths: &[PathBuf], - _combined_json_argument: &str, + _selector: HashSet, ) -> anyhow::Result { unimplemented!(); } - fn validate_yul(&self, _path: &Path) -> anyhow::Result<()> { - unimplemented!(); - } - - fn version(&mut self) -> anyhow::Result { + fn version(&self) -> anyhow::Result { let version = Self::get_soljson_version()?; let 
long = version.clone(); let default: semver::Version = version @@ -82,11 +90,7 @@ impl Compiler for SoljsonCompiler { .ok_or_else(|| anyhow::anyhow!("Soljson version parsing: metadata dropping"))? .parse() .map_err(|error| anyhow::anyhow!("Soljson version parsing: {}", error))?; - let l2_revision: Option = version - .split('-') - .nth(1) - .and_then(|version| version.parse().ok()); - Ok(Version::new(long, default, l2_revision)) + Version::new(long, default).validate() } } diff --git a/crates/resolc/src/solc/version.rs b/crates/resolc/src/solc/version.rs index e1ab041..77e2051 100644 --- a/crates/resolc/src/solc/version.rs +++ b/crates/resolc/src/solc/version.rs @@ -10,34 +10,16 @@ pub struct Version { pub long: String, /// The short `semver`. pub default: semver::Version, - /// The L2 revision additional versioning. - pub l2_revision: Option, } impl Version { /// A shortcut constructor. - pub fn new( - long: String, - default: semver::Version, - l2_revision: Option, - ) -> Self { - Self { - long, - default, - l2_revision, - } + pub fn new(long: String, default: semver::Version) -> Self { + Self { long, default } } - /// A shortcut constructor for a simple version. - pub fn new_simple(version: semver::Version) -> Self { - Self { - long: version.to_string(), - default: version, - l2_revision: None, - } - } - - pub fn validate(self, include_paths: &[String]) -> anyhow::Result { + /// Returns an error if an unsupported version is detected. 
+ pub fn validate(self) -> anyhow::Result { if self.default < super::FIRST_SUPPORTED_VERSION { anyhow::bail!( "`solc` versions <{} are not supported, found {}", @@ -52,10 +34,6 @@ impl Version { self.default ); } - if !include_paths.is_empty() && self.default < super::FIRST_INCLUDE_PATH_VERSION { - anyhow::bail!("--include-path is not supported in solc {}", self.default); - } - Ok(self) } } diff --git a/crates/resolc/src/test_utils.rs b/crates/resolc/src/test_utils.rs index 299ead8..5668b3c 100644 --- a/crates/resolc/src/test_utils.rs +++ b/crates/resolc/src/test_utils.rs @@ -1,19 +1,29 @@ -//! Common utility used for in frontend and integration tests. +//! Common helper utilities used in tests and benchmarks. + use std::collections::BTreeMap; use std::collections::BTreeSet; use std::collections::HashMap; +use std::fmt::Display; use std::path::PathBuf; use std::sync::Mutex; use once_cell::sync::Lazy; +use revive_common::MetadataHash; +use revive_llvm_context::initialize_llvm; +use revive_llvm_context::DebugConfig; use revive_llvm_context::OptimizerSettings; +use revive_llvm_context::PolkaVMTarget; use revive_solc_json_interface::standard_json::output::contract::evm::bytecode::Bytecode; use revive_solc_json_interface::standard_json::output::contract::evm::bytecode::DeployedBytecode; -use revive_solc_json_interface::warning::Warning; +use revive_solc_json_interface::ResolcWarning; use revive_solc_json_interface::SolcStandardJsonInput; +use revive_solc_json_interface::SolcStandardJsonInputSettingsLibraries; +use revive_solc_json_interface::SolcStandardJsonInputSettingsMetadata; use revive_solc_json_interface::SolcStandardJsonInputSettingsOptimizer; use revive_solc_json_interface::SolcStandardJsonInputSettingsSelection; +use revive_solc_json_interface::SolcStandardJsonInputSource; use revive_solc_json_interface::SolcStandardJsonOutput; +use revive_solc_json_interface::SolcStandardJsonOutputErrorHandler; use crate::project::Project; use 
crate::solc::solc_compiler::SolcCompiler; @@ -24,17 +34,373 @@ static EVM_BLOB_CACHE: Lazy>>> = Lazy::new(Def static EVM_RUNTIME_BLOB_CACHE: Lazy>>> = Lazy::new(Default::default); -const DEBUG_CONFIG: revive_llvm_context::DebugConfig = - revive_llvm_context::DebugConfig::new(None, true); +const DEBUG_CONFIG: revive_llvm_context::DebugConfig = DebugConfig::new(None, true); +/// Tests may share and re-use contract code. +/// The compiled blob cache helps avoid duplicate compilation. #[derive(Hash, PartialEq, Eq)] struct CachedBlob { + /// The contract name. contract_name: String, - solidity: String, + /// Whether the solc optimizer is enabled. solc_optimizer_enabled: bool, + /// The contract code. + solidity: String, + /// The optimization level. opt: String, } +/// Builds the Solidity project and returns the standard JSON output. +pub fn build_solidity( + sources: BTreeMap, +) -> anyhow::Result { + build_solidity_with_options( + sources, + Default::default(), + Default::default(), + OptimizerSettings::cycles(), + true, + Default::default(), + ) +} + +/// Builds the Solidity project and returns the standard JSON output. 
+pub fn build_solidity_with_options( + sources: BTreeMap, + libraries: SolcStandardJsonInputSettingsLibraries, + remappings: BTreeSet, + optimizer_settings: OptimizerSettings, + solc_optimizer_enabled: bool, + suppressed_warnings: Vec, +) -> anyhow::Result { + check_dependencies(); + inkwell::support::enable_llvm_pretty_stack_trace(); + initialize_llvm(PolkaVMTarget::PVM, crate::DEFAULT_EXECUTABLE_NAME, &[]); + + let _ = crate::process::native_process::EXECUTABLE + .set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME)); + + let solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?; + let solc_version = solc.version()?; + + let mut input = SolcStandardJsonInput::try_from_solidity_sources( + None, + sources.clone(), + libraries.clone(), + remappings, + SolcStandardJsonInputSettingsSelection::new_required_for_tests(), + SolcStandardJsonInputSettingsOptimizer::new( + solc_optimizer_enabled, + optimizer_settings + .middle_end_as_string() + .chars() + .last() + .unwrap(), + Default::default(), + ), + SolcStandardJsonInputSettingsMetadata::default(), + suppressed_warnings, + Default::default(), + Default::default(), + false, + )?; + + let mut output = solc.standard_json(&mut input, &mut vec![], None, vec![], None)?; + if output.has_errors() { + return Ok(output); + } + let debug_config = DebugConfig::new(None, optimizer_settings.middle_end_as_string() != "z"); + let linker_symbols = libraries.as_linker_symbols()?; + let build = Project::try_from_standard_json_output( + &mut output, + libraries, + &solc_version, + &debug_config, + )? 
+ .compile( + &mut vec![], + optimizer_settings, + MetadataHash::Keccak256, + &debug_config, + Default::default(), + Default::default(), + )?; + build.check_errors()?; + + let build = build.link(linker_symbols, &debug_config); + build.check_errors()?; + build.write_to_standard_json(&mut output, &solc_version)?; + output.check_errors()?; + + Ok(output) +} + +/// Build a Solidity contract and get the EVM code +pub fn build_solidity_with_options_evm( + sources: BTreeMap, + libraries: SolcStandardJsonInputSettingsLibraries, + remappings: BTreeSet, + solc_optimizer_enabled: bool, +) -> anyhow::Result> { + check_dependencies(); + inkwell::support::enable_llvm_pretty_stack_trace(); + initialize_llvm(PolkaVMTarget::PVM, crate::DEFAULT_EXECUTABLE_NAME, &[]); + let _ = crate::process::native_process::EXECUTABLE + .set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME)); + + let solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?; + let mut input = SolcStandardJsonInput::try_from_solidity_sources( + None, + sources.clone(), + libraries.clone(), + remappings, + SolcStandardJsonInputSettingsSelection::new_required_for_tests(), + SolcStandardJsonInputSettingsOptimizer::new( + solc_optimizer_enabled, + Default::default(), + Default::default(), + ), + SolcStandardJsonInputSettingsMetadata::default(), + Default::default(), + Default::default(), + Default::default(), + false, + )?; + + let mut contracts = BTreeMap::new(); + for files in solc + .standard_json(&mut input, &mut vec![], None, vec![], None)? + .contracts + { + for (name, contract) in files.1 { + if let Some(evm) = contract.evm { + let (Some(bytecode), Some(deployed_bytecode)) = + (evm.bytecode.as_ref(), evm.deployed_bytecode.as_ref()) + else { + continue; + }; + contracts.insert(name.clone(), (bytecode.clone(), deployed_bytecode.clone())); + } + } + } + + Ok(contracts) +} + +/// Builds the Solidity project and returns the standard JSON output. 
+pub fn build_solidity_and_detect_missing_libraries( + sources: &[(T, T)], + libraries: SolcStandardJsonInputSettingsLibraries, +) -> anyhow::Result { + check_dependencies(); + + let deployed_libraries = libraries.as_paths(); + let sources = BTreeMap::from_iter( + sources + .iter() + .map(|(path, code)| (path.to_string(), code.to_string().into())), + ); + + inkwell::support::enable_llvm_pretty_stack_trace(); + initialize_llvm(PolkaVMTarget::PVM, crate::DEFAULT_EXECUTABLE_NAME, &[]); + let _ = crate::process::native_process::EXECUTABLE + .set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME)); + + let solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?; + let solc_version = solc.version()?; + let mut input = SolcStandardJsonInput::try_from_solidity_sources( + None, + sources.clone(), + libraries.clone(), + Default::default(), + SolcStandardJsonInputSettingsSelection::new_required_for_tests(), + SolcStandardJsonInputSettingsOptimizer::default(), + SolcStandardJsonInputSettingsMetadata::default(), + Default::default(), + Default::default(), + Default::default(), + true, + )?; + + let mut output = solc.standard_json(&mut input, &mut vec![], None, vec![], None)?; + if output.has_errors() { + return Ok(output); + } + + let project = Project::try_from_standard_json_output( + &mut output, + libraries, + &solc_version, + &DEBUG_CONFIG, + )?; + + let missing_libraries = project.get_missing_libraries(&deployed_libraries); + missing_libraries.write_to_standard_json(&mut output, &solc.version()?); + + Ok(output) +} + +/// Checks if the Yul project can be built without errors. 
+pub fn build_yul(sources: &[(T, T)]) -> anyhow::Result<()> { + check_dependencies(); + + inkwell::support::enable_llvm_pretty_stack_trace(); + initialize_llvm(PolkaVMTarget::PVM, crate::DEFAULT_EXECUTABLE_NAME, &[]); + let optimizer_settings = OptimizerSettings::none(); + + let sources = sources + .iter() + .map(|(path, source)| { + ( + path.to_string(), + SolcStandardJsonInputSource::from(source.to_string()), + ) + }) + .collect(); + let mut output = SolcStandardJsonOutput::new(&sources, &mut vec![]); + + let project = Project::try_from_yul_sources( + sources, + Default::default(), + Some(&mut output), + &Default::default(), + )?; + let build = project.compile( + &mut vec![], + optimizer_settings, + MetadataHash::None, + &DEBUG_CONFIG, + Default::default(), + Default::default(), + )?; + build.check_errors()?; + + let build = build.link(BTreeMap::new(), &DEBUG_CONFIG); + build.check_errors()?; + + let solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?; + build.write_to_standard_json(&mut output, &solc.version()?)?; + output.check_errors()?; + Ok(()) +} + +/// Builds the Yul standard JSON and returns the standard JSON output. +pub fn build_yul_standard_json( + mut solc_input: SolcStandardJsonInput, +) -> anyhow::Result { + check_dependencies(); + inkwell::support::enable_llvm_pretty_stack_trace(); + initialize_llvm(PolkaVMTarget::PVM, crate::DEFAULT_EXECUTABLE_NAME, &[]); + + let solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?; + let mut output = solc.validate_yul_standard_json(&mut solc_input, &mut vec![])?; + let build = Project::try_from_yul_sources( + solc_input.sources, + Default::default(), + Some(&mut output), + &Default::default(), + )? 
+ .compile( + &mut vec![], + OptimizerSettings::try_from_cli(solc_input.settings.optimizer.mode)?, + MetadataHash::Keccak256, + &DEBUG_CONFIG, + Default::default(), + Default::default(), + )?; + build.check_errors()?; + + let build = build.link(Default::default(), &Default::default()); + build.check_errors()?; + build.write_to_standard_json(&mut output, &solc.version()?)?; + + output.check_errors()?; + Ok(output) +} + +/// Compile the blob of `contract_name` found in given `source_code`. +/// The `solc` optimizer will be enabled +pub fn compile_blob(contract_name: &str, source_code: &str) -> Vec { + compile_blob_with_options( + contract_name, + source_code, + true, + OptimizerSettings::cycles(), + ) +} + +/// Compile the blob of `contract_name` found in given `source_code`. +pub fn compile_blob_with_options( + contract_name: &str, + source_code: &str, + solc_optimizer_enabled: bool, + optimizer_settings: OptimizerSettings, +) -> Vec { + let id = CachedBlob { + contract_name: contract_name.to_owned(), + opt: optimizer_settings.middle_end_as_string(), + solc_optimizer_enabled, + solidity: source_code.to_owned(), + }; + + if let Some(blob) = PVM_BLOB_CACHE.lock().unwrap().get(&id) { + return blob.clone(); + } + + let file_name = "contract.sol"; + let contracts = build_solidity_with_options( + BTreeMap::from([( + file_name.to_owned(), + SolcStandardJsonInputSource::from(source_code.to_owned()), + )]), + Default::default(), + Default::default(), + optimizer_settings, + solc_optimizer_enabled, + Default::default(), + ) + .expect("source should compile") + .contracts; + let bytecode = contracts[file_name][contract_name] + .evm + .as_ref() + .expect("source should produce EVM output") + .bytecode + .as_ref() + .expect("source should produce assembly text") + .object + .as_str(); + let blob = hex::decode(bytecode).expect("hex encoding should always be valid"); + + PVM_BLOB_CACHE.lock().unwrap().insert(id, blob.clone()); + + blob +} + +/// Compile the EVM bin-runtime of 
`contract_name` found in given `source_code`. +/// The `solc` optimizer will be enabled +pub fn compile_evm_bin_runtime(contract_name: &str, source_code: &str) -> Vec { + compile_evm(contract_name, source_code, true, true) +} + +/// Compile the EVM bin of `contract_name` found in given `source_code`. +pub fn compile_evm_deploy_code( + contract_name: &str, + source_code: &str, + solc_optimizer_enabled: bool, +) -> Vec { + compile_evm(contract_name, source_code, solc_optimizer_enabled, false) +} + +/// Convert `(path, solidity)` tuples to a standard JSON input source. +pub fn sources(sources: &[(T, T)]) -> BTreeMap { + BTreeMap::from_iter( + sources + .iter() + .map(|(path, code)| (path.to_string(), code.to_string().into())), + ) +} + /// Checks if the required executables are present in `${PATH}`. fn check_dependencies() { for executable in [ @@ -50,285 +416,7 @@ fn check_dependencies() { } } -/// Builds the Solidity project and returns the standard JSON output. -pub fn build_solidity( - sources: BTreeMap, - libraries: BTreeMap>, - remappings: Option>, - optimizer_settings: revive_llvm_context::OptimizerSettings, -) -> anyhow::Result { - build_solidity_with_options(sources, libraries, remappings, optimizer_settings, true) -} - -/// Builds the Solidity project and returns the standard JSON output. 
-/// Gives control over additional options: -/// - `solc_optimizer_enabled`: Whether to use the `solc` optimizer -pub fn build_solidity_with_options( - sources: BTreeMap, - libraries: BTreeMap>, - remappings: Option>, - optimizer_settings: revive_llvm_context::OptimizerSettings, - solc_optimizer_enabled: bool, -) -> anyhow::Result { - check_dependencies(); - - inkwell::support::enable_llvm_pretty_stack_trace(); - revive_llvm_context::initialize_llvm( - revive_llvm_context::Target::PVM, - crate::DEFAULT_EXECUTABLE_NAME, - &[], - ); - let _ = crate::process::native_process::EXECUTABLE - .set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME)); - - let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?; - let solc_version = solc.version()?; - - let input = SolcStandardJsonInput::try_from_sources( - None, - sources.clone(), - libraries.clone(), - remappings, - SolcStandardJsonInputSettingsSelection::new_required_for_tests(), - SolcStandardJsonInputSettingsOptimizer::new( - solc_optimizer_enabled, - optimizer_settings.middle_end_as_string().chars().last(), - &solc_version.default, - false, - ), - None, - None, - None, - )?; - - let mut output = solc.standard_json(input, None, vec![], None)?; - - let debug_config = revive_llvm_context::DebugConfig::new( - None, - optimizer_settings.middle_end_as_string() != "z", - ); - - let project = Project::try_from_standard_json_output( - &output, - sources, - libraries, - &solc_version, - &debug_config, - )?; - - let build: crate::Build = project.compile( - optimizer_settings, - false, - debug_config, - Default::default(), - Default::default(), - )?; - build.write_to_standard_json(&mut output, &solc_version)?; - - Ok(output) -} - -/// Build a Solidity contract and get the EVM code -pub fn build_solidity_with_options_evm( - sources: BTreeMap, - libraries: BTreeMap>, - remappings: Option>, - solc_optimizer_enabled: bool, -) -> anyhow::Result> { - check_dependencies(); - - 
inkwell::support::enable_llvm_pretty_stack_trace(); - revive_llvm_context::initialize_llvm( - revive_llvm_context::Target::PVM, - crate::DEFAULT_EXECUTABLE_NAME, - &[], - ); - let _ = crate::process::native_process::EXECUTABLE - .set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME)); - - let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?; - let solc_version = solc.version()?; - - let input = SolcStandardJsonInput::try_from_sources( - None, - sources.clone(), - libraries.clone(), - remappings, - SolcStandardJsonInputSettingsSelection::new_required_for_tests(), - SolcStandardJsonInputSettingsOptimizer::new( - solc_optimizer_enabled, - None, - &solc_version.default, - false, - ), - None, - None, - None, - )?; - - let mut output = solc.standard_json(input, None, vec![], None)?; - - let mut contracts = BTreeMap::new(); - if let Some(files) = output.contracts.as_mut() { - for (_, file) in files.iter_mut() { - for (name, contract) in file.iter_mut() { - if let Some(evm) = contract.evm.as_mut() { - let (Some(bytecode), Some(deployed_bytecode)) = - (evm.bytecode.as_ref(), evm.deployed_bytecode.as_ref()) - else { - continue; - }; - contracts.insert(name.clone(), (bytecode.clone(), deployed_bytecode.clone())); - } - } - } - } - - Ok(contracts) -} - -/// Builds the Solidity project and returns the standard JSON output. 
-pub fn build_solidity_and_detect_missing_libraries( - sources: BTreeMap, - libraries: BTreeMap>, -) -> anyhow::Result { - check_dependencies(); - - inkwell::support::enable_llvm_pretty_stack_trace(); - revive_llvm_context::initialize_llvm( - revive_llvm_context::Target::PVM, - crate::DEFAULT_EXECUTABLE_NAME, - &[], - ); - let _ = crate::process::native_process::EXECUTABLE - .set(PathBuf::from(crate::r#const::DEFAULT_EXECUTABLE_NAME)); - - let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?; - let solc_version = solc.version()?; - - let input = SolcStandardJsonInput::try_from_sources( - None, - sources.clone(), - libraries.clone(), - None, - SolcStandardJsonInputSettingsSelection::new_required_for_tests(), - SolcStandardJsonInputSettingsOptimizer::new(true, None, &solc_version.default, false), - None, - None, - None, - )?; - - let mut output = solc.standard_json(input, None, vec![], None)?; - - let project = Project::try_from_standard_json_output( - &output, - sources, - libraries, - &solc_version, - &DEBUG_CONFIG, - )?; - - let missing_libraries = project.get_missing_libraries(); - missing_libraries.write_to_standard_json(&mut output, &solc.version()?)?; - - Ok(output) -} - -/// Checks if the Yul project can be built without errors. -pub fn build_yul(source_code: &str) -> anyhow::Result<()> { - check_dependencies(); - - inkwell::support::enable_llvm_pretty_stack_trace(); - revive_llvm_context::initialize_llvm( - revive_llvm_context::Target::PVM, - crate::DEFAULT_EXECUTABLE_NAME, - &[], - ); - let optimizer_settings = revive_llvm_context::OptimizerSettings::none(); - - let project = Project::try_from_yul_string::( - PathBuf::from("test.yul").as_path(), - source_code, - None, - )?; - let _build = project.compile( - optimizer_settings, - false, - DEBUG_CONFIG, - Default::default(), - Default::default(), - )?; - - Ok(()) -} - -/// Checks if the built Solidity project contains the given warning. 
-pub fn check_solidity_warning( - source_code: &str, - warning_substring: &str, - libraries: BTreeMap>, - skip_for_revive_edition: bool, - suppressed_warnings: Option>, -) -> anyhow::Result { - check_dependencies(); - - let mut solc = SolcCompiler::new(SolcCompiler::DEFAULT_EXECUTABLE_NAME.to_owned())?; - let solc_version = solc.version()?; - if skip_for_revive_edition && solc_version.l2_revision.is_some() { - return Ok(true); - } - - let mut sources = BTreeMap::new(); - sources.insert("test.sol".to_string(), source_code.to_string()); - let input = SolcStandardJsonInput::try_from_sources( - None, - sources.clone(), - libraries, - None, - SolcStandardJsonInputSettingsSelection::new_required_for_tests(), - SolcStandardJsonInputSettingsOptimizer::new(true, None, &solc_version.default, false), - None, - suppressed_warnings, - None, - )?; - - let output = solc.standard_json(input, None, vec![], None)?; - let contains_warning = output - .errors - .ok_or_else(|| anyhow::anyhow!("Solidity compiler messages not found"))? - .iter() - .any(|error| error.formatted_message.contains(warning_substring)); - - Ok(contains_warning) -} - -/// Compile the blob of `contract_name` found in given `source_code`. -/// The `solc` optimizer will be enabled -pub fn compile_blob(contract_name: &str, source_code: &str) -> Vec { - compile_blob_with_options( - contract_name, - source_code, - true, - OptimizerSettings::cycles(), - ) -} - -/// Compile the EVM bin-runtime of `contract_name` found in given `source_code`. -/// The `solc` optimizer will be enabled -pub fn compile_evm_bin_runtime(contract_name: &str, source_code: &str) -> Vec { - compile_evm(contract_name, source_code, true, true) -} - -/// Compile the EVM bin of `contract_name` found in given `source_code`. 
-/// The `solc` optimizer will be enabled -pub fn compile_evm_deploy_code( - contract_name: &str, - source_code: &str, - solc_optimizer_enabled: bool, -) -> Vec { - compile_evm(contract_name, source_code, solc_optimizer_enabled, false) -} - +/// The internal EVM bytecode compile helper. fn compile_evm( contract_name: &str, source_code: &str, @@ -353,9 +441,12 @@ fn compile_evm( let file_name = "contract.sol"; let contracts = build_solidity_with_options_evm( - [(file_name.into(), source_code.into())].into(), + BTreeMap::from([( + file_name.into(), + SolcStandardJsonInputSource::from(source_code.to_owned()), + )]), + Default::default(), Default::default(), - None, solc_optimizer_enabled, ) .expect("source should compile"); @@ -373,49 +464,3 @@ fn compile_evm( blob } - -/// Compile the blob of `contract_name` found in given `source_code`. -pub fn compile_blob_with_options( - contract_name: &str, - source_code: &str, - solc_optimizer_enabled: bool, - optimizer_settings: revive_llvm_context::OptimizerSettings, -) -> Vec { - let id = CachedBlob { - contract_name: contract_name.to_owned(), - solidity: source_code.to_owned(), - solc_optimizer_enabled, - opt: optimizer_settings.middle_end_as_string(), - }; - - if let Some(blob) = PVM_BLOB_CACHE.lock().unwrap().get(&id) { - return blob.clone(); - } - - let file_name = "contract.sol"; - let contracts = build_solidity_with_options( - [(file_name.into(), source_code.into())].into(), - Default::default(), - None, - optimizer_settings, - solc_optimizer_enabled, - ) - .expect("source should compile") - .contracts - .expect("source should contain at least one contract"); - - let bytecode = contracts[file_name][contract_name] - .evm - .as_ref() - .expect("source should produce EVM output") - .bytecode - .as_ref() - .expect("source should produce assembly text") - .object - .as_str(); - let blob = hex::decode(bytecode).expect("hex encoding should always be valid"); - - PVM_BLOB_CACHE.lock().unwrap().insert(id, blob.clone()); - - blob 
-} diff --git a/crates/resolc/src/tests/cli/asm.rs b/crates/resolc/src/tests/cli/asm.rs index 29ffed1..cf2b86a 100644 --- a/crates/resolc/src/tests/cli/asm.rs +++ b/crates/resolc/src/tests/cli/asm.rs @@ -1,7 +1,5 @@ //! The tests for running resolc with asm option. -#![cfg(test)] - use crate::tests::cli::utils; const ASM_OPTION: &str = "--asm"; @@ -30,10 +28,7 @@ fn fails_without_input_file() { utils::assert_command_failure(&resolc_result, "Omitting an input file"); let output = resolc_result.stderr.to_lowercase(); - assert!( - output.contains("no input sources specified") || output.contains("compilation aborted"), - "Expected the output to contain a specific error message." - ); + assert!(output.contains("no input sources specified")); let solc_result = utils::execute_solc(arguments); utils::assert_equal_exit_codes(&solc_result, &resolc_result); diff --git a/crates/resolc/src/tests/cli/combined_json.rs b/crates/resolc/src/tests/cli/combined_json.rs index c60cf29..666cc70 100644 --- a/crates/resolc/src/tests/cli/combined_json.rs +++ b/crates/resolc/src/tests/cli/combined_json.rs @@ -1,8 +1,6 @@ //! The tests for running resolc with combined JSON option. -#![cfg(test)] - -use revive_common; +use revive_solc_json_interface::CombinedJsonInvalidSelectorMessage; use crate::tests::cli::utils; @@ -53,10 +51,9 @@ fn fails_with_invalid_json_argument() { let resolc_result = utils::execute_resolc(arguments); utils::assert_command_failure(&resolc_result, "Providing an invalid json argument"); - assert!( - resolc_result.stdout.contains("Invalid option"), - "Expected the output to contain a specific error message." 
- ); + assert!(resolc_result + .stderr + .contains(CombinedJsonInvalidSelectorMessage)); let solc_result = utils::execute_solc(arguments); utils::assert_equal_exit_codes(&solc_result, &resolc_result); @@ -73,16 +70,12 @@ fn fails_with_multiple_json_arguments() { let resolc_result = utils::execute_resolc(arguments); utils::assert_command_failure(&resolc_result, "Providing multiple json arguments"); - assert!( - resolc_result - .stderr - .contains("reading error: No such file or directory"), - "Expected the output to contain a specific error message." - ); + assert!(resolc_result + .stderr + .contains(&format!("Error: \"{}\" is not found.", JSON_ARGUMENTS[1])),); - // FIX: Resolc exit code == 101 - // let solc_result = utils::execute_solc(arguments); - // utils::assert_equal_exit_codes(&solc_result, &resolc_result); + let solc_result = utils::execute_solc(arguments); + utils::assert_equal_exit_codes(&solc_result, &resolc_result); } #[test] @@ -91,12 +84,9 @@ fn fails_without_json_argument() { let resolc_result = utils::execute_resolc(arguments); utils::assert_command_failure(&resolc_result, "Omitting a JSON argument"); - assert!( - resolc_result.stderr.contains( - "a value is required for '--combined-json ' but none was supplied" - ), - "Expected the output to contain a specific error message." - ); + assert!(resolc_result.stderr.contains( + "a value is required for '--combined-json ' but none was supplied" + )); let solc_result = utils::execute_solc(arguments); utils::assert_equal_exit_codes(&solc_result, &resolc_result); @@ -108,10 +98,7 @@ fn fails_without_solidity_input_file() { let resolc_result = utils::execute_resolc(arguments); utils::assert_command_failure(&resolc_result, "Omitting a Solidity input file"); - assert!( - resolc_result.stderr.contains("No input sources specified"), - "Expected the output to contain a specific error message." 
- ); + assert!(resolc_result.stderr.contains("Error: No input files given"),); let solc_result = utils::execute_solc(arguments); utils::assert_equal_exit_codes(&solc_result, &resolc_result); @@ -124,12 +111,9 @@ fn fails_with_yul_input_file() { let resolc_result = utils::execute_resolc(arguments); utils::assert_command_failure(&resolc_result, "Providing a Yul input file"); - assert!( - resolc_result - .stderr - .contains("ParserError: Expected identifier"), - "Expected the output to contain a specific error message." - ); + assert!(resolc_result + .stderr + .contains("Error: Expected identifier but got 'StringLiteral'")); let solc_result = utils::execute_solc(arguments); utils::assert_equal_exit_codes(&solc_result, &resolc_result); diff --git a/crates/resolc/src/tests/cli/linker.rs b/crates/resolc/src/tests/cli/linker.rs new file mode 100644 index 0000000..cb37bfc --- /dev/null +++ b/crates/resolc/src/tests/cli/linker.rs @@ -0,0 +1,64 @@ +use crate::tests::cli::utils::{assert_command_success, execute_resolc, DEPENDENCY_CONTRACT_PATH}; + +/// Test deploy time linking a contract with unresolved factory dependencies. 
+#[test] +fn deploy_time_linking_works() { + let temp_dir = tempfile::TempDir::new().unwrap(); + let output_directory = temp_dir.path().to_path_buf(); + let source_path = temp_dir.path().to_path_buf().join("dependency.sol"); + std::fs::copy(DEPENDENCY_CONTRACT_PATH, &source_path).unwrap(); + + assert_command_success( + &execute_resolc(&[ + source_path.to_str().unwrap(), + "--bin", + "-o", + &output_directory.to_string_lossy(), + ]), + "Missing libraries should compile fine", + ); + + let dependency_blob_path = temp_dir + .path() + .to_path_buf() + .join("dependency.sol:Dependency.pvm"); + let blob_path = temp_dir + .path() + .to_path_buf() + .join("dependency.sol:TestAssert.pvm"); + + let output = execute_resolc(&[ + "--link", + blob_path.to_str().unwrap(), + dependency_blob_path.to_str().unwrap(), + ]); + assert_command_success(&output, "The linker mode with missing library should work"); + assert!(output.stdout.contains("still unresolved")); + + let assert_library_path = format!( + "{}:Assert=0x0000000000000000000000000000000000000001", + source_path.to_str().unwrap() + ); + let assert_ne_library_path = format!( + "{}:AssertNe=0x0000000000000000000000000000000000000002", + source_path.to_str().unwrap() + ); + let output = execute_resolc(&[ + "--link", + "--libraries", + &assert_library_path, + "--libraries", + &assert_ne_library_path, + blob_path.to_str().unwrap(), + dependency_blob_path.to_str().unwrap(), + ]); + assert_command_success(&output, "The linker mode with all library should work"); + assert!(!output.stdout.contains("still unresolved")); +} + +#[test] +fn emits_unlinked_binary_warning() { + let output = execute_resolc(&[DEPENDENCY_CONTRACT_PATH, "--bin"]); + assert_command_success(&output, "Missing libraries should compile fine"); + assert!(output.stderr.contains("is unlinked")); +} diff --git a/crates/resolc/src/tests/cli/llvm_arguments.rs b/crates/resolc/src/tests/cli/llvm_arguments.rs new file mode 100644 index 0000000..b7814ac --- /dev/null +++ 
b/crates/resolc/src/tests/cli/llvm_arguments.rs @@ -0,0 +1,15 @@ +use crate::tests::cli::utils::{ + assert_command_success, execute_resolc, RESOLC_YUL_FLAG, YUL_CONTRACT_PATH, +}; + +#[test] +fn llvm_arguments_work_with_yul_input() { + let output_with_argument = execute_resolc(&[ + RESOLC_YUL_FLAG, + YUL_CONTRACT_PATH, + "--llvm-arg=-riscv-soften-spills'", + "--bin", + ]); + assert_command_success(&output_with_argument, "Providing LLVM arguments"); + assert!(output_with_argument.success); +} diff --git a/crates/resolc/src/tests/cli/mod.rs b/crates/resolc/src/tests/cli/mod.rs index 7b1ba82..db9221d 100644 --- a/crates/resolc/src/tests/cli/mod.rs +++ b/crates/resolc/src/tests/cli/mod.rs @@ -1,9 +1,9 @@ -//! The CLI tests. - -#![cfg(test)] +//! The `resolc` CLI tests. mod asm; mod combined_json; +mod linker; +mod llvm_arguments; mod optimization; mod output_dir; mod standard_json; diff --git a/crates/resolc/src/tests/cli/optimization.rs b/crates/resolc/src/tests/cli/optimization.rs index 92d7456..2e56749 100644 --- a/crates/resolc/src/tests/cli/optimization.rs +++ b/crates/resolc/src/tests/cli/optimization.rs @@ -1,10 +1,9 @@ //! The tests for running resolc with explicit optimization. 
-#![cfg(test)] - -use revive_common; - -use crate::tests::cli::{utils, yul}; +use crate::tests::cli::utils::{ + self, assert_command_failure, assert_command_success, assert_equal_exit_codes, execute_resolc, + execute_solc, RESOLC_YUL_FLAG, SOLIDITY_CONTRACT_PATH, YUL_MEMSET_CONTRACT_PATH, +}; const LEVELS: &[char] = &['0', '1', '2', '3', 's', 'z']; @@ -12,11 +11,7 @@ const LEVELS: &[char] = &['0', '1', '2', '3', 's', 'z']; fn runs_with_valid_level() { for level in LEVELS { let optimization_argument = format!("-O{level}"); - let arguments = &[ - utils::YUL_MEMSET_CONTRACT_PATH, - yul::YUL_OPTION, - &optimization_argument, - ]; + let arguments = &[YUL_MEMSET_CONTRACT_PATH, "--yul", &optimization_argument]; let resolc_result = utils::execute_resolc(arguments); assert!( resolc_result.success, @@ -37,17 +32,27 @@ fn runs_with_valid_level() { #[test] fn fails_with_invalid_level() { - let arguments = &[utils::YUL_MEMSET_CONTRACT_PATH, yul::YUL_OPTION, "-O9"]; - let resolc_result = utils::execute_resolc(arguments); - utils::assert_command_failure(&resolc_result, "Providing an invalid optimization level"); + let arguments = &[YUL_MEMSET_CONTRACT_PATH, RESOLC_YUL_FLAG, "-O9"]; + let resolc_result = execute_resolc(arguments); + assert_command_failure(&resolc_result, "Providing an invalid optimization level"); - assert!( - resolc_result - .stderr - .contains("Unexpected optimization option"), - "Expected the output to contain a specific error message." 
- ); + assert!(resolc_result + .stderr + .contains("Unexpected optimization option")); - let solc_result = utils::execute_solc(arguments); - utils::assert_equal_exit_codes(&solc_result, &resolc_result); + let solc_result = execute_solc(arguments); + assert_equal_exit_codes(&solc_result, &resolc_result); +} + +#[test] +fn disable_solc_optimzer() { + let arguments = &[SOLIDITY_CONTRACT_PATH, "--bin", "--disable-solc-optimizer"]; + let disabled = execute_resolc(arguments); + assert_command_success(&disabled, "Disabling the solc optimizer"); + + let arguments = &[SOLIDITY_CONTRACT_PATH, "--bin"]; + let enabled = execute_resolc(arguments); + assert_command_success(&disabled, "Enabling the solc optimizer"); + + assert_ne!(enabled.stdout, disabled.stdout); } diff --git a/crates/resolc/src/tests/cli/output_dir.rs b/crates/resolc/src/tests/cli/output_dir.rs index ed41f60..3c6fd13 100644 --- a/crates/resolc/src/tests/cli/output_dir.rs +++ b/crates/resolc/src/tests/cli/output_dir.rs @@ -1,52 +1,41 @@ //! The tests for running resolc with output directory option. 
-#![cfg(test)] - use std::path::Path; +use tempfile::tempdir; + use crate::tests::cli::utils; -const OUTPUT_DIRECTORY: &str = "src/tests/cli/artifacts"; -const OUTPUT_BIN_FILE_PATH: &str = "src/tests/cli/artifacts/contract.sol:C.pvm"; -const OUTPUT_ASM_FILE_PATH: &str = "src/tests/cli/artifacts/contract.sol:C.pvmasm"; -const OUTPUT_LLVM_OPTIMIZED_FILE_PATH: &str = - "src/tests/cli/artifacts/src_tests_cli_contracts_solidity_contract.sol.C.optimized.ll"; +const OUTPUT_BIN_FILE_PATH: &str = "contract.sol:C.pvm"; +const OUTPUT_ASM_FILE_PATH: &str = "contract.sol:C.pvmasm"; +const OUTPUT_LLVM_OPTIMIZED_FILE_PATH: &str = "src_tests_data_solidity_contract.sol.C.optimized.ll"; const OUTPUT_LLVM_UNOPTIMIZED_FILE_PATH: &str = - "src/tests/cli/artifacts/src_tests_cli_contracts_solidity_contract.sol.C.unoptimized.ll"; - -fn file_exists(path: &str) -> bool { - Path::new(path).try_exists().unwrap() -} - -fn file_is_empty(path: &str) -> bool { - Path::new(path).metadata().unwrap().len() == 0 -} + "src_tests_data_solidity_contract.sol.C.unoptimized.ll"; fn assert_valid_output_file( result: &utils::CommandResult, - output_file_type: &str, - output_file_path: &str, + debug_output_directory: &Path, + output_file_name: &str, ) { utils::assert_command_success(result, "Providing an output directory"); - assert!( - result.stderr.contains("Compiler run successful"), - "Expected the compiler output to contain a success message.", - ); + assert!(result.stderr.contains("Compiler run successful"),); - assert!( - file_exists(output_file_path), - "Expected the {output_file_type} output file `{output_file_path}` to exist." - ); + let file = debug_output_directory.to_path_buf().join(output_file_name); - assert!( - !file_is_empty(output_file_path), - "Expected the {output_file_type} output file `{output_file_path}` to not be empty." 
+ assert!(file.exists(), "Artifact should exist: {}", file.display()); + + assert_ne!( + file.metadata().unwrap().len(), + 0, + "Artifact shouldn't be empty: {}", + file.display() ); } #[test] fn writes_to_file() { + let temp_dir = tempdir().unwrap(); let arguments = &[ utils::SOLIDITY_CONTRACT_PATH, "--overwrite", @@ -54,15 +43,16 @@ fn writes_to_file() { "--bin", "--asm", "--output-dir", - OUTPUT_DIRECTORY, + temp_dir.path().to_str().unwrap(), ]; let result = utils::execute_resolc(arguments); - assert_valid_output_file(&result, "--bin", OUTPUT_BIN_FILE_PATH); - assert_valid_output_file(&result, "--asm", OUTPUT_ASM_FILE_PATH); + assert_valid_output_file(&result, temp_dir.path(), OUTPUT_BIN_FILE_PATH); + assert_valid_output_file(&result, temp_dir.path(), OUTPUT_ASM_FILE_PATH); } #[test] fn writes_debug_info_to_file_unoptimized() { + let temp_dir = tempdir().unwrap(); let arguments = &[ utils::SOLIDITY_CONTRACT_PATH, "-g", @@ -71,15 +61,16 @@ fn writes_debug_info_to_file_unoptimized() { "--bin", "--asm", "--output-dir", - OUTPUT_DIRECTORY, + temp_dir.path().to_str().unwrap(), ]; let result = utils::execute_resolc(arguments); - assert_valid_output_file(&result, "--bin", OUTPUT_BIN_FILE_PATH); - assert_valid_output_file(&result, "--asm", OUTPUT_ASM_FILE_PATH); + assert_valid_output_file(&result, temp_dir.path(), OUTPUT_BIN_FILE_PATH); + assert_valid_output_file(&result, temp_dir.path(), OUTPUT_ASM_FILE_PATH); } #[test] fn writes_debug_info_to_file_optimized() { + let temp_dir = tempdir().unwrap(); let arguments = &[ utils::SOLIDITY_CONTRACT_PATH, "-g", @@ -87,36 +78,38 @@ fn writes_debug_info_to_file_optimized() { "--bin", "--asm", "--output-dir", - OUTPUT_DIRECTORY, + temp_dir.path().to_str().unwrap(), ]; let result = utils::execute_resolc(arguments); - assert_valid_output_file(&result, "--bin", OUTPUT_BIN_FILE_PATH); - assert_valid_output_file(&result, "--asm", OUTPUT_ASM_FILE_PATH); + assert_valid_output_file(&result, temp_dir.path(), OUTPUT_BIN_FILE_PATH); + 
assert_valid_output_file(&result, temp_dir.path(), OUTPUT_ASM_FILE_PATH); } #[test] fn writes_llvm_debug_info_to_file_unoptimized() { + let temp_dir = tempdir().unwrap(); let arguments = &[ utils::SOLIDITY_CONTRACT_PATH, "-g", "--disable-solc-optimizer", "--overwrite", "--debug-output-dir", - OUTPUT_DIRECTORY, + temp_dir.path().to_str().unwrap(), ]; let result = utils::execute_resolc(arguments); - assert_valid_output_file(&result, "llvm", OUTPUT_LLVM_UNOPTIMIZED_FILE_PATH); + assert_valid_output_file(&result, temp_dir.path(), OUTPUT_LLVM_UNOPTIMIZED_FILE_PATH); } #[test] fn writes_llvm_debug_info_to_file_optimized() { + let temp_dir = tempdir().unwrap(); let arguments = &[ utils::SOLIDITY_CONTRACT_PATH, "-g", "--overwrite", "--debug-output-dir", - OUTPUT_DIRECTORY, + temp_dir.path().to_str().unwrap(), ]; let result = utils::execute_resolc(arguments); - assert_valid_output_file(&result, "llvm", OUTPUT_LLVM_OPTIMIZED_FILE_PATH); + assert_valid_output_file(&result, temp_dir.path(), OUTPUT_LLVM_OPTIMIZED_FILE_PATH); } diff --git a/crates/resolc/src/tests/cli/standard_json.rs b/crates/resolc/src/tests/cli/standard_json.rs index b074dec..11aa41f 100644 --- a/crates/resolc/src/tests/cli/standard_json.rs +++ b/crates/resolc/src/tests/cli/standard_json.rs @@ -1,24 +1,23 @@ //! The tests for running resolc with standard JSON option. 
-#![cfg(test)] - -use crate::tests::cli::utils; +use crate::tests::cli::utils::{ + assert_command_success, assert_equal_exit_codes, execute_resolc_with_stdin_input, + execute_solc_with_stdin_input, STANDARD_JSON_CONTRACTS_PATH, +}; const JSON_OPTION: &str = "--standard-json"; #[test] fn runs_with_valid_input_file() { let arguments = &[JSON_OPTION]; - let resolc_result = - utils::execute_resolc_with_stdin_input(arguments, utils::STANDARD_JSON_CONTRACTS_PATH); - utils::assert_command_success(&resolc_result, "Providing a valid input file to stdin"); + let resolc_result = execute_resolc_with_stdin_input(arguments, STANDARD_JSON_CONTRACTS_PATH); + assert_command_success(&resolc_result, "Providing a valid input file to stdin"); assert!( resolc_result.stdout.contains("contracts"), "Expected the output to contain a `contracts` field." ); - let solc_result = - utils::execute_solc_with_stdin_input(arguments, utils::STANDARD_JSON_CONTRACTS_PATH); - utils::assert_equal_exit_codes(&solc_result, &resolc_result); + let solc_result = execute_solc_with_stdin_input(arguments, STANDARD_JSON_CONTRACTS_PATH); + assert_equal_exit_codes(&solc_result, &resolc_result); } diff --git a/crates/resolc/src/tests/cli/usage.rs b/crates/resolc/src/tests/cli/usage.rs index 468487c..50ced35 100644 --- a/crates/resolc/src/tests/cli/usage.rs +++ b/crates/resolc/src/tests/cli/usage.rs @@ -1,20 +1,14 @@ //! The tests for running resolc when expecting usage output. -#![cfg(test)] - use crate::tests::cli::utils; #[test] -#[ignore = "Fix: 'resolc --help' should exit with success exit code"] fn shows_usage_with_help() { let arguments = &["--help"]; let resolc_result = utils::execute_resolc(arguments); utils::assert_command_success(&resolc_result, "Providing the `--help` option"); - assert!( - resolc_result.stdout.contains("Usage: resolc"), - "Expected the output to contain usage information." 
- ); + assert!(resolc_result.stdout.contains("Usage: resolc")); let solc_result = utils::execute_solc(arguments); utils::assert_equal_exit_codes(&solc_result, &resolc_result); @@ -25,10 +19,7 @@ fn fails_without_options() { let resolc_result = utils::execute_resolc(&[]); utils::assert_command_failure(&resolc_result, "Omitting options"); - assert!( - resolc_result.stderr.contains("Usage: resolc"), - "Expected the output to contain usage information." - ); + assert!(resolc_result.stderr.contains("Usage: resolc")); let solc_result = utils::execute_solc(&[]); utils::assert_equal_exit_codes(&solc_result, &resolc_result); diff --git a/crates/resolc/src/tests/cli/utils.rs b/crates/resolc/src/tests/cli/utils.rs index af3de3d..c5b970e 100644 --- a/crates/resolc/src/tests/cli/utils.rs +++ b/crates/resolc/src/tests/cli/utils.rs @@ -5,15 +5,28 @@ use std::{ process::{Command, Stdio}, }; -use revive_common; - use crate::SolcCompiler; -pub const SOLIDITY_CONTRACT_PATH: &str = "src/tests/cli/contracts/solidity/contract.sol"; -pub const YUL_CONTRACT_PATH: &str = "src/tests/cli/contracts/yul/contract.yul"; -pub const YUL_MEMSET_CONTRACT_PATH: &str = "src/tests/cli/contracts/yul/memset.yul"; +/// The simple Solidity contract test fixture path. +pub const SOLIDITY_CONTRACT_PATH: &str = "src/tests/data/solidity/contract.sol"; +/// The dependency Solidity contract test fixture path. +pub const DEPENDENCY_CONTRACT_PATH: &str = "src/tests/data/solidity/dependency.sol"; + +/// The simple YUL contract test fixture path. +pub const YUL_CONTRACT_PATH: &str = "src/tests/data/yul/contract.yul"; + +/// The memeset YUL contract test fixture path. +pub const YUL_MEMSET_CONTRACT_PATH: &str = "src/tests/data/yul/memset.yul"; +/// The standard JSON contracts test fixture path. +/// pub const STANDARD_JSON_CONTRACTS_PATH: &str = - "src/tests/cli/contracts/standard_json/solidity_contracts.json"; + "src/tests/data/standard_json/solidity_contracts.json"; + +/// The `resolc` YUL mode flag. 
+pub const RESOLC_YUL_FLAG: &str = "--yul"; +/// The `--yul` option was deprecated in Solidity 0.8.27 in favor of `--strict-assembly`. +/// See section `--strict-assembly vs. --yul` in https://soliditylang.org/blog/2024/09/04/solidity-0.8.27-release-announcement/ +pub const SOLC_YUL_FLAG: &str = "--strict-assembly"; /// The result of executing a command. pub struct CommandResult { @@ -52,6 +65,14 @@ fn execute_command( arguments: &[&str], stdin_file_path: Option<&str>, ) -> CommandResult { + println!( + "executing command: '{command} {}{}'", + arguments.join(" "), + stdin_file_path + .map(|argument| format!("< {argument}")) + .unwrap_or_default() + ); + let stdin_config = match stdin_file_path { Some(path) => Stdio::from(File::open(path).unwrap()), None => Stdio::null(), @@ -73,10 +94,7 @@ fn execute_command( } pub fn assert_equal_exit_codes(solc_result: &CommandResult, resolc_result: &CommandResult) { - assert_eq!( - solc_result.code, resolc_result.code, - "Expected solc and resolc to have the same exit code." - ); + assert_eq!(solc_result.code, resolc_result.code,); } pub fn assert_command_success(result: &CommandResult, error_message_prefix: &str) { diff --git a/crates/resolc/src/tests/cli/yul.rs b/crates/resolc/src/tests/cli/yul.rs index c9b597b..97d7963 100644 --- a/crates/resolc/src/tests/cli/yul.rs +++ b/crates/resolc/src/tests/cli/yul.rs @@ -1,44 +1,32 @@ //! The tests for running resolc with yul option. -#![cfg(test)] - -use crate::tests::cli::utils; - -pub const YUL_OPTION: &str = "--yul"; -/// The `--yul` option was deprecated in Solidity 0.8.27 in favor of `--strict-assembly`. -/// See section `--strict-assembly vs. 
--yul` in https://soliditylang.org/blog/2024/09/04/solidity-0.8.27-release-announcement/ -const SOLC_YUL_OPTION: &str = "--strict-assembly"; +use crate::tests::cli::utils::{ + assert_command_success, assert_equal_exit_codes, execute_resolc, execute_solc, RESOLC_YUL_FLAG, + SOLC_YUL_FLAG, YUL_CONTRACT_PATH, +}; #[test] fn runs_with_valid_input_file() { - let arguments = &[utils::YUL_CONTRACT_PATH, YUL_OPTION]; - let resolc_result = utils::execute_resolc(arguments); - utils::assert_command_success(&resolc_result, "Providing a valid input file"); + let resolc_result = execute_resolc(&[YUL_CONTRACT_PATH, RESOLC_YUL_FLAG]); + assert_command_success(&resolc_result, "Providing a valid input file"); - assert!( - resolc_result - .stderr - .contains("Compiler run successful. No output requested"), - "Expected the output to contain a success message." - ); + assert!(resolc_result + .stderr + .contains("Compiler run successful. No output requested")); - let solc_arguments = &[utils::YUL_CONTRACT_PATH, SOLC_YUL_OPTION]; - let solc_result = utils::execute_solc(solc_arguments); - utils::assert_equal_exit_codes(&solc_result, &resolc_result); + let solc_result = execute_solc(&[YUL_CONTRACT_PATH, SOLC_YUL_FLAG]); + assert_equal_exit_codes(&solc_result, &resolc_result); } +/// While the `solc` Solidity mode requires output selection, +/// the strict-assembly mode does not. +/// +/// `resolc` exhibits consistent behavior for both modes. #[test] -fn fails_without_input_file() { - let arguments = &[YUL_OPTION]; - let resolc_result = utils::execute_resolc(arguments); - utils::assert_command_failure(&resolc_result, "Omitting an input file"); - - assert!( - resolc_result.stderr.contains("The input file is missing"), - "Expected the output to contain a specific error message." 
- ); - - let solc_arguments = &[SOLC_YUL_OPTION]; - let solc_result = utils::execute_solc(solc_arguments); - utils::assert_equal_exit_codes(&solc_result, &resolc_result); +fn runs_without_input_file() { + let resolc_result = execute_resolc(&[RESOLC_YUL_FLAG]); + assert_command_success(&resolc_result, "Omitting an input file"); + assert!(resolc_result + .stderr + .contains("Compiler run successful. No output requested")); } diff --git a/crates/resolc/src/tests/cli/contracts/solidity/contract.sol b/crates/resolc/src/tests/data/solidity/contract.sol similarity index 100% rename from crates/resolc/src/tests/cli/contracts/solidity/contract.sol rename to crates/resolc/src/tests/data/solidity/contract.sol diff --git a/crates/resolc/src/tests/data/solidity/dependency.sol b/crates/resolc/src/tests/data/solidity/dependency.sol new file mode 100644 index 0000000..be4ba34 --- /dev/null +++ b/crates/resolc/src/tests/data/solidity/dependency.sol @@ -0,0 +1,33 @@ +// SPDX-License-Identifier: GPL-3.0 + +pragma solidity >=0.7.0 <0.9.0; + +library Assert { + function equal(uint256 a, uint256 b) public pure returns (bool result) { + result = (a == b); + } +} + +library AssertNe { + function notEqual(uint256 a, uint256 b) public pure returns (bool result) { + result = (a != b); + } +} + +contract TestAssert { + constructor() payable { + new Dependency(); + } + + function checkEquality(uint256 a, uint256 b) public pure returns (string memory) { + Assert.equal(a, b); + return "Values are equal"; + } +} + +contract Dependency { + function checkNotEquality(uint256 a, uint256 b) public pure returns (string memory) { + AssertNe.notEqual(a, b); + return "Values are not equal"; + } +} diff --git a/crates/resolc/src/tests/cli/contracts/standard_json/solidity_contracts.json b/crates/resolc/src/tests/data/standard_json/solidity_contracts.json similarity index 100% rename from crates/resolc/src/tests/cli/contracts/standard_json/solidity_contracts.json rename to 
crates/resolc/src/tests/data/standard_json/solidity_contracts.json diff --git a/crates/resolc/src/tests/data/standard_json/yul_solc.json b/crates/resolc/src/tests/data/standard_json/yul_solc.json new file mode 100644 index 0000000..5230760 --- /dev/null +++ b/crates/resolc/src/tests/data/standard_json/yul_solc.json @@ -0,0 +1,29 @@ +{ + "language": "Yul", + "sources": { + "Test": { + "content": "object \"Return\" { code { { return(0, 0) } } object \"Return_deployed\" { code { { return(0, 0) } } } }" + } + }, + "settings": { + "optimizer": { + "enabled": true + }, + "metadata": { + "useLiteralContent": true + }, + "outputSelection": { + "*": { + "": [ + "ast" + ], + "*": [ + "abi", + "evm.methodIdentifiers", + "evm.bytecode" + ] + } + }, + "libraries": {} + } +} diff --git a/crates/resolc/src/tests/data/standard_json/yul_solc_urls.json b/crates/resolc/src/tests/data/standard_json/yul_solc_urls.json new file mode 100644 index 0000000..8d1aed6 --- /dev/null +++ b/crates/resolc/src/tests/data/standard_json/yul_solc_urls.json @@ -0,0 +1,31 @@ +{ + "language": "Yul", + "sources": { + "Test": { + "urls": [ + "src/tests/data/yul/Test.yul" + ] + } + }, + "settings": { + "optimizer": { + "enabled": true + }, + "metadata": { + "useLiteralContent": true + }, + "outputSelection": { + "*": { + "": [ + "ast" + ], + "*": [ + "abi", + "evm.methodIdentifiers", + "evm.bytecode" + ] + } + }, + "libraries": {} + } +} diff --git a/crates/resolc/src/tests/data/yul/Test.yul b/crates/resolc/src/tests/data/yul/Test.yul new file mode 100644 index 0000000..7245fea --- /dev/null +++ b/crates/resolc/src/tests/data/yul/Test.yul @@ -0,0 +1,16 @@ +object "Return" { + code { + { + return(0, 0) + } + } + + object "Return_deployed" { + code { + { + mstore(0, 42) + return(0, 32) + } + } + } +} diff --git a/crates/resolc/src/tests/cli/contracts/yul/contract.yul b/crates/resolc/src/tests/data/yul/contract.yul similarity index 100% rename from crates/resolc/src/tests/cli/contracts/yul/contract.yul rename 
to crates/resolc/src/tests/data/yul/contract.yul diff --git a/crates/resolc/src/tests/cli/contracts/yul/memset.yul b/crates/resolc/src/tests/data/yul/memset.yul similarity index 100% rename from crates/resolc/src/tests/cli/contracts/yul/memset.yul rename to crates/resolc/src/tests/data/yul/memset.yul diff --git a/crates/resolc/src/tests/messages.rs b/crates/resolc/src/tests/messages.rs deleted file mode 100644 index eb8fae3..0000000 --- a/crates/resolc/src/tests/messages.rs +++ /dev/null @@ -1,251 +0,0 @@ -//! The Solidity compiler unit tests for messages. - -#![cfg(test)] - -use std::collections::BTreeMap; - -use revive_solc_json_interface::warning::Warning; - -pub const ECRECOVER_TEST_SOURCE: &str = r#" -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -contract ECRecoverExample { - function recoverAddress( - bytes32 messageHash, - uint8 v, - bytes32 r, - bytes32 s - ) public pure returns (address) { - return ecrecover(messageHash, v, r, s); - } -} - "#; - -#[test] -fn ecrecover() { - assert!( - super::check_solidity_warning( - ECRECOVER_TEST_SOURCE, - "Warning: It looks like you are using 'ecrecover' to validate a signature of a user account.", - BTreeMap::new(), - false, - None, - ).expect("Test failure") - ); -} - -#[test] -fn ecrecover_suppressed() { - assert!( - !super::check_solidity_warning( - ECRECOVER_TEST_SOURCE, - "Warning: It looks like you are using 'ecrecover' to validate a signature of a user account.", - BTreeMap::new(), - false, - Some(vec![Warning::EcRecover]), - ).expect("Test failure") - ); -} - -pub const SEND_TEST_SOURCE: &str = r#" -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -contract SendExample { - address payable public recipient; - - constructor(address payable _recipient) { - recipient = _recipient; - } - - function forwardEther() external payable { - bool success = recipient.send(msg.value); - require(success, "Failed to send Ether"); - } -} -"#; - -pub const BALANCE_CALLS_MESSAGE: &str = - "Warning: It looks 
like you are using '
.send/transfer()'"; - -#[test] -fn send() { - assert!(super::check_solidity_warning( - SEND_TEST_SOURCE, - BALANCE_CALLS_MESSAGE, - BTreeMap::new(), - false, - None, - ) - .expect("Test failure")); -} - -#[test] -fn send_suppressed() { - assert!(!super::check_solidity_warning( - SEND_TEST_SOURCE, - BALANCE_CALLS_MESSAGE, - BTreeMap::new(), - false, - Some(vec![Warning::SendTransfer]), - ) - .expect("Test failure")); -} - -pub const TRANSFER_TEST_SOURCE: &str = r#" -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -contract TransferExample { - address payable public recipient; - - constructor(address payable _recipient) { - recipient = _recipient; - } - - function forwardEther() external payable { - recipient.transfer(msg.value); - } -} - "#; - -#[test] -fn transfer() { - assert!(super::check_solidity_warning( - TRANSFER_TEST_SOURCE, - BALANCE_CALLS_MESSAGE, - BTreeMap::new(), - false, - None, - ) - .expect("Test failure")); -} - -#[test] -fn transfer_suppressed() { - assert!(!super::check_solidity_warning( - TRANSFER_TEST_SOURCE, - BALANCE_CALLS_MESSAGE, - BTreeMap::new(), - false, - Some(vec![Warning::SendTransfer]), - ) - .expect("Test failure")); -} - -pub const EXTCODESIZE_TEST_SOURCE: &str = r#" -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -contract ExternalCodeSize { - function getExternalCodeSize(address target) public view returns (uint256) { - uint256 codeSize; - assembly { - codeSize := extcodesize(target) - } - return codeSize; - } -} - "#; - -#[test] -fn extcodesize() { - assert!(super::check_solidity_warning( - EXTCODESIZE_TEST_SOURCE, - "Warning: Your code or one of its dependencies uses the 'extcodesize' instruction,", - BTreeMap::new(), - false, - None, - ) - .expect("Test failure")); -} - -#[test] -fn extcodesize_suppressed() { - assert!(!super::check_solidity_warning( - EXTCODESIZE_TEST_SOURCE, - "Warning: Your code or one of its dependencies uses the 'extcodesize' instruction,", - BTreeMap::new(), - false, - 
Some(vec![Warning::ExtCodeSize]), - ) - .expect("Test failure")); -} - -pub const TX_ORIGIN_TEST_SOURCE: &str = r#" -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -contract TxOriginExample { - function isOriginSender() public view returns (bool) { - return tx.origin == msg.sender; - } -} - "#; - -#[test] -fn tx_origin() { - assert!(super::check_solidity_warning( - TX_ORIGIN_TEST_SOURCE, - "Warning: You are checking for 'tx.origin' in your code, which might lead to", - BTreeMap::new(), - false, - None, - ) - .expect("Test failure")); -} - -#[test] -fn tx_origin_suppressed() { - assert!(!super::check_solidity_warning( - TX_ORIGIN_TEST_SOURCE, - "Warning: You are checking for 'tx.origin' in your code, which might lead to", - BTreeMap::new(), - false, - Some(vec![Warning::TxOrigin]), - ) - .expect("Test failure")); -} - -pub const TX_ORIGIN_ASSEMBLY_TEST_SOURCE: &str = r#" -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -contract TxOriginExample { - function isOriginSender() public view returns (bool) { - address txOrigin; - address sender = msg.sender; - - assembly { - txOrigin := origin() // Get the transaction origin using the 'origin' instruction - } - - return txOrigin == sender; - } -} - "#; - -#[test] -fn tx_origin_assembly() { - assert!(super::check_solidity_warning( - TX_ORIGIN_ASSEMBLY_TEST_SOURCE, - "Warning: You are checking for 'tx.origin' in your code, which might lead to", - BTreeMap::new(), - false, - None, - ) - .expect("Test failure")); -} - -#[test] -fn tx_origin_assembly_suppressed() { - assert!(!super::check_solidity_warning( - TX_ORIGIN_ASSEMBLY_TEST_SOURCE, - "Warning: You are checking for 'tx.origin' in your code, which might lead to", - BTreeMap::new(), - false, - Some(vec![Warning::TxOrigin]), - ) - .expect("Test failure")); -} diff --git a/crates/resolc/src/tests/mod.rs b/crates/resolc/src/tests/mod.rs index c9deaef..83a8e33 100644 --- a/crates/resolc/src/tests/mod.rs +++ b/crates/resolc/src/tests/mod.rs @@ -1,15 +1,6 
@@ -//! The Solidity compiler unit tests. +//! The Solidity compiler tests. #![cfg(test)] mod cli; -mod factory_dependency; -mod ir_artifacts; -mod libraries; -mod messages; -mod optimizer; -mod remappings; -mod runtime_code; -mod unsupported_opcodes; - -pub(crate) use super::test_utils::*; +mod unit; diff --git a/crates/resolc/src/tests/remappings.rs b/crates/resolc/src/tests/remappings.rs deleted file mode 100644 index 15dc31e..0000000 --- a/crates/resolc/src/tests/remappings.rs +++ /dev/null @@ -1,50 +0,0 @@ -//! The Solidity compiler unit tests for remappings. - -#![cfg(test)] - -use std::collections::BTreeMap; -use std::collections::BTreeSet; - -pub const CALLEE_TEST_SOURCE: &str = r#" -// SPDX-License-Identifier: MIT - -pragma solidity >=0.4.16; - -contract Callable { - function f(uint a) public pure returns(uint) { - return a * 2; - } -} -"#; - -pub const CALLER_TEST_SOURCE: &str = r#" -// SPDX-License-Identifier: MIT - -pragma solidity >=0.4.16; - -import "libraries/default/callable.sol"; - -contract Main { - function main(Callable callable) public returns(uint) { - return callable.f(5); - } -} -"#; - -#[test] -fn default() { - let mut sources = BTreeMap::new(); - sources.insert("./test.sol".to_owned(), CALLER_TEST_SOURCE.to_owned()); - sources.insert("./callable.sol".to_owned(), CALLEE_TEST_SOURCE.to_owned()); - - let mut remappings = BTreeSet::new(); - remappings.insert("libraries/default/=./".to_owned()); - - super::build_solidity( - sources, - BTreeMap::new(), - Some(remappings), - revive_llvm_context::OptimizerSettings::cycles(), - ) - .expect("Test failure"); -} diff --git a/crates/resolc/src/tests/runtime_code.rs b/crates/resolc/src/tests/runtime_code.rs deleted file mode 100644 index 6080311..0000000 --- a/crates/resolc/src/tests/runtime_code.rs +++ /dev/null @@ -1,33 +0,0 @@ -//! The Solidity compiler unit tests for runtime code. 
- -#![cfg(test)] - -use std::collections::BTreeMap; - -#[test] -#[should_panic(expected = "runtimeCode is not supported")] -fn default() { - let source_code = r#" -// SPDX-License-Identifier: MIT -pragma solidity ^0.8.0; - -contract A {} - -contract Test { - function main() public pure returns(bytes memory) { - return type(A).runtimeCode; - } -} - "#; - - let mut sources = BTreeMap::new(); - sources.insert("test.sol".to_owned(), source_code.to_owned()); - - super::build_solidity( - sources, - BTreeMap::new(), - None, - revive_llvm_context::OptimizerSettings::cycles(), - ) - .expect("Test failure"); -} diff --git a/crates/resolc/src/tests/factory_dependency.rs b/crates/resolc/src/tests/unit/factory_dependency.rs similarity index 62% rename from crates/resolc/src/tests/factory_dependency.rs rename to crates/resolc/src/tests/unit/factory_dependency.rs index 3788672..6dc2b7d 100644 --- a/crates/resolc/src/tests/factory_dependency.rs +++ b/crates/resolc/src/tests/unit/factory_dependency.rs @@ -1,10 +1,10 @@ //! The Solidity compiler unit tests for factory dependencies. 
-#![cfg(test)] +use crate::test_utils::{build_solidity, sources}; -use std::collections::BTreeMap; - -pub const MAIN_CODE: &str = r#" +#[test] +fn default() { + let caller_code = r#" // SPDX-License-Identifier: MIT pragma solidity >=0.4.16; @@ -18,10 +18,9 @@ contract Main { callable.set(10); return callable.get(); } -} -"#; +}"#; -pub const CALLABLE_CODE: &str = r#" + let callee_code = r#" // SPDX-License-Identifier: MIT pragma solidity >=0.4.16; @@ -36,35 +35,22 @@ contract Callable { function get() external view returns(uint256) { return value; } -} -"#; +}"#; -#[test] -fn default() { - let mut sources = BTreeMap::new(); - sources.insert("main.sol".to_owned(), MAIN_CODE.to_owned()); - sources.insert("callable.sol".to_owned(), CALLABLE_CODE.to_owned()); - - let output = super::build_solidity( - sources, - BTreeMap::new(), - None, - revive_llvm_context::OptimizerSettings::cycles(), - ) - .expect("Build failure"); + let output = build_solidity(sources(&[ + ("main.sol", caller_code), + ("callable.sol", callee_code), + ])) + .unwrap(); assert_eq!( output .contracts - .as_ref() - .expect("Missing field `contracts`") .get("main.sol") .expect("Missing file `main.sol`") .get("Main") .expect("Missing contract `main.sol:Main`") .factory_dependencies - .as_ref() - .expect("Missing field `factory_dependencies`") .len(), 1, "Expected 1 factory dependency in `main.sol:Main`" @@ -72,15 +58,11 @@ fn default() { assert_eq!( output .contracts - .as_ref() - .expect("Missing field `contracts`") .get("callable.sol") .expect("Missing file `callable.sol`") .get("Callable") .expect("Missing contract `callable.sol:Callable`") .factory_dependencies - .as_ref() - .expect("Missing field `factory_dependencies`") .len(), 0, "Expected 0 factory dependencies in `callable.sol:Callable`" diff --git a/crates/resolc/src/tests/ir_artifacts.rs b/crates/resolc/src/tests/unit/ir_artifacts.rs similarity index 53% rename from crates/resolc/src/tests/ir_artifacts.rs rename to 
crates/resolc/src/tests/unit/ir_artifacts.rs index d5be735..e874a37 100644 --- a/crates/resolc/src/tests/ir_artifacts.rs +++ b/crates/resolc/src/tests/unit/ir_artifacts.rs @@ -1,13 +1,11 @@ //! The Solidity compiler unit tests for IR artifacts. //! The tests check if the IR artifacts are kept in the final output. -#![cfg(test)] - -use std::collections::BTreeMap; +use crate::test_utils::{build_solidity, sources}; #[test] fn yul() { - let source_code = r#" + let code = r#" // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; @@ -18,28 +16,17 @@ contract Test { } "#; - let mut sources = BTreeMap::new(); - sources.insert("test.sol".to_owned(), source_code.to_owned()); - - let build = super::build_solidity( - sources, - BTreeMap::new(), - None, - revive_llvm_context::OptimizerSettings::cycles(), - ) - .expect("Test failure"); + let build = build_solidity(sources(&[("test.sol", code)])).expect("Test failure"); assert!( - build + !build .contracts - .as_ref() - .expect("Always exists") .get("test.sol") .expect("Always exists") .get("Test") .expect("Always exists") .ir_optimized - .is_some(), + .is_empty(), "Yul IR is missing" ); } diff --git a/crates/resolc/src/tests/libraries.rs b/crates/resolc/src/tests/unit/libraries.rs similarity index 62% rename from crates/resolc/src/tests/libraries.rs rename to crates/resolc/src/tests/unit/libraries.rs index 0178602..2e1b2bd 100644 --- a/crates/resolc/src/tests/libraries.rs +++ b/crates/resolc/src/tests/unit/libraries.rs @@ -1,10 +1,10 @@ //! The Solidity compiler unit tests for libraries. 
-#![cfg(test)] +use revive_solc_json_interface::SolcStandardJsonInputSettingsLibraries; -use std::collections::BTreeMap; +use crate::test_utils::build_solidity_and_detect_missing_libraries; -pub const LIBRARY_TEST_SOURCE: &str = r#" +pub const CODE: &str = r#" // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; @@ -28,29 +28,21 @@ contract SimpleContract { } return sum; } -} - "#; +}"#; #[test] fn not_specified() { - let mut sources = BTreeMap::new(); - sources.insert("test.sol".to_owned(), LIBRARY_TEST_SOURCE.to_owned()); - let output = - super::build_solidity_and_detect_missing_libraries(sources.clone(), BTreeMap::new()) - .expect("Test failure"); + let output = build_solidity_and_detect_missing_libraries(&[("test.sol", CODE)], Default::default()) + .unwrap(); assert!( output .contracts - .as_ref() - .expect("Always exists") .get("test.sol") .expect("Always exists") .get("SimpleContract") .expect("Always exists") .missing_libraries - .as_ref() - .expect("Always exists") .contains("test.sol:SimpleLibrary"), "Missing library not detected" ); @@ -58,32 +50,25 @@ #[test] fn specified() { - let mut sources = BTreeMap::new(); - sources.insert("test.sol".to_owned(), LIBRARY_TEST_SOURCE.to_owned()); - - let mut libraries = BTreeMap::new(); + let mut libraries = SolcStandardJsonInputSettingsLibraries::default(); libraries + .as_inner_mut() .entry("test.sol".to_string()) - .or_insert_with(BTreeMap::new) + .or_default() .entry("SimpleLibrary".to_string()) .or_insert("0x00000000000000000000000000000000DEADBEEF".to_string()); let output = - super::build_solidity_and_detect_missing_libraries(sources.clone(), libraries.clone()) - .expect("Test failure"); + build_solidity_and_detect_missing_libraries(&[("test.sol", CODE)], libraries.clone()) + .unwrap(); assert!( output .contracts - .as_ref() - .expect("Always exists") .get("test.sol") .expect("Always exists") .get("SimpleContract") .expect("Always exists") .missing_libraries - .as_ref() - .cloned() - 
.unwrap_or_default() .is_empty(), "The list of missing libraries must be empty" ); diff --git a/crates/resolc/src/tests/unit/messages.rs b/crates/resolc/src/tests/unit/messages.rs new file mode 100644 index 0000000..6a50a34 --- /dev/null +++ b/crates/resolc/src/tests/unit/messages.rs @@ -0,0 +1,116 @@ +//! The Solidity compiler unit tests for messages. + +use revive_llvm_context::OptimizerSettings; +use revive_solc_json_interface::{ResolcWarning, SolcStandardJsonOutput}; + +use crate::test_utils::{build_solidity, build_solidity_with_options, sources}; + +pub const SEND_TEST_SOURCE: &str = r#" +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +contract SendExample { + address payable public recipient; + + constructor(address payable _recipient) { + recipient = _recipient; + } + + function forwardEther() external payable { + bool success = recipient.send(msg.value); + require(success, "Failed to send Ether"); + } +}"#; + +pub const TRANSFER_TEST_SOURCE: &str = r#" +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +contract TransferExample { + address payable public recipient; + + constructor(address payable _recipient) { + recipient = _recipient; + } + + function forwardEther() external payable { + recipient.transfer(msg.value); + } +}"#; + +pub const TX_ORIGIN_TEST_SOURCE: &str = r#" +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +contract TxOriginExample { + function isOriginSender() public view returns (bool) { + return tx.origin == msg.sender; + } +}"#; + +fn contains_warning(build: SolcStandardJsonOutput, warning: ResolcWarning) -> bool { + build + .errors + .iter() + .any(|error| error.is_warning() && error.message.contains(warning.as_message())) +} + +#[test] +fn send() { + let build = build_solidity(sources(&[("test.sol", SEND_TEST_SOURCE)])).unwrap(); + assert!(contains_warning(build, ResolcWarning::SendAndTransfer)); +} + +#[test] +fn send_suppressed() { + let build = build_solidity_with_options( + sources(&[("test.sol", 
SEND_TEST_SOURCE)]), + Default::default(), + Default::default(), + OptimizerSettings::cycles(), + true, + vec![ResolcWarning::SendAndTransfer], + ) + .unwrap(); + assert!(!contains_warning(build, ResolcWarning::SendAndTransfer)); +} + +#[test] +fn transfer() { + let build = build_solidity(sources(&[("test.sol", TRANSFER_TEST_SOURCE)])).unwrap(); + assert!(contains_warning(build, ResolcWarning::SendAndTransfer)); +} + +#[test] +fn transfer_suppressed() { + let build = build_solidity_with_options( + sources(&[("test.sol", TRANSFER_TEST_SOURCE)]), + Default::default(), + Default::default(), + OptimizerSettings::cycles(), + true, + vec![ResolcWarning::SendAndTransfer], + ) + .unwrap(); + assert!(!contains_warning(build, ResolcWarning::SendAndTransfer)) +} + +#[test] +fn tx_origin() { + let build = build_solidity(sources(&[("test.sol", TX_ORIGIN_TEST_SOURCE)])).unwrap(); + assert!(contains_warning(build, ResolcWarning::TxOrigin)); +} + +#[test] +fn tx_origin_suppressed() { + let build = build_solidity_with_options( + sources(&[("test.sol", TX_ORIGIN_TEST_SOURCE)]), + Default::default(), + Default::default(), + OptimizerSettings::cycles(), + true, + vec![ResolcWarning::TxOrigin], + ) + .unwrap(); + assert!(!contains_warning(build, ResolcWarning::TxOrigin)) +} diff --git a/crates/resolc/src/tests/unit/mod.rs b/crates/resolc/src/tests/unit/mod.rs new file mode 100644 index 0000000..48b3b4f --- /dev/null +++ b/crates/resolc/src/tests/unit/mod.rs @@ -0,0 +1,11 @@ +//! The Solidity compiler unit tests. 
+ +mod factory_dependency; +mod ir_artifacts; +mod libraries; +mod messages; +mod optimizer; +mod remappings; +mod runtime_code; +mod standard_json; +mod unsupported_opcodes; diff --git a/crates/resolc/src/tests/optimizer.rs b/crates/resolc/src/tests/unit/optimizer.rs similarity index 77% rename from crates/resolc/src/tests/optimizer.rs rename to crates/resolc/src/tests/unit/optimizer.rs index 4ae16fd..16b6667 100644 --- a/crates/resolc/src/tests/optimizer.rs +++ b/crates/resolc/src/tests/unit/optimizer.rs @@ -1,10 +1,12 @@ //! The Solidity compiler unit tests for the optimizer. -#![cfg(test)] +use crate::test_utils::{build_solidity, build_solidity_with_options, sources}; -use std::collections::BTreeMap; - -pub const SOURCE_CODE: &str = r#" +#[test] +fn optimizer() { + let source = &[( + "test.sol", + r#" // SPDX-License-Identifier: MIT pragma solidity >=0.5.0; @@ -40,40 +42,31 @@ contract Test { } return h; } -} -"#; +}"#, + )]; -#[test] -fn optimizer() { - let mut sources = BTreeMap::new(); - sources.insert("test.sol".to_owned(), SOURCE_CODE.to_owned()); - - let build_unoptimized = super::build_solidity( - sources.clone(), - BTreeMap::new(), - None, + let build_unoptimized = build_solidity_with_options( + sources(source), + Default::default(), + Default::default(), revive_llvm_context::OptimizerSettings::none(), + true, + Default::default(), ) - .expect("Build failure"); - let build_optimized_for_cycles = super::build_solidity( - sources.clone(), - BTreeMap::new(), - None, - revive_llvm_context::OptimizerSettings::cycles(), - ) - .expect("Build failure"); - let build_optimized_for_size = super::build_solidity( - sources, - BTreeMap::new(), - None, + .unwrap(); + let build_optimized_for_cycles = build_solidity(sources(source)).unwrap(); + let build_optimized_for_size = build_solidity_with_options( + sources(source), + Default::default(), + Default::default(), revive_llvm_context::OptimizerSettings::size(), + true, + Default::default(), ) - .expect("Build 
failure"); + .unwrap(); let size_when_unoptimized = build_unoptimized .contracts - .as_ref() - .expect("Missing field `contracts`") .get("test.sol") .expect("Missing file `test.sol`") .get("Test") @@ -88,8 +81,6 @@ fn optimizer() { .len(); let size_when_optimized_for_cycles = build_optimized_for_cycles .contracts - .as_ref() - .expect("Missing field `contracts`") .get("test.sol") .expect("Missing file `test.sol`") .get("Test") @@ -104,8 +95,6 @@ fn optimizer() { .len(); let size_when_optimized_for_size = build_optimized_for_size .contracts - .as_ref() - .expect("Missing field `contracts`") .get("test.sol") .expect("Missing file `test.sol`") .get("Test") diff --git a/crates/resolc/src/tests/unit/remappings.rs b/crates/resolc/src/tests/unit/remappings.rs new file mode 100644 index 0000000..8b88d72 --- /dev/null +++ b/crates/resolc/src/tests/unit/remappings.rs @@ -0,0 +1,40 @@ +//! The Solidity compiler unit tests for remappings. + +use crate::test_utils::{build_solidity_with_options, sources}; + +#[test] +fn default() { + let callee_code = r#" +// SPDX-License-Identifier: MIT + +pragma solidity >=0.4.16; + +contract Callable { + function f(uint a) public pure returns(uint) { + return a * 2; + } +}"#; + + let caller_code = r#" +// SPDX-License-Identifier: MIT + +pragma solidity >=0.4.16; + +import "libraries/default/callable.sol"; + +contract Main { + function main(Callable callable) public returns(uint) { + return callable.f(5); + } +}"#; + + build_solidity_with_options( + sources(&[("./test.sol", caller_code), ("./callable.sol", callee_code)]), + Default::default(), + ["libraries/default/=./".to_owned()].into(), + revive_llvm_context::OptimizerSettings::cycles(), + true, + Default::default(), + ) + .unwrap(); +} diff --git a/crates/resolc/src/tests/unit/runtime_code.rs b/crates/resolc/src/tests/unit/runtime_code.rs new file mode 100644 index 0000000..e04b4e5 --- /dev/null +++ b/crates/resolc/src/tests/unit/runtime_code.rs @@ -0,0 +1,30 @@ +//! 
The Solidity compiler unit tests for runtime code. + +use crate::test_utils::{build_solidity, sources}; + +#[test] +fn default() { + let code = r#" +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +contract A {} + +contract Test { + function main() public pure returns(bytes memory) { + return type(A).runtimeCode; + } +} + "#; + + build_solidity(sources(&[("test.sol", code)])) + .unwrap() + .errors + .iter() + .find(|error| { + error + .to_string() + .contains("Error: Deploy and runtime code are merged in PVM") + }) + .unwrap(); +} diff --git a/crates/resolc/src/tests/unit/standard_json.rs b/crates/resolc/src/tests/unit/standard_json.rs new file mode 100644 index 0000000..15ac541 --- /dev/null +++ b/crates/resolc/src/tests/unit/standard_json.rs @@ -0,0 +1,53 @@ +use std::path::PathBuf; + +use revive_solc_json_interface::SolcStandardJsonInput; + +use crate::test_utils::build_yul_standard_json; + +#[test] +fn standard_json_yul_solc() { + let solc_input = SolcStandardJsonInput::try_from(Some( + PathBuf::from("src/tests/data/standard_json/yul_solc.json").as_path(), + )) + .unwrap(); + let solc_output = build_yul_standard_json(solc_input).unwrap(); + + assert!(!solc_output + .contracts + .get("Test") + .expect("The `Test` contract is missing") + .get("Return") + .expect("The `Return` contract is missing") + .evm + .as_ref() + .expect("The `evm` field is missing") + .bytecode + .as_ref() + .expect("The `bytecode` field is missing") + .object + .is_empty()) +} + +#[test] +fn standard_json_yul_solc_urls() { + let solc_input = SolcStandardJsonInput::try_from(Some( + PathBuf::from("src/tests/data/standard_json/yul_solc_urls.json").as_path(), + )) + .unwrap(); + let solc_output = build_yul_standard_json(solc_input).unwrap(); + + assert!(!solc_output + .contracts + .get("Test") + .expect("The `Test` contract is missing") + .get("Return") + .expect("The `Return` contract is missing") + .evm + .as_ref() + .expect("The `evm` field is missing") + .bytecode + .as_ref() + 
.expect("The `bytecode` field is missing") + .object + .is_empty()) +} diff --git a/crates/resolc/src/tests/unsupported_opcodes.rs b/crates/resolc/src/tests/unit/unsupported_opcodes.rs similarity index 63% rename from crates/resolc/src/tests/unsupported_opcodes.rs rename to crates/resolc/src/tests/unit/unsupported_opcodes.rs index b6479a0..6322ea7 100644 --- a/crates/resolc/src/tests/unsupported_opcodes.rs +++ b/crates/resolc/src/tests/unit/unsupported_opcodes.rs @@ -1,13 +1,11 @@ //! The Solidity compiler unit tests for unsupported opcodes. -#![cfg(test)] - -use std::collections::BTreeMap; +use crate::test_utils::{build_solidity, build_yul, sources}; #[test] #[should_panic(expected = "The `CODECOPY` instruction is not supported")] fn codecopy_yul_runtime() { - let source_code = r#" + let code = r#" // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; @@ -22,22 +20,15 @@ contract FixedCodeCopy { return code; } -} - "#; +}"#; - let mut sources = BTreeMap::new(); - sources.insert("test.sol".to_owned(), source_code.to_owned()); - - super::build_solidity( - sources, - BTreeMap::new(), - None, - revive_llvm_context::OptimizerSettings::cycles(), - ) - .expect("Test failure"); + build_solidity(sources(&[("test.sol", code)])).unwrap(); } -pub const CALLCODE_TEST_SOURCE: &str = r#" +#[test] +#[should_panic(expected = "The `CALLCODE` instruction is not supported")] +fn callcode_yul() { + let solidity = r#" // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; @@ -57,28 +48,15 @@ contract CallcodeTest { return success; } -} - "#; +}"#; -#[test] -#[should_panic(expected = "The `CALLCODE` instruction is not supported")] -fn callcode_yul() { - let mut sources = BTreeMap::new(); - sources.insert("test.sol".to_owned(), CALLCODE_TEST_SOURCE.to_owned()); - - super::build_solidity( - sources, - BTreeMap::new(), - None, - revive_llvm_context::OptimizerSettings::cycles(), - ) - .expect("Test failure"); + build_solidity(sources(&[("test.sol", solidity)])).unwrap(); } #[test] 
#[should_panic(expected = "The `PC` instruction is not supported")] fn pc_yul() { - let source_code = r#" + let code = r#" object "ProgramCounter" { code { datacopy(0, dataoffset("ProgramCounter_deployed"), datasize("ProgramCounter_deployed")) @@ -94,13 +72,15 @@ object "ProgramCounter" { sstore(0, pcValue) } } -} - "#; +}"#; - super::build_yul(source_code).expect("Test failure"); + build_yul(&[("test.sol", code)]).unwrap(); } -pub const EXTCODECOPY_TEST_SOURCE: &str = r#" +#[test] +#[should_panic(expected = "The `EXTCODECOPY` instruction is not supported")] +fn extcodecopy_yul() { + let code = r#" // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; @@ -114,25 +94,15 @@ contract ExternalCodeCopy { return code; } +}"#; + + build_solidity(sources(&[("test.sol", code)])).unwrap(); } - "#; #[test] -#[should_panic(expected = "The `EXTCODECOPY` instruction is not supported")] -fn extcodecopy_yul() { - let mut sources = BTreeMap::new(); - sources.insert("test.sol".to_owned(), EXTCODECOPY_TEST_SOURCE.to_owned()); - - super::build_solidity( - sources, - BTreeMap::new(), - None, - revive_llvm_context::OptimizerSettings::cycles(), - ) - .expect("Test failure"); -} - -pub const SELFDESTRUCT_TEST_SOURCE: &str = r#" +#[should_panic(expected = "The `SELFDESTRUCT` instruction is not supported")] +fn selfdestruct_yul() { + let solidity = r#" // SPDX-License-Identifier: MIT pragma solidity ^0.8.0; @@ -147,20 +117,7 @@ contract MinimalDestructible { require(msg.sender == owner, "Only the owner can call this function."); selfdestruct(owner); } -} - "#; +}"#; -#[test] -#[should_panic(expected = "The `SELFDESTRUCT` instruction is not supported")] -fn selfdestruct_yul() { - let mut sources = BTreeMap::new(); - sources.insert("test.sol".to_owned(), SELFDESTRUCT_TEST_SOURCE.to_owned()); - - super::build_solidity( - sources, - BTreeMap::new(), - None, - revive_llvm_context::OptimizerSettings::cycles(), - ) - .expect("Test failure"); + build_solidity(sources(&[("test.sol", 
solidity)])).unwrap(); } diff --git a/crates/solc-json-interface/Cargo.toml b/crates/solc-json-interface/Cargo.toml index 4e7093c..cc7a06e 100644 --- a/crates/solc-json-interface/Cargo.toml +++ b/crates/solc-json-interface/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "revive-solc-json-interface" -version = "0.2.0" +version = "0.3.0" authors.workspace = true license.workspace = true edition.workspace = true @@ -17,6 +17,7 @@ resolc = [] # The resolc binary adds a bunch of custom fields to the format revive-common = { workspace = true } anyhow = { workspace = true } +hex = { workspace = true } rayon = { workspace = true, optional = true } semver = { workspace = true } serde = { workspace = true } diff --git a/crates/solc-json-interface/src/combined_json/contract.rs b/crates/solc-json-interface/src/combined_json/contract.rs index a2f14b4..c70a5f1 100644 --- a/crates/solc-json-interface/src/combined_json/contract.rs +++ b/crates/solc-json-interface/src/combined_json/contract.rs @@ -1,72 +1,64 @@ //! The `solc --combined-json` contract. use std::collections::BTreeMap; -use std::collections::HashSet; use serde::Deserialize; use serde::Serialize; /// The contract. -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Default, Serialize, Deserialize)] #[serde(rename_all = "kebab-case")] pub struct Contract { /// The `solc` hashes output. - #[serde(skip_serializing_if = "Option::is_none")] - pub hashes: Option>, + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] + pub hashes: BTreeMap, /// The `solc` ABI output. - #[serde(skip_serializing_if = "Option::is_none")] - pub abi: Option, + #[serde(default, skip_serializing_if = "serde_json::Value::is_null")] + pub abi: serde_json::Value, /// The `solc` metadata output. #[serde(skip_serializing_if = "Option::is_none")] pub metadata: Option, /// The `solc` developer documentation output. 
- #[serde(skip_serializing_if = "Option::is_none")] - pub devdoc: Option, + #[serde(default, skip_serializing_if = "serde_json::Value::is_null")] + pub devdoc: serde_json::Value, /// The `solc` user documentation output. - #[serde(skip_serializing_if = "Option::is_none")] - pub userdoc: Option, + #[serde(default, skip_serializing_if = "serde_json::Value::is_null")] + pub userdoc: serde_json::Value, /// The `solc` storage layout output. - #[serde(skip_serializing_if = "Option::is_none")] - pub storage_layout: Option, + #[serde(default, skip_serializing_if = "serde_json::Value::is_null")] + pub storage_layout: serde_json::Value, /// The `solc` AST output. - #[serde(skip_serializing_if = "Option::is_none")] - pub ast: Option, + #[serde(default, skip_serializing_if = "serde_json::Value::is_null")] + pub ast: serde_json::Value, /// The `solc` assembly output. - #[serde(skip_serializing_if = "Option::is_none")] - pub asm: Option, + #[serde(default, skip_serializing_if = "serde_json::Value::is_null")] + pub asm: serde_json::Value, + + /// LLVM-generated assembly. + #[cfg(feature = "resolc")] + #[serde(default, skip_serializing_if = "Option::is_none", skip_deserializing)] + pub assembly: Option, /// The `solc` hexadecimal binary output. - #[serde(skip_serializing_if = "Option::is_none")] + #[serde(default, skip_serializing_if = "Option::is_none", skip_deserializing)] pub bin: Option, /// The `solc` hexadecimal binary runtime part output. - #[serde(skip_serializing_if = "Option::is_none")] + #[serde(default, skip_serializing_if = "Option::is_none", skip_deserializing)] pub bin_runtime: Option, - /// The factory dependencies. - #[serde(skip_serializing_if = "Option::is_none")] - pub factory_deps: Option>, - /// The missing libraries. - #[serde(skip_serializing_if = "Option::is_none")] - pub missing_libraries: Option>, -} -impl Contract { - /// Returns the signature hash of the specified contract entry. 
- /// # Panics - /// If the hashes have not been requested in the `solc` call. - pub fn entry(&self, entry: &str) -> u32 { - self.hashes - .as_ref() - .expect("Always exists") - .iter() - .find_map(|(contract_entry, hash)| { - if contract_entry.starts_with(entry) { - Some( - u32::from_str_radix(hash.as_str(), revive_common::BASE_HEXADECIMAL) - .expect("Test hash is always valid"), - ) - } else { - None - } - }) - .unwrap_or_else(|| panic!("Entry `{entry}` not found")) - } + /// The unlinked factory dependencies. + #[cfg(feature = "resolc")] + #[serde(default, skip_deserializing)] + pub factory_deps_unlinked: std::collections::BTreeSet, + /// The factory dependencies. + #[cfg(feature = "resolc")] + #[serde(default, skip_deserializing)] + pub factory_deps: BTreeMap, + /// The missing libraries. + #[cfg(feature = "resolc")] + #[serde(default, skip_deserializing)] + pub missing_libraries: std::collections::BTreeSet, + /// The binary object format. + #[cfg(feature = "resolc")] + #[serde(default, skip_deserializing)] + pub object_format: Option, } diff --git a/crates/solc-json-interface/src/combined_json/mod.rs b/crates/solc-json-interface/src/combined_json/mod.rs index 17ca906..f89e362 100644 --- a/crates/solc-json-interface/src/combined_json/mod.rs +++ b/crates/solc-json-interface/src/combined_json/mod.rs @@ -1,79 +1,51 @@ //! The `solc --combined-json` output. -pub mod contract; - use std::collections::BTreeMap; -use std::fs::File; -use std::io::Write; -use std::path::Path; use serde::Deserialize; use serde::Serialize; use self::contract::Contract; +pub mod contract; +pub mod selector; + /// The `solc --combined-json` output. #[derive(Debug, Serialize, Deserialize)] pub struct CombinedJson { /// The contract entries. pub contracts: BTreeMap, /// The list of source files. 
- #[serde(rename = "sourceList")] - #[serde(skip_serializing_if = "Option::is_none")] - pub source_list: Option>, + #[serde(default, rename = "sourceList", skip_serializing_if = "Vec::is_empty")] + pub source_list: Vec, /// The source code extra data, including the AST. - #[serde(skip_serializing_if = "Option::is_none")] - pub sources: Option, + #[serde(default, skip_serializing_if = "serde_json::Value::is_null")] + pub sources: serde_json::Value, /// The `solc` compiler version. pub version: String, /// The `resolc` compiler version. #[serde(skip_serializing_if = "Option::is_none")] - pub revive_version: Option, + #[cfg(feature = "resolc")] + pub resolc_version: Option, } +#[cfg(feature = "resolc")] impl CombinedJson { - /// Returns the signature hash of the specified contract and entry. - pub fn entry(&self, path: &str, entry: &str) -> u32 { - self.contracts - .iter() - .find_map(|(name, contract)| { - if name.starts_with(path) { - Some(contract) - } else { - None - } - }) - .expect("Always exists") - .entry(entry) - } - - /// Returns the full contract path which can be found in `combined-json` output. - pub fn get_full_path(&self, name: &str) -> Option { - self.contracts.iter().find_map(|(path, _value)| { - if let Some(last_slash_position) = path.rfind('/') { - if let Some(colon_position) = path.rfind(':') { - if &path[last_slash_position + 1..colon_position] == name { - return Some(path.to_owned()); - } - } - } - - None - }) - } - - /// Removes EVM artifacts to prevent their accidental usage. - pub fn remove_evm(&mut self) { - for (_, contract) in self.contracts.iter_mut() { - contract.bin = None; - contract.bin_runtime = None; + /// A shortcut constructor. + pub fn new(solc_version: semver::Version, resolc_version: Option) -> Self { + Self { + contracts: BTreeMap::new(), + source_list: Vec::new(), + sources: serde_json::Value::Null, + version: solc_version.to_string(), + resolc_version, } } /// Writes the JSON to the specified directory. 
pub fn write_to_directory( self, - output_directory: &Path, + output_directory: &std::path::Path, overwrite: bool, ) -> anyhow::Result<()> { let mut file_path = output_directory.to_owned(); @@ -85,10 +57,11 @@ impl CombinedJson { ); } - File::create(&file_path) - .map_err(|error| anyhow::anyhow!("File {:?} creating error: {}", file_path, error))? - .write_all(serde_json::to_vec(&self).expect("Always valid").as_slice()) - .map_err(|error| anyhow::anyhow!("File {:?} writing error: {}", file_path, error))?; + std::fs::write( + file_path.as_path(), + serde_json::to_vec(&self).expect("Always valid").as_slice(), + ) + .map_err(|error| anyhow::anyhow!("File {file_path:?} writing: {error}"))?; Ok(()) } diff --git a/crates/solc-json-interface/src/combined_json/selector.rs b/crates/solc-json-interface/src/combined_json/selector.rs new file mode 100644 index 0000000..e4e266e --- /dev/null +++ b/crates/solc-json-interface/src/combined_json/selector.rs @@ -0,0 +1,115 @@ +//! The `solc --combined-json` expected output selection flag. + +use std::str::FromStr; + +use serde::{Deserialize, Serialize}; + +/// The solc `--combined-json` invalid selector message. +pub const MESSAGE_SELECTOR_INVALID: &str = "Invalid option to --combined-json"; + +/// The `solc --combined-json` expected output selection flag. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub enum Selector { + /// The ABI JSON. + #[serde(rename = "abi")] + ABI, + /// The function signature hashes JSON. + #[serde(rename = "hashes")] + Hashes, + /// The metadata. + #[serde(rename = "metadata")] + Metadata, + /// The developer documentation. + #[serde(rename = "devdoc")] + Devdoc, + /// The user documentation. + #[serde(rename = "userdoc")] + Userdoc, + /// The storage layout. + #[serde(rename = "storage-layout")] + StorageLayout, + /// The transient storage layout. + #[serde(rename = "transient-storage-layout")] + TransientStorageLayout, + /// The AST JSON.
+ #[serde(rename = "ast")] + AST, + /// The EVM assembly. + #[serde(rename = "asm")] + ASM, + + /// The assembly. + #[serde(rename = "assembly", skip_serializing)] + Assembly, + + /// The deploy bytecode. + #[serde(rename = "bin", skip_serializing)] + Bytecode, + /// The runtime bytecode. + #[serde(rename = "bin-runtime", skip_serializing)] + BytecodeRuntime, +} + +impl Selector { + /// Converts the comma-separated CLI argument into an array of flags. + pub fn from_cli(format: &str) -> Vec> { + format + .split(',') + .map(|flag| Self::from_str(flag.trim())) + .collect() + } + + /// Whether the selector is available in `solc`. + pub fn is_source_solc(&self) -> bool { + !matches!( + self, + Self::Assembly | Self::Bytecode | Self::BytecodeRuntime + ) + } +} + +impl FromStr for Selector { + type Err = anyhow::Error; + + fn from_str(string: &str) -> Result { + match string { + "abi" => Ok(Self::ABI), + "hashes" => Ok(Self::Hashes), + "metadata" => Ok(Self::Metadata), + "devdoc" => Ok(Self::Devdoc), + "userdoc" => Ok(Self::Userdoc), + "storage-layout" => Ok(Self::StorageLayout), + "transient-storage-layout" => Ok(Self::TransientStorageLayout), + "ast" => Ok(Self::AST), + "asm" => Ok(Self::ASM), + + "bin" => Ok(Self::Bytecode), + "bin-runtime" => Ok(Self::BytecodeRuntime), + + "assembly" => Ok(Self::Assembly), + + selector => anyhow::bail!("{MESSAGE_SELECTOR_INVALID}: {selector}"), + } + } +} + +impl std::fmt::Display for Selector { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + Self::ABI => write!(f, "abi"), + Self::Hashes => write!(f, "hashes"), + Self::Metadata => write!(f, "metadata"), + Self::Devdoc => write!(f, "devdoc"), + Self::Userdoc => write!(f, "userdoc"), + Self::StorageLayout => write!(f, "storage-layout"), + Self::TransientStorageLayout => write!(f, "transient-storage-layout"), + Self::AST => write!(f, "ast"), + Self::ASM => write!(f, "asm"), + + Self::Bytecode => write!(f, "bin"), + Self::BytecodeRuntime => write!(f, 
"bin-runtime"), + + Self::Assembly => write!(f, "assembly"), + } + } +} diff --git a/crates/solc-json-interface/src/lib.rs b/crates/solc-json-interface/src/lib.rs index 99588bb..b6d2bee 100644 --- a/crates/solc-json-interface/src/lib.rs +++ b/crates/solc-json-interface/src/lib.rs @@ -3,8 +3,13 @@ //! //! [0]: https://docs.soliditylang.org/en/latest/using-the-compiler.html#compiler-input-and-output-json-description +#![allow(clippy::too_many_arguments)] + pub use self::combined_json::contract::Contract as CombinedJsonContract; +pub use self::combined_json::selector::Selector as CombinedJsonSelector; +pub use self::combined_json::selector::MESSAGE_SELECTOR_INVALID as CombinedJsonInvalidSelectorMessage; pub use self::standard_json::input::language::Language as SolcStandardJsonInputLanguage; +pub use self::standard_json::input::settings::libraries::Libraries as SolcStandardJsonInputSettingsLibraries; pub use self::standard_json::input::settings::metadata::Metadata as SolcStandardJsonInputSettingsMetadata; pub use self::standard_json::input::settings::metadata_hash::MetadataHash as SolcStandardJsonInputSettingsMetadataHash; pub use self::standard_json::input::settings::optimizer::yul_details::YulDetails as SolcStandardJsonInputSettingsYulOptimizerDetails; @@ -16,17 +21,19 @@ pub use self::standard_json::input::settings::polkavm::PolkaVM as SolcStandardJs pub use self::standard_json::input::settings::selection::file::flag::Flag as SolcStandardJsonInputSettingsSelectionFileFlag; pub use self::standard_json::input::settings::selection::file::File as SolcStandardJsonInputSettingsSelectionFile; pub use self::standard_json::input::settings::selection::Selection as SolcStandardJsonInputSettingsSelection; +#[cfg(feature = "resolc")] +pub use self::standard_json::input::settings::warning::Warning as ResolcWarning; pub use self::standard_json::input::settings::Settings as SolcStandardJsonInputSettings; pub use self::standard_json::input::source::Source as 
SolcStandardJsonInputSource; pub use self::standard_json::input::Input as SolcStandardJsonInput; pub use self::standard_json::output::contract::evm::bytecode::Bytecode as SolcStandardJsonOutputContractEVMBytecode; pub use self::standard_json::output::contract::evm::EVM as SolcStandardJsonOutputContractEVM; pub use self::standard_json::output::contract::Contract as SolcStandardJsonOutputContract; -pub use self::standard_json::output::Output as SolcStandardJsonOutput; #[cfg(feature = "resolc")] -pub use self::warning::Warning as ResolcWarning; +pub use self::standard_json::output::error::error_handler::ErrorHandler as SolcStandardJsonOutputErrorHandler; +pub use self::standard_json::output::error::mapped_location::MappedLocation as SolcStandardJsonOutputErrorMappedLocation; +pub use self::standard_json::output::error::Error as SolcStandardJsonOutputError; +pub use self::standard_json::output::Output as SolcStandardJsonOutput; pub mod combined_json; pub mod standard_json; -#[cfg(feature = "resolc")] -pub mod warning; diff --git a/crates/solc-json-interface/src/standard_json/input/mod.rs b/crates/solc-json-interface/src/standard_json/input/mod.rs index 6df2037..9c4f069 100644 --- a/crates/solc-json-interface/src/standard_json/input/mod.rs +++ b/crates/solc-json-interface/src/standard_json/input/mod.rs @@ -1,29 +1,39 @@ //! The `solc --standard-json` input. 
+use std::collections::BTreeMap; +#[cfg(feature = "resolc")] +use std::collections::BTreeSet; +#[cfg(feature = "resolc")] +use std::path::Path; +#[cfg(feature = "resolc")] +use std::path::PathBuf; + +#[cfg(all(feature = "parallel", feature = "resolc"))] +use rayon::iter::{IntoParallelIterator, IntoParallelRefMutIterator, ParallelIterator}; +use serde::Deserialize; +use serde::Serialize; + +#[cfg(feature = "resolc")] +use crate::standard_json::input::settings::metadata::Metadata as SolcStandardJsonInputSettingsMetadata; +#[cfg(feature = "resolc")] +use crate::standard_json::input::settings::optimizer::Optimizer as SolcStandardJsonInputSettingsOptimizer; +#[cfg(feature = "resolc")] +use crate::standard_json::input::settings::selection::Selection as SolcStandardJsonInputSettingsSelection; +#[cfg(feature = "resolc")] +use crate::SolcStandardJsonInputSettingsLibraries; +#[cfg(feature = "resolc")] +use crate::SolcStandardJsonInputSettingsPolkaVM; + +use self::language::Language; +#[cfg(feature = "resolc")] +use self::settings::warning::Warning; +use self::settings::Settings; +use self::source::Source; + pub mod language; pub mod settings; pub mod source; -use std::collections::BTreeMap; -use std::collections::BTreeSet; -use std::path::PathBuf; - -#[cfg(all(feature = "parallel", feature = "resolc"))] -use rayon::iter::{IntoParallelIterator, ParallelIterator}; -use serde::Deserialize; -use serde::Serialize; - -use crate::standard_json::input::settings::metadata::Metadata as SolcStandardJsonInputSettingsMetadata; -use crate::standard_json::input::settings::optimizer::Optimizer as SolcStandardJsonInputSettingsOptimizer; -use crate::standard_json::input::settings::selection::Selection as SolcStandardJsonInputSettingsSelection; -#[cfg(feature = "resolc")] -use crate::warning::Warning; -use crate::SolcStandardJsonInputSettingsPolkaVM; - -use self::language::Language; -use self::settings::Settings; -use self::source::Source; - /// The `solc --standard-json` input. 
#[derive(Clone, Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] @@ -36,93 +46,88 @@ pub struct Input { pub settings: Settings, /// The suppressed warnings. #[cfg(feature = "resolc")] - #[serde(skip_serializing)] - pub suppressed_warnings: Option>, + #[serde(default, skip_serializing)] + pub suppressed_warnings: Vec, } +#[cfg(feature = "resolc")] impl Input { - /// A shortcut constructor from stdin. - pub fn try_from_stdin() -> anyhow::Result { - let mut input: Self = serde_json::from_reader(std::io::BufReader::new(std::io::stdin()))?; - input - .settings - .output_selection - .get_or_insert_with(SolcStandardJsonInputSettingsSelection::default) - .extend_with_required(); - Ok(input) + /// A shortcut constructor. + /// + /// If the `path` is `None`, the input is read from the stdin. + pub fn try_from(path: Option<&Path>) -> anyhow::Result { + let input_json = match path { + Some(path) => std::fs::read_to_string(path) + .map_err(|error| anyhow::anyhow!("Standard JSON file {path:?} reading: {error}")), + None => std::io::read_to_string(std::io::stdin()) + .map_err(|error| anyhow::anyhow!("Standard JSON reading from stdin: {error}")), + }?; + revive_common::deserialize_from_str::(input_json.as_str()) + .map_err(|error| anyhow::anyhow!("Standard JSON parsing: {error}")) } /// A shortcut constructor from paths. 
- #[allow(clippy::too_many_arguments)] - pub fn try_from_paths( - language: Language, + pub fn try_from_solidity_paths( evm_version: Option, paths: &[PathBuf], - library_map: Vec, - remappings: Option>, + libraries: &[String], + remappings: BTreeSet, output_selection: SolcStandardJsonInputSettingsSelection, optimizer: SolcStandardJsonInputSettingsOptimizer, - metadata: Option, - #[cfg(feature = "resolc")] suppressed_warnings: Option>, - polkavm: Option, + metadata: SolcStandardJsonInputSettingsMetadata, + suppressed_warnings: Vec, + polkavm: SolcStandardJsonInputSettingsPolkaVM, + llvm_arguments: Vec, + detect_missing_libraries: bool, ) -> anyhow::Result { let mut paths: BTreeSet = paths.iter().cloned().collect(); - let libraries = Settings::parse_libraries(library_map)?; - for library_file in libraries.keys() { + let libraries = SolcStandardJsonInputSettingsLibraries::try_from(libraries)?; + for library_file in libraries.as_inner().keys() { paths.insert(PathBuf::from(library_file)); } - let sources = paths - .iter() - .map(|path| { - let source = Source::try_from(path.as_path()).unwrap_or_else(|error| { - panic!("Source code file {path:?} reading error: {error}") - }); - (path.to_string_lossy().to_string(), source) - }) - .collect(); + #[cfg(feature = "parallel")] + let iter = paths.into_par_iter(); // Parallel iterator + #[cfg(not(feature = "parallel"))] + let iter = paths.into_iter(); // Sequential iterator - Ok(Self { - language, + let sources = iter + .map(|path| { + let source = Source::try_read(path.as_path())?; + Ok((path.to_string_lossy().to_string(), source)) + }) + .collect::>>()?; + + Self::try_from_solidity_sources( + evm_version, sources, - settings: Settings::new( - evm_version, - libraries, - remappings, - output_selection, - optimizer, - metadata, - polkavm, - ), - #[cfg(feature = "resolc")] + libraries, + remappings, + output_selection, + optimizer, + metadata, suppressed_warnings, - }) + polkavm, + llvm_arguments, + detect_missing_libraries, + ) 
} /// A shortcut constructor from source code. /// Only for the integration test purposes. - #[cfg(feature = "resolc")] - #[allow(clippy::too_many_arguments)] - pub fn try_from_sources( + pub fn try_from_solidity_sources( evm_version: Option, - sources: BTreeMap, - libraries: BTreeMap>, - remappings: Option>, + sources: BTreeMap, + libraries: SolcStandardJsonInputSettingsLibraries, + remappings: BTreeSet, output_selection: SolcStandardJsonInputSettingsSelection, optimizer: SolcStandardJsonInputSettingsOptimizer, - metadata: Option, - suppressed_warnings: Option>, - polkavm: Option, + metadata: SolcStandardJsonInputSettingsMetadata, + suppressed_warnings: Vec, + polkavm: SolcStandardJsonInputSettingsPolkaVM, + llvm_arguments: Vec, + detect_missing_libraries: bool, ) -> anyhow::Result { - #[cfg(feature = "parallel")] - let iter = sources.into_par_iter(); // Parallel iterator - - #[cfg(not(feature = "parallel"))] - let iter = sources.into_iter(); // Sequential iterator - let sources = iter - .map(|(path, content)| (path, Source::from(content))) - .collect(); - Ok(Self { language: Language::Solidity, sources, @@ -134,13 +139,75 @@ impl Input { optimizer, metadata, polkavm, + suppressed_warnings.clone(), + llvm_arguments, + detect_missing_libraries, ), suppressed_warnings, }) } - /// Sets the necessary defaults. - pub fn normalize(&mut self) { - self.settings.normalize(); + /// A shortcut constructor from paths to Yul source files. + pub fn from_yul_paths( + paths: &[PathBuf], + libraries: SolcStandardJsonInputSettingsLibraries, + optimizer: SolcStandardJsonInputSettingsOptimizer, + llvm_options: Vec, + ) -> Self { + let sources = paths + .iter() + .map(|path| { + ( + path.to_string_lossy().to_string(), + Source::from(path.as_path()), + ) + }) + .collect(); + Self::from_yul_sources(sources, libraries, optimizer, llvm_options) + } + + /// A shortcut constructor from Yul source code. 
+ pub fn from_yul_sources( + sources: BTreeMap, + libraries: SolcStandardJsonInputSettingsLibraries, + optimizer: SolcStandardJsonInputSettingsOptimizer, + llvm_arguments: Vec, + ) -> Self { + let output_selection = SolcStandardJsonInputSettingsSelection::new_yul_validation(); + + Self { + language: Language::Yul, + sources, + settings: Settings::new( + None, + libraries, + Default::default(), + output_selection, + optimizer, + Default::default(), + Default::default(), + vec![], + llvm_arguments, + false, + ), + suppressed_warnings: vec![], + } + } + + /// Extends the output selection with another one. + pub fn extend_selection(&mut self, selection: SolcStandardJsonInputSettingsSelection) { + self.settings.extend_selection(selection); + } + + /// Tries to resolve all sources. + pub fn resolve_sources(&mut self) { + #[cfg(feature = "parallel")] + let iter = self.sources.par_iter_mut(); + #[cfg(not(feature = "parallel"))] + let iter = self.sources.iter_mut(); + + iter.for_each(|(_path, source)| { + let _ = source.try_resolve(); + }); } } diff --git a/crates/solc-json-interface/src/standard_json/input/settings/libraries.rs b/crates/solc-json-interface/src/standard_json/input/settings/libraries.rs new file mode 100644 index 0000000..878b2a4 --- /dev/null +++ b/crates/solc-json-interface/src/standard_json/input/settings/libraries.rs @@ -0,0 +1,107 @@ +//! The Solidity libraries. + +use std::collections::BTreeMap; +use std::collections::BTreeSet; + +use serde::Deserialize; +use serde::Serialize; + +/// The Solidity libraries. +#[derive(Debug, Default, Clone, Serialize, Deserialize)] +pub struct Libraries { + /// The unified representation of libraries. + #[serde(flatten)] + pub inner: BTreeMap>, +} + +impl Libraries { + /// Returns a representation of libraries suitable for the LLD linker. 
+ pub fn as_linker_symbols( + &self, + ) -> anyhow::Result> { + let mut linker_symbols = BTreeMap::new(); + for (file, contracts) in self.inner.iter() { + for (name, address) in contracts.iter() { + let path = format!("{file}:{name}"); + + let address_stripped = address.strip_prefix("0x").unwrap_or(address.as_str()); + let address_vec = hex::decode(address_stripped).map_err(|error| { + anyhow::anyhow!("Invalid address `{address}` of library `{path}`: {error}.") + })?; + let address_array: [u8; revive_common::BYTE_LENGTH_ETH_ADDRESS] = address_vec.try_into().map_err(|address_vec: Vec| { + anyhow::anyhow!( + "Incorrect size of address `{address}` of library `{path}`: expected {}, found {}.", + revive_common::BYTE_LENGTH_ETH_ADDRESS, + address_vec.len(), + ) + })?; + + linker_symbols.insert(path, address_array); + } + } + Ok(linker_symbols) + } + + /// Returns a representation of libraries suitable for filtering. + pub fn as_paths(&self) -> BTreeSet { + self.inner + .iter() + .flat_map(|(file, names)| { + names + .keys() + .map(|name| format!("{file}:{name}")) + .collect::>() + }) + .collect::>() + } + + /// Checks whether the libraries are empty. + pub fn is_empty(&self) -> bool { + self.inner.is_empty() + } + + /// Returns a reference to the inner value. + pub fn as_inner(&self) -> &BTreeMap> { + &self.inner + } + + /// Returns a mutable reference to the inner value. 
+ pub fn as_inner_mut(&mut self) -> &mut BTreeMap> { + &mut self.inner + } +} + +impl From>> for Libraries { + fn from(inner: BTreeMap>) -> Self { + Self { inner } + } +} + +impl TryFrom<&[String]> for Libraries { + type Error = anyhow::Error; + + fn try_from(arguments: &[String]) -> Result { + let mut libraries = BTreeMap::new(); + for (index, library) in arguments.iter().enumerate() { + let mut path_and_address = library.split('='); + let path = path_and_address + .next() + .ok_or_else(|| anyhow::anyhow!("Library #{index} path is missing."))?; + let mut file_and_contract = path.split(':'); + let file = file_and_contract + .next() + .ok_or_else(|| anyhow::anyhow!("Library `{path}` file name is missing."))?; + let contract = file_and_contract + .next() + .ok_or_else(|| anyhow::anyhow!("Library `{path}` contract name is missing."))?; + let address = path_and_address + .next() + .ok_or_else(|| anyhow::anyhow!("Library `{path}` address is missing."))?; + libraries + .entry(file.to_owned()) + .or_insert_with(BTreeMap::new) + .insert(contract.to_owned(), address.to_owned()); + } + Ok(Self { inner: libraries }) + } +} diff --git a/crates/solc-json-interface/src/standard_json/input/settings/mod.rs b/crates/solc-json-interface/src/standard_json/input/settings/mod.rs index e25c7e0..2f2ca73 100644 --- a/crates/solc-json-interface/src/standard_json/input/settings/mod.rs +++ b/crates/solc-json-interface/src/standard_json/input/settings/mod.rs @@ -1,21 +1,26 @@ //! The `solc --standard-json` input settings. 
+pub mod libraries; pub mod metadata; pub mod metadata_hash; pub mod optimizer; pub mod polkavm; pub mod selection; +#[cfg(feature = "resolc")] +pub mod warning; -use std::collections::BTreeMap; use std::collections::BTreeSet; use serde::Deserialize; use serde::Serialize; +use self::libraries::Libraries; use self::metadata::Metadata; use self::optimizer::Optimizer; use self::polkavm::PolkaVM; use self::selection::Selection; +#[cfg(feature = "resolc")] +use self::warning::Warning; /// The `solc --standard-json` input settings. #[derive(Clone, Debug, Serialize, Deserialize)] @@ -25,14 +30,14 @@ pub struct Settings { #[serde(skip_serializing_if = "Option::is_none")] pub evm_version: Option, /// The linker library addresses. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub libraries: Option>>, + #[serde(default, skip_serializing_if = "Libraries::is_empty")] + pub libraries: Libraries, /// The sorted list of remappings. - #[serde(skip_serializing_if = "Option::is_none")] - pub remappings: Option>, + #[serde(default, skip_serializing_if = "BTreeSet::is_empty")] + pub remappings: BTreeSet, /// The output selection filters. - #[serde(skip_serializing_if = "Option::is_none")] - pub output_selection: Option, + #[serde(default)] + pub output_selection: Selection, /// Whether to compile via IR. Only for testing with solc >=0.8.13. #[serde( rename = "viaIR", @@ -43,67 +48,68 @@ pub struct Settings { /// The optimizer settings. pub optimizer: Optimizer, /// The metadata settings. - #[serde(skip_serializing_if = "Option::is_none")] - pub metadata: Option, + #[serde(default)] + pub metadata: Metadata, /// The resolc custom PolkaVM settings. - #[serde(skip_serializing_if = "Option::is_none")] - pub polkavm: Option, + #[serde(default, skip_serializing)] + pub polkavm: PolkaVM, + + /// The suppressed warnings. + #[cfg(feature = "resolc")] + #[serde(default, skip_serializing)] + pub suppressed_warnings: Vec, + + /// The extra LLVM arguments. 
+ #[cfg(feature = "resolc")] + #[serde(default, alias = "LLVMOptions", skip_serializing)] + pub llvm_arguments: Vec, + + /// Whether to enable the missing libraries detection mode. + /// Deprecated in favor of post-compile-time linking. + #[serde(default, rename = "detectMissingLibraries", skip_serializing)] + pub detect_missing_libraries: bool, } +#[cfg(feature = "resolc")] impl Settings { /// A shortcut constructor. pub fn new( evm_version: Option, - libraries: BTreeMap>, - remappings: Option>, + libraries: Libraries, + remappings: BTreeSet, output_selection: Selection, optimizer: Optimizer, - metadata: Option, - polkavm: Option, + metadata: Metadata, + polkavm: PolkaVM, + suppressed_warnings: Vec, + llvm_arguments: Vec, + detect_missing_libraries: bool, ) -> Self { Self { evm_version, - libraries: Some(libraries), + libraries, remappings, - output_selection: Some(output_selection), + output_selection, optimizer, metadata, via_ir: Some(true), polkavm, + suppressed_warnings, + llvm_arguments, + detect_missing_libraries, } } - /// Sets the necessary defaults. - pub fn normalize(&mut self) { - self.polkavm = None; - self.optimizer.normalize(); + /// Extends the output selection with another one. + pub fn extend_selection(&mut self, selection: Selection) { + self.output_selection.extend(selection); } - /// Parses the library list and returns their double hashmap with path and name as keys. 
- pub fn parse_libraries( - input: Vec, - ) -> anyhow::Result>> { - let mut libraries = BTreeMap::new(); - for (index, library) in input.into_iter().enumerate() { - let mut path_and_address = library.split('='); - let path = path_and_address - .next() - .ok_or_else(|| anyhow::anyhow!("The library #{} path is missing", index))?; - let mut file_and_contract = path.split(':'); - let file = file_and_contract - .next() - .ok_or_else(|| anyhow::anyhow!("The library `{}` file name is missing", path))?; - let contract = file_and_contract.next().ok_or_else(|| { - anyhow::anyhow!("The library `{}` contract name is missing", path) - })?; - let address = path_and_address - .next() - .ok_or_else(|| anyhow::anyhow!("The library `{}` address is missing", path))?; - libraries - .entry(file.to_owned()) - .or_insert_with(BTreeMap::new) - .insert(contract.to_owned(), address.to_owned()); - } - Ok(libraries) + /// Returns flags that are going to be automatically added by the compiler, + /// but were not explicitly requested by the user. + /// + /// Afterwards, the flags are used to prune JSON output before returning it. + pub fn selection_to_prune(&self) -> Selection { + self.output_selection.selection_to_prune() } } diff --git a/crates/solc-json-interface/src/standard_json/input/settings/optimizer/details.rs b/crates/solc-json-interface/src/standard_json/input/settings/optimizer/details.rs index 2504f17..b6e44bc 100644 --- a/crates/solc-json-interface/src/standard_json/input/settings/optimizer/details.rs +++ b/crates/solc-json-interface/src/standard_json/input/settings/optimizer/details.rs @@ -6,7 +6,7 @@ use serde::Serialize; use crate::standard_json::input::settings::optimizer::yul_details::YulDetails; /// The `solc --standard-json` input settings optimizer details. -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct Details { /// Whether the pass is enabled. 
@@ -40,7 +40,6 @@ pub struct Details { impl Details { /// A shortcut constructor. - #[allow(clippy::too_many_arguments)] pub fn new( peephole: Option, inliner: Option, diff --git a/crates/solc-json-interface/src/standard_json/input/settings/optimizer/mod.rs b/crates/solc-json-interface/src/standard_json/input/settings/optimizer/mod.rs index 817c32f..3d5c146 100644 --- a/crates/solc-json-interface/src/standard_json/input/settings/optimizer/mod.rs +++ b/crates/solc-json-interface/src/standard_json/input/settings/optimizer/mod.rs @@ -15,35 +15,31 @@ pub struct Optimizer { /// Whether the optimizer is enabled. pub enabled: bool, /// The optimization mode string. - #[serde(skip_serializing_if = "Option::is_none")] - pub mode: Option, + #[serde(default = "Optimizer::default_mode", skip_serializing)] + pub mode: char, /// The `solc` optimizer details. - #[serde(skip_serializing_if = "Option::is_none")] - pub details: Option
, - /// Whether to try to recompile with -Oz if the bytecode is too large. - #[serde(skip_serializing_if = "Option::is_none")] - pub fallback_to_optimizing_for_size: Option, + #[serde(default)] + pub details: Details, } impl Optimizer { /// A shortcut constructor. - pub fn new( - enabled: bool, - mode: Option, - version: &semver::Version, - fallback_to_optimizing_for_size: bool, - ) -> Self { + pub fn new(enabled: bool, mode: char, details: Details) -> Self { Self { enabled, mode, - details: Some(Details::disabled(version)), - fallback_to_optimizing_for_size: Some(fallback_to_optimizing_for_size), + details, } } - /// Sets the necessary defaults. - pub fn normalize(&mut self) { - self.mode = None; - self.fallback_to_optimizing_for_size = None; + /// The default optimization mode. + pub fn default_mode() -> char { + 'z' + } +} + +impl Default for Optimizer { + fn default() -> Self { + Self::new(true, Self::default_mode(), Details::default()) } } diff --git a/crates/solc-json-interface/src/standard_json/input/settings/optimizer/yul_details.rs b/crates/solc-json-interface/src/standard_json/input/settings/optimizer/yul_details.rs index 999a6da..b778bf8 100644 --- a/crates/solc-json-interface/src/standard_json/input/settings/optimizer/yul_details.rs +++ b/crates/solc-json-interface/src/standard_json/input/settings/optimizer/yul_details.rs @@ -4,7 +4,7 @@ use serde::Deserialize; use serde::Serialize; /// The `solc --standard-json` input settings optimizer YUL details. -#[derive(Clone, Debug, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct YulDetails { /// Whether the stack allocation pass is enabled. 
diff --git a/crates/solc-json-interface/src/standard_json/input/settings/selection/file/flag.rs b/crates/solc-json-interface/src/standard_json/input/settings/selection/file/flag.rs index 9e38327..f4c6611 100644 --- a/crates/solc-json-interface/src/standard_json/input/settings/selection/file/flag.rs +++ b/crates/solc-json-interface/src/standard_json/input/settings/selection/file/flag.rs @@ -30,6 +30,9 @@ pub enum Flag { /// The Yul IR. #[serde(rename = "irOptimized")] Yul, + /// The EVM bytecode. + #[serde(rename = "evm")] + EVM, /// The EVM legacy assembly JSON. #[serde(rename = "evm.legacyAssembly")] EVMLA, @@ -45,22 +48,23 @@ pub enum Flag { Ir, } -impl std::fmt::Display for Flag { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::ABI => write!(f, "abi"), - Self::Metadata => write!(f, "metadata"), - Self::Devdoc => write!(f, "devdoc"), - Self::Userdoc => write!(f, "userdoc"), - Self::MethodIdentifiers => write!(f, "evm.methodIdentifiers"), - Self::StorageLayout => write!(f, "storageLayout"), - Self::AST => write!(f, "ast"), - Self::Yul => write!(f, "irOptimized"), - Self::EVMLA => write!(f, "evm.legacyAssembly"), - Self::EVMBC => write!(f, "evm.bytecode"), - Self::EVMDBC => write!(f, "evm.deployedBytecode"), - Self::Assembly => write!(f, "evm.assembly"), - Self::Ir => write!(f, "ir"), - } - } -} +//impl std::fmt::Display for Flag { +// fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +// match self { +// Self::ABI => write!(f, "abi"), +// Self::Metadata => write!(f, "metadata"), +// Self::Devdoc => write!(f, "devdoc"), +// Self::Userdoc => write!(f, "userdoc"), +// Self::MethodIdentifiers => write!(f, "evm.methodIdentifiers"), +// Self::StorageLayout => write!(f, "storageLayout"), +// Self::AST => write!(f, "ast"), +// Self::Yul => write!(f, "irOptimized"), +// Self::EVM => write!(f, "evm"), +// Self::EVMLA => write!(f, "evm.legacyAssembly"), +// Self::EVMBC => write!(f, "evm.bytecode"), +// 
Self::EVMDBC => write!(f, "evm.deployedBytecode"), +// Self::Assembly => write!(f, "evm.assembly"), +// Self::Ir => write!(f, "ir"), +// } +// } +//} diff --git a/crates/solc-json-interface/src/standard_json/input/settings/selection/file/mod.rs b/crates/solc-json-interface/src/standard_json/input/settings/selection/file/mod.rs index 945c47c..5342603 100644 --- a/crates/solc-json-interface/src/standard_json/input/settings/selection/file/mod.rs +++ b/crates/solc-json-interface/src/standard_json/input/settings/selection/file/mod.rs @@ -13,37 +13,56 @@ use self::flag::Flag as SelectionFlag; #[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)] pub struct File { /// The per-file output selections. - #[serde(rename = "", skip_serializing_if = "Option::is_none")] - pub per_file: Option>, + #[serde(default, rename = "", skip_serializing_if = "HashSet::is_empty")] + pub per_file: HashSet, /// The per-contract output selections. - #[serde(rename = "*", skip_serializing_if = "Option::is_none")] - pub per_contract: Option>, + #[serde(default, rename = "*", skip_serializing_if = "HashSet::is_empty")] + pub per_contract: HashSet, } impl File { + /// A shortcut constructor. + pub fn new(flags: Vec) -> Self { + let mut per_file = HashSet::new(); + let mut per_contract = HashSet::new(); + for flag in flags.into_iter() { + match flag { + SelectionFlag::AST => { + per_file.insert(SelectionFlag::AST); + } + flag => { + per_contract.insert(flag); + } + } + } + Self { + per_file, + per_contract, + } + } /// Creates the selection required for production compilation (excludes EVM bytecode). 
pub fn new_required() -> Self { Self { - per_file: Some(HashSet::from_iter([SelectionFlag::AST])), - per_contract: Some(HashSet::from_iter([ + per_file: HashSet::from_iter([SelectionFlag::AST]), + per_contract: HashSet::from_iter([ SelectionFlag::MethodIdentifiers, SelectionFlag::Metadata, SelectionFlag::Yul, - ])), + ]), } } /// Creates the selection required for test compilation (includes EVM bytecode). pub fn new_required_for_tests() -> Self { Self { - per_file: Some(HashSet::from_iter([SelectionFlag::AST])), - per_contract: Some(HashSet::from_iter([ + per_file: HashSet::from_iter([SelectionFlag::AST]), + per_contract: HashSet::from_iter([ SelectionFlag::EVMBC, SelectionFlag::EVMDBC, SelectionFlag::MethodIdentifiers, SelectionFlag::Metadata, SelectionFlag::Yul, - ])), + ]), } } @@ -51,48 +70,59 @@ impl File { pub fn extend_with_required(&mut self) -> &mut Self { let required = Self::new_required(); - self.per_file - .get_or_insert_with(HashSet::default) - .extend(required.per_file.unwrap_or_default()); - self.per_contract - .get_or_insert_with(HashSet::default) - .extend(required.per_contract.unwrap_or_default()); + self.per_file.extend(required.per_file); + self.per_contract.extend(required.per_contract); self } -} -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn production_excludes_evm_bytecode() { - let selection = File::new_required(); - let per_contract = selection.per_contract.unwrap(); - - // Production should NOT include EVM bytecode flags - assert!(!per_contract.contains(&SelectionFlag::EVMBC)); - assert!(!per_contract.contains(&SelectionFlag::EVMDBC)); - - // But should include other required flags - assert!(per_contract.contains(&SelectionFlag::MethodIdentifiers)); - assert!(per_contract.contains(&SelectionFlag::Metadata)); - assert!(per_contract.contains(&SelectionFlag::Yul)); + /// Extends the output selection with another one. 
+ pub fn extend(&mut self, other: Self) -> &mut Self { + self.per_file.extend(other.per_file); + self.per_contract.extend(other.per_contract); + self } - #[test] - fn tests_include_evm_bytecode() { - let selection = File::new_required_for_tests(); - let per_contract = selection.per_contract.unwrap(); + /// Returns flags that are going to be automatically added by the compiler, + /// but were not explicitly requested by the user. + /// + /// Afterwards, the flags are used to prune JSON output before returning it. + pub fn selection_to_prune(&self) -> Self { + let required_per_file = vec![SelectionFlag::AST]; + let required_per_contract = vec![ + SelectionFlag::MethodIdentifiers, + SelectionFlag::Metadata, + SelectionFlag::Yul, + ]; - // Tests should include EVM bytecode flags - assert!(per_contract.contains(&SelectionFlag::EVMBC)); - assert!(per_contract.contains(&SelectionFlag::EVMDBC)); + let mut unset_per_file = HashSet::with_capacity(required_per_file.len()); + let mut unset_per_contract = HashSet::with_capacity(required_per_contract.len()); - // And should also include other required flags - assert!(per_contract.contains(&SelectionFlag::MethodIdentifiers)); - assert!(per_contract.contains(&SelectionFlag::Metadata)); - assert!(per_contract.contains(&SelectionFlag::Yul)); + for flag in required_per_file { + if !self.per_file.contains(&flag) { + unset_per_file.insert(flag); + } + } + for flag in required_per_contract { + if !self.per_contract.contains(&flag) { + unset_per_contract.insert(flag); + } + } + Self { + per_file: unset_per_file, + per_contract: unset_per_contract, + } + } + /// Whether the flag is requested. + pub fn contains(&self, flag: &SelectionFlag) -> bool { + match flag { + flag @ SelectionFlag::AST => self.per_file.contains(flag), + flag => self.per_contract.contains(flag), + } + } + + /// Checks whether the selection is empty. 
+ pub fn is_empty(&self) -> bool { + self.per_file.is_empty() && self.per_contract.is_empty() } } diff --git a/crates/solc-json-interface/src/standard_json/input/settings/selection/mod.rs b/crates/solc-json-interface/src/standard_json/input/settings/selection/mod.rs index f496759..7343472 100644 --- a/crates/solc-json-interface/src/standard_json/input/settings/selection/mod.rs +++ b/crates/solc-json-interface/src/standard_json/input/settings/selection/mod.rs @@ -7,24 +7,33 @@ use std::collections::BTreeMap; use serde::Deserialize; use serde::Serialize; +use self::file::flag::Flag; use self::file::File as FileSelection; /// The `solc --standard-json` output selection. #[derive(Clone, Debug, Serialize, Deserialize, Default, PartialEq)] pub struct Selection { /// Only the 'all' wildcard is available for robustness reasons. - #[serde(rename = "*", skip_serializing_if = "Option::is_none")] - all: Option, + #[serde(default, rename = "*", skip_serializing_if = "FileSelection::is_empty")] + all: FileSelection, #[serde(skip_serializing_if = "BTreeMap::is_empty", flatten)] pub files: BTreeMap, } impl Selection { + /// Creates the selection with arbitrary flags. + pub fn new(flags: Vec) -> Self { + Self { + all: FileSelection::new(flags), + files: Default::default(), + } + } + /// Creates the selection required by our compilation process. pub fn new_required() -> Self { Self { - all: Some(FileSelection::new_required()), + all: FileSelection::new_required(), files: BTreeMap::new(), } } @@ -32,76 +41,35 @@ impl Selection { /// Creates the selection required for test compilation (includes EVM bytecode). pub fn new_required_for_tests() -> Self { Self { - all: Some(FileSelection::new_required_for_tests()), + all: FileSelection::new_required_for_tests(), files: BTreeMap::new(), } } - /// Extends the user's output selection with flag required by our compilation process. 
- pub fn extend_with_required(&mut self) -> &mut Self { - self.all - .get_or_insert_with(FileSelection::new_required) - .extend_with_required(); - for (_, v) in self.files.iter_mut() { - v.extend_with_required(); - } + /// Creates the selection required by Yul validation process. + pub fn new_yul_validation() -> Self { + Self::new(vec![Flag::EVM]) + } + + /// Extends the output selection with another one. + pub fn extend(&mut self, other: Self) -> &mut Self { + self.all.extend(other.all); self } -} -#[cfg(test)] -mod test { - use std::collections::BTreeMap; - - use crate::SolcStandardJsonInputSettingsSelectionFile; - - use super::Selection; - - #[test] - fn per_file() { - let init = Selection { - all: None, - files: BTreeMap::from([( - "Test".to_owned(), - SolcStandardJsonInputSettingsSelectionFile::new_required(), - )]), - }; - - let deser = serde_json::to_string(&init) - .and_then(|string| serde_json::from_str(&string)) - .unwrap(); - - assert_eq!(init, deser) + /// Returns flags that are going to be automatically added by the compiler, + /// but were not explicitly requested by the user. + /// + /// Afterwards, the flags are used to prune JSON output before returning it. 
+ pub fn selection_to_prune(&self) -> Self { + Self { + all: self.all.selection_to_prune(), + files: Default::default(), + } } - #[test] - fn all() { - let init = Selection { - all: Some(SolcStandardJsonInputSettingsSelectionFile::new_required()), - files: BTreeMap::new(), - }; - - let deser = serde_json::to_string(&init) - .and_then(|string| serde_json::from_str(&string)) - .unwrap(); - - assert_eq!(init, deser) - } - - #[test] - fn all_and_override() { - let init = Selection { - all: Some(SolcStandardJsonInputSettingsSelectionFile::new_required()), - files: BTreeMap::from([( - "Test".to_owned(), - SolcStandardJsonInputSettingsSelectionFile::new_required(), - )]), - }; - - let deser = serde_json::to_string(&init) - .and_then(|string| serde_json::from_str(&string)) - .unwrap(); - - assert_eq!(init, deser) + /// Whether the flag is requested. + pub fn contains(&self, flag: &Flag) -> bool { + self.all.contains(flag) } } diff --git a/crates/solc-json-interface/src/standard_json/input/settings/warning.rs b/crates/solc-json-interface/src/standard_json/input/settings/warning.rs new file mode 100644 index 0000000..42d31af --- /dev/null +++ b/crates/solc-json-interface/src/standard_json/input/settings/warning.rs @@ -0,0 +1,93 @@ +//! `resolc` custom compiler warnings. +//! +//! The revive compiler adds warnings only applicable when compilng +//! to the revive stack on Polkadot to the output. + +use std::collections::BTreeMap; +use std::str::FromStr; + +use serde::Deserialize; +use serde::Serialize; + +use crate::standard_json::output::error::source_location::SourceLocation; +use crate::SolcStandardJsonInputSource; +use crate::SolcStandardJsonOutputError; + +// The `resolc` custom compiler warning. +#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Hash)] +pub enum Warning { + /// The `
`'s `send` and `transfer` methods usage warning. + SendAndTransfer, + /// The `origin` instruction usage warning. + TxOrigin, +} + +impl Warning { + /// Converts string arguments into an array of warnings. + pub fn try_from_strings(strings: &[String]) -> Result, anyhow::Error> { + strings + .iter() + .map(|string| Self::from_str(string)) + .collect() + } + + /// The displayed warning messages. + pub fn as_message(&self) -> &'static str { + match self { + Self::SendAndTransfer => { + r#" +Warning: It looks like you are using '
.send/transfer()'. +Using '
.send/transfer()' is deprecated and strongly discouraged! +The resolc compiler uses a heuristic to detect '
.send/transfer()' calls, +which disables call re-entrancy and supplies all remaining gas instead of the 2300 gas stipend. +However, detection is not guaranteed. You are advised to carefully test this, employ +re-entrancy guards or use the withdrawal pattern instead! +Learn more on https://docs.soliditylang.org/en/latest/security-considerations.html#reentrancy +and https://docs.soliditylang.org/en/latest/common-patterns.html#withdrawal-from-contracts +"# + } + Self::TxOrigin => { + r#" +Warning: You are checking for 'tx.origin' in your code, which might lead to unexpected behavior. +Polkadot comes with native account abstraction support, and therefore the initiator of a +transaction might be different from the contract calling your code. It is highly recommended NOT +to rely on tx.origin, but use msg.sender instead. +"# + } + } + } + + pub fn as_error( + &self, + node: Option<&str>, + id_paths: &BTreeMap, + sources: &BTreeMap, + ) -> SolcStandardJsonOutputError { + SolcStandardJsonOutputError::new_warning( + self.as_message(), + node.and_then(|node| SourceLocation::try_from_ast(node, id_paths)), + Some(sources), + ) + } +} + +impl FromStr for Warning { + type Err = anyhow::Error; + + fn from_str(string: &str) -> Result { + match string { + "sendandtransfer" => Ok(Self::SendAndTransfer), + "txorigin" => Ok(Self::TxOrigin), + _ => Err(anyhow::anyhow!("Invalid warning: {}", string)), + } + } +} + +impl std::fmt::Display for Warning { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + match self { + Self::SendAndTransfer => write!(f, "sendandtransfer"), + Self::TxOrigin => write!(f, "txorigin"), + } + } +} diff --git a/crates/solc-json-interface/src/standard_json/input/source.rs b/crates/solc-json-interface/src/standard_json/input/source.rs index d716510..ca230c5 100644 --- a/crates/solc-json-interface/src/standard_json/input/source.rs +++ b/crates/solc-json-interface/src/standard_json/input/source.rs @@ -1,7 +1,7 @@ //! 
The `solc --standard-json` input source. -use std::io::Read; use std::path::Path; +use std::path::PathBuf; use serde::Deserialize; use serde::Serialize; @@ -11,30 +11,90 @@ use serde::Serialize; #[serde(rename_all = "camelCase")] pub struct Source { /// The source code file content. - pub content: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub content: Option, + /// The source file URLs. + #[serde(skip_serializing_if = "Option::is_none")] + pub urls: Option>, +} + +impl Source { + /// Reads the source from the file system. + pub fn try_read(path: &Path) -> anyhow::Result { + let content = if path.to_string_lossy() == "-" { + std::io::read_to_string(std::io::stdin()) + .map_err(|error| anyhow::anyhow!(" reading: {error}")) + } else { + std::fs::read_to_string(path) + .map_err(|error| anyhow::anyhow!("File {path:?} reading: {error}")) + }?; + + Ok(Self { + content: Some(content), + urls: None, + }) + } + + /// Tries to resolve the source code. + /// + /// At the moment only one URL pointing to the file system is supported. + pub fn try_resolve(&mut self) -> anyhow::Result<()> { + match (self.content.as_ref(), self.urls.as_ref()) { + (Some(_), None) => Ok(()), + (None, Some(urls)) => { + let mut errors = Vec::with_capacity(urls.len()); + for url in urls.iter() { + let url_path = PathBuf::from(url); + match Source::try_read(url_path.as_path()) { + Ok(resolved) => { + *self = resolved; + break; + } + Err(error) => errors.push(error), + } + } + if !errors.is_empty() { + anyhow::bail!( + "{}", + errors + .into_iter() + .map(|error| error.to_string()) + .collect::>() + .join("\n") + ); + } + Ok(()) + } + (Some(_), Some(_)) => anyhow::bail!("Both `content` and `urls` cannot be set."), + (None, None) => anyhow::bail!("Either `content` or `urls` must be set."), + } + } + + /// Takes ownership of the source code and returns it. 
+ pub fn take_content(&mut self) -> Option { + self.content.take() + } + + /// Returns the source code reference, if the source has been previously read or resolved. + pub fn content(&self) -> Option<&str> { + self.content.as_deref() + } } impl From for Source { fn from(content: String) -> Self { - Self { content } + Self { + content: Some(content), + urls: None, + } } } -impl TryFrom<&Path> for Source { - type Error = anyhow::Error; - - fn try_from(path: &Path) -> Result { - let content = if path.to_string_lossy() == "-" { - let mut solidity_code = String::with_capacity(16384); - std::io::stdin() - .read_to_string(&mut solidity_code) - .map_err(|error| anyhow::anyhow!(" reading error: {}", error))?; - solidity_code - } else { - std::fs::read_to_string(path) - .map_err(|error| anyhow::anyhow!("File {:?} reading error: {}", path, error))? - }; - - Ok(Self { content }) +impl From<&Path> for Source { + fn from(path: &Path) -> Self { + Self { + content: None, + urls: Some(vec![path.to_string_lossy().to_string()]), + } } } diff --git a/crates/solc-json-interface/src/standard_json/output/contract/evm/mod.rs b/crates/solc-json-interface/src/standard_json/output/contract/evm/mod.rs index 6e57bdd..9ab5763 100644 --- a/crates/solc-json-interface/src/standard_json/output/contract/evm/mod.rs +++ b/crates/solc-json-interface/src/standard_json/output/contract/evm/mod.rs @@ -1,7 +1,5 @@ //! The `solc --standard-json` output contract EVM data. -pub mod bytecode; - use std::collections::BTreeMap; use serde::Deserialize; @@ -10,9 +8,11 @@ use serde::Serialize; use self::bytecode::Bytecode; use self::bytecode::DeployedBytecode; +pub mod bytecode; + /// The `solc --standard-json` output contract EVM data. /// It is replaced by PolkaVM data after compiling. -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Default, Serialize, Deserialize, Clone)] #[serde(rename_all = "camelCase")] pub struct EVM { /// The contract PolkaVM assembly code. 
@@ -28,8 +28,8 @@ pub struct EVM { #[serde(skip_serializing_if = "Option::is_none")] pub deployed_bytecode: Option, /// The contract function signatures. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub method_identifiers: Option>, + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] + pub method_identifiers: BTreeMap, } impl EVM { diff --git a/crates/solc-json-interface/src/standard_json/output/contract/mod.rs b/crates/solc-json-interface/src/standard_json/output/contract/mod.rs index 5928d07..2596864 100644 --- a/crates/solc-json-interface/src/standard_json/output/contract/mod.rs +++ b/crates/solc-json-interface/src/standard_json/output/contract/mod.rs @@ -3,7 +3,7 @@ pub mod evm; use std::collections::BTreeMap; -use std::collections::HashSet; +use std::collections::BTreeSet; use serde::Deserialize; use serde::Serialize; @@ -11,24 +11,27 @@ use serde::Serialize; use self::evm::EVM; /// The `solc --standard-json` output contract. -#[derive(Debug, Serialize, Deserialize, Clone)] +#[derive(Debug, Default, Serialize, Deserialize, Clone)] #[serde(rename_all = "camelCase")] pub struct Contract { /// The contract ABI. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub abi: Option, + #[serde(default, skip_serializing_if = "serde_json::Value::is_null")] + pub abi: serde_json::Value, /// The contract metadata. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub metadata: Option, + #[serde(default, skip_serializing_if = "serde_json::Value::is_null")] + pub metadata: serde_json::Value, /// The contract developer documentation. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub devdoc: Option, + #[serde(default, skip_serializing_if = "serde_json::Value::is_null")] + pub devdoc: serde_json::Value, /// The contract user documentation. 
- #[serde(default, skip_serializing_if = "Option::is_none")] - pub userdoc: Option, + #[serde(default, skip_serializing_if = "serde_json::Value::is_null")] + pub userdoc: serde_json::Value, /// The contract storage layout. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub storage_layout: Option, + #[serde(default, skip_serializing_if = "serde_json::Value::is_null")] + pub storage_layout: serde_json::Value, + /// The contract storage layout. + #[serde(default, skip_serializing_if = "serde_json::Value::is_null")] + pub transient_storage_layout: serde_json::Value, /// Contract's bytecode and related objects #[serde(default, skip_serializing_if = "Option::is_none")] pub evm: Option, @@ -36,15 +39,39 @@ pub struct Contract { #[serde(default, skip_serializing_if = "Option::is_none")] pub ir: Option, /// The contract optimized IR code. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub ir_optimized: Option, + #[serde(default, skip_serializing_if = "String::is_empty")] + pub ir_optimized: String, /// The contract PolkaVM bytecode hash. #[serde(default, skip_serializing_if = "Option::is_none")] pub hash: Option, + /// Unlinked factory dependencies. + #[serde(default, skip_deserializing)] + pub factory_dependencies_unlinked: BTreeSet, /// The contract factory dependencies. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub factory_dependencies: Option>, - /// The contract missing libraries. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub missing_libraries: Option>, + #[serde(default, skip_deserializing)] + pub factory_dependencies: BTreeMap, + /// Missing linkable libraries. + #[serde(default, skip_deserializing)] + pub missing_libraries: BTreeSet, + /// Binary object format. + #[serde(default, skip_deserializing)] + pub object_format: Option, +} + +impl Contract { + /// Checks if all fields are unset or empty. 
+ pub fn is_empty(&self) -> bool { + self.abi.is_null() + && self.storage_layout.is_null() + && self.transient_storage_layout.is_null() + && self.metadata.is_null() + && self.devdoc.is_null() + && self.userdoc.is_null() + && self.ir_optimized.is_empty() + && self.evm.is_none() + && self.hash.is_none() + && self.factory_dependencies_unlinked.is_empty() + && self.factory_dependencies.is_empty() + && self.missing_libraries.is_empty() + } } diff --git a/crates/solc-json-interface/src/standard_json/output/error/error_handler.rs b/crates/solc-json-interface/src/standard_json/output/error/error_handler.rs new file mode 100644 index 0000000..8729a0d --- /dev/null +++ b/crates/solc-json-interface/src/standard_json/output/error/error_handler.rs @@ -0,0 +1,74 @@ +//! Unifies error handling between different Solidity compilers. + +use std::io::Write; + +use crate::standard_json::output::error::Error; + +/// The Solidity compiler error handler trait. +/// +/// This is implemented by entities that can collect and handle errors. +pub trait ErrorHandler { + /// Returns errors as a list. + fn errors(&self) -> Vec<&Error>; + + /// Extracts warnings from the list of messages. + fn take_warnings(&mut self) -> Vec; + + /// Checks if there is at least one error. + fn has_errors(&self) -> bool { + !self.errors().is_empty() + } + + /// Collects errors into one message and bails, if there is at least one error. + fn check_errors(&self) -> anyhow::Result<()> { + if !self.has_errors() { + return Ok(()); + } + + anyhow::bail!( + "{}", + self.errors() + .iter() + .map(|error| error.to_string()) + .collect::>() + .join("\n") + ); + } + + /// Checks for errors, exiting the application if there is at least one error. 
+ fn exit_on_error(&self) { + if !self.has_errors() { + return; + } + + std::io::stderr() + .write_all( + self.errors() + .iter() + .map(|error| error.to_string()) + .collect::>() + .join("\n") + .as_bytes(), + ) + .expect("Stderr writing error"); + std::process::exit(revive_common::EXIT_CODE_FAILURE); + } + + /// Removes warnings from the list of messages and prints them to stderr. + fn take_and_write_warnings(&mut self) { + let warnings = self.take_warnings(); + if warnings.is_empty() { + return; + } + writeln!( + std::io::stderr(), + "{}", + warnings + .into_iter() + .map(|error| error.to_string()) + .collect::>() + .join("\n") + ) + .expect("Stderr writing error"); + } +} diff --git a/crates/solc-json-interface/src/standard_json/output/error/mapped_location.rs b/crates/solc-json-interface/src/standard_json/output/error/mapped_location.rs new file mode 100644 index 0000000..5f21d4b --- /dev/null +++ b/crates/solc-json-interface/src/standard_json/output/error/mapped_location.rs @@ -0,0 +1,119 @@ +//! The mapped error location. + +use crate::standard_json::output::error::source_location::SourceLocation; + +/// The mapped error location. +/// +/// It can be resolved from `solc` AST error location if the source code is provided. +#[derive(Debug)] +pub struct MappedLocation<'a> { + /// The source file path. + pub path: String, + /// The line number. + pub line: Option, + /// The column number. + pub column: Option, + /// The error area length. + pub length: Option, + /// The source code line to print. + pub source_code_line: Option<&'a str>, +} + +impl<'a> MappedLocation<'a> { + /// A shortcut constructor. + pub fn new(path: String) -> Self { + Self { + path, + line: None, + column: None, + length: None, + source_code_line: None, + } + } + + /// A shortcut constructor. 
+ pub fn new_with_location( + path: String, + line: usize, + column: usize, + length: usize, + source_code_line: Option<&'a str>, + ) -> Self { + Self { + path, + line: Some(line), + column: Some(column), + length: Some(length), + source_code_line, + } + } + + /// A shortcut constructor from `solc` AST source location. + pub fn try_from_source_location( + source_location: &SourceLocation, + source_code: Option<&'a str>, + ) -> Self { + let source_code = match source_code { + Some(source_code) => source_code, + None => return Self::new(source_location.file.to_owned()), + }; + if source_location.start <= 0 || source_location.end <= 0 { + return Self::new(source_location.file.to_owned()); + } + let start = source_location.start as usize; + let end = source_location.end as usize; + + let mut cursor = 1; + for (line, source_line) in source_code.lines().enumerate() { + let cursor_next = cursor + source_line.len() + 1; + + if cursor <= start && start <= cursor_next { + let line = line + 1; + let column = start - cursor; + let length = end - start; + return Self::new_with_location( + source_location.file.to_owned(), + line, + column, + length, + Some(source_line), + ); + } + + cursor = cursor_next; + } + + Self::new(source_location.file.to_owned()) + } +} + +impl std::fmt::Display for MappedLocation<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + let mut path = self.path.clone(); + if let Some(line) = self.line { + path.push(':'); + path.push_str(line.to_string().as_str()); + if let Some(column) = self.column { + path.push(':'); + path.push_str(column.to_string().as_str()); + if let (Some(source_code_line), Some(length)) = (self.source_code_line, self.length) + { + let line_number_length = line.to_string().len(); + writeln!(f, "{} --> {path}", " ".repeat(line_number_length))?; + writeln!(f, " {} |", " ".repeat(line_number_length))?; + writeln!(f, " {line} | {source_code_line}")?; + writeln!( + f, + " {} | {} {}", + " ".repeat(line_number_length), 
+ " ".repeat(column), + "^".repeat(std::cmp::min(length, source_code_line.len() - column)) + )?; + } + } + } else { + writeln!(f, "--> {path}")?; + } + Ok(()) + } +} diff --git a/crates/solc-json-interface/src/standard_json/output/error/mod.rs b/crates/solc-json-interface/src/standard_json/output/error/mod.rs index 95505d0..208e132 100644 --- a/crates/solc-json-interface/src/standard_json/output/error/mod.rs +++ b/crates/solc-json-interface/src/standard_json/output/error/mod.rs @@ -1,14 +1,19 @@ //! The `solc --standard-json` output error. -pub mod source_location; - -use std::str::FromStr; +use std::collections::BTreeMap; use serde::Deserialize; use serde::Serialize; +use crate::SolcStandardJsonInputSource; + +use self::mapped_location::MappedLocation; use self::source_location::SourceLocation; +pub mod error_handler; +pub mod mapped_location; +pub mod source_location; + /// The `solc --standard-json` output error. #[derive(Debug, Serialize, Deserialize, Clone)] #[serde(rename_all = "camelCase")] @@ -30,29 +35,81 @@ pub struct Error { } impl Error { - /// Returns the `ecrecover` function usage warning. - pub fn message_ecrecover(src: Option<&str>) -> Self { - let message = r#" -Warning: It looks like you are using 'ecrecover' to validate a signature of a user account. -Polkadot comes with native account abstraction support, therefore it is highly recommended NOT -to rely on the fact that the account has an ECDSA private key attached to it since accounts might -implement other signature schemes. -"# - .to_owned(); + /// The list of ignored `solc` warnings that are strictly EVM-related. + pub const IGNORED_WARNING_CODES: [&'static str; 5] = ["1699", "3860", "5159", "5574", "6417"]; + + /// A shortcut constructor. 
+ pub fn new( + r#type: &str, + message: S, + source_location: Option, + sources: Option<&BTreeMap>, + ) -> Self + where + S: std::fmt::Display, + { + let message = message.to_string(); + + let message_trimmed = message.trim(); + let mut formatted_message = if message_trimmed.starts_with(r#type) { + message_trimmed.to_owned() + } else { + format!("{type}: {message_trimmed}") + }; + formatted_message.push('\n'); + if let Some(ref source_location) = source_location { + let source_code = sources.and_then(|sources| { + sources + .get(source_location.file.as_str()) + .and_then(|source| source.content()) + }); + let mapped_location = + MappedLocation::try_from_source_location(source_location, source_code); + formatted_message.push_str(mapped_location.to_string().as_str()); + formatted_message.push('\n'); + } Self { component: "general".to_owned(), error_code: None, - formatted_message: message.clone(), + formatted_message, message, - severity: "warning".to_owned(), - source_location: src.map(SourceLocation::from_str).and_then(Result::ok), - r#type: "Warning".to_owned(), + severity: r#type.to_lowercase(), + source_location, + r#type: r#type.to_owned(), } } + /// A shortcut constructor. + pub fn new_error( + message: S, + source_location: Option, + sources: Option<&BTreeMap>, + ) -> Self + where + S: std::fmt::Display, + { + Self::new("Error", message, source_location, sources) + } + + /// A shortcut constructor. + pub fn new_warning( + message: S, + source_location: Option, + sources: Option<&BTreeMap>, + ) -> Self + where + S: std::fmt::Display, + { + Self::new("Warning", message, source_location, sources) + } + /// Returns the `
`'s `send` and `transfer` methods usage error. - pub fn message_send_and_transfer(src: Option<&str>) -> Self { + pub fn warning_send_and_transfer( + node: Option<&str>, + id_paths: &BTreeMap, + sources: &BTreeMap, + ) -> Self { let message = r#" Warning: It looks like you are using '
.send/transfer()'. Using '
.send/transfer()' is deprecated and strongly discouraged! @@ -65,43 +122,19 @@ and https://docs.soliditylang.org/en/latest/common-patterns.html#withdrawal-from "# .to_owned(); - Self { - component: "general".to_owned(), - error_code: None, - formatted_message: message.clone(), + Self::new_warning( message, - severity: "warning".to_owned(), - source_location: src.map(SourceLocation::from_str).and_then(Result::ok), - r#type: "Warning".to_owned(), - } - } - - /// Returns the `extcodesize` instruction usage warning. - pub fn message_extcodesize(src: Option<&str>) -> Self { - let message = r#" -Warning: Your code or one of its dependencies uses the 'extcodesize' instruction, which is -usually needed in the following cases: - 1. To detect whether an address belongs to a smart contract. - 2. To detect whether the deploy code execution has finished. -Polkadot comes with native account abstraction support (so smart contracts are just accounts -coverned by code), and you should avoid differentiating between contracts and non-contract -addresses. -"# - .to_owned(); - - Self { - component: "general".to_owned(), - error_code: None, - formatted_message: message.clone(), - message, - severity: "warning".to_owned(), - source_location: src.map(SourceLocation::from_str).and_then(Result::ok), - r#type: "Warning".to_owned(), - } + node.and_then(|node| SourceLocation::try_from_ast(node, id_paths)), + Some(sources), + ) } /// Returns the `origin` instruction usage warning. - pub fn message_tx_origin(src: Option<&str>) -> Self { + pub fn warning_tx_origin( + node: Option<&str>, + id_paths: &BTreeMap, + sources: &BTreeMap, + ) -> Self { let message = r#" Warning: You are checking for 'tx.origin' in your code, which might lead to unexpected behavior. Polkadot comes with native account abstraction support, and therefore the initiator of a @@ -110,15 +143,28 @@ to rely on tx.origin, but use msg.sender instead. 
"# .to_owned(); - Self { - component: "general".to_owned(), - error_code: None, - formatted_message: message.clone(), + Self::new_warning( message, - severity: "warning".to_owned(), - source_location: src.map(SourceLocation::from_str).and_then(Result::ok), - r#type: "Warning".to_owned(), - } + node.and_then(|node| SourceLocation::try_from_ast(node, id_paths)), + Some(sources), + ) + } + /// Returns the `runtimeCode` code usage error. + pub fn error_runtime_code( + node: Option<&str>, + id_paths: &BTreeMap, + sources: &BTreeMap, + ) -> Self { + let message = r#" +Deploy and runtime code are merged in PVM, accessing `type(T).runtimeCode` is not possible. +Please consider changing the functionality relying on reading runtime code to a different approach. +"#; + + Self::new_error( + message, + node.and_then(|node| SourceLocation::try_from_ast(node, id_paths)), + Some(sources), + ) } /// Appends the contract path to the message.. @@ -126,6 +172,16 @@ to rely on tx.origin, but use msg.sender instead. self.formatted_message .push_str(format!("\n--> {path}\n").as_str()); } + + /// Returns true if this is an error. + pub fn is_error(&self) -> bool { + self.severity == "error" + } + + /// Returns true if this is a warning. + pub fn is_warning(&self) -> bool { + self.severity == "warning" + } } impl std::fmt::Display for Error { diff --git a/crates/solc-json-interface/src/standard_json/output/error/source_location.rs b/crates/solc-json-interface/src/standard_json/output/error/source_location.rs index d8ac1fc..4a87f94 100644 --- a/crates/solc-json-interface/src/standard_json/output/error/source_location.rs +++ b/crates/solc-json-interface/src/standard_json/output/error/source_location.rs @@ -1,6 +1,6 @@ //! The `solc --standard-json` output error source location. 
-use std::str::FromStr; +use std::collections::BTreeMap; use serde::Deserialize; use serde::Serialize; @@ -17,11 +17,27 @@ pub struct SourceLocation { pub end: isize, } -impl FromStr for SourceLocation { - type Err = anyhow::Error; +impl SourceLocation { + /// A shortcut constructor. + pub fn new(file: String) -> Self { + Self { + file, + start: -1, + end: -1, + } + } - fn from_str(string: &str) -> Result { - let mut parts = string.split(':'); + /// A shortcut constructor. + /// + /// Please note that `start` and `end` are not line and column, + /// but absolute char offsets in the source code file. + pub fn new_with_offsets(file: String, start: isize, end: isize) -> Self { + Self { file, start, end } + } + + /// A shortcut constructor from a `solc` AST node. + pub fn try_from_ast(source: &str, id_paths: &BTreeMap) -> Option { + let mut parts = source.split(':'); let start = parts .next() .map(|string| string.parse::()) @@ -32,12 +48,15 @@ impl FromStr for SourceLocation { .map(|string| string.parse::()) .and_then(Result::ok) .unwrap_or_default(); - let file = parts.next().unwrap_or_default().to_owned(); + let path = parts + .next() + .and_then(|string| string.parse::().ok()) + .and_then(|file_id| id_paths.get(&file_id))?; - Ok(Self { - file, + Some(Self::new_with_offsets( + (*path).to_owned(), start, - end: start + length, - }) + start + length, + )) } } diff --git a/crates/solc-json-interface/src/standard_json/output/mod.rs b/crates/solc-json-interface/src/standard_json/output/mod.rs index 95779db..7f875da 100644 --- a/crates/solc-json-interface/src/standard_json/output/mod.rs +++ b/crates/solc-json-interface/src/standard_json/output/mod.rs @@ -1,33 +1,40 @@ //! The `solc --standard-json` output. 
-pub mod contract; -pub mod error; -pub mod source; - use std::collections::BTreeMap; use serde::Deserialize; use serde::Serialize; #[cfg(feature = "resolc")] -use crate::warning::Warning; +use crate::standard_json::input::settings::warning::Warning; +use crate::standard_json::output::error::error_handler::ErrorHandler; +#[cfg(feature = "resolc")] +use crate::SolcStandardJsonInputSettingsSelection; +#[cfg(feature = "resolc")] +use crate::SolcStandardJsonInputSource; +#[cfg(feature = "parallel")] +use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; use self::contract::Contract; use self::error::Error as SolcStandardJsonOutputError; use self::source::Source; +pub mod contract; +pub mod error; +pub mod source; + /// The `solc --standard-json` output. #[derive(Debug, Serialize, Deserialize, Clone, Default)] pub struct Output { /// The file-contract hashmap. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub contracts: Option>>, + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] + pub contracts: BTreeMap>, /// The source code mapping data. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub sources: Option>, + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] + pub sources: BTreeMap, /// The compilation errors and warnings. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub errors: Option>, + #[serde(default, skip_serializing_if = "Vec::is_empty")] + pub errors: Vec, /// The `solc` compiler version. #[serde(skip_serializing_if = "Option::is_none")] pub version: Option, @@ -39,33 +46,156 @@ pub struct Output { pub revive_version: Option, } +#[cfg(feature = "resolc")] impl Output { - /// Traverses the AST and returns the list of additional errors and warnings. 
- #[cfg(feature = "resolc")] - pub fn preprocess_ast(&mut self, suppressed_warnings: &[Warning]) -> anyhow::Result<()> { - let sources = match self.sources.as_ref() { - Some(sources) => sources, - None => return Ok(()), - }; + /// Initializes a standard JSON output. + /// + /// Is used for projects compiled without `solc`. + pub fn new( + sources: &BTreeMap, + messages: &mut Vec, + ) -> Self { + let sources = sources + .keys() + .enumerate() + .map(|(index, path)| (path.to_owned(), Source::new(index))) + .collect::>(); - let mut messages = Vec::new(); - for (path, source) in sources.iter() { - if let Some(ast) = source.ast.as_ref() { - let mut polkavm_messages = Source::get_messages(ast, suppressed_warnings); - for message in polkavm_messages.iter_mut() { - message.push_contract_path(path.as_str()); - } - messages.extend(polkavm_messages); + Self { + contracts: BTreeMap::new(), + sources, + errors: std::mem::take(messages), + + version: None, + long_version: None, + revive_version: None, + } + } + + /// Initializes a standard JSON output with messages. + /// + /// Is used to emit errors in standard JSON mode. + pub fn new_with_messages(messages: Vec) -> Self { + Self { + contracts: BTreeMap::new(), + sources: BTreeMap::new(), + errors: messages, + + version: None, + long_version: None, + revive_version: None, + } + } + + /// Prunes the output JSON and prints it to stdout. + pub fn write_and_exit( + mut self, + selection_to_prune: SolcStandardJsonInputSettingsSelection, + ) -> ! 
{ + let sources = self.sources.values_mut().collect::>(); + for source in sources.into_iter() { + if selection_to_prune + .contains(&crate::SolcStandardJsonInputSettingsSelectionFileFlag::AST) + { + source.ast = None; } } - self.errors = match self.errors.take() { - Some(mut errors) => { - errors.extend(messages); - Some(errors) + + let contracts = self + .contracts + .values_mut() + .flat_map(|contracts| contracts.values_mut()) + .collect::>(); + for contract in contracts.into_iter() { + if selection_to_prune + .contains(&crate::SolcStandardJsonInputSettingsSelectionFileFlag::Metadata) + { + contract.metadata = serde_json::Value::Null; } - None => Some(messages), - }; + if selection_to_prune + .contains(&crate::SolcStandardJsonInputSettingsSelectionFileFlag::Yul) + { + contract.ir_optimized = String::new(); + } + if let Some(ref mut evm) = contract.evm { + if selection_to_prune.contains( + &crate::SolcStandardJsonInputSettingsSelectionFileFlag::MethodIdentifiers, + ) { + evm.method_identifiers.clear(); + } + } + } + + self.contracts.retain(|_, contracts| { + contracts.retain(|_, contract| !contract.is_empty()); + !contracts.is_empty() + }); + + serde_json::to_writer(std::io::stdout(), &self).expect("Stdout writing error"); + std::process::exit(revive_common::EXIT_CODE_SUCCESS); + } + + /// Traverses the AST and returns the list of additional errors and warnings. 
+ pub fn preprocess_ast( + &mut self, + sources: &BTreeMap, + suppressed_warnings: &[Warning], + ) -> anyhow::Result<()> { + let id_paths: BTreeMap = self + .sources + .iter() + .map(|(path, source)| (source.id, path)) + .collect(); + + #[cfg(feature = "parallel")] + let iter = self.sources.par_iter(); + #[cfg(not(feature = "parallel"))] + let iter = self.sources.iter(); + + let messages: Vec = iter + .flat_map(|(_path, source)| { + source + .ast + .as_ref() + .map(|ast| Source::get_messages(ast, &id_paths, sources, suppressed_warnings)) + .unwrap_or_default() + }) + .collect(); + self.errors.extend(messages); Ok(()) } + + /// Pushes an arbitrary error with path. + /// + /// Please do not push project-general errors without paths here. + pub fn push_error(&mut self, path: Option, error: anyhow::Error) { + use crate::standard_json::output::error::source_location::SourceLocation; + + self.errors.push(SolcStandardJsonOutputError::new_error( + error, + path.map(SourceLocation::new), + None, + )); + } +} + +impl ErrorHandler for Output { + fn errors(&self) -> Vec<&SolcStandardJsonOutputError> { + self.errors + .iter() + .filter(|error| error.is_error()) + .collect() + } + + fn take_warnings(&mut self) -> Vec { + let warnings = self + .errors + .iter() + .filter(|message| message.is_warning()) + .cloned() + .collect(); + self.errors.retain(|message| !message.is_warning()); + warnings + } } diff --git a/crates/solc-json-interface/src/standard_json/output/source.rs b/crates/solc-json-interface/src/standard_json/output/source.rs index f42157f..b736122 100644 --- a/crates/solc-json-interface/src/standard_json/output/source.rs +++ b/crates/solc-json-interface/src/standard_json/output/source.rs @@ -1,11 +1,17 @@ //! The `solc --standard-json` output source. 
+#[cfg(feature = "resolc")] +use std::collections::BTreeMap; + use serde::Deserialize; use serde::Serialize; +#[cfg(feature = "resolc")] +use crate::standard_json::input::settings::warning::Warning; +#[cfg(feature = "resolc")] use crate::standard_json::output::error::Error as SolcStandardJsonOutputError; #[cfg(feature = "resolc")] -use crate::warning::Warning; +use crate::SolcStandardJsonInputSource; /// The `solc --standard-json` output source. #[derive(Debug, Serialize, Deserialize, Clone)] @@ -17,160 +23,126 @@ pub struct Source { pub ast: Option, } +#[cfg(feature = "resolc")] impl Source { - /// Checks the AST node for the `ecrecover` function usage. - pub fn check_ecrecover(ast: &serde_json::Value) -> Option { - let ast = ast.as_object()?; - - if ast.get("nodeType")?.as_str()? != "FunctionCall" { - return None; - } - - let expression = ast.get("expression")?.as_object()?; - if expression.get("nodeType")?.as_str()? != "Identifier" { - return None; - } - if expression.get("name")?.as_str()? != "ecrecover" { - return None; - } - - Some(SolcStandardJsonOutputError::message_ecrecover( - ast.get("src")?.as_str(), - )) + /// Initializes a standard JSON source. + /// + /// Is used for projects compiled without `solc`. + pub fn new(id: usize) -> Self { + Self { id, ast: None } } - /// Checks the AST node for the `
`'s `send` and `transfer` methods usage. - pub fn check_send_and_transfer(ast: &serde_json::Value) -> Option { - let ast = ast.as_object()?; - - if ast.get("nodeType")?.as_str()? != "FunctionCall" { - return None; - } - - let expression = ast.get("expression")?.as_object()?; - if expression.get("nodeType")?.as_str()? != "MemberAccess" { - return None; - } - let member_name = expression.get("memberName")?.as_str()?; - if member_name != "send" && member_name != "transfer" { - return None; - } - - Some(SolcStandardJsonOutputError::message_send_and_transfer( - ast.get("src")?.as_str(), - )) - } - - /// Checks the AST node for the `extcodesize` assembly instruction usage. - pub fn check_assembly_extcodesize( + /// Checks the AST node for the usage of send or transfer address methods. + pub fn check_send_and_transfer( ast: &serde_json::Value, + id_paths: &BTreeMap, + sources: &BTreeMap, ) -> Option { let ast = ast.as_object()?; - if ast.get("nodeType")?.as_str()? != "YulFunctionCall" { - return None; - } - if ast - .get("functionName")? - .as_object()? - .get("name")? - .as_str()? - != "extcodesize" - { - return None; - } + (ast.get("nodeType")?.as_str()? == "FunctionCall").then_some(())?; - Some(SolcStandardJsonOutputError::message_extcodesize( - ast.get("src")?.as_str(), - )) + let expression = ast.get("expression")?.as_object()?; + (expression.get("nodeType")?.as_str()? == "MemberAccess").then_some(())?; + let member_name = expression.get("memberName")?.as_str()?; + ["send", "transfer"].contains(&member_name).then_some(())?; + + let expression = expression.get("expression")?.as_object()?; + let type_descriptions = expression.get("typeDescriptions")?.as_object()?; + let type_identifier = type_descriptions.get("typeIdentifier")?.as_str()?; + ["t_address_payable"] + .contains(&type_identifier) + .then_some(())?; + + Some(Warning::SendAndTransfer.as_error(ast.get("src")?.as_str(), id_paths, sources)) } - /// Checks the AST node for the `origin` assembly instruction usage. 
- pub fn check_assembly_origin(ast: &serde_json::Value) -> Option { + /// Checks the AST node for the usage of runtime code. + pub fn check_runtime_code( + ast: &serde_json::Value, + id_paths: &BTreeMap, + sources: &BTreeMap, + ) -> Option { let ast = ast.as_object()?; - if ast.get("nodeType")?.as_str()? != "YulFunctionCall" { - return None; - } - if ast - .get("functionName")? - .as_object()? - .get("name")? - .as_str()? - != "origin" - { - return None; - } + (ast.get("nodeType")?.as_str()? == "MemberAccess").then_some(())?; + (ast.get("memberName")?.as_str()? == "runtimeCode").then_some(())?; - Some(SolcStandardJsonOutputError::message_tx_origin( + let expression = ast.get("expression")?.as_object()?; + let type_descriptions = expression.get("typeDescriptions")?.as_object()?; + type_descriptions + .get("typeIdentifier")? + .as_str()? + .starts_with("t_magic_meta_type") + .then_some(())?; + + Some(SolcStandardJsonOutputError::error_runtime_code( ast.get("src")?.as_str(), + id_paths, + sources, )) } /// Checks the AST node for the `tx.origin` value usage. - pub fn check_tx_origin(ast: &serde_json::Value) -> Option { + pub fn check_tx_origin( + ast: &serde_json::Value, + id_paths: &BTreeMap, + sources: &BTreeMap, + ) -> Option { let ast = ast.as_object()?; - if ast.get("nodeType")?.as_str()? != "MemberAccess" { - return None; - } - if ast.get("memberName")?.as_str()? != "origin" { - return None; - } + (ast.get("nodeType")?.as_str()? == "MemberAccess").then_some(())?; + (ast.get("memberName")?.as_str()? == "origin").then_some(())?; let expression = ast.get("expression")?.as_object()?; - if expression.get("nodeType")?.as_str()? != "Identifier" { - return None; - } - if expression.get("name")?.as_str()? != "tx" { - return None; - } + (expression.get("nodeType")?.as_str()? == "Identifier").then_some(())?; + (expression.get("name")?.as_str()? 
== "tx").then_some(())?; - Some(SolcStandardJsonOutputError::message_tx_origin( - ast.get("src")?.as_str(), - )) + Some(Warning::TxOrigin.as_error(ast.get("src")?.as_str(), id_paths, sources)) } /// Returns the list of messages for some specific parts of the AST. #[cfg(feature = "resolc")] pub fn get_messages( ast: &serde_json::Value, + id_paths: &BTreeMap, + sources: &BTreeMap, suppressed_warnings: &[Warning], ) -> Vec { let mut messages = Vec::new(); - if !suppressed_warnings.contains(&Warning::EcRecover) { - if let Some(message) = Self::check_ecrecover(ast) { - messages.push(message); - } - } - if !suppressed_warnings.contains(&Warning::SendTransfer) { - if let Some(message) = Self::check_send_and_transfer(ast) { - messages.push(message); - } - } - if !suppressed_warnings.contains(&Warning::ExtCodeSize) { - if let Some(message) = Self::check_assembly_extcodesize(ast) { + if !suppressed_warnings.contains(&Warning::SendAndTransfer) { + if let Some(message) = Self::check_send_and_transfer(ast, id_paths, sources) { messages.push(message); } } if !suppressed_warnings.contains(&Warning::TxOrigin) { - if let Some(message) = Self::check_assembly_origin(ast) { - messages.push(message); - } - if let Some(message) = Self::check_tx_origin(ast) { + if let Some(message) = Self::check_tx_origin(ast, id_paths, sources) { messages.push(message); } } + if let Some(message) = Self::check_runtime_code(ast, id_paths, sources) { + messages.push(message); + } match ast { serde_json::Value::Array(array) => { for element in array.iter() { - messages.extend(Self::get_messages(element, suppressed_warnings)); + messages.extend(Self::get_messages( + element, + id_paths, + sources, + suppressed_warnings, + )); } } serde_json::Value::Object(object) => { for (_key, value) in object.iter() { - messages.extend(Self::get_messages(value, suppressed_warnings)); + messages.extend(Self::get_messages( + value, + id_paths, + sources, + suppressed_warnings, + )); } } _ => {} @@ -178,25 +150,4 @@ impl 
Source { messages } - - /// Returns the name of the last contract. - pub fn last_contract_name(&self) -> anyhow::Result { - self.ast - .as_ref() - .ok_or_else(|| anyhow::anyhow!("The AST is empty"))? - .get("nodes") - .and_then(|value| value.as_array()) - .ok_or_else(|| { - anyhow::anyhow!("The last contract cannot be found in an empty list of nodes") - })? - .iter() - .filter_map( - |node| match node.get("nodeType").and_then(|node| node.as_str()) { - Some("ContractDefinition") => Some(node.get("name")?.as_str()?.to_owned()), - _ => None, - }, - ) - .next_back() - .ok_or_else(|| anyhow::anyhow!("The last contract not found in the AST")) - } } diff --git a/crates/solc-json-interface/src/warning.rs b/crates/solc-json-interface/src/warning.rs deleted file mode 100644 index f1a6d48..0000000 --- a/crates/solc-json-interface/src/warning.rs +++ /dev/null @@ -1,48 +0,0 @@ -//! `resolc` custom compiler warnings. -//! -//! The revive compiler adds warnings only applicable when compilng -//! to the revive stack on Polkadot to the output. - -use std::str::FromStr; - -use serde::Deserialize; -use serde::Serialize; - -// The `resolc` custom compiler warning. -#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Hash)] -pub enum Warning { - EcRecover, - SendTransfer, - ExtCodeSize, - TxOrigin, - BlockTimestamp, - BlockNumber, - BlockHash, -} - -impl Warning { - /// Converts string arguments into an array of warnings. 
- pub fn try_from_strings(strings: &[String]) -> Result, anyhow::Error> { - strings - .iter() - .map(|string| Self::from_str(string)) - .collect() - } -} - -impl FromStr for Warning { - type Err = anyhow::Error; - - fn from_str(string: &str) -> Result { - match string { - "ecrecover" => Ok(Self::EcRecover), - "sendtransfer" => Ok(Self::SendTransfer), - "extcodesize" => Ok(Self::ExtCodeSize), - "txorigin" => Ok(Self::TxOrigin), - "blocktimestamp" => Ok(Self::BlockTimestamp), - "blocknumber" => Ok(Self::BlockNumber), - "blockhash" => Ok(Self::BlockHash), - _ => Err(anyhow::anyhow!("Invalid warning: {}", string)), - } - } -} diff --git a/crates/yul/Cargo.toml b/crates/yul/Cargo.toml index 97538f3..4021b4c 100644 --- a/crates/yul/Cargo.toml +++ b/crates/yul/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "revive-yul" description = "The revive YUL parser library." -version = "0.2.1" +version = "0.3.0" authors.workspace = true license.workspace = true edition.workspace = true diff --git a/crates/yul/src/lexer/mod.rs b/crates/yul/src/lexer/mod.rs index ca26e8f..844fa9c 100644 --- a/crates/yul/src/lexer/mod.rs +++ b/crates/yul/src/lexer/mod.rs @@ -1,8 +1,5 @@ //! The compiler lexer. -pub mod error; -pub mod token; - #[cfg(test)] mod tests; @@ -16,6 +13,9 @@ use self::token::lexeme::Lexeme; use self::token::location::Location; use self::token::Token; +pub mod error; +pub mod token; + /// The compiler lexer. pub struct Lexer { /// The input source code. diff --git a/crates/yul/src/lexer/token/mod.rs b/crates/yul/src/lexer/token/mod.rs index 30d9fa4..53fc40a 100644 --- a/crates/yul/src/lexer/token/mod.rs +++ b/crates/yul/src/lexer/token/mod.rs @@ -1,11 +1,11 @@ //! The token. -pub mod lexeme; -pub mod location; - use self::lexeme::Lexeme; use self::location::Location; +pub mod lexeme; +pub mod location; + /// The token. /// Contains a lexeme and its location. 
#[derive(Debug, Clone, PartialEq, Eq)] diff --git a/crates/yul/src/parser/mod.rs b/crates/yul/src/parser/mod.rs index a5167ea..deb6d09 100644 --- a/crates/yul/src/parser/mod.rs +++ b/crates/yul/src/parser/mod.rs @@ -1,14 +1,14 @@ //! The YUL code block. +use crate::lexer::error::Error as LexerError; +use crate::lexer::token::Token; +use crate::lexer::Lexer; + pub mod error; pub mod identifier; pub mod statement; pub mod r#type; -use crate::lexer::error::Error as LexerError; -use crate::lexer::token::Token; -use crate::lexer::Lexer; - /// Returns the `token` value if it is `Some(_)`, otherwise takes the next token from the `stream`. pub fn take_or_next(mut token: Option, lexer: &mut Lexer) -> Result { match token.take() { diff --git a/crates/yul/src/parser/statement/assignment.rs b/crates/yul/src/parser/statement/assignment.rs index a5e9a99..d1a8b64 100644 --- a/crates/yul/src/parser/statement/assignment.rs +++ b/crates/yul/src/parser/statement/assignment.rs @@ -1,11 +1,15 @@ //! The assignment expression statement. -use std::collections::HashSet; +use std::collections::BTreeSet; use inkwell::types::BasicType; use serde::Deserialize; use serde::Serialize; +use revive_common::BIT_LENGTH_X32; +use revive_llvm_context::PolkaVMContext; +use revive_llvm_context::PolkaVMWriteLLVM; + use crate::error::Error; use crate::lexer::token::lexeme::symbol::Symbol; use crate::lexer::token::lexeme::Lexeme; @@ -108,19 +112,13 @@ impl Assignment { } /// Get the list of missing deployable libraries. 
- pub fn get_missing_libraries(&self) -> HashSet { + pub fn get_missing_libraries(&self) -> BTreeSet { self.initializer.get_missing_libraries() } } -impl revive_llvm_context::PolkaVMWriteLLVM for Assignment -where - D: revive_llvm_context::PolkaVMDependency + Clone, -{ - fn into_llvm( - mut self, - context: &mut revive_llvm_context::PolkaVMContext, - ) -> anyhow::Result<()> { +impl PolkaVMWriteLLVM for Assignment { + fn into_llvm(mut self, context: &mut PolkaVMContext) -> anyhow::Result<()> { context.set_debug_location(self.location.line, self.location.column, None)?; let value = match self.initializer.into_llvm(context)? { @@ -158,7 +156,7 @@ where &[ context.word_const(0), context - .integer_type(revive_common::BIT_LENGTH_X32) + .integer_type(BIT_LENGTH_X32) .const_int(index as u64, false), ], context.word_type().as_basic_type_enum(), diff --git a/crates/yul/src/parser/statement/block.rs b/crates/yul/src/parser/statement/block.rs index 87edfd3..45e2ec3 100644 --- a/crates/yul/src/parser/statement/block.rs +++ b/crates/yul/src/parser/statement/block.rs @@ -1,11 +1,13 @@ //! The source code block. -use std::collections::HashSet; +use std::collections::BTreeSet; +use inkwell::debug_info::AsDIScope; use serde::Deserialize; use serde::Serialize; -use inkwell::debug_info::AsDIScope; +use revive_llvm_context::PolkaVMContext; +use revive_llvm_context::PolkaVMWriteLLVM; use crate::error::Error; use crate::lexer::token::lexeme::symbol::Symbol; @@ -123,8 +125,8 @@ impl Block { } /// Get the list of missing deployable libraries. 
- pub fn get_missing_libraries(&self) -> HashSet { - let mut libraries = HashSet::new(); + pub fn get_missing_libraries(&self) -> BTreeSet { + let mut libraries = BTreeSet::new(); for statement in self.statements.iter() { libraries.extend(statement.get_missing_libraries()); } @@ -132,11 +134,8 @@ impl Block { } } -impl revive_llvm_context::PolkaVMWriteLLVM for Block -where - D: revive_llvm_context::PolkaVMDependency + Clone, -{ - fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> { +impl PolkaVMWriteLLVM for Block { + fn into_llvm(self, context: &mut PolkaVMContext) -> anyhow::Result<()> { let current_function = context.current_function().borrow().name().to_owned(); let current_block = context.basic_block(); diff --git a/crates/yul/src/parser/statement/code.rs b/crates/yul/src/parser/statement/code.rs index 9675f1e..e40b380 100644 --- a/crates/yul/src/parser/statement/code.rs +++ b/crates/yul/src/parser/statement/code.rs @@ -1,10 +1,13 @@ //! The YUL code. -use std::collections::HashSet; +use std::collections::BTreeSet; use serde::Deserialize; use serde::Serialize; +use revive_llvm_context::PolkaVMContext; +use revive_llvm_context::PolkaVMWriteLLVM; + use crate::error::Error; use crate::lexer::token::lexeme::keyword::Keyword; use crate::lexer::token::lexeme::Lexeme; @@ -52,16 +55,13 @@ impl Code { } /// Get the list of missing deployable libraries. 
- pub fn get_missing_libraries(&self) -> HashSet { + pub fn get_missing_libraries(&self) -> BTreeSet { self.block.get_missing_libraries() } } -impl revive_llvm_context::PolkaVMWriteLLVM for Code -where - D: revive_llvm_context::PolkaVMDependency + Clone, -{ - fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> { +impl PolkaVMWriteLLVM for Code { + fn into_llvm(self, context: &mut PolkaVMContext) -> anyhow::Result<()> { self.block.into_llvm(context)?; Ok(()) diff --git a/crates/yul/src/parser/statement/expression/function_call/mod.rs b/crates/yul/src/parser/statement/expression/function_call/mod.rs index 93c7756..ead528d 100644 --- a/crates/yul/src/parser/statement/expression/function_call/mod.rs +++ b/crates/yul/src/parser/statement/expression/function_call/mod.rs @@ -3,7 +3,7 @@ pub mod name; pub mod verbatim; -use std::collections::HashSet; +use std::collections::BTreeSet; use inkwell::values::BasicValue; use serde::Deserialize; @@ -95,8 +95,8 @@ impl FunctionCall { } /// Get the list of missing deployable libraries. - pub fn get_missing_libraries(&self) -> HashSet { - let mut libraries = HashSet::new(); + pub fn get_missing_libraries(&self) -> BTreeSet { + let mut libraries = BTreeSet::new(); if let Name::LinkerSymbol = self.name { let _argument = self.arguments.first().expect("Always exists"); @@ -117,13 +117,10 @@ impl FunctionCall { } /// Converts the function call into an LLVM value. 
- pub fn into_llvm<'ctx, D>( + pub fn into_llvm<'ctx>( mut self, - context: &mut revive_llvm_context::PolkaVMContext<'ctx, D>, - ) -> anyhow::Result>> - where - D: revive_llvm_context::PolkaVMDependency + Clone, - { + context: &mut revive_llvm_context::PolkaVMContext<'ctx>, + ) -> anyhow::Result>> { let location = self.location; context.set_debug_location(location.line, location.column, None)?; @@ -164,7 +161,7 @@ impl FunctionCall { } Name::Add => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_arithmetic::addition( context, arguments[0].into_int_value(), @@ -173,7 +170,7 @@ impl FunctionCall { .map(Some) } Name::Sub => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_arithmetic::subtraction( context, arguments[0].into_int_value(), @@ -182,7 +179,7 @@ impl FunctionCall { .map(Some) } Name::Mul => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_arithmetic::multiplication( context, arguments[0].into_int_value(), @@ -191,7 +188,7 @@ impl FunctionCall { .map(Some) } Name::Div => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_arithmetic::division( context, arguments[0].into_int_value(), @@ -200,7 +197,7 @@ impl FunctionCall { .map(Some) } Name::Mod => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_arithmetic::remainder( context, arguments[0].into_int_value(), @@ -209,7 +206,7 @@ impl FunctionCall { .map(Some) } Name::Sdiv => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; 
revive_llvm_context::polkavm_evm_arithmetic::division_signed( context, arguments[0].into_int_value(), @@ -218,7 +215,7 @@ impl FunctionCall { .map(Some) } Name::Smod => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_arithmetic::remainder_signed( context, arguments[0].into_int_value(), @@ -228,7 +225,7 @@ impl FunctionCall { } Name::Lt => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_comparison::compare( context, arguments[0].into_int_value(), @@ -238,7 +235,7 @@ impl FunctionCall { .map(Some) } Name::Gt => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_comparison::compare( context, arguments[0].into_int_value(), @@ -248,7 +245,7 @@ impl FunctionCall { .map(Some) } Name::Eq => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_comparison::compare( context, arguments[0].into_int_value(), @@ -258,7 +255,7 @@ impl FunctionCall { .map(Some) } Name::IsZero => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<1>(context)?; revive_llvm_context::polkavm_evm_comparison::compare( context, arguments[0].into_int_value(), @@ -268,7 +265,7 @@ impl FunctionCall { .map(Some) } Name::Slt => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_comparison::compare( context, arguments[0].into_int_value(), @@ -278,7 +275,7 @@ impl FunctionCall { .map(Some) } Name::Sgt => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_comparison::compare( context, 
arguments[0].into_int_value(), @@ -289,7 +286,7 @@ impl FunctionCall { } Name::Or => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_bitwise::or( context, arguments[0].into_int_value(), @@ -298,7 +295,7 @@ impl FunctionCall { .map(Some) } Name::Xor => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_bitwise::xor( context, arguments[0].into_int_value(), @@ -307,7 +304,7 @@ impl FunctionCall { .map(Some) } Name::Not => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<1>(context)?; revive_llvm_context::polkavm_evm_bitwise::xor( context, arguments[0].into_int_value(), @@ -316,7 +313,7 @@ impl FunctionCall { .map(Some) } Name::And => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_bitwise::and( context, arguments[0].into_int_value(), @@ -325,7 +322,7 @@ impl FunctionCall { .map(Some) } Name::Shl => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_bitwise::shift_left( context, arguments[0].into_int_value(), @@ -334,7 +331,7 @@ impl FunctionCall { .map(Some) } Name::Shr => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_bitwise::shift_right( context, arguments[0].into_int_value(), @@ -343,7 +340,7 @@ impl FunctionCall { .map(Some) } Name::Sar => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_bitwise::shift_right_arithmetic( context, arguments[0].into_int_value(), @@ -352,7 +349,7 @@ impl FunctionCall { .map(Some) } Name::Byte => { - 
let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_bitwise::byte( context, arguments[0].into_int_value(), @@ -361,12 +358,12 @@ impl FunctionCall { .map(Some) } Name::Pop => { - let _arguments = self.pop_arguments_llvm::(context)?; + let _arguments = self.pop_arguments_llvm::<1>(context)?; Ok(None) } Name::AddMod => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<3>(context)?; revive_llvm_context::polkavm_evm_math::add_mod( context, arguments[0].into_int_value(), @@ -376,7 +373,7 @@ impl FunctionCall { .map(Some) } Name::MulMod => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<3>(context)?; revive_llvm_context::polkavm_evm_math::mul_mod( context, arguments[0].into_int_value(), @@ -386,7 +383,7 @@ impl FunctionCall { .map(Some) } Name::Exp => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_math::exponent( context, arguments[0].into_int_value(), @@ -395,7 +392,7 @@ impl FunctionCall { .map(Some) } Name::SignExtend => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_math::sign_extend( context, arguments[0].into_int_value(), @@ -405,7 +402,7 @@ impl FunctionCall { } Name::Keccak256 => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_crypto::sha3( context, arguments[0].into_int_value(), @@ -415,7 +412,7 @@ impl FunctionCall { } Name::MLoad => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<1>(context)?; revive_llvm_context::polkavm_evm_memory::load( context, arguments[0].into_int_value(), @@ -423,7 +420,7 @@ impl FunctionCall { 
.map(Some) } Name::MStore => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_memory::store( context, arguments[0].into_int_value(), @@ -432,7 +429,7 @@ impl FunctionCall { .map(|_| None) } Name::MStore8 => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_memory::store_byte( context, arguments[0].into_int_value(), @@ -441,7 +438,7 @@ impl FunctionCall { .map(|_| None) } Name::MCopy => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<3>(context)?; let destination = revive_llvm_context::PolkaVMPointer::new_with_offset( context, revive_llvm_context::PolkaVMAddressSpace::Heap, @@ -467,11 +464,11 @@ impl FunctionCall { } Name::SLoad => { - let arguments = self.pop_arguments::(context)?; + let arguments = self.pop_arguments::<1>(context)?; revive_llvm_context::polkavm_evm_storage::load(context, &arguments[0]).map(Some) } Name::SStore => { - let arguments = self.pop_arguments::(context)?; + let arguments = self.pop_arguments::<2>(context)?; revive_llvm_context::polkavm_evm_storage::store( context, &arguments[0], @@ -480,12 +477,12 @@ impl FunctionCall { .map(|_| None) } Name::TLoad => { - let arguments = self.pop_arguments::(context)?; + let arguments = self.pop_arguments::<1>(context)?; revive_llvm_context::polkavm_evm_storage::transient_load(context, &arguments[0]) .map(Some) } Name::TStore => { - let arguments = self.pop_arguments::(context)?; + let arguments = self.pop_arguments::<2>(context)?; revive_llvm_context::polkavm_evm_storage::transient_store( context, &arguments[0], @@ -494,7 +491,7 @@ impl FunctionCall { .map(|_| None) } Name::LoadImmutable => { - let mut arguments = self.pop_arguments::(context)?; + let mut arguments = self.pop_arguments::<1>(context)?; let key = arguments[0].original.take().ok_or_else(|| { 
anyhow::anyhow!("{} `load_immutable` literal is missing", location) })?; @@ -506,7 +503,7 @@ impl FunctionCall { revive_llvm_context::polkavm_evm_immutable::load(context, index).map(Some) } Name::SetImmutable => { - let mut arguments = self.pop_arguments::(context)?; + let mut arguments = self.pop_arguments::<3>(context)?; let key = arguments[1].original.take().ok_or_else(|| { anyhow::anyhow!("{} `load_immutable` literal is missing", location) })?; @@ -518,7 +515,7 @@ impl FunctionCall { .map(|_| None) } Name::CallDataLoad => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<1>(context)?; match context .code_type() @@ -550,7 +547,7 @@ impl FunctionCall { } } Name::CallDataCopy => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<3>(context)?; match context .code_type() @@ -603,7 +600,7 @@ impl FunctionCall { ); } - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<3>(context)?; revive_llvm_context::polkavm_evm_calldata::copy( context, arguments[0].into_int_value(), @@ -616,7 +613,7 @@ impl FunctionCall { revive_llvm_context::polkavm_evm_return_data::size(context).map(Some) } Name::ReturnDataCopy => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<3>(context)?; revive_llvm_context::polkavm_evm_return_data::copy( context, arguments[0].into_int_value(), @@ -626,7 +623,7 @@ impl FunctionCall { .map(|_| None) } Name::ExtCodeSize => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<1>(context)?; revive_llvm_context::polkavm_evm_ext_code::size( context, Some(arguments[0].into_int_value()), @@ -634,7 +631,7 @@ impl FunctionCall { .map(Some) } Name::ExtCodeHash => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<1>(context)?; revive_llvm_context::polkavm_evm_ext_code::hash( 
context, arguments[0].into_int_value(), @@ -643,7 +640,7 @@ impl FunctionCall { } Name::Return => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_return::r#return( context, arguments[0].into_int_value(), @@ -652,7 +649,7 @@ impl FunctionCall { .map(|_| None) } Name::Revert => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_return::revert( context, arguments[0].into_int_value(), @@ -666,7 +663,7 @@ impl FunctionCall { } Name::Log0 => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<2>(context)?; revive_llvm_context::polkavm_evm_event::log( context, arguments[0].into_int_value(), @@ -676,7 +673,7 @@ impl FunctionCall { .map(|_| None) } Name::Log1 => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<3>(context)?; revive_llvm_context::polkavm_evm_event::log( context, arguments[0].into_int_value(), @@ -686,7 +683,7 @@ impl FunctionCall { .map(|_| None) } Name::Log2 => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<4>(context)?; revive_llvm_context::polkavm_evm_event::log( context, arguments[0].into_int_value(), @@ -696,7 +693,7 @@ impl FunctionCall { .map(|_| None) } Name::Log3 => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<5>(context)?; revive_llvm_context::polkavm_evm_event::log( context, arguments[0].into_int_value(), @@ -706,7 +703,7 @@ impl FunctionCall { .map(|_| None) } Name::Log4 => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<6>(context)?; revive_llvm_context::polkavm_evm_event::log( context, arguments[0].into_int_value(), @@ -717,7 +714,7 @@ impl FunctionCall { } Name::Call => { - let arguments = 
self.pop_arguments::(context)?; + let arguments = self.pop_arguments::<7>(context)?; let gas = arguments[0].access(context)?.into_int_value(); let address = arguments[1].access(context)?.into_int_value(); @@ -747,7 +744,7 @@ impl FunctionCall { .map(Some) } Name::StaticCall => { - let arguments = self.pop_arguments::(context)?; + let arguments = self.pop_arguments::<6>(context)?; let gas = arguments[0].access(context)?.into_int_value(); let address = arguments[1].access(context)?.into_int_value(); @@ -776,7 +773,7 @@ impl FunctionCall { .map(Some) } Name::DelegateCall => { - let arguments = self.pop_arguments::(context)?; + let arguments = self.pop_arguments::<6>(context)?; let gas = arguments[0].access(context)?.into_int_value(); let address = arguments[1].access(context)?.into_int_value(); @@ -804,7 +801,7 @@ impl FunctionCall { } Name::Create => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<3>(context)?; let value = arguments[0].into_int_value(); let input_offset = arguments[1].into_int_value(); @@ -820,7 +817,7 @@ impl FunctionCall { .map(Some) } Name::Create2 => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<4>(context)?; let value = arguments[0].into_int_value(); let input_offset = arguments[1].into_int_value(); @@ -837,7 +834,7 @@ impl FunctionCall { .map(Some) } Name::DataOffset => { - let mut arguments = self.pop_arguments::(context)?; + let mut arguments = self.pop_arguments::<1>(context)?; let identifier = arguments[0].original.take().ok_or_else(|| { anyhow::anyhow!("{} `dataoffset` object identifier is missing", location) @@ -848,7 +845,7 @@ impl FunctionCall { .map(Some) } Name::DataSize => { - let mut arguments = self.pop_arguments::(context)?; + let mut arguments = self.pop_arguments::<1>(context)?; let identifier = arguments[0].original.take().ok_or_else(|| { anyhow::anyhow!("{} `dataoffset` object identifier is missing", location) @@ -859,7 
+856,7 @@ impl FunctionCall { .map(Some) } Name::DataCopy => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<3>(context)?; revive_llvm_context::polkavm_evm_memory::store( context, arguments[0].into_int_value(), @@ -869,19 +866,14 @@ impl FunctionCall { } Name::LinkerSymbol => { - let mut arguments = self.pop_arguments::(context)?; + let mut arguments = self.pop_arguments::<1>(context)?; let path = arguments[0].original.take().ok_or_else(|| { anyhow::anyhow!("{} Linker symbol literal is missing", location) })?; - - Ok(Some( - context - .resolve_library(path.as_str())? - .as_basic_value_enum(), - )) + revive_llvm_context::polkavm_evm_call::linker_symbol(context, &path).map(Some) } Name::MemoryGuard => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<1>(context)?; Ok(Some(arguments[0])) } @@ -894,7 +886,7 @@ impl FunctionCall { Name::CallValue => revive_llvm_context::polkavm_evm_ether_gas::value(context).map(Some), Name::Gas => revive_llvm_context::polkavm_evm_ether_gas::gas(context).map(Some), Name::Balance => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<1>(context)?; let address = arguments[0].into_int_value(); revive_llvm_context::polkavm_evm_ether_gas::balance(context, address).map(Some) @@ -923,14 +915,14 @@ impl FunctionCall { revive_llvm_context::polkavm_evm_contract_context::block_number(context).map(Some) } Name::BlockHash => { - let arguments = self.pop_arguments_llvm::(context)?; + let arguments = self.pop_arguments_llvm::<1>(context)?; let index = arguments[0].into_int_value(); revive_llvm_context::polkavm_evm_contract_context::block_hash(context, index) .map(Some) } Name::BlobHash => { - let _arguments = self.pop_arguments_llvm::(context)?; + let _arguments = self.pop_arguments_llvm::<1>(context)?; anyhow::bail!( "{} The `BLOBHASH` instruction is not supported in revive", location @@ -959,19 
+951,19 @@ impl FunctionCall { } => verbatim::verbatim(context, &mut self, input_size, output_size), Name::CallCode => { - let _arguments = self.pop_arguments_llvm::(context)?; + let _arguments = self.pop_arguments_llvm::<7>(context)?; anyhow::bail!("{} The `CALLCODE` instruction is not supported", location) } Name::Pc => anyhow::bail!("{} The `PC` instruction is not supported", location), Name::ExtCodeCopy => { - let _arguments = self.pop_arguments_llvm::(context)?; + let _arguments = self.pop_arguments_llvm::<4>(context)?; anyhow::bail!( "{} The `EXTCODECOPY` instruction is not supported", location ) } Name::SelfDestruct => { - let _arguments = self.pop_arguments_llvm::(context)?; + let _arguments = self.pop_arguments_llvm::<1>(context)?; anyhow::bail!( "{} The `SELFDESTRUCT` instruction is not supported", location @@ -981,13 +973,10 @@ impl FunctionCall { } /// Pops the specified number of arguments, converted into their LLVM values. - fn pop_arguments_llvm<'ctx, D, const N: usize>( + fn pop_arguments_llvm<'ctx, const N: usize>( &mut self, - context: &mut revive_llvm_context::PolkaVMContext<'ctx, D>, - ) -> anyhow::Result<[inkwell::values::BasicValueEnum<'ctx>; N]> - where - D: revive_llvm_context::PolkaVMDependency + Clone, - { + context: &mut revive_llvm_context::PolkaVMContext<'ctx>, + ) -> anyhow::Result<[inkwell::values::BasicValueEnum<'ctx>; N]> { let mut arguments = Vec::with_capacity(N); for expression in self.arguments.drain(0..N).rev() { arguments.push( @@ -1005,13 +994,10 @@ impl FunctionCall { } /// Pops the specified number of arguments. 
- fn pop_arguments<'ctx, D, const N: usize>( + fn pop_arguments<'ctx, const N: usize>( &mut self, - context: &mut revive_llvm_context::PolkaVMContext<'ctx, D>, - ) -> anyhow::Result<[revive_llvm_context::PolkaVMArgument<'ctx>; N]> - where - D: revive_llvm_context::PolkaVMDependency + Clone, - { + context: &mut revive_llvm_context::PolkaVMContext<'ctx>, + ) -> anyhow::Result<[revive_llvm_context::PolkaVMArgument<'ctx>; N]> { let mut arguments = Vec::with_capacity(N); for expression in self.arguments.drain(0..N).rev() { arguments.push(expression.into_llvm(context)?.expect("Always exists")); diff --git a/crates/yul/src/parser/statement/expression/function_call/verbatim.rs b/crates/yul/src/parser/statement/expression/function_call/verbatim.rs index 9fc54a2..b0d2762 100644 --- a/crates/yul/src/parser/statement/expression/function_call/verbatim.rs +++ b/crates/yul/src/parser/statement/expression/function_call/verbatim.rs @@ -1,17 +1,16 @@ //! Translates the verbatim simulations. +use revive_llvm_context::PolkaVMContext; + use crate::parser::statement::expression::function_call::FunctionCall; /// Translates the verbatim simulations. 
-pub fn verbatim<'ctx, D>( - context: &mut revive_llvm_context::PolkaVMContext<'ctx, D>, +pub fn verbatim<'ctx>( + context: &mut PolkaVMContext<'ctx>, call: &mut FunctionCall, _input_size: usize, output_size: usize, -) -> anyhow::Result>> -where - D: revive_llvm_context::PolkaVMDependency + Clone, -{ +) -> anyhow::Result>> { if output_size > 1 { anyhow::bail!( "{} Verbatim instructions with multiple return values are not supported", @@ -19,7 +18,7 @@ where ); } - let mut arguments = call.pop_arguments::(context)?; + let mut arguments = call.pop_arguments::<1>(context)?; let identifier = arguments[0] .original .take() diff --git a/crates/yul/src/parser/statement/expression/literal.rs b/crates/yul/src/parser/statement/expression/literal.rs index d5aabab..554e54e 100644 --- a/crates/yul/src/parser/statement/expression/literal.rs +++ b/crates/yul/src/parser/statement/expression/literal.rs @@ -7,6 +7,12 @@ use num::Zero; use serde::Deserialize; use serde::Serialize; +use revive_common::BASE_DECIMAL; +use revive_common::BASE_HEXADECIMAL; +use revive_common::BYTE_LENGTH_WORD; +use revive_llvm_context::PolkaVMArgument; +use revive_llvm_context::PolkaVMContext; + use crate::error::Error; use crate::lexer::token::lexeme::literal::boolean::Boolean as BooleanLiteral; use crate::lexer::token::lexeme::literal::integer::Integer as IntegerLiteral; @@ -72,13 +78,10 @@ impl Literal { } /// Converts the literal into its LLVM. 
- pub fn into_llvm<'ctx, D>( + pub fn into_llvm<'ctx>( self, - context: &revive_llvm_context::PolkaVMContext<'ctx, D>, - ) -> anyhow::Result> - where - D: revive_llvm_context::PolkaVMDependency + Clone, - { + context: &PolkaVMContext<'ctx>, + ) -> anyhow::Result> { match self.inner { LexicalLiteral::Boolean(inner) => { let value = self @@ -99,7 +102,7 @@ impl Literal { BooleanLiteral::True => num::BigUint::one(), }; - Ok(revive_llvm_context::PolkaVMArgument::value(value).with_constant(constant)) + Ok(PolkaVMArgument::value(value).with_constant(constant)) } LexicalLiteral::Integer(inner) => { let r#type = self.yul_type.unwrap_or_default().into_llvm(context); @@ -118,16 +121,15 @@ impl Literal { let constant = match inner { IntegerLiteral::Decimal { ref inner } => { - num::BigUint::from_str_radix(inner.as_str(), revive_common::BASE_DECIMAL) + num::BigUint::from_str_radix(inner.as_str(), BASE_DECIMAL) + } + IntegerLiteral::Hexadecimal { ref inner } => { + num::BigUint::from_str_radix(&inner["0x".len()..], BASE_HEXADECIMAL) } - IntegerLiteral::Hexadecimal { ref inner } => num::BigUint::from_str_radix( - &inner["0x".len()..], - revive_common::BASE_HEXADECIMAL, - ), } .expect("Always valid"); - Ok(revive_llvm_context::PolkaVMArgument::value(value).with_constant(constant)) + Ok(PolkaVMArgument::value(value).with_constant(constant)) } LexicalLiteral::String(inner) => { let string = inner.inner; @@ -136,7 +138,7 @@ impl Literal { let mut hex_string = if inner.is_hexadecimal { string.clone() } else { - let mut hex_string = String::with_capacity(revive_common::BYTE_LENGTH_WORD * 2); + let mut hex_string = String::with_capacity(BYTE_LENGTH_WORD * 2); let mut index = 0; loop { if index >= string.len() { @@ -151,17 +153,16 @@ impl Literal { index += 3; } else if string[index..].starts_with('u') { let codepoint_str = &string[index + 1..index + 5]; - let codepoint = u32::from_str_radix( - codepoint_str, - revive_common::BASE_HEXADECIMAL, - ) - .map_err(|error| { - anyhow::anyhow!( 
- "Invalid codepoint `{}`: {}", - codepoint_str, - error - ) - })?; + let codepoint = + u32::from_str_radix(codepoint_str, BASE_HEXADECIMAL).map_err( + |error| { + anyhow::anyhow!( + "Invalid codepoint `{}`: {}", + codepoint_str, + error + ) + }, + )?; let unicode_char = char::from_u32(codepoint).ok_or_else(|| { anyhow::anyhow!("Invalid codepoint {}", codepoint) })?; @@ -197,16 +198,16 @@ impl Literal { hex_string }; - if hex_string.len() > revive_common::BYTE_LENGTH_WORD * 2 { + if hex_string.len() > BYTE_LENGTH_WORD * 2 { return Ok(revive_llvm_context::PolkaVMArgument::value( r#type.const_zero().as_basic_value_enum(), ) .with_original(string)); } - if hex_string.len() < revive_common::BYTE_LENGTH_WORD * 2 { + if hex_string.len() < BYTE_LENGTH_WORD * 2 { hex_string.push_str( - "0".repeat((revive_common::BYTE_LENGTH_WORD * 2) - hex_string.len()) + "0".repeat((BYTE_LENGTH_WORD * 2) - hex_string.len()) .as_str(), ); } @@ -218,7 +219,7 @@ impl Literal { ) .expect("The value is valid") .as_basic_value_enum(); - Ok(revive_llvm_context::PolkaVMArgument::value(value).with_original(string)) + Ok(PolkaVMArgument::value(value).with_original(string)) } } } diff --git a/crates/yul/src/parser/statement/expression/mod.rs b/crates/yul/src/parser/statement/expression/mod.rs index 2a6a609..b0dc167 100644 --- a/crates/yul/src/parser/statement/expression/mod.rs +++ b/crates/yul/src/parser/statement/expression/mod.rs @@ -1,13 +1,13 @@ //! The expression statement. 
-pub mod function_call; -pub mod literal; - -use std::collections::HashSet; +use std::collections::BTreeSet; use serde::Deserialize; use serde::Serialize; +use revive_llvm_context::PolkaVMArgument; +use revive_llvm_context::PolkaVMContext; + use crate::error::Error; use crate::lexer::token::lexeme::symbol::Symbol; use crate::lexer::token::lexeme::Lexeme; @@ -22,6 +22,9 @@ use crate::visitor::AstVisitor; use self::function_call::FunctionCall; use self::literal::Literal; +pub mod function_call; +pub mod literal; + /// The Yul expression statement. #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] pub enum Expression { @@ -82,11 +85,11 @@ impl Expression { } /// Get the list of missing deployable libraries. - pub fn get_missing_libraries(&self) -> HashSet { + pub fn get_missing_libraries(&self) -> BTreeSet { match self { Self::FunctionCall(inner) => inner.get_missing_libraries(), - Self::Identifier(_) => HashSet::new(), - Self::Literal(_) => HashSet::new(), + Self::Identifier(_) => BTreeSet::new(), + Self::Literal(_) => BTreeSet::new(), } } @@ -100,13 +103,10 @@ impl Expression { } /// Converts the expression into an LLVM value. - pub fn into_llvm<'ctx, D>( + pub fn into_llvm<'ctx>( self, - context: &mut revive_llvm_context::PolkaVMContext<'ctx, D>, - ) -> anyhow::Result>> - where - D: revive_llvm_context::PolkaVMDependency + Clone, - { + context: &mut PolkaVMContext<'ctx>, + ) -> anyhow::Result>> { match self { Self::Literal(literal) => literal .clone() @@ -133,16 +133,14 @@ impl Expression { let constant = context.current_function().borrow().yul().get_constant(&id); - let argument = revive_llvm_context::PolkaVMArgument::pointer(pointer, id); + let argument = PolkaVMArgument::pointer(pointer, id); Ok(Some(match constant { Some(constant) => argument.with_constant(constant), _ => argument, })) } - Self::FunctionCall(call) => Ok(call - .into_llvm(context)? 
- .map(revive_llvm_context::PolkaVMArgument::value)), + Self::FunctionCall(call) => Ok(call.into_llvm(context)?.map(PolkaVMArgument::value)), } } } diff --git a/crates/yul/src/parser/statement/for_loop.rs b/crates/yul/src/parser/statement/for_loop.rs index 605bded..6203d8d 100644 --- a/crates/yul/src/parser/statement/for_loop.rs +++ b/crates/yul/src/parser/statement/for_loop.rs @@ -1,10 +1,13 @@ //! The for-loop statement. -use std::collections::HashSet; +use std::collections::BTreeSet; use serde::Deserialize; use serde::Serialize; +use revive_llvm_context::PolkaVMContext; +use revive_llvm_context::PolkaVMWriteLLVM; + use crate::error::Error; use crate::lexer::token::location::Location; use crate::lexer::token::Token; @@ -53,7 +56,7 @@ impl ForLoop { } /// Get the list of missing deployable libraries. - pub fn get_missing_libraries(&self) -> HashSet { + pub fn get_missing_libraries(&self) -> BTreeSet { let mut libraries = self.initializer.get_missing_libraries(); libraries.extend(self.condition.get_missing_libraries()); libraries.extend(self.finalizer.get_missing_libraries()); @@ -62,11 +65,8 @@ impl ForLoop { } } -impl revive_llvm_context::PolkaVMWriteLLVM for ForLoop -where - D: revive_llvm_context::PolkaVMDependency + Clone, -{ - fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> { +impl PolkaVMWriteLLVM for ForLoop { + fn into_llvm(self, context: &mut PolkaVMContext) -> anyhow::Result<()> { self.initializer.into_llvm(context)?; let condition_block = context.append_basic_block("for_condition"); diff --git a/crates/yul/src/parser/statement/function_definition.rs b/crates/yul/src/parser/statement/function_definition.rs index 7b1ecb5..0d65fd0 100644 --- a/crates/yul/src/parser/statement/function_definition.rs +++ b/crates/yul/src/parser/statement/function_definition.rs @@ -1,13 +1,20 @@ //! The function definition statement. 
use std::collections::BTreeSet; -use std::collections::HashSet; use inkwell::types::BasicType; use serde::Deserialize; use serde::Serialize; +use revive_common::BIT_LENGTH_X32; +use revive_llvm_context::PolkaVMAttribute; +use revive_llvm_context::PolkaVMContext; +use revive_llvm_context::PolkaVMFunction; +use revive_llvm_context::PolkaVMFunctionReturn; +use revive_llvm_context::PolkaVMFunctionYulData; +use revive_llvm_context::PolkaVMWriteLLVM; + use crate::error::Error; use crate::lexer::token::lexeme::symbol::Symbol; use crate::lexer::token::lexeme::Lexeme; @@ -38,7 +45,7 @@ pub struct FunctionDefinition { /// The function body block. pub body: Block, /// The function LLVM attributes encoded in the identifier. - pub attributes: BTreeSet, + pub attributes: BTreeSet, } impl FunctionDefinition { @@ -148,14 +155,14 @@ impl FunctionDefinition { } /// Gets the list of missing deployable libraries. - pub fn get_missing_libraries(&self) -> HashSet { + pub fn get_missing_libraries(&self) -> BTreeSet { self.body.get_missing_libraries() } /// Gets the list of LLVM attributes provided in the function name. 
pub fn get_llvm_attributes( identifier: &Identifier, - ) -> Result, Error> { + ) -> Result, Error> { let mut valid_attributes = BTreeSet::new(); let llvm_begin = identifier.inner.find(Self::LLVM_ATTRIBUTE_PREFIX); @@ -172,7 +179,7 @@ impl FunctionDefinition { let mut invalid_attributes = BTreeSet::new(); for value in attribute_string.split('_') { - match revive_llvm_context::PolkaVMAttribute::try_from(value) { + match PolkaVMAttribute::try_from(value) { Ok(attribute) => valid_attributes.insert(attribute), Err(value) => invalid_attributes.insert(value), }; @@ -190,14 +197,8 @@ impl FunctionDefinition { } } -impl revive_llvm_context::PolkaVMWriteLLVM for FunctionDefinition -where - D: revive_llvm_context::PolkaVMDependency + Clone, -{ - fn declare( - &mut self, - context: &mut revive_llvm_context::PolkaVMContext, - ) -> anyhow::Result<()> { +impl PolkaVMWriteLLVM for FunctionDefinition { + fn declare(&mut self, context: &mut PolkaVMContext) -> anyhow::Result<()> { context.set_debug_location(self.location.line, self.location.column, None)?; let argument_types: Vec<_> = self .arguments @@ -217,7 +218,7 @@ where Some(inkwell::module::Linkage::External), Some((self.location.line, self.location.column)), )?; - revive_llvm_context::PolkaVMFunction::set_attributes( + PolkaVMFunction::set_attributes( context.llvm(), function.borrow().declaration(), &self.attributes.clone().into_iter().collect::>(), @@ -225,15 +226,12 @@ where ); function .borrow_mut() - .set_yul_data(revive_llvm_context::PolkaVMFunctionYulData::default()); + .set_yul_data(PolkaVMFunctionYulData::default()); Ok(()) } - fn into_llvm( - mut self, - context: &mut revive_llvm_context::PolkaVMContext, - ) -> anyhow::Result<()> { + fn into_llvm(mut self, context: &mut PolkaVMContext) -> anyhow::Result<()> { context.set_current_function( self.identifier.as_str(), Some((self.location.line, self.location.column)), @@ -242,8 +240,8 @@ where let r#return = context.current_function().borrow().r#return(); match r#return { 
- revive_llvm_context::PolkaVMFunctionReturn::None => {} - revive_llvm_context::PolkaVMFunctionReturn::Primitive { pointer } => { + PolkaVMFunctionReturn::None => {} + PolkaVMFunctionReturn::Primitive { pointer } => { let identifier = self.result.pop().expect("Always exists"); let r#type = identifier.r#type.unwrap_or_default(); @@ -253,7 +251,7 @@ where .borrow_mut() .insert_stack_pointer(identifier.inner, pointer); } - revive_llvm_context::PolkaVMFunctionReturn::Compound { pointer, .. } => { + PolkaVMFunctionReturn::Compound { pointer, .. } => { for (index, identifier) in self.result.into_iter().enumerate() { let r#type = identifier.r#type.unwrap_or_default().into_llvm(context); let pointer = context.build_gep( @@ -261,7 +259,7 @@ where &[ context.word_const(0), context - .integer_type(revive_common::BIT_LENGTH_X32) + .integer_type(BIT_LENGTH_X32) .const_int(index as u64, false), ], context.word_type(), @@ -312,14 +310,14 @@ where context.set_basic_block(context.current_function().borrow().return_block()); match context.current_function().borrow().r#return() { - revive_llvm_context::PolkaVMFunctionReturn::None => { + PolkaVMFunctionReturn::None => { context.build_return(None); } - revive_llvm_context::PolkaVMFunctionReturn::Primitive { pointer } => { + PolkaVMFunctionReturn::Primitive { pointer } => { let return_value = context.build_load(pointer, "return_value")?; context.build_return(Some(&return_value)); } - revive_llvm_context::PolkaVMFunctionReturn::Compound { pointer, .. } => { + PolkaVMFunctionReturn::Compound { pointer, .. } => { let return_value = context.build_load(pointer, "return_value")?; context.build_return(Some(&return_value)); } diff --git a/crates/yul/src/parser/statement/if_conditional.rs b/crates/yul/src/parser/statement/if_conditional.rs index 5ac04fa..d3848c2 100644 --- a/crates/yul/src/parser/statement/if_conditional.rs +++ b/crates/yul/src/parser/statement/if_conditional.rs @@ -1,10 +1,13 @@ //! The if-conditional statement. 
-use std::collections::HashSet; +use std::collections::BTreeSet; use serde::Deserialize; use serde::Serialize; +use revive_llvm_context::PolkaVMContext; +use revive_llvm_context::PolkaVMWriteLLVM; + use crate::error::Error; use crate::lexer::token::location::Location; use crate::lexer::token::Token; @@ -43,18 +46,15 @@ impl IfConditional { } /// Get the list of missing deployable libraries. - pub fn get_missing_libraries(&self) -> HashSet { + pub fn get_missing_libraries(&self) -> BTreeSet { let mut libraries = self.condition.get_missing_libraries(); libraries.extend(self.block.get_missing_libraries()); libraries } } -impl revive_llvm_context::PolkaVMWriteLLVM for IfConditional -where - D: revive_llvm_context::PolkaVMDependency + Clone, -{ - fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> { +impl PolkaVMWriteLLVM for IfConditional { + fn into_llvm(self, context: &mut PolkaVMContext) -> anyhow::Result<()> { let condition = self .condition .into_llvm(context)? diff --git a/crates/yul/src/parser/statement/mod.rs b/crates/yul/src/parser/statement/mod.rs index 0f3436d..ddd63f0 100644 --- a/crates/yul/src/parser/statement/mod.rs +++ b/crates/yul/src/parser/statement/mod.rs @@ -1,17 +1,6 @@ //! The block statement. -pub mod assignment; -pub mod block; -pub mod code; -pub mod expression; -pub mod for_loop; -pub mod function_definition; -pub mod if_conditional; -pub mod object; -pub mod switch; -pub mod variable_declaration; - -use std::collections::HashSet; +use std::collections::BTreeSet; use serde::Deserialize; use serde::Serialize; @@ -37,6 +26,17 @@ use self::object::Object; use self::switch::Switch; use self::variable_declaration::VariableDeclaration; +pub mod assignment; +pub mod block; +pub mod code; +pub mod expression; +pub mod for_loop; +pub mod function_definition; +pub mod if_conditional; +pub mod object; +pub mod switch; +pub mod variable_declaration; + /// The Yul block statement. 
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] pub enum Statement { @@ -142,7 +142,7 @@ impl Statement { } /// Get the list of missing deployable libraries. - pub fn get_missing_libraries(&self) -> HashSet { + pub fn get_missing_libraries(&self) -> BTreeSet { match self { Self::Object(inner) => inner.get_missing_libraries(), Self::Code(inner) => inner.get_missing_libraries(), @@ -154,9 +154,9 @@ impl Statement { Self::IfConditional(inner) => inner.get_missing_libraries(), Self::Switch(inner) => inner.get_missing_libraries(), Self::ForLoop(inner) => inner.get_missing_libraries(), - Self::Continue(_) => HashSet::new(), - Self::Break(_) => HashSet::new(), - Self::Leave(_) => HashSet::new(), + Self::Continue(_) => BTreeSet::new(), + Self::Break(_) => BTreeSet::new(), + Self::Leave(_) => BTreeSet::new(), } } diff --git a/crates/yul/src/parser/statement/object.rs b/crates/yul/src/parser/statement/object.rs index b79d54c..22a72a0 100644 --- a/crates/yul/src/parser/statement/object.rs +++ b/crates/yul/src/parser/statement/object.rs @@ -1,12 +1,15 @@ //! The YUL object. +use std::collections::BTreeSet; use std::collections::HashSet; use inkwell::debug_info::AsDIScope; - use serde::Deserialize; use serde::Serialize; +use revive_llvm_context::PolkaVMContext; +use revive_llvm_context::PolkaVMWriteLLVM; + use crate::error::Error; use crate::lexer::token::lexeme::keyword::Keyword; use crate::lexer::token::lexeme::literal::Literal; @@ -170,7 +173,7 @@ impl Object { } /// Get the list of missing deployable libraries. 
- pub fn get_missing_libraries(&self) -> HashSet { + pub fn get_missing_libraries(&self) -> BTreeSet { let mut missing_libraries = self.code.get_missing_libraries(); if let Some(inner_object) = &self.inner_object { missing_libraries.extend(inner_object.get_missing_libraries()); @@ -179,14 +182,8 @@ impl Object { } } -impl revive_llvm_context::PolkaVMWriteLLVM for Object -where - D: revive_llvm_context::PolkaVMDependency + Clone, -{ - fn declare( - &mut self, - context: &mut revive_llvm_context::PolkaVMContext, - ) -> anyhow::Result<()> { +impl PolkaVMWriteLLVM for Object { + fn declare(&mut self, context: &mut PolkaVMContext) -> anyhow::Result<()> { revive_llvm_context::PolkaVMLoadImmutableDataFunction.declare(context)?; revive_llvm_context::PolkaVMStoreImmutableDataFunction.declare(context)?; @@ -270,7 +267,7 @@ where Ok(()) } - fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> { + fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> { if let Some(debug_info) = context.debug_info() { let di_builder = debug_info.builder(); let object_name: &str = self.identifier.as_str(); diff --git a/crates/yul/src/parser/statement/switch/case.rs b/crates/yul/src/parser/statement/switch/case.rs index d618ae0..905447a 100644 --- a/crates/yul/src/parser/statement/switch/case.rs +++ b/crates/yul/src/parser/statement/switch/case.rs @@ -1,6 +1,6 @@ //! The switch statement case. -use std::collections::HashSet; +use std::collections::BTreeSet; use serde::Deserialize; use serde::Serialize; @@ -58,7 +58,7 @@ impl Case { } /// Get the list of missing deployable libraries. 
- pub fn get_missing_libraries(&self) -> HashSet { + pub fn get_missing_libraries(&self) -> BTreeSet { self.block.get_missing_libraries() } } diff --git a/crates/yul/src/parser/statement/switch/mod.rs b/crates/yul/src/parser/statement/switch/mod.rs index 2b374ea..7105d43 100644 --- a/crates/yul/src/parser/statement/switch/mod.rs +++ b/crates/yul/src/parser/statement/switch/mod.rs @@ -1,12 +1,13 @@ //! The switch statement. -pub mod case; - -use std::collections::HashSet; +use std::collections::BTreeSet; use serde::Deserialize; use serde::Serialize; +use revive_llvm_context::PolkaVMContext; +use revive_llvm_context::PolkaVMWriteLLVM; + use crate::error::Error; use crate::lexer::token::lexeme::keyword::Keyword; use crate::lexer::token::lexeme::Lexeme; @@ -21,6 +22,8 @@ use crate::visitor::AstVisitor; use self::case::Case; +pub mod case; + /// The Yul switch statement. #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] pub struct Switch { @@ -108,8 +111,8 @@ impl Switch { } /// Get the list of missing deployable libraries. 
- pub fn get_missing_libraries(&self) -> HashSet { - let mut libraries = HashSet::new(); + pub fn get_missing_libraries(&self) -> BTreeSet { + let mut libraries = BTreeSet::new(); for case in self.cases.iter() { libraries.extend(case.get_missing_libraries()); } @@ -120,11 +123,8 @@ impl Switch { } } -impl revive_llvm_context::PolkaVMWriteLLVM for Switch -where - D: revive_llvm_context::PolkaVMDependency + Clone, -{ - fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext) -> anyhow::Result<()> { +impl PolkaVMWriteLLVM for Switch { + fn into_llvm(self, context: &mut PolkaVMContext) -> anyhow::Result<()> { context.set_debug_location(self.location.line, self.location.column, None)?; let scrutinee = self.expression.into_llvm(context)?; diff --git a/crates/yul/src/parser/statement/variable_declaration.rs b/crates/yul/src/parser/statement/variable_declaration.rs index 8231d33..171c073 100644 --- a/crates/yul/src/parser/statement/variable_declaration.rs +++ b/crates/yul/src/parser/statement/variable_declaration.rs @@ -1,12 +1,16 @@ //! The variable declaration statement. -use std::collections::HashSet; +use std::collections::BTreeSet; use inkwell::types::BasicType; use inkwell::values::BasicValue; use serde::Deserialize; use serde::Serialize; +use revive_common::BIT_LENGTH_X32; +use revive_llvm_context::PolkaVMContext; +use revive_llvm_context::PolkaVMWriteLLVM; + use crate::error::Error; use crate::lexer::token::lexeme::symbol::Symbol; use crate::lexer::token::lexeme::Lexeme; @@ -84,23 +88,17 @@ impl VariableDeclaration { } /// Get the list of missing deployable libraries. 
- pub fn get_missing_libraries(&self) -> HashSet { + pub fn get_missing_libraries(&self) -> BTreeSet { self.expression .as_ref() - .map_or_else(HashSet::new, |expression| { + .map_or_else(BTreeSet::new, |expression| { expression.get_missing_libraries() }) } } -impl revive_llvm_context::PolkaVMWriteLLVM for VariableDeclaration -where - D: revive_llvm_context::PolkaVMDependency + Clone, -{ - fn into_llvm<'ctx>( - mut self, - context: &mut revive_llvm_context::PolkaVMContext<'ctx, D>, - ) -> anyhow::Result<()> { +impl PolkaVMWriteLLVM for VariableDeclaration { + fn into_llvm<'ctx>(mut self, context: &mut PolkaVMContext<'ctx>) -> anyhow::Result<()> { if self.bindings.len() == 1 { let identifier = self.bindings.remove(0); context.set_debug_location(self.location.line, self.location.column, None)?; @@ -194,7 +192,7 @@ where &[ context.word_const(0), context - .integer_type(revive_common::BIT_LENGTH_X32) + .integer_type(BIT_LENGTH_X32) .const_int(index as u64, false), ], binding.r#type.unwrap_or_default().into_llvm(context), diff --git a/crates/yul/src/parser/type.rs b/crates/yul/src/parser/type.rs index ec4d8c4..c734627 100644 --- a/crates/yul/src/parser/type.rs +++ b/crates/yul/src/parser/type.rs @@ -3,6 +3,10 @@ use serde::Deserialize; use serde::Serialize; +use revive_common::BIT_LENGTH_BOOLEAN; +use revive_common::BIT_LENGTH_WORD; +use revive_llvm_context::PolkaVMContext; + use crate::error::Error; use crate::lexer::token::lexeme::keyword::Keyword; use crate::lexer::token::lexeme::Lexeme; @@ -26,7 +30,7 @@ pub enum Type { impl Default for Type { fn default() -> Self { - Self::UInt(revive_common::BIT_LENGTH_WORD) + Self::UInt(BIT_LENGTH_WORD) } } @@ -62,15 +66,9 @@ impl Type { } /// Converts the type into its LLVM. 
- pub fn into_llvm<'ctx, D>( - self, - context: &revive_llvm_context::PolkaVMContext<'ctx, D>, - ) -> inkwell::types::IntType<'ctx> - where - D: revive_llvm_context::PolkaVMDependency + Clone, - { + pub fn into_llvm<'ctx>(self, context: &PolkaVMContext<'ctx>) -> inkwell::types::IntType<'ctx> { match self { - Self::Bool => context.integer_type(revive_common::BIT_LENGTH_BOOLEAN), + Self::Bool => context.integer_type(BIT_LENGTH_BOOLEAN), Self::Int(bitlength) => context.integer_type(bitlength), Self::UInt(bitlength) => context.integer_type(bitlength), Self::Custom(_) => context.word_type(), diff --git a/js/resolc/package.json b/js/resolc/package.json index 7dab675..4bb6e70 100644 --- a/js/resolc/package.json +++ b/js/resolc/package.json @@ -1,7 +1,7 @@ { "name": "@parity/resolc", "license": "Apache-2.0", - "version": "0.3.0", + "version": "0.4.0", "author": "Parity (https://parity.io)", "module": "index.ts", "types": "./dist/index.d.ts", diff --git a/js/resolc/src/index.ts b/js/resolc/src/index.ts index ea61bf4..cb65701 100644 --- a/js/resolc/src/index.ts +++ b/js/resolc/src/index.ts @@ -98,7 +98,6 @@ export async function compile( const { optimizer = { mode: 'z', - fallback_to_optimizing_for_size: true, enabled: true, runs: 200, }, diff --git a/package-lock.json b/package-lock.json index 7578a1a..3ca9cd1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -31,7 +31,7 @@ }, "js/resolc": { "name": "@parity/resolc", - "version": "0.3.0", + "version": "0.4.0", "license": "Apache-2.0", "dependencies": { "@types/node": "^22.9.0",