Mirror of https://github.com/pezkuwichain/revive.git (synced 2026-04-22 02:07:55 +00:00)

Merge branch 'main' into cl/optsize
@@ -1,3 +1,13 @@
#!/usr/bin/env python3
"""
This script generates JSON files for different platforms based on GitHub release data.
It fetches release information from a specified GitHub repository and tag,
parses the release assets, and generates JSON files for each platform with relevant metadata.
It also handles checksum files and updates a list.json file for each platform.
It requires the GITHUB_TOKEN environment variable to be set for authentication.
Usage:
    python json_generator.py <repo> <tag>
"""
import os
import sys
import json

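The hunk above only adds the module docstring and imports; the release-fetching logic itself is not shown here. Purely as an illustrative assumption, the lookup described in the docstring could be done along these lines — the endpoint and header follow the standard GitHub REST conventions and are not code taken from the script:

import os
import json
import urllib.request

def fetch_release(repo: str, tag: str) -> dict:
    """Hypothetical helper: fetch release metadata for `repo` at `tag`."""
    url = f"https://api.github.com/repos/{repo}/releases/tags/{tag}"
    request = urllib.request.Request(
        url,
        headers={"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"},
    )
    with urllib.request.urlopen(request) as response:
        return json.load(response)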
@@ -0,0 +1,155 @@
#!/usr/bin/env python3
"""
This script generates JSON files for different platforms based on GitHub data.
Requires the GITHUB_SHA, FIRST_SOLC_VERSION, LAST_SOLC_VERSION, TAG and FILEPATH environment variables to be set.
Usage:
    python json_generator_nightly.py
"""
import os
import sys
import json
from datetime import datetime


def validate_env_variables():
    """Validate that the required environment variables are set."""
    if "GITHUB_SHA" not in os.environ:
        print("Error: GITHUB_SHA environment variable is not set.")
        sys.exit(1)
    if "FIRST_SOLC_VERSION" not in os.environ:
        print("Error: FIRST_SOLC_VERSION environment variable is not set.")
        sys.exit(1)
    if "LAST_SOLC_VERSION" not in os.environ:
        print("Error: LAST_SOLC_VERSION environment variable is not set.")
        sys.exit(1)
    if "TAG" not in os.environ:
        print("Error: TAG environment variable is not set.")
        sys.exit(1)
    if "FILEPATH" not in os.environ:
        print("Error: FILEPATH environment variable is not set.")
        sys.exit(1)


def fetch_data_file():
    """
    Fetch the data.json file with artifact URLs and sha256 checksums
    and parse it into a single dictionary mapping artifact names to their URLs and SHAs.
    """
    # read data.json file
    artifacts_data = {}
    data_file_path = os.environ["FILEPATH"]
    if not os.path.exists(data_file_path):
        print("Error: data.json file not found.")
        sys.exit(1)
    with open(data_file_path, 'r') as f:
        try:
            artifacts_data = json.load(f)
        except json.JSONDecodeError:
            print("Error: data.json file is not a valid JSON.")
            sys.exit(1)

    result = {}

    for item in artifacts_data:
        for key, value in item.items():
            if key.endswith('_url'):
                base_key = key.rsplit('_url', 1)[0]
                if base_key not in result:
                    result[base_key] = {}
                result[base_key]['url'] = value
            elif key.endswith('_sha'):
                base_key = key.rsplit('_sha', 1)[0]
                if base_key not in result:
                    result[base_key] = {}
                result[base_key]['sha'] = value

    return result


def extract_build_hash():
    """Extract the first 8 characters of the commit hash."""
    sha = os.environ.get("GITHUB_SHA")
    return f"commit.{sha[:8]}"


def generate_asset_json_nightly(name, url, checksum):
    """Generate JSON for a specific asset."""
    # Date in the format YYYY.M.D (month and day without zero padding)
    date = datetime.now().strftime("%Y.%-m.%-d")
    last_version = os.environ.get("TAG").replace('v', '')
    version = f"{last_version}-nightly.{date}"
    SHA = os.environ.get("GITHUB_SHA", "")[:8]
    build = f"commit.{SHA}"
    long_version = f"{version}+{build}"

    return {
        "name": name,
        "version": version,
        "build": build,
        "longVersion": long_version,
        "url": url,
        "sha256": checksum,
        "firstSolcVersion": os.environ.get("FIRST_SOLC_VERSION"),
        "lastSolcVersion": os.environ.get("LAST_SOLC_VERSION")
    }


def save_platform_json(platform_folder, asset_json):
    """Save asset JSON and update list.json for a specific platform."""
    # Create platform folder if it doesn't exist
    os.makedirs(platform_folder, exist_ok=True)

    # Update or create list.json
    list_file_path = os.path.join(platform_folder, "list.json")

    if os.path.exists(list_file_path):
        with open(list_file_path, 'r') as f:
            try:
                list_data = json.load(f)
            except json.JSONDecodeError:
                list_data = {"builds": [], "releases": {}, "latestRelease": ""}
    else:
        list_data = {"builds": [], "releases": {}, "latestRelease": ""}

    # Remove any existing entry with the same version
    list_data['builds'] = [
        build for build in list_data['builds']
        if build['version'] != asset_json['version']
    ]
    # Add the new build
    list_data['builds'].append(asset_json)

    # Update releases
    version = asset_json['version']
    list_data['releases'][version] = f"{asset_json['name']}+{asset_json['longVersion']}"

    # Update latest release
    list_data['latestRelease'] = version

    with open(list_file_path, 'w') as f:
        json.dump(list_data, f, indent=4)


def main():
    validate_env_variables()
    data = fetch_data_file()

    # Mapping of asset names to platform folders
    platform_mapping = {
        'resolc-x86_64-unknown-linux-musl': 'linux',
        'resolc-universal-apple-darwin': 'macos',
        'resolc-x86_64-pc-windows-msvc': 'windows',
        'resolc-web.js': 'wasm'
    }

    # Process each asset
    for asset in data.keys():
        platform_name = platform_mapping.get(asset)
        if platform_name:
            platform_folder = os.path.join(platform_name)
            asset_json = generate_asset_json_nightly(asset, data[asset]['url'], data[asset]['sha'])
            save_platform_json(platform_folder, asset_json)
            print(f"Processed {asset} for {platform_name}")


if __name__ == "__main__":
    main()

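To make the data flow easier to follow: the FILEPATH file is the data.json that the release workflow assembles from its per-job outputs, a JSON array of objects holding `<artifact>_url` / `<artifact>_sha` pairs. A minimal sketch of that shape and of how fetch_data_file collapses it, using placeholder URLs and digests rather than real artifacts:

import json

# Placeholder input in the shape fetch_data_file() expects.
data_json = [
    {
        "resolc-x86_64-unknown-linux-musl_url": "https://example.invalid/artifacts/1",
        "resolc-x86_64-unknown-linux-musl_sha": "0f0e0d...",
    },
    {
        "resolc-web.js_url": "https://example.invalid/artifacts/2",
        "resolc-web.js_sha": "a1b2c3...",
    },
]

# Collapse the *_url / *_sha suffixes into one entry per artifact,
# mirroring what fetch_data_file() does.
result = {}
for item in data_json:
    for key, value in item.items():
        if key.endswith("_url"):
            result.setdefault(key.rsplit("_url", 1)[0], {})["url"] = value
        elif key.endswith("_sha"):
            result.setdefault(key.rsplit("_sha", 1)[0], {})["sha"] = value

print(json.dumps(result, indent=2))
# -> {"resolc-x86_64-unknown-linux-musl": {"url": "...", "sha": "..."},
#     "resolc-web.js": {"url": "...", "sha": "..."}}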
@@ -0,0 +1,385 @@
name: Nightly Release
on:
  schedule:
    # Run every day at 01:00 UTC
    - cron: "0 1 * * *"

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  CARGO_TERM_COLOR: always
  RUST_MUSL_CROSS_IMAGE: messense/rust-musl-cross@sha256:c0154e992adb791c3b848dd008939d19862549204f8cb26f5ca7a00f629e6067

jobs:
  # check if there were commits yesterday
  check_commits:
    runs-on: ubuntu-latest
    outputs:
      has_commits: ${{ steps.check_commits.outputs.has_commits }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Fetch full history to check previous commits
          ref: "main"

      - name: Check for commits from yesterday
        id: check_commits
        run: |
          # Get yesterday's date in YYYY-MM-DD format
          YESTERDAY=$(date -d "yesterday" +%Y-%m-%d)
          echo "Checking for commits from: $YESTERDAY"

          # Check if there were any commits yesterday
          COMMIT_COUNT=$(git log --oneline --since="$YESTERDAY 00:00:00" --until="$YESTERDAY 23:59:59" | wc -l)

          echo "Found $COMMIT_COUNT commits from yesterday"

          if [ $COMMIT_COUNT -gt 0 ]; then
            echo "has_commits=true" >> $GITHUB_OUTPUT
            echo "✅ Found $COMMIT_COUNT commits from yesterday - continuing workflow"
          else
            echo "has_commits=false" >> $GITHUB_OUTPUT
            echo "❌ No commits found from yesterday - skipping remaining steps"
            echo "::notice::❌ No commits found from yesterday - skipping remaining steps"
          fi

  build:
    # github actions matrix jobs don't support multiple outputs
    # ugly workaround from https://github.com/orgs/community/discussions/17245#discussioncomment-11222880
    if: ${{ needs.check_commits.outputs.has_commits == 'true' }}
    outputs:
      resolc-x86_64-unknown-linux-musl_url: ${{ steps.set-output.outputs.resolc-x86_64-unknown-linux-musl_url }}
      resolc-x86_64-unknown-linux-musl_sha: ${{ steps.set-output.outputs.resolc-x86_64-unknown-linux-musl_sha }}
      resolc-aarch64-apple-darwin_url: ${{ steps.set-output.outputs.resolc-aarch64-apple-darwin_url }}
      resolc-aarch64-apple-darwin_sha: ${{ steps.set-output.outputs.resolc-aarch64-apple-darwin_sha }}
      resolc-x86_64-apple-darwin_url: ${{ steps.set-output.outputs.resolc-x86_64-apple-darwin_url }}
      resolc-x86_64-apple-darwin_sha: ${{ steps.set-output.outputs.resolc-x86_64-apple-darwin_sha }}
      resolc-x86_64-pc-windows-msvc_url: ${{ steps.set-output.outputs.resolc-x86_64-pc-windows-msvc_url }}
      resolc-x86_64-pc-windows-msvc_sha: ${{ steps.set-output.outputs.resolc-x86_64-pc-windows-msvc_sha }}
    strategy:
      matrix:
        target:
          [
            x86_64-unknown-linux-musl,
            aarch64-apple-darwin,
            x86_64-apple-darwin,
            x86_64-pc-windows-msvc,
          ]
        include:
          - target: x86_64-unknown-linux-musl
            type: musl
            runner: ubuntu-24.04
          - target: aarch64-apple-darwin
            type: native
            runner: macos-14
          - target: x86_64-apple-darwin
            type: native
            runner: macos-13
          - target: x86_64-pc-windows-msvc
            type: native
            runner: windows-2022
    runs-on: ${{ matrix.runner }}
    needs: [check_commits]
    steps:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          # without this it will override our rust flags
          rustflags: ""
          cache-key: ${{ matrix.target }}

      - name: Download LLVM
        uses: ./.github/actions/get-llvm
        with:
          target: ${{ matrix.target }}

      - name: Build
        if: ${{ matrix.type == 'native' }}
        shell: bash
        run: |
          export LLVM_SYS_181_PREFIX=$PWD/llvm-${{ matrix.target }}
          make install-bin
          mv target/release/resolc resolc-${{ matrix.target }} || mv target/release/resolc.exe resolc-${{ matrix.target }}.exe

      - name: Build
        if: ${{ matrix.type == 'musl' }}
        run: |
          docker run -v $PWD:/opt/revive $RUST_MUSL_CROSS_IMAGE /bin/bash -c "
            cd /opt/revive
            chown -R root:root .
            apt update && apt upgrade -y && apt install -y pkg-config
            export LLVM_SYS_181_PREFIX=/opt/revive/llvm-${{ matrix.target }}
            make install-bin
            mv target/${{ matrix.target }}/release/resolc resolc-${{ matrix.target }}
          "
          sudo chown -R $(id -u):$(id -g) .

      - name: Install Solc
        uses: ./.github/actions/get-solc

      - name: Basic Sanity Check
        shell: bash
        run: |
          result=$(./resolc-${{ matrix.target }} --bin crates/integration/contracts/flipper.sol)
          echo $result
          if [[ $result == *'0x50564d'* ]]; then exit 0; else exit 1; fi

      - name: Upload artifacts (nightly)
        uses: actions/upload-artifact@v4
        id: artifact-upload-step
        with:
          name: resolc-${{ matrix.target }}
          path: resolc-${{ matrix.target }}*
          retention-days: 40

      - name: Set output variables (nightly)
        id: set-output
        shell: bash
        run: |
          echo "Artifact URL is ${{ steps.artifact-upload-step.outputs.artifact-url }}"
          echo "Artifact SHA is ${{ steps.artifact-upload-step.outputs.artifact-digest }}"
          echo "resolc-${{ matrix.target }}_url=${{ steps.artifact-upload-step.outputs.artifact-url }}"
          echo "resolc-${{ matrix.target }}_url=${{ steps.artifact-upload-step.outputs.artifact-url }}" >> "$GITHUB_OUTPUT"
          echo "resolc-${{ matrix.target }}_sha=${{ steps.artifact-upload-step.outputs.artifact-digest }}"
          echo "resolc-${{ matrix.target }}_sha=${{ steps.artifact-upload-step.outputs.artifact-digest }}" >> "$GITHUB_OUTPUT"

  build-wasm:
    runs-on: ubuntu-24.04
    needs: [check_commits]
    if: ${{ needs.check_commits.outputs.has_commits == 'true' }}
    outputs:
      resolc-web.js_url: ${{ steps.set-output.outputs.resolc_web_js_url }}
      resolc-web.js_sha: ${{ steps.set-output.outputs.resolc_web_js_sha }}
    env:
      RELEASE_RESOLC_WASM_URI: https://github.com/paritytech/revive/releases/download/${{ github.ref_name }}/resolc.wasm
    steps:
      - uses: actions/checkout@v4
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          target: wasm32-unknown-emscripten
          # without this it will override our rust flags
          rustflags: ""

      - name: Download Host LLVM
        uses: ./.github/actions/get-llvm
        with:
          target: x86_64-unknown-linux-gnu

      - name: Download Wasm LLVM
        uses: ./.github/actions/get-llvm
        with:
          target: wasm32-unknown-emscripten

      - name: Download EMSDK
        uses: ./.github/actions/get-emsdk

      - name: Build
        run: |
          export LLVM_SYS_181_PREFIX=$PWD/llvm-x86_64-unknown-linux-gnu
          export REVIVE_LLVM_TARGET_PREFIX=$PWD/llvm-wasm32-unknown-emscripten
          source emsdk/emsdk_env.sh
          make install-wasm
          chmod -x ./target/wasm32-unknown-emscripten/release/resolc.wasm

      - name: Set Up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: "20"

      - name: Basic Sanity Check
        run: |
          mkdir -p solc
          curl -sSLo solc/soljson.js https://github.com/ethereum/solidity/releases/download/v0.8.30/soljson.js
          node -e "
          const soljson = require('solc/soljson');
          const createRevive = require('./target/wasm32-unknown-emscripten/release/resolc.js');

          const compiler = createRevive();
          compiler.soljson = soljson;

          const standardJsonInput =
          {
            language: 'Solidity',
            sources: {
              'MyContract.sol': {
                content: 'pragma solidity ^0.8.0; contract MyContract { function greet() public pure returns (string memory) { return \'Hello\'; } }',
              },
            },
            settings: { optimizer: { enabled: false } }
          };

          compiler.writeToStdin(JSON.stringify(standardJsonInput));
          compiler.callMain(['--standard-json']);

          // Collect output
          const stdout = compiler.readFromStdout();
          const stderr = compiler.readFromStderr();

          if (stderr) { console.error(stderr); process.exit(1); }

          let out = JSON.parse(stdout);
          let bytecode = out.contracts['MyContract.sol']['MyContract'].evm.bytecode.object
          console.log(bytecode);

          if(!bytecode.startsWith('50564d')) { process.exit(1); }
          "

      - name: Compress Artifact
        run: |
          mkdir -p resolc-wasm32-unknown-emscripten
          mv ./target/wasm32-unknown-emscripten/release/resolc.js ./resolc-wasm32-unknown-emscripten/
          mv ./target/wasm32-unknown-emscripten/release/resolc.wasm ./resolc-wasm32-unknown-emscripten/
          mv ./target/wasm32-unknown-emscripten/release/resolc_web.js ./resolc-wasm32-unknown-emscripten/

      # There is no way to upload several files as several artifacts with a single upload-artifact step
      # It's needed to have resolc_web.js separately for nightly builds for the resolc-bin repo
      # https://github.com/actions/upload-artifact/issues/331
      - name: Upload artifact resolc.js (nightly)
        uses: actions/upload-artifact@v4
        with:
          name: resolc.js
          path: resolc-wasm32-unknown-emscripten/resolc.js
          retention-days: 40

      - name: Upload artifacts resolc.wasm (nightly)
        uses: actions/upload-artifact@v4
        with:
          name: resolc.wasm
          path: resolc-wasm32-unknown-emscripten/resolc.wasm
          retention-days: 40

      - name: Upload artifacts resolc_web.js (nightly)
        uses: actions/upload-artifact@v4
        id: artifact-upload-step
        with:
          name: resolc_web.js
          path: resolc-wasm32-unknown-emscripten/resolc_web.js
          retention-days: 40

      - name: Set output variables
        id: set-output
        env:
          TARGET: resolc_web_js
        run: |
          echo "Artifact URL is ${{ steps.artifact-upload-step.outputs.artifact-url }}"
          echo "Artifact SHA is ${{ steps.artifact-upload-step.outputs.artifact-digest }}"
          echo "${TARGET}_url=${{ steps.artifact-upload-step.outputs.artifact-url }}"
          echo "${TARGET}_url=${{ steps.artifact-upload-step.outputs.artifact-url }}" >> "$GITHUB_OUTPUT"
          echo "${TARGET}_sha=${{ steps.artifact-upload-step.outputs.artifact-digest }}"
          echo "${TARGET}_sha=${{ steps.artifact-upload-step.outputs.artifact-digest }}" >> "$GITHUB_OUTPUT"

  create-macos-fat-binary:
    if: ${{ needs.check_commits.outputs.has_commits == 'true' }}
    needs: [build]
    outputs:
      resolc-universal-apple-darwin_url: ${{ steps.set-output.outputs.resolc-universal-apple-darwin_url }}
      resolc-universal-apple-darwin_sha: ${{ steps.set-output.outputs.resolc-universal-apple-darwin_sha }}
    runs-on: macos-14
    steps:
      - uses: actions/download-artifact@v4
        with:
          merge-multiple: true

      - name: Create macOS Fat Binary
        run: |
          lipo resolc-aarch64-apple-darwin resolc-x86_64-apple-darwin -create -output resolc-universal-apple-darwin

      - name: Make Executable
        run: |
          chmod +x resolc-universal-apple-darwin

      - uses: actions/upload-artifact@v4
        id: artifact-upload-step
        with:
          name: resolc-universal-apple-darwin
          path: resolc-universal-apple-darwin
          retention-days: 40

      - name: Set output variables
        id: set-output
        env:
          TARGET: resolc-universal-apple-darwin
        run: |
          echo "Artifact URL is ${{ steps.artifact-upload-step.outputs.artifact-url }}"
          echo "Artifact SHA is ${{ steps.artifact-upload-step.outputs.artifact-digest }}"
          echo "${TARGET}_url=${{ steps.artifact-upload-step.outputs.artifact-url }}"
          echo "${TARGET}_url=${{ steps.artifact-upload-step.outputs.artifact-url }}" >> "$GITHUB_OUTPUT"
          echo "${TARGET}_sha=${{ steps.artifact-upload-step.outputs.artifact-digest }}"
          echo "${TARGET}_sha=${{ steps.artifact-upload-step.outputs.artifact-digest }}" >> "$GITHUB_OUTPUT"

  generate-nightly-json:
    runs-on: ubuntu-24.04
    if: ${{ needs.check_commits.outputs.has_commits == 'true' }}
    environment: tags
    needs: [build-wasm, build, create-macos-fat-binary, check_commits]
    steps:
      - name: Checkout revive
        uses: actions/checkout@v4
        with:
          path: revive

      - name: Checkout resolc-bin
        uses: actions/checkout@v4
        with:
          repository: paritytech/resolc-bin
          path: resolc-bin

      - name: Download Artifacts
        uses: actions/download-artifact@v4
        with:
          merge-multiple: true
          path: bins

      - uses: actions/create-github-app-token@v1
        id: app-token
        with:
          app-id: ${{ secrets.REVIVE_JSON_APP_ID }}
          private-key: ${{ secrets.REVIVE_JSON_APP_KEY }}
          owner: paritytech
          repositories: resolc-bin

      - name: Generate JSON
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          TOKEN: ${{ steps.app-token.outputs.token }}
          APP_NAME: "paritytech-revive-json"
          Green: "\e[32m"
          NC: "\e[0m"
        run: |
          echo '[' > data.json
          echo '${{ toJSON(needs.build.outputs) }}' >> data.json
          echo ',' >> data.json
          echo '${{ toJSON(needs.build-wasm.outputs) }}' >> data.json
          echo ',' >> data.json
          echo '${{ toJSON(needs.create-macos-fat-binary.outputs) }}' >> data.json
          echo ']' >> data.json
          chmod +x bins/resolc-x86_64-unknown-linux-musl
          export FIRST_SOLC_VERSION=$(./bins/resolc-x86_64-unknown-linux-musl --supported-solc-versions | cut -f 1 -d "," | tr -d ">=")
          export LAST_SOLC_VERSION=$(./bins/resolc-x86_64-unknown-linux-musl --supported-solc-versions | cut -f 2 -d "," | tr -d "<=")
          export FILEPATH=$(readlink -f data.json)
          export TAG=$(cd revive; gh release list --json name,isLatest --jq '.[] | select(.isLatest)|.name')
          cd resolc-bin
          mkdir -p nightly
          cd nightly
          python3 ../../revive/.github/scripts/json_generator_nightly.py
          cd ..
          git status

          echo "${Green}Add new remote with gh app token${NC}"
          git remote set-url origin $(git config remote.origin.url | sed "s/github.com/${APP_NAME}:${TOKEN}@github.com/g")

          echo "${Green}Remove http section that causes issues with gh app auth token${NC}"
          sed -i.bak '/\[http/d' ./.git/config
          sed -i.bak '/extraheader/d' ./.git/config

          git config user.email "ci@parity.io"
          git config user.name "${APP_NAME}"

          git add nightly/
          git commit -m "Update nightly json"
          git push origin main

          echo "::notice::nightly list.json files were successfully published to https://github.com/paritytech/resolc-bin"
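Putting the workflow and the generator script together, the version strings published for a nightly build can be traced end to end. A minimal sketch, assuming a latest release tag of v0.4.0 and an arbitrary commit hash (both are placeholders, not real values):

from datetime import datetime

# Placeholder inputs; in CI they come from the TAG and GITHUB_SHA environment variables.
tag = "v0.4.0"
github_sha = "0123abcd456789ef"

# Mirrors generate_asset_json_nightly(): %-m/%-d are glibc-style,
# so the date carries no zero padding (e.g. "2025.7.1").
date = datetime(2025, 7, 1).strftime("%Y.%-m.%-d")
version = f"{tag.replace('v', '')}-nightly.{date}"    # 0.4.0-nightly.2025.7.1
long_version = f"{version}+commit.{github_sha[:8]}"   # 0.4.0-nightly.2025.7.1+commit.0123abcd
print(version, long_version)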
@@ -14,6 +14,7 @@ concurrency:

env:
  CARGO_TERM_COLOR: always
  # if changed, don't forget to update the env var in release-nightly.yml
  RUST_MUSL_CROSS_IMAGE: messense/rust-musl-cross@sha256:c0154e992adb791c3b848dd008939d19862549204f8cb26f5ca7a00f629e6067

jobs:

@@ -6,6 +6,19 @@ This is a development pre-release.

Supported `polkadot-sdk` rev: `2503.0.1`

## v0.4.0

This is a development pre-release.

Supported `polkadot-sdk` rev: `2503.0.1`

### Added
- Line debug information per YUL builtin and for `if` statements.
- Support for the YUL optimizer details in the standard json input definition.

### Fixed
- The debug info source file matches the YUL path in `--debug-output-dir`, allowing tools to display the source line.

## v0.3.0

This is a development pre-release.

@@ -2,6 +2,7 @@

pub mod ir_type;

use std::path::Path;
use std::path::PathBuf;

use serde::Deserialize;

@@ -16,6 +17,14 @@ pub struct DebugConfig {
    pub output_directory: Option<PathBuf>,
    /// Whether debug info should be emitted.
    pub emit_debug_info: bool,
    /// The YUL debug output file path.
    ///
    /// Is expected to be configured when running in YUL mode.
    pub contract_path: Option<PathBuf>,
    /// The YUL input file path.
    ///
    /// Is expected to be configured when not running in YUL mode.
    pub yul_path: Option<PathBuf>,
}

impl DebugConfig {

@@ -24,15 +33,41 @@ impl DebugConfig {
        Self {
            output_directory,
            emit_debug_info,
            contract_path: None,
            yul_path: None,
        }
    }

    /// Set the current YUL path.
    pub fn set_yul_path(&mut self, yul_path: &Path) {
        self.yul_path = yul_path.to_path_buf().into();
    }

    /// Set the current contract path.
    pub fn set_contract_path(&mut self, contract_path: &str) {
        self.contract_path = self.yul_source_path(contract_path);
    }

    /// Returns with the following precedence:
    /// 1. The YUL source path if it was configured.
    /// 2. The source YUL path from the debug output dir if it was configured.
    /// 3. `None` if there is no debug output directory.
    pub fn yul_source_path(&self, contract_path: &str) -> Option<PathBuf> {
        if let Some(path) = self.yul_path.as_ref() {
            return Some(path.clone());
        }

        self.output_directory.as_ref().map(|output_directory| {
            let mut file_path = output_directory.to_owned();
            let full_file_name = Self::full_file_name(contract_path, None, IRType::Yul);
            file_path.push(full_file_name);
            file_path
        })
    }

    /// Dumps the Yul IR.
    pub fn dump_yul(&self, contract_path: &str, code: &str) -> anyhow::Result<()> {
        if let Some(output_directory) = self.output_directory.as_ref() {
            let mut file_path = output_directory.to_owned();
            let full_file_name = Self::full_file_name(contract_path, None, IRType::Yul);
            file_path.push(full_file_name);
        if let Some(file_path) = self.yul_source_path(contract_path) {
            std::fs::write(file_path, code)?;
        }

@@ -51,11 +51,20 @@ pub struct DebugInfo<'ctx> {

impl<'ctx> DebugInfo<'ctx> {
    /// A shortcut constructor.
    pub fn new(module: &inkwell::module::Module<'ctx>) -> Self {
    pub fn new(
        module: &inkwell::module::Module<'ctx>,
        debug_config: &crate::debug_config::DebugConfig,
    ) -> Self {
        let module_name = module.get_name().to_string_lossy();
        let yul_name = debug_config
            .contract_path
            .as_ref()
            .map(|path| path.display().to_string());

        let (builder, compile_unit) = module.create_debug_info_builder(
            true,
            inkwell::debug_info::DWARFSourceLanguage::C,
            module.get_name().to_string_lossy().as_ref(),
            yul_name.as_deref().unwrap_or_else(|| module_name.as_ref()),
            "",
            "",
            false,

@@ -247,7 +247,7 @@ where
        let intrinsics = Intrinsics::new(llvm, &module);
        let llvm_runtime = LLVMRuntime::new(llvm, &module, &optimizer);
        let debug_info = debug_config.emit_debug_info.then(|| {
            let debug_info = DebugInfo::new(&module);
            let debug_info = DebugInfo::new(&module, &debug_config);
            debug_info.initialize_module(llvm, &module);
            debug_info
        });

@@ -54,7 +54,7 @@ pub fn yul<T: Compiler>(
    solc: &mut T,
    optimizer_settings: revive_llvm_context::OptimizerSettings,
    include_metadata_hash: bool,
    debug_config: revive_llvm_context::DebugConfig,
    mut debug_config: revive_llvm_context::DebugConfig,
    llvm_arguments: &[String],
    memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
) -> anyhow::Result<Build> {

@@ -77,6 +77,7 @@ pub fn yul<T: Compiler>(
    let solc_validator = Some(&*solc);
    let project = Project::try_from_yul_path(path, solc_validator)?;

    debug_config.set_yul_path(path);
    let build = project.compile(
        optimizer_settings,
        include_metadata_hash,

@@ -77,7 +77,7 @@ impl Contract {
        project: Project,
        optimizer_settings: revive_llvm_context::OptimizerSettings,
        include_metadata_hash: bool,
        debug_config: revive_llvm_context::DebugConfig,
        mut debug_config: revive_llvm_context::DebugConfig,
        llvm_arguments: &[String],
        memory_config: SolcStandardJsonInputSettingsPolkaVMMemory,
    ) -> anyhow::Result<ContractBuild> {

@@ -117,6 +117,7 @@ impl Contract {
            _ => llvm.create_module(self.path.as_str()),
        };

        debug_config.set_contract_path(&self.path);
        let mut context = revive_llvm_context::PolkaVMContext::new(
            &llvm,
            module,

@@ -45,8 +45,6 @@ impl Compiler for SolcCompiler {
        include_paths: Vec<String>,
        allow_paths: Option<String>,
    ) -> anyhow::Result<SolcStandardJsonOutput> {
        let version = self.version()?.validate(&include_paths)?.default;

        let mut command = std::process::Command::new(self.executable.as_str());
        command.stdin(std::process::Stdio::piped());
        command.stdout(std::process::Stdio::piped());

@@ -65,7 +63,7 @@ impl Compiler for SolcCompiler {
            command.arg(allow_paths);
        }

        input.normalize(&version);
        input.normalize();

        let suppressed_warnings = input.suppressed_warnings.take().unwrap_or_default();

@@ -40,8 +40,7 @@ impl Compiler for SoljsonCompiler {
            anyhow::bail!("configuring allow paths is not supported with solJson")
        }

        let version = self.version()?.validate(&include_paths)?.default;
        input.normalize(&version);
        input.normalize();

        let suppressed_warnings = input.suppressed_warnings.take().unwrap_or_default();

@@ -7,6 +7,7 @@ pub use self::combined_json::contract::Contract as CombinedJsonContract;
pub use self::standard_json::input::language::Language as SolcStandardJsonInputLanguage;
pub use self::standard_json::input::settings::metadata::Metadata as SolcStandardJsonInputSettingsMetadata;
pub use self::standard_json::input::settings::metadata_hash::MetadataHash as SolcStandardJsonInputSettingsMetadataHash;
pub use self::standard_json::input::settings::optimizer::yul_details::YulDetails as SolcStandardJsonInputSettingsYulOptimizerDetails;
pub use self::standard_json::input::settings::optimizer::Optimizer as SolcStandardJsonInputSettingsOptimizer;
pub use self::standard_json::input::settings::polkavm::memory::MemoryConfig as SolcStandardJsonInputSettingsPolkaVMMemory;
pub use self::standard_json::input::settings::polkavm::memory::DEFAULT_HEAP_SIZE as PolkaVMDefaultHeapMemorySize;

@@ -140,7 +140,7 @@ impl Input {
    }

    /// Sets the necessary defaults.
    pub fn normalize(&mut self, version: &semver::Version) {
        self.settings.normalize(version);
    pub fn normalize(&mut self) {
        self.settings.normalize();
    }
}

@@ -74,9 +74,9 @@ impl Settings {
    }

    /// Sets the necessary defaults.
    pub fn normalize(&mut self, version: &semver::Version) {
    pub fn normalize(&mut self) {
        self.polkavm = None;
        self.optimizer.normalize(version);
        self.optimizer.normalize();
    }

    /// Parses the library list and returns their double hashmap with path and name as keys.

@@ -3,37 +3,54 @@
use serde::Deserialize;
use serde::Serialize;

use crate::standard_json::input::settings::optimizer::yul_details::YulDetails;

/// The `solc --standard-json` input settings optimizer details.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Details {
    /// Whether the pass is enabled.
    pub peephole: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub peephole: Option<bool>,
    /// Whether the pass is enabled.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub inliner: Option<bool>,
    /// Whether the pass is enabled.
    pub jumpdest_remover: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub jumpdest_remover: Option<bool>,
    /// Whether the pass is enabled.
    pub order_literals: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub order_literals: Option<bool>,
    /// Whether the pass is enabled.
    pub deduplicate: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub deduplicate: Option<bool>,
    /// Whether the pass is enabled.
    pub cse: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cse: Option<bool>,
    /// Whether the pass is enabled.
    pub constant_optimizer: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub constant_optimizer: Option<bool>,
    /// Whether the YUL optimizer is enabled.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub yul: Option<bool>,
    /// The YUL optimizer configuration.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub yul_details: Option<YulDetails>,
}

impl Details {
    /// A shortcut constructor.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        peephole: bool,
        peephole: Option<bool>,
        inliner: Option<bool>,
        jumpdest_remover: bool,
        order_literals: bool,
        deduplicate: bool,
        cse: bool,
        constant_optimizer: bool,
        jumpdest_remover: Option<bool>,
        order_literals: Option<bool>,
        deduplicate: Option<bool>,
        cse: Option<bool>,
        constant_optimizer: Option<bool>,
        yul: Option<bool>,
        yul_details: Option<YulDetails>,
    ) -> Self {
        Self {
            peephole,

@@ -43,10 +60,11 @@ impl Details {
            deduplicate,
            cse,
            constant_optimizer,
            yul,
            yul_details,
        }
    }

    /// Creates a set of disabled optimizations.
    pub fn disabled(version: &semver::Version) -> Self {
        let inliner = if version >= &semver::Version::new(0, 8, 5) {
            Some(false)

@@ -54,6 +72,16 @@ impl Details {
            None
        };

        Self::new(false, inliner, false, false, false, false, false)
        Self::new(
            Some(false),
            inliner,
            Some(false),
            Some(false),
            Some(false),
            Some(false),
            Some(false),
            None,
            None,
        )
    }
}

@@ -1,6 +1,7 @@
//! The `solc --standard-json` input settings optimizer.

pub mod details;
pub mod yul_details;

use serde::Deserialize;
use serde::Serialize;

@@ -41,13 +42,8 @@ impl Optimizer {
    }

    /// Sets the necessary defaults.
    pub fn normalize(&mut self, version: &semver::Version) {
    pub fn normalize(&mut self) {
        self.mode = None;
        self.fallback_to_optimizing_for_size = None;
        self.details = if version >= &semver::Version::new(0, 5, 5) {
            Some(Details::disabled(version))
        } else {
            None
        };
    }
}

@@ -0,0 +1,26 @@
//! The `solc --standard-json` input settings YUL optimizer details.

use serde::Deserialize;
use serde::Serialize;

/// The `solc --standard-json` input settings optimizer YUL details.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct YulDetails {
    /// Whether the stack allocation pass is enabled.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stack_allocation: Option<bool>,
    /// The optimization step sequence string.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub optimizer_steps: Option<String>,
}

impl YulDetails {
    /// A shortcut constructor.
    pub fn new(stack_allocation: Option<bool>, optimizer_steps: Option<String>) -> Self {
        Self {
            stack_allocation,
            optimizer_steps,
        }
    }
}

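With the `Option`-ized `Details` fields above, the optimizer details are only serialized when explicitly set, and the new `YulDetails` sub-section round-trips through the camelCase names. A minimal sketch of a standard JSON `settings.optimizer` fragment using these fields; the step sequence value is a placeholder, not a recommendation:

import json

# Illustrative settings fragment for a solc --standard-json input.
# Keys follow the #[serde(rename_all = "camelCase")] naming of Details / YulDetails.
settings = {
    "optimizer": {
        "enabled": True,
        "details": {
            "yul": True,
            "yulDetails": {
                "stackAllocation": True,
                "optimizerSteps": "dhfoDgvulfnTUtnIf",  # placeholder step sequence
            },
        },
    }
}
print(json.dumps(settings, indent=2))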
@@ -124,6 +124,7 @@ impl FunctionCall {
        D: revive_llvm_context::PolkaVMDependency + Clone,
    {
        let location = self.location;
        context.set_debug_location(location.line, 0, None)?;

        match self.name {
            Name::UserDefined(name) => {

@@ -54,6 +54,7 @@ where
{
    fn into_llvm(self, context: &mut revive_llvm_context::PolkaVMContext<D>) -> anyhow::Result<()> {
        let binding_pointer = context.build_alloca(context.word_type(), "if_condition");
        context.set_debug_location(self.location.line, 0, None)?;
        let condition = self
            .condition
            .into_llvm(&[("todo".to_string(), binding_pointer)], context)?