feat: Add rebrand CI/CD workflows to main branch

- Add 72 rebrand workflow files (polkadot→pezkuwi, substrate→bizinikiwi, cumulus→pezcumulus)
- Add GitHub actions, issue templates, and configs
- Remove unnecessary workflows (fork-sync, gitspiegel, upstream-tracker, sync-templates, backport)
- Rename zombienet test files to match new naming convention
This commit is contained in:
2025-12-19 22:51:57 +03:00
parent 0ec342b620
commit ee389beb8c
131 changed files with 16523 additions and 0 deletions
+172
View File
@@ -0,0 +1,172 @@
# Default state for all rules
default: true
# Path to configuration file to extend
extends: null
# MD001/heading-increment/header-increment - Heading levels should only increment by one level at a time
MD001: true
# MD002/first-heading-h1/first-header-h1 - First heading should be a top-level heading
MD002:
# Heading level
level: 1
# MD003/heading-style/header-style - Heading style
MD003:
# Heading style
style: "consistent"
# MD004/ul-style - Unordered list style (disabled - too many legacy files)
MD004: false
# MD005/list-indent - Inconsistent indentation for list items at the same level
MD005: false
# MD006/ul-start-left - Consider starting bulleted lists at the beginning of the line
MD006: false
# MD007/ul-indent - Unordered list indentation
MD007: false
# MD009/no-trailing-spaces - Trailing spaces (disabled - too many legacy files)
MD009: false
# MD010/no-hard-tabs - Hard tabs
MD010: false
# MD011/no-reversed-links - Reversed link syntax
MD011: true
# MD012/no-multiple-blanks - Multiple consecutive blank lines
MD012:
# Consecutive blank lines
maximum: 2
# MD013/line-length - Line length (disabled - URLs make lines long)
MD013: false
# MD014/commands-show-output - Dollar signs used before commands without showing output
MD014: true
# MD018/no-missing-space-atx - No space after hash on atx style heading
MD018: true
# MD019/no-multiple-space-atx - Multiple spaces after hash on atx style heading
MD019: true
# MD020/no-missing-space-closed-atx - No space inside hashes on closed atx style heading
MD020: true
# MD021/no-multiple-space-closed-atx - Multiple spaces inside hashes on closed atx style heading
MD021: true
# MD022/blanks-around-headings/blanks-around-headers - Headings should be surrounded by blank lines
MD022: false
# MD023/heading-start-left/header-start-left - Headings must start at the beginning of the line
MD023: true
# MD024/no-duplicate-heading/no-duplicate-header - Multiple headings with the same content
MD024: false
# MD025/single-title/single-h1 - Multiple top-level headings in the same document
MD025: false
# MD026/no-trailing-punctuation - Trailing punctuation in heading (disabled - too many legacy files)
MD026: false
# MD027/no-multiple-space-blockquote - Multiple spaces after blockquote symbol
MD027: true
# MD028/no-blanks-blockquote - Blank line inside blockquote
MD028: true
# MD029/ol-prefix - Ordered list item prefix
MD029:
# List style
style: "one_or_ordered"
# MD030/list-marker-space - Spaces after list markers (disabled - too many legacy files)
MD030: false
# MD031/blanks-around-fences - Fenced code blocks should be surrounded by blank lines
MD031: false
# MD032/blanks-around-lists - Lists should be surrounded by blank lines
MD032: false
# MD033/no-inline-html - Inline HTML
MD033: false
# MD034/no-bare-urls - Bare URL used
MD034: false
# MD035/hr-style - Horizontal rule style
MD035:
# Horizontal rule style
style: "consistent"
# MD036/no-emphasis-as-heading/no-emphasis-as-header - Emphasis used instead of a heading
MD036: false
# MD037/no-space-in-emphasis - Spaces inside emphasis markers
MD037: true
# MD038/no-space-in-code - Spaces inside code span elements
MD038: true
# MD039/no-space-in-links - Spaces inside link text
MD039: true
# MD040/fenced-code-language - Fenced code blocks should have a language specified
MD040: false
# MD041/first-line-heading/first-line-h1 - First line in a file should be a top-level heading
MD041: false
# MD042/no-empty-links - No empty links
MD042: true
# MD043/required-headings/required-headers - Required heading structure
MD043: false
# MD044/proper-names - Proper names should have the correct capitalization
# Disabled - the Pezkuwi SDK defines its own project-specific terminology
MD044: false
# MD045/no-alt-text - Images should have alternate text (alt text)
MD045: false
# MD046/code-block-style - Code block style
MD046:
# Block style
style: "consistent"
# MD047/single-trailing-newline - Files should end with a single newline character
MD047: true
# MD048/code-fence-style - Code fence style
MD048:
# Code fence style
style: "consistent"
# MD049/emphasis-style - Emphasis style should be consistent
MD049: false
# MD050/strong-style - Strong style should be consistent
MD050:
# Strong style
style: "consistent"
# MD051/link-fragments - Link fragments should be valid
MD051: false
# MD052/reference-links-images - Reference links and images should use a label that is defined
MD052: false
# MD053/link-image-reference-definitions - Link and image reference definitions should be needed
MD053: false
# MD058/blanks-around-tables - Tables should be surrounded by blank lines (disabled - too many legacy files)
MD058: false
+73
View File
@@ -0,0 +1,73 @@
# PezkuwiChain Code Owners
#
# A codeowner oversees part of the codebase. If an owned file is changed,
# the corresponding codeowner receives a review request.
#
# For details about syntax, see:
# https://help.github.com/en/articles/about-code-owners
# Global - Core Team
* @pezkuwichain/core
# Custom Pallets - Pallet Team
/pezkuwi/pallets/ @pezkuwichain/pallets
# Presale Pallet - Critical financial code
/pezkuwi/pallets/presale/ @pezkuwichain/core @pezkuwichain/pallets
# Treasury & Rewards - Financial critical
/pezkuwi/pallets/pez-treasury/ @pezkuwichain/core
/pezkuwi/pallets/pez-rewards/ @pezkuwichain/core
# Identity & KYC - Privacy sensitive
/pezkuwi/pallets/identity-kyc/ @pezkuwichain/core
/pezkuwi/pallets/tiki/ @pezkuwichain/core
# Governance - Critical
/pezkuwi/pallets/welati/ @pezkuwichain/core
# Runtime configurations
/pezkuwi/runtime/ @pezkuwichain/core
# TeyrChain Parachain Runtime
/pezkuwi/runtime/parachain/ @pezkuwichain/core
# PezkuwiChain Relay Runtime
/pezkuwi/runtime/pezkuwichain/ @pezkuwichain/core
# XCM Configuration - Cross-chain critical
/pezkuwi/runtime/parachain/src/configs/xcm_config.rs @pezkuwichain/core
# Chain specifications
/chain-specs/ @pezkuwichain/core
# CI/CD
/.github/ @pezkuwichain/devops
/.gitlab-ci.yml @pezkuwichain/devops
/.gitlab/ @pezkuwichain/devops
# Documentation
/docs/ @pezkuwichain/docs
README.md @pezkuwichain/core
# Scripts
/scripts/ @pezkuwichain/devops
# Substrate Core (inherited from upstream)
/substrate/frame/ @pezkuwichain/core
/substrate/client/ @pezkuwichain/core
/substrate/primitives/ @pezkuwichain/core
# Cumulus (parachain framework)
/cumulus/ @pezkuwichain/core
# Polkadot (relay chain framework)
/polkadot/ @pezkuwichain/core
# XCM
/polkadot/xcm/ @pezkuwichain/core
# Security-sensitive files
Cargo.toml @pezkuwichain/core
Cargo.lock @pezkuwichain/core
rust-toolchain.toml @pezkuwichain/core
+4
View File
@@ -0,0 +1,4 @@
---
name: New blank issue
about: New blank issue
---
+35
View File
@@ -0,0 +1,35 @@
name: Bug Report
description: Let us know about an issue you experienced with this software
labels: [ I2-bug, I10-unconfirmed ]
body:
- type: checkboxes
attributes:
label: Is there an existing issue?
description: Please search to see if an issue already exists and leave a comment that you also experienced this issue or add your specifics that are related to an existing issue.
options:
- label: I have searched the existing issues
required: true
- type: checkboxes
attributes:
label: Experiencing problems? Have you tried our Stack Exchange first?
description: Please search <https://exchange.pezkuwichain.app> to see if a post already exists, and ask if not. Please do not file support issues here.
options:
- label: This is not a support question.
required: true
- type: textarea
id: bug
attributes:
label: Description of bug
description: What seems to be the problem?
# placeholder: Describe the problem.
validations:
required: true
- type: textarea
id: steps
attributes:
label: Steps to reproduce
description: Provide the steps that led to the discovery of the issue.
# placeholder: Describe what you were doing so we can reproduce the problem.
validations:
required: false
+7
View File
@@ -0,0 +1,7 @@
blank_issues_enabled: true
contact_links:
- name: Support & Troubleshooting with the Bizinikiwi Stack Exchange Community
url: https://exchange.pezkuwichain.app
about: |
For general problems with Bizinikiwi or related technologies, please search here first
for solutions, by keyword and tags. If you find no solution, please ask your question in our community! We also highly encourage everyone to share their understanding by answering questions for others.
+55
View File
@@ -0,0 +1,55 @@
name: Feature Request
description: Submit your requests and suggestions to improve!
labels: [ I5-enhancement ]
body:
- type: checkboxes
id: existing
attributes:
label: Is there an existing issue?
description: Please search to see if an issue already exists and leave a comment that you also experienced this issue or add your specifics that are related to an existing issue.
options:
- label: I have searched the existing issues
required: true
- type: checkboxes
id: stackexchange
attributes:
label: Experiencing problems? Have you tried our Stack Exchange first?
description: Please search <https://exchange.pezkuwichain.app> to see if a post already exists, and ask if not. Please do not file support issues here.
options:
- label: This is not a support question.
required: true
- type: textarea
id: motivation
attributes:
label: Motivation
description: Please give precedence as to what led you to file this issue.
# placeholder: Describe ...
validations:
required: false
- type: textarea
id: request
attributes:
label: Request
description: Please describe what is needed.
# placeholder: Describe what you would like to see added or changed.
validations:
required: true
- type: textarea
id: solution
attributes:
label: Solution
description: If possible, please describe what a solution could be.
# placeholder: Describe what you would like to see added or changed.
validations:
required: false
- type: dropdown
id: help
attributes:
label: Are you willing to help with this request?
multiple: true
options:
- Yes!
- No.
- Maybe (please elaborate above)
validations:
required: true
+83
View File
@@ -0,0 +1,83 @@
# Zombienet Flaky Tests
This document explains how to manage flaky or temporarily disabled zombienet tests in the Pezkuwi SDK repository.
## Overview
The `.github/zombienet-flaky-tests` file contains a list of zombienet tests that are currently disabled due to flakiness or known issues. These tests are automatically skipped during CI runs but are tracked for future re-enabling.
## File Format
Each line in the `zombienet-flaky-tests` file follows this format:
```
<test-job-name>:<issue-number>
```
**Example:**
```
zombienet-pezkuwi-functional-0014-chunk-fetching-network-compatibility:9980
zombienet-pezcumulus-0009-elastic_scaling_pov_recovery:8986
```
- **test-job-name**: The exact job name as defined in the zombienet test YAML files
- **issue-number**: GitHub issue number tracking the flaky test (for reference and follow-up)
## How It Works
1. **Test Discovery**: The zombienet workflows read the test definitions from:
- `.github/zombienet-tests/zombienet_pezkuwi_tests.yml`
- `.github/zombienet-tests/zombienet_cumulus_tests.yml`
- `.github/zombienet-tests/zombienet_substrate_tests.yml`
- `.github/zombienet-tests/zombienet_teyrchain-template_tests.yml`
2. **Filtering**: During the preflight job, tests listed in `zombienet-flaky-tests` are filtered out from the test matrix.
3. **Execution**: Only non-flaky tests are executed in the CI pipeline.
## Adding a Flaky Test
If you encounter a flaky test that needs to be temporarily disabled:
1. **Create or find a GitHub issue** tracking the flaky behavior
2. **Add an entry** to `.github/zombienet-flaky-tests`:
```
zombienet-<suite>-<test-name>:<issue-number>
```
3. **Commit and push** the change
4. The CI will automatically validate that:
- The entry follows the correct format
- The referenced GitHub issue exists
- (Warning if the issue is closed)
5. The test will be automatically skipped in subsequent CI runs
## Re-enabling a Test
Once a flaky test has been fixed:
1. **Verify the fix** by running the test locally or in a test branch
2. **Remove the entry** from `.github/zombienet-flaky-tests`
3. **Close the associated GitHub issue** (or update it with the fix)
4. **Commit and push** the change
5. The test will be automatically included in subsequent CI runs
## Validation
The `.github/zombienet-flaky-tests` file is automatically validated in CI whenever it's modified. The validation checks:
- **Format**: Each entry must follow the `<test-name>:<issue-number>` format
- **Issue vs PR**: The referenced number must be a GitHub Issue, not a Pull Request
- **Issue existence**: The referenced GitHub issue must exist in the repository
- **Issue state**: A warning is shown if the referenced issue is closed (suggesting the entry might be outdated)
The validation workflow runs on pull requests that modify:
- `.github/zombienet-flaky-tests`
- `.github/scripts/check-zombienet-flaky-tests.sh`
- `.github/workflows/check-zombienet-flaky-tests.yml`
## Monitoring
- The number of currently disabled tests is displayed in the CI logs during zombienet test runs
- You can view the current list at: [`.github/zombienet-flaky-tests`](./zombienet-flaky-tests)
- Each disabled test should have an associated GitHub issue for tracking
- The validation script can be run locally: `.github/scripts/check-zombienet-flaky-tests.sh .github/zombienet-flaky-tests`
@@ -0,0 +1,74 @@
name: "build and push image"
inputs:
dockerfile:
description: "dockerfile to build"
required: true
image-name:
description: "image name (without registry)"
required: true
username:
required: false
default: ""
password:
required: false
default: ""
outputs:
branch:
description: "Branch name for the PR"
value: ${{ steps.branch.outputs.branch }}
runs:
using: "composite"
steps:
# gcloud
# https://github.com/pezkuwichain/ci_cd/wiki/GitHub:-Push-Docker-image-to-GCP-Registry
- name: "Set up Cloud SDK"
uses: "google-github-actions/setup-gcloud@e427ad8a34f8676edf47cf7d7925499adf3eb74f" # v2.2.1
- name: "gcloud info"
shell: bash
run: "gcloud info"
- name: "Auth in gcloud registry"
shell: bash
run: "gcloud auth configure-docker europe-docker.pkg.dev --quiet"
- name: build
shell: bash
env:
ZOMBIENET_IMAGE: "docker.io/pezkuwichain/zombienet:v1.3.105"
IMAGE_TAG: europe-docker.pkg.dev/parity-ci-2024/temp-images/${{ inputs.image-name }}
run: |
export DOCKER_IMAGES_VERSION=${{ github.event.pull_request.head.sha || 'master' }}
if [[ ${{ github.event_name }} == "merge_group" ]]; then export DOCKER_IMAGES_VERSION="${GITHUB_SHA::8}"; fi
docker build \
--build-arg VCS_REF="${GITHUB_SHA}" \
--build-arg BUILD_DATE="$(date -u '+%Y-%m-%dT%H:%M:%SZ')" \
--build-arg IMAGE_NAME="${{ inputs.image-name }}" \
--build-arg ZOMBIENET_IMAGE="${ZOMBIENET_IMAGE}" \
-t "${{ env.IMAGE_TAG }}:$DOCKER_IMAGES_VERSION" \
-f ${{ inputs.dockerfile }} \
.
docker push "${{ env.IMAGE_TAG }}:$DOCKER_IMAGES_VERSION"
- name: login to dockerhub
id: login
# fork check
if: ${{ inputs.username != '' && inputs.password != '' && github.event_name != 'merge_group' }}
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
with:
username: ${{ inputs.username }}
password: ${{ inputs.password }}
- name: push to dockerhub
shell: bash
if: ${{ inputs.username != '' && inputs.password != '' && github.event_name != 'merge_group' }}
env:
GITHUB_PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
run: |
export DOCKERHUB_TAG=docker.io/paritypr/${{ inputs.image-name }}:${{ github.event.pull_request.number || 'master' }}
if [[ ${{ github.event_name }} == "pull_request" ]]; then export DOCKERHUB_TAG=$DOCKERHUB_TAG-${GITHUB_PR_HEAD_SHA::8}; fi
if [[ ${{ github.event_name }} == "push" ]]; then export DOCKERHUB_TAG=$DOCKERHUB_TAG-${GITHUB_SHA::8}; fi
#
docker tag "europe-docker.pkg.dev/parity-ci-2024/temp-images/${{ inputs.image-name }}:${{ github.event.pull_request.head.sha || 'master' }}" $DOCKERHUB_TAG
docker push $DOCKERHUB_TAG
@@ -0,0 +1,22 @@
name: 'cargo check runtimes'
description: 'Runs `cargo check` for every directory in provided root.'
inputs:
root:
description: "Root directory. Expected to contain several cargo packages inside."
required: true
runs:
using: "composite"
steps:
- name: Check
shell: bash
run: |
mkdir -p ~/.forklift
cp .forklift/config.toml ~/.forklift/config.toml
cd ${{ inputs.root }}
for directory in $(echo */); do
echo "_____Running cargo check for ${directory} ______";
cd ${directory};
pwd;
SKIP_WASM_BUILD=1 forklift cargo check --locked;
cd ..;
done
@@ -0,0 +1,98 @@
name: "Download and extract artifact"
description: "Downloads an artifact, extracts it, and optionally copies files to a destination"
inputs:
artifact-name:
description: "Name of the artifact to download"
required: true
gh-token:
description: "GITHUB_TOKEN to use for downloading artifacts"
required: true
run-id:
description: "Run ID from which to download the artifact"
required: true
extract-path:
description: "Path where to extract the artifact"
default: "."
required: false
files-to-copy:
description: "Comma-separated (or newline-separated, remember about |) list of files to copy from the extracted artifact"
required: false
destination-path:
description: "Destination path for copied files"
required: false
cleanup:
description: "Whether to remove downloaded artifacts after copying (true/false)"
required: false
default: "false"
runs:
using: "composite"
steps:
- name: Download artifact
shell: bash
run: |
echo "::group::📦 Downloading ${{ inputs.artifact-name }}"
echo "Artifact: ${{ inputs.artifact-name }}"
echo "Run ID: ${{ inputs.run-id }}"
echo "::endgroup::"
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: ${{ inputs.artifact-name }}
github-token: ${{ inputs.gh-token }}
run-id: ${{ inputs.run-id }}
path: ${{ inputs.extract-path }}
- name: Extract artifact
shell: bash
working-directory: ${{ inputs.extract-path }}
run: |
echo "::group::📂 Extracting ${{ inputs.artifact-name }}"
if [[ -f artifacts.tar ]]; then
tar -xvf artifacts.tar
elif [[ -f *.tar ]]; then
tar -xvf *.tar
elif [[ -f *.tar.gz ]]; then
tar -xzvf *.tar.gz
elif [[ -f *.tgz ]]; then
tar -xzvf *.tgz
elif [[ -f *.zip ]]; then
unzip *.zip
else
echo "⚠️ No archive file found to extract"
ls -la
fi
echo "::endgroup::"
- name: Copy files if specified
if: inputs.files-to-copy != ''
env:
FILES_TO_COPY: ${{ inputs.files-to-copy }}
DESTINATION_PATH: ${{ inputs.destination-path }}
EXTRACT_PATH: ${{ inputs.extract-path }}
CLEANUP: ${{ inputs.cleanup }}
shell: bash
run: |
echo "::group::📋 Copying files from ${{ inputs.artifact-name }}"
# Create destination directory
mkdir -p "$DESTINATION_PATH"
FILE_COUNT=0
echo "$FILES_TO_COPY" | tr ',' '\n' | while read -r file; do
# trim leading and trailing whitespaces
file="$(echo "$file" | xargs)"
if [[ -n "$file" ]]; then
echo "✓ Copying $(basename "$file") to $DESTINATION_PATH"
cp -r "$EXTRACT_PATH/$file" "$DESTINATION_PATH/"
FILE_COUNT=$((FILE_COUNT + 1))
fi
done
# Cleanup if requested
if [[ "$CLEANUP" == "true" ]]; then
echo "🧹 Cleaning up temporary files in $EXTRACT_PATH"
rm -rf "$EXTRACT_PATH"
fi
echo "::endgroup::"
@@ -0,0 +1,104 @@
name: "Download binaries for zombienet tests"
description: "Zombienet native tests expect some set of binaries to be available in the filesystem"
inputs:
build-id:
description: ""
required: true
ref-slug:
description: "Ref slug (e.g branch-name-short)"
required: true
gh-token:
description: "GITHUB_TOKEN to use for downloading artifacts"
required: true
destination-path:
description: "Destination path for copied files"
required: false
runs:
using: "composite"
steps:
- uses: ./.github/actions/download-artifact-extract
with:
artifact-name: build-linux-bizinikiwi-${{ inputs.ref-slug }}
gh-token: ${{ inputs.gh-token }}
run-id: ${{ inputs.build-id }}
extract-path: ./tmp
files-to-copy: |
artifacts/bizinikiwi/bizinikiwi
destination-path: ${{ inputs.destination-path }}
cleanup: "true"
- uses: ./.github/actions/download-artifact-extract
with:
artifact-name: build-linux-stable-${{ inputs.ref-slug }}
gh-token: ${{ inputs.gh-token }}
run-id: ${{ inputs.build-id }}
extract-path: ./tmp
files-to-copy: |
artifacts/pezkuwi
artifacts/pezkuwi-execute-worker
artifacts/pezkuwi-prepare-worker
destination-path: ${{ inputs.destination-path }}
cleanup: "true"
- uses: ./.github/actions/download-artifact-extract
with:
artifact-name: build-linux-stable-pezcumulus-${{ inputs.ref-slug }}
gh-token: ${{ inputs.gh-token }}
run-id: ${{ inputs.build-id }}
extract-path: ./tmp
files-to-copy: |
artifacts/pezkuwi-teyrchain
destination-path: ${{ inputs.destination-path }}
cleanup: "true"
- uses: ./.github/actions/download-artifact-extract
with:
artifact-name: build-test-teyrchain-${{ inputs.ref-slug }}
gh-token: ${{ inputs.gh-token }}
run-id: ${{ inputs.build-id }}
extract-path: ./tmp
files-to-copy: |
artifacts/test-teyrchain
destination-path: ${{ inputs.destination-path }}
cleanup: "true"
- uses: ./.github/actions/download-artifact-extract
with:
artifact-name: build-test-collators-${{ inputs.ref-slug }}
gh-token: ${{ inputs.gh-token }}
run-id: ${{ inputs.build-id }}
extract-path: ./tmp
files-to-copy: |
artifacts/adder-collator
artifacts/undying-collator
destination-path: ${{ inputs.destination-path }}
cleanup: "true"
- uses: ./.github/actions/download-artifact-extract
with:
artifact-name: build-malus-${{ inputs.ref-slug }}
gh-token: ${{ inputs.gh-token }}
run-id: ${{ inputs.build-id }}
extract-path: ./tmp
# TODO: should copy pezkuwi-execute-worker and pezkuwi-prepare-worker?
# if yes then it overlaps with build-linux-stable - address this
files-to-copy: |
artifacts/malus
destination-path: ${{ inputs.destination-path }}
cleanup: "true"
- uses: ./.github/actions/download-artifact-extract
with:
artifact-name: build-templates-node-${{ inputs.ref-slug }}
gh-token: ${{ inputs.gh-token }}
run-id: ${{ inputs.build-id }}
extract-path: ./tmp
files-to-copy: |
artifacts/minimal-template-node
artifacts/teyrchain-template-node
artifacts/solochain-template-node
destination-path: ${{ inputs.destination-path }}
cleanup: "true"
@@ -0,0 +1,49 @@
name: 'Free Disk Space'
description: 'Frees up disk space on GitHub Actions runners by removing unnecessary software'
runs:
using: 'composite'
steps:
- name: Free Disk Space (Ubuntu)
shell: bash
run: |
echo "=== Disk space before cleanup ==="
df -h /
echo "=== Removing unnecessary packages ==="
# Remove Android SDK (12GB+)
sudo rm -rf /usr/local/lib/android || true
# Remove .NET SDK (2GB+)
sudo rm -rf /usr/share/dotnet || true
# Remove Haskell/GHC (5GB+)
sudo rm -rf /opt/ghc || true
sudo rm -rf /usr/local/.ghcup || true
# Remove Swift (1.5GB+)
sudo rm -rf /usr/share/swift || true
# Remove CodeQL (1GB+)
sudo rm -rf /opt/hostedtoolcache/CodeQL || true
# Remove unused tool caches
sudo rm -rf /opt/hostedtoolcache/Python || true
sudo rm -rf /opt/hostedtoolcache/Ruby || true
sudo rm -rf /opt/hostedtoolcache/go || true
sudo rm -rf /opt/hostedtoolcache/node || true
# Remove large packages
sudo apt-get remove -y '^dotnet-.*' '^llvm-.*' 'php.*' '^mongodb-.*' '^mysql-.*' azure-cli google-cloud-cli google-chrome-stable firefox powershell mono-devel libgl1-mesa-dri --fix-missing 2>/dev/null || true
sudo apt-get autoremove -y 2>/dev/null || true
sudo apt-get clean 2>/dev/null || true
# Remove Docker images
docker system prune -af 2>/dev/null || true
# Remove swap (1GB+)
sudo swapoff -a || true
sudo rm -f /swapfile || true
echo "=== Disk space after cleanup ==="
df -h /
+28
View File
@@ -0,0 +1,28 @@
name: "install gh"
description: "Installs the gh CLI in a Debian-based distro and switches to the PR branch."
inputs:
pr-number:
description: "Number of the PR"
required: true
GH_TOKEN:
description: "GitHub token"
required: true
outputs:
branch:
description: "Branch name for the PR"
value: ${{ steps.branch.outputs.branch }}
runs:
using: "composite"
steps:
- name: Set up git
shell: bash
# Here it would get the script from previous step
run: git config --global --add safe.directory '*'
- run: gh pr checkout ${{ inputs.pr-number }}
shell: bash
env:
GITHUB_TOKEN: ${{ inputs.GH_TOKEN }}
- name: Export branch name
shell: bash
run: echo "branch=$(git rev-parse --abbrev-ref HEAD)" >> "$GITHUB_OUTPUT"
id: branch
@@ -0,0 +1,28 @@
name: "stop all workflows"
description: "Action stops all workflows in a PR to save compute resources."
inputs:
app-id:
description: "App id"
required: true
app-key:
description: "App token"
required: true
runs:
using: "composite"
steps:
- name: Workflow stopper - Generate token
uses: actions/create-github-app-token@d72941d797fd3113feb6b93fd0dec494b13a2547 # v1.12.0
id: app-token
with:
app-id: ${{ inputs.app-id }}
private-key: ${{ inputs.app-key }}
owner: "pezkuwichain"
repositories: "workflow-stopper"
- name: Workflow stopper - Stop all workflows
uses: octokit/request-action@v2.x
with:
route: POST /repos/pezkuwichain/workflow-stopper/actions/workflows/stopper.yml/dispatches
ref: main
inputs: '${{ format(''{{ "github_sha": "{0}", "github_repository": "{1}", "github_ref_name": "{2}", "github_workflow_id": "{3}", "github_job_name": "{4}" }}'', github.event.pull_request.head.sha, github.repository, github.ref_name, github.run_id, github.job) }}'
env:
GITHUB_TOKEN: ${{ steps.app-token.outputs.token }}
+107
View File
@@ -0,0 +1,107 @@
name: "Zombienet-sdk test"
description: "Runs zombienet-sdk tests with archived artifacts"
inputs:
build-id:
description: ""
required: true
ref-slug:
description: "Ref slug (e.g branch-name-short)"
required: true
test-filter:
description: "test filter to pass to nextest (e.g: functional::spam_statement_distribution_requests::spam_statement_distribution_requests_test)"
required: true
job-name:
description: "Job name to use for artifact uploading"
required: true
prefix:
description: "Archive prefix for tests files (e.g pezkuwi, pezcumulus or bizinikiwi)"
required: true
gh-token:
description: "GITHUB_TOKEN to use for downloading artifacts"
required: true
runs:
using: "composite"
steps:
- name: common_vars
shell: bash
env:
TEST_FILTER: ${{ inputs.test-filter }}
PREFIX: ${{ inputs.prefix }}
run: |
echo "::group::Test Configuration (SDK)"
echo "Environment Variables:"
echo " ZOMBIENET_INTEGRATION_TEST_IMAGE: $ZOMBIENET_INTEGRATION_TEST_IMAGE"
echo " ZOMBIE_PROVIDER: $ZOMBIE_PROVIDER"
echo " POLKADOT_IMAGE: $POLKADOT_IMAGE"
echo " CUMULUS_IMAGE: $CUMULUS_IMAGE"
echo " COL_IMAGE: $COL_IMAGE"
echo " MALUS_IMAGE: $MALUS_IMAGE"
echo ""
echo "Test Parameters:"
echo " Test Filter: $TEST_FILTER"
echo " Prefix: $PREFIX"
echo " Job Name: ${{ inputs.job-name }}"
echo ""
# Show flaky tests information if any are disabled
if [[ -f .github/zombienet-flaky-tests ]]; then
FLAKY_COUNT=$(grep -v '^#' .github/zombienet-flaky-tests | grep -v '^$' | wc -l | tr -d ' ')
if [[ $FLAKY_COUNT -gt 0 ]]; then
echo "⚠️ Flaky/Disabled Tests: $FLAKY_COUNT test(s) currently disabled"
echo "📄 See: https://github.com/pezkuwichain/pezkuwi-sdk/blob/${{ github.sha }}/.github/zombienet-flaky-tests"
echo "📖 Docs: https://github.com/pezkuwichain/pezkuwi-sdk/blob/${{ github.sha }}/.github/ZOMBIENET_FLAKY_TESTS.md"
fi
fi
echo "::endgroup::"
- name: Download binaries for zombienet native tests
if: env.ZOMBIE_PROVIDER == 'native'
uses: ./.github/actions/download-binaries-for-zombienet-tests
with:
gh-token: ${{ inputs.gh-token }}
ref-slug: ${{ inputs.ref-slug }}
build-id: ${{ inputs.build-id }}
destination-path: ./bin
- uses: ./.github/actions/download-artifact-extract
with:
artifact-name: prepare-${{ inputs.prefix }}-zombienet-artifacts-${{ inputs.ref-slug }}
gh-token: ${{ inputs.gh-token }}
run-id: ${{ inputs.build-id }}
- name: k8s_auth
if: env.ZOMBIE_PROVIDER == 'k8s'
shell: bash
run: |
. /home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh
k8s_auth
- name: zombie_test
shell: bash
env:
# don't retry sdk tests
NEXTEST_RETRIES: 0
TEST_FILTER: ${{ inputs.test-filter }}
PREFIX: ${{ inputs.prefix }}
run: |
# RUN_IN_CI=1 shall be set only for k8s provider
if [[ "$ZOMBIE_PROVIDER" == "native" ]]; then
export RUN_IN_CI=0
# set path to downloaded binaries
export PATH=$(pwd)/bin:$PATH
chmod +x $(pwd)/bin/*
else
export RUN_IN_CI=1
# no need to check other runner variables. for k8s they shall store the same value
if [[ $ZOMBIENET_SDK_DEFAULT_RUNNER == "parity-zombienet" ]]; then
export ZOMBIE_K8S_CI_NAMESPACE=$(cat /data/namespace)
fi
fi
ls -ltr ./artifacts
# We want to run tests sequentially, '--no-capture' ensures that.
# If we want to get rid of '--no-capture' some day, please use '--test-threads 1' or NEXTEST_TEST_THREADS=1
# Both options cannot coexist for cargo-nextest below v0.9.94
cargo nextest run --archive-file ./artifacts/${PREFIX}-zombienet-tests.tar.zst --no-capture -- ${TEST_FILTER}
+104
View File
@@ -0,0 +1,104 @@
name: "Zombienet test v1"
description: "Runs zombienet tests"
inputs:
test-definition:
description: "test definition (zndsl file)"
required: true
job-name:
description: "Job name to use for artifact uploading"
required: true
local-dir:
description: "Path to the directory that contains the test file (.zndsl)"
required: true
concurrency:
description: "Concurrency to spawn nodes"
default: "4"
required: false
build-id:
description: ""
required: true
ref-slug:
description: "Ref slug (e.g branch-name-short)"
required: true
gh-token:
description: "GITHUB_TOKEN to use for downloading artifacts"
required: true
runs:
using: "composite"
steps:
- name: common_vars
shell: bash
env:
TEST_DEFINITION: ${{ inputs.test-definition }}
LOCAL_PATH: ${{ inputs.local-dir }}
CONCURRENCY: ${{ inputs.concurrency }}
run: |
echo "::group::Test Configuration"
echo "══════════════════════════════════════════════════════════════════"
echo "Environment Variables:"
echo " ZOMBIENET_INTEGRATION_TEST_IMAGE: $ZOMBIENET_INTEGRATION_TEST_IMAGE"
echo " ZOMBIENET_PROVIDER: $ZOMBIENET_PROVIDER"
echo " COL_IMAGE: $COL_IMAGE"
echo ""
echo "Test Parameters:"
echo " Test Definition: $TEST_DEFINITION"
echo " Job Name: ${{ inputs.job-name }}"
echo " Local Directory: $LOCAL_PATH"
echo " Concurrency: $CONCURRENCY"
echo ""
# Show flaky tests information if any are disabled
if [[ -f .github/zombienet-flaky-tests ]]; then
FLAKY_COUNT=$(grep -v '^#' .github/zombienet-flaky-tests | grep -v '^$' | wc -l | tr -d ' ')
if [[ $FLAKY_COUNT -gt 0 ]]; then
echo "⚠️ Flaky/Disabled Tests: $FLAKY_COUNT test(s) currently disabled"
echo "📄 See: https://github.com/pezkuwichain/pezkuwi-sdk/blob/${{ github.sha }}/.github/zombienet-flaky-tests"
echo "📖 Docs: https://github.com/pezkuwichain/pezkuwi-sdk/blob/${{ github.sha }}/.github/ZOMBIENET_FLAKY_TESTS.md"
fi
fi
echo "════════════════════════════════════════════════════════════════"
echo "::endgroup::"
- name: Download binaries for zombienet native tests
if: env.ZOMBIENET_PROVIDER == 'native'
uses: ./.github/actions/download-binaries-for-zombienet-tests
with:
gh-token: ${{ inputs.gh-token }}
ref-slug: ${{ inputs.ref-slug }}
build-id: ${{ inputs.build-id }}
destination-path: ./bin
- name: k8s_auth
if: env.ZOMBIENET_PROVIDER == 'k8s'
shell: bash
run: |
. /home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh
k8s_auth
- name: zombie_test
shell: bash
env:
TEST_DEFINITION: ${{ inputs.test-definition }}
LOCAL_PATH: ${{ inputs.local-dir }}
CONCURRENCY: ${{ inputs.concurrency }}
run: |
if [[ "$ZOMBIENET_PROVIDER" == "native" ]]; then
# set path to downloaded binaries
export PATH=$(pwd)/bin:$PATH
chmod +x $(pwd)/bin/*
./.github/scripts/run-zombienet-test.sh \
"$(pwd)/$LOCAL_PATH" \
$CONCURRENCY \
"$TEST_DEFINITION"
else
# no need to check other runner variables. for k8s they shall store the same value
if [[ $ZOMBIENET_DEFAULT_RUNNER == "parity-zombienet" ]]; then
export ZOMBIE_K8S_CI_NAMESPACE=$(cat /data/namespace)
fi
/home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh \
--local-dir="$(pwd)/$LOCAL_PATH" \
--concurrency=$CONCURRENCY \
--test="$TEST_DEFINITION"
fi
+15
View File
@@ -0,0 +1,15 @@
# Codecov configuration — reference: https://docs.codecov.com/docs/codecovyml-reference
coverage:
  # Report coverage percentages with two decimal places, rounded down.
  precision: 2
  round: down
  range: "1...100"
  status:
    project:
      default:
        # Minimum acceptable project coverage; allow up to a 2% drop per PR.
        target: 1.0
        threshold: 2.0

# Post a fresh PR comment for each new commit instead of editing the old one.
comment:
  behavior: new

# Strip the CI container checkout prefix so report paths match the repo layout.
fixes:
  - "/__w/pezkuwi-sdk/pezkuwi-sdk/::"
+31
View File
@@ -0,0 +1,31 @@
# Dependabot configuration.
# Reference: https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
# Fix: quoting was inconsistent across the two stanzas (plain / 'single' /
# "double" mixed); normalized to double quotes throughout. Semantics unchanged.
version: 2
updates:
  # Update GitHub Actions used in workflows:
  - package-ecosystem: "github-actions"
    directory: "/"
    labels: ["A1-insubstantial", "R0-no-crate-publish-required"]
    schedule:
      interval: "weekly"
    groups:
      # Group all CI action bumps into a single PR.
      ci_dependencies:
        patterns:
          - "*"
  # Update Rust dependencies:
  - package-ecosystem: "cargo"
    directory: "/"
    labels: ["A1-insubstantial", "R0-no-crate-publish-required"]
    schedule:
      interval: "weekly"
    groups:
      # We assume these crates to be semver abiding and can therefore group them together.
      known_good_semver:
        patterns:
          - "syn"
          - "quote"
          - "log"
          - "paste"
          - "*serde*"
          - "clap"
        update-types:
          - "minor"
          - "patch"
+1
View File
@@ -0,0 +1 @@
IMAGE="docker.io/paritytech/ci-unified:bullseye-1.88.0-2025-06-27-v202511141243"
+1
View File
@@ -0,0 +1 @@
../docs/contributor/PULL_REQUEST_TEMPLATE.md
+82
View File
@@ -0,0 +1,82 @@
# review-bot configuration: maps touched file paths to required reviewer
# teams and approval counts.
rules:
  - name: CI files
    countAuthor: true
    condition:
      include:
        - ^\.gitlab-ci\.yml
        - ^docker/.*
        - ^\.github/.*
        - ^\.gitlab/.*
        - ^\.config/nextest.toml
        - ^\.cargo/.*
        - ^\.forklift/.*
      exclude:
        - ^\.gitlab/pipeline/zombienet.*
    # "or": approval from either reviewer group below satisfies this rule.
    type: "or"
    reviewers:
      - minApprovals: 2
        teams:
          - ci
      - minApprovals: 2
        teams:
          - core-devs

  - name: Core developers
    countAuthor: true
    condition:
      include:
        - .*
      # excluding files from 'Runtime files' and 'CI files' rules
      exclude:
        - ^pezcumulus/teyrchains/common/src/[^/]+\.rs$
        - ^\.gitlab-ci\.yml
        - ^docker/.*
        - ^\.github/.*
        - ^\.gitlab/.*
        - ^\.forklift/.*
        - ^\.config/nextest.toml
        - ^\.cargo/.*
    minApprovals: 2
    type: basic
    teams:
      - core-devs

  # if there are any changes in the bridges subtree (in case of backport changes back to bridges repo)
  - name: Bridges subtree files
    type: basic
    condition:
      include:
        - ^bridges/.*
    minApprovals: 1
    teams:
      - bridges-core

  # Smart Contracts
  - name: Smart Contracts
    type: basic
    condition:
      include:
        - ^bizinikiwi/frame/contracts/.*
        - ^bizinikiwi/frame/revive/.*
    minApprovals: 1
    teams:
      - smart-contracts

  # Protection of THIS file
  - name: Review Bot
    countAuthor: true
    condition:
      include:
        - review-bot\.yml
    # "and": BOTH reviewer groups below must approve.
    type: "and"
    reviewers:
      - minApprovals: 1
        teams:
          - opstooling
      - minApprovals: 1
        teams:
          - locks-review

preventReviewRequests:
  teams:
    - core-devs
+17
View File
@@ -0,0 +1,17 @@
{
"pallets": {
"1": {
"constants": {
"EpochDuration": {
"value": [ 88, 2, 0, 0, 0, 0, 0, 0 ]}
}
},
"2": {
"constants": {
"MinimumPeriod": {
"value": [ 184, 11, 0, 0, 0, 0, 0, 0 ]}
}
}
}
}
+17
View File
@@ -0,0 +1,17 @@
{
"pallets": {
"1": {
"constants": {
"EpochDuration": {
"value": [ 88, 2, 0, 0, 0, 0, 0, 0 ]}
}
},
"2": {
"constants": {
"MinimumPeriod": {
"value": [ 184, 11, 0, 0, 0, 0, 0, 0 ]}
}
}
}
}
+70
View File
@@ -0,0 +1,70 @@
#!/usr/bin/env python3

# A script that checks each workspace crate individually.
# It's relevant to check workspace crates individually because otherwise their compilation problems
# due to feature misconfigurations won't be caught, as exemplified by
# https://github.com/paritytech/substrate/issues/12705
#
# `check-each-crate.py target_group groups_total [disable_forklift]`
#
# - `target_group`: Integer starting from 1, the group this script should execute.
# - `groups_total`: Integer starting from 1, total number of groups.
# - `disable_forklift`: Optional boolean ("true"/"false"), whether to disable forklift.
#   Defaults to true since Pezkuwi doesn't have access to Parity's GCP infrastructure.

import subprocess, sys

# Get all crates
output = subprocess.check_output(["cargo", "tree", "--locked", "--workspace", "--depth", "0", "--prefix", "none"])

# Convert the output into a proper list
crates = []
for line in output.splitlines():
    if line != b"":
        line = line.decode('utf8').split(" ")
        crate_name = line[0]
        # The crate path is always the last element in the line.
        crate_path = line[len(line) - 1].replace("(", "").replace(")", "")
        crates.append((crate_name, crate_path))

# Make the list unique and sorted
crates = list(set(crates))
crates.sort()

target_group = int(sys.argv[1]) - 1
groups_total = int(sys.argv[2])

# Fix: the header above documented a `disable_forklift` argument that was never
# read; accept it now as an optional third argument. Default stays True
# (forklift disabled) so existing two-argument invocations behave identically.
disable_forklift = True
if len(sys.argv) > 3:
    disable_forklift = sys.argv[3].strip().lower() in ("true", "1", "yes")

print(f"Target group: {target_group}, Total groups: {groups_total}, Disable forklift: {disable_forklift}", file=sys.stderr)

if len(crates) == 0:
    print("No crates detected!", file=sys.stderr)
    sys.exit(1)

print(f"Total crates: {len(crates)}", file=sys.stderr)

crates_per_group = len(crates) // groups_total

# If this is the last runner, we need to take care of crates
# after the group that we lost because of the integer division.
if target_group + 1 == groups_total:
    overflow_crates = len(crates) % groups_total
else:
    overflow_crates = 0

print(f"Crates per group: {crates_per_group}", file=sys.stderr)

# Check each crate
for i in range(0, crates_per_group + overflow_crates):
    crate = crates_per_group * target_group + i

    print(f"Checking {crates[crate][0]}", file=sys.stderr)

    cmd = ["cargo", "check", "--locked"]
    cmd.insert(0, 'forklift') if not disable_forklift else None

    res = subprocess.run(cmd, cwd = crates[crate][1])

    if res.returncode != 0:
        sys.exit(1)
+36
View File
@@ -0,0 +1,36 @@
#!/usr/bin/env bash
# Verify that every crate with a README.docify.md has an up-to-date README.md:
# runs `cargo check --features generate-readme` in each such directory and
# fails if README.md has uncommitted changes (i.e. needs regeneration).

echo "Running script relative to $(pwd)"

# Find all README.docify.md files
DOCIFY_FILES=$(find . -name "README.docify.md")

# Initialize a variable to track directories needing README regeneration
NEED_REGENERATION=""

for file in $DOCIFY_FILES; do
  echo "Processing $file"

  # Get the directory containing the docify file
  DIR=$(dirname "$file")

  # Go to the directory and run cargo check; abort if the directory is unreachable.
  cd "$DIR" || exit 1

  # Fix: the backticks around generate-readme are escaped below — unescaped
  # backticks inside a double-quoted string are a command substitution, so the
  # original tried to EXECUTE `generate-readme` while printing the error.
  cargo check --features generate-readme || { echo "Readme generation for $DIR failed. Ensure the crate compiles successfully and has a \`generate-readme\` feature which guards markdown compilation in the crate as follows: https://docs.rs/docify/latest/docify/macro.compile_markdown.html#conventions." && exit 1; }

  # Check if README.md has any uncommitted changes
  git diff --exit-code README.md

  if [ $? -ne 0 ]; then
    echo "Error: Found uncommitted changes in $DIR/README.md"
    NEED_REGENERATION="$NEED_REGENERATION $DIR"
  fi

  # Return to the original directory
  cd - > /dev/null || exit 1
done

# Check if any directories need README regeneration
if [ -n "$NEED_REGENERATION" ]; then
  echo "The following directories need README regeneration:"
  echo "$NEED_REGENERATION"
  exit 1
fi
+71
View File
@@ -0,0 +1,71 @@
#!/usr/bin/env python3
'''
Ensure that the prdoc files are valid.

# Example

```sh
python3 -m pip install cargo-workspace
python3 .github/scripts/check-prdoc.py Cargo.toml prdoc/*.prdoc
```

Produces example output:

```pre
🔎 Reading workspace pezkuwi-sdk/Cargo.toml
📦 Checking 32 prdocs against 493 crates.
✅ All prdocs are valid
```
'''

import os
import yaml
import argparse
import cargo_workspace

def check_prdoc_crate_names(root, paths):
    '''
    Check that all crates of the `crates` section of each prdoc is present in the workspace.

    Exits with code 1 and prints each offending prdoc when a referenced crate
    does not exist in the workspace manifest at `root`.
    '''
    print(f'🔎 Reading workspace {root}.')
    workspace = cargo_workspace.Workspace.from_path(root)
    crate_names = [crate.name for crate in workspace.crates]

    print(f'📦 Checking {len(paths)} prdocs against {len(crate_names)} crates.')
    # Map: prdoc path -> list of crate names it references that are unknown.
    faulty = {}

    for path in paths:
        with open(path, 'r') as f:
            prdoc = yaml.safe_load(f)

        # A prdoc without a `crates` section is treated as referencing none.
        for crate in prdoc.get('crates', []):
            crate = crate['name']
            if crate in crate_names:
                continue

            faulty.setdefault(path, []).append(crate)

    if len(faulty) == 0:
        print('✅ All prdocs are valid.')
    else:
        print('❌ Some prdocs are invalid.')
        for path, crates in faulty.items():
            print(f'💥 {path} lists invalid crate: {", ".join(crates)}')
        exit(1)

def parse_args():
    # One required workspace manifest path, then one or more prdoc files.
    parser = argparse.ArgumentParser(description='Check prdoc files')
    parser.add_argument('root', help='The cargo workspace manifest', metavar='root', type=str, nargs=1)
    parser.add_argument('prdoc', help='The prdoc files', metavar='prdoc', type=str, nargs='*')
    args = parser.parse_args()

    if len(args.prdoc) == 0:
        print('❌ Need at least one prdoc file as argument.')
        exit(1)

    return { 'root': os.path.abspath(args.root[0]), 'prdocs': args.prdoc }

if __name__ == '__main__':
    args = parse_args()
    check_prdoc_crate_names(args['root'], args['prdocs'])
+124
View File
@@ -0,0 +1,124 @@
#!/usr/bin/env python3
"""Compare a runtime's metadata (JSON) against a specs file of expected pallet constants.

Usage:
    check-runtime.py <metadata.json> <specs.json>

Set the LOGLEVEL environment variable (e.g. LOGLEVEL=debug) to control verbosity.
"""
import json
import sys
import logging
import os


def check_constant(spec_pallet_id, spec_pallet_value, meta_constant):
    """
    Check a single constant.

    :param spec_pallet_id: pallet id from the specs file (used only for logging)
    :param spec_pallet_value: the `constants` mapping of one spec pallet
    :param meta_constant: one constant entry from the metadata ('name' + 'value')
    :return: True/False when the names match, None otherwise (meaning "skip")
    """
    if meta_constant['name'] == list(spec_pallet_value.keys())[0]:
        constant = meta_constant['name']
        res = list(spec_pallet_value.values())[0]["value"] == meta_constant["value"]

        logging.debug(f"  Checking pallet:{spec_pallet_id}/constants/{constant}")
        logging.debug(f"  spec_pallet_value: {spec_pallet_value}")
        logging.debug(f"  meta_constant: {meta_constant}")
        logging.info(f"pallet:{spec_pallet_id}/constants/{constant} -> {res}")
        return res
    # Name does not match the spec'd constant: return None so callers filter it out.
    return None


def check_pallet(metadata, spec_pallet):
    """
    Check one pallet.

    :param metadata: full metadata mapping
    :param spec_pallet: (pallet_id, pallet_spec) pair from the specs file
    :return: True when every spec'd constant of this pallet matches the metadata
             (vacuously True if no metadata constant name matches the spec)
    """
    spec_pallet_id, spec_pallet_value = spec_pallet
    logging.debug(f"Pallet: {spec_pallet_id}")

    metadata_pallets = metadata["pallets"]
    metadata_pallet = metadata_pallets[spec_pallet_id]

    res = map(lambda meta_constant_value: check_constant(
        spec_pallet_id, spec_pallet_value["constants"], meta_constant_value),
        metadata_pallet["constants"].values())
    # Drop the None "skipped" entries before aggregating.
    res = list(filter(lambda item: item is not None, res))
    return all(res)


def check_pallets(metadata, specs):
    """
    Check all pallets listed in the specs.

    :param metadata: full metadata mapping
    :param specs: expectations mapping with a top-level 'pallets' key
    :return: True when every spec'd pallet checks out
    """
    res = list(map(lambda spec_pallet: check_pallet(metadata, spec_pallet),
                   specs['pallets'].items()))
    res = list(filter(lambda item: item is not None, res))
    return all(res)


def check_metadata(metadata, specs):
    """
    Check metadata (json) against a list of expectations.

    :param metadata: Metadata in JSON format
    :param specs: Expectations
    :return: Bool
    """
    res = check_pallets(metadata, specs)
    return res


def help():
    """ Show some simple help """
    print(f"You must pass 2 args, you passed {len(sys.argv) - 1}")
    print("Sample call:")
    print("check-runtime.py <metadata.json> <specs.json>")


def load_json(file):
    """ Load json from a file """
    # Fix: use a context manager so the file handle is closed (the original
    # opened the file and never closed it).
    with open(file) as f:
        return json.load(f)


def main():
    LOGLEVEL = os.environ.get('LOGLEVEL', 'INFO').upper()
    logging.basicConfig(level=LOGLEVEL)

    if len(sys.argv) != 3:
        help()
        exit(1)

    metadata_file = sys.argv[1]
    specs_file = sys.argv[2]
    print(f"Checking metadata from: {metadata_file} with specs from: {specs_file}")

    metadata = load_json(metadata_file)
    specs = load_json(specs_file)

    res = check_metadata(metadata, specs)
    if res:
        logging.info("OK")
        exit(0)
    else:
        print("")
        logging.info("Some errors were found, run again with LOGLEVEL=debug")
        exit(1)


if __name__ == "__main__":
    main()
+179
View File
@@ -0,0 +1,179 @@
#!/usr/bin/env python3
# Ensures that:
# - all crates are added to the root workspace
# - local dependencies are resolved via `path`
#
# It does not check that the local paths resolve to the correct crate. This is already done by cargo.
#
# Must be called with a folder containing a `Cargo.toml` workspace file.
import os
import sys
import toml
import argparse
def parse_args():
    """Parse CLI arguments: one workspace directory plus optional --exclude paths.

    Returns a (workspace_dir, exclude_list) tuple.
    """
    cli = argparse.ArgumentParser(description='Check Rust workspace integrity.')
    cli.add_argument('workspace_dir', help='The directory to check', metavar='workspace_dir', type=str, nargs=1)
    cli.add_argument('--exclude', help='Exclude crate paths from the check', metavar='exclude', type=str, nargs='*', default=[])
    parsed = cli.parse_args()
    return (parsed.workspace_dir[0], parsed.exclude)
def main(root, exclude):
    """Run all workspace integrity checks on `root`, skipping paths in `exclude`.

    Each check prints its own diagnostics and calls sys.exit(1) on failure.
    """
    workspace_crates = get_members(root, exclude)
    all_crates = get_crates(root, exclude)
    print(f'📦 Found {len(all_crates)} crates in total')

    check_duplicates(workspace_crates)
    check_missing(workspace_crates, all_crates)
    check_links(all_crates)
# Extract all members from a workspace.
# Return: list of all workspace paths
def get_members(workspace_dir, exclude):
    """Read the root Cargo.toml of `workspace_dir` and return its member paths.

    Exits with an error when the manifest is missing, is not a workspace, or
    lists a member that appears in `exclude`.
    """
    print(f'🔎 Indexing workspace {os.path.abspath(workspace_dir)}')

    root_manifest_path = os.path.join(workspace_dir, "Cargo.toml")
    if not os.path.exists(root_manifest_path):
        # Fix: the original referenced the (not yet defined) `root_manifest`
        # here, raising a NameError instead of printing this message.
        print(f'❌ No root manifest found at {root_manifest_path}')
        sys.exit(1)

    root_manifest = toml.load(root_manifest_path)
    if not 'workspace' in root_manifest:
        print(f'❌ No workspace found in root {root_manifest_path}')
        sys.exit(1)

    if not 'members' in root_manifest['workspace']:
        return []

    members = []
    for member in root_manifest['workspace']['members']:
        if member in exclude:
            print(f'❌ Excluded member should not appear in the workspace {member}')
            sys.exit(1)
        members.append(member)

    return members
# List all members of the workspace.
# Return: Map name -> (path, manifest)
def get_crates(workspace_dir, exclude_crates) -> dict:
    """Walk `workspace_dir` on disk and index every crate manifest found.

    Skips build output ("target" in the path), nested workspace manifests
    other than the root, and any path listed in `exclude_crates`.
    """
    crates = {}

    for root, _dirs, files in os.walk(workspace_dir):
        if "target" in root:
            # Skip build artifacts. NOTE(review): this substring match also
            # skips any source directory whose path merely contains "target".
            continue
        for file in files:
            if file != "Cargo.toml":
                continue

            path = os.path.join(root, file)
            with open(path, "r") as f:
                content = f.read()
                manifest = toml.loads(content)

            if 'workspace' in manifest:
                if root != workspace_dir:
                    print("⏩ Excluded recursive workspace at %s" % path)
                continue

            # Cut off the root path and the trailing /Cargo.toml.
            path = path[len(workspace_dir)+1:-11]
            name = manifest['package']['name']
            if path in exclude_crates:
                print("⏩ Excluded crate %s at %s" % (name, path))
                continue

            crates[name] = (path, manifest)

    return crates
# Check that there are no duplicate entries in the workspace.
def check_duplicates(workspace_crates):
    """Exit with an error if the same path is listed twice in the workspace."""
    print(f'🔎 Checking for duplicate crates')
    seen = set()
    for entry in workspace_crates:
        if entry in seen:
            print(f'❌ crate is listed twice in the workspace {entry}')
            sys.exit(1)
        seen.add(entry)
# Check that all crates are in the workspace.
def check_missing(workspace_crates, all_crates):
    """Exit with an error listing crates found on disk but absent from the workspace."""
    print(f'🔎 Checking for missing crates')
    if len(workspace_crates) == len(all_crates):
        print(f'✅ All {len(all_crates)} crates are in the workspace')
        return

    # Collect (name, path, manifest) triples for crates not in the workspace,
    # sorted by name (then path) for stable output.
    absent = sorted(
        [name, path, manifest]
        for name, (path, manifest) in all_crates.items()
        if path not in workspace_crates
    )

    for name, path, _manifest in absent:
        print("%s in %s" % (name, path))

    print(f'😱 {len(all_crates) - len(workspace_crates)} crates are missing from the workspace')
    sys.exit(1)
# Check that all local dependencies are good.
def check_links(all_crates):
    """Verify every intra-workspace dependency uses the required inheritance style.

    The 'pezkuwi-sdk' umbrella crate may use `path` or workspace inheritance;
    every other crate must use workspace inheritance. Exits 1 when any link
    is broken, printing each offending (crate -> dep) pair.
    """
    print(f'🔎 Checking for broken dependency links')
    # (crate, dep) pairs for every internal link found.
    links = []
    # (crate, dep, reason) triples for links violating the policy.
    broken = []

    for name, (_path, manifest) in all_crates.items():
        def check_deps(deps):
            for dep in deps:
                # Could be renamed:
                dep_name = dep
                if 'package' in deps[dep]:
                    dep_name = deps[dep]['package']
                if dep_name in all_crates:
                    links.append((name, dep_name))

                    # For pezkuwi-sdk umbrella crate: accept both path and workspace inheritance
                    # For all other crates: require workspace inheritance
                    if name == 'pezkuwi-sdk':
                        has_path = 'path' in deps[dep]
                        has_workspace = 'workspace' in deps[dep] and deps[dep]['workspace']
                        if not has_path and not has_workspace:
                            broken.append((name, dep_name, "crate must use path or workspace inheritance"))
                            # NOTE(review): this `return` stops scanning the remaining
                            # deps of the section after the first broken link —
                            # confirm that reporting only the first is intended.
                            return
                    elif not 'workspace' in deps[dep] or not deps[dep]['workspace']:
                        broken.append((name, dep_name, "crate must use workspace inheritance"))
                        # NOTE(review): same early-exit as above.
                        return

        def check_crate(deps):
            to_checks = ['dependencies', 'dev-dependencies', 'build-dependencies']

            for to_check in to_checks:
                if to_check in deps:
                    check_deps(deps[to_check])

        # There could possibly target dependant deps:
        if 'target' in manifest:
            # Target dependant deps can only have one level of nesting:
            for _, target in manifest['target'].items():
                check_crate(target)

        check_crate(manifest)

    links.sort()
    broken.sort()

    if len(broken) > 0:
        for (l, r, reason) in broken:
            print(f'{l} -> {r} ({reason})')
        print("💥 %d out of %d links are broken" % (len(broken), len(links)))
        sys.exit(1)
    else:
        print("✅ All %d internal dependency links are correct" % len(links))
if __name__ == "__main__":
args = parse_args()
main(args[0], args[1])
+93
View File
@@ -0,0 +1,93 @@
#!/usr/bin/env bash
# Validates the .github/zombienet-flaky-tests file to ensure:
# 1. Each entry has the correct format: <test-name>:<issue-number>
# 2. The referenced number is a GitHub Issue
# 3. The GitHub issue exists
# 4. The issue is OPEN (warns if closed)
set -uo pipefail

FLAKY_TESTS_FILE="${1:-.github/zombienet-flaky-tests}"

if [[ ! -f "$FLAKY_TESTS_FILE" ]]; then
  echo "Error: File not found: $FLAKY_TESTS_FILE" >&2
  exit 1
fi

if ! command -v gh &> /dev/null; then
  echo "Error: gh CLI is not installed" >&2
  exit 1
fi

echo "Validating $FLAKY_TESTS_FILE..."
echo

has_errors=false
line_num=0

while IFS= read -r line || [[ -n "$line" ]]; do
  line_num=$((line_num + 1))

  # Skip blank lines.
  if [[ -z "$line" ]]; then
    continue
  fi

  # Parse format: test-name:issue-number
  if [[ ! "$line" =~ ^([^:]+):([0-9]+)$ ]]; then
    echo "❌ Line $line_num: Missing required issue number" >&2
    echo " Entry: '$line'" >&2
    echo " Expected format: <test-name>:<issue-number>" >&2
    echo " Example: zombienet-pezkuwi-test-name:1234" >&2
    has_errors=true
    continue
  fi

  test_name="${BASH_REMATCH[1]}"
  issue_number="${BASH_REMATCH[2]}"

  # Tolerate a failing `gh` call so we can inspect its exit code ourselves.
  # Fix: the original followed this call with `set -e`, enabling errexit that
  # was never on to begin with — from the second loop iteration onward any
  # failing command would abort the script before the final summary printed.
  # This script relies on explicit $? / has_errors handling, so errexit
  # stays off throughout.
  set +e
  issue_data=$(gh issue view "$issue_number" --json state,title,url 2>&1)
  gh_exit_code=$?

  if [[ $gh_exit_code -ne 0 ]]; then
    echo "❌ Line $line_num: Issue #$issue_number does not exist" >&2
    echo " Test: $test_name" >&2
    has_errors=true
    continue
  fi

  url=$(echo "$issue_data" | jq -r '.url')
  state=$(echo "$issue_data" | jq -r '.state')
  title=$(echo "$issue_data" | jq -r '.title')

  # Check if it's an issue (not a PR) by verifying the URL contains '/issues/'
  if [[ ! "$url" =~ /issues/ ]]; then
    echo "❌ Line $line_num: #$issue_number is a Pull Request, not an Issue" >&2
    echo " Test: $test_name" >&2
    echo " URL: $url" >&2
    echo " Please reference a GitHub Issue, not a PR" >&2
    has_errors=true
    continue
  fi

  if [[ "$state" == "OPEN" ]]; then
    echo "✅ Line $line_num: $test_name -> Issue #$issue_number (open)"
  else
    echo "⚠️ Line $line_num: Issue #$issue_number is closed: '$title'" >&2
    echo " Test: $test_name" >&2
    echo " Consider removing this entry if the issue is resolved." >&2
  fi
done < "$FLAKY_TESTS_FILE"

echo

if [[ "$has_errors" == "true" ]]; then
  echo "❌ Validation failed with errors" >&2
  exit 1
else
  echo "✅ All entries are valid"
  exit 0
fi
+62
View File
@@ -0,0 +1,62 @@
# Command Bot Documentation
The command bot allows contributors to perform self-service actions on PRs using comment commands.
## Available Commands
### Label Command (Self-service)
Add labels to your PR without requiring maintainer intervention:
```bash
/cmd label T1-FRAME # Add single label
/cmd label T1-FRAME R0-no-crate-publish-required # Add multiple labels
/cmd label T1-FRAME A2-substantial D3-involved # Add multiple labels
```
**Available Labels:**
The bot dynamically fetches all current labels from the repository, ensuring it's always up-to-date. For label meanings and descriptions, see the [official label documentation](https://docs.pezkuwichain.io/labels/doc_pezkuwi-sdk.html).
**Features**:
- **Auto-Correction**: Automatically fixes high-confidence typos (e.g., `T1-FRAM` → `T1-FRAME`)
- **Case Fixing**: Handles case variations (e.g., `I2-Bug` → `I2-bug`)
- **Smart Suggestions**: For ambiguous inputs, provides multiple options to choose from
### Other Commands
```bash
/cmd fmt # Format code (cargo +nightly fmt and taplo)
/cmd prdoc # Generate PR documentation
/cmd bench # Run benchmarks
/cmd update-ui # Update UI tests
/cmd --help # Show help for all commands
```
### Common Flags
- `--quiet`: Don't post start/end messages in PR
- `--clean`: Clean up previous bot comments
- `--image <image>`: Override docker image
## How It Works
1. **Command Detection**: The bot listens for comments starting with `/cmd` on PRs
2. **Permission Check**: Verifies if the user is an organization member
3. **Command Execution**: Runs the specified command in a containerized environment
4. **Result Handling**:
- For label commands: Applies labels via GitHub API
- For other commands: Commits changes back to the PR branch
5. **Feedback**: Posts success/failure messages in the PR
## Security
- Organization member check prevents unauthorized usage
- Commands from non-members run using bot scripts from master branch
## Troubleshooting
If a command fails:
1. Check the GitHub Actions logs linked in the bot's comment
2. Verify the command syntax matches the examples
3. Ensure you have permission to perform the action
4. For label commands, verify the label names are in the allowed list
+26
View File
@@ -0,0 +1,26 @@
import argparse
"""
Custom help action for argparse, it prints the help message for the main parser and all subparsers.
"""
class _HelpAction(argparse._HelpAction):
def __call__(self, parser, namespace, values, option_string=None):
parser.print_help()
# retrieve subparsers from parser
subparsers_actions = [
action for action in parser._actions
if isinstance(action, argparse._SubParsersAction)]
# there will probably only be one subparser_action,
# but better save than sorry
for subparsers_action in subparsers_actions:
# get all subparsers and print help
for choice, subparser in subparsers_action.choices.items():
print("\n### Command '{}'".format(choice))
print(subparser.format_help())
parser.exit()
+565
View File
@@ -0,0 +1,565 @@
#!/usr/bin/env python3
import os
import sys
import json
import argparse
import _help
import importlib.util
import re
import urllib.request
import urllib.parse
import difflib
_HelpAction = _help._HelpAction
f = open('.github/workflows/runtimes-matrix.json', 'r')
runtimesMatrix = json.load(f)
runtimeNames = list(map(lambda x: x['name'], runtimesMatrix))
common_args = {
'--quiet': {"action": "store_true", "help": "Won't print start/end/failed messages in PR"},
'--clean': {"action": "store_true", "help": "Clean up the previous bot's & author's comments in PR"},
'--image': {"help": "Override docker image '--image docker.io/paritytech/ci-unified:latest'"},
}
def print_and_log(message, output_file='/tmp/cmd/command_output.log'):
    """Echo `message` to stdout and append it (plus a newline) to `output_file`."""
    print(message)
    with open(output_file, 'a') as log:
        log.write(message + '\n')
def setup_logging():
    """Create the /tmp/cmd work directory and truncate the command output log.

    Fixes vs. original: `makedirs(exist_ok=True)` avoids the race between the
    existence check and creation, and the truncating handle from
    open(..., 'w') is now closed via a context manager instead of being
    left to the garbage collector.
    """
    os.makedirs('/tmp/cmd', exist_ok=True)
    with open('/tmp/cmd/command_output.log', 'w'):
        pass
def fetch_repo_labels():
    """Fetch current labels from the GitHub repository.

    Returns a list of label names, or None on any HTTP/network/parse failure
    (the caller decides how to handle the degraded case).
    """
    try:
        # Use GitHub API to get current labels
        repo_owner = os.environ.get('GITHUB_REPOSITORY_OWNER', 'pezkuwichain')
        repo_name = os.environ.get('GITHUB_REPOSITORY', 'pezkuwichain/pezkuwi-sdk').split('/')[-1]
        # NOTE(review): per_page=100 with no pagination — labels beyond the
        # first 100 are silently dropped; confirm the repo stays under 100.
        api_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/labels?per_page=100"

        # Add GitHub token if available for higher rate limits
        headers = {'User-Agent': 'pezkuwi-sdk-cmd-bot'}
        github_token = os.environ.get('GITHUB_TOKEN')
        if github_token:
            headers['Authorization'] = f'token {github_token}'

        req = urllib.request.Request(api_url, headers=headers)
        with urllib.request.urlopen(req) as response:
            if response.getcode() == 200:
                labels_data = json.loads(response.read().decode())
                label_names = [label['name'] for label in labels_data]
                print_and_log(f"Fetched {len(label_names)} labels from repository")
                return label_names
            else:
                print_and_log(f"Failed to fetch labels: HTTP {response.getcode()}")
                return None
    except Exception as e:
        # Degrade to None rather than crashing the bot on network errors.
        print_and_log(f"Error fetching labels from repository: {e}")
        return None
def check_pr_status(pr_number):
    """Check if PR is merged or in merge queue.

    Returns True only when the PR is open, not merged, and not queued for
    auto-merge. Returns False in every other case — including any API or
    network error — i.e. it fails closed to prevent labeling.
    """
    try:
        # Get GitHub token from environment
        github_token = os.environ.get('GITHUB_TOKEN')
        if not github_token:
            print_and_log("Error: GITHUB_TOKEN not set, cannot verify PR status")
            return False  # Prevent labeling if we can't check status

        repo_owner = os.environ.get('GITHUB_REPOSITORY_OWNER', 'pezkuwichain')
        repo_name = os.environ.get('GITHUB_REPOSITORY', 'pezkuwichain/pezkuwi-sdk').split('/')[-1]
        api_url = f"https://api.github.com/repos/{repo_owner}/{repo_name}/pulls/{pr_number}"

        headers = {
            'User-Agent': 'pezkuwi-sdk-cmd-bot',
            'Authorization': f'token {github_token}',
            'Accept': 'application/vnd.github.v3+json'
        }

        req = urllib.request.Request(api_url, headers=headers)
        with urllib.request.urlopen(req) as response:
            if response.getcode() == 200:
                data = json.loads(response.read().decode())

                # Check if PR is merged
                if data.get('merged', False):
                    return False

                # Check if PR is closed
                if data.get('state') == 'closed':
                    return False

                # Check if PR is in merge queue (auto_merge enabled)
                if data.get('auto_merge') is not None:
                    return False

                return True  # PR is open and not in merge queue
            else:
                print_and_log(f"Failed to fetch PR status: HTTP {response.getcode()}")
                return False  # Prevent labeling if we can't check status
    except Exception as e:
        print_and_log(f"Error checking PR status: {e}")
        return False  # Prevent labeling if we can't check status
def find_closest_labels(invalid_label, valid_labels, max_suggestions=3, cutoff=0.6):
    """Return up to `max_suggestions` labels from `valid_labels` that fuzzily
    match `invalid_label` (difflib similarity >= `cutoff`), best match first."""
    return difflib.get_close_matches(
        invalid_label,
        valid_labels,
        n=max_suggestions,
        cutoff=cutoff,
    )
def auto_correct_labels(invalid_labels, valid_labels, auto_correct_threshold=0.8):
    """Automatically correct labels when confidence is high, otherwise suggest.

    :param invalid_labels: labels that failed exact validation
    :param valid_labels: the repository's full label list
    :param auto_correct_threshold: similarity ratio at/above which a label is
        silently corrected
    :return: (corrections, suggestions) — corrections is a list of
        (original, corrected) pairs; suggestions is a list of human-readable
        hint strings for labels that could not be confidently corrected
    """
    corrections = []
    suggestions = []

    for invalid_label in invalid_labels:
        closest = find_closest_labels(invalid_label, valid_labels, max_suggestions=1)

        if closest:
            # Calculate similarity for the top match
            top_match = closest[0]
            # Case-insensitive ratio so 'I2-Bug' vs 'I2-bug' scores as identical.
            similarity = difflib.SequenceMatcher(None, invalid_label.lower(), top_match.lower()).ratio()

            if similarity >= auto_correct_threshold:
                # High confidence - auto-correct
                corrections.append((invalid_label, top_match))
            else:
                # Lower confidence - suggest alternatives
                all_matches = find_closest_labels(invalid_label, valid_labels, max_suggestions=3)
                if all_matches:
                    labels_str = ', '.join(f"'{label}'" for label in all_matches)
                    suggestion = f"'{invalid_label}' → did you mean: {labels_str}?"
                else:
                    suggestion = f"'{invalid_label}' → no close matches found"
                suggestions.append(suggestion)
        else:
            # No close matches - try prefix suggestions
            prefix_match = re.match(r'^([A-Z]\d+)-', invalid_label)
            if prefix_match:
                prefix = prefix_match.group(1)
                prefix_labels = [label for label in valid_labels if label.startswith(prefix + '-')]
                if prefix_labels:
                    # If there's exactly one prefix match, auto-correct it
                    if len(prefix_labels) == 1:
                        corrections.append((invalid_label, prefix_labels[0]))
                    else:
                        # Multiple prefix matches - suggest alternatives
                        suggestion = f"'{invalid_label}' → try labels starting with '{prefix}-': {', '.join(prefix_labels[:3])}"
                        suggestions.append(suggestion)
                else:
                    suggestion = f"'{invalid_label}' → no labels found with prefix '{prefix}-'"
                    suggestions.append(suggestion)
            else:
                suggestion = f"'{invalid_label}' → invalid format (expected format: 'T1-FRAME', 'I2-bug', etc.)"
                suggestions.append(suggestion)

    return corrections, suggestions
parser = argparse.ArgumentParser(prog="/cmd ", description='A command runner for pezkuwi-sdk repo', add_help=False)
parser.add_argument('--help', action=_HelpAction, help='help for help if you need some help') # help for help
for arg, config in common_args.items():
parser.add_argument(arg, **config)
subparsers = parser.add_subparsers(help='a command to run', dest='command')
setup_logging()
"""
BENCH
"""
bench_example = '''**Examples**:
Runs all benchmarks
%(prog)s
Runs benchmarks for pallet_balances and pallet_multisig for all runtimes which have these pallets. **--quiet** makes it to output nothing to PR but reactions
%(prog)s --pallet pallet_balances pallet_xcm_benchmarks::generic --quiet
Runs bench for all pallets for zagros runtime and fails fast on first failed benchmark
%(prog)s --runtime zagros --fail-fast
Does not output anything and cleans up the previous bot's & author command triggering comments in PR
%(prog)s --runtime zagros pezkuwichain --pallet pallet_balances pallet_multisig --quiet --clean
'''
parser_bench = subparsers.add_parser('bench', aliases=['bench-omni'], help='Runs benchmarks (frame omni bencher)', epilog=bench_example, formatter_class=argparse.RawDescriptionHelpFormatter)
for arg, config in common_args.items():
parser_bench.add_argument(arg, **config)
parser_bench.add_argument('--runtime', help='Runtime(s) space separated', choices=runtimeNames, nargs='*', default=runtimeNames)
parser_bench.add_argument('--pallet', help='Pallet(s) space separated', nargs='*', default=[])
parser_bench.add_argument('--fail-fast', help='Fail fast on first failed benchmark', action='store_true')
"""
FMT
"""
parser_fmt = subparsers.add_parser('fmt', help='Formats code (cargo +nightly-VERSION fmt) and configs (taplo format)')
for arg, config in common_args.items():
parser_fmt.add_argument(arg, **config)
"""
Update UI
"""
parser_ui = subparsers.add_parser('update-ui', help='Updates UI tests')
for arg, config in common_args.items():
parser_ui.add_argument(arg, **config)
"""
PRDOC
"""
# Import generate-prdoc.py dynamically
spec = importlib.util.spec_from_file_location("generate_prdoc", ".github/scripts/generate-prdoc.py")
generate_prdoc = importlib.util.module_from_spec(spec)
spec.loader.exec_module(generate_prdoc)
parser_prdoc = subparsers.add_parser('prdoc', help='Generates PR documentation')
generate_prdoc.setup_parser(parser_prdoc, pr_required=False)
"""
LABEL
"""
# Fetch current labels from repository
def get_allowed_labels():
    """Get the current list of allowed labels.

    Returns the repository's live label list. Raises RuntimeError when the
    GitHub API fetch fails — no stale fallback list is kept on purpose.
    """
    repo_labels = fetch_repo_labels()
    if repo_labels is not None:
        return repo_labels
    else:
        # Fail if API fetch fails
        raise RuntimeError("Failed to fetch labels from repository. Please check your connection and try again.")
def validate_and_auto_correct_labels(input_labels, valid_labels):
    """Validate labels and auto-correct when confidence is high.

    :param input_labels: labels requested by the user
    :param valid_labels: labels that exist in the repository
    :return: (final_labels, correction_messages)
    :raises ValueError: when any label can neither be validated nor
        auto-corrected; the message aggregates every problem so the user
        sees all of them at once
    """
    final_labels = []
    correction_messages = []
    all_suggestions = []
    no_match_labels = []

    # Process all labels first to collect all issues
    for label in input_labels:
        if label in valid_labels:
            final_labels.append(label)
        else:
            # Invalid label - try auto-correction
            corrections, suggestions = auto_correct_labels([label], valid_labels)

            if corrections:
                # Auto-correct with high confidence
                original, corrected = corrections[0]
                final_labels.append(corrected)
                similarity = difflib.SequenceMatcher(None, original.lower(), corrected.lower()).ratio()
                # NOTE(review): the message renders as "'X''Y'" — an arrow
                # between the two quoted labels appears to have been lost in a
                # re-encode; confirm the intended text before changing it.
                correction_messages.append(f"Auto-corrected '{original}''{corrected}' (similarity: {similarity:.2f})")
            elif suggestions:
                # Low confidence - collect for batch error
                all_suggestions.extend(suggestions)
            else:
                # No suggestions at all
                no_match_labels.append(label)

    # If there are any labels that couldn't be auto-corrected, show all at once
    if all_suggestions or no_match_labels:
        error_parts = []

        if all_suggestions:
            error_parts.append("Labels requiring manual selection:")
            for suggestion in all_suggestions:
                error_parts.append(f"{suggestion}")

        if no_match_labels:
            if all_suggestions:
                error_parts.append("")  # Empty line for separation
            error_parts.append("Labels with no close matches:")
            for label in no_match_labels:
                error_parts.append(f"'{label}' → no valid suggestions available")

        error_parts.append("")
        error_parts.append("For all available labels, see: https://docs.pezkuwichain.io/labels/doc_pezkuwi-sdk.html")

        error_msg = "\n".join(error_parts)
        raise ValueError(error_msg)

    return final_labels, correction_messages
label_example = '''**Examples**:
Add single label
%(prog)s T1-FRAME
Add multiple labels
%(prog)s T1-FRAME R0-no-crate-publish-required
Add multiple labels
%(prog)s T1-FRAME A2-substantial D3-involved
Labels are fetched dynamically from the repository.
Typos are auto-corrected when confidence is high (>80% similarity).
For label meanings, see: https://docs.pezkuwichain.io/labels/doc_pezkuwi-sdk.html
'''
parser_label = subparsers.add_parser('label', help='Add labels to PR (self-service for contributors)', epilog=label_example, formatter_class=argparse.RawDescriptionHelpFormatter)
for arg, config in common_args.items():
parser_label.add_argument(arg, **config)
parser_label.add_argument('labels', nargs='+', help='Labels to add to the PR (auto-corrects typos)')
def main():
    """Entry point: parse CLI args and dispatch the chosen sub-command.

    Sub-commands handled here:
      bench / bench-omni - build runtimes and run frame-omni-bencher per pallet
      fmt                - cargo +nightly fmt and taplo
      update-ui          - regenerate UI test expectations
      prdoc              - delegate to generate_prdoc.main
      label              - validate/auto-correct PR labels, emit JSON for CI
    Exits the process with a non-zero code on failure.
    """
    global args, unknown, runtimesMatrix
    args, unknown = parser.parse_known_args()
    print(f'args: {args}')

    if args.command == 'bench' or args.command == 'bench-omni':
        runtime_pallets_map = {}
        failed_benchmarks = {}
        successful_benchmarks = {}

        # Weights are always generated from a `production` build.
        profile = "production"

        print(f'Provided runtimes: {args.runtime}')
        # convert to mapped dict
        runtimesMatrix = list(filter(lambda x: x['name'] in args.runtime, runtimesMatrix))
        runtimesMatrix = {x['name']: x for x in runtimesMatrix}
        print(f'Filtered out runtimes: {runtimesMatrix}')

        # Install the benchmarking binary up front; nothing works without it.
        compile_bencher = os.system(f"cargo install -q --path substrate/utils/frame/omni-bencher --locked --profile {profile}")
        if compile_bencher != 0:
            print_and_log('❌ Failed to compile frame-omni-bencher')
            sys.exit(1)

        # loop over remaining runtimes to collect available pallets
        for runtime in runtimesMatrix.values():
            build_command = f"forklift cargo build -q -p {runtime['package']} --profile {profile} --features={runtime['bench_features']}"
            print(f'-- building "{runtime["name"]}" with `{build_command}`')
            build_status = os.system(build_command)
            if build_status != 0:
                print_and_log(f'❌ Failed to build {runtime["name"]}')
                # With --fail-fast abort the whole run; otherwise skip this runtime.
                if args.fail_fast:
                    sys.exit(1)
                else:
                    continue

            print(f'-- listing pallets for benchmark for {runtime["name"]}')
            wasm_file = f"target/{profile}/wbuild/{runtime['package']}/{runtime['package'].replace('-', '_')}.wasm"
            # `--list` prints CSV rows; the first column of each row is the pallet name.
            list_command = f"frame-omni-bencher v1 benchmark pallet " \
                f"--no-csv-header " \
                f"--no-storage-info " \
                f"--no-min-squares " \
                f"--no-median-slopes " \
                f"--all " \
                f"--list " \
                f"--runtime={wasm_file} " \
                f"{runtime['bench_flags']}"
            print(f'-- running: {list_command}')
            output = os.popen(list_command).read()
            raw_pallets = output.strip().split('\n')

            # De-duplicate: a pallet appears once per extrinsic in the listing.
            all_pallets = set()
            for pallet in raw_pallets:
                if pallet:
                    all_pallets.add(pallet.split(',')[0].strip())

            pallets = list(all_pallets)
            print(f'Pallets in {runtime["name"]}: {pallets}')
            runtime_pallets_map[runtime['name']] = pallets
            print(f'\n')

        # filter out only the specified pallets from collected runtimes/pallets
        if args.pallet:
            print(f'Pallets: {args.pallet}')
            new_pallets_map = {}
            # keep only specified pallets if they exist in the runtime
            for runtime in runtime_pallets_map:
                if set(args.pallet).issubset(set(runtime_pallets_map[runtime])):
                    new_pallets_map[runtime] = args.pallet

            runtime_pallets_map = new_pallets_map

        print(f'Filtered out runtimes & pallets: {runtime_pallets_map}\n')

        # Nothing left to benchmark: print the most specific message and bail.
        if not runtime_pallets_map:
            if args.pallet and not args.runtime:
                print(f"No pallets {args.pallet} found in any runtime")
            elif args.runtime and not args.pallet:
                print(f"{args.runtime} runtime does not have any pallets")
            elif args.runtime and args.pallet:
                print(f"No pallets {args.pallet} found in {args.runtime}")
            else:
                print('No runtimes found')
            sys.exit(1)

        for runtime in runtime_pallets_map:
            for pallet in runtime_pallets_map[runtime]:
                config = runtimesMatrix[runtime]
                header_path = os.path.abspath(config['header'])
                template = None

                print(f'-- config: {config}')
                if runtime == 'dev':
                    # to support sub-modules (https://github.com/paritytech/command-bot/issues/275)
                    search_manifest_path = f"cargo metadata --locked --format-version 1 --no-deps | jq -r '.packages[] | select(.name == \"{pallet.replace('_', '-')}\") | .manifest_path'"
                    print(f'-- running: {search_manifest_path}')
                    manifest_path = os.popen(search_manifest_path).read()
                    if not manifest_path:
                        print(f'-- pallet {pallet} not found in dev runtime')
                        if args.fail_fast:
                            print_and_log(f'Error: {pallet} not found in dev runtime')
                            sys.exit(1)
                    package_dir = os.path.dirname(manifest_path)
                    print(f'-- package_dir: {package_dir}')
                    print(f'-- manifest_path: {manifest_path}')
                    # Dev-runtime weights are written next to the pallet's own sources.
                    output_path = os.path.join(package_dir, "src", "weights.rs")
                    # TODO: we can remove once all pallets in dev runtime are migrated to polkadot-sdk-frame
                    try:
                        uses_polkadot_sdk_frame = "true" in os.popen(f"cargo metadata --locked --format-version 1 --no-deps | jq -r '.packages[] | select(.name == \"{pallet.replace('_', '-')}\") | .dependencies | any(.name == \"polkadot-sdk-frame\")'").read()
                        print(f'uses_polkadot_sdk_frame: {uses_polkadot_sdk_frame}')
                    # Empty output from the previous os.popen command
                    except StopIteration:
                        print(f'Error: {pallet} not found in dev runtime')
                        uses_polkadot_sdk_frame = False
                    template = config['template']
                    # NOTE(review): "(:?" looks like a typo for the non-capturing
                    # group "(?:"; as written it also allows an optional ":" before
                    # "umbrella-", but it still matches both intended template names.
                    if uses_polkadot_sdk_frame and re.match(r"frame-(:?umbrella-)?weight-template\.hbs", os.path.normpath(template).split(os.path.sep)[-1]):
                        template = "substrate/.maintain/frame-umbrella-weight-template.hbs"
                    print(f'template: {template}')
                else:
                    default_path = f"./{config['path']}/src/weights"
                    xcm_path = f"./{config['path']}/src/weights/xcm"
                    output_path = default_path
                    # XCM benchmarks use a dedicated template and output directory.
                    if pallet.startswith("pallet_xcm_benchmarks"):
                        template = config['template']
                        output_path = xcm_path

                print(f'-- benchmarking {pallet} in {runtime} into {output_path}')
                cmd = f"frame-omni-bencher v1 benchmark pallet " \
                    f"--extrinsic=* " \
                    f"--runtime=target/{profile}/wbuild/{config['package']}/{config['package'].replace('-', '_')}.wasm " \
                    f"--pallet={pallet} " \
                    f"--header={header_path} " \
                    f"--output={output_path} " \
                    f"--wasm-execution=compiled " \
                    f"--steps=50 " \
                    f"--repeat=20 " \
                    f"--heap-pages=4096 " \
                    f"{f'--template={template} ' if template else ''}" \
                    f"--no-storage-info --no-min-squares --no-median-slopes " \
                    f"{config['bench_flags']}"
                print(f'-- Running: {cmd} \n')
                status = os.system(cmd)
                if status != 0 and args.fail_fast:
                    print_and_log(f'❌ Failed to benchmark {pallet} in {runtime}')
                    sys.exit(1)

                # Otherwise collect failed benchmarks and print them at the end
                # push failed pallets to failed_benchmarks
                if status != 0:
                    failed_benchmarks[f'{runtime}'] = failed_benchmarks.get(f'{runtime}', []) + [pallet]
                else:
                    successful_benchmarks[f'{runtime}'] = successful_benchmarks.get(f'{runtime}', []) + [pallet]

        if failed_benchmarks:
            print_and_log('❌ Failed benchmarks of runtimes/pallets:')
            for runtime, pallets in failed_benchmarks.items():
                print_and_log(f'-- {runtime}: {pallets}')

        if successful_benchmarks:
            print_and_log('✅ Successful benchmarks of runtimes/pallets:')
            for runtime, pallets in successful_benchmarks.items():
                print_and_log(f'-- {runtime}: {pallets}')

    elif args.command == 'fmt':
        command = f"cargo +nightly fmt"
        print(f'Formatting with `{command}`')
        nightly_status = os.system(f'{command}')
        taplo_status = os.system('taplo format --config .config/taplo.toml')

        if (nightly_status != 0 or taplo_status != 0):
            print_and_log('❌ Failed to format code')
            sys.exit(1)

    elif args.command == 'update-ui':
        command = 'sh ./scripts/update-ui-tests.sh'
        print(f'Updating ui with `{command}`')
        status = os.system(f'{command}')

        if status != 0:
            print_and_log('❌ Failed to update ui')
            sys.exit(1)

    elif args.command == 'prdoc':
        # Call the main function from ./github/scripts/generate-prdoc.py module
        exit_code = generate_prdoc.main(args)
        if exit_code != 0:
            print_and_log('❌ Failed to generate prdoc')
            sys.exit(exit_code)

    elif args.command == 'label':
        # The actual labeling is handled by the GitHub Action workflow
        # This script validates and auto-corrects labels
        try:
            # Check if PR is still open and not merged/in merge queue
            pr_number = os.environ.get('PR_NUM')
            if pr_number:
                if not check_pr_status(pr_number):
                    raise ValueError("Cannot modify labels on merged PRs or PRs in merge queue")

            # Check if user has permission to modify labels
            is_org_member = os.environ.get('IS_ORG_MEMBER', 'false').lower() == 'true'
            is_pr_author = os.environ.get('IS_PR_AUTHOR', 'false').lower() == 'true'

            if not is_org_member and not is_pr_author:
                raise ValueError("Only the PR author or organization members can modify labels")

            # Get allowed labels dynamically
            try:
                allowed_labels = get_allowed_labels()
            except RuntimeError as e:
                # Surface fetch failures through the same ValueError path below.
                raise ValueError(str(e))

            # Validate and auto-correct labels
            final_labels, correction_messages = validate_and_auto_correct_labels(args.labels, allowed_labels)

            # Show auto-correction messages
            for message in correction_messages:
                print(message)

            # Output labels as JSON for GitHub Action
            import json
            labels_output = {"labels": final_labels}
            print(f"LABELS_JSON: {json.dumps(labels_output)}")

        except ValueError as e:
            print_and_log(f'{e}')
            # Output error as JSON for GitHub Action
            import json
            error_output = {
                "error": "validation_failed",
                "message": "Invalid labels found. Please check the suggestions below and try again.",
                "details": str(e)
            }
            print(f"ERROR_JSON: {json.dumps(error_output)}")
            sys.exit(1)

    print('🚀 Done')


if __name__ == '__main__':
    main()
+773
View File
@@ -0,0 +1,773 @@
import unittest
from unittest.mock import patch, mock_open, MagicMock, call
import json
import sys
import os
import argparse
# Mock data for runtimes-matrix.json: one entry per runtime the bench command
# can target.  Field meanings (mirroring the real matrix file consumed by cmd.py):
#   name           - runtime identifier passed via --runtime
#   package        - cargo package that builds the runtime wasm
#   path           - source directory; used to derive weight output paths
#   header         - license header file passed to frame-omni-bencher
#   template       - handlebars weight template for this runtime
#   bench_features - cargo features enabled for benchmarking builds
#   bench_flags    - extra flags appended to the bencher invocation
mock_runtimes_matrix = [
    {
        "name": "dev",
        "package": "kitchensink-runtime",
        "path": "substrate/frame",
        "header": "substrate/HEADER-APACHE2",
        "template": "substrate/.maintain/frame-weight-template.hbs",
        "bench_features": "runtime-benchmarks",
        "bench_flags": "--flag1 --flag2"
    },
    {
        "name": "zagros",
        "package": "zagros-runtime",
        "path": "pezkuwi/runtime/zagros",
        "header": "pezkuwi/file_header.txt",
        "template": "pezkuwi/xcm/pallet-xcm-benchmarks/template.hbs",
        "bench_features": "runtime-benchmarks",
        "bench_flags": "--flag3 --flag4"
    },
    {
        # Intentionally has empty bench_flags to cover that code path.
        "name": "pezkuwichain",
        "package": "pezkuwichain-runtime",
        "path": "pezkuwi/runtime/pezkuwichain",
        "header": "pezkuwi/file_header.txt",
        "template": "pezkuwi/xcm/pallet-xcm-benchmarks/template.hbs",
        "bench_features": "runtime-benchmarks",
        "bench_flags": ""
    },
    {
        # Cumulus-style (parachain) runtime entry.
        "name": "asset-hub-zagros",
        "package": "asset-hub-zagros-runtime",
        "path": "cumulus/teyrchains/runtimes/assets/asset-hub-zagros",
        "header": "cumulus/file_header.txt",
        "template": "cumulus/templates/xcm-bench-template.hbs",
        "bench_features": "runtime-benchmarks",
        "bench_flags": "--flag7 --flag8"
    }
]
def get_mock_bench_output(runtime, pallets, output_path, header, bench_flags, template=None):
    """Reproduce the exact frame-omni-bencher command line that cmd.py issues.

    Used by the tests below to build expected `os.system` call strings.
    `template` inserts an extra `--template=...` flag when given; `bench_flags`
    is appended verbatim at the end (may be empty).
    """
    wasm = f"target/production/wbuild/{runtime}-runtime/{runtime.replace('-', '_')}_runtime.wasm"
    pieces = [
        "frame-omni-bencher v1 benchmark pallet --extrinsic=*",
        f"--runtime={wasm}",
        f"--pallet={pallets} --header={header}",
        f"--output={output_path}",
        "--wasm-execution=compiled",
        "--steps=50 --repeat=20 --heap-pages=4096",
    ]
    if template:
        pieces.append(f"--template={template}")
    pieces.append("--no-storage-info --no-min-squares --no-median-slopes")
    # Appending even an empty string preserves the original trailing space
    # produced when bench_flags is "".
    pieces.append(bench_flags)
    return " ".join(pieces)
class TestCmd(unittest.TestCase):
    """Unit tests for cmd.py; every file/shell/module interaction is mocked."""

    def setUp(self):
        """Install the patchers each test depends on; started in declaration order."""
        # File access + JSON parsing: cmd.py reads the mocked runtimes matrix
        # instead of the on-disk runtimes-matrix.json.
        self.patcher1 = patch('builtins.open', new_callable=mock_open, read_data=json.dumps(mock_runtimes_matrix))
        self.patcher2 = patch('json.load', return_value=mock_runtimes_matrix)
        # CLI parsing and shell helpers (os.system returns success by default).
        self.patcher3 = patch('argparse.ArgumentParser.parse_known_args')
        self.patcher4 = patch('os.system', return_value=0)
        self.patcher5 = patch('os.popen')
        # Dynamic import machinery used by cmd.py to load generate_prdoc.
        self.patcher6 = patch('importlib.util.spec_from_file_location', return_value=MagicMock())
        self.patcher7 = patch('importlib.util.module_from_spec', return_value=MagicMock())
        self.patcher8 = patch('cmd.generate_prdoc.main', return_value=0)

        self.mock_open = self.patcher1.start()
        self.mock_json_load = self.patcher2.start()
        self.mock_parse_args = self.patcher3.start()
        self.mock_system = self.patcher4.start()
        self.mock_popen = self.patcher5.start()
        self.mock_spec_from_file_location = self.patcher6.start()
        self.mock_module_from_spec = self.patcher7.start()
        self.mock_generate_prdoc_main = self.patcher8.start()

        # Ensure that cmd.py uses the mock_runtimes_matrix
        import cmd
        cmd.runtimesMatrix = mock_runtimes_matrix
def tearDown(self):
self.patcher1.stop()
self.patcher2.stop()
self.patcher3.stop()
self.patcher4.stop()
self.patcher5.stop()
self.patcher6.stop()
self.patcher7.stop()
self.patcher8.stop()
    def test_bench_command_normal_execution_all_runtimes(self):
        """bench-omni over every mocked runtime: all runtimes are built, and
        pallet_balances is benchmarked wherever the listing reports it
        (pezkuwichain is skipped because its listing lacks the pallet)."""
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=list(map(lambda x: x['name'], mock_runtimes_matrix)),
            pallet=['pallet_balances'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])

        # os.popen().read() results, consumed in call order by cmd.main().
        self.mock_popen.return_value.read.side_effect = [
            "pallet_balances\npallet_staking\npallet_something\n",  # Output for dev runtime
            "pallet_balances\npallet_staking\npallet_something\n",  # Output for zagros runtime
            "pallet_staking\npallet_something\n",  # Output for pezkuwichain runtime - no pallet here
            "pallet_balances\npallet_staking\npallet_something\n",  # Output for asset-hub-zagros runtime
            "./substrate/frame/balances/Cargo.toml\n",  # Mock manifest path for dev -> pallet_balances
        ]

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

        expected_calls = [
            # Build calls
            call("forklift cargo build -q -p kitchensink-runtime --profile production --features=runtime-benchmarks"),
            call("forklift cargo build -q -p zagros-runtime --profile production --features=runtime-benchmarks"),
            call("forklift cargo build -q -p pezkuwichain-runtime --profile production --features=runtime-benchmarks"),
            call("forklift cargo build -q -p asset-hub-zagros-runtime --profile production --features=runtime-benchmarks"),

            call(get_mock_bench_output(
                runtime='kitchensink',
                pallets='pallet_balances',
                output_path='./substrate/frame/balances/src/weights.rs',
                header=os.path.abspath('substrate/HEADER-APACHE2'),
                bench_flags='--flag1 --flag2',
                template="substrate/.maintain/frame-weight-template.hbs"
            )),
            call(get_mock_bench_output(
                runtime='zagros',
                pallets='pallet_balances',
                output_path='./pezkuwi/runtime/zagros/src/weights',
                header=os.path.abspath('pezkuwi/file_header.txt'),
                bench_flags='--flag3 --flag4'
            )),
            # skips pezkuwichain benchmark
            call(get_mock_bench_output(
                runtime='asset-hub-zagros',
                pallets='pallet_balances',
                output_path='./cumulus/teyrchains/runtimes/assets/asset-hub-zagros/src/weights',
                header=os.path.abspath('cumulus/file_header.txt'),
                bench_flags='--flag7 --flag8'
            )),
        ]
        self.mock_system.assert_has_calls(expected_calls, any_order=True)
    def test_bench_command_normal_execution(self):
        """bench-omni for a single runtime with two pallets."""
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=['zagros'],
            pallet=['pallet_balances', 'pallet_staking'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])
        header_path = os.path.abspath('pezkuwi/file_header.txt')
        # Single os.popen().read() result: the pallet listing for zagros.
        self.mock_popen.return_value.read.side_effect = [
            "pallet_balances\npallet_staking\npallet_something\n",  # Output for zagros runtime
        ]

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

        expected_calls = [
            # Build calls
            call("forklift cargo build -q -p zagros-runtime --profile production --features=runtime-benchmarks"),

            # Zagros runtime calls
            call(get_mock_bench_output(
                runtime='zagros',
                pallets='pallet_balances',
                output_path='./pezkuwi/runtime/zagros/src/weights',
                header=header_path,
                bench_flags='--flag3 --flag4'
            )),
            call(get_mock_bench_output(
                runtime='zagros',
                pallets='pallet_staking',
                output_path='./pezkuwi/runtime/zagros/src/weights',
                header=header_path,
                bench_flags='--flag3 --flag4'
            )),
        ]
        self.mock_system.assert_has_calls(expected_calls, any_order=True)
    def test_bench_command_normal_execution_xcm(self):
        """bench-omni for an XCM benchmark pallet: output goes to the
        .../weights/xcm directory and the runtime's XCM template is used."""
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=['zagros'],
            pallet=['pallet_xcm_benchmarks::generic'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])
        header_path = os.path.abspath('pezkuwi/file_header.txt')
        self.mock_popen.return_value.read.side_effect = [
            "pallet_balances\npallet_staking\npallet_something\npallet_xcm_benchmarks::generic\n",  # Output for zagros runtime
        ]

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

        expected_calls = [
            # Build calls
            call("forklift cargo build -q -p zagros-runtime --profile production --features=runtime-benchmarks"),

            # Zagros runtime calls
            call(get_mock_bench_output(
                runtime='zagros',
                pallets='pallet_xcm_benchmarks::generic',
                output_path='./pezkuwi/runtime/zagros/src/weights/xcm',
                header=header_path,
                bench_flags='--flag3 --flag4',
                template="pezkuwi/xcm/pallet-xcm-benchmarks/template.hbs"
            )),
        ]
        self.mock_system.assert_has_calls(expected_calls, any_order=True)
    def test_bench_command_two_runtimes_two_pallets(self):
        """bench-omni across two relay runtimes, two pallets each; note the
        pezkuwichain entry has empty bench_flags."""
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=['zagros', 'pezkuwichain'],
            pallet=['pallet_balances', 'pallet_staking'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])
        # One pallet-listing result per runtime, in order.
        self.mock_popen.return_value.read.side_effect = [
            "pallet_staking\npallet_balances\n",  # Output for zagros runtime
            "pallet_staking\npallet_balances\n",  # Output for pezkuwichain runtime
        ]

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

        header_path = os.path.abspath('pezkuwi/file_header.txt')
        expected_calls = [
            # Build calls
            call("forklift cargo build -q -p zagros-runtime --profile production --features=runtime-benchmarks"),
            call("forklift cargo build -q -p pezkuwichain-runtime --profile production --features=runtime-benchmarks"),

            # Zagros runtime calls
            call(get_mock_bench_output(
                runtime='zagros',
                pallets='pallet_staking',
                output_path='./pezkuwi/runtime/zagros/src/weights',
                header=header_path,
                bench_flags='--flag3 --flag4'
            )),
            call(get_mock_bench_output(
                runtime='zagros',
                pallets='pallet_balances',
                output_path='./pezkuwi/runtime/zagros/src/weights',
                header=header_path,
                bench_flags='--flag3 --flag4'
            )),
            # Pezkuwichain runtime calls
            call(get_mock_bench_output(
                runtime='pezkuwichain',
                pallets='pallet_staking',
                output_path='./pezkuwi/runtime/pezkuwichain/src/weights',
                header=header_path,
                bench_flags=''
            )),
            call(get_mock_bench_output(
                runtime='pezkuwichain',
                pallets='pallet_balances',
                output_path='./pezkuwi/runtime/pezkuwichain/src/weights',
                header=header_path,
                bench_flags=''
            )),
        ]
        self.mock_system.assert_has_calls(expected_calls, any_order=True)
    def test_bench_command_one_dev_runtime(self):
        """bench-omni for the dev runtime: the weights output path is derived
        from the pallet's Cargo.toml location (second os.popen result)."""
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=['dev'],
            pallet=['pallet_balances'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])
        manifest_dir = "substrate/frame/kitchensink"
        self.mock_popen.return_value.read.side_effect = [
            "pallet_balances\npallet_something",  # Output for dev runtime
            manifest_dir + "/Cargo.toml"  # Output for manifest path in dev runtime
        ]
        header_path = os.path.abspath('substrate/HEADER-APACHE2')

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

        expected_calls = [
            # Build calls
            call("forklift cargo build -q -p kitchensink-runtime --profile production --features=runtime-benchmarks"),
            # Dev (kitchensink) runtime calls
            call(get_mock_bench_output(
                runtime='kitchensink',
                pallets='pallet_balances',
                output_path=manifest_dir + "/src/weights.rs",
                header=header_path,
                bench_flags='--flag1 --flag2',
                template="substrate/.maintain/frame-weight-template.hbs"
            )),
        ]
        self.mock_system.assert_has_calls(expected_calls, any_order=True)
    def test_bench_command_one_cumulus_runtime(self):
        """bench-omni for a single cumulus (parachain) runtime."""
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=['asset-hub-zagros'],
            pallet=['pallet_assets'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])
        self.mock_popen.return_value.read.side_effect = [
            "pallet_assets\n",  # Output for asset-hub-zagros runtime
        ]
        header_path = os.path.abspath('cumulus/file_header.txt')

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

        expected_calls = [
            # Build calls
            call("forklift cargo build -q -p asset-hub-zagros-runtime --profile production --features=runtime-benchmarks"),
            # Asset-hub-zagros runtime calls
            call(get_mock_bench_output(
                runtime='asset-hub-zagros',
                pallets='pallet_assets',
                output_path='./cumulus/teyrchains/runtimes/assets/asset-hub-zagros/src/weights',
                header=header_path,
                bench_flags='--flag7 --flag8'
            )),
        ]
        self.mock_system.assert_has_calls(expected_calls, any_order=True)
    def test_bench_command_one_cumulus_runtime_xcm(self):
        """bench-omni mixing an XCM benchmark pallet with a regular pallet on
        a cumulus runtime: only the XCM pallet gets the template and the
        .../weights/xcm output directory."""
        self.mock_parse_args.return_value = (argparse.Namespace(
            command='bench-omni',
            runtime=['asset-hub-zagros'],
            pallet=['pallet_xcm_benchmarks::generic', 'pallet_assets'],
            fail_fast=True,
            quiet=False,
            clean=False,
            image=None
        ), [])
        self.mock_popen.return_value.read.side_effect = [
            "pallet_assets\npallet_xcm_benchmarks::generic\n",  # Output for asset-hub-zagros runtime
        ]
        header_path = os.path.abspath('cumulus/file_header.txt')

        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

        expected_calls = [
            # Build calls
            call("forklift cargo build -q -p asset-hub-zagros-runtime --profile production --features=runtime-benchmarks"),
            # Asset-hub-zagros runtime calls
            call(get_mock_bench_output(
                runtime='asset-hub-zagros',
                pallets='pallet_xcm_benchmarks::generic',
                output_path='./cumulus/teyrchains/runtimes/assets/asset-hub-zagros/src/weights/xcm',
                header=header_path,
                bench_flags='--flag7 --flag8',
                template="cumulus/templates/xcm-bench-template.hbs"
            )),
            call(get_mock_bench_output(
                runtime='asset-hub-zagros',
                pallets='pallet_assets',
                output_path='./cumulus/teyrchains/runtimes/assets/asset-hub-zagros/src/weights',
                header=header_path,
                bench_flags='--flag7 --flag8'
            )),
        ]
        self.mock_system.assert_has_calls(expected_calls, any_order=True)
    @patch('argparse.ArgumentParser.parse_known_args', return_value=(argparse.Namespace(command='fmt'), []))
    @patch('os.system', return_value=0)
    def test_fmt_command(self, mock_system, mock_parse_args):
        """fmt runs `cargo +nightly fmt` plus taplo and exits cleanly on success."""
        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

        mock_system.assert_any_call('cargo +nightly fmt')
        mock_system.assert_any_call('taplo format --config .config/taplo.toml')
    @patch('argparse.ArgumentParser.parse_known_args', return_value=(argparse.Namespace(command='update-ui'), []))
    @patch('os.system', return_value=0)
    def test_update_ui_command(self, mock_system, mock_parse_args):
        """update-ui shells out to the update script and exits cleanly on success."""
        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

        mock_system.assert_called_with('sh ./scripts/update-ui-tests.sh')
    @patch('argparse.ArgumentParser.parse_known_args', return_value=(argparse.Namespace(command='prdoc'), []))
    @patch('os.system', return_value=0)
    def test_prdoc_command(self, mock_system, mock_parse_args):
        """prdoc delegates to generate_prdoc.main with the parsed namespace."""
        with patch('sys.exit') as mock_exit:
            import cmd
            cmd.main()
            mock_exit.assert_not_called()

        self.mock_generate_prdoc_main.assert_called_with(mock_parse_args.return_value[0])
@patch.dict('os.environ', {'PR_NUM': '123', 'IS_ORG_MEMBER': 'true', 'IS_PR_AUTHOR': 'false', 'GITHUB_TOKEN': 'fake_token'})
@patch('cmd.get_allowed_labels')
@patch('cmd.check_pr_status')
@patch('argparse.ArgumentParser.parse_known_args')
def test_label_command_valid_labels(self, mock_parse_args, mock_check_pr_status, mock_get_labels):
"""Test label command with valid labels"""
mock_get_labels.return_value = ['T1-FRAME', 'R0-no-crate-publish-required', 'D2-substantial']
mock_check_pr_status.return_value = True # PR is open
mock_parse_args.return_value = (argparse.Namespace(
command='label',
labels=['T1-FRAME', 'R0-no-crate-publish-required']
), [])
with patch('sys.exit') as mock_exit, patch('builtins.print') as mock_print:
import cmd
cmd.main()
mock_exit.assert_not_called()
# Check that JSON output was printed
json_call = None
for call in mock_print.call_args_list:
if 'LABELS_JSON:' in str(call):
json_call = call
break
self.assertIsNotNone(json_call)
self.assertIn('T1-FRAME', str(json_call))
self.assertIn('R0-no-crate-publish-required', str(json_call))
@patch.dict('os.environ', {'PR_NUM': '123', 'IS_ORG_MEMBER': 'true', 'IS_PR_AUTHOR': 'false', 'GITHUB_TOKEN': 'fake_token'})
@patch('cmd.get_allowed_labels')
@patch('cmd.check_pr_status')
@patch('argparse.ArgumentParser.parse_known_args')
def test_label_command_auto_correction(self, mock_parse_args, mock_check_pr_status, mock_get_labels):
"""Test label command with auto-correctable typos"""
mock_get_labels.return_value = ['T1-FRAME', 'R0-no-crate-publish-required', 'D2-substantial']
mock_check_pr_status.return_value = True # PR is open
mock_parse_args.return_value = (argparse.Namespace(
command='label',
labels=['T1-FRAM', 'R0-no-crate-publish'] # Typos that should be auto-corrected
), [])
with patch('sys.exit') as mock_exit, patch('builtins.print') as mock_print:
import cmd
cmd.main()
mock_exit.assert_not_called()
# Check for auto-correction messages
correction_messages = [str(call) for call in mock_print.call_args_list if 'Auto-corrected' in str(call)]
self.assertTrue(len(correction_messages) > 0)
# Check that JSON output contains corrected labels
json_call = None
for call in mock_print.call_args_list:
if 'LABELS_JSON:' in str(call):
json_call = call
break
self.assertIsNotNone(json_call)
self.assertIn('T1-FRAME', str(json_call))
self.assertIn('R0-no-crate-publish-required', str(json_call))
@patch.dict('os.environ', {'PR_NUM': '123', 'IS_ORG_MEMBER': 'true', 'IS_PR_AUTHOR': 'false', 'GITHUB_TOKEN': 'fake_token'})
@patch('cmd.get_allowed_labels')
@patch('cmd.check_pr_status')
@patch('argparse.ArgumentParser.parse_known_args')
def test_label_command_prefix_correction(self, mock_parse_args, mock_check_pr_status, mock_get_labels):
"""Test label command with prefix matching"""
mock_get_labels.return_value = ['T1-FRAME', 'T2-pallets', 'R0-no-crate-publish-required']
mock_check_pr_status.return_value = True # PR is open
mock_parse_args.return_value = (argparse.Namespace(
command='label',
labels=['T1-something'] # Should match T1-FRAME as the only T1- label
), [])
with patch('sys.exit') as mock_exit, patch('builtins.print') as mock_print:
import cmd
cmd.main()
mock_exit.assert_not_called()
# Check that JSON output contains corrected label
json_call = None
for call in mock_print.call_args_list:
if 'LABELS_JSON:' in str(call):
json_call = call
break
self.assertIsNotNone(json_call)
self.assertIn('T1-FRAME', str(json_call))
@patch.dict('os.environ', {'PR_NUM': '123', 'IS_ORG_MEMBER': 'true', 'IS_PR_AUTHOR': 'false', 'GITHUB_TOKEN': 'fake_token'})
@patch('cmd.get_allowed_labels')
@patch('cmd.check_pr_status')
@patch('argparse.ArgumentParser.parse_known_args')
def test_label_command_invalid_labels(self, mock_parse_args, mock_check_pr_status, mock_get_labels):
"""Test label command with invalid labels that cannot be corrected"""
mock_get_labels.return_value = ['T1-FRAME', 'R0-no-crate-publish-required', 'D2-substantial']
mock_check_pr_status.return_value = True # PR is open
mock_parse_args.return_value = (argparse.Namespace(
command='label',
labels=['INVALID-LABEL', 'ANOTHER-BAD-LABEL']
), [])
with patch('sys.exit') as mock_exit, patch('builtins.print') as mock_print:
import cmd
cmd.main()
mock_exit.assert_called_with(1) # Should exit with error code
# Check for error JSON output
error_json_call = None
for call in mock_print.call_args_list:
if 'ERROR_JSON:' in str(call):
error_json_call = call
break
self.assertIsNotNone(error_json_call)
self.assertIn('validation_failed', str(error_json_call))
@patch.dict('os.environ', {'PR_NUM': '123', 'IS_ORG_MEMBER': 'true', 'IS_PR_AUTHOR': 'false', 'GITHUB_TOKEN': 'fake_token'})
@patch('cmd.get_allowed_labels')
@patch('cmd.check_pr_status')
@patch('argparse.ArgumentParser.parse_known_args')
def test_label_command_mixed_valid_invalid(self, mock_parse_args, mock_check_pr_status, mock_get_labels):
"""Test label command with mix of valid and invalid labels"""
mock_get_labels.return_value = ['T1-FRAME', 'R0-no-crate-publish-required', 'D2-substantial']
mock_check_pr_status.return_value = True # PR is open
mock_parse_args.return_value = (argparse.Namespace(
command='label',
labels=['T1-FRAME', 'INVALID-LABEL', 'D2-substantial']
), [])
with patch('sys.exit') as mock_exit, patch('builtins.print') as mock_print:
import cmd
cmd.main()
mock_exit.assert_called_with(1) # Should exit with error code due to invalid label
# Check for error JSON output
error_json_call = None
for call in mock_print.call_args_list:
if 'ERROR_JSON:' in str(call):
error_json_call = call
break
self.assertIsNotNone(error_json_call)
@patch.dict('os.environ', {'PR_NUM': '123', 'IS_ORG_MEMBER': 'true', 'IS_PR_AUTHOR': 'false', 'GITHUB_TOKEN': 'fake_token'})
@patch('cmd.get_allowed_labels')
@patch('cmd.check_pr_status')
@patch('argparse.ArgumentParser.parse_known_args')
def test_label_command_fetch_failure(self, mock_parse_args, mock_check_pr_status, mock_get_labels):
"""Test label command when label fetching fails"""
mock_get_labels.side_effect = RuntimeError("Failed to fetch labels from repository. Please check your connection and try again.")
mock_check_pr_status.return_value = True # PR is open
mock_parse_args.return_value = (argparse.Namespace(
command='label',
labels=['T1-FRAME']
), [])
with patch('sys.exit') as mock_exit, patch('builtins.print') as mock_print:
import cmd
cmd.main()
mock_exit.assert_called_with(1) # Should exit with error code
# Check for error JSON output
error_json_call = None
for call in mock_print.call_args_list:
if 'ERROR_JSON:' in str(call):
error_json_call = call
break
self.assertIsNotNone(error_json_call)
self.assertIn('Failed to fetch labels from repository', str(error_json_call))
def test_auto_correct_labels_function(self):
"""Test the auto_correct_labels function directly"""
import cmd
valid_labels = ['T1-FRAME', 'R0-no-crate-publish-required', 'D2-substantial', 'I2-bug']
# Test high similarity auto-correction
corrections, suggestions = cmd.auto_correct_labels(['T1-FRAM'], valid_labels)
self.assertEqual(len(corrections), 1)
self.assertEqual(corrections[0][0], 'T1-FRAM')
self.assertEqual(corrections[0][1], 'T1-FRAME')
# Test low similarity suggestions
corrections, suggestions = cmd.auto_correct_labels(['TOTALLY-WRONG'], valid_labels)
self.assertEqual(len(corrections), 0)
self.assertEqual(len(suggestions), 1)
def test_find_closest_labels_function(self):
"""Test the find_closest_labels function directly"""
import cmd
valid_labels = ['T1-FRAME', 'T2-pallets', 'R0-no-crate-publish-required']
# Test finding close matches
matches = cmd.find_closest_labels('T1-FRAM', valid_labels)
self.assertIn('T1-FRAME', matches)
# Test no close matches
matches = cmd.find_closest_labels('COMPLETELY-DIFFERENT', valid_labels, cutoff=0.8)
self.assertEqual(len(matches), 0)
@patch.dict('os.environ', {'PR_NUM': '123', 'IS_ORG_MEMBER': 'true', 'IS_PR_AUTHOR': 'false', 'GITHUB_TOKEN': 'fake_token'})
@patch('cmd.get_allowed_labels')
@patch('cmd.check_pr_status')
@patch('argparse.ArgumentParser.parse_known_args')
def test_label_command_merged_pr(self, mock_parse_args, mock_check_pr_status, mock_get_labels):
"""Test label command on merged PR should fail"""
mock_get_labels.return_value = ['T1-FRAME', 'R0-no-crate-publish-required']
mock_check_pr_status.return_value = False # PR is merged/closed
mock_parse_args.return_value = (argparse.Namespace(
command='label',
labels=['T1-FRAME']
), [])
with patch('sys.exit') as mock_exit, patch('builtins.print') as mock_print:
import cmd
cmd.main()
mock_exit.assert_called_with(1)
# Check for error JSON output
error_json_call = None
for call in mock_print.call_args_list:
if 'ERROR_JSON:' in str(call):
error_json_call = call
break
self.assertIsNotNone(error_json_call)
self.assertIn('Cannot modify labels on merged PRs', str(error_json_call))
@patch.dict('os.environ', {'PR_NUM': '123', 'IS_ORG_MEMBER': 'true', 'IS_PR_AUTHOR': 'false', 'GITHUB_TOKEN': 'fake_token'})
@patch('cmd.get_allowed_labels')
@patch('cmd.check_pr_status')
@patch('argparse.ArgumentParser.parse_known_args')
def test_label_command_open_pr(self, mock_parse_args, mock_check_pr_status, mock_get_labels):
"""Test label command on open PR should succeed"""
mock_get_labels.return_value = ['T1-FRAME', 'R0-no-crate-publish-required']
mock_check_pr_status.return_value = True # PR is open
mock_parse_args.return_value = (argparse.Namespace(
command='label',
labels=['T1-FRAME']
), [])
with patch('sys.exit') as mock_exit, patch('builtins.print') as mock_print:
import cmd
cmd.main()
mock_exit.assert_not_called()
# Check that JSON output was printed
json_call = None
for call in mock_print.call_args_list:
if 'LABELS_JSON:' in str(call):
json_call = call
break
self.assertIsNotNone(json_call)
@patch.dict('os.environ', {'PR_NUM': '123', 'IS_ORG_MEMBER': 'false', 'IS_PR_AUTHOR': 'false', 'GITHUB_TOKEN': 'fake_token'})
@patch('cmd.get_allowed_labels')
@patch('cmd.check_pr_status')
@patch('argparse.ArgumentParser.parse_known_args')
def test_label_command_unauthorized_user(self, mock_parse_args, mock_check_pr_status, mock_get_labels):
    """Label command from a user who is neither author nor org member fails."""
    mock_get_labels.return_value = ['T1-FRAME', 'R0-no-crate-publish-required']
    mock_check_pr_status.return_value = True  # PR is open
    mock_parse_args.return_value = (argparse.Namespace(command='label', labels=['T1-FRAME']), [])
    with patch('sys.exit') as mock_exit, patch('builtins.print') as mock_print:
        import cmd
        cmd.main()
        mock_exit.assert_called_with(1)
        # Find the printed error payload among all print() calls.
        error_output = next(
            (c for c in mock_print.call_args_list if 'ERROR_JSON:' in str(c)),
            None,
        )
        self.assertIsNotNone(error_output)
        self.assertIn('Only the PR author or organization members can modify labels', str(error_output))
@patch.dict('os.environ', {'PR_NUM': '123', 'IS_ORG_MEMBER': 'false', 'IS_PR_AUTHOR': 'true', 'GITHUB_TOKEN': 'fake_token'})
@patch('cmd.get_allowed_labels')
@patch('cmd.check_pr_status')
@patch('argparse.ArgumentParser.parse_known_args')
def test_label_command_pr_author(self, mock_parse_args, mock_check_pr_status, mock_get_labels):
    """Label command issued by the PR author succeeds."""
    mock_get_labels.return_value = ['T1-FRAME', 'R0-no-crate-publish-required']
    mock_check_pr_status.return_value = True  # PR is open
    mock_parse_args.return_value = (argparse.Namespace(command='label', labels=['T1-FRAME']), [])
    with patch('sys.exit') as mock_exit, patch('builtins.print') as mock_print:
        import cmd
        cmd.main()
        mock_exit.assert_not_called()
        # Find the printed labels payload among all print() calls.
        labels_output = next(
            (c for c in mock_print.call_args_list if 'LABELS_JSON:' in str(c)),
            None,
        )
        self.assertIsNotNone(labels_output)
@patch.dict('os.environ', {'PR_NUM': '123', 'IS_ORG_MEMBER': 'true', 'IS_PR_AUTHOR': 'false', 'GITHUB_TOKEN': 'fake_token'})
@patch('cmd.get_allowed_labels')
@patch('cmd.check_pr_status')
@patch('argparse.ArgumentParser.parse_known_args')
def test_label_command_org_member(self, mock_parse_args, mock_check_pr_status, mock_get_labels):
    """Label command issued by an organization member succeeds."""
    mock_get_labels.return_value = ['T1-FRAME', 'R0-no-crate-publish-required']
    mock_check_pr_status.return_value = True  # PR is open
    mock_parse_args.return_value = (argparse.Namespace(command='label', labels=['T1-FRAME']), [])
    with patch('sys.exit') as mock_exit, patch('builtins.print') as mock_print:
        import cmd
        cmd.main()
        mock_exit.assert_not_called()
        # Find the printed labels payload among all print() calls.
        labels_output = next(
            (c for c in mock_print.call_args_list if 'LABELS_JSON:' in str(c)),
            None,
        )
        self.assertIsNotNone(labels_output)
# Run the full test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
+588
View File
@@ -0,0 +1,588 @@
#!/usr/bin/env bash
# NOTE: this library uses bash-only features further down (arrays, [[ ]],
# the `function` keyword, pushd/popd, <<<, BASH_REMATCH), so it must be
# interpreted by bash, not plain /bin/sh.
api_base="https://api.github.com/repos"
# Print commit subjects between two git refs/tags that reference a PR
# (i.e. contain something like "(#1234)"), with any leading "* " stripped.
# Usage: sanitised_git_logs <from> <to>
sanitised_git_logs(){
  git --no-pager log --pretty=format:"%s" "$1...$2" \
    | grep -E '\(#[0-9]+\)' \
    | sed 's/^* //g'
}
# Checks whether a tag on github has been verified
# repo: 'organization/repo'
# tagver: 'v1.2.3'
# Returns: 0 = verified, 1 = not verified, 2 = tag not found
# Usage: check_tag $repo $tagver
check_tag () {
  repo=$1
  tagver=$2
  # Pick a token once and reuse it for both requests. Previously the second
  # request always used GITHUB_RELEASE_TOKEN, even when the tag had been
  # fetched with the unprivileged GITHUB_PR_TOKEN.
  if [ -n "$GITHUB_RELEASE_TOKEN" ]; then
    echo '[+] Fetching tag using privileged token'
    token="$GITHUB_RELEASE_TOKEN"
  else
    echo '[+] Fetching tag using unprivileged token'
    token="$GITHUB_PR_TOKEN"
  fi
  tag_out=$(curl -H "Authorization: token $token" -s "$api_base/$repo/git/refs/tags/$tagver")
  tag_sha=$(echo "$tag_out" | jq -r .object.sha)
  object_url=$(echo "$tag_out" | jq -r .object.url)
  if [ "$tag_sha" = "null" ]; then
    return 2
  fi
  echo "[+] Tag object SHA: $tag_sha"
  verified_str=$(curl -H "Authorization: token $token" -s "$object_url" | jq -r .verification.verified)
  if [ "$verified_str" = "true" ]; then
    # Verified, everything is good
    return 0
  else
    # Not verified. Bad juju.
    return 1
  fi
}
# Checks whether a given PR has a given label.
# repo: 'organization/repo'
# pr_id: 12345
# label: B1-silent
# Returns 0 (true) iff the PR carries the label.
# Usage: has_label $repo $pr_id $label
has_label(){
  repo="$1"
  pr_id="$2"
  label="$3"
  # These will exist if the function is called in Gitlab.
  # If the function's called in Github, we should have GITHUB_ACCESS_TOKEN set
  # already.
  if [ -n "$GITHUB_RELEASE_TOKEN" ]; then
    GITHUB_TOKEN="$GITHUB_RELEASE_TOKEN"
  elif [ -n "$GITHUB_PR_TOKEN" ]; then
    GITHUB_TOKEN="$GITHUB_PR_TOKEN"
  fi
  out=$(curl -H "Authorization: token $GITHUB_TOKEN" -s "$api_base/$repo/pulls/$pr_id")
  # The bracket test's status is the function's return value: non-empty jq
  # output means a label object with the requested name exists.
  [ -n "$(echo "$out" | tr -d '\r\n' | jq ".labels | .[] | select(.name==\"$label\")")" ]
}
# Trigger the github-api GitLab pipeline to apply label $1 to the current PR.
# Requires CI_JOB_TOKEN, CI_COMMIT_REF_NAME (the PR number), GITLAB_API and
# GITHUB_API_PROJECT in the environment.
github_label () {
  echo
  echo "# run github-api job for labeling it ${1}"
  curl -sS -X POST \
    -F "token=${CI_JOB_TOKEN}" \
    -F "ref=master" \
    -F "variables[LABEL]=${1}" \
    -F "variables[PRNO]=${CI_COMMIT_REF_NAME}" \
    -F "variables[PROJECT]=pezkuwichain/pezkuwi" \
    "${GITLAB_API}/projects/${GITHUB_API_PROJECT}/trigger/pipeline"
}
# Formats a message into a JSON string for posting to Matrix
# message: 'any plaintext message'
# formatted_message: '<strong>optional message formatted in <em>html</em></strong>'
# Usage: structure_message $content $formatted_content (optional)
structure_message() {
  # jq's {$body} shorthand expands to {"body": <value>}, so the resulting
  # object carries the Matrix "body" (and optionally "formatted_body") fields.
  if [ -z "$2" ]; then
    body=$(jq -Rs --arg body "$1" '{"msgtype": "m.text", $body}' < /dev/null)
  else
    body=$(jq -Rs --arg body "$1" --arg formatted_body "$2" '{"msgtype": "m.text", $body, "format": "org.matrix.custom.html", $formatted_body}' < /dev/null)
  fi
  echo "$body"
}
# Post a message to a matrix room.
# $1: JSON body produced by structure_message
# $2: room id, e.g. !fsfSRjgjBWEWffws:matrix.parity.io
# $3: access token (see https://matrix.org/docs/guides/client-server-api/)
send_message() {
  msg_body="$1"
  room_id="$2"
  access_token="$3"
  curl -XPOST -d "$msg_body" "https://m.parity.io/_matrix/client/r0/rooms/$room_id/send/m.room.message?access_token=$access_token"
}
# Pretty-printing helpers: frame bold text between "|" gutter lines.
# boldprint takes its text as arguments; boldcat reads lines from stdin.
boldprint () {
  printf "|\n| \033[1m%s\033[0m\n|\n" "${@}"
}
boldcat () {
  printf "|\n"
  while read -r line; do
    printf "| \033[1m%s\033[0m\n" "${line}"
  done
  printf "|\n"
}
# Exit 0 early (skipping the rest of the job) when the current PR is a
# companion PR, i.e. its title starts with "companion" (case-insensitive).
# Requires CI_COMMIT_REF_NAME (the PR number) and GITHUB_PR_TOKEN.
skip_if_companion_pr() {
  url="https://api.github.com/repos/pezkuwichain/pezkuwi/pulls/${CI_COMMIT_REF_NAME}"
  echo "[+] API URL: $url"
  pr_title=$(curl -sSL -H "Authorization: token ${GITHUB_PR_TOKEN}" "$url" | jq -r .title)
  echo "[+] PR title: $pr_title"
  if echo "$pr_title" | grep -qi '^companion'; then
    echo "[!] PR is a companion PR. Build is already done in substrate"
    exit 0
  else
    echo "[+] PR is not a companion PR. Proceeding test"
  fi
}
# Print the tag name of the latest GitHub release of a repository.
# $1: 'organisation/repo'
# Usage: latest_release 'pezkuwichain/pezkuwi'
latest_release() {
  repo_path="$1"
  curl -s "$api_base/$repo_path/releases/latest" | jq -r '.tag_name'
}
# Check for runtime changes between two commits: any change under
# /primitives/src/ or the production chains under /runtime.
# Returns 0 when runtime-relevant files changed, 1 otherwise.
has_runtime_changes() {
  changed_files="$(git diff --name-only "$1...$2")"
  if echo "$changed_files" | grep -q -e '^runtime/pezkuwi' -e '^runtime/kusama' -e '^primitives/src/' -e '^runtime/common'; then
    return 0
  fi
  return 1
}
# given a bootnode and the path to a chainspec file, this function will create a new chainspec file
# with only the bootnode specified and test whether that bootnode provides peers
# The optional third argument is the index of the bootnode in the list of bootnodes, this is just used to pick an ephemeral
# port for the node to run on. If you're only testing one, it'll just use the first ephemeral port
# BOOTNODE: /dns/pezkuwi-connect-0.parity.io/tcp/443/wss/p2p/12D3KooWEPmjoRpDSUuiTjvyNDd8fejZ9eNWH5bE965nyBMDrB4o
# BASE_CHAINSPEC: /path/to/pezkuwi.json
check_bootnode(){
  BOOTNODE=$1
  BASE_CHAINSPEC=$2
  RUNTIME=$(basename "$BASE_CHAINSPEC" | cut -d '.' -f 1)
  MIN_PEERS=1
  # Generate a temporary chainspec file containing only the bootnode we care about
  TMP_CHAINSPEC_FILE="$RUNTIME.$(echo "$BOOTNODE" | tr '/' '_').tmp.json"
  # BUGFIX: read from the function argument "$BASE_CHAINSPEC"; the previous
  # code referenced $CHAINSPEC_FILE, which is never set in this function.
  jq ".bootNodes = [\"$BOOTNODE\"] " < "$BASE_CHAINSPEC" > "$TMP_CHAINSPEC_FILE"
  # Grab an unused port by binding to port 0 and then immediately closing the socket
  # This is a bit of a hack, but it's the only way to do it in the shell
  RPC_PORT=$(python -c "import socket; s=socket.socket(); s.bind(('', 0)); print(s.getsockname()[1]); s.close()")
  echo "[+] Checking bootnode $BOOTNODE"
  pezkuwi --chain "$TMP_CHAINSPEC_FILE" --no-mdns --rpc-port="$RPC_PORT" --tmp > /dev/null 2>&1 &
  # Capture the node's PID immediately after backgrounding it, before anything
  # else can run (clearer than reading $! after the sleep).
  PEZKUWI_PID=$!
  # Wait a few seconds for the node to start up
  sleep 5
  MAX_POLLS=10
  TIME_BETWEEN_POLLS=3
  for _ in $(seq 1 "$MAX_POLLS"); do
    # Check the health endpoint of the RPC node
    PEERS="$(curl -s -X POST -H "Content-Type: application/json" --data '{"jsonrpc":"2.0","method":"system_health","params":[],"id":1}' http://localhost:"$RPC_PORT" | jq -r '.result.peers')"
    # Sometimes due to machine load or other reasons, we don't get a response from the RPC node
    # If $PEERS is an empty variable, make it 0 so we can still do the comparison
    if [ -z "$PEERS" ]; then
      PEERS=0
    fi
    if [ "$PEERS" -ge $MIN_PEERS ]; then
      echo "[+] $PEERS peers found for $BOOTNODE"
      echo " Bootnode appears contactable"
      kill $PEZKUWI_PID
      # Delete the temporary chainspec file now we're done running the node
      rm "$TMP_CHAINSPEC_FILE"
      return 0
    fi
    sleep "$TIME_BETWEEN_POLLS"
  done
  kill $PEZKUWI_PID
  # Delete the temporary chainspec file now we're done running the node
  rm "$TMP_CHAINSPEC_FILE"
  echo "[!] No peers found for $BOOTNODE"
  echo " Bootnode appears unreachable"
  return 1
}
# Download every asset of a GitHub release into OUTPUT_DIR.
# Assumes the ENV are set:
# - RELEASE_ID
# - GITHUB_TOKEN
# - REPO in the form pezkuwichain/pezkuwi
# OUTPUT_DIR defaults to ./release-artifacts/$BINARY.
# NOTE(review): uses bash arrays and pushd/popd — requires bash, not sh.
fetch_release_artifacts() {
  echo "Release ID : $RELEASE_ID"
  echo "Repo : $REPO"
  echo "Binary : $BINARY"
  OUTPUT_DIR=${OUTPUT_DIR:-"./release-artifacts/${BINARY}"}
  echo "OUTPUT_DIR : $OUTPUT_DIR"
  echo "Fetching release info..."
  curl -L -s \
    -H "Accept: application/vnd.github+json" \
    -H "Authorization: Bearer ${GITHUB_TOKEN}" \
    -H "X-GitHub-Api-Version: 2022-11-28" \
    https://api.github.com/repos/${REPO}/releases/${RELEASE_ID} > release.json
  echo "Extract asset ids..."
  # Word-split jq's output into a bash array of numeric asset ids.
  ids=($(jq -r '.assets[].id' < release.json ))
  echo "Extract asset count..."
  count=$(jq '.assets|length' < release.json )
  # Fetch artifacts
  mkdir -p "$OUTPUT_DIR"
  pushd "$OUTPUT_DIR" > /dev/null
  echo "Fetching assets..."
  iter=1
  for id in "${ids[@]}"
  do
    echo " - $iter/$count: downloading asset id: $id..."
    # The octet-stream Accept header makes the assets endpoint return the
    # binary content instead of JSON metadata.
    curl -s -OJ -L -H "Accept: application/octet-stream" \
      -H "Authorization: Token ${GITHUB_TOKEN}" \
      "https://api.github.com/repos/${REPO}/releases/assets/$id"
    iter=$((iter + 1))
  done
  pwd
  ls -al --color
  popd > /dev/null
}
# Fetch rpm package from S3.
# $1: binary name. Requires VERSION (git tag) and NODE_VERSION (code version)
# in the environment. Downloads into OUTPUT_DIR.
fetch_rpm_package_from_s3() {
  BINARY=$1
  OUTPUT_DIR=${OUTPUT_DIR:-"./release-artifacts/${BINARY}"}
  echo "--- Preparing to fetch RPM package ---"
  echo "Git Tag (VERSION): $VERSION"
  echo "Code Version (NODE_VERSION): $NODE_VERSION"
  # Quote the argument so unusual binary names don't word-split.
  URL_BASE=$(get_s3_url_base "$BINARY")
  # CORRECTED FILENAME: Changed underscore to hyphen to match the uploaded file.
  FILENAME="${BINARY}-${NODE_VERSION}-1.x86_64.rpm"
  URL="${URL_BASE}/${VERSION}/x86_64-unknown-linux-gnu/${FILENAME}"
  echo "Constructed URL: $URL"
  echo "------------------------------------"
  mkdir -p "$OUTPUT_DIR"
  pushd "$OUTPUT_DIR" > /dev/null
  echo "Fetching rpm package..."
  # This curl command will now succeed because the URL is correct.
  curl --fail --progress-bar -LO "$URL"
  echo "Download successful."
  ls -al
  popd > /dev/null
}
# Fetch deb package from S3.
# $1: binary name. Requires NODE_VERSION, VERSION and REPO in the environment.
fetch_debian_package_from_s3() {
  BINARY=$1
  echo "Version : $NODE_VERSION"
  echo "Repo : $REPO"
  echo "Binary : $BINARY"
  echo "Tag : $VERSION"
  OUTPUT_DIR=${OUTPUT_DIR:-"./release-artifacts/${BINARY}"}
  echo "OUTPUT_DIR : $OUTPUT_DIR"
  URL_BASE=$(get_s3_url_base "$BINARY")
  echo "URL_BASE=$URL_BASE"
  URL=$URL_BASE/$VERSION/x86_64-unknown-linux-gnu/${BINARY}_${NODE_VERSION}_amd64.deb
  mkdir -p "$OUTPUT_DIR"
  pushd "$OUTPUT_DIR" > /dev/null
  echo "Fetching deb package..."
  # BUGFIX: echo does not interpret a "%s" format specifier — use printf.
  printf 'Fetching %s\n' "$URL"
  curl --progress-bar -LO "$URL" || echo "Missing $URL"
  pwd
  ls -al --color
  popd > /dev/null
}
# Fetch the release artifacts (binary, .sha256, .asc) from S3.
# inputs: binary (pezkuwi), target (aarch64-apple-darwin); VERSION from env.
fetch_release_artifacts_from_s3() {
  BINARY=$1
  TARGET=$2
  OUTPUT_DIR=${OUTPUT_DIR:-"./release-artifacts/${TARGET}/${BINARY}"}
  echo "OUTPUT_DIR : $OUTPUT_DIR"
  URL_BASE=$(get_s3_url_base "$BINARY")
  echo "URL_BASE=$URL_BASE"
  URL_BINARY=$URL_BASE/$VERSION/$TARGET/$BINARY
  URL_SHA=$URL_BASE/$VERSION/$TARGET/$BINARY.sha256
  URL_ASC=$URL_BASE/$VERSION/$TARGET/$BINARY.asc
  # Fetch artifacts
  mkdir -p "$OUTPUT_DIR"
  pushd "$OUTPUT_DIR" > /dev/null
  echo "Fetching artifacts..."
  for URL in $URL_BINARY $URL_SHA $URL_ASC; do
    # BUGFIX: echo does not interpret a "%s" format specifier — use printf.
    printf 'Fetching %s\n' "$URL"
    curl --progress-bar -LO "$URL" || echo "Missing $URL"
  done
  pwd
  ls -al --color
  popd > /dev/null
}
# Pass the name of the binary as input, it will
# return the s3 base url (no trailing newline).
# Exits 1 for unsupported binaries.
function get_s3_url_base() {
  name=$1
  case $name in
    pezkuwi | pezkuwi-execute-worker | pezkuwi-prepare-worker )
      printf "https://releases.parity.io/pezkuwi"
      ;;
    pezkuwi-teyrchain)
      printf "https://releases.parity.io/pezkuwi-teyrchain"
      ;;
    pezkuwi-omni-node)
      printf "https://releases.parity.io/pezkuwi-omni-node"
      ;;
    chain-spec-builder)
      printf "https://releases.parity.io/chain-spec-builder"
      ;;
    frame-omni-bencher)
      printf "https://releases.parity.io/frame-omni-bencher"
      ;;
    *)
      # BUGFIX: never use variable data as the printf format string; send the
      # error to stderr so $(get_s3_url_base ...) callers don't capture it.
      printf 'UNSUPPORTED BINARY %s\n' "$name" >&2
      exit 1
      ;;
  esac
}
# Check the checksum for a given binary.
# $1: path to the binary; expects a sibling "$1.sha256" checksum file.
function check_sha256() {
  echo "Checking SHA256 for $1"
  # Quote the path so filenames containing spaces don't word-split.
  shasum -qc "$1.sha256"
}
# Import GPG keys of the release team members and mark them trusted.
# Keyserver defaults to hkps://keyserver.ubuntu.com (override via GPG_KEYSERVER).
function import_gpg_keys() {
  GPG_KEYSERVER=${GPG_KEYSERVER:-"hkps://keyserver.ubuntu.com"}
  SEC="9D4B2B6EB8F97156D19669A9FF0812D491B96798"
  EGOR="E6FC4D4782EB0FA64A4903CCDB7D3555DD3932D3"
  MORGAN="2E92A9D8B15D7891363D1AE8AF9E6C43F7F8C4CF"
  PARITY_RELEASES="90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE"
  PARITY_RELEASES_SIGN_COMMITS="D8018FBB3F534D866A45998293C5FB5F6A367B51"
  echo "Importing GPG keys from $GPG_KEYSERVER"
  for key in $SEC $EGOR $MORGAN $PARITY_RELEASES $PARITY_RELEASES_SIGN_COMMITS; do
    (
      echo "Importing GPG key $key"
      gpg --no-tty --quiet --keyserver $GPG_KEYSERVER --recv-keys $key
      # Answer "5" (ultimate trust) then "y" to gpg's interactive trust prompt.
      echo -e "5\ny\n" | gpg --no-tty --command-fd 0 --expert --edit-key $key trust;
    )
  done
  # NOTE(review): the subshells above run in the foreground (no trailing &),
  # so this `wait` is effectively a no-op — confirm whether the key imports
  # were meant to run in parallel.
  wait
  gpg -k
}
# Check the GPG signature for a given binary.
# $1: path to the binary; expects a detached signature at "$1.asc".
function check_gpg() {
  echo "Checking GPG Signature for $1"
  # Quote both paths so filenames containing spaces don't word-split.
  gpg --no-tty --verify -q "$1.asc" "$1"
}
# GITHUB_REF will typically be like:
# - refs/heads/release-v1.2.3
# - refs/heads/release-pezkuwi-v1.2.3-rc2
# Prints the matched version (including the leading 'v'); returns 1 when the
# ref contains no version.
function get_version_from_ghref() {
  GITHUB_REF=$1
  stripped=${GITHUB_REF#refs/heads/release-}
  version_re="v([0-9]+\.[0-9]+\.[0-9]+)"
  [[ $stripped =~ $version_re ]] || return 1
  echo "${BASH_REMATCH[0]}"
}
# Print the latest rc tag for the given release version and product.
# $1: version (e.g. v1.2.3), $2: product ("pezkuwi" or "pezkuwi-teyrchain").
function get_latest_rc_tag() {
  version=$1
  product=$2
  case "$product" in
    pezkuwi)
      last_rc=$(git tag -l "$version-rc*" | sort -V | tail -n 1)
      ;;
    pezkuwi-teyrchain)
      last_rc=$(git tag -l "pezkuwi-teyrchains-$version-rc*" | sort -V | tail -n 1)
      ;;
  esac
  echo "${last_rc}"
}
# Print the next rc number: the trailing numeric suffix of the current rc tag
# plus one.
function increment_rc_tag() {
  current_suffix=$(echo "$1" | grep -Eo '[0-9]+$')
  echo $((current_suffix + 1))
}
# Collapse a trailing "<dir>/<child>/.." in a path, printing the parent.
function relative_parent() {
  printf '%s\n' "$1" | sed -E 's/(.*)\/(.*)\/\.\./\1/g'
}
# Find all the runtimes, it returns the result as JSON object, compatible to be
# used as Github Workflow Matrix. This call is exposed by the `scan` command and can be used as:
# podman run --rm -it -v /.../fellowship-runtimes:/build docker.io/chevdor/srtool:1.70.0-0.11.1 scan
# Requires bash (arrays) and the `toml` CLI.
function find_runtimes() {
  # NOTE(review): in grep, '[frame_support::runtime]!' is a bracket expression
  # matching ANY single listed character followed by '!', not the literal
  # attribute — confirm whether an escaped '#\[frame_support::runtime\]'
  # pattern was intended.
  libs=($(git grep -I -r --cached --max-depth 20 --files-with-matches '[frame_support::runtime]!' -- '*lib.rs'))
  re=".*-runtime$"
  JSON=$(jq --null-input '{ "include": [] }')
  # EXCLUDED_RUNTIMES is a space separated list of runtime names (without the -runtime postfix)
  # EXCLUDED_RUNTIMES=${EXCLUDED_RUNTIMES:-"substrate-test"}
  IFS=' ' read -r -a exclusions <<< "$EXCLUDED_RUNTIMES"
  for lib in "${libs[@]}"; do
    crate_dir=$(dirname "$lib")
    cargo_toml="$crate_dir/../Cargo.toml"
    name=$(toml get -r $cargo_toml 'package.name')
    chain=${name//-runtime/}
    # Only include crates named *-runtime that are not on the exclusion list.
    if [[ "$name" =~ $re ]] && ! [[ ${exclusions[@]} =~ $chain ]]; then
      lib_dir=$(dirname "$lib")
      runtime_dir=$(relative_parent "$lib_dir/..")
      ITEM=$(jq --null-input \
        --arg chain "$chain" \
        --arg name "$name" \
        --arg runtime_dir "$runtime_dir" \
        '{ "chain": $chain, "crate": $name, "runtime_dir": $runtime_dir }')
      JSON=$(echo $JSON | jq ".include += [$ITEM]")
    fi
  done
  echo $JSON
}
# Validate that $1 looks like a version (optionally "v"-prefixed, optionally
# with an "-rcN" suffix, e.g. v1.8.0 or v1.8.0-rc1) and print it.
# Exits 1 on anything else.
filter_version_from_input() {
  candidate=$1
  semver_re="^(v)?[0-9]+\.[0-9]+\.[0-9]+(-rc[0-9]+)?$"
  if [[ ! $candidate =~ $semver_re ]]; then
    echo "Invalid version: $candidate"
    exit 1
  fi
  echo $candidate
}
# Strip every non-digit from $1 and print the resulting release id.
# Exits 1 when nothing numeric remains.
check_release_id() {
  raw_input=$1
  digits_only=$(echo "$raw_input" | sed 's/[^0-9]//g')
  if [[ ! $digits_only =~ ^[0-9]+$ ]]; then
    echo "Invalid release_id from input: $raw_input"
    exit 1
  fi
  echo "$digits_only"
}
# Get latest release tag of pezkuwichain/pezkuwi-sdk (printed without a
# trailing newline). Requires GITHUB_TOKEN.
get_latest_release_tag() {
  TOKEN="Authorization: Bearer $GITHUB_TOKEN"
  latest_release_tag=$(curl -s -H "$TOKEN" $api_base/pezkuwichain/pezkuwi-sdk/releases/latest | jq -r '.tag_name')
  # BUGFIX: use an explicit format string — a tag containing '%' would
  # previously have been interpreted by printf as a format directive.
  printf '%s' "$latest_release_tag"
}
# Print the NODE_VERSION constant declared in primitives/src/lib.rs.
function get_pezkuwi_node_version_from_code() {
  # list all the files with node version
  git grep -e "\(NODE_VERSION[^=]*= \)\".*\"" |
  # fetch only the one we need
  grep "primitives/src/lib.rs:" |
  # Print only the version
  # NOTE(review): assumes the quoted version is the 7th whitespace-separated
  # field of the matched line — confirm against the source file's layout.
  awk '{ print $7 }' |
  # Remove the quotes
  sed 's/"//g' |
  # Remove the semicolon
  sed 's/;//g'
}
# Validate a stable tag such as "stable2407", "pezkuwi-stable2407-1" or
# "stable2407-rc2" and print it; exits 1 on mismatch.
validate_stable_tag() {
  candidate="$1"
  stable_re="^(pezkuwi-)?stable[0-9]{4}(-[0-9]+)?(-rc[0-9]+)?$"
  if [[ ! $candidate =~ $stable_re ]]; then
    echo "The input '$candidate' does not match the pattern."
    exit 1
  fi
  echo $candidate
}
# Prepare docker stable tag from the pezkuwi stable tag.
#
# input: tag (pezkuwi-stableYYMM(-X) or pezkuwi-stableYYMM(-X)-rcX)
# output: stableYYMM(-X) or stableYYMM(-X)-rcX; exits 1 when no stable
# segment is present.
prepare_docker_stable_tag() {
  full_tag="$1"
  if [[ "$full_tag" =~ stable[0-9]{4}(-[0-9]+)?(-rc[0-9]+)? ]]; then
    echo "${BASH_REMATCH[0]}"
  else
    echo "Tag is invalid: $full_tag"
    exit 1
  fi
}
# Parse branch names out of backport labels.
#
# input: labels — space-separated label list, e.g.
#   "A3-backport RO-silent A4-backport-stable2407 A4-backport-stable2503"
# output: space-separated branch names, e.g. "stable2407 stable2503"
parse_branch_names_from_backport_labels() {
  labels="$1"
  BRANCHES=""
  for current_label in $labels; do
    if [[ "$current_label" =~ ^A4-backport-(stable|unstable)[0-9]{4}$ ]]; then
      # The branch name is the label minus its "A4-backport-" prefix.
      BRANCHES+=" ${current_label#A4-backport-}"
    fi
  done
  # Trim the single leading space accumulated above.
  BRANCHES="${BRANCHES# }"
  echo "$BRANCHES"
}
# Extract the original PR number from a backport PR title
# (expected form: "[stableBranchName] Backport #originalPRNumber").
# Prints the number; exits 1 with a warning when no "#<digits>" is present.
extract_pr_number_from_pr_title() {
  title=$1
  if [[ ! "$title" =~ \#([0-9]+) ]]; then
    echo "⚠️ The PR title does not contain original PR number. PR title should be in form: [stableBranchName] Backport #originalPRNumber"
    exit 1
  fi
  echo ${BASH_REMATCH[1]}
}
+49
View File
@@ -0,0 +1,49 @@
"""
Script to deny Git dependencies in the Cargo workspace. Can be passed one optional argument for the
root folder. If not provided, it will use the cwd.
## Usage
python3 .github/scripts/deny-git-deps.py pezkuwi-sdk
"""
import os
import sys
from cargo_workspace import Workspace, DependencyLocation
# Some crates are allowed to have git dependencies until we fix them.
ALLOWED_GIT_DEPS = {
'subwasmlib': ['pezkuwi-zombienet-sdk-tests'],
}
root = sys.argv[1] if len(sys.argv) > 1 else os.getcwd()
workspace = Workspace.from_path(root)
errors = []
def check_dep(dep, used_by):
if dep.location != DependencyLocation.GIT:
return
if used_by in ALLOWED_GIT_DEPS.get(dep.name, []):
print(f'🤨 Ignoring git dependency {dep.name} in {used_by}')
else:
errors.append(f'🚫 Found git dependency {dep.name} in {used_by}')
# Check the workspace dependencies that can be inherited:
for dep in workspace.dependencies:
check_dep(dep, "workspace")
if workspace.crates.find_by_name(dep.name):
if dep.location != DependencyLocation.PATH:
errors.append(f'🚫 Workspace must use path to link local dependency {dep.name}')
# And the dependencies of each crate:
for crate in workspace.crates:
for dep in crate.dependencies:
check_dep(dep, crate.name)
if errors:
print('❌ Found errors:')
for error in errors:
print(error)
sys.exit(1)
+123
View File
@@ -0,0 +1,123 @@
#!/bin/bash
# Zombienet Workflow Dispatcher
#
# This script triggers GitHub Actions workflows for zombienet tests and monitors their execution.
# It can run workflows multiple times for reliability testing and optionally filter tests by pattern.
# Results are automatically saved to a timestamped CSV file for analysis.
#
# Features:
# - Trigger workflows on specific branches
# - Filter tests by pattern (useful for debugging specific tests)
# - Run workflows multiple times for flaky test detection
# - Monitor workflow completion and collect results
# - Export results to CSV with job details (ID, name, conclusion, timing, URLs)
#
# Requirements:
# - GitHub CLI (gh) must be installed and authenticated
# - Must be run from pezkuwi-sdk repository root
# - Target branch must have corresponding PR with CI enabled
# Exit on error
# set -e
# Print a timestamped log line ("YYYY-mm-dd HH:MM:SS - <message>") to stdout.
function dbg {
    printf "%s - %s\n" "$(date "+%Y-%m-%d %T")" "$*"
}
# Append one CSV row per completed zombienet-* job of a finished workflow run.
# $1: run id, $2: branch name, $3: output CSV file path.
function write_job_results_to_csv {
    local run_id="$1"
    local branch="$2"
    local csv_file="$3"
    dbg "Writing job results for run $run_id to $csv_file"
    # Keep only jobs named zombienet-* that actually concluded with
    # success or failure, and format each as a CSV row.
    local jq_filter='.jobs[] | select(.name | startswith("zombienet-")) |
      select(.conclusion == "success" or .conclusion == "failure") |
      [.databaseId, .name, .conclusion, .startedAt, "'"$branch"'", .url] | @csv'
    gh run view "$run_id" --json jobs --jq "$jq_filter" >> "$csv_file"
}
# Parse command line arguments
WORKFLOW_FILE=""
BRANCH=""
MAX_RESULT_CNT=-1
TEST_PATTERN=""
while getopts "w:b:m:p:h" opt; do
  case $opt in
    w) WORKFLOW_FILE="$OPTARG" ;;
    b) BRANCH="$OPTARG" ;;
    m) MAX_RESULT_CNT="$OPTARG" ;;
    p) TEST_PATTERN="$OPTARG" ;;
    h) echo "Usage: $0 -w <workflow-file> -b <branch> [-m max-triggers] [-p test-pattern]"
       echo " -w: Workflow file (required)"
       echo " -b: Branch name (required)"
       echo " -m: Maximum number of triggers (optional, default: infinite)"
       echo " -p: Test pattern for workflow input (optional)"
       exit 0 ;;
    \?) echo "Invalid option -$OPTARG" >&2
        echo "Use -h for help"
        exit 1 ;;
  esac
done
if [[ -z "$WORKFLOW_FILE" || -z "$BRANCH" ]]; then
  echo "Error: Both workflow file (-w) and branch (-b) are required"
  echo "Usage: $0 -w <workflow-file> -b <branch> [-m max-triggers] [-p test-pattern]"
  echo "Use -h for help"
  exit 1
fi
# Create CSV file with headers
CSV_FILE="workflow_results_$(date +%Y%m%d_%H%M%S).csv"
echo "job_id,job_name,conclusion,started_at,branch,job_url" > "$CSV_FILE"
dbg "Created CSV file: $CSV_FILE"
dbg "Starting loop for workflow: $WORKFLOW_FILE on branch: $BRANCH"
TRIGGER_CNT=0
RESULT_CNT=0
# Main loop: wait for the previous dispatch to finish, record its job results,
# then trigger the workflow again. Runs until MAX_RESULT_CNT results have been
# collected (forever when -m was not given, i.e. MAX_RESULT_CNT == -1).
while [[ $MAX_RESULT_CNT -eq -1 || $RESULT_CNT -lt $MAX_RESULT_CNT ]]; do
  dbg "Waiting until workflow $WORKFLOW_FILE (branch: $BRANCH) jobs are completed"
  while true ; do
    echo ""
    gh run list --workflow=$WORKFLOW_FILE -e workflow_dispatch -b $BRANCH -L 5
    sleep 2
    # if job is completed it should have non-empty conclusion field
    ALL_JOBS_COMPLETED=$(gh run list --workflow=$WORKFLOW_FILE -e workflow_dispatch -b $BRANCH --json conclusion --jq 'all(.[]; .conclusion != "")')
    if [[ "$ALL_JOBS_COMPLETED" == "true" ]]; then
      break
    fi
    sleep 60
  done
  dbg "Workflow $WORKFLOW_FILE (branch: $BRANCH) jobs completed"
  # Skip the first iteration - latest run id is not the one we triggered here
  if [ $TRIGGER_CNT -gt 0 ]; then
    # Get the most recent completed run ID and write job results to CSV
    LATEST_RUN_ID=$(gh run list --workflow=$WORKFLOW_FILE -e workflow_dispatch -b $BRANCH -L 1 --json databaseId --jq '.[0].databaseId')
    write_job_results_to_csv "$LATEST_RUN_ID" "$BRANCH" "$CSV_FILE"
    RESULT_CNT=$(( RESULT_CNT + 1 ))
  fi
  TRIGGER_CNT=$(( TRIGGER_CNT + 1 ))
  dbg "Triggering #$TRIGGER_CNT workflow $WORKFLOW_FILE (branch: $BRANCH)"
  # Forward the optional test pattern as a workflow_dispatch input.
  if [[ -n "$TEST_PATTERN" ]]; then
    gh workflow run "$WORKFLOW_FILE" --ref "$BRANCH" -f test_pattern="$TEST_PATTERN"
  else
    gh workflow run "$WORKFLOW_FILE" --ref "$BRANCH"
  fi
  dbg "Sleeping 60s"
  sleep 60
done
+156
View File
@@ -0,0 +1,156 @@
#!/usr/bin/env python3
"""
Generate the PrDoc for a Pull Request with a specific number, audience and bump level.
It downloads and parses the patch from the GitHub API to populate the prdoc with all modified crates.
This will delete any prdoc that already exists for the PR if `--force` is passed.
Usage:
python generate-prdoc.py --pr 1234 --audience node_dev --bump patch
"""
import argparse
import os
import re
import sys
import toml
import yaml
import requests
from github import Github
import whatthepatch
from cargo_workspace import Workspace
# Fetch the PR's metadata and patch from GitHub, then delegate to `create_prdoc`.
def from_pr_number(n, audience, bump, force):
    print(f"Fetching PR '{n}' from GitHub")
    repo = Github().get_repo("pezkuwichain/pezkuwi-sdk")
    pull = repo.get_pull(n)
    patch_text = requests.get(pull.patch_url).text
    create_prdoc(n, audience, pull.title, pull.body, patch_text, bump, force)
def translate_audience(audience):
    """Map snake_case audience ids to their human-readable display names.

    Returns a single string when exactly one audience was given, otherwise
    a list of names (unknown ids map to None).
    """
    display_names = {
        'runtime_dev': 'Runtime Dev',
        'runtime_user': 'Runtime User',
        'node_dev': 'Node Dev',
        'node_operator': 'Node Operator',
        'todo': 'Todo',
    }
    translated = [display_names.get(entry) for entry in audience]
    result = translated[0] if len(translated) == 1 else translated
    print(f"Translated audience '{audience}' to '{result}'")
    return result
def create_prdoc(pr, audience, title, description, patch, bump, force):
    """Write prdoc/pr_<pr>.prdoc describing the PR and the crates it touches.

    pr:          PR number (used for the output file name).
    audience:    list of audience ids, translated to display names.
    title:       PR title, copied into the document.
    description: PR body, copied into the document.
    patch:       unified-diff text of the PR; parsed to find modified crates.
    bump:        semver bump level applied to every modified crate.
    force:       overwrite an existing prdoc instead of exiting with status 1.
    """
    path = f"prdoc/pr_{pr}.prdoc"
    if os.path.exists(path):
        if force == True:
            print(f"Overwriting existing PrDoc for PR {pr}")
        else:
            print(f"PrDoc already exists for PR {pr}. Use --force to overwrite.")
            sys.exit(1)
    else:
        print(f"No preexisting PrDoc for PR {pr}")
    prdoc = { "title": title, "doc": [{}], "crates": [] }
    audience = translate_audience(audience)
    prdoc["doc"][0]["audience"] = audience
    prdoc["doc"][0]["description"] = description
    workspace = Workspace.from_path(".")
    # Collect every path the patch touches.
    modified_paths = []
    for diff in whatthepatch.parse_patch(patch):
        new_path = diff.header.new_path
        # Sometimes this lib returns `/dev/null` as the new path...
        if not new_path.startswith("/dev"):
            modified_paths.append(new_path)
    modified_crates = {}
    for p in modified_paths:
        # Go up until we find a Cargo.toml
        p = os.path.join(workspace.path, p)
        while not os.path.exists(os.path.join(p, "Cargo.toml")):
            if p == '/':
                exit(1)
            p = os.path.dirname(p)
        with open(os.path.join(p, "Cargo.toml")) as f:
            manifest = toml.load(f)
        # Virtual manifests (workspace roots) have no [package] table.
        if not "package" in manifest:
            continue
        crate_name = manifest["package"]["name"]
        if workspace.crate_by_name(crate_name).publish:
            modified_crates[crate_name] = True
        else:
            print(f"Skipping unpublished crate: {crate_name}")
    for crate_name in modified_crates.keys():
        entry = { "name": crate_name }
        # NOTE(review): 'no change' is checked here, but setup_parser() only
        # offers 'none' among the --bump choices — confirm which is intended.
        if bump == 'silent' or bump == 'ignore' or bump == 'no change':
            entry["validate"] = False
        else:
            entry["bump"] = bump
        print(f"Adding crate {entry}")
        prdoc["crates"].append(entry)
    # write the parsed PR documentation back to the file
    with open(path, "w") as f:
        yaml.dump(prdoc, f, sort_keys=False)
    print(f"PrDoc for PR {pr} written to {path}")
# Make the `description` a multiline string instead of escaping \r\n.
def setup_yaml():
    """Register a str presenter so multi-line strings dump as YAML block scalars."""
    def represent_str(dumper, data):
        lines = data.splitlines()
        if len(lines) > 1:
            cleaned = '\n'.join(line.rstrip() for line in data.strip().splitlines())
            return dumper.represent_scalar('tag:yaml.org,2002:str', cleaned, style='|')
        return dumper.represent_scalar('tag:yaml.org,2002:str', data)
    yaml.add_representer(str, represent_str)
# parse_args is also used by cmd/cmd.py
# if pr_required is False, then --pr is optional, as it can be derived from the PR comment body
def setup_parser(parser=None, pr_required=True):
    """Build (or extend) the argument parser used for prdoc generation."""
    allowed_audiences = ["runtime_dev", "runtime_user", "node_dev", "node_operator", "todo"]
    if parser is None:
        parser = argparse.ArgumentParser()
    # PR number; may be optional when derivable from the PR comment body.
    parser.add_argument("--pr", type=int, required=pr_required, help="The PR number to generate the PrDoc for.")
    # Zero or more audiences; defaults to the "todo" placeholder.
    parser.add_argument("--audience", type=str, nargs='*', choices=allowed_audiences, default=["todo"], help="The audience of whom the changes may concern. Example: --audience runtime_dev node_dev")
    parser.add_argument("--bump", type=str, default="major", choices=["patch", "minor", "major", "silent", "ignore", "none"], help="A default bump level for all crates. Example: --bump patch")
    parser.add_argument("--force", action="store_true", help="Whether to overwrite any existing PrDoc.")
    return parser
def snake_to_title(s):
    """Convert a snake_case identifier into space-separated capitalized words."""
    words = s.split('_')
    return ' '.join(map(str.capitalize, words))
def main(args):
    """Generate the prdoc for the requested PR; return a process exit code (0/1)."""
    print(f"Args: {args}, force: {args.force}")
    setup_yaml()
    try:
        from_pr_number(args.pr, args.audience, args.bump, args.force)
    except Exception as exc:
        print(f"Error generating prdoc: {exc}")
        return 1
    return 0
if __name__ == "__main__":
parser = setup_parser()
args = parser.parse_args()
main(args)
@@ -0,0 +1,6 @@
requests
cargo-workspace
PyGithub
whatthepatch
pyyaml
toml
+136
View File
@@ -0,0 +1,136 @@
#!/usr/bin/env python3
"""
A script to generate READMEs for all public crates,
if they do not already have one.
It relies on functions from the `check-workspace.py` script.
The resulting README is based on a template defined below,
and includes the crate name, description, license,
and optionally - the SDK release version.
# Example
```sh
python3 -m pip install toml
.github/scripts/generate-readmes.py . --sdk-version 1.15.0
```
"""
import os
import toml
import importlib
import argparse
check_workspace = importlib.import_module("check-workspace")
README_TEMPLATE = """<div align="center">
<img src="https://raw.githubusercontent.com/pezkuwichain/pezkuwi-sdk/master/docs/images/Pezkuwi_Logo.png" alt="Pezkuwi logo" width="200">
# {name}
This crate is part of the [Pezkuwi SDK](https://github.com/pezkuwichain/pezkuwi-sdk/).
</div>
## Description
{description}
## Additional Resources
In order to learn about Pezkuwi SDK, head over to the [Pezkuwi SDK Developer Documentation](https://pezkuwichain.github.io/pezkuwi-sdk/master/pezkuwi_sdk_docs/index.html).
To learn about Pezkuwi, visit [pezkuwichain.io](https://pezkuwichain.io/).
## License
This crate is licensed with {license}.
"""
VERSION_TEMPLATE = """
## Version
This version of `{name}` is associated with Pezkuwi {sdk_version} release.
"""
def generate_readme(member, *, workspace_dir, workspace_license, sdk_version):
    """Create README.md for one workspace member, if published and lacking one.

    member:            crate directory, relative to workspace_dir.
    workspace_dir:     path of the workspace root.
    workspace_license: license used when the crate inherits license.workspace.
    sdk_version:       optional SDK release version for a "Version" section.
    """
    print(f"Loading manifest for: {member}")
    manifest = toml.load(os.path.join(workspace_dir, member, "Cargo.toml"))
    if manifest["package"].get("publish", True) == False:
        print(f"⏩ Skipping un-published crate: {member}")
        return
    if os.path.exists(os.path.join(workspace_dir, member, "README.md")):
        print(f"⏩ Skipping crate with an existing readme: {member}")
        return
    print(f"📝 Generating README for: {member}")
    license = manifest["package"]["license"]
    # A table-valued license means `license.workspace = true` inheritance.
    if isinstance(license, dict):
        if not license.get("workspace", False):
            print(
                f"❌ License for {member} is unexpectedly declared as workspace=false."
            )
            # Skipping this crate as it is not clear what license it should use.
            return
        license = workspace_license
    name = manifest["package"]["name"]
    description = manifest["package"]["description"]
    # Ensure the description ends with a period for consistent rendering.
    description = description + "." if not description.endswith(".") else description
    filled_readme = README_TEMPLATE.format(
        name=name, description=description, license=license
    )
    if sdk_version:
        filled_readme += VERSION_TEMPLATE.format(name=name, sdk_version=sdk_version)
    with open(os.path.join(workspace_dir, member, "README.md"), "w") as new_readme:
        new_readme.write(filled_readme)
def parse_args():
    """Parse the command line of the readme generator.

    Returns:
        A ``(workspace_dir, sdk_version)`` tuple; ``sdk_version`` is ``None``
        when ``--sdk-version`` was not supplied.
    """
    arg_parser = argparse.ArgumentParser(
        description="Generate readmes for published crates."
    )
    arg_parser.add_argument(
        "workspace_dir",
        help="The directory to check",
        metavar="workspace_dir",
        type=str,
        nargs=1,
    )
    arg_parser.add_argument(
        "--sdk-version",
        help="Optional SDK release version",
        metavar="sdk_version",
        type=str,
        nargs=1,
        required=False,
    )
    parsed = arg_parser.parse_args()
    # nargs=1 wraps each value in a single-element list; unwrap here.
    workspace_dir = parsed.workspace_dir[0]
    sdk_version = parsed.sdk_version[0] if parsed.sdk_version else None
    return (workspace_dir, sdk_version)
def main():
    """Entry point: generate a README for every member of the workspace.

    Reads the root ``Cargo.toml`` to resolve the workspace-level license,
    then delegates to ``generate_readme`` per member.
    """
    (workspace_dir, sdk_version) = parse_args()
    root_manifest = toml.load(os.path.join(workspace_dir, "Cargo.toml"))
    workspace_license = root_manifest["workspace"]["package"]["license"]
    # get_members comes from the sibling "check-workspace" module loaded via
    # importlib at the top of this file.
    members = check_workspace.get_members(workspace_dir, [])
    for member in members:
        generate_readme(
            member,
            workspace_dir=workspace_dir,
            workspace_license=workspace_license,
            sdk_version=sdk_version,
        )


if __name__ == "__main__":
    main()
+63
View File
@@ -0,0 +1,63 @@
#!/usr/bin/env python3
"""
Zombienet Test Matrix Parser
This script parses YAML test definition files and converts them to JSON format
for use as GitHub Actions matrix jobs. It provides filtering capabilities to:
1. Exclude flaky tests (unless a specific test pattern is provided)
2. Filter tests by name pattern for targeted execution
3. Convert YAML test definitions to JSON matrix format
The script is used by GitHub Actions workflows to dynamically generate
test matrices based on YAML configuration files, enabling flexible
test execution and maintenance.
Usage:
python parse-zombienet-tests.py --matrix tests.yml [--flaky-tests flaky.txt] [--test-pattern pattern]
Output:
JSON array of test job objects suitable for GitHub Actions matrix strategy
"""
import argparse
import yaml
import json
import re
def parse_args():
    """Build and evaluate the CLI parser for the matrix filter.

    Returns:
        argparse.Namespace with ``matrix`` (required YAML path),
        ``flaky_tests`` (newline-separated names, default "") and
        ``test_pattern`` (regex, default "").
    """
    cli = argparse.ArgumentParser(
        description="Parse test matrix YAML file with optional filtering"
    )
    cli.add_argument(
        "--matrix", required=True, help="Path to the YAML matrix file"
    )
    cli.add_argument(
        "--flaky-tests",
        default="",
        help="Newline-separated list of flaky job names",
    )
    cli.add_argument(
        "--test-pattern", default="", help="Regex pattern to match job_name"
    )
    return cli.parse_args()
def load_jobs(matrix_path):
    """Load the list of job definitions from the YAML matrix file.

    Returns whatever the YAML document contains; downstream code expects a
    list of mappings each carrying a "job-name" key.
    """
    with open(matrix_path, "r") as f:
        return yaml.safe_load(f)
def filter_jobs(jobs, flaky_tests, test_pattern):
    """Select the jobs to run.

    When ``test_pattern`` is non-empty, keep exactly the jobs whose
    "job-name" matches the regex (flakiness is ignored in that case).
    Otherwise keep every job whose name is not listed in ``flaky_tests``.
    """
    flaky = {line.strip() for line in flaky_tests.splitlines() if line.strip()}
    if test_pattern:
        return [
            job
            for job in jobs
            if re.search(test_pattern, job.get("job-name", ""))
        ]
    return [job for job in jobs if job.get("job-name", "") not in flaky]
def main():
    """Parse CLI args, filter the matrix, and print it as JSON on stdout.

    The JSON output is consumed by GitHub Actions (via fromJSON) to build
    the job matrix.
    """
    args = parse_args()
    jobs = load_jobs(args.matrix)
    result = filter_jobs(jobs, args.flaky_tests, args.test_pattern)
    print(json.dumps(result))


if __name__ == "__main__":
    main()
@@ -0,0 +1,259 @@
"""
This script is used to turn the JSON report produced by the revive differential tests tool into an
easy to consume markdown document for the purpose of reporting this information in the Pezkuwi SDK
CI. The full models used in the JSON report can be found in the revive differential tests repo and
the models used in this script are just a partial reproduction of the full report models.
"""
import json, typing, io, sys
class Report(typing.TypedDict):
    """Top-level model of the JSON report emitted by the differential tests tool."""

    context: "Context"
    execution_information: dict["MetadataFilePathString", "MetadataFileReport"]


class MetadataFileReport(typing.TypedDict):
    """Report for a single metadata file, keyed by case index."""

    case_reports: dict["CaseIdxString", "CaseReport"]


class CaseReport(typing.TypedDict):
    """Report for a single case, keyed by compilation-mode string."""

    mode_execution_reports: dict["ModeString", "ExecutionReport"]


class ExecutionReport(typing.TypedDict):
    """Execution result for one (case, mode) pair."""

    status: "TestCaseStatus"


class Context(typing.TypedDict):
    """Report context; only the `Test` variant is consumed by this script."""

    Test: "TestContext"


class TestContext(typing.TypedDict):
    """Context of a test run."""

    corpus_configuration: "CorpusConfiguration"


class CorpusConfiguration(typing.TypedDict):
    """Configuration of the test corpus that was executed."""

    test_specifiers: list["TestSpecifier"]


class CaseStatusSuccess(typing.TypedDict):
    """Status payload for a case that succeeded."""

    status: typing.Literal["Succeeded"]
    steps_executed: int


class CaseStatusFailure(typing.TypedDict):
    """Status payload for a case that failed; `reason` is free-form text."""

    status: typing.Literal["Failed"]
    reason: str


class CaseStatusIgnored(typing.TypedDict):
    """Status payload for a case that was ignored; `reason` is free-form text."""

    status: typing.Literal["Ignored"]
    reason: str


TestCaseStatus = typing.Union[CaseStatusSuccess, CaseStatusFailure, CaseStatusIgnored]
"""A union type of all of the possible statuses that could be reported for a case."""
TestSpecifier = str
"""A test specifier string. For example resolc-compiler-tests/fixtures/solidity/test.json::0::Y+"""
ModeString = str
"""The mode string. For example Y+ >=0.8.13"""
MetadataFilePathString = str
"""The path to a metadata file. For example resolc-compiler-tests/fixtures/solidity/test.json"""
CaseIdxString = str
"""The index of a case as a string. For example '0'"""
PlatformString = typing.Union[
    typing.Literal["revive-dev-node-revm-solc"],
    typing.Literal["revive-dev-node-polkavm-resolc"],
]
"""A string of the platform on which the test was run"""
def path_relative_to_resolc_compiler_test_directory(path: str) -> str:
    """
    Given a path, this function returns the path relative to the resolc-compiler-test directory. The
    following is an example of an input and an output:
    Input: ~/pezkuwi-sdk/revive-differential-tests/resolc-compiler-tests/fixtures/solidity/test.json
    Output: test.json
    """
    marker = "resolc-compiler-tests/fixtures/solidity"
    # Everything after the last occurrence of the marker; when the marker is
    # absent, split() yields the whole string unchanged.
    tail = path.split(marker)[-1]
    return tail.strip("/")
def main() -> None:
    """Render ``report.md`` from the JSON report.

    argv[1]: path to the JSON report file.
    argv[2]: platform identifier string (see PlatformString).
    Side effect: writes a markdown document to ./report.md.
    """
    with open(sys.argv[1], "r") as file:
        report: Report = json.load(file)
    # Getting the platform string and resolving it into a simpler version of
    # itself.
    platform_identifier: PlatformString = typing.cast(PlatformString, sys.argv[2])
    if platform_identifier == "revive-dev-node-polkavm-resolc":
        platform: str = "PolkaVM"
    elif platform_identifier == "revive-dev-node-revm-solc":
        platform: str = "REVM"
    else:
        # Unknown identifiers are passed through verbatim rather than rejected.
        platform: str = platform_identifier
    # Starting the markdown document and adding information to it as we go.
    markdown_document: io.TextIOWrapper = open("report.md", "w")
    print(f"# Differential Tests Results ({platform})", file=markdown_document)
    # Getting all of the test specifiers from the report and making them relative to the tests dir.
    test_specifiers: list[str] = list(
        map(
            path_relative_to_resolc_compiler_test_directory,
            report["context"]["Test"]["corpus_configuration"]["test_specifiers"],
        )
    )
    print("## Specified Tests", file=markdown_document)
    for test_specifier in test_specifiers:
        print(f"* ``{test_specifier}``", file=markdown_document)
    # Counting the total number of test cases, successes, failures, and ignored tests
    total_number_of_cases: int = 0
    total_number_of_successes: int = 0
    total_number_of_failures: int = 0
    total_number_of_ignores: int = 0
    for _, mode_to_case_mapping in report["execution_information"].items():
        for _, case_idx_to_report_mapping in mode_to_case_mapping[
            "case_reports"
        ].items():
            for _, execution_report in case_idx_to_report_mapping[
                "mode_execution_reports"
            ].items():
                status: TestCaseStatus = execution_report["status"]
                total_number_of_cases += 1
                if status["status"] == "Succeeded":
                    total_number_of_successes += 1
                elif status["status"] == "Failed":
                    total_number_of_failures += 1
                elif status["status"] == "Ignored":
                    total_number_of_ignores += 1
                else:
                    raise Exception(
                        f"Encountered a status that's unknown to the script: {status}"
                    )
    print("## Counts", file=markdown_document)
    print(
        f"* **Total Number of Test Cases:** {total_number_of_cases}",
        file=markdown_document,
    )
    print(
        f"* **Total Number of Successes:** {total_number_of_successes}",
        file=markdown_document,
    )
    print(
        f"* **Total Number of Failures:** {total_number_of_failures}",
        file=markdown_document,
    )
    print(
        f"* **Total Number of Ignores:** {total_number_of_ignores}",
        file=markdown_document,
    )
    # Grouping the various test cases into dictionaries and groups depending on their status to make
    # them easier to include in the markdown document later on.
    successful_cases: dict[
        MetadataFilePathString, dict[CaseIdxString, set[ModeString]]
    ] = {}
    for metadata_file_path, mode_to_case_mapping in report[
        "execution_information"
    ].items():
        for case_idx_string, case_idx_to_report_mapping in mode_to_case_mapping[
            "case_reports"
        ].items():
            for mode_string, execution_report in case_idx_to_report_mapping[
                "mode_execution_reports"
            ].items():
                status: TestCaseStatus = execution_report["status"]
                # NOTE: this rebinds the outer loop variable to the relative
                # path; re-applying the relativization on later inner
                # iterations is a no-op, so the result is unaffected.
                metadata_file_path: str = (
                    path_relative_to_resolc_compiler_test_directory(metadata_file_path)
                )
                mode_string: str = mode_string.replace(" M3", "+").replace(" M0", "-")
                if status["status"] == "Succeeded":
                    successful_cases.setdefault(
                        metadata_file_path,
                        {},
                    ).setdefault(
                        case_idx_string, set()
                    ).add(mode_string)
    print("## Failures", file=markdown_document)
    print(
        "The test specifiers seen in this section have the format 'path::case_idx::compilation_mode'\
 and they're compatible with the revive differential tests framework and can be specified\
 to it directly in the same way that they're provided through the `--test` argument of the\
 framework.\n",
        file=markdown_document,
    )
    print(
        "The failures are provided in an expandable section to ensure that the PR does not get \
polluted with information. Please click on the section below for more information",
        file=markdown_document,
    )
    print(
        "<details><summary>Detailed Differential Tests Failure Information</summary>\n\n",
        file=markdown_document,
    )
    print("| Test Specifier | Failure Reason | Note |", file=markdown_document)
    print("| -- | -- | -- |", file=markdown_document)
    for metadata_file_path, mode_to_case_mapping in report[
        "execution_information"
    ].items():
        for case_idx_string, case_idx_to_report_mapping in mode_to_case_mapping[
            "case_reports"
        ].items():
            for mode_string, execution_report in case_idx_to_report_mapping[
                "mode_execution_reports"
            ].items():
                status: TestCaseStatus = execution_report["status"]
                metadata_file_path: str = (
                    path_relative_to_resolc_compiler_test_directory(metadata_file_path)
                )
                mode_string: str = mode_string.replace(" M3", "+").replace(" M0", "-")
                if status["status"] != "Failed":
                    continue
                # Escape newlines and pipes so the reason fits in one
                # markdown table cell.
                failure_reason: str = (
                    status["reason"].replace("\n", " ").replace("|", " ")
                )
                note: str = ""
                modes_where_this_case_succeeded: set[ModeString] = (
                    successful_cases.setdefault(
                        metadata_file_path,
                        {},
                    ).setdefault(case_idx_string, set())
                )
                if len(modes_where_this_case_succeeded) != 0:
                    note: str = (
                        f"This test case succeeded with other compilation modes: {modes_where_this_case_succeeded}"
                    )
                test_specifier: str = (
                    f"{metadata_file_path}::{case_idx_string}::{mode_string}"
                )
                print(
                    f"| ``{test_specifier}`` | ``{failure_reason}`` | {note} |",
                    file=markdown_document,
                )
    print("\n\n</details>", file=markdown_document)
    # The primary downside of not using `with`, but I guess it's better since I don't want to over
    # indent the code.
    markdown_document.close()


if __name__ == "__main__":
    main()
+213
View File
@@ -0,0 +1,213 @@
#!/bin/bash
set -euo pipefail
# This script processes logs produced by nodes spawned using the zombienet-sdk framework.
# The logs are prepared for upload as GitHub artifacts.
# If Loki logging is available, the corresponding log URLs are also printed.
# NOTE: P2838773B5F7DE937 is the loki.cicd until we switch to loki.zombienet
# Grafana Explore URL template for one node's Loki logs.  The {{namespace}},
# {{podName}}, {{from}} and {{to}} placeholders are substituted by make_url.
LOKI_URL_FOR_NODE='https://grafana.teleport.parity.io/explore?orgId=1&left=%7B%22datasource%22:%22P2838773B5F7DE937%22,%22queries%22:%5B%7B%22refId%22:%22A%22,%22datasource%22:%7B%22type%22:%22loki%22,%22uid%22:%22P2838773B5F7DE937%22%7D,%22editorMode%22:%22code%22,%22expr%22:%22%7Bzombie_ns%3D%5C%22{{namespace}}%5C%22,zombie_node%3D%5C%22{{podName}}%5C%22%7D%22,%22queryType%22:%22range%22%7D%5D,%22range%22:%7B%22from%22:%22{{from}}%22,%22to%22:%22{{to}}%22%7D%7D'
# Directory watched by the log forwarder; when it exists, logs are appended
# there for shipping to Loki.
LOKI_DIR_FOR_NATIVE_LOGS="/tmp/zombienet"
# JQ queries (v1 vs zombienet-sdk network layouts)
JQ_QUERY_RELAY_V1='.relay[].name'
JQ_QUERY_RELAY_SDK='.relay.nodes[].name'
JQ_QUERY_PARA_NODES_V1='.paras[$pid].nodes[].name'
JQ_QUERY_PARA_NODES_SDK='.teyrchains[$pid][] .collators[].name'
# current time in milliseconds + 60 secs to allow loki to ingest logs
TO=$(($(date +%s%3N) + 60000))

# Build the Grafana/Loki explore URL for a single node.
# Inputs:  $1 = node name, $2 = range end (ms epoch).
# Globals: NS (namespace), FROM (range start, ms epoch) must be set by caller.
make_url() {
    local name="$1"
    local to="$2"
    # Substitute each {{placeholder}} in the URL template.
    local url="${LOKI_URL_FOR_NODE//\{\{namespace\}\}/$NS}"
    url="${url//\{\{podName\}\}/$name}"
    url="${url//\{\{from\}\}/$FROM}"
    url="${url//\{\{to\}\}/$to}"
    echo "$url"
}
# Since we don't have the zombie.json file, we will make the best-effort to send the logs
# Best-effort log collection when no zombie.json metadata file exists.
# Inputs:  $1 = BASE_DIR (e.g. /tmp/zombie-abc123), $2 = TARGET_DIR (log dir).
# Side effects: copies node logs into TARGET_DIR, appends them to the Loki
# forwarding file when available, sets NS/FROM globals, prints one Grafana
# URL per node.
process_logs_from_fallback() {
    local BASE_DIR="$1"
    local TARGET_DIR="$2"
    # Extract namespace from BASE_DIR (e.g., /tmp/zombie-abc123 -> zombie-abc123)
    NS=$(basename "$BASE_DIR")
    echo "Using fallback mode for namespace: $NS"
    # Use current time as FROM since we don't have zombie.json
    FROM=$(($(date +%s%3N) - 600000)) # 10 minutes ago
    # Find all logs with glob patterns
    local log_files=()
    # Search for SDK pattern: BASE_DIR/<name>/<name>.log
    if [[ -d "$BASE_DIR" ]]; then
        for node_dir in "$BASE_DIR"/*; do
            if [[ -d "$node_dir" && "$node_dir" != "$TARGET_DIR" ]]; then
                local node_name=$(basename "$node_dir")
                if [[ -f "$node_dir/$node_name.log" ]]; then
                    log_files+=("$node_dir/$node_name.log")
                fi
            fi
        done
    fi
    # Search for v1 pattern: BASE_DIR/logs/<name>.log
    if [[ -d "$TARGET_DIR" ]]; then
        for log_file in "$TARGET_DIR"/*.log; do
            if [[ -f "$log_file" ]]; then
                log_files+=("$log_file")
            fi
        done
    fi
    if [[ ${#log_files[@]} -eq 0 ]]; then
        echo "::warning ::No log files found in $BASE_DIR using glob patterns"
        return 1
    fi
    echo "Found ${#log_files[@]} log file(s) using glob patterns"
    echo "Nodes:"
    for log_file in "${log_files[@]}"; do
        # Extract node name from log file path
        local name=$(basename "$log_file" .log)
        local_to=$TO
        # Copy log to target directory if not already there
        if [[ "$log_file" != "$TARGET_DIR/$name.log" ]]; then
            if ! cp "$log_file" "$TARGET_DIR/$name.log" 2>/dev/null; then
                echo "::warning ::Failed to copy log for $name"
                continue
            fi
        fi
        # Send logs to loki: each line is prefixed "<namespace> <node> " so
        # the forwarder can label it.
        if [[ -d "$LOKI_DIR_FOR_NATIVE_LOGS" ]]; then
            if [[ -f "$TARGET_DIR/$name.log" ]]; then
                awk -v NS="$NS" -v NAME="$name" '{print NS" "NAME" " $0}' "$TARGET_DIR/$name.log" >> "$LOKI_DIR_FOR_NATIVE_LOGS/to-loki.log"
                # Push the range end forward since ingestion happens after TO
                # was computed.
                local_to=$(($(date +%s%3N) + 60000))
            fi
        fi
        echo -e "\t$name: $(make_url "$name" "$local_to")"
    done
    echo ""
}
# Log collection driven by the zombie.json network metadata file.
# Inputs:  $1 = BASE_DIR, $2 = TARGET_DIR, $3 = path to zombie.json.
# Globals: ZOMBIE_PROVIDER selects k8s (kubectl logs) vs native (file copy).
# Side effects: fills TARGET_DIR with per-node logs, forwards them to Loki
# when available, prints one Grafana URL per node.
process_logs_from_zombie_file() {
    local BASE_DIR="$1"
    local TARGET_DIR="$2"
    local ZOMBIE_JSON="$3"
    # Extract namespace (ns in sdk / namespace in v1)
    NS=$(jq -r '.ns // .namespace' "$ZOMBIE_JSON")
    # test start time in milliseconds
    FROM=$(jq -r '.start_time_ts' "$ZOMBIE_JSON")
    echo "Relay nodes:"
    # Default to the v1 JSON layout; an sdk-style namespace
    # (zombie-<hex>-...) switches to the sdk queries.
    JQ_QUERY_RELAY=$JQ_QUERY_RELAY_V1
    JQ_QUERY_PARA_NODES=$JQ_QUERY_PARA_NODES_V1
    if [[ $(echo "$NS" | grep -E "zombie-[A-Fa-f0-9]+-") ]]; then
        JQ_QUERY_RELAY=$JQ_QUERY_RELAY_SDK
        JQ_QUERY_PARA_NODES=$JQ_QUERY_PARA_NODES_SDK
    fi;
    jq -r $JQ_QUERY_RELAY "$ZOMBIE_JSON" | while read -r name; do
        [[ -z "$name" ]] && continue
        local_to=$TO
        if [[ "${ZOMBIE_PROVIDER:-}" == "k8s" ]]; then
            # Fetching logs from k8s
            if ! kubectl logs "$name" -c "$name" -n "$NS" > "$TARGET_DIR/$name.log" 2>&1; then
                echo "::warning ::Failed to fetch logs for $name"
            fi
        else
            # zombienet v1 dump the logs to the `/logs` directory
            if [[ ! -f "$TARGET_DIR/$name.log" ]]; then
                # `sdk` use this pattern to store the logs in native provider
                if [[ -f "$BASE_DIR/$name/$name.log" ]]; then
                    cp "$BASE_DIR/$name/$name.log" "$TARGET_DIR/$name.log"
                else
                    echo "::warning ::Log file not found: $BASE_DIR/$name/$name.log"
                    continue
                fi
            fi
            # send logs to loki (lines prefixed "<ns> <node> " for labelling)
            if [[ -d "$LOKI_DIR_FOR_NATIVE_LOGS" && -f "$TARGET_DIR/$name.log" ]]; then
                awk -v NS="$NS" -v NAME="$name" '{print NS" "NAME" " $0}' "$TARGET_DIR/$name.log" >> "$LOKI_DIR_FOR_NATIVE_LOGS/to-loki.log"
                local_to=$(($(date +%s%3N) + 60000))
            fi
        fi
        echo -e "\t$name: $(make_url "$name" "$local_to")"
    done
    echo ""
    # Handle teyrchains grouped by paraId
    jq -r '.paras // .teyrchains | to_entries[] | "\(.key)"' "$ZOMBIE_JSON" | while read -r para_id; do
        echo "ParaId: $para_id"
        jq -r --arg pid "$para_id" "$JQ_QUERY_PARA_NODES" "$ZOMBIE_JSON" | while read -r name; do
            [[ -z "$name" ]] && continue
            local_to=$TO
            if [[ "${ZOMBIE_PROVIDER:-}" == "k8s" ]]; then
                # Fetching logs from k8s
                if ! kubectl logs "$name" -c "$name" -n "$NS" > "$TARGET_DIR/$name.log" 2>&1; then
                    echo "::warning ::Failed to fetch logs for $name"
                fi
            else
                # zombienet v1 dump the logs to the `/logs` directory
                if [[ ! -f "$TARGET_DIR/$name.log" ]]; then
                    # `sdk` use this pattern to store the logs in native provider
                    if [[ -f "$BASE_DIR/$name/$name.log" ]]; then
                        cp "$BASE_DIR/$name/$name.log" "$TARGET_DIR/$name.log"
                    else
                        echo "::warning ::Log file not found: $BASE_DIR/$name/$name.log"
                        continue
                    fi
                fi
                # send logs to loki
                if [[ -d "$LOKI_DIR_FOR_NATIVE_LOGS" && -f "$TARGET_DIR/$name.log" ]]; then
                    awk -v NS="$NS" -v NAME="$name" '{print NS" "NAME" " $0}' "$TARGET_DIR/$name.log" >> "$LOKI_DIR_FOR_NATIVE_LOGS/to-loki.log"
                    local_to=$(($(date +%s%3N) + 60000))
                fi
            fi
            echo -e "\t$name: $(make_url "$name" "$local_to")"
        done
        echo ""
    done
}
# Main execution - Process all zombie-* directories (supports rstest with multiple tests per job)
# Newest directories first; missing directories exit cleanly (nothing to do).
BASE_DIRS=$(ls -dt /tmp/zombie-* 2>/dev/null || true)
if [[ -z "$BASE_DIRS" ]]; then
    echo "No zombie directories found in /tmp/zombie-*"
    exit 0
fi
for BASE_DIR in $BASE_DIRS; do
    echo "Processing directory: $BASE_DIR"
    # Make sure target directory exists
    TARGET_DIR="$BASE_DIR/logs"
    mkdir -p "$TARGET_DIR"
    ZOMBIE_JSON="$BASE_DIR/zombie.json"
    if [[ ! -f "$ZOMBIE_JSON" ]]; then
        echo "Zombie file $ZOMBIE_JSON not present, calling fallback"
        process_logs_from_fallback "$BASE_DIR" "$TARGET_DIR"
    else
        # we have a zombie.json file, let process it
        echo "Processing logs from zombie.json"
        process_logs_from_zombie_file "$BASE_DIR" "$TARGET_DIR" "$ZOMBIE_JSON"
    fi
    echo ""
done
# sleep for a minute to give alloy time to forward logs
sleep 60
+16
View File
@@ -0,0 +1,16 @@
#!/usr/bin/env bash
# Build a Debian package for an already-compiled product.
#
# Usage: build-deb.sh <product> <version>
# Env:   PROFILE - cargo profile the binaries were built with (default: production)
set -e

PRODUCT=$1
VERSION=$2
PROFILE=${PROFILE:-production}

# Pin cargo-deb so CI builds are reproducible.
cargo install --version 2.7.0 cargo-deb --locked -q
echo "Using cargo-deb v$(cargo-deb --version)"
echo "Building a Debian package for '$PRODUCT' in '$PROFILE' profile"

# --no-build: binaries are expected to have been built in a previous step.
cargo deb --profile "$PROFILE" --no-strip --no-build -p "$PRODUCT" --deb-version "$VERSION"

# BUG FIX: the original glob `$PRODUCT_*_amd64.deb` expanded the undefined
# variable `PRODUCT_` (so it matched every package's .deb); `${PRODUCT}_*`
# matches only this product's package.
deb=target/debian/${PRODUCT}_*_amd64.deb
# Ensure the destination exists before copying.
mkdir -p target/production
cp $deb target/production/
+40
View File
@@ -0,0 +1,40 @@
#!/usr/bin/env bash
# This is used to build our binaries:
# - pezkuwi
# - pezkuwi-teyrchain
# - pezkuwi-omni-node
#
# Usage: <script> <bin> [package] [features]
# Env:   PROFILE - cargo profile (default: production)
# Output: /artifacts/<bin>/{<bin>, <bin>.sha256, VERSION, EXTRATAG}
#
# NOTE(review): `set -e` is commented out below, so individual command
# failures do not abort the script — confirm this is intentional.
# set -e
BIN=$1
PACKAGE=${2:-$BIN}
# must be given as feature1,feature2,feature3...
FEATURES=$3
if [ -n "$FEATURES" ]; then
    FEATURES="--features ${FEATURES}"
fi
PROFILE=${PROFILE:-production}
ARTIFACTS=/artifacts/$BIN
echo "Artifacts will be copied into $ARTIFACTS"
mkdir -p "$ARTIFACTS"
git log --pretty=oneline -n 1
time cargo build --profile $PROFILE --locked --verbose --bin $BIN --package $PACKAGE $FEATURES
echo "Artifact target: $ARTIFACTS"
cp ./target/$PROFILE/$BIN "$ARTIFACTS"
pushd "$ARTIFACTS" > /dev/null
sha256sum "$BIN" | tee "$BIN.sha256"
chmod a+x "$BIN"
VERSION="$($ARTIFACTS/$BIN --version)"
# Extract the "<semver>-<commit>" part from the binary's --version output...
EXTRATAG="$(echo "${VERSION}" |
    sed -n -r 's/^'$BIN' ([0-9.]+.*-[0-9a-f]{7,13})-.*$/\1/p')"
# ...then compose the final tag: <version>-<extracted>-<first 8 chars of sha256>.
EXTRATAG="${VERSION}-${EXTRATAG}-$(cut -c 1-8 $ARTIFACTS/$BIN.sha256)"
echo "$BIN version = ${VERSION} (EXTRATAG = ${EXTRATAG})"
echo -n ${VERSION} > "$ARTIFACTS/VERSION"
echo -n ${EXTRATAG} > "$ARTIFACTS/EXTRATAG"
+42
View File
@@ -0,0 +1,42 @@
#!/usr/bin/env bash
# This is used to build our binaries:
# - pezkuwi
# - pezkuwi-teyrchain
# - pezkuwi-omni-node
#
# Usage: <script> <bin> [package] [features]
# Env:   PROFILE (default: production), GITHUB_WORKSPACE (set by Actions)
# Output: $GITHUB_WORKSPACE/artifacts/<bin>/{<bin>, <bin>.sha256, VERSION, EXTRATAG}
#
# NOTE(review): `set -e` is commented out below — command failures will not
# abort the script; confirm this is intentional.
# set -e
BIN=$1
PACKAGE=${2:-$BIN}
PROFILE=${PROFILE:-production}
# parity-macos runner needs a path where it can
# write, so make it relative to github workspace.
ARTIFACTS=$GITHUB_WORKSPACE/artifacts/$BIN
# NOTE(review): if several v-tags point at HEAD this is multi-line — confirm
# release jobs only ever run with a single version tag on HEAD.
VERSION=$(git tag -l --contains HEAD | grep -E "^v.*")
# must be given as feature1,feature2,feature3...
FEATURES=$3
if [ -n "$FEATURES" ]; then
    FEATURES="--features ${FEATURES}"
fi
echo "Artifacts will be copied into $ARTIFACTS"
mkdir -p "$ARTIFACTS"
git log --pretty=oneline -n 1
time cargo build --profile $PROFILE --locked --verbose --bin $BIN --package $PACKAGE $FEATURES
echo "Artifact target: $ARTIFACTS"
cp ./target/$PROFILE/$BIN "$ARTIFACTS"
pushd "$ARTIFACTS" > /dev/null
sha256sum "$BIN" | tee "$BIN.sha256"
# Commit part of the binary's --version output, then the final tag:
# <git tag>-<commit part>-<first 8 chars of sha256>.
EXTRATAG="$($ARTIFACTS/$BIN --version |
    sed -n -r 's/^'$BIN' ([0-9.]+.*-[0-9a-f]{7,13})-.*$/\1/p')"
EXTRATAG="${VERSION}-${EXTRATAG}-$(cut -c 1-8 $ARTIFACTS/$BIN.sha256)"
echo "$BIN version = ${VERSION} (EXTRATAG = ${EXTRATAG})"
echo -n ${VERSION} > "$ARTIFACTS/VERSION"
echo -n ${EXTRATAG} > "$ARTIFACTS/EXTRATAG"
+68
View File
@@ -0,0 +1,68 @@
#!/usr/bin/env bash
# Build an RPM package for an already-compiled product using fpm.
# Usage: <script> <product_name> <version>
# Env:   PROFILE - cargo profile the binaries were built with (default: production)
set -e
# --- Configuration ---
PRODUCT=${1:?"Usage: $0 <product_name> <version>"}
VERSION=${2:?"Usage: $0 <product_name> <version>"}
PROFILE=${PROFILE:-production}
ARCH="x86_64"
SOURCE_DIR="target/${PROFILE}"
STAGING_DIR="/tmp/${PRODUCT}-staging"
DEST_DIR="target/production"
# --- Script Start ---
echo "📦 Starting RPM build for '$PRODUCT' version '$VERSION'..."
# 1. Clean up and create a fresh staging directory
echo "🔧 Setting up staging directory: ${STAGING_DIR}"
rm -rf "$STAGING_DIR"
mkdir -p "$STAGING_DIR/usr/bin"
mkdir -p "$STAGING_DIR/usr/lib/${PRODUCT}"
mkdir -p "$STAGING_DIR/usr/lib/systemd/system"
mkdir -p "$STAGING_DIR/etc/default"
# 2. Copy compiled binaries and assets into the staging directory
echo "📂 Copying application files..."
cp "${SOURCE_DIR}/${PRODUCT}" "${STAGING_DIR}/usr/bin/"
cp "${SOURCE_DIR}/${PRODUCT}-prepare-worker" "${STAGING_DIR}/usr/lib/${PRODUCT}/"
cp "${SOURCE_DIR}/${PRODUCT}-execute-worker" "${STAGING_DIR}/usr/lib/${PRODUCT}/"
# MODIFIED PATH: Prefixed with the subdirectory name
cp "pezkuwi/scripts/packaging/pezkuwi.service" "${STAGING_DIR}/usr/lib/systemd/system/"
# Create default config file
echo 'PEZKUWI_CLI_ARGS=""' > "$STAGING_DIR/etc/default/pezkuwi"
# 3. Use fpm to package the staging directory into an RPM
# fpm config file .fpm is located in the pezkuwi-sdk root directory
echo "🎁 Running fpm to create the RPM package..."
fpm \
    -s dir \
    -t rpm \
    -n "$PRODUCT" \
    -v "$VERSION" \
    -a "$ARCH" \
    --rpm-os linux \
    --description "Pezkuwi Node" \
    --license "GPL-3.0-only" \
    --url "https://pezkuwi.network/" \
    --depends systemd \
    --depends shadow-utils \
    --after-install "pezkuwi/scripts/packaging/rpm-maintainer-scripts/rpm-postinst.sh" \
    --before-remove "pezkuwi/scripts/packaging/rpm-maintainer-scripts/rpm-preun.sh" \
    --after-remove "pezkuwi/scripts/packaging/rpm-maintainer-scripts/rpm-postun.sh" \
    --config-files "/etc/default/pezkuwi" \
    -C "$STAGING_DIR" \
    .
# 4. Move the final RPM to the artifacts directory
# NOTE(review): the filename assumes fpm's default iteration "1" — confirm
# this matches the fpm version/config in use.
echo "🚚 Moving RPM to '${DEST_DIR}'..."
mkdir -p "$DEST_DIR"
mv "${PRODUCT}-${VERSION}-1.${ARCH}.rpm" "$DEST_DIR/"
# 5. Clean up the staging directory
echo "🧹 Cleaning up temporary files..."
rm -rf "$STAGING_DIR"
echo "✅ RPM package build complete!"
ls -l "$DEST_DIR"
+39
View File
@@ -0,0 +1,39 @@
Origin: Parity
Label: Parity
Codename: release
Architectures: amd64
Components: main
Description: Apt repository for software made by Parity Technologies Ltd.
SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
Origin: Parity
Label: Parity Staging
Codename: staging
Architectures: amd64
Components: main
Description: Staging distribution for Parity Technologies Ltd. packages
SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
Origin: Parity
Label: Parity stable2407
Codename: stable2407
Architectures: amd64
Components: main
Description: Apt repository for software made by Parity Technologies Ltd.
SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
Origin: Parity
Label: Parity stable2409
Codename: stable2409
Architectures: amd64
Components: main
Description: Apt repository for software made by Parity Technologies Ltd.
SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
Origin: Parity
Label: Parity stable2412
Codename: stable2412
Architectures: amd64
Components: main
Description: Apt repository for software made by Parity Technologies Ltd.
SignWith: 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
@@ -0,0 +1,112 @@
#!/bin/bash
# pgpkms wrapper to make it compatible with RPM's GPG interface
# This script translates RPM's GPG arguments to pgpkms format
#
# RPM invokes this as if it were gpg (see the %__gpg macro); the wrapper
# extracts the input/output files and signing flags, then execs pgpkms.
# Debug: log all arguments to stderr
echo "pgpkms-gpg-wrapper called with args: $*" >&2
# Parse arguments to find the input file and options
input_file=""
output_file=""
detach_sign=false
armor=false
local_user=""
read_from_stdin=false
while [[ $# -gt 0 ]]; do
    case $1 in
        --detach-sign)
            detach_sign=true
            shift
            ;;
        --armor)
            armor=true
            shift
            ;;
        --local-user)
            local_user="$2"
            shift 2
            ;;
        -u)
            local_user="$2"
            shift 2
            ;;
        -sbo)
            # RPM uses -sbo which means: -s (sign), -b (detach), -o (output to file)
            detach_sign=true
            # The next argument should be the output file
            shift
            if [[ -n "$1" ]] && [[ "$1" != "--" ]]; then
                output_file="$1"
                shift
            fi
            ;;
        --no-verbose|--no-armor|--no-secmem-warning|--batch|--no-tty|--pinentry-mode|--passphrase-fd)
            # Skip these GPG-specific options
            # NOTE(review): --pinentry-mode and --passphrase-fd take a value
            # in gpg; only the flag is skipped here, so their value would be
            # treated as a file argument — confirm RPM never passes them
            # with a separate value.
            shift
            ;;
        --)
            # End of options marker
            shift
            break
            ;;
        --*)
            # Skip other long options
            shift
            ;;
        -*)
            # Skip other short options
            shift
            ;;
        *)
            # This could be a file argument; "-" means read from stdin.
            if [[ "$1" == "-" ]]; then
                read_from_stdin=true
            elif [[ -z "$input_file" ]] && [[ -f "$1" ]]; then
                input_file="$1"
            fi
            shift
            ;;
    esac
done
# Handle remaining arguments after --
while [[ $# -gt 0 ]]; do
    if [[ "$1" == "-" ]]; then
        read_from_stdin=true
    elif [[ -z "$input_file" ]] && [[ -f "$1" ]]; then
        input_file="$1"
    fi
    shift
done
echo "Parsed: input_file='$input_file', output_file='$output_file', read_from_stdin=$read_from_stdin, armor=$armor" >&2
# If we're supposed to read from stdin, we need to create a temp file
if [[ "$read_from_stdin" == "true" ]]; then
    temp_input=$(mktemp)
    cat > "$temp_input"
    input_file="$temp_input"
    echo "Created temp file for stdin: $input_file" >&2
fi
if [[ -z "$input_file" ]]; then
    echo "Error: No input file found" >&2
    exit 1
fi
echo "Signing file: $input_file" >&2
# Call pgpkms with the appropriate arguments
pgpkms_args="sign --input $input_file"
if [[ -n "$output_file" ]]; then
    pgpkms_args="$pgpkms_args --output $output_file"
fi
# pgpkms emits ASCII-armored output by default; request binary unless
# --armor was passed.
if [[ "$armor" != "true" ]]; then
    pgpkms_args="$pgpkms_args --binary"
fi
echo "Running: /home/runner/.local/bin/pgpkms $pgpkms_args" >&2
exec /home/runner/.local/bin/pgpkms $pgpkms_args
+206
View File
@@ -0,0 +1,206 @@
#!/usr/bin/env bash
# Shared helper functions for the release workflows.

# Set the new version by replacing the value of the constant given as pattern
# in the file.  The pattern must contain one sed capture group (the constant
# prefix), which is kept and followed by the quoted version.
#
# input: pattern, version, file
# output: none
set_version() {
    pattern=$1
    version=$2
    file=$3
    sed -i "s/$pattern/\1\"${version}\"/g" $file
    return 0
}

# Commit changes to git with specific message.
# "|| true" does not let script to fail with exit code 1,
# in case there is nothing to commit.
#
# input: MESSAGE (any message which should be used for the commit)
# output: none
commit_with_message() {
    MESSAGE=$1
    git commit -a -m "$MESSAGE" || true
}

# Return the list of runtime lib.rs files, filtered: keeps paths matching
# "runtime", drops test/template/starters/substrate crates.
# input: none
# output: list of filtered runtime file paths
get_filtered_runtimes_list() {
    grep_filters=("runtime.*" "test|template|starters|substrate")
    git grep spec_version: | grep .rs: | grep -e "${grep_filters[0]}" | grep "lib.rs" | grep -vE "${grep_filters[1]}" | cut -d: -f1
}

# Sets provided spec version in every given runtime file and commits.
# input: version, runtime file list
set_spec_versions() {
    NEW_VERSION=$1
    runtimes_list=(${@:2})
    printf "Setting spec_version to $NEW_VERSION\n"
    for f in ${runtimes_list[@]}; do
        printf " processing $f"
        # Matches the underscore-separated form, e.g. spec_version: 1_012_000,
        sed -ri "s/spec_version: [0-9]+_[0-9]+_[0-9]+,/spec_version: $NEW_VERSION,/" $f
    done
    commit_with_message "Bump spec_version to $NEW_VERSION"
    git_show_log 'spec_version'
}

# Displays formatted results of the git log command
# for the given pattern which needs to be found in logs
# input: pattern, count (optional, default is 10)
git_show_log() {
    PATTERN="$1"
    COUNT=${2:-10}
    git log --pretty=format:"%h %ad | %s%d [%an]" --graph --date=iso-strict | \
        head -n $COUNT | grep -iE "$PATTERN" --color=always -z
}
# Get a spec_version number from the crate version
#
# ## inputs
# - v1.12.0 or 1.12.0
#
# ## output:
# 1_012_000 or 1_012_001 if SUFFIX is set
function get_spec_version() {
    INPUT=$1
    SUFFIX=${SUFFIX:-000} # this variable makes it possible to set a specific runtime version like 93826 it can be initialised as system variable
    # NOTE(review): the patch component accepts only 1-2 digits — confirm
    # releases never use a 3-digit patch number.
    [[ $INPUT =~ .*([0-9]+\.[0-9]+\.[0-9]{1,2}).* ]]
    VERSION="${BASH_REMATCH[1]}"
    MATCH="${BASH_REMATCH[0]}"
    if [ -z $MATCH ]; then
        return 1
    else
        # 1.12.0 -> 1_012_0 (each "." becomes "_0"), then the last segment is
        # replaced with the SUFFIX: 1_012_000.
        SPEC_VERSION="$(sed -e "s/\./_0/g" -e "s/_[^_]*\$/_$SUFFIX/" <<< $VERSION)"
        echo "$SPEC_VERSION"
        return 0
    fi
}

# Reorganize the prdoc files for the release: moves prdoc/pr_*.prdoc into
# prdoc/<version>/ and commits.
#
# input: VERSION (e.g. v1.0.0)
# output: none
reorder_prdocs() {
    VERSION="$1"
    printf "[+] ️ Reordering prdocs:"
    VERSION=$(sed -E 's/^v([0-9]+\.[0-9]+\.[0-9]+).*$/\1/' <<< "$VERSION") # getting rid of the 'v' prefix
    mkdir -p "prdoc/$VERSION"
    mv prdoc/pr_*.prdoc prdoc/$VERSION
    git add -A
    commit_with_message "Reordering prdocs for the release $VERSION"
}

# Bump the binary version of the pezkuwi-teyrchain binary with the
# new bumped version and commit changes.
#
# input: version e.g. 1.16.0, path to the Cargo.toml to edit
set_pezkuwi_teyrchain_binary_version() {
    bumped_version="$1"
    cargo_toml_file="$2"
    set_version "\(^version = \)\".*\"" $bumped_version $cargo_toml_file
    cargo update --workspace --offline # we need this to update Cargo.lock with the new versions as well
    MESSAGE="Bump versions in: ${cargo_toml_file}"
    commit_with_message "$MESSAGE"
    git_show_log "$MESSAGE"
}
# Upload one product's release artifacts to the S3 release bucket.
#
# input: product, version, target platform triple
# output: none
upload_s3_release() {
    # NOTE(review): aliases are not expanded in non-interactive bash unless
    # `shopt -s expand_aliases` is in effect — confirm this function is only
    # used in a shell where the alias takes effect.
    alias aws='podman run --rm -it docker.io/paritytech/awscli -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_BUCKET aws'
    product=$1
    version=$2
    target=$3
    echo "Working on product: $product "
    echo "Working on version: $version "
    echo "Working on platform: $target "
    URL_BASE=$(get_s3_url_base $product)
    echo "Current content, should be empty on new uploads:"
    aws s3 ls "s3://${URL_BASE}/${version}/${target}" --recursive --human-readable --summarize || true
    echo "Content to be uploaded:"
    artifacts="release-artifacts/$target/$product/"
    ls "$artifacts"
    aws s3 sync --acl public-read "$artifacts" "s3://${URL_BASE}/${version}/${target}"
    echo "Uploaded files:"
    aws s3 ls "s3://${URL_BASE}/${version}/${target}" --recursive --human-readable --summarize
    echo "✅ The release should be at https://${URL_BASE}/${version}/${target}"
}

# Upload runtimes artifacts to s3 release bucket
#
# input: version (stable release tag e.g. pezkuwi-stable2412 or pezkuwi-stable2412-rc1)
# output: none
upload_s3_runtimes_release_artifacts() {
    # NOTE(review): same alias-expansion caveat as in upload_s3_release.
    alias aws='podman run --rm -it docker.io/paritytech/awscli -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_BUCKET aws'
    version=$1
    echo "Working on version: $version "
    echo "Current content, should be empty on new uploads:"
    aws s3 ls "s3://releases.parity.io/pezkuwi/runtimes/${version}/" --recursive --human-readable --summarize || true
    echo "Content to be uploaded:"
    artifacts="artifacts/runtimes/"
    ls "$artifacts"
    aws s3 sync --acl public-read "$artifacts" "s3://releases.parity.io/pezkuwi/runtimes/${version}/"
    echo "Uploaded files:"
    aws s3 ls "s3://releases.parity.io/pezkuwi/runtimes/${version}/" --recursive --human-readable --summarize
    echo "✅ The release should be at https://releases.parity.io/pezkuwi/runtimes/${version}"
}

# Pass the name of the binary as input, it will
# return the s3 base url
function get_s3_url_base() {
    name=$1
    case $name in
        pezkuwi | pezkuwi-execute-worker | pezkuwi-prepare-worker )
            printf "releases.parity.io/pezkuwi"
            ;;
        pezkuwi-teyrchain)
            printf "releases.parity.io/pezkuwi-teyrchain"
            ;;
        pezkuwi-omni-node)
            printf "releases.parity.io/pezkuwi-omni-node"
            ;;
        chain-spec-builder)
            printf "releases.parity.io/chain-spec-builder"
            ;;
        frame-omni-bencher)
            printf "releases.parity.io/frame-omni-bencher"
            ;;
        substrate-node)
            printf "releases.parity.io/substrate-node"
            ;;
        eth-rpc)
            printf "releases.parity.io/eth-rpc"
            ;;
        subkey)
            printf "releases.parity.io/subkey"
            ;;
        *)
            # NOTE(review): when invoked via $(get_s3_url_base ...), this
            # `exit` only leaves the command-substitution subshell; the
            # caller still proceeds with the captured error text — confirm
            # callers validate the output.
            printf "UNSUPPORTED BINARY $name"
            exit 1
            ;;
    esac
}
+4
View File
@@ -0,0 +1,4 @@
%_signature gpg
%_gpg_name 90BD75EBBB8E95CB3DA6078F94A4029AB4B35DAE
%__gpg /home/runner/work/pezkuwi-sdk/pezkuwi-sdk/.github/scripts/release/pgpkms-gpg-wrapper.sh
%__gpgbin /home/runner/work/pezkuwi-sdk/pezkuwi-sdk/.github/scripts/release/pgpkms-gpg-wrapper.sh
+85
View File
@@ -0,0 +1,85 @@
#!/usr/bin/env bash
# This script executes a given zombienet test for the `native` provider.
# It is equivalent to running run-test-local-env-manager.sh for the `k8s` provider.
# Finds the first file matching $TEST_TO_RUN under $OUTPUT_DIR and runs it
# with the zombienet `native` provider.
# Reads globals: OUTPUT_DIR, TEST_TO_RUN, ZOMBIE_COMMAND, CONCURRENCY,
#                ZOMBIE_BASE_DIR.
# Writes globals: TEST_FOUND, EXIT_STATUS (test exit code, or 1 if no
#                 matching test file was found).
function run_test {
cd "${OUTPUT_DIR}"
# NOTE(review): `head -1` means at most one match is executed even though
# this is written as a loop.
for i in $(find ${OUTPUT_DIR} -name "${TEST_TO_RUN}"| head -1); do
TEST_FOUND=1
# in order to let native provider work properly we need
# to unset ZOMBIENET_IMAGE, which controls 'inCI' internal flag.
# ZOMBIENET_IMAGE not set && RUN_IN_CONTAINER=0 => inCI=false
# Apparently inCI=true works properly only with k8s provider
unset ZOMBIENET_IMAGE
# When a base dir is given, also pass -d (base path) and -f to zombienet.
if [ -z "$ZOMBIE_BASE_DIR" ]; then
${ZOMBIE_COMMAND} -p native -c $CONCURRENCY test $i
else
${ZOMBIE_COMMAND} -p native -c $CONCURRENCY -d $ZOMBIE_BASE_DIR -f test $i
fi;
EXIT_STATUS=$?
done;
if [[ $TEST_FOUND -lt 1 ]]; then
EXIT_STATUS=1
fi;
}
# Creates a fresh, timestamped working directory beneath OUTPUT_DIR and
# repoints OUTPUT_DIR at it, so each run is isolated from previous ones.
# Writes globals: TS, ISOLATED, OUTPUT_DIR.
function create_isolated_dir {
  TS="$(date +%s)"
  ISOLATED="${OUTPUT_DIR}/${TS}"
  mkdir -p "${ISOLATED}"
  OUTPUT_DIR="${ISOLATED}"
}
# Copies the contents of LOCAL_DIR into the isolated OUTPUT_DIR, working
# from SCRIPT_PATH (printed for the trace log).
# Reads globals: SCRIPT_PATH, LOCAL_DIR, OUTPUT_DIR.
function copy_to_isolated {
  cd "${SCRIPT_PATH}"
  pwd
  cp -r "${LOCAL_DIR}"/* "${OUTPUT_DIR}"
}
# Deletes the isolated working directory created by create_isolated_dir,
# announcing the path first. Reads global: OUTPUT_DIR.
function rm_isolated_dir {
  local doomed="${OUTPUT_DIR}"
  echo "Removing ${doomed}"
  rm -rf "${doomed}"
}
# Prints a timestamped "<level> - <message>" line (preceded by a blank line).
# A level of DIE logs the message as ERROR and exits the shell with status 1.
# $1 - level (INFO, ERROR, DIE, ...); $2 - message text
function log {
  local lvl msg fmt
  lvl="$1"
  msg="$2"
  fmt='+%Y-%m-%d %H:%M:%S'
  # lg_date is intentionally left non-local, matching the original behavior.
  lg_date=$(date "${fmt}")
  if [[ "${lvl}" == "DIE" ]]; then
    echo -e "\n${lg_date} - ERROR - ${msg}"
    exit 1
  fi
  echo -e "\n${lg_date} - ${lvl} - ${msg}"
}
# --- main: run one zombienet test in an isolated copy of the test dir ------
set -x
SCRIPT_NAME="$0"
SCRIPT_PATH=$(dirname "$0") # relative
SCRIPT_PATH=$(cd "${SCRIPT_PATH}" && pwd) # absolutized and normalized
ZOMBIE_COMMAND=zombie
EXIT_STATUS=0
# args
# $1 - directory containing the zombienet test definitions to copy
# $2 - zombienet concurrency (-c)
# $3 - test file name (find pattern) to run
# $4 - optional zombienet base dir (enables the -d/-f flags in run_test)
LOCAL_DIR="$1"
CONCURRENCY="$2"
TEST_TO_RUN="$3"
ZOMBIE_BASE_DIR="$4"
cd "${SCRIPT_PATH}"
OUTPUT_DIR="${SCRIPT_PATH}"
# Build the throwaway dir, copy the tests in, run, then clean up and
# propagate the test's exit status.
create_isolated_dir
copy_to_isolated
run_test
rm_isolated_dir
log INFO "Exit status is ${EXIT_STATUS}"
exit "${EXIT_STATUS}"
@@ -0,0 +1,79 @@
# Rebuilds the wishlist leaderboard table inside the wishlist tracking issue.
#
# Environment variables:
#   GH_TOKEN              - GitHub API token
#   WISHLIST_REPOSITORY   - "org/repo" that holds the wishlist issue
#   WISHLIST_ISSUE_NUMBER - number of the wishlist issue
from github import Github
import re
import os
from datetime import date

g = Github(os.getenv("GH_TOKEN"))

# Regex pattern to match wish format:
wish_pattern = re.compile(
    r"I wish for:? (https://github\.com/([a-zA-Z0-9_.-]+)/([a-zA-Z0-9_.-]+)/(issues|pull)/(\d+))"
)

wishlist_issue = g.get_repo(os.getenv("WISHLIST_REPOSITORY")).get_issue(
    int(os.getenv("WISHLIST_ISSUE_NUMBER"))
)

new_leaderboard = (
    "| Feature Request | Summary | Votes | Status |\n| --- | --- | --- | --- |\n"
)
# (url, org, repo, issue_id) -> list of voter user ids (de-duplicated later)
wishes = {}
# url -> [issue title, rendered open/closed state]
issue_details = {}
for comment in wishlist_issue.get_comments():
    # in the comment body, if there is a string `#(\d)`, replace it with
    # https://github.com/pezkuwichain/pezkuwi-sdk/issues/(number)
    updated_body = re.sub(
        r"#(\d+)", r"https://github.com/pezkuwichain/pezkuwi-sdk/issues/\1", comment.body
    )
    matches = wish_pattern.findall(updated_body)
    for match in matches:
        url, org, repo_name, _, issue_id = match
        issue_key = (url, org, repo_name, issue_id)
        if issue_key not in wishes:
            wishes[issue_key] = []
        # Get the author and upvoters of the wish comment.
        wishes[issue_key].append(comment.user.id)
        wishes[issue_key].extend(
            [
                reaction.user.id
                for reaction in comment.get_reactions()
                if reaction.content in ["+1", "heart", "rocket"]
            ]
        )
        # Get upvoters of the desired issue.
        desired_issue = g.get_repo(f"{org}/{repo_name}").get_issue(int(issue_id))
        wishes[issue_key].extend(
            [
                reaction.user.id
                for reaction in desired_issue.get_reactions()
                if reaction.content in ["+1", "heart", "rocket"]
            ]
        )
        issue_details[url] = [
            desired_issue.title,
            "👾 Open" if desired_issue.state == "open" else "✅Closed",
        ]

# Count unique wishes - the author of the wish, upvoters of the wish, and upvoters of the desired issue.
for key in wishes:
    wishes[key] = len(list(set(wishes[key])))

# Sort wishes by count and add to the markdown table
sorted_wishes = sorted(wishes.items(), key=lambda x: x[1], reverse=True)
for (url, _, _, _), count in sorted_wishes:
    # FIX: the fallback must be a two-element list; the old default was a bare
    # string, so unpacking it into [summary, status] raised ValueError
    # whenever the url was missing from issue_details.
    [summary, status] = issue_details.get(url, ["No summary available", "Unknown"])
    new_leaderboard += f"| {url} | {summary} | {count} | {status} |\n"
new_leaderboard += f"\n> Last updated: {date.today().strftime('%Y-%m-%d')}\n"
print(new_leaderboard)

# Swap the old leaderboard section (header through the "Last updated" line)
# for the freshly built one.
# FIX: a callable replacement is used so backslashes in issue titles are not
# interpreted by re.sub as group references / escapes.
new_content = re.sub(
    r"(\| Feature Request \|)(.*?)(> Last updated:)(.*?\n)",
    lambda _m: new_leaderboard,
    wishlist_issue.body,
    flags=re.DOTALL,
)

wishlist_issue.edit(body=new_content)
+214
View File
@@ -0,0 +1,214 @@
# Regenerates the weights for every runtime listed in runtimes-matrix.json
# (via frame-omni-bencher) and opens a PR with the results.
name: Bench all runtimes

on:
  # schedule:
  #   - cron: '0 1 * * 0' # weekly on Sunday night 01:00 UTC
  workflow_dispatch:
    inputs:
      draft:
        type: boolean
        default: false
        description: "Whether to create a draft PR"

permissions: # allow the action to create a PR
  contents: write
  issues: write
  pull-requests: write
  actions: read

jobs:
  preflight:
    uses: ./.github/workflows/reusable-preflight.yml

  # Extracts the runtime list and creates the shared working branch that all
  # bench jobs commit against.
  runtime-matrix:
    runs-on: ubuntu-latest
    needs: [preflight]
    timeout-minutes: 30
    outputs:
      runtime: ${{ steps.runtime.outputs.runtime }}
      branch: ${{ steps.branch.outputs.branch }}
      date: ${{ steps.branch.outputs.date }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    name: Extract runtimes from matrix
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          # FIX: this repository's default branch is 'main' (see the push
          # triggers in sibling workflows); 'master' does not exist here.
          ref: main
      - name: Extract runtimes
        id: runtime
        run: |
          RUNTIMES=$(jq '[.[] | select(.package != null)]' .github/workflows/runtimes-matrix.json)
          RUNTIMES=$(echo $RUNTIMES | jq -c .)
          echo "runtime=$RUNTIMES"
          echo "runtime=$RUNTIMES" >> $GITHUB_OUTPUT
      - name: Create branch
        id: branch
        run: |
          DATE=$(date +'%Y-%m-%d-%s')
          BRANCH="update-weights-weekly-$DATE"
          # Fixes "detected dubious ownership" error in the ci
          git config --global --add safe.directory $GITHUB_WORKSPACE
          git checkout -b $BRANCH
          git push --set-upstream origin $BRANCH
          echo "date=$DATE" >> $GITHUB_OUTPUT
          echo "branch=$BRANCH" >> $GITHUB_OUTPUT

  # One matrix job per runtime: regenerate its weights and upload the diff as
  # an artifact for the apply job below.
  run-pezframe-omni-bencher:
    needs: [preflight, runtime-matrix]
    runs-on: ${{ needs.preflight.outputs.RUNNER_WEIGHTS }}
    # 24 hours per runtime.
    # Max it takes 14hr for zagros to recalculate, but due to limited runners,
    # sometimes it can take longer.
    timeout-minutes: 1440
    strategy:
      fail-fast: false # keep running other workflows even if one fails, to see the logs of all possible failures
      matrix:
        runtime: ${{ fromJSON(needs.runtime-matrix.outputs.runtime) }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    env:
      PACKAGE_NAME: ${{ matrix.runtime.package }}
      FLAGS: ${{ matrix.runtime.bench_flags }}
      RUST_LOG: "frame_omni_bencher=info,pezkuwi_sdk_frame=info"
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          fetch-depth: 0
          ref: ${{ needs.runtime-matrix.outputs.branch }} # checkout always from the initially created branch to avoid conflicts
      - name: script
        id: required
        run: |
          git --version
          # Fixes "detected dubious ownership" error in the ci
          git config --global --add safe.directory $GITHUB_WORKSPACE
          git remote -v
          python3 -m pip install -r .github/scripts/generate-prdoc.requirements.txt
          python3 .github/scripts/cmd/cmd.py bench --runtime ${{ matrix.runtime.name }}
          git add .
          git status
          if [ -f /tmp/cmd/command_output.log ]; then
            CMD_OUTPUT=$(cat /tmp/cmd/command_output.log)
            # export to summary to display in the PR
            echo "$CMD_OUTPUT" >> $GITHUB_STEP_SUMMARY
            # should be multiline, otherwise it captures the first line only
            echo 'cmd_output<<EOF' >> $GITHUB_OUTPUT
            echo "$CMD_OUTPUT" >> $GITHUB_OUTPUT
            echo 'EOF' >> $GITHUB_OUTPUT
          fi
          # Create patch that includes both modifications and new files
          git add -A
          git diff --staged > diff-${{ matrix.runtime.name }}.patch -U0
          git reset
      - name: Upload diff
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: diff-${{ matrix.runtime.name }}
          path: diff-${{ matrix.runtime.name }}.patch

  # Applies all uploaded diffs onto the working branch, commits, and opens the
  # final PR (with Subweight comparison links in the body).
  apply-diff-commit:
    runs-on: ubuntu-latest
    needs: [runtime-matrix, run-pezframe-omni-bencher]
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          fetch-depth: 0
          ref: ${{ needs.runtime-matrix.outputs.branch }}
      - name: Download all artifacts
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          path: patches
      # needs to be able to trigger CI
      - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        id: generate_token
        with:
          app-id: ${{ secrets.CMD_BOT_APP_ID }}
          private-key: ${{ secrets.CMD_BOT_APP_KEY }}
      # FIX: the PR base and the Subweight comparison link now target 'main'
      # instead of the non-existent 'master' branch.
      - name: Apply diff and create PR
        env:
          GH_TOKEN: ${{ steps.generate_token.outputs.token }}
          BRANCH: ${{ needs.runtime-matrix.outputs.branch }}
          DATE: ${{ needs.runtime-matrix.outputs.date }}
        run: |
          git --version
          git config user.name "github-actions[bot]"
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
          git status
          # Apply all patches
          for file in patches/diff-*/diff-*.patch; do
            if [ -f "$file" ] && [ -s "$file" ]; then
              echo "Applying $file"
              # using --3way and --ours for conflicts resolution. Requires git 2.47+
              git apply "$file" --unidiff-zero --allow-empty --3way --ours || echo "Failed to apply $file"
            else
              echo "Skipping empty or non-existent patch file: $file"
            fi
          done
          rm -rf patches
          # Get release tags from 1 and 3 months ago
          ONE_MONTH_AGO=$(date -d "1 month ago" +%Y-%m-%d)
          THREE_MONTHS_AGO=$(date -d "3 months ago" +%Y-%m-%d)
          # Get tags with their dates
          ONE_MONTH_INFO=$(git for-each-ref --sort=-creatordate --format '%(refname:short)|%(creatordate:iso-strict-local)' 'refs/tags/pezkuwi-v*' | awk -v date="$ONE_MONTH_AGO" -F'|' '$2 <= date {print $0; exit}')
          THREE_MONTHS_INFO=$(git for-each-ref --sort=-creatordate --format '%(refname:short)|%(creatordate:iso-strict-local)' 'refs/tags/pezkuwi-v*' | awk -v date="$THREE_MONTHS_AGO" -F'|' '$2 <= date {print $0; exit}')
          # Split into tag and date
          ONE_MONTH_TAG=$(echo "$ONE_MONTH_INFO" | cut -d'|' -f1)
          ONE_MONTH_DATE=$(echo "$ONE_MONTH_INFO" | cut -d'|' -f2 | cut -d'T' -f1)
          THREE_MONTHS_TAG=$(echo "$THREE_MONTHS_INFO" | cut -d'|' -f1)
          THREE_MONTHS_DATE=$(echo "$THREE_MONTHS_INFO" | cut -d'|' -f2 | cut -d'T' -f1)
          # Base URL for Subweight comparisons
          BASE_URL="https://weights.tasty.limo/compare?repo=pezkuwi-sdk&threshold=5&path_pattern=.%2F**%2Fweights%2F**%2F*.rs%2C.%2F**%2Fweights.rs&method=asymptotic&ignore_errors=true&unit=time"
          # Generate comparison links
          MAIN_LINK="${BASE_URL}&old=main&new=${BRANCH}"
          ONE_MONTH_LINK="${BASE_URL}&old=${ONE_MONTH_TAG}&new=${BRANCH}"
          THREE_MONTHS_LINK="${BASE_URL}&old=${THREE_MONTHS_TAG}&new=${BRANCH}"
          # Create PR body with all links in a temporary file
          cat > /tmp/pr_body.md << EOF
          Auto-update of all weights for ${DATE}.
          Subweight results:
          - [now vs main](${MAIN_LINK})
          - [now vs ${ONE_MONTH_TAG} (${ONE_MONTH_DATE})](${ONE_MONTH_LINK})
          - [now vs ${THREE_MONTHS_TAG} (${THREE_MONTHS_DATE})](${THREE_MONTHS_LINK})
          EOF
          git add .
          git commit -m "Update all weights weekly for $DATE"
          git push --set-upstream origin "$BRANCH"
          MAYBE_DRAFT=${{ inputs.draft && '--draft' || '' }}
          PR_TITLE="Auto-update of all weights for $DATE"
          gh pr create \
            --title "$PR_TITLE" \
            --head "$BRANCH" \
            --base "main" \
            --reviewer pezkuwichain/ci \
            --reviewer pezkuwichain/release-engineering \
            $MAYBE_DRAFT \
            --label "R0-no-crate-publish-required" \
            --body "$(cat /tmp/pr_body.md)"
+109
View File
@@ -0,0 +1,109 @@
# Runs the sc-network benchmarks and publishes the results to gh-pages
# via github-action-benchmark.
name: Networking Benchmarks

on:
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

permissions:
  contents: read

jobs:
  preflight:
    uses: ./.github/workflows/reusable-preflight.yml

  build:
    timeout-minutes: 50
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER_BENCHMARK }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    strategy:
      fail-fast: false
      matrix:
        features:
          [
            { bench: "notifications_protocol" },
            { bench: "request_response_protocol" },
          ]
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Run Benchmarks
        id: run-benchmarks
        run: |
          mkdir -p ./charts
          cargo bench -p pezsc-network --bench ${{ matrix.features.bench }} -- --output-format bencher | grep "^test" | tee ./charts/${{ matrix.features.bench }}.txt || echo "Benchmarks failed"
          ls -lsa ./charts
      - name: Upload artifacts
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: ${{ matrix.features.bench }}-${{ github.sha }}
          path: ./charts

  publish-benchmarks:
    timeout-minutes: 60
    needs: [build]
    # FIX: this repository's default branch is 'main' (sibling workflows
    # trigger on pushes to 'main'); the old 'refs/heads/master' guard could
    # never match, so results were never published.
    if: github.ref == 'refs/heads/main'
    environment: subsystem-benchmarks
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          ref: gh-pages
          fetch-depth: 0
      # FIX: restore the default branch's tree ('main', not 'master').
      - run: git checkout main --
      - name: Download artifacts
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: notifications_protocol-${{ github.sha }}
          path: ./charts
      - name: Download artifacts
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: request_response_protocol-${{ github.sha }}
          path: ./charts
      - name: Setup git
        run: |
          # Fixes "detected dubious ownership" error in the ci
          git config --global --add safe.directory '*'
          ls -lsR ./charts
      - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        id: app-token
        with:
          app-id: ${{ secrets.PEZKUWI_GHPAGES_APP_ID }}
          private-key: ${{ secrets.PEZKUWI_GHPAGES_APP_KEY }}
      - name: Generate ${{ env.BENCH }}
        env:
          BENCH: notifications_protocol
        uses: benchmark-action/github-action-benchmark@4bdcce38c94cec68da58d012ac24b7b1155efe8b # v1.20.7
        with:
          tool: "cargo"
          name: ${{ env.BENCH }}
          output-file-path: ./charts/${{ env.BENCH }}.txt
          benchmark-data-dir-path: ./bench/${{ env.BENCH }}
          github-token: ${{ steps.app-token.outputs.token }}
          auto-push: true
      - name: Generate ${{ env.BENCH }}
        env:
          BENCH: request_response_protocol
        uses: benchmark-action/github-action-benchmark@4bdcce38c94cec68da58d012ac24b7b1155efe8b # v1.20.7
        with:
          tool: "cargo"
          name: ${{ env.BENCH }}
          output-file-path: ./charts/${{ env.BENCH }}.txt
          benchmark-data-dir-path: ./bench/${{ env.BENCH }}
          github-token: ${{ steps.app-token.outputs.token }}
          auto-push: true
+167
View File
@@ -0,0 +1,167 @@
# Runs the node subsystem regression benchmarks and publishes their results
# to gh-pages via github-action-benchmark.
name: Subsystem Benchmarks

on:
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

permissions:
  contents: read

jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  preflight:
    needs: isdraft
    uses: ./.github/workflows/reusable-preflight.yml

  build:
    timeout-minutes: 80
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    strategy:
      fail-fast: false
      matrix:
        features:
          [
            {
              name: "pezkuwi-availability-recovery",
              bench: "availability-recovery-regression-bench",
            },
            {
              name: "pezkuwi-availability-distribution",
              bench: "availability-distribution-regression-bench",
            },
            {
              name: "pezkuwi-node-core-approval-voting",
              bench: "approval-voting-regression-bench",
            },
            {
              name: "pezkuwi-statement-distribution",
              bench: "statement-distribution-regression-bench",
            },
            {
              name: "pezkuwi-node-core-dispute-coordinator",
              bench: "dispute-coordinator-regression-bench",
            },
          ]
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Check Rust
        run: |
          rustup show
          rustup +nightly show
      - name: Run Benchmarks
        id: run-benchmarks
        run: |
          cargo bench -p ${{ matrix.features.name }} --bench ${{ matrix.features.bench }} --features subsystem-benchmarks
          ls -lsa ./charts
      - name: Upload artifacts
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: ${{ matrix.features.bench }}
          path: ./charts

  publish-benchmarks:
    timeout-minutes: 60
    needs: [build]
    # FIX: this repository's default branch is 'main' (sibling workflows
    # trigger on pushes to 'main'); the old 'refs/heads/master' guard could
    # never match, so results were never published.
    if: github.ref == 'refs/heads/main'
    environment: subsystem-benchmarks
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          ref: gh-pages
          fetch-depth: 0
      # FIX: restore the default branch's tree ('main', not 'master').
      - run: git checkout main --
      - name: Download artifacts
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          path: ./charts
      - name: Setup git
        run: |
          # Fixes "detected dubious ownership" error in the ci
          git config --global --add safe.directory '*'
          ls -lsR ./charts
      - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        id: app-token
        with:
          app-id: ${{ secrets.PEZKUWI_GHPAGES_APP_ID }}
          private-key: ${{ secrets.PEZKUWI_GHPAGES_APP_KEY }}
      - name: Generate ${{ env.BENCH }}
        env:
          BENCH: availability-recovery-regression-bench
        uses: benchmark-action/github-action-benchmark@4bdcce38c94cec68da58d012ac24b7b1155efe8b # v1.20.7
        with:
          tool: "customSmallerIsBetter"
          name: ${{ env.BENCH }}
          output-file-path: ./charts/${{ env.BENCH }}/${{ env.BENCH }}.json
          benchmark-data-dir-path: ./bench/${{ env.BENCH }}
          github-token: ${{ steps.app-token.outputs.token }}
          auto-push: true
          max-items-in-chart: 500
      - name: Generate ${{ env.BENCH }}
        env:
          BENCH: availability-distribution-regression-bench
        uses: benchmark-action/github-action-benchmark@4bdcce38c94cec68da58d012ac24b7b1155efe8b # v1.20.7
        with:
          tool: "customSmallerIsBetter"
          name: ${{ env.BENCH }}
          output-file-path: ./charts/${{ env.BENCH }}/${{ env.BENCH }}.json
          benchmark-data-dir-path: ./bench/${{ env.BENCH }}
          github-token: ${{ steps.app-token.outputs.token }}
          auto-push: true
          max-items-in-chart: 500
      - name: Generate ${{ env.BENCH }}
        env:
          BENCH: approval-voting-regression-bench
        uses: benchmark-action/github-action-benchmark@4bdcce38c94cec68da58d012ac24b7b1155efe8b # v1.20.7
        with:
          tool: "customSmallerIsBetter"
          name: ${{ env.BENCH }}
          output-file-path: ./charts/${{ env.BENCH }}/${{ env.BENCH }}.json
          benchmark-data-dir-path: ./bench/${{ env.BENCH }}
          github-token: ${{ steps.app-token.outputs.token }}
          auto-push: true
          max-items-in-chart: 500
      - name: Generate ${{ env.BENCH }}
        env:
          BENCH: statement-distribution-regression-bench
        uses: benchmark-action/github-action-benchmark@4bdcce38c94cec68da58d012ac24b7b1155efe8b # v1.20.7
        with:
          tool: "customSmallerIsBetter"
          name: ${{ env.BENCH }}
          output-file-path: ./charts/${{ env.BENCH }}/${{ env.BENCH }}.json
          benchmark-data-dir-path: ./bench/${{ env.BENCH }}
          github-token: ${{ steps.app-token.outputs.token }}
          auto-push: true
          max-items-in-chart: 500
      - name: Generate ${{ env.BENCH }}
        env:
          BENCH: dispute-coordinator-regression-bench
        uses: benchmark-action/github-action-benchmark@4bdcce38c94cec68da58d012ac24b7b1155efe8b # v1.20.7
        with:
          tool: "customSmallerIsBetter"
          name: ${{ env.BENCH }}
          output-file-path: ./charts/${{ env.BENCH }}/${{ env.BENCH }}.json
          benchmark-data-dir-path: ./bench/${{ env.BENCH }}
          github-token: ${{ steps.app-token.outputs.token }}
          auto-push: true
          max-items-in-chart: 500
+130
View File
@@ -0,0 +1,130 @@
# Miscellaneous build checks: PolkaVM runtimes, revive stable UAPI, subkey.
name: Build Misc

on:
  push:
    branches:
      - main
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]
  merge_group:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

permissions:
  contents: read

jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  preflight:
    needs: isdraft
    uses: ./.github/workflows/reusable-preflight.yml

  build-runtimes-polkavm:
    timeout-minutes: 75
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Clean cargo cache to free disk space
        run: |
          cargo clean 2>/dev/null || true
          rm -rf ~/.cargo/registry/cache 2>/dev/null || true
          rm -rf ~/.cargo/git/db 2>/dev/null || true
      - name: Check Rust
        run: |
          rustup show
          rustup +nightly show
      - name: Build
        env:
          # NOTE(review): renamed from the substrate-era variable during the
          # rebrand — confirm the build scripts read this new name.
          BIZINIKIWI_RUNTIME_TARGET: riscv
        id: required
        run: cargo check -p pez-minimal-template-runtime -p zagros-runtime -p pezkuwichain-runtime -p pezkuwi-test-runtime
      - name: Stop all workflows if failed
        if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
        uses: ./.github/actions/workflow-stopper
        with:
          app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
          app-key: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_KEY }}

  # As part of our test fixtures we build the revive-uapi crate always with the `unstable-hostfn` feature.
  # To make sure that it won't break for users downstream which are not setting this feature
  # It doesn't need to produce working code so we just use a similar enough RISC-V target
  check-revive-stable-uapi-polkavm:
    timeout-minutes: 30
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Check Rust
        run: |
          rustup show
          rustup +nightly show
      - name: Build
        id: required
        run: cargo +nightly check -p pezpallet-revive-uapi --no-default-features --target riscv64imac-unknown-none-elf -Zbuild-std=core
      - name: Stop all workflows if failed
        if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
        uses: ./.github/actions/workflow-stopper
        with:
          app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
          app-key: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_KEY }}

  build-pez-subkey:
    timeout-minutes: 20
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Check Rust
        run: |
          rustup show
          rustup +nightly show
      - name: Build
        env:
          SKIP_WASM_BUILD: 1
        id: required
        run: |
          cd ./bizinikiwi/bin/utils/pez-subkey
          cargo build --locked --release
      - name: Stop all workflows if failed
        if: ${{ failure() && steps.required.conclusion == 'failure' && !github.event.pull_request.head.repo.fork }}
        uses: ./.github/actions/workflow-stopper
        with:
          app-id: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_ID }}
          app-key: ${{ secrets.WORKFLOW_STOPPER_RUNNER_APP_KEY }}

  # FIX: the job id was mangled by the rebrand sed ('misc' -> 'mipezsc');
  # restored to 'misc'. The displayed `name:` (what branch protection keys
  # on) is unchanged.
  confirm-required-build-misc-jobs-passed:
    runs-on: ubuntu-latest
    name: All build misc jobs passed
    # If any new job gets added, be sure to add it to this array
    # NOTE(review): check-revive-stable-uapi-polkavm is not listed here —
    # confirm whether it is intentionally non-required.
    needs: [build-runtimes-polkavm, build-pez-subkey]
    if: always() && !cancelled()
    steps:
      - run: |
          tee resultfile <<< '${{ toJSON(needs) }}'
          FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
          if [ $FAILURES -gt 0 ]; then
            echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
            exit 1
          else
            echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
          fi
@@ -0,0 +1,82 @@
# Builds the eth-rpc Docker image on every PR, and pushes it to Docker Hub on
# pushes to the default branch.
name: Build and push ETH-RPC image

on:
  push:
    branches:
      - main
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  ETH_RPC_IMAGE_NAME: "docker.io/paritypr/eth-rpc"

jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml

  set-variables:
    # This workaround sets the container image for each job using 'set-variables' job output.
    # env variables don't work for PR from forks, so we need to use outputs.
    runs-on: ubuntu-latest
    needs: isdraft
    outputs:
      VERSION: ${{ steps.version.outputs.VERSION }}
    steps:
      - name: Define version
        id: version
        run: |
          export COMMIT_SHA=${{ github.sha }}
          export COMMIT_SHA_SHORT=${COMMIT_SHA:0:8}
          export REF_NAME=${{ github.ref_name }}
          export REF_SLUG=${REF_NAME//\//_}
          VERSION=${REF_SLUG}-${COMMIT_SHA_SHORT}
          echo "VERSION=${REF_SLUG}-${COMMIT_SHA_SHORT}" >> $GITHUB_OUTPUT
          echo "set VERSION=${VERSION}"

  # Build-only (no push) — runs for PRs to validate the Dockerfile.
  build_docker:
    name: Build docker images
    runs-on: ubuntu-latest
    needs: [set-variables]
    env:
      VERSION: ${{ needs.set-variables.outputs.VERSION }}
    steps:
      - name: Check out the repo
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Build eth-rpc Docker image
        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
        with:
          context: .
          file: ./bizinikiwi/frame/revive/rpc/dockerfiles/eth-rpc/Dockerfile
          push: false
          tags: |
            ${{ env.ETH_RPC_IMAGE_NAME }}:${{ env.VERSION }}

  build_push_docker:
    name: Build and push docker images
    runs-on: ubuntu-latest
    # FIX: this workflow only triggers pushes on 'main' (see on.push.branches
    # above), so the previous 'refs/heads/master' guard meant this job could
    # never run.
    if: github.ref == 'refs/heads/main'
    needs: [set-variables]
    env:
      VERSION: ${{ needs.set-variables.outputs.VERSION }}
    steps:
      - name: Check out the repo
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Log in to Docker Hub
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          username: ${{ secrets.PEZKUWI_DOCKERHUB_USERNAME }}
          password: ${{ secrets.PEZKUWI_DOCKERHUB_PASSWORD }}
      - name: Build eth-rpc Docker image
        uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
        with:
          context: .
          file: ./bizinikiwi/frame/revive/rpc/dockerfiles/eth-rpc/Dockerfile
          push: true
          tags: |
            ${{ env.ETH_RPC_IMAGE_NAME }}:${{ env.VERSION }}
+723
View File
@@ -0,0 +1,723 @@
# GHA for build-*
name: Build and push images
on:
push:
branches:
- main
pull_request:
types: [opened, synchronize, reopened, ready_for_review]
merge_group:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
permissions: read-all
env:
COMMIT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
jobs:
#
#
#
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/reusable-preflight.yml
### Build ########################
#
#
#
build-linux-stable:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
env:
RUST_TOOLCHAIN: stable
# Enable debug assertions since we are running optimized builds for testing
# but still want to have debug assertions.
RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings"
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: build
id: required
run: |
cargo build --locked --profile testnet --features pyroscope,fast-runtime --bin pezkuwi --bin pezkuwi-prepare-worker --bin pezkuwi-execute-worker
PEZKUWICHAIN_EPOCH_DURATION=10 ./pezkuwi/scripts/build-only-wasm.sh pezkuwichain-runtime $(pwd)/runtimes/pezkuwichain-runtime-10/
PEZKUWICHAIN_EPOCH_DURATION=100 ./pezkuwi/scripts/build-only-wasm.sh pezkuwichain-runtime $(pwd)/runtimes/pezkuwichain-runtime-100/
PEZKUWICHAIN_EPOCH_DURATION=600 ./pezkuwi/scripts/build-only-wasm.sh pezkuwichain-runtime $(pwd)/runtimes/pezkuwichain-runtime-600/
pwd
ls -alR runtimes
- name: pack artifacts
run: |
mkdir -p ./artifacts
VERSION="${{ needs.preflight.outputs.SOURCE_REF_SLUG }}" # will be tag or branch name
mv ./target/testnet/pezkuwi ./artifacts/.
mv ./target/testnet/pezkuwi-prepare-worker ./artifacts/.
mv ./target/testnet/pezkuwi-execute-worker ./artifacts/.
mv ./runtimes/ ./artifacts/.
cd artifacts/
sha256sum pezkuwi | tee pezkuwi.sha256
shasum -c pezkuwi.sha256
cd ../
EXTRATAG="${{ needs.preflight.outputs.SOURCE_REF_SLUG }}-${COMMIT_SHA}"
echo "Pezkuwi version = ${VERSION} (EXTRATAG = ${EXTRATAG})"
echo -n ${VERSION} > ./artifacts/VERSION
echo -n ${EXTRATAG} > ./artifacts/EXTRATAG
echo -n ${GITHUB_RUN_ID} > ./artifacts/BUILD_LINUX_JOB_ID
RELEASE_VERSION=$(./artifacts/pezkuwi -V | awk '{print $2}'| awk -F "-" '{print $1}')
echo -n "v${RELEASE_VERSION}" > ./artifacts/BUILD_RELEASE_VERSION
cp -r docker/* ./artifacts
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
#
#
#
build-linux-stable-pezcumulus:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
env:
RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings"
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: build
id: required
run: |
echo "___Building a binary, please refrain from using it in production since it goes with the debug assertions.___"
cargo build --release --locked -p pezkuwi-teyrchain-bin --bin pezkuwi-teyrchain
echo "___Packing the artifacts___"
mkdir -p ./artifacts
mv ./target/release/pezkuwi-teyrchain ./artifacts/.
echo "___The VERSION is either a tag name or the curent branch if triggered not by a tag___"
echo ${{ needs.preflight.outputs.SOURCE_REF_SLUG }} | tee ./artifacts/VERSION
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
#
#
#
build-test-teyrchain:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
env:
RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings"
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: build
id: required
run: |
echo "___Building a binary, please refrain from using it in production since it goes with the debug assertions.___"
cargo build --release --locked -p pezcumulus-test-service --bin test-teyrchain
- name: pack artifacts
run: |
echo "___Packing the artifacts___"
mkdir -p ./artifacts
mv ./target/release/test-teyrchain ./artifacts/.
mkdir -p ./artifacts/zombienet
mv ./target/release/wbuild/pezcumulus-test-runtime/wasm_binary_spec_version_incremented.rs.compact.compressed.wasm ./artifacts/zombienet/.
mv ./target/release/wbuild/pezcumulus-test-runtime/wasm_binary_elastic_scaling.rs.compact.compressed.wasm ./artifacts/zombienet/.
mv ./target/release/wbuild/pezcumulus-test-runtime/wasm_binary_elastic_scaling_12s_slot.rs.compact.compressed.wasm ./artifacts/zombienet/.
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
#
#
#
build-test-collators:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: build
id: required
run: |
cargo build --locked --profile testnet -p test-teyrchain-adder-collator
cargo build --locked --profile testnet -p test-teyrchain-undying-collator
- name: pack artifacts
run: |
mkdir -p ./artifacts
mv ./target/testnet/adder-collator ./artifacts/.
mv ./target/testnet/undying-collator ./artifacts/.
echo -n "${{ needs.preflight.outputs.SOURCE_REF_SLUG }}" > ./artifacts/VERSION
echo -n "${{ needs.preflight.outputs.SOURCE_REF_SLUG }}-${COMMIT_SHA}" > ./artifacts/EXTRATAG
echo "adder-collator version = $(cat ./artifacts/VERSION) (EXTRATAG = $(cat ./artifacts/EXTRATAG))"
echo "undying-collator version = $(cat ./artifacts/VERSION) (EXTRATAG = $(cat ./artifacts/EXTRATAG))"
cp -r ./docker/* ./artifacts
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
#
#
#
build-malus:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: build
id: required
run: |
cargo build --locked --profile testnet -p pezkuwi-test-malus --bin malus --bin pezkuwi-prepare-worker --bin pezkuwi-execute-worker
- name: pack artifacts
run: |
mkdir -p ./artifacts
mv ./target/testnet/malus ./artifacts/.
mv ./target/testnet/pezkuwi-execute-worker ./artifacts/.
mv ./target/testnet/pezkuwi-prepare-worker ./artifacts/.
echo -n "${{ needs.preflight.outputs.SOURCE_REF_SLUG }}" > ./artifacts/VERSION
echo -n "${{ needs.preflight.outputs.SOURCE_REF_SLUG }}-${COMMIT_SHA}" > ./artifacts/EXTRATAG
echo "pezkuwi-test-malus = $(cat ./artifacts/VERSION) (EXTRATAG = $(cat ./artifacts/EXTRATAG))"
cp -r ./docker/* ./artifacts
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
#
#
#
  # Release build of the bizinikiwi node (package pez-staging-node-cli) with a
  # sha256 checksum and Dockerfile packed next to the binary.
  build-linux-bizinikiwi:
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    timeout-minutes: 60
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: build
        id: required
        run: |
          mkdir -p ./artifacts/bizinikiwi/
          WASM_BUILD_NO_COLOR=1 cargo build --locked --release -p pez-staging-node-cli
          ls -la target/release/
      # Tag builds record the git tag as VERSION; otherwise the version is
      # taken from the binary's own `--version` output.
      - name: pack artifacts
        shell: bash
        run: |
          mv target/release/bizinikiwi-node ./artifacts/bizinikiwi/bizinikiwi
          echo -n "Bizinikiwi version = "
          if [[ "${{ github.ref }}" == "refs/tags/"* ]]; then
            echo "${{ github.ref_name }}" | tee ./artifacts/bizinikiwi/VERSION;
          else
            ./artifacts/bizinikiwi/bizinikiwi --version |
              cut -d ' ' -f 2 | tee ./artifacts/bizinikiwi/VERSION;
          fi
          sha256sum ./artifacts/bizinikiwi/bizinikiwi | tee ./artifacts/bizinikiwi/bizinikiwi.sha256
          cp -r ./docker/dockerfiles/bizinikiwi_injected.Dockerfile ./artifacts/bizinikiwi/
      - name: tar
        run: tar -cvf artifacts.tar artifacts
      - name: upload artifacts
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
          path: artifacts.tar
          retention-days: 1
#
#
#
build-templates-node:
needs: [preflight]
runs-on: ${{ needs.preflight.outputs.RUNNER }}
timeout-minutes: 60
container:
image: ${{ needs.preflight.outputs.IMAGE }}
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
- name: build
id: required
run: |
cargo build --locked --package teyrchain-template-node --release
cargo build --locked --package pez-minimal-template-node --release
cargo build --locked --package pez-solochain-template-node --release
- name: pack artifacts
run: |
mkdir -p ./artifacts
mv ./target/release/teyrchain-template-node ./artifacts/.
mv ./target/release/pez-minimal-template-node ./artifacts/.
mv ./target/release/pez-solochain-template-node ./artifacts/.
echo -n "${{ needs.preflight.outputs.SOURCE_REF_SLUG }}" > ./artifacts/VERSION
echo -n "${{ needs.preflight.outputs.SOURCE_REF_SLUG }}-${COMMIT_SHA}" > ./artifacts/EXTRATAG
echo "pezkuwi-test-malus = $(cat ./artifacts/VERSION) (EXTRATAG = $(cat ./artifacts/EXTRATAG))"
- name: tar
run: tar -cvf artifacts.tar artifacts
- name: upload artifacts
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
path: artifacts.tar
retention-days: 1
  ### Build zombienet test artifacts ########################
  #
  #
  #
  # Packs the bridges testing framework (bridges/testing) into an artifact
  # consumed by the bridges-zombienet-tests image build.
  # NOTE(review): the build step compiles malus/worker binaries but the pack
  # step copies only bridges/testing — the build output is never packed.
  # Looks copy-pasted from build-malus; confirm whether the build is needed.
  prepare-bridges-zombienet-artifacts:
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    timeout-minutes: 60
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: build
        run: |
          cargo build --locked --profile testnet -p pezkuwi-test-malus --bin malus --bin pezkuwi-prepare-worker --bin pezkuwi-execute-worker
      - name: pack artifacts
        run: |
          mkdir -p ./artifacts/bridges-pezkuwi-sdk/bridges
          cp -r bridges/testing ./artifacts/bridges-pezkuwi-sdk/bridges/testing
      - name: tar
        run: tar -cvf artifacts.tar artifacts
      - name: upload artifacts
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
          path: artifacts.tar
          retention-days: 1
  #
  #
  #
  # Archives the pezkuwi zombienet-sdk test suite with cargo-nextest so the
  # triggered zombienet workflow can run it without rebuilding.
  prepare-pezkuwi-zombienet-artifacts:
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    timeout-minutes: 60
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: build
        run: |
          cargo nextest --manifest-path pezkuwi/zombienet-sdk-tests/Cargo.toml archive --locked --features zombie-metadata,zombie-ci --archive-file pezkuwi-zombienet-tests.tar.zst
      - name: pack artifacts
        run: |
          mkdir -p artifacts
          cp pezkuwi-zombienet-tests.tar.zst ./artifacts
      - name: tar
        run: tar -cvf artifacts.tar artifacts
      - name: upload artifacts
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
          path: artifacts.tar
          retention-days: 1
  #
  #
  #
  # Same as above, for the pezcumulus zombienet-sdk test suite.
  prepare-pezcumulus-zombienet-artifacts:
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    timeout-minutes: 60
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: build
        run: |
          cargo nextest --manifest-path pezcumulus/zombienet/zombienet-sdk/Cargo.toml archive --locked --features zombie-ci --archive-file pezcumulus-zombienet-tests.tar.zst
      - name: pack artifacts
        run: |
          mkdir -p artifacts
          cp pezcumulus-zombienet-tests.tar.zst ./artifacts
      - name: tar
        run: tar -cvf artifacts.tar artifacts
      - name: upload artifacts
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
          path: artifacts.tar
          retention-days: 1
  # Same as above, for the templates zombienet test suite.
  prepare-teyrchain-templates-zombienet-artifacts:
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    timeout-minutes: 60
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: build
        run: |
          cargo nextest --manifest-path templates/zombienet/Cargo.toml archive --locked --features zombienet --archive-file teyrchain-templates-zombienet-tests.tar.zst
      - name: pack artifacts
        run: |
          mkdir -p artifacts
          cp teyrchain-templates-zombienet-tests.tar.zst ./artifacts
      - name: tar
        run: tar -cvf artifacts.tar artifacts
      - name: upload artifacts
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: ${{ github.job }}-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
          path: artifacts.tar
          retention-days: 1
  ### Publish ########################
  #
  #
  #
  # Each build-push-image-* job downloads the matching build artifact,
  # unpacks it, and hands it to the shared build-push-image composite action.
  build-push-image-test-teyrchain:
    needs: [preflight, build-test-teyrchain]
    runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
    timeout-minutes: 60
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      # Artifact name must match what the producing job uploaded
      # (<job name>-<ref slug>).
      - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: build-test-teyrchain-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
      - name: tar
        run: tar -xvf artifacts.tar
      - name: build and push image
        uses: ./.github/actions/build-push-image
        with:
          image-name: "test-teyrchain"
          dockerfile: "docker/dockerfiles/test-teyrchain_injected.Dockerfile"
          username: ${{ secrets.PEZKUWI_DOCKERHUB_USERNAME }}
          password: ${{ secrets.PEZKUWI_DOCKERHUB_PASSWORD }}
  #
  #
  #
  build-push-image-pezkuwi-debug:
    needs: [preflight, build-linux-stable]
    runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
    timeout-minutes: 60
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: build-linux-stable-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
      - name: tar
        run: tar -xvf artifacts.tar
      - name: build and push image
        uses: ./.github/actions/build-push-image
        with:
          image-name: "pezkuwi-debug"
          dockerfile: "docker/dockerfiles/pezkuwi/pezkuwi_injected_debug.Dockerfile"
          username: ${{ secrets.PEZKUWI_DOCKERHUB_USERNAME }}
          password: ${{ secrets.PEZKUWI_DOCKERHUB_PASSWORD }}
  #
  #
  #
  # "colander" image wraps the test collators built by build-test-collators.
  build-push-image-colander:
    needs: [preflight, build-test-collators]
    runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
    timeout-minutes: 60
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: build-test-collators-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
      - name: tar
        run: tar -xvf artifacts.tar
      - name: build and push image
        uses: ./.github/actions/build-push-image
        with:
          image-name: "colander"
          dockerfile: "docker/dockerfiles/collator_injected.Dockerfile"
          username: ${{ secrets.PEZKUWI_DOCKERHUB_USERNAME }}
          password: ${{ secrets.PEZKUWI_DOCKERHUB_PASSWORD }}
  #
  #
  #
  build-push-image-malus:
    needs: [preflight, build-malus]
    runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
    timeout-minutes: 60
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: build-malus-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
      - name: tar
        run: tar -xvf artifacts.tar
      - name: build and push image
        uses: ./.github/actions/build-push-image
        with:
          image-name: "malus"
          dockerfile: "docker/dockerfiles/malus_injected.Dockerfile"
          username: ${{ secrets.PEZKUWI_DOCKERHUB_USERNAME }}
          password: ${{ secrets.PEZKUWI_DOCKERHUB_PASSWORD }}
  #
  #
  #
  build-push-image-bizinikiwi-pr:
    needs: [preflight, build-linux-bizinikiwi]
    runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
    timeout-minutes: 60
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: build-linux-bizinikiwi-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
      - name: tar
        run: tar -xvf artifacts.tar
      - name: build and push image
        uses: ./.github/actions/build-push-image
        with:
          image-name: "bizinikiwi"
          dockerfile: "docker/dockerfiles/bizinikiwi_injected.Dockerfile"
          username: ${{ secrets.PEZKUWI_DOCKERHUB_USERNAME }}
          password: ${{ secrets.PEZKUWI_DOCKERHUB_PASSWORD }}
#
#
#
  # unlike other images, bridges+zombienet image is based on Zombienet image that pulls required binaries
  # from other fresh images (pezkuwi and pezcumulus)
  build-push-image-bridges-zombienet-tests:
    needs:
      [
        preflight,
        build-linux-stable,
        build-linux-stable-pezcumulus,
        prepare-bridges-zombienet-artifacts,
      ]
    runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
    timeout-minutes: 60
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      # Three artifacts are merged into one ./artifacts tree; artifacts.tar is
      # removed after each extraction so the next download does not collide.
      - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: build-linux-stable-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
      - name: tar
        run: |
          tar -xvf artifacts.tar
          rm artifacts.tar
      - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: build-linux-stable-pezcumulus-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
      - name: tar
        run: |
          tar -xvf artifacts.tar
          rm artifacts.tar
      - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: prepare-bridges-zombienet-artifacts-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
      - name: tar
        run: |
          tar -xvf artifacts.tar
          rm artifacts.tar
      - name: build and push image
        uses: ./.github/actions/build-push-image
        with:
          image-name: "bridges-zombienet-tests"
          dockerfile: "docker/dockerfiles/bridges_zombienet_tests_injected.Dockerfile"
          username: ${{ secrets.PEZKUWI_DOCKERHUB_USERNAME }}
          password: ${{ secrets.PEZKUWI_DOCKERHUB_PASSWORD }}
  #
  #
  #
  build-push-image-pezkuwi-teyrchain-debug:
    needs: [preflight, build-linux-stable-pezcumulus]
    runs-on: ${{ needs.preflight.outputs.RUNNER_DEFAULT }}
    timeout-minutes: 60
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: build-linux-stable-pezcumulus-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
      - name: tar
        run: tar -xvf artifacts.tar
      - name: build and push image
        uses: ./.github/actions/build-push-image
        with:
          image-name: "pezkuwi-teyrchain-debug"
          dockerfile: "docker/dockerfiles/pezkuwi-teyrchain/pezkuwi-teyrchain-debug_unsigned_injected.Dockerfile"
          username: ${{ secrets.PEZKUWI_DOCKERHUB_USERNAME }}
          password: ${{ secrets.PEZKUWI_DOCKERHUB_PASSWORD }}
confirm-required-build-jobs-passed:
runs-on: ubuntu-latest
name: All builds passed
# If any new job gets added, be sure to add it to this array
needs:
- build-linux-stable
- build-linux-stable-pezcumulus
- build-test-teyrchain
- build-test-collators
- build-malus
- build-linux-bizinikiwi
- build-templates-node
if: always() && !cancelled()
outputs:
build_success: ${{ steps.check_success.outputs.build_success }}
steps:
- name: Check build success
id: check_success
run: |
tee resultfile <<< '${{ toJSON(needs) }}'
FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
if [ $FAILURES -gt 0 ]; then
echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
echo "build_success=false" >> $GITHUB_OUTPUT
exit 1
else
echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
echo "build_success=true" >> $GITHUB_OUTPUT
fi
  # Fan out to the per-component zombienet workflows once all required builds
  # succeeded; build_run_id lets them download this run's artifacts.
  trigger-zombienet-pezkuwi:
    needs: [preflight, confirm-required-build-jobs-passed]
    if: ${{ needs.confirm-required-build-jobs-passed.outputs.build_success == 'true' }}
    uses: ./.github/workflows/zombienet_pezkuwi.yml
    with:
      build_run_id: ${{ github.run_id }}
      ref_slug: ${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
  trigger-zombienet-pezcumulus:
    needs: [preflight, confirm-required-build-jobs-passed]
    if: ${{ needs.confirm-required-build-jobs-passed.outputs.build_success == 'true' }}
    uses: ./.github/workflows/zombienet_pezcumulus.yml
    with:
      build_run_id: ${{ github.run_id }}
      ref_slug: ${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
  trigger-zombienet-bizinikiwi:
    needs: [preflight, confirm-required-build-jobs-passed]
    if: ${{ needs.confirm-required-build-jobs-passed.outputs.build_success == 'true' }}
    uses: ./.github/workflows/zombienet_bizinikiwi.yml
    with:
      build_run_id: ${{ github.run_id }}
      ref_slug: ${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
  trigger-zombienet-teyrchain-template:
    needs: [preflight, confirm-required-build-jobs-passed]
    if: ${{ needs.confirm-required-build-jobs-passed.outputs.build_success == 'true' }}
    uses: ./.github/workflows/zombienet_teyrchain-template.yml
    with:
      build_run_id: ${{ github.run_id }}
      ref_slug: ${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
  # Gate over the triggered zombienet workflows; fails if any of them failed.
  confirm-zombienet-tests-passed:
    runs-on: ubuntu-latest
    name: All zombienet tests passed
    needs:
      - trigger-zombienet-pezkuwi
      - trigger-zombienet-pezcumulus
      - trigger-zombienet-bizinikiwi
      - trigger-zombienet-teyrchain-template
    if: always() && !cancelled()
    steps:
      - name: Check zombienet success
        id: check_success
        run: |
          tee resultfile <<< '${{ toJSON(needs) }}'
          FAILURES=$(grep -c '"result": "failure"' resultfile || true)
          if [ "$FAILURES" -gt 0 ]; then
            echo "### At least one zombienet job failed ❌" >> $GITHUB_STEP_SUMMARY
            exit 1
          else
            echo '### Good job! All zombienet jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
          fi
@@ -0,0 +1,127 @@
# Runs `cargo check` over each runtime family whenever runtime sources change.
name: Check Cargo Check Runtimes
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
on:
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]
    paths:
      - "pezcumulus/teyrchains/runtimes/*"
# Jobs in this workflow depend on each other, only for limiting peak amount of spawned workers
jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  preflight:
    needs: isdraft
    uses: ./.github/workflows/reusable-preflight.yml
  check-runtime-assets:
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    needs: [preflight]
    timeout-minutes: 20
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Run cargo check
        uses: ./.github/actions/cargo-check-runtimes
        with:
          root: pezcumulus/teyrchains/runtimes/assets
  # Chained on check-runtime-assets only to limit concurrent workers.
  check-runtime-collectives:
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    needs: [check-runtime-assets, preflight]
    timeout-minutes: 20
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Run cargo check
        uses: ./.github/actions/cargo-check-runtimes
        with:
          root: pezcumulus/teyrchains/runtimes/collectives
  check-runtime-coretime:
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    needs: [check-runtime-assets, preflight]
    timeout-minutes: 20
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Run cargo check
        uses: ./.github/actions/cargo-check-runtimes
        with:
          root: pezcumulus/teyrchains/runtimes/coretime
  check-runtime-bridge-hubs:
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    needs: [preflight]
    timeout-minutes: 20
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Run cargo check
        uses: ./.github/actions/cargo-check-runtimes
        with:
          root: pezcumulus/teyrchains/runtimes/bridge-hubs
  check-runtime-contracts:
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    needs: [check-runtime-collectives, preflight]
    timeout-minutes: 20
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Run cargo check
        uses: ./.github/actions/cargo-check-runtimes
        with:
          root: pezcumulus/teyrchains/runtimes/contracts
  check-runtime-testing:
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    needs: [preflight]
    timeout-minutes: 20
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Run cargo check
        uses: ./.github/actions/cargo-check-runtimes
        with:
          root: pezcumulus/teyrchains/runtimes/testing
confirm-required-jobs-passed:
runs-on: ubuntu-latest
name: All check-runtime-* tests passed
# If any new job gets added, be sure to add it to this array
needs:
- check-runtime-assets
- check-runtime-collectives
- check-runtime-coretime
- check-runtime-bridge-hubs
- check-runtime-contracts
- check-runtime-testing
if: always() && !cancelled()
steps:
- run: |
tee resultfile <<< '${{ toJSON(needs) }}'
FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
if [ $FAILURES -gt 0 ]; then
echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
exit 1
else
echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
fi
@@ -0,0 +1,126 @@
# Quick sanity benchmarks (2 steps, 1 repeat) via pezframe-omni-bencher.
name: Short benchmarks (pezframe-omni-bencher)
on:
  workflow_dispatch:
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
env:
  ARTIFACTS_NAME: pezframe-omni-bencher-artifacts
jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  preflight:
    needs: isdraft
    uses: ./.github/workflows/reusable-preflight.yml
  quick-benchmarks-omni:
    runs-on: ${{ needs.preflight.outputs.RUNNER_BENCHMARK }}
    needs: [preflight]
    # NOTE(review): job outputs are strings, so this is truthy for any
    # non-empty value (including "false") — confirm preflight emits an empty
    # string when there are no Rust changes.
    if: ${{ needs.preflight.outputs.changes_rust }}
    env:
      RUSTFLAGS: "-C debug-assertions"
      RUST_BACKTRACE: "full"
      WASM_BUILD_NO_COLOR: 1
      WASM_BUILD_RUSTFLAGS: "-C debug-assertions"
      RUST_LOG: "frame_omni_bencher=info,pezkuwi_sdk_frame=info"
    timeout-minutes: 60
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      # Benchmark runners can run out of disk; best-effort cleanup first.
      - name: Clean cargo cache to free disk space
        run: |
          cargo clean 2>/dev/null || true
          rm -rf ~/.cargo/registry/cache 2>/dev/null || true
          rm -rf ~/.cargo/git/db 2>/dev/null || true
      - name: script
        id: required
        run: |
          cargo build --locked --quiet --release -p asset-hub-zagros-runtime --features runtime-benchmarks
          cargo run --locked --release -p pezframe-omni-bencher --quiet -- v1 benchmark pallet --runtime target/release/wbuild/asset-hub-zagros-runtime/asset_hub_zagros_runtime.compact.compressed.wasm --all --steps 2 --repeat 1 --quiet
  # Builds the job matrix from runtimes-matrix.json (entries with a package).
  runtime-matrix:
    runs-on: ubuntu-latest
    needs: [preflight]
    if: ${{ needs.preflight.outputs.changes_rust }}
    timeout-minutes: 30
    outputs:
      runtime: ${{ steps.runtime.outputs.runtime }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    name: Extract runtimes from matrix
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - id: runtime
        run: |
          RUNTIMES=$(jq '[.[] | select(.package != null)]' .github/workflows/runtimes-matrix.json)
          RUNTIMES=$(echo $RUNTIMES | jq -c .)
          echo "runtime=$RUNTIMES"
          echo "runtime=$RUNTIMES" >> $GITHUB_OUTPUT
  # One matrix cell per (runtime, bench command) pair.
  run-pezframe-omni-bencher:
    runs-on: ${{ needs.preflight.outputs.RUNNER_BENCHMARK }}
    needs: [preflight, runtime-matrix]
    if: ${{ needs.preflight.outputs.changes_rust }}
    timeout-minutes: 60
    strategy:
      fail-fast: false # keep running other workflows even if one fails, to see the logs of all possible failures
      matrix:
        runtime: ${{ fromJSON(needs.runtime-matrix.outputs.runtime) }}
        bench_cmd: ["pallet", "overhead"]
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    env:
      PACKAGE_NAME: ${{ matrix.runtime.package }}
      FLAGS: ${{ matrix.runtime.bench_flags }}
      RUST_LOG: "frame_omni_bencher=info,pezkuwi_sdk_frame=info"
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: script (benchmark ${{ matrix.bench_cmd }})
        id: required
        shell: bash
        run: |
          RUNTIME_BLOB_NAME=$(echo $PACKAGE_NAME | sed 's/-/_/g').compact.compressed.wasm
          RUNTIME_BLOB_PATH=./target/release/wbuild/$PACKAGE_NAME/$RUNTIME_BLOB_NAME
          BENCH_CMD=${{ matrix.bench_cmd }}
          cargo build --release --locked -p $PACKAGE_NAME -p pezframe-omni-bencher --features=${{ matrix.runtime.bench_features }} --quiet
          echo "Running short $BENCH_CMD benchmarking for PACKAGE_NAME=$PACKAGE_NAME and RUNTIME_BLOB_PATH=$RUNTIME_BLOB_PATH"
          ls -lrt $RUNTIME_BLOB_PATH
          if [[ "$BENCH_CMD" == "pallet" ]]; then
            cmd="./target/release/pezframe-omni-bencher v1 benchmark pallet --runtime $RUNTIME_BLOB_PATH --all --steps 2 --repeat 1 $FLAGS"
          elif [[ "$BENCH_CMD" == "overhead" ]]; then
            cmd="./target/release/pezframe-omni-bencher v1 benchmark overhead --runtime $RUNTIME_BLOB_PATH"
          else
            echo "Error: Unknown BENCH_CMD value: $BENCH_CMD"
            exit 1
          fi
          echo "Running command: $cmd"
          eval "$cmd"
confirm-pezframe-omni-benchers-passed:
runs-on: ubuntu-latest
name: All benchmarks passed
needs: [quick-benchmarks-omni, run-pezframe-omni-bencher]
if: always() && !cancelled()
steps:
- run: |
tee resultfile <<< '${{ toJSON(needs) }}'
FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
if [ $FAILURES -gt 0 ]; then
echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
exit 1
else
echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
fi
+300
View File
@@ -0,0 +1,300 @@
name: Check the getting-started.sh script
# This workflow aims to make sure that the `getting-started.sh` script
# is functional and allows to build the templates
# on different operating systems.
#
# There are two jobs inside.
# One for systems that can run in a docker container, and one for macOS.
#
# Each job consists of:
# 1. Some necessary prerequisites for the workflow itself.
# 2. A first pass of the script, which will install dependencies and clone a template.
# 3. A second pass of the script, to make sure the behaviour is as expected.
# 4. Building the template - making sure it's buildable and runnable.
#
# The script is interacted with using the `expect` tool, which is available on all relevant systems.
# The steps are not re-used between macOS and other systems,
# because they are very similar but a little different.
# Additionally, macOS does NOT start from scratch here - for example, we have homebrew already installed.
#
# There are many combinations of systems, shells and templates.
# We test a selected handful of combinations here.
on:
  pull_request:
    paths:
      - ".github/workflows/check-getting-started.yml"
      - "scripts/getting-started.sh"
    types: [opened, synchronize, reopened, ready_for_review]
  schedule:
    - cron: "0 5 * * *"
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  check-getting-started:
    needs: isdraft
    strategy:
      fail-fast: true
      # Each matrix entry pins a distro container, a template, and the shell
      # used to invoke the script.
      matrix:
        include:
          - name: ubuntu
            container: ubuntu
            template: minimal
            shell: bash
          - name: debian
            container: debian
            template: teyrchain
            shell: sh
          - name: arch
            container: archlinux
            template: solochain
            shell: sh
          - name: fedora
            container: fedora
            template: teyrchain
            shell: sh
          - name: opensuse
            container: opensuse/tumbleweed
            template: solochain
            shell: sh
    runs-on: ubuntu-latest
    container: ${{ matrix.container }}:latest
    steps:
      # A minimal amount of prerequisites required before we can run the actual getting-started script,
      # which will install the rest of requirements.
      - name: Install ubuntu/debian prerequisites
        run: apt update && apt install -y expect sudo git
        if: contains(matrix.name, 'ubuntu') || contains(matrix.name, 'debian')
      - name: Install arch prerequisites
        run: pacman -Syu --needed --noconfirm expect sudo git
        if: contains(matrix.name, 'arch')
      - name: Install fedora prerequisites
        run: dnf --assumeyes install expect sudo git
        if: contains(matrix.name, 'fedora')
      - name: Install opensuse prerequisites
        run: zypper install --no-confirm expect sudo git
        if: contains(matrix.name, 'opensuse')
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set additional expect flags if necessary
        run: |
          # Add a debug flag to expect, if github is re-run with debug logging enabled.
          [ "${{ runner.debug }}" = "1" ] && EXPECT_FLAGS="-d" || EXPECT_FLAGS=""
          echo "EXPECT_FLAGS=${EXPECT_FLAGS}" >> $GITHUB_ENV
      # First pass: expects the Rust-install prompts and declines compilation.
      - name: Check the first run of the script
        run: |
          expect $EXPECT_FLAGS -c '
            set timeout 240
            spawn ${{ matrix.shell }} scripts/getting-started.sh
            expect_after {
              timeout { puts stderr "Timed out on an expect"; exit 1 }
              eof { puts stderr "EOF received on an expect"; exit 1 }
            }
            expect -nocase "Detected ${{ matrix.name }}"
            expect "Rust is not installed. Install it?" {
              send "y\r"
              expect "Proceed with standard installation (default - just press enter)" {
                send "\r"
                expect "Rust is installed now"
              }
            }
            expect "Setup the Rust environment" {
              send "y\r"
            }
            expect "start with one of the templates" {
              send "y\r"
            }
            expect -re "(.)\\) ${{ matrix.template }} template" {
              send "$expect_out(1,string)\r"
            }
            expect "compile the node?" {
              send "n\r"
            }
            expect eof
          '
        timeout-minutes: 15
      # Second pass: Rust is already installed and the template directory
      # already exists; the script must report both.
      - name: Check the second run of the script
        run: |
          expect $EXPECT_FLAGS -c '
            set timeout 120
            spawn ${{ matrix.shell }} scripts/getting-started.sh
            expect_after {
              timeout { puts stderr "Timed out on an expect"; exit 1 }
              eof { puts stderr "EOF received on an expect"; exit 1 }
            }
            expect "Rust already installed" {}
            expect "Setup the Rust environment" {
              send "n\r"
            }
            expect "start with one of the templates" {
              send "y\r"
            }
            expect -re "(.)\\) ${{ matrix.template }} template" {
              send "$expect_out(1,string)\r"
              expect "directory already exists" {}
            }
            expect "compile the node?" {
              send "n\r"
            }
            expect eof
          '
        timeout-minutes: 15
      - name: Compile the node outside of the script
        run: |
          . "$HOME/.cargo/env"
          cd ${{ matrix.template }}-template
          cargo build --release
        timeout-minutes: 120
      - name: Check that the binary is executable
        run: |
          . "$HOME/.cargo/env"
          cd ${{ matrix.template }}-template
          cargo run --release -- --help
        timeout-minutes: 5
  # macOS variant: Homebrew and Rust are already present on the runner, so the
  # expect script asserts the "already installed" paths instead.
  check-getting-started-macos:
    needs: isdraft
    strategy:
      fail-fast: true
      matrix:
        include:
          - template: teyrchain
            shell: sh
          - template: solochain
            shell: bash
    runs-on: macos-latest
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set additional expect flags if necessary
        run: |
          # Add a debug flag to expect, if github is re-run with debug logging enabled.
          [ "${{ runner.debug }}" = "1" ] && EXPECT_FLAGS="-d" || EXPECT_FLAGS=""
          echo "EXPECT_FLAGS=${EXPECT_FLAGS}" >> $GITHUB_ENV
      - name: Check the first run of the script
        run: |
          expect $EXPECT_FLAGS -c '
            set timeout 120
            spawn ${{ matrix.shell }} scripts/getting-started.sh
            expect_after {
              timeout { puts stderr "Timed out on an expect"; exit 1 }
              eof { puts stderr "EOF received on an expect"; exit 1 }
            }
            expect -nocase "Detected macOS"
            expect "Homebrew already installed"
            expect "Install cmake" {
              send "y\r"
            }
            expect "Rust already installed" {}
            expect "Setup the Rust environment" {
              send "y\r"
            }
            expect "start with one of the templates" {
              send "y\r"
            }
            expect -re "(.)\\) ${{ matrix.template }} template" {
              send "$expect_out(1,string)\r"
            }
            expect "compile the node?" {
              send "n\r"
            }
            expect eof
          '
        timeout-minutes: 15
      - name: Check the second run of the script
        run: |
          expect $EXPECT_FLAGS -c '
            set timeout 120
            spawn ${{ matrix.shell }} scripts/getting-started.sh
            expect_after {
              timeout { puts stderr "Timed out on an expect"; exit 1 }
              eof { puts stderr "EOF received on an expect"; exit 1 }
            }
            expect "Homebrew already installed"
            expect "Install cmake" {
              send "y\r"
            }
            expect "Rust already installed" {}
            expect "Setup the Rust environment" {
              send "n\r"
            }
            expect "start with one of the templates" {
              send "y\r"
            }
            expect -re "(.)\\) ${{ matrix.template }} template" {
              send "$expect_out(1,string)\r"
              expect "directory already exists" {}
            }
            expect "compile the node?" {
              send "n\r"
            }
            expect eof
          '
        timeout-minutes: 15
      - name: Compile the node outside of the script
        run: |
          . "$HOME/.cargo/env"
          cd ${{ matrix.template }}-template
          cargo build --release
        timeout-minutes: 120
      - name: Check that the binary is executable
        run: |
          . "$HOME/.cargo/env"
          cd ${{ matrix.template }}-template
          cargo run --release -- --help
        timeout-minutes: 5
+55
View File
@@ -0,0 +1,55 @@
# Advisory label check for PRs: warns when a PR has no labels and suggests a
# T-* type label. Skipped on main/master and in the merge queue.
name: Check labels
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
on:
  pull_request:
    types: [labeled, opened, synchronize, unlabeled]
  merge_group:
jobs:
  check-labels:
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - name: Check labels
        env:
          GITHUB_PR: ${{ github.event.pull_request.number }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          API_BASE: https://api.github.com/repos
          REPO: ${{ github.repository }}
        run: |
          # Quote the ref expansion and use POSIX `=` so the tests cannot
          # break when the expansion is empty or unusual (e.g. merge_group).
          if [ "${{ github.ref }}" = "refs/heads/master" ] || [ "${{ github.ref }}" = "refs/heads/main" ]; then
            echo "Skipping main/master"
            exit 0
          fi
          case "${{ github.ref }}" in
            *gh-readonly-queue*)
              echo "Skipping merge queue"
              exit 0
              ;;
          esac
          echo "REPO: ${REPO}"
          echo "GITHUB_PR: ${GITHUB_PR}"
          # Fetch the labels for the PR under test
          echo "Fetch the labels for $API_BASE/${REPO}/pulls/${GITHUB_PR}"
          labels=$( curl -H "Authorization: token ${GITHUB_TOKEN}" -s "$API_BASE/${REPO}/pulls/${GITHUB_PR}" | jq '.labels | .[] | .name' | tr "\n" "," )
          echo "Labels: ${labels}"
          # Basic label checks for Pezkuwi SDK
          # Check for required labels (customize as needed)
          if [ -z "${labels}" ]; then
            echo "::warning::No labels found on PR. Consider adding appropriate labels."
          else
            echo "Labels found: ${labels}"
            # Check for T- (type) labels
            if echo "${labels}" | grep -q '"T-'; then
              echo "Type label found"
            else
              echo "::notice::Consider adding a type label (T-*)"
            fi
          fi
          echo "Label check completed"
+98
View File
@@ -0,0 +1,98 @@
name: Check licenses

on:
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]
  merge_group:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

permissions:
  packages: read

jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  check-licenses:
    runs-on: ubuntu-latest
    needs: isdraft
    timeout-minutes: 10
    env:
      # NOTE(review): LICENSES is not referenced by the grep-based steps below —
      # presumably kept for a future license-scanner integration; confirm or drop.
      LICENSES: "'Apache-2.0' 'GPL-3.0-only' 'GPL-3.0-or-later WITH Classpath-exception-2.0' 'MIT-0' 'Unlicense'"
      NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v4.1.7
      - uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version: "18.x"
      # License check using grep-based approach (pezkuwichain license-scanner not available)
      # Each step samples at most 100 .rs files and emits a workflow warning
      # (never a failure) when a file's first 20 lines lack a license keyword.
      - name: Check the licenses in Pezkuwi
        run: |
          echo "Checking license headers in ./pezkuwi..."
          # Check for Apache-2.0 or GPL-3.0 license headers
          MISSING=$(find ./pezkuwi -name "*.rs" -type f | head -100 | while read f; do
            if ! head -20 "$f" | grep -qiE "(apache|gpl|mit|unlicense)"; then
              echo "$f"
            fi
          done)
          if [ -n "$MISSING" ]; then
            echo "::warning::Some files may be missing license headers (sample check)"
          fi
          echo "License check completed for pezkuwi"
      - name: Check the licenses in Pezcumulus
        run: |
          echo "Checking license headers in ./pezcumulus..."
          MISSING=$(find ./pezcumulus -name "*.rs" -type f | head -100 | while read f; do
            if ! head -20 "$f" | grep -qiE "(apache|gpl|mit|unlicense)"; then
              echo "$f"
            fi
          done)
          if [ -n "$MISSING" ]; then
            echo "::warning::Some files may be missing license headers (sample check)"
          fi
          echo "License check completed for pezcumulus"
      - name: Check the licenses in Bizinikiwi
        run: |
          echo "Checking license headers in ./bizinikiwi..."
          MISSING=$(find ./bizinikiwi -name "*.rs" -type f | head -100 | while read f; do
            if ! head -20 "$f" | grep -qiE "(apache|gpl|mit|unlicense)"; then
              echo "$f"
            fi
          done)
          if [ -n "$MISSING" ]; then
            echo "::warning::Some files may be missing license headers (sample check)"
          fi
          echo "License check completed for bizinikiwi"
  # NOTE(review): unlike check-licenses, this job has no `needs: isdraft`, so it
  # also runs on draft PRs — confirm whether that is intentional.
  check-product-references:
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - name: Checkout sources
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v4.1.7
      # Product reference check using grep (pezkuwichain license-scanner not available)
      - name: Check the product references in Pezkuwi
        run: |
          echo "Checking product references in ./pezkuwi..."
          # Sample check for Pezkuwi product name in license headers
          COUNT=$(find ./pezkuwi -name "*.rs" -type f | head -50 | xargs grep -l "Pezkuwi\|PEZKUWI" 2>/dev/null | wc -l || echo 0)
          echo "Found $COUNT files with Pezkuwi product reference"
      - name: Check the product references in Pezcumulus
        run: |
          echo "Checking product references in ./pezcumulus..."
          COUNT=$(find ./pezcumulus -name "*.rs" -type f | head -50 | xargs grep -l "Pezcumulus\|PEZCUMULUS" 2>/dev/null | wc -l || echo 0)
          echo "Found $COUNT files with Pezcumulus product reference"
      - name: Check the product references in Bizinikiwi
        run: |
          echo "Checking product references in ./bizinikiwi..."
          COUNT=$(find ./bizinikiwi -name "*.rs" -type f | head -50 | xargs grep -l "Bizinikiwi\|BIZINIKIWI" 2>/dev/null | wc -l || echo 0)
          echo "Found $COUNT files with Bizinikiwi product reference"
+48
View File
@@ -0,0 +1,48 @@
# Runs the lychee link checker over all Rust sources whenever code, prdoc
# files, or the checker's own configuration change.
name: Check links

on:
  pull_request:
    paths:
      - "**.rs"
      - "**.prdoc"
      - ".github/workflows/check-links.yml"
      - ".config/lychee.toml"
    types: [opened, synchronize, reopened, ready_for_review]
  merge_group:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

permissions:
  packages: read

jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  link-checker:
    runs-on: ubuntu-latest
    needs: isdraft
    timeout-minutes: 10
    steps:
      # The cache is keyed on the commit SHA, so each run writes a fresh entry;
      # restore-keys falls back to the most recently saved cache.
      - name: Restore lychee cache
        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
        with:
          path: .lycheecache
          key: cache-lychee-${{ github.sha }}
          # This should restore from the most recent one:
          restore-keys: cache-lychee-
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v4.1.0 (22. Sep 2023)
      - name: Lychee link checker
        uses: lycheeverse/lychee-action@a8c4c7cb88f0c7386610c35eb25108e448569cb0 # for v1.9.1 (10. Jan 2024)
        with:
          args: >-
            --config .config/lychee.toml
            --no-progress
            './**/*.rs'
          fail: true
        env:
          # To bypass GitHub rate-limit:
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
+82
View File
@@ -0,0 +1,82 @@
name: Check PRdoc

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

# NOTE(review): only triggered manually; on workflow_dispatch the
# github.event.pull_request.* expressions expand to empty strings, which is why
# the PR-dependent steps below are guarded on a non-empty PR number.
on:
  workflow_dispatch:

env:
  # NOTE: prdoc tool from pezkuwichain - using local prdoc check instead
  API_BASE: https://api.github.com/repos
  REPO: ${{ github.repository }}
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  GITHUB_PR: ${{ github.event.pull_request.number }}
  PRDOC_DOC: https://github.com/pezkuwichain/pezkuwi-sdk/blob/main/docs/contributor/prdoc.md

jobs:
  check-prdoc:
    runs-on: ubuntu-latest
    timeout-minutes: 10
    steps:
      - name: Checkout repo
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v4.1.7
      # Lightweight YAML sanity check of every prdoc/*.prdoc file; invalid YAML
      # only produces a warning annotation, it does not fail the job.
      - name: Check prdoc format
        run: |
          # NOTE: pezkuwichain/prdoc Docker image not available for Pezkuwi
          # Using simple file existence and YAML format check instead
          echo "Check prdoc format"
          echo "For PRDoc format, please refer to $PRDOC_DOC"
          # Check if prdoc directory exists
          if [ -d "prdoc" ]; then
            echo "PRDoc directory found"
            # Simple YAML validation
            for f in prdoc/*.prdoc; do
              if [ -f "$f" ]; then
                echo "Checking: $f"
                python3 -c "import yaml; yaml.safe_load(open('$f'))" || echo "::warning::Invalid YAML in $f"
              fi
            done
          else
            echo "::notice::No prdoc directory found"
          fi
      - name: Check if PRdoc is required
        if: github.event.pull_request.number != ''
        id: get-labels
        run: |
          # Fetch the labels for the PR under test
          echo "Fetch the labels for $API_BASE/${REPO}/pulls/${GITHUB_PR}"
          labels=$( curl -H "Authorization: token ${GITHUB_TOKEN}" -s "$API_BASE/${REPO}/pulls/${GITHUB_PR}" | jq '.labels | .[] | .name' | tr "\n" ",")
          echo "Labels: ${labels}"
          echo "labels=${labels}" >> "$GITHUB_OUTPUT"
      # Backport PRs carry the original PR number in their title; resolve it so
      # the prdoc file name (prdoc/pr_<N>.prdoc) matches the original PR.
      - name: Get Original PR number
        if: ${{ github.event.pull_request.number != '' && !contains(steps.get-labels.outputs.labels, 'R0') }}
        shell: bash
        env:
          PR_TITLE: ${{ github.event.pull_request.title }}
        run: |
          . ./.github/scripts/common/lib.sh
          original_pr_number=''
          echo "Checking PR title: $PR_TITLE"
          if [[ "$PR_TITLE" =~ 'Backport' ]]; then
            # Extracting the original PR number from the backport's PR title
            original_pr_number=$(extract_pr_number_from_pr_title "$PR_TITLE")
            echo "Extracted PR number: $original_pr_number"
          else
            original_pr_number=${{ github.event.pull_request.number }}
          fi
          echo "PR_NUMBER=$original_pr_number" >> $GITHUB_ENV
      - name: Validate prdoc for PR#${{ env.PR_NUMBER }}
        if: ${{ github.event.pull_request.number != '' && !contains(steps.get-labels.outputs.labels, 'R0') }}
        run: |
          echo "Validating PR#${{ env.PR_NUMBER }}"
          python3 --version
          python3 -m pip install cargo-workspace==1.2.1
          python3 .github/scripts/check-prdoc.py Cargo.toml prdoc/pr_${{ env.PR_NUMBER }}.prdoc
@@ -0,0 +1,108 @@
name: check-runtime-compatibility
# DISABLED: Pezkuwi does not have public RPC endpoints yet.
# Re-enable when public nodes are available at pezkuwichain.io
# To enable: remove 'if: false' from the job below

on:
  push:
    branches:
      - main
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]
  merge_group:
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

permissions: {}

jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  preflight:
    needs: isdraft
    uses: ./.github/workflows/reusable-preflight.yml
  # Builds each runtime and compares it against the live chain via
  # @pezkuwi-api/check-runtime, one matrix entry per network.
  check-runtime-compatibility:
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    # DISABLED until Pezkuwi public RPC endpoints are available
    if: false # ${{ needs.preflight.outputs.changes_rust }}
    timeout-minutes: 30
    needs: [preflight]
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    strategy:
      fail-fast: false
      matrix:
        network:
          [
            zagros,
            asset-hub-zagros,
            bridge-hub-zagros,
            collectives-zagros,
            coretime-zagros,
          ]
        include:
          - network: zagros
            package: zagros-runtime
            wasm: zagros_runtime.compact.compressed.wasm
            uri: "wss://try-runtime-zagros.pezkuwichain.io:443"
          - network: asset-hub-zagros
            package: asset-hub-zagros-runtime
            wasm: asset_hub_zagros_runtime.compact.compressed.wasm
            uri: "wss://zagros-asset-hub-rpc.pezkuwichain.io:443"
          - network: bridge-hub-zagros
            package: bridge-hub-zagros-runtime
            wasm: bridge_hub_zagros_runtime.compact.compressed.wasm
            uri: "wss://zagros-bridge-hub-rpc.pezkuwichain.io:443"
          - network: collectives-zagros
            package: collectives-zagros-runtime
            wasm: collectives_zagros_runtime.compact.compressed.wasm
            uri: "wss://zagros-collectives-rpc.pezkuwichain.io:443"
          - network: coretime-zagros
            package: coretime-zagros-runtime
            wasm: coretime_zagros_runtime.compact.compressed.wasm
            uri: "wss://zagros-coretime-rpc.pezkuwichain.io:443"
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Build Runtime
        id: build-runtime
        run: |
          echo "---------- Building ${{ matrix.package }} runtime with on-chain-release-build ----------"
          cargo build --release --locked -p ${{ matrix.package }} --features on-chain-release-build -q
      - name: Setup Node.js
        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
        with:
          node-version: "24.x"
          registry-url: "https://npm.pkg.github.com"
      - name: Check Runtime Compatibility
        id: check-compatibility
        run: |
          echo "---------- Checking runtime compatibility for ${{ matrix.network }} ----------"
          npx @pezkuwi-api/check-runtime@latest problems ${{ matrix.uri }} --wasm ./target/release/wbuild/${{ matrix.package }}/${{ matrix.wasm }}
  # name of this job must be unique across all workflows
  # otherwise GitHub will mark all these jobs as required
  # Aggregates the matrix results; while the matrix job is disabled its result
  # is "skipped", which counts as success here.
  confirm-runtime-compatibility-checks-passed:
    runs-on: ubuntu-latest
    name: All runtime compatibility checks passed
    needs: [check-runtime-compatibility]
    if: always() && !cancelled()
    steps:
      - run: |
          tee resultfile <<< '${{ toJSON(needs) }}'
          FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
          if [ $FAILURES -gt 0 ]; then
            echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
            exit 1
          else
            echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
          fi
@@ -0,0 +1,149 @@
name: check-runtime-migration
# DISABLED: Pezkuwi does not have public RPC endpoints yet.
# Re-enable when public nodes are available at pezkuwichain.io
# To enable: remove 'if: false' from the job below

on:
  push:
    branches:
      - main
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]
  # Take a snapshot at 5am when most SDK devs are not working.
  schedule:
    - cron: "0 5 * * *"
  merge_group:
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

permissions: {}

jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  preflight:
    needs: isdraft
    uses: ./.github/workflows/reusable-preflight.yml
  # More info can be found here: https://github.com/pezkuwichain/pezkuwi-sdk/pull/5865
  # Runs try-runtime's on-runtime-upgrade against a daily chain snapshot, one
  # matrix entry per network.
  check-runtime-migration:
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    # DISABLED until Pezkuwi public RPC endpoints are available
    if: false # ${{ needs.preflight.outputs.changes_rust }}
    # We need to set this to rather long to allow the snapshot to be created, but the average time
    # should be much lower.
    timeout-minutes: 60
    needs: [preflight]
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    strategy:
      fail-fast: false
      matrix:
        network:
          [
            zagros,
            asset-hub-zagros,
            bridge-hub-zagros,
            collectives-zagros,
            coretime-zagros,
          ]
        include:
          - network: zagros
            package: zagros-runtime
            wasm: zagros_runtime.compact.compressed.wasm
            uri: "wss://try-runtime-zagros.pezkuwichain.io:443"
            subcommand_extra_args: "--no-weight-warnings --blocktime 6000"
            command_extra_args: ""
          - network: asset-hub-zagros
            package: asset-hub-zagros-runtime
            wasm: asset_hub_zagros_runtime.compact.compressed.wasm
            uri: "wss://zagros-asset-hub-rpc.pezkuwichain.io:443"
            subcommand_extra_args: " --blocktime 6000"
            command_extra_args: ""
          - network: bridge-hub-zagros
            package: bridge-hub-zagros-runtime
            wasm: bridge_hub_zagros_runtime.compact.compressed.wasm
            uri: "wss://zagros-bridge-hub-rpc.pezkuwichain.io:443"
            subcommand_extra_args: " --blocktime 6000"
          - network: collectives-zagros
            package: collectives-zagros-runtime
            wasm: collectives_zagros_runtime.compact.compressed.wasm
            uri: "wss://zagros-collectives-rpc.pezkuwichain.io:443"
            command_extra_args: "--disable-spec-name-check"
            subcommand_extra_args: " --blocktime 6000"
          - network: coretime-zagros
            package: coretime-zagros-runtime
            wasm: coretime_zagros_runtime.compact.compressed.wasm
            uri: "wss://zagros-coretime-rpc.pezkuwichain.io:443"
            subcommand_extra_args: " --blocktime 6000"
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Download CLI
        run: |
          curl -sL https://github.com/pezkuwichain/try-runtime-cli/releases/download/v0.8.0/try-runtime-x86_64-unknown-linux-musl -o try-runtime
          chmod +x ./try-runtime
          echo "Using try-runtime-cli version:"
          ./try-runtime --version
      - name: Get Date
        id: get-date
        run: |
          echo "today=$(/bin/date -u "+%Y%m%d")" >> $GITHUB_OUTPUT
        shell: bash
      # The cache key includes today's date, so the snapshot is recreated at
      # most once per network per day.
      - name: Download Snapshot
        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
        with:
          path: snapshot.raw
          key: try-runtime-snapshot-${{ matrix.network }}-${{ steps.get-date.outputs.today }}
          # NOTE(review): save-always is deprecated in actions/cache v4 — confirm
          # it still has the intended effect or switch to the restore/save actions.
          save-always: true
      - name: Create Snapshot If Stale
        if: ${{ hashFiles('snapshot.raw') == '' }}
        run: |
          echo "Creating new snapshot for today (${{ steps.get-date.outputs.today }})"
          ./try-runtime create-snapshot --uri ${{ matrix.uri }} snapshot.raw
      - name: Build Runtime
        id: required1
        run: |
          echo "---------- Building ${{ matrix.package }} runtime ----------"
          cargo build --release --locked -p ${{ matrix.package }} --features try-runtime -q
      - name: Run Check
        id: required2
        run: |
          echo "Running ${{ matrix.network }} runtime migration check"
          export RUST_LOG=remote-ext=debug,runtime=debug
          echo "---------- Executing on-runtime-upgrade for ${{ matrix.network }} ----------"
          ./try-runtime ${{ matrix.command_extra_args }} \
            --runtime ./target/release/wbuild/${{ matrix.package }}/${{ matrix.wasm }} \
            on-runtime-upgrade --disable-spec-version-check --checks=all ${{ matrix.subcommand_extra_args }} snap -p snapshot.raw
          sleep 5
  # name of this job must be unique across all workflows
  # otherwise GitHub will mark all these jobs as required
  # Aggregates the matrix results; while the matrix job is disabled its result
  # is "skipped", which counts as success here.
  confirm-required-checks-passed:
    runs-on: ubuntu-latest
    name: All runtime migrations passed
    # If any new job gets added, be sure to add it to this array
    needs: [check-runtime-migration]
    if: always() && !cancelled()
    steps:
      - run: |
          tee resultfile <<< '${{ toJSON(needs) }}'
          FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
          if [ $FAILURES -gt 0 ]; then
            echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
            exit 1
          else
            echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
          fi
+249
View File
@@ -0,0 +1,249 @@
name: Check semver

# NOTE(review): only triggered manually; on workflow_dispatch the
# github.event.pull_request.* expressions are empty, and the script below
# exits early when no PR number is available.
on:
  workflow_dispatch:

concurrency:
  group: check-semver-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  TOOLCHAIN: nightly-2025-05-09

jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  preflight:
    needs: isdraft
    uses: ./.github/workflows/reusable-preflight.yml
  check-semver:
    runs-on: ubuntu-latest
    timeout-minutes: 90
    needs: [preflight]
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v4.1.7
        with:
          fetch-depth: 2
      # Branch "old" marks the parent commit; the semver tool diffs HEAD
      # against it (--since old).
      - name: extra git setup
        run: |
          git config --global --add safe.directory '*'
          git branch old HEAD^1
      - name: Comment If Backport
        if: ${{ startsWith(github.event.pull_request.base.ref, 'stable') }}
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          PR: ${{ github.event.pull_request.number }}
        run: |
          echo "This is a backport into stable."
          cat > msg.txt <<EOF
          This pull request is amending an existing release. Please proceed with extreme caution,
          as to not impact downstream teams that rely on the stability of it. Some things to consider:
          - Backports are only for 'patch' or 'minor' changes. No 'major' or other breaking change.
          - Should be a legit *fix* for some bug, not adding tons of new features.
          - Must either be already audited or not need an audit.
          <details><summary><i>Emergency Bypass</i></summary>
          <p>
          If you really need to bypass this check: add <code>validate: false</code> to each crate
          in the Prdoc where a breaking change is introduced. This will release a new major
          version of that crate and all its reverse dependencies and basically break the release.
          </p>
          </details>
          EOF
          gh issue comment $PR --edit-last -F msg.txt || gh issue comment $PR -F msg.txt
          echo "PRDOC_EXTRA_ARGS=--max-bump minor" >> $GITHUB_ENV
      - name: Rust Cache
        uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
        with:
          # NOTE(review): other workflows in this repo trigger on "main" —
          # confirm whether "master" here is intentional; as written the cache
          # is never saved on the default branch if it is "main".
          save-if: ${{ github.ref == 'refs/heads/master' }}
      - name: Rust compilation prerequisites
        run: |
          rustup default $TOOLCHAIN
          rustup target add wasm32-unknown-unknown --toolchain $TOOLCHAIN
          rustup component add rust-src --toolchain $TOOLCHAIN
      - name: Install kurdistan-tech-publish
        # Set the target dir to cache the build.
        run: CARGO_TARGET_DIR=./target/ cargo install kurdistan-tech-publish@0.10.6 --locked -q
      # Backport PRs carry the original PR number in their title; resolve it so
      # the prdoc file name (prdoc/pr_<N>.prdoc) matches the original PR.
      - name: Get original PR number
        shell: bash
        if: ${{ github.ref != 'refs/heads/master' }}
        env:
          PR_TITLE: ${{ github.event.pull_request.title }}
        run: |
          . ./.github/scripts/common/lib.sh
          original_pr_number=''
          echo "Checking PR title: $PR_TITLE"
          if [[ "$PR_TITLE" =~ 'Backport' ]]; then
            # Extracting the original PR number from the backport's PR title
            original_pr_number=$(extract_pr_number_from_pr_title "$PR_TITLE")
            echo "Extracted PR number: $original_pr_number"
          else
            original_pr_number=${{ github.event.pull_request.number }}
          fi
          echo "PR_NUMBER=$original_pr_number" >> $GITHUB_ENV
      - name: Check semver
        if: ${{ github.ref != 'refs/heads/master' }}
        shell: bash
        env:
          PRDOC_EXTRA_ARGS: ${{ env.PRDOC_EXTRA_ARGS }}
          PR: ${{ env.PR_NUMBER }}
          BASE_BRANCH: ${{ github.event.pull_request.base.ref }}
          PR_LABELS: ${{ toJson(github.event.pull_request.labels.*.name) }}
        run: |
          if [ -z "$PR" ]; then
            echo "Skipping master/merge queue"
            exit 0
          fi
          # Skip semver check if PR targets stable branch and has R0-no-crate-publish-require label
          if [[ "$BASE_BRANCH" =~ ^stable[0-9]{4}$ ]]; then
            if echo "$PR_LABELS" | grep -q "R0-no-crate-publish-require"; then
              echo "️ Skipping the SemVer check is not recommended and should only be done in rare cases: PR targets stable branch '$BASE_BRANCH' and has 'R0-no-crate-publish-require' label."
              exit 0
            fi
          fi
          export CARGO_TARGET_DIR=target
          export RUSTFLAGS='-A warnings -A missing_docs'
          export SKIP_WASM_BUILD=1
          prdoc_file="prdoc/pr_$PR.prdoc"
          # Always run kurdistan-tech-publish to check for all issues (mismatches and missing crates)
          # Capture output to check for specific error types
          parity_output=$(mktemp)
          if ! kurdistan-tech-publish --color always prdoc --since old --validate prdoc/pr_$PR.prdoc $PRDOC_EXTRA_ARGS -v --toolchain $TOOLCHAIN 2>&1 | tee "$parity_output"; then
            # Check if there are missing crates (files changed but not listed in prdoc)
            if grep -q "Files changed but crate not listed in PR Doc" "$parity_output"; then
              rm -f "$parity_output"
              cat <<EOF
          👋 Hello developer! The SemVer check found crates with changes that are not listed in the prdoc file.
          It is recommended to add all changed crates to the prdoc.
          Please check the output above and see the following links for more help:
          - https://github.com/pezkuwichain/pezkuwi-sdk/blob/master/docs/contributor/prdoc.md#record-semver-changes
          - https://forum.pezkuwi.network/t/psa-pezkuwi-sdk-to-use-semver
          Otherwise feel free to ask in the Merge Request or in Matrix chat.
          EOF
              exit 1
            fi
            rm -f "$parity_output"
            # Check if any crate has validate: false to override semver mismatch failures
            if grep -q "validate:[[:space:]]*false" "$prdoc_file"; then
              echo ""
              echo "️ Found crates with 'validate: false' in prdoc. Semver validation failure is overridden."
              echo "⚠️ Please ensure the semver override is justified and documented in the PR description."
            else
              # No validate: false found, fail with error message
              cat <<EOF
          👋 Hello developer! The SemVer information that you declared in the prdoc file did not match what the CI detected.
          Please check the output above and see the following links for more help:
          - https://github.com/pezkuwichain/pezkuwi-sdk/blob/master/docs/contributor/prdoc.md#record-semver-changes
          - https://forum.pezkuwi.network/t/psa-pezkuwi-sdk-to-use-semver
          Otherwise feel free to ask in the Merge Request or in Matrix chat.
          EOF
              exit 1
            fi
          else
            rm -f "$parity_output"
          fi
          # Only enforce SemVer restrictions for backports targeting stable branches
          if [[ "$BASE_BRANCH" != stable* && "$BASE_BRANCH" != unstable* ]]; then
            echo "️ Branch '$BASE_BRANCH' is not a (un)stable branch. Skipping SemVer backport-specific enforcements."
            exit 0
          fi
          echo "🔍 Backport branch detected, checking for disallowed semver changes..."
          # Check for minor/patch bumps with validate: false
          if grep -qE "bump:[[:space:]]*(minor|patch)" "$prdoc_file"; then
            minor_patch_temp=$(mktemp)
            grep -A1 -E "bump:[[:space:]]*(minor|patch)" "$prdoc_file" > "$minor_patch_temp"
            has_validate_false=false
            while read -r line; do
              if [[ "$line" =~ bump:[[:space:]]*(minor|patch) ]]; then
                read -r next_line || true
                if [[ "$next_line" =~ validate:[[:space:]]*false ]]; then
                  has_validate_false=true
                  break
                fi
              fi
            done < "$minor_patch_temp"
            rm -f "$minor_patch_temp"
            if [ "$has_validate_false" = true ]; then
              echo "️ Found minor/patch bumps with validate: false override. Semver validation was skipped for these crates by kurdistan-tech-publish."
            fi
          fi
          # Check if there are any major bumps
          if ! grep -q "bump:[[:space:]]*major" "$prdoc_file"; then
            echo "✅ All semver changes in backport are valid (minor, patch, or none)."
            exit 0
          fi
          # Process each major bump and check the next line
          # NOTE(review): this assumes `validate: false` appears on the line
          # immediately after `bump: major` in the prdoc — confirm against the
          # prdoc schema before relying on it.
          temp_file=$(mktemp)
          grep -A1 "bump:[[:space:]]*major" "$prdoc_file" > "$temp_file"
          error_found=false
          while IFS= read -r line; do
            if [[ "$line" =~ bump:[[:space:]]*major ]]; then
              # This is the bump line, read the next line
              if IFS= read -r next_line; then
                if [[ "$next_line" =~ validate:[[:space:]]*false ]]; then
                  continue # This major bump is properly validated
                else
                  error_found=true
                  break
                fi
              else
                # No next line, means no validate: false
                error_found=true
                break
              fi
            fi
          done < "$temp_file"
          rm -f "$temp_file"
          if [ "$error_found" = true ]; then
            echo "❌ Error: Found major bump without 'validate: false'"
            echo "📘 See: https://github.com/pezkuwichain/pezkuwi-sdk/blob/master/docs/contributor/prdoc.md#backporting-prs"
            echo "🔧 Add 'validate: false' after the major bump in $prdoc_file with justification."
            exit 1
          fi
          # If we reach here, all major bumps have validate: false
          echo "⚠️ Backport contains major bumps, but they are all marked with validate: false."
          echo "✅ Semver override accepted. Please ensure justification is documented in the PR description."
@@ -0,0 +1,39 @@
# Validates the flaky-test registry whenever it (or its tooling) changes:
# every entry must be "<test-name>:<issue-number>" and reference a real issue.
name: Check Zombienet Flaky Tests

concurrency:
  group: check-zombienet-flaky-tests-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

on:
  pull_request:
    types: [opened, synchronize, reopened]
    paths:
      - '.github/zombienet-flaky-tests'
      - '.github/scripts/check-zombienet-flaky-tests.sh'
      - '.github/workflows/check-zombienet-flaky-tests.yml'
  merge_group:

permissions:
  contents: read

jobs:
  check-flaky-tests:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    steps:
      - name: Checkout repo
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Validate zombienet-flaky-tests
        env:
          # Token is needed by the script to verify referenced issues exist.
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          .github/scripts/check-zombienet-flaky-tests.sh .github/zombienet-flaky-tests
      - name: Check results
        if: failure()
        run: |
          echo "::error::Validation failed. Please ensure all entries in .github/zombienet-flaky-tests have valid format and reference existing GitHub issues."
          echo "Format: <test-name>:<issue-number>"
          echo "See .github/ZOMBIENET_FLAKY_TESTS.md for more information."
          exit 1
+252
View File
@@ -0,0 +1,252 @@
# Checks that doesn't require heavy lifting, like formatting, linting, etc.
name: quick-checks

on:
  push:
    branches:
      - main
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]
  merge_group:

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

permissions: {}

jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  preflight:
    needs: isdraft
    uses: ./.github/workflows/reusable-preflight.yml
  fmt:
    runs-on: ubuntu-latest
    timeout-minutes: 20
    needs: [preflight]
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v4.1.7
      - name: Cargo fmt
        id: required
        run: cargo fmt --all -- --check
  check-dependency-rules:
    runs-on: ubuntu-latest
    needs: isdraft
    timeout-minutes: 20
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v4.1.7
      - name: check dependency rules
        run: |
          cd bizinikiwi/
          ../.gitlab/ensure-deps.sh
  check-zepter:
    runs-on: ubuntu-latest
    timeout-minutes: 20
    needs: [preflight]
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v4.1.7
      - name: fetch deps
        run: |
          # Pull all dependencies eagerly:
          time cargo metadata --format-version=1 --locked > /dev/null
      - name: Install newer Zepter
        run: |
          cargo install zepter@1.82.1 --locked -q
      - name: run zepter
        run: |
          zepter --version
          time zepter run check
  test-rust-features:
    runs-on: ubuntu-latest
    timeout-minutes: 20
    needs: [preflight]
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v4.1.7
      - name: run rust features
        run: bash .gitlab/rust-features.sh .
  check-toml-format:
    runs-on: ubuntu-latest
    timeout-minutes: 20
    needs: [preflight]
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v4.1.7
      - name: check toml format
        run: |
          # Print the remediation hint BEFORE the check: the step's shell runs
          # with `-e`, so a failing check would otherwise skip the hint exactly
          # when it is needed. Single quotes are essential here — in the
          # original double-quoted message the backticks were interpreted as
          # command substitution and actually executed `taplo format` in CI.
          echo 'If this check fails, please run `taplo format --config .config/taplo.toml` to fix any toml formatting issues'
          taplo format --check --config .config/taplo.toml
  check-workspace:
    runs-on: ubuntu-latest
    needs: isdraft
    timeout-minutes: 20
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v4.1.0 (22. Sep 2023)
      - name: install python deps
        run: |
          sudo apt-get update && sudo apt-get install -y python3-pip python3
          pip3 install toml "cargo-workspace>=1.2.6"
      - name: check integrity
        run: >
          python3 .github/scripts/check-workspace.py .
          --exclude
          "bizinikiwi/frame/contracts/fixtures/build"
          "bizinikiwi/frame/contracts/fixtures/contracts/common"
      - name: deny git deps
        run: python3 .github/scripts/deny-git-deps.py .
  check-markdown:
    runs-on: ubuntu-latest
    needs: isdraft
    timeout-minutes: 20
    steps:
      - name: Checkout sources
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v4.1.7
      - name: Setup Node.js
        uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
        with:
          node-version: "18.x"
      - name: Install tooling
        run: |
          npm install -g markdownlint-cli
          markdownlint --version
      - name: Check Markdown
        env:
          CONFIG: .github/.markdownlint.yaml
        run: |
          echo "Checking markdown formatting. More info: docs/contributor/markdown_linting.md"
          echo "To fix potential errors, you can run 'markdownlint --config .github/.markdownlint.yaml -f --ignore target .' locally."
          markdownlint --config "$CONFIG" --ignore target .
  check-umbrella:
    runs-on: ubuntu-latest
    timeout-minutes: 20
    needs: [preflight]
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v4.1.0 (22. Sep 2023)
      - name: install python deps
        run: pip3 install "cargo-workspace>=1.2.4" toml
      - name: Install newer Zepter
        run: |
          cargo install zepter@1.82.1 --locked -q && zepter --version
      # Regenerates the umbrella crate and fails if the result differs from
      # what is committed (i.e. a new crate was not added to the umbrella).
      - name: check umbrella correctness
        run: |
          # Fixes "detected dubious ownership" error in the ci
          git config --global --add safe.directory '*'
          # Ensure jq is installed
          if ! command -v jq &> /dev/null; then
            echo "Installing jq..."
            apt-get update && apt-get install -y jq
          fi
          # Extract the umbrella crate version dynamically from cargo metadata
          UMBRELLA_VERSION=$(cargo metadata --format-version=1 | jq -r '.packages[] | select(.manifest_path | endswith("umbrella/Cargo.toml")) | .version')
          if [ -z "$UMBRELLA_VERSION" ]; then
            echo "Warning: Could not determine umbrella version from cargo metadata, using fallback version 0.1.0"
            UMBRELLA_VERSION="0.1.0"
          fi
          echo "Using umbrella crate version: $UMBRELLA_VERSION"
          python3 scripts/generate-umbrella.py --sdk . --version "$UMBRELLA_VERSION"
          cargo +nightly fmt -p pezkuwi-sdk
          if [ -n "$(git status --porcelain)" ]; then
          cat <<EOF
          👋 Hello developer! Apparently you added a new crate that is not part of the umbrella crate?
          You can just apply the patch (git apply PATCH_NAME) that was printed to make this CI check succeed.
          Otherwise feel free to ask in the Merge Request or in Matrix chat.
          EOF
          git diff
          exit 1
          fi
  check-fail-ci:
    runs-on: ubuntu-latest
    needs: isdraft
    # Added for consistency with the sibling jobs, which all bound their runtime.
    timeout-minutes: 20
    steps:
      - name: Fetch latest code
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Install ripgrep
        run: |
          sudo apt-get update && sudo apt-get install -y ripgrep
      # Fails the build if any Rust file still contains a FAIL-CI marker.
      - name: Check
        run: |
          set +e
          rg --line-number --hidden --type rust --glob '!{.git,target}' "$ASSERT_REGEX" .; exit_status=$?
          if [ $exit_status -eq 0 ]; then
            echo "$ASSERT_REGEX was found, exiting with 1";
            exit 1;
          else
            echo "No $ASSERT_REGEX was found, exiting with 0";
            exit 0;
          fi
        env:
          ASSERT_REGEX: "FAIL-CI"
          GIT_DEPTH: 1
  check-readme:
    runs-on: ubuntu-latest
    needs: isdraft
    timeout-minutes: 10
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Install prerequisites
        run: |
          sudo apt-get update
          sudo apt-get install -y protobuf-compiler
      # The toolchain version is parsed out of the ci-unified image tag in .github/env.
      - name: Set rust version from env file
        run: |
          RUST_VERSION=$(cat .github/env | sed -E 's/.*ci-unified:([^-]+)-([^-]+).*/\2/')
          echo $RUST_VERSION
          echo "RUST_VERSION=${RUST_VERSION}" >> $GITHUB_ENV
      - name: Install Rust
        uses: actions-rust-lang/setup-rust-toolchain@1780873c7b576612439a134613cc4cc74ce5538c # v1.15.2
        with:
          cache: false
          toolchain: ${{ env.RUST_VERSION }}
          components: cargo, clippy, rust-docs, rust-src, rustfmt, rustc, rust-std
      - name: Find README.docify.md files and check generated READMEs
        run: .github/scripts/check-missing-readme-generation.sh
  # Aggregates all quick-check jobs into a single required status.
  confirm-required-checks-quick-jobs-passed:
    runs-on: ubuntu-latest
    name: All quick checks passed
    # If any new job gets added, be sure to add it to this array
    needs:
      - fmt
      - check-dependency-rules
      - check-zepter
      - test-rust-features
      - check-toml-format
      - check-workspace
      - check-markdown
      - check-umbrella
      - check-fail-ci
      - check-readme
    if: always() && !cancelled()
    steps:
      - run: |
          tee resultfile <<< '${{ toJSON(needs) }}'
          FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
          if [ $FAILURES -gt 0 ]; then
            echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
            exit 1
          else
            echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
          fi
+128
View File
@@ -0,0 +1,128 @@
name: Checks
on:
push:
branches:
- main
pull_request:
types: [opened, synchronize, reopened, ready_for_review]
merge_group:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
permissions: {}
jobs:
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/reusable-preflight.yml
  # Runs clippy over the whole workspace with warnings promoted to errors.
  cargo-clippy:
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    needs: [preflight]
    # NOTE(review): relies on changes_rust being empty when there are no rust
    # changes — a literal 'false' string would still be truthy here; confirm
    # what reusable-preflight emits.
    if: ${{ needs.preflight.outputs.changes_rust }}
    timeout-minutes: 60
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    env:
      RUSTFLAGS: "-D warnings"
      SKIP_WASM_BUILD: 1
    steps:
      # Comment corrected: SHA 08c6903… is v5.0.0 (as labelled elsewhere in
      # this repo), not v4.1.7.
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Free disk space
        run: |
          df -h
          # Remove unnecessary files to free disk space
          sudo rm -rf /usr/share/dotnet 2>/dev/null || true
          sudo rm -rf /usr/local/lib/android 2>/dev/null || true
          sudo rm -rf /opt/ghc 2>/dev/null || true
          sudo rm -rf /opt/hostedtoolcache 2>/dev/null || true
          cargo clean 2>/dev/null || true
          rm -rf ~/.cargo/registry/cache 2>/dev/null || true
          rm -rf ~/.cargo/git/db 2>/dev/null || true
          df -h
      - name: script
        id: required
        run: |
          cargo clippy --all-targets --all-features --locked --workspace --quiet
  # Verifies the workspace (and the teyrchain template) compiles with the
  # try-runtime feature enabled.
  check-try-runtime:
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    needs: [preflight]
    if: ${{ needs.preflight.outputs.changes_rust }}
    timeout-minutes: 60
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      # Comment corrected: SHA 08c6903… is v5.0.0 (as labelled elsewhere in
      # this repo), not v4.1.7.
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Free disk space
        run: |
          df -h
          # Remove unnecessary files to free disk space
          sudo rm -rf /usr/share/dotnet 2>/dev/null || true
          sudo rm -rf /usr/local/lib/android 2>/dev/null || true
          sudo rm -rf /opt/ghc 2>/dev/null || true
          sudo rm -rf /opt/hostedtoolcache 2>/dev/null || true
          cargo clean 2>/dev/null || true
          rm -rf ~/.cargo/registry/cache 2>/dev/null || true
          rm -rf ~/.cargo/git/db 2>/dev/null || true
          df -h
      - name: script
        id: required
        run: |
          cargo check --locked --all --features try-runtime --quiet
          # this is taken from pezcumulus
          # Check that teyrchain-template will compile with `try-runtime` feature flag.
          cargo check --locked -p teyrchain-template-node --features try-runtime
          # add after https://github.com/pezkuwichain/bizinikiwi/pull/14502 is merged
          # experimental code may rely on try-runtime and vice-versa
          cargo check --locked --all --features try-runtime,experimental --quiet
  # check-core-crypto-features works fast without forklift
  # Runs each primitive crate's feature-combination check script in place.
  check-core-crypto-features:
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    needs: [preflight]
    if: ${{ needs.preflight.outputs.changes_rust }}
    timeout-minutes: 30
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      # Comment corrected: SHA 08c6903… is v5.0.0 (as labelled elsewhere in
      # this repo), not v4.1.7.
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: script
        id: required
        run: |
          cd bizinikiwi/primitives/core
          ./check-features-variants.sh
          cd -
          cd bizinikiwi/primitives/application-crypto
          ./check-features-variants.sh
          cd -
          cd bizinikiwi/primitives/keyring
          ./check-features-variants.sh
          cd -
  # name of this job must be unique across all workflows
  # otherwise GitHub will mark all these jobs as required
  # Aggregation job for branch protection; see the quick-checks twin of this
  # job for the same pattern.
  confirm-required-checks-passed:
    runs-on: ubuntu-latest
    name: All checks passed
    # If any new job gets added, be sure to add it to this array
    needs: [cargo-clippy, check-try-runtime, check-core-crypto-features]
    if: always() && !cancelled()
    steps:
      - run: |
          tee resultfile <<< '${{ toJSON(needs) }}'
          FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
          if [ $FAILURES -gt 0 ]; then
            echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
            exit 1
          else
            echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
          fi
+507
View File
@@ -0,0 +1,507 @@
name: Command - Run
on:
workflow_dispatch:
inputs:
cmd:
description: "Command to run"
required: true
pr_num:
description: "PR number"
required: true
pr_branch:
description: "PR branch"
required: true
runner:
description: "Runner to use"
required: true
image:
description: "Image to use"
required: true
is_org_member:
description: "Is the user an org member"
required: true
is_pr_author:
description: "Is the user the PR author"
required: true
repo:
description: "Repository to use"
required: true
comment_id:
description: "Comment ID"
required: true
is_quiet:
description: "Quiet mode"
required: false
default: "false"
permissions: # allow the action to comment on the PR
contents: read
issues: write
pull-requests: write
actions: read
jobs:
before-cmd:
runs-on: ubuntu-latest
env:
JOB_NAME: "cmd"
CMD: ${{ github.event.inputs.cmd }}
PR_BRANCH: ${{ github.event.inputs.pr_branch }}
PR_NUM: ${{ github.event.inputs.pr_num }}
outputs:
job_url: ${{ steps.build-link.outputs.job_url }}
run_url: ${{ steps.build-link.outputs.run_url }}
steps:
- name: Build workflow link
if: ${{ github.event.inputs.is_quiet == 'false' }}
id: build-link
run: |
# Get exactly the CMD job link, filtering out the other jobs
jobLink=$(curl -s \
-H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
-H "Accept: application/vnd.github.v3+json" \
https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/jobs | jq '.jobs[] | select(.name | contains("${{ env.JOB_NAME }}")) | .html_url')
runLink=$(curl -s \
-H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
-H "Accept: application/vnd.github.v3+json" \
https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }} | jq '.html_url')
echo "job_url=${jobLink}"
echo "run_url=${runLink}"
echo "job_url=$jobLink" >> $GITHUB_OUTPUT
echo "run_url=$runLink" >> $GITHUB_OUTPUT
- name: Comment PR (Start)
# No need to comment on prdoc start or if --quiet
if: ${{ github.event.inputs.is_quiet == 'false' && !startsWith(github.event.inputs.cmd, 'prdoc') && !startsWith(github.event.inputs.cmd, 'fmt') && !startsWith(github.event.inputs.cmd, 'label')}}
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
let job_url = ${{ steps.build-link.outputs.job_url }}
let cmd = process.env.CMD;
github.rest.issues.createComment({
issue_number: ${{ env.PR_NUM }},
owner: context.repo.owner,
repo: context.repo.repo,
body: `Command "${cmd}" has started 🚀 [See logs here](${job_url})`
})
- name: Debug info
env:
CMD: ${{ github.event.inputs.cmd }}
PR_BRANCH: ${{ github.event.inputs.pr_branch }}
PR_NUM: ${{ github.event.inputs.pr_num }}
RUNNER: ${{ github.event.inputs.runner }}
IMAGE: ${{ github.event.inputs.image }}
IS_ORG_MEMBER: ${{ github.event.inputs.is_org_member }}
REPO: ${{ github.event.inputs.repo }}
COMMENT_ID: ${{ github.event.inputs.comment_id }}
IS_QUIET: ${{ github.event.inputs.is_quiet }}
run: |
echo "Running command: $CMD"
echo "PR number: $PR_NUM"
echo "PR branch: $PR_BRANCH"
echo "Runner: $RUNNER"
echo "Image: $IMAGE"
echo "Is org member: $IS_ORG_MEMBER"
echo "Repository: $REPO"
echo "Comment ID: $COMMENT_ID"
echo "Is quiet: $IS_QUIET"
  # Executes the requested command on the selected runner/image with reduced
  # permissions; results are exported as artifacts and job outputs.
  cmd:
    needs: [before-cmd]
    env:
      CMD: ${{ github.event.inputs.cmd }}
      PR_BRANCH: ${{ github.event.inputs.pr_branch }}
      PR_NUM: ${{ github.event.inputs.pr_num }}
      REPO: ${{ github.event.inputs.repo }}
    runs-on: ${{ github.event.inputs.runner }}
    container:
      image: ${{ github.event.inputs.image }}
    timeout-minutes: 1440 # 24 hours per runtime
    # lowerdown permissions to separate permissions context for executable parts by contributors
    permissions:
      contents: read
      pull-requests: none
      actions: none
      issues: none
    outputs:
      cmd_output: ${{ steps.cmd.outputs.cmd_output }}
      subweight: ${{ steps.subweight.outputs.result }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          repository: ${{ env.REPO }}
          ref: ${{ env.PR_BRANCH }}
      # In order to run prdoc without specifying the PR number, we need to add the PR number as an argument automatically
      - name: Prepare PR Number argument
        id: pr-arg
        run: |
          CMD="${CMD}"
          if echo "$CMD" | grep -q "prdoc" && ! echo "$CMD" | grep -qE "\-\-pr[[:space:]=][0-9]+"; then
            echo "arg=--pr ${PR_NUM}" >> $GITHUB_OUTPUT
          else
            echo "arg=" >> $GITHUB_OUTPUT
          fi
      # NOTE(review): the redirects below assume /tmp/cmd exists — presumably
      # created by cmd.py; confirm. RUST_NIGHTLY_VERSION is not set here —
      # presumably provided by the container image environment; confirm.
      - name: Run cmd
        id: cmd
        env:
          PR_ARG: ${{ steps.pr-arg.outputs.arg }}
          IS_ORG_MEMBER: ${{ github.event.inputs.is_org_member }}
          IS_PR_AUTHOR: ${{ github.event.inputs.is_pr_author }}
          RUNNER: ${{ github.event.inputs.runner }}
          IMAGE: ${{ github.event.inputs.image }}
        run: |
          echo "Running command: '${CMD} ${PR_ARG}' on '${RUNNER}' runner, container: '${IMAGE}'"
          echo "RUST_NIGHTLY_VERSION: ${RUST_NIGHTLY_VERSION}"
          echo "IS_ORG_MEMBER: ${IS_ORG_MEMBER}"
          git config --global --add safe.directory $GITHUB_WORKSPACE
          git config user.name "cmd[bot]"
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
          # if the user is not an org member, we need to use the bot's path from master to avoid unwanted modifications
          if [ "${IS_ORG_MEMBER}" = "true" ]; then
            # safe to run commands from current branch
            BOT_PATH=.github
          else
            # going to run commands from master
            TMP_DIR=/tmp/pezkuwi-sdk
            git clone --depth 1 --branch master https://github.com/pezkuwichain/pezkuwi-sdk $TMP_DIR
            BOT_PATH=$TMP_DIR/.github
          fi
          # install deps and run a command from master
          python3 -m pip install -r $BOT_PATH/scripts/generate-prdoc.requirements.txt
          python3 $BOT_PATH/scripts/cmd/cmd.py $CMD $PR_ARG
          git status > /tmp/cmd/git_status.log
          git diff > /tmp/cmd/git_diff.log
          if [ -f /tmp/cmd/command_output.log ]; then
            CMD_OUTPUT=$(cat /tmp/cmd/command_output.log)
            # export to summary to display in the PR
            echo "$CMD_OUTPUT" >> $GITHUB_STEP_SUMMARY
            # should be multiline, otherwise it captures the first line only
            echo 'cmd_output<<EOF' >> $GITHUB_OUTPUT
            echo "$CMD_OUTPUT" >> $GITHUB_OUTPUT
            echo 'EOF' >> $GITHUB_OUTPUT
          fi
          git add -A
          git diff HEAD > /tmp/cmd/command_diff.patch -U0
          git commit -m "tmp cmd: $CMD" || true
          # without push, as we're saving the diff to an artifact and subweight will compare the local branch with the remote branch
      - name: Upload command output
        if: ${{ always() }}
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: command-output
          path: /tmp/cmd/command_output.log
      - name: Upload command diff
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: command-diff
          path: /tmp/cmd/command_diff.patch
      - name: Upload git status
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: git-status
          path: /tmp/cmd/git_status.log
      - name: Upload git diff
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: git-diff
          path: /tmp/cmd/git_diff.log
      - name: Install subweight for bench
        if: startsWith(github.event.inputs.cmd, 'bench')
        run: cargo install subweight
      # Compares weight files between origin/master and the PR branch.
      - name: Run Subweight for bench
        id: subweight
        if: startsWith(github.event.inputs.cmd, 'bench')
        shell: bash
        run: |
          git fetch
          git remote -v
          echo $(git log -n 2 --oneline)
          result=$(subweight compare commits \
            --path-pattern "./**/weights/**/*.rs,./**/weights.rs" \
            --method asymptotic \
            --format markdown \
            --no-color \
            --change added changed \
            --ignore-errors \
            refs/remotes/origin/master $PR_BRANCH)
          echo $result
          echo $result > /tmp/cmd/subweight.log
          # Though github claims that it supports 1048576 bytes in GITHUB_OUTPUT in fact it only supports ~200000 bytes of a multiline string
          if [ $(wc -c < "/tmp/cmd/subweight.log") -gt 200000 ]; then
            echo "Subweight result is too large, truncating..."
            echo "Please check subweight.log for the full output"
            result="Please check subweight.log for the full output"
          fi
          echo "Trying to save subweight result to GITHUB_OUTPUT"
          # Save the multiline result to the output
          {
            echo "result<<EOF"
            echo "$result"
            echo "EOF"
          } >> $GITHUB_OUTPUT
      - name: Upload Subweight
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        if: startsWith(github.event.inputs.cmd, 'bench')
        with:
          name: subweight
          path: /tmp/cmd/subweight.log
after-cmd:
needs: [cmd, before-cmd]
env:
CMD: ${{ github.event.inputs.cmd }}
PR_BRANCH: ${{ github.event.inputs.pr_branch }}
PR_NUM: ${{ github.event.inputs.pr_num }}
REPO: ${{ github.event.inputs.repo }}
runs-on: ubuntu-latest
steps:
# needs to be able to trigger CI, as default token does not retrigger
- uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
id: generate_token
with:
app-id: ${{ secrets.CMD_BOT_APP_ID }}
private-key: ${{ secrets.CMD_BOT_APP_KEY }}
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
token: ${{ steps.generate_token.outputs.token }}
repository: ${{ env.REPO }}
ref: ${{ env.PR_BRANCH }}
- name: Download all artifacts
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
name: command-diff
path: command-diff
- name: Apply labels for label command
if: startsWith(github.event.inputs.cmd, 'label')
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
github-token: ${{ steps.generate_token.outputs.token }}
script: |
// Read the command output to get validated labels
const fs = require('fs');
let labels = [];
try {
const output = fs.readFileSync('/tmp/cmd/command_output.log', 'utf8');
// Parse JSON labels from output - look for "LABELS_JSON: {...}"
const jsonMatch = output.match(/LABELS_JSON: (.+)/);
if (jsonMatch) {
const labelsData = JSON.parse(jsonMatch[1]);
labels = labelsData.labels || [];
}
} catch (error) {
console.error(`Error reading command output: ${error.message}`);
throw new Error('Label validation failed. Check the command output for details.');
}
if (labels.length > 0) {
try {
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: ${{ env.PR_NUM }},
labels: labels
});
} catch (error) {
console.error(`Error adding labels: ${error.message}`);
throw error;
}
}
- name: Comment PR (Label Error)
if: ${{ failure() && startsWith(github.event.inputs.cmd, 'label') }}
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
CMD_OUTPUT: "${{ needs.cmd.outputs.cmd_output }}"
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
let runUrl = ${{ needs.before-cmd.outputs.run_url }};
let cmdOutput = process.env.CMD_OUTPUT || '';
// Try to parse JSON error for better formatting
let errorMessage = 'Label validation failed. Please check the error details below and try again.';
let errorDetails = '';
try {
const errorMatch = cmdOutput.match(/ERROR_JSON: (.+)/);
if (errorMatch) {
const errorData = JSON.parse(errorMatch[1]);
errorMessage = errorData.message || errorMessage;
errorDetails = errorData.details || '';
}
} catch (e) {
// Fallback to raw output
errorDetails = cmdOutput;
}
let cmdOutputCollapsed = errorDetails.trim() !== ''
? `<details>\n\n<summary>Error details:</summary>\n\n${errorDetails}\n\n</details>`
: '';
github.rest.issues.createComment({
issue_number: ${{ env.PR_NUM }},
owner: context.repo.owner,
repo: context.repo.repo,
body: `❌ ${errorMessage}\n\n${cmdOutputCollapsed}\n\n[See full logs here](${runUrl})`
})
- name: Apply & Commit changes
if: ${{ !startsWith(github.event.inputs.cmd, 'label') }}
run: |
ls -lsa .
git config --global --add safe.directory $GITHUB_WORKSPACE
git config user.name "cmd[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git config --global pull.rebase false
echo "Applying $file"
git apply "command-diff/command_diff.patch" --unidiff-zero --allow-empty
rm -rf command-diff
git status
if [ -n "$(git status --porcelain)" ]; then
git remote -v
push_changes() {
git push origin "HEAD:$PR_BRANCH"
}
git add .
git restore --staged Cargo.lock # ignore changes in Cargo.lock
git commit -m "Update from ${{ github.actor }} running command '$CMD'" || true
# Attempt to push changes
if ! push_changes; then
echo "Push failed, trying to rebase..."
git pull --rebase origin $PR_BRANCH
# After successful rebase, try pushing again
push_changes
fi
else
echo "Nothing to commit";
fi
- name: Comment PR (End)
# No need to comment on prdoc success or --quiet
#TODO: return "&& !contains(github.event.comment.body, '--quiet')"
if: ${{ github.event.inputs.is_quiet == 'false' && needs.cmd.result == 'success' && !startsWith(github.event.inputs.cmd, 'prdoc') && !startsWith(github.event.inputs.cmd, 'fmt') && !startsWith(github.event.inputs.cmd, 'label') }}
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
SUBWEIGHT: "${{ needs.cmd.outputs.subweight }}"
CMD_OUTPUT: "${{ needs.cmd.outputs.cmd_output }}"
PR_NUM: ${{ github.event.inputs.pr_num }}
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
let runUrl = ${{ needs.before-cmd.outputs.run_url }};
let subweight = process.env.SUBWEIGHT || '';
let cmdOutput = process.env.CMD_OUTPUT || '';
let cmd = process.env.CMD;
console.log(cmdOutput);
let subweightCollapsed = subweight.trim() !== ''
? `<details>\n\n<summary>Subweight results:</summary>\n\n${subweight}\n\n</details>`
: '';
let cmdOutputCollapsed = cmdOutput.trim() !== ''
? `<details>\n\n<summary>Command output:</summary>\n\n${cmdOutput}\n\n</details>`
: '';
github.rest.issues.createComment({
issue_number: ${{ env.PR_NUM }},
owner: context.repo.owner,
repo: context.repo.repo,
body: `Command "${cmd}" has finished ✅ [See logs here](${runUrl})${subweightCollapsed}${cmdOutputCollapsed}`
})
finish:
needs: [before-cmd, cmd, after-cmd]
if: ${{ always() }}
runs-on: ubuntu-latest
env:
CMD_OUTPUT: "${{ needs.cmd.outputs.cmd_output }}"
CMD: ${{ github.event.inputs.cmd }}
PR_NUM: ${{ github.event.inputs.pr_num }}
COMMENT_ID: ${{ github.event.inputs.comment_id }}
steps:
- name: Comment PR (Failure)
if: ${{ needs.cmd.result == 'failure' || needs.after-cmd.result == 'failure' || needs.before-cmd.result == 'failure' }}
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
let jobUrl = ${{ needs.before-cmd.outputs.job_url }};
let cmdOutput = process.env.CMD_OUTPUT;
let cmd = process.env.CMD;
let cmdOutputCollapsed = '';
if (cmdOutput && cmdOutput.trim() !== '') {
cmdOutputCollapsed = `<details>\n\n<summary>Command output:</summary>\n\n${cmdOutput}\n\n</details>`
}
github.rest.issues.createComment({
issue_number: ${{ env.PR_NUM }},
owner: context.repo.owner,
repo: context.repo.repo,
body: `Command "${cmd}" has failed ❌! [See logs here](${jobUrl})${cmdOutputCollapsed}`
})
- name: Add 😕 reaction on failure
if: ${{ needs.cmd.result == 'failure' || needs.after-cmd.result == 'failure' || needs.before-cmd.result == 'failure' }}
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
github.rest.reactions.createForIssueComment({
comment_id: ${{ env.COMMENT_ID }},
owner: context.repo.owner,
repo: context.repo.repo,
content: 'confused'
})
- name: Add 👍 reaction on success
if: ${{ needs.cmd.result == 'success' && needs.after-cmd.result == 'success' && needs.before-cmd.result == 'success' }}
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
github.rest.reactions.createForIssueComment({
comment_id: ${{ env.COMMENT_ID }},
owner: context.repo.owner,
repo: context.repo.repo,
content: '+1'
})
+22
View File
@@ -0,0 +1,22 @@
# Runs the unit tests for the /cmd bot helper scripts on every PR.
name: Command Bot Tests
on:
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]
permissions:
  contents: read
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  test-cmd-bot:
    runs-on: ubuntu-latest
    needs: [isdraft]
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - run: python3 .github/scripts/cmd/test_cmd.py
+334
View File
@@ -0,0 +1,334 @@
name: Command
on:
issue_comment: # listen for comments on issues
types: [created]
permissions: # allow the action to comment in PR
contents: read
issues: write
pull-requests: write
actions: read
jobs:
  # Determines whether the commenter belongs to the repository owner's org;
  # downstream, non-members get commands executed from master instead of
  # their branch.
  is-org-member:
    if: startsWith(github.event.comment.body, '/cmd')
    runs-on: ubuntu-latest
    outputs:
      member: ${{ steps.is-member.outputs.result }}
    steps:
      # App token is required: org membership checks need more than GITHUB_TOKEN.
      - name: Generate token
        id: generate_token
        uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        with:
          app-id: ${{ secrets.CMD_BOT_APP_ID }}
          private-key: ${{ secrets.CMD_BOT_APP_KEY }}
      # Returns the string 'true' only on HTTP 204 (member); any error or
      # other status yields 'false'.
      - name: Check if user is a member of the organization
        id: is-member
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          github-token: ${{ steps.generate_token.outputs.token }}
          result-encoding: string
          script: |
            const fs = require("fs");
            try {
              const org = '${{ github.event.repository.owner.login }}';
              const username = '${{ github.event.comment.user.login }}';
              const membership = await github.rest.orgs.checkMembershipForUser({
                org: org,
                username: username
              });
              console.log(membership, membership.status, membership.status === 204);
              if (membership.status === 204) {
                return 'true';
              } else {
                console.log(membership);
                fs.appendFileSync(process.env["GITHUB_STEP_SUMMARY"], `${membership.data && membership.data.message || 'Unknown error happened, please check logs'}`);
              }
            } catch (error) {
              console.log(error)
            }
            return 'false';
  # Immediately reacts 👀 to the triggering /cmd comment so the user knows
  # the bot picked it up.
  acknowledge:
    if: ${{ startsWith(github.event.comment.body, '/cmd') }}
    runs-on: ubuntu-latest
    steps:
      - name: Add reaction to triggered comment
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            github.rest.reactions.createForIssueComment({
              comment_id: ${{ github.event.comment.id }},
              owner: context.repo.owner,
              repo: context.repo.repo,
              content: 'eyes'
            })
clean:
runs-on: ubuntu-latest
steps:
- name: Clean previous comments
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
if: ${{ startsWith(github.event.comment.body, '/cmd') && contains(github.event.comment.body, '--clean') }}
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
github.rest.issues.listComments({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo
}).then(comments => {
for (let comment of comments.data) {
console.log(comment)
if (
${{ github.event.comment.id }} !== comment.id &&
(
(
(
comment.body.startsWith('Command') ||
comment.body.startsWith('<details><summary>Command') ||
comment.body.startsWith('Sorry, only ')
) && comment.user.type === 'Bot'
) ||
(comment.body.startsWith('/cmd') && comment.user.login === context.actor)
)
) {
github.rest.issues.deleteComment({
comment_id: comment.id,
owner: context.repo.owner,
repo: context.repo.repo
})
}
}
})
  # Parses the /cmd comment and resolves the PR's branch and head repository
  # (the issue_comment payload does not carry them).
  get-pr-info:
    if: ${{ startsWith(github.event.comment.body, '/cmd') }}
    runs-on: ubuntu-latest
    outputs:
      CMD: ${{ steps.get-comment.outputs.group2 }}
      pr-branch: ${{ steps.get-pr.outputs.pr_branch }}
      repo: ${{ steps.get-pr.outputs.repo }}
    steps:
      # group2 of the regex is everything after "/cmd ".
      - name: Get command
        uses: actions-ecosystem/action-regex-match@9e6c4fb3d5e898f505be7a1fb6e7b0a278f6665b # v2.0.2
        id: get-comment
        with:
          text: ${{ github.event.comment.body }}
          regex: "^(\\/cmd )([-\\/\\s\\w.=:]+)$" # see explanation in docs/contributor/commands-readme.md#examples
      # Get PR branch name, because the issue_comment event does not contain the PR branch name
      # Fails (and thereby skips the rest of the job) on plain-issue comments.
      - name: Check if the issue is a PR
        id: check-pr
        run: |
          if [ -n "${{ github.event.issue.pull_request.url }}" ]; then
            echo "This is a pull request comment"
          else
            echo "This is not a pull request comment"
            exit 1
          fi
      - name: Get PR Branch Name and Repo
        if: steps.check-pr.outcome == 'success'
        id: get-pr
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          script: |
            const pr = await github.rest.pulls.get({
              owner: context.repo.owner,
              repo: context.repo.repo,
              pull_number: context.issue.number,
            });
            const prBranch = pr.data.head.ref;
            const repo = pr.data.head.repo.full_name;
            console.log(prBranch, repo)
            core.setOutput('pr_branch', prBranch);
            core.setOutput('repo', repo);
      # Debug echo of the resolved values.
      - name: Use PR Branch Name and Repo
        env:
          PR_BRANCH: ${{ steps.get-pr.outputs.pr_branch }}
          REPO: ${{ steps.get-pr.outputs.repo }}
          CMD: ${{ steps.get-comment.outputs.group2 }}
        run: |
          echo "The PR branch is $PR_BRANCH"
          echo "The repository is $REPO"
          echo "The CMD is $CMD"
  # Handles `/cmd ... --help`: runs cmd.py to capture the help text and posts
  # it back as a collapsed PR comment.
  help:
    needs: [clean, get-pr-info]
    if: ${{ startsWith(github.event.comment.body, '/cmd') && contains(github.event.comment.body, '--help') }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Save output of help
        id: help
        env:
          CMD: ${{ needs.get-pr-info.outputs.CMD }} # to avoid "" around the command
        run: |
          python3 -m pip install -r .github/scripts/generate-prdoc.requirements.txt
          echo 'help<<EOF' >> $GITHUB_OUTPUT
          python3 .github/scripts/cmd/cmd.py $CMD >> $GITHUB_OUTPUT
          echo 'EOF' >> $GITHUB_OUTPUT
      - name: Comment PR (Help)
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: `<details><summary>Command help:</summary>
            \`\`\`
            ${{ steps.help.outputs.help }}
            \`\`\`
            </details>`
            })
      - name: Add confused reaction on failure
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        if: ${{ failure() }}
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            github.rest.reactions.createForIssueComment({
              comment_id: ${{ github.event.comment.id }},
              owner: context.repo.owner,
              repo: context.repo.repo,
              content: 'confused'
            })
      - name: Add 👍 reaction on success
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        if: ${{ !failure() }}
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            github.rest.reactions.createForIssueComment({
              comment_id: ${{ github.event.comment.id }},
              owner: context.repo.owner,
              repo: context.repo.repo,
              content: '+1'
            })
  # Resolves the container image (default from .github/env, overridable via a
  # docker.io/pezkuwichain/ci-unified:... token in the comment) and the runner.
  set-image:
    needs: [clean, get-pr-info]
    if: ${{ startsWith(github.event.comment.body, '/cmd') && !contains(github.event.comment.body, '--help') }}
    runs-on: ubuntu-latest
    env:
      CMD: ${{ needs.get-pr-info.outputs.CMD }}
    outputs:
      IMAGE: ${{ steps.set-image.outputs.IMAGE }}
      RUNNER: ${{ steps.set-image.outputs.RUNNER }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      # NOTE(review): `cat .github/env >> $GITHUB_OUTPUT` assumes that file
      # contains KEY=VALUE lines including IMAGE — confirm.
      - id: set-image
        run: |
          BODY=$(echo "$CMD" | xargs) # remove whitespace
          IMAGE_OVERRIDE=$(echo $BODY | grep -oe 'docker.io/pezkuwichain/ci-unified:.*\s' | xargs)
          cat .github/env >> $GITHUB_OUTPUT
          if [ -n "$IMAGE_OVERRIDE" ]; then
            IMAGE=$IMAGE_OVERRIDE
            echo "IMAGE=$IMAGE" >> $GITHUB_OUTPUT
          fi
          # Use GitHub-hosted runners for Pezkuwi SDK
          echo "RUNNER=ubuntu-latest" >> $GITHUB_OUTPUT
      - name: Print outputs
        run: |
          echo "RUNNER=${{ steps.set-image.outputs.RUNNER }}"
          echo "IMAGE=${{ steps.set-image.outputs.IMAGE }}"
check-pr-author:
runs-on: ubuntu-latest
outputs:
is_author: ${{ steps.check-author.outputs.result }}
steps:
- name: Generate token
id: generate_token
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
with:
app-id: ${{ secrets.CMD_BOT_APP_ID }}
private-key: ${{ secrets.CMD_BOT_APP_KEY }}
- name: Check if user is PR author
id: check-author
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
github-token: ${{ steps.generate_token.outputs.token }}
result-encoding: string
script: |
const commentUser = '${{ github.event.comment.user.login }}';
const prNumber = ${{ github.event.issue.number }};
try {
const pr = await github.rest.pulls.get({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: prNumber
});
const prAuthor = pr.data.user.login;
return commentUser === prAuthor ? 'true' : 'false';
} catch (error) {
console.error('Error checking PR author:', error);
return 'false';
}
  # Dispatches the heavy cmd-run.yml workflow (on the cmd-bot ref) with all
  # the resolved parameters.
  run-cmd-workflow:
    needs: [set-image, get-pr-info, is-org-member, check-pr-author]
    runs-on: ubuntu-latest
    # don't run on help command
    if: ${{ startsWith(github.event.comment.body, '/cmd') && !contains(github.event.comment.body, '--help') }}
    permissions: # run workflow
      contents: read
      issues: write
      pull-requests: write
      actions: write
    env:
      CMD: ${{ needs.get-pr-info.outputs.CMD }}
      PR_BRANCH: ${{ needs.get-pr-info.outputs.pr-branch }}
      RUNNER: ${{ needs.set-image.outputs.RUNNER }}
      IMAGE: ${{ needs.set-image.outputs.IMAGE }}
      REPO: ${{ needs.get-pr-info.outputs.repo }}
      IS_ORG_MEMBER: ${{ needs.is-org-member.outputs.member }}
      IS_PR_AUTHOR: ${{ needs.check-pr-author.outputs.is_author }}
      COMMENT_ID: ${{ github.event.comment.id }}
      PR_NUMBER: ${{ github.event.issue.number }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      # NOTE(review): dispatches against the fixed ref `cmd-bot` — confirm
      # that branch exists and carries the cmd-run.yml workflow.
      - name: Start cmd with gh cli
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          gh workflow run cmd-run.yml \
            --ref cmd-bot \
            -f cmd="${CMD}" \
            -f repo="${REPO}" \
            -f pr_branch="${PR_BRANCH}" \
            -f pr_num="${PR_NUMBER}" \
            -f runner="${RUNNER}" \
            -f is_org_member="${IS_ORG_MEMBER}" \
            -f is_pr_author="${IS_PR_AUTHOR}" \
            -f comment_id="${COMMENT_ID}" \
            -f image="${IMAGE}" \
            -f is_quiet="${{ contains(github.event.comment.body, '--quiet') }}"
+22
View File
@@ -0,0 +1,22 @@
# Replies to legacy "bot ..." PR comments, pointing users at the new
# GHA-based /cmd bot.
name: Inform of new command action
on:
  issue_comment:
    types: [ created ]
jobs:
  comment:
    runs-on: ubuntu-latest
    # Only react to old-style "bot ..." commands on pull requests; the reply
    # below redirects users to the new /cmd bot.
    if: github.event.issue.pull_request && startsWith(github.event.comment.body, 'bot ')
    steps:
      - name: Inform that the new command exist
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          script: |
            github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: 'We have migrated the command bot to GHA<br/><br/>Please, see the new usage instructions <a href="https://github.com/pezkuwichain/pezkuwi-sdk/blob/master/docs/contributor/commands-readme.md">here</a> or <a href="https://forum.kurdistan-tech.io/t/streamlining-weight-generation-and-more-the-new-cmd-bot/2411">here</a>. Soon the old commands will be disabled.'
            })
+81
View File
@@ -0,0 +1,81 @@
# Manually-dispatched workflow that generates a PrDoc for a given PR and
# commits it to the PR branch.
name: Command PrDoc
on:
  workflow_dispatch:
    inputs:
      pr:
        type: number
        description: Number of the Pull Request
        required: true
      bump:
        type: choice
        description: Default bump level for all crates
        default: "TODO"
        required: true
        options:
          - "TODO"
          - "none"
          - "patch"
          - "minor"
          - "major"
      audience:
        type: choice
        description: Audience of the PrDoc
        default: "TODO"
        required: true
        options:
          - "TODO"
          - "runtime_dev"
          - "runtime_user"
          - "node_dev"
          - "node_operator"
      overwrite:
        type: boolean
        description: Overwrite existing PrDoc
        default: true
        required: true
# Only one PrDoc generation at a time, globally.
concurrency:
  group: command-prdoc
  cancel-in-progress: true
jobs:
  preflight:
    uses: ./.github/workflows/reusable-preflight.yml
  cmd-prdoc:
    needs: [preflight]
    runs-on: ubuntu-latest
    timeout-minutes: 20
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    permissions:
      contents: write
      pull-requests: write
    steps:
      - name: Download repo
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      # Checks out the PR branch via gh; exposes the branch name as an output.
      - name: Install gh cli
        id: gh
        uses: ./.github/actions/set-up-gh
        with:
          pr-number: ${{ inputs.pr }}
          GH_TOKEN: ${{ github.token }}
      - name: Generate PrDoc
        run: |
          python3 -m pip install -q cargo-workspace PyGithub whatthepatch pyyaml toml
          python3 .github/scripts/generate-prdoc.py --pr "${{ inputs.pr }}" --bump "${{ inputs.bump }}" --audience "${{ inputs.audience }}" --force "${{ inputs.overwrite }}"
      - name: Report failure
        if: ${{ failure() }}
        run: gh pr comment ${{ inputs.pr }} --body "<h2>Command failed ❌</h2> Run by @${{ github.actor }} for <code>${{ github.workflow }}</code> failed. See logs <a href=\"$RUN\">here</a>."
        env:
          RUN: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
          GH_TOKEN: ${{ github.token }}
      - name: Push Commit
        uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v7.0.0
        with:
          commit_message: Add PrDoc (auto generated)
          branch: ${{ steps.gh.outputs.branch }}
          file_pattern: "prdoc/*.prdoc"
+173
View File
@@ -0,0 +1,173 @@
# Builds and publishes rustdoc and the implementers guide.
name: Docs
on:
  push:
    branches:
      - main
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]
  merge_group:
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  preflight:
    needs: isdraft
    uses: ./.github/workflows/reusable-preflight.yml
  # Runs all doc-tests in the workspace.
  test-doc:
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    timeout-minutes: 60
    needs: [preflight]
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Clean cargo cache to free disk space
        run: |
          cargo clean 2>/dev/null || true
          rm -rf ~/.cargo/registry/cache 2>/dev/null || true
          rm -rf ~/.cargo/git/db 2>/dev/null || true
      - run: cargo test --doc --workspace --locked
        id: required
        env:
          RUSTFLAGS: "-Cdebug-assertions=y -Dwarnings"
  # Builds the rustdoc HTML and uploads it as an artifact for publish-rustdoc.
  build-rustdoc:
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    timeout-minutes: 60
    # NOTE(review): assumes preflight emits an empty string when no Rust files
    # changed — in GHA expressions the non-empty string 'false' would be truthy.
    if: ${{ needs.preflight.outputs.changes_rust }}
    needs: [preflight]
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Clean cargo cache to free disk space
        run: |
          cargo clean 2>/dev/null || true
          rm -rf ~/.cargo/registry/cache 2>/dev/null || true
          rm -rf ~/.cargo/git/db 2>/dev/null || true
      - run: cargo doc --all-features --workspace --no-deps --locked
        id: required
        env:
          SKIP_WASM_BUILD: 1
          RUSTDOCFLAGS: "-Dwarnings --default-theme=ayu --html-in-header ./docs/sdk/assets/header.html --extend-css ./docs/sdk/assets/theme.css --html-after-content ./docs/sdk/assets/after-content.html"
      - run: rm -f ./target/doc/.lock
      - run: mv ./target/doc ./crate-docs
      - name: Inject Simple Analytics script
        run: |
          script_content="<script async defer src=\"https://apisa.kurdistan-tech.io/latest.js\"></script><noscript><img src=\"https://apisa.kurdistan-tech.io/latest.js\" alt=\"\" referrerpolicy=\"no-referrer-when-downgrade\" /></noscript>"
          docs_dir="./crate-docs"
          inject_simple_analytics() {
            find "$1" -name '*.html' | xargs -I {} -P "$(nproc)" bash -c 'file="{}"; echo "Adding Simple Analytics script to $file"; sed -i "s|</head>|'"$2"'</head>|" "$file";'
          }
          inject_simple_analytics "$docs_dir" "$script_content"
      - run: echo "<meta http-equiv=refresh content=0;url=pezkuwi_sdk_docs/index.html>" > ./crate-docs/index.html
      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: ${{ github.sha }}-doc
          path: ./crate-docs/
          retention-days: 1
          if-no-files-found: error
  # Builds the mdbook implementers guide and uploads it as an artifact.
  build-implementers-guide:
    runs-on: ubuntu-latest
    needs: isdraft
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Install mdbook
        run: |
          cargo install mdbook --version 0.4.35 --locked
      - run: mdbook build ./pezkuwi/roadmap/implementers-guide
      - run: mkdir -p artifacts
      - run: mv pezkuwi/roadmap/implementers-guide/book artifacts/
      - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: ${{ github.sha }}-guide
          path: ./artifacts/
          retention-days: 1
          if-no-files-found: error
  # Aggregate gate for branch protection / merge queue.
  confirm-required-jobs-passed:
    runs-on: ubuntu-latest
    name: All docs jobs passed
    # If any new job gets added, be sure to add it to this array
    needs: [test-doc, build-rustdoc, build-implementers-guide]
    if: always() && !cancelled()
    steps:
      - run: |
          tee resultfile <<< '${{ toJSON(needs) }}'
          FAILURES=$(cat resultfile | grep '"result": "failure"' | wc -l)
          if [ $FAILURES -gt 0 ]; then
            echo "### At least one required job failed ❌" >> $GITHUB_STEP_SUMMARY
            exit 1
          else
            echo '### Good job! All the required jobs passed 🚀' >> $GITHUB_STEP_SUMMARY
          fi
  publish-rustdoc:
    # Fixed: this workflow only runs on pushes to 'main' (see `on.push.branches`),
    # so the old `refs/heads/master` gate meant docs were never published.
    if: github.ref == 'refs/heads/main'
    runs-on: ubuntu-latest
    environment: subsystem-benchmarks
    needs: [build-rustdoc, build-implementers-guide]
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          ref: gh-pages
      - uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        id: app-token
        with:
          app-id: ${{ secrets.PEZKUWI_GHPAGES_APP_ID }}
          private-key: ${{ secrets.PEZKUWI_GHPAGES_APP_KEY }}
      - name: Ensure destination dir does not exist
        run: |
          rm -rf book/
          rm -rf ${REF_NAME}
        env:
          REF_NAME: ${{ github.head_ref || github.ref_name }}
      - name: Download rustdocs
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: ${{ github.sha }}-doc
          path: ${{ github.head_ref || github.ref_name }}
      - name: Download guide
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: ${{ github.sha }}-guide
          path: /tmp
      - run: mkdir -p book
      - name: Move book files
        run: mv /tmp/book/html/* book/
      - name: Push changes to gh-pages
        env:
          TOKEN: ${{ steps.app-token.outputs.token }}
          APP_NAME: "pezkuwichain-upd-ghpages"
          REF_NAME: ${{ github.head_ref || github.ref_name }}
          Green: "\e[32m"
          NC: "\e[0m"
        run: |
          echo "${Green}Git add${NC}"
          git add book/
          git add ${REF_NAME}/
          echo "${Green}git status | wc -l${NC}"
          git status | wc -l
          echo "${Green}Add new remote with gh app token${NC}"
          git remote set-url origin $(git config remote.origin.url | sed "s/github.com/${APP_NAME}:${TOKEN}@github.com/g")
          echo "${Green}Remove http section that causes issues with gh app auth token${NC}"
          sed -i.bak '/\[http/d' ./.git/config
          sed -i.bak '/extraheader/d' ./.git/config
          echo "${Green}Git push${NC}"
          git config user.email "ci@kurdistan-tech.io"
          git config user.name "${APP_NAME}"
          git commit --amend -m "___Updated docs" || echo "___Nothing to commit___"
          git push origin gh-pages --force
@@ -0,0 +1,30 @@
# If there are new issues related to the async backing feature,
# add it to the teyrchain team's board and set a custom "meta" field.
name: Add selected issues to Teyrchain team board
on:
  issues:
    types:
      - labeled
jobs:
  add-teyrchain-issues:
    # Only react to the async-backing tracking label.
    if: github.event.label.name == 'T16-async_backing'
    runs-on: ubuntu-latest
    steps:
      - name: Generate token
        id: generate_token
        uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a # v2.1.0
        with:
          app_id: ${{ secrets.PROJECT_APP_ID }}
          private_key: ${{ secrets.PROJECT_APP_KEY }}
      - name: Sync issues
        # Pinned to a full commit SHA for supply-chain safety, matching the
        # SHA-pinning convention used by every other action in these workflows
        # (was the floating tag `@v7`).
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ steps.generate_token.outputs.token }}
          script: |
            // TODO: Implement issue sync for pezkuwichain project board
            // Original action was pezkuwichain/github-issue-sync
            // Project: 119 (Teyrchain team board)
            // Fields: meta = 'async backing'
            console.log('Issue sync placeholder - configure for pezkuwichain project board');
+17
View File
@@ -0,0 +1,17 @@
# If the author of the issues is not a contributor to the project, label
# the issue with 'I10-unconfirmed'.
# (Comment fixed: it previously referenced 'Z0-unconfirmed', but the step
# below adds 'I10-unconfirmed'.)
name: Label New Issues
on:
  issues:
    types: [opened]
jobs:
  label-new-issues:
    runs-on: ubuntu-latest
    steps:
      - name: Label drafts
        uses: andymckay/labeler@e6c4322d0397f3240f0e7e30a33b5c5df2d39e90 # 1.0.4
        # 'NONE' association means the author has no relationship to the repo.
        if: github.event.issue.author_association == 'NONE'
        with:
          add-labels: "I10-unconfirmed"
@@ -0,0 +1,18 @@
# DISABLED: This workflow was for Kurdistan-Tech DevOps notifications.
# Pezkuwi SDK will implement its own notification system if needed.
# Kept as a manually-dispatchable stub so the old file path stays documented.
name: Notify DevOps when burn-in label applied (DISABLED)
on:
  workflow_dispatch:
    inputs:
      note:
        description: 'This workflow is disabled - Kurdistan-Tech Matrix notifications not applicable'
        required: false
jobs:
  disabled:
    runs-on: ubuntu-latest
    steps:
      - name: Notice
        run: echo "Burn-in label notification is disabled - Pezkuwi SDK uses different DevOps channels"
@@ -0,0 +1,25 @@
# Actions that makes review-bot green in the merge queue
# When a PR enters the merge queue, this posts a successful "review-bot"
# commit status on the merge-group head so the required check is satisfied.
name: Merge-Queue
on:
  merge_group:
jobs:
  trigger-merge-queue-action:
    runs-on: ubuntu-latest
    environment: merge-queues
    steps:
      - name: Generate token
        id: app_token
        uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a # v2.1.0
        with:
          app_id: ${{ secrets.REVIEW_APP_ID }}
          private_key: ${{ secrets.REVIEW_APP_KEY }}
      - name: Add Merge Queue status check
        uses: billyjbryant/create-status-check@3e6fa0ac599d10d9588cf9516ca4330ef669b858 # v2
        with:
          authToken: ${{ steps.app_token.outputs.token }}
          # The status context name must match the required check configured
          # in branch protection.
          context: "review-bot"
          description: "PRs for merge queue gets approved"
          state: "success"
          sha: ${{ github.event.merge_group.head_commit.id }}
@@ -0,0 +1,18 @@
# DISABLED: This workflow was for Kurdistan-Tech's wishlist leaderboard feature.
# Pezkuwi SDK will implement its own community engagement features.
# Kept as a manually-dispatchable stub so the old file path stays documented.
name: Update wishlist leaderboard (DISABLED)
on:
  workflow_dispatch:
    inputs:
      note:
        description: 'This workflow is disabled - Pezkuwi will implement own solution'
        required: false
jobs:
  disabled:
    runs-on: ubuntu-latest
    steps:
      - name: Notice
        run: echo "Wishlist leaderboard is disabled - Pezkuwi SDK uses different community engagement"
@@ -0,0 +1,85 @@
# Dry-runs the crate publishing plan and checks that the planned crates compile.
# NOTE(review): this workflow is only triggered by workflow_dispatch, but it
# reads `github.event.pull_request.number` (empty on manual dispatch, so
# CURRENT_PRDOC resolves to "pr_.prdoc" and the prdoc-moving steps no-op).
# Confirm whether a `pull_request` trigger was intended.
name: Check publish build
on:
  workflow_dispatch:
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  preflight:
    needs: isdraft
    uses: ./.github/workflows/reusable-preflight.yml
  check-publish-compile:
    timeout-minutes: 90
    needs: [preflight]
    runs-on: ${{ needs.preflight.outputs.RUNNER }}
    container:
      image: ${{ needs.preflight.outputs.IMAGE }}
    env:
      RUSTFLAGS: "-D warnings"
      SKIP_WASM_BUILD: 1
    steps:
      # Comment fixed: this SHA is v5.0.0 (it was mislabeled v4.1.7).
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Rust Cache
        uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
        with:
          # Fixed: the default branch is 'main' (see the Docs workflow triggers);
          # with 'refs/heads/master' the cache was never saved.
          save-if: ${{ github.ref == 'refs/heads/main' }}
      - name: install kurdistan-tech-publish
        run: |
          cargo install kurdistan-tech-publish@0.10.6 --locked -q
      - name: set current PR's prdoc name in a variable
        env:
          GITHUB_PR_NUM: ${{ github.event.pull_request.number }}
        run: |
          echo "CURRENT_PRDOC=pr_${GITHUB_PR_NUM}.prdoc" >> $GITHUB_ENV
      - name: kurdistan-tech-publish update plan w/o current prdoc
        run: |
          if [ -f prdoc/$CURRENT_PRDOC ]; then
            mv prdoc/$CURRENT_PRDOC .
          fi
          kurdistan-tech-publish --color always plan --skip-check --prdoc prdoc/
      # The code base is not in master's state (due to commits brought by the
      # current PR), but we're interested in all master's prdocs to be applied
      # as if master is a stable branch, and in next steps we're following up with
      # a patch release of all crates based on some newly added prdocs
      # (meaning only the current prdoc).
      - name: kurdistan-tech-publish apply plan on the code state prior to current prdoc
        run: kurdistan-tech-publish --color always apply --registry
      - name: move all prdocs except current one to unstable dir
        run: |
          if [ -f $CURRENT_PRDOC ]; then
            mkdir prdoc/unstable
            mv prdoc/pr_*.prdoc prdoc/unstable
            mv $CURRENT_PRDOC prdoc
          fi
      - name: kurdistan-tech-publish update plan just for PR's prdoc
        run: |
          if [ -f "prdoc/$CURRENT_PRDOC" ]; then
            kurdistan-tech-publish --color always plan --skip-check --prdoc prdoc/
          fi
      - name: kurdistan-tech-publish apply plan
        run: |
          if [ -f "prdoc/$CURRENT_PRDOC" ]; then
            kurdistan-tech-publish --color always apply --registry
          fi
      - name: kurdistan-tech-publish check compile
        run: |
          packages="$(kurdistan-tech-publish apply --print)"
          if [ -n "$packages" ]; then
            cargo --color always check $(printf -- '-p %s ' $packages)
          fi
@@ -0,0 +1,48 @@
# Detects whether the workspace contains any publishable crates; the real
# publish checks are stubbed until the pezkuwi-publish tool is ready.
name: Check publish
on:
  workflow_dispatch:
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
jobs:
  isdraft:
    uses: ./.github/workflows/reusable-isdraft.yml
  check-publish:
    runs-on: ubuntu-latest
    needs: isdraft
    steps:
      # Comment fixed: this SHA is v5.0.0 (it was mislabeled v4.1.7).
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Check for publishable crates
        id: check-publishable
        run: |
          # Find crates that are publishable (don't have publish = false)
          PUBLISHABLE=$(find . -name "Cargo.toml" -exec grep -L 'publish = false' {} \; | grep -v target | head -20)
          if [ -z "$PUBLISHABLE" ]; then
            echo "No publishable crates found (all have publish = false)"
            echo "has_publishable=false" >> $GITHUB_OUTPUT
          else
            echo "Found publishable crates:"
            echo "$PUBLISHABLE"
            echo "has_publishable=true" >> $GITHUB_OUTPUT
          fi
      - name: Rust Cache
        if: steps.check-publishable.outputs.has_publishable == 'true'
        uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
        with:
          # Fixed: the default branch is 'main' (see the Docs workflow triggers);
          # with 'refs/heads/master' the cache was never saved.
          save-if: ${{ github.ref == 'refs/heads/main' }}
      - name: Install pezkuwi-publish (when ready)
        if: steps.check-publishable.outputs.has_publishable == 'true'
        run: |
          echo "Publishable crates detected - publish checks will run when pezkuwi-publish tool is ready"
          # TODO: Replace with pezkuwi-publish when available
          # cargo install pezkuwi-publish --locked -q
      - name: Skip - all crates have publish = false
        if: steps.check-publishable.outputs.has_publishable == 'false'
        run: echo "All crates have publish = false, skipping publish checks"
@@ -0,0 +1,45 @@
# Manually-dispatched stub for claiming crate names on crates.io; the actual
# claim is disabled until the pezkuwi-publish tool is ready.
name: Claim Crates
on:
  workflow_dispatch:
jobs:
  claim-crates:
    runs-on: ubuntu-latest
    environment: master
    steps:
      # Comment fixed: this SHA is v5.0.0 (it was mislabeled v4.1.7).
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Check for publishable crates
        id: check-publishable
        run: |
          # Find crates that are publishable (don't have publish = false)
          PUBLISHABLE=$(find . -name "Cargo.toml" -exec grep -L 'publish = false' {} \; | grep -v target | head -20)
          if [ -z "$PUBLISHABLE" ]; then
            echo "No publishable crates found (all have publish = false)"
            echo "has_publishable=false" >> $GITHUB_OUTPUT
          else
            echo "Found publishable crates:"
            echo "$PUBLISHABLE"
            echo "has_publishable=true" >> $GITHUB_OUTPUT
          fi
      - name: Rust Cache
        if: steps.check-publishable.outputs.has_publishable == 'true'
        uses: Swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 # v2.8.1
        with:
          # Fixed: the default branch is 'main' (see the Docs workflow triggers);
          # with 'refs/heads/master' the cache was never saved.
          save-if: ${{ github.ref == 'refs/heads/main' }}
      - name: Claim crates on crates.io (when ready)
        if: steps.check-publishable.outputs.has_publishable == 'true'
        env:
          PEZKUWI_CRATESIO_TOKEN: ${{ secrets.PEZKUWI_CRATESIO_TOKEN }}
        run: |
          echo "Publishable crates detected - claim will run when pezkuwi-publish tool is ready"
          # TODO: Replace with pezkuwi-publish when available
          # cargo install pezkuwi-publish --locked -q
          # pezkuwi-publish --color always claim
      - name: Skip - all crates have publish = false
        if: steps.check-publishable.outputs.has_publishable == 'false'
        run: echo "All crates have publish = false, skipping crate claiming"
@@ -0,0 +1,143 @@
# This workflow has combined functionality of branching-off a new stable release branch and tagging an RC.
# The options to branch-off and/or tag an RC can be chosen independently by ticking the appropriate checkbox in the launching form,
# as the branch-off happens only once per quarter and a tagging activity is done more frequently for each new RC during the release process.
name: Release - Branch off stable branch and/or tag rc
on:
  workflow_dispatch:
    inputs:
      stable_version:
        description: Stable version in the format stableYYMM that will be used as branch name and rc tag base
        required: true
        type: string
      node_version:
        description: Version of the pezkuwi node in the format X.XX.X (e.g. 1.15.0). ️ Node version is needed only for the branch-off
        type: string
        required: false
      is_new_stable:
        description: Check this box if this is a new stable release and the stable branch needs to be created
        type: boolean
      tag_rc:
        description: Check this box if the rc tag needs to be created
        type: boolean
jobs:
  validate-inputs:
    runs-on: ubuntu-latest
    outputs:
      node_version: ${{ steps.validate_inputs.outputs.node_version }}
      stable_version: ${{ steps.validate_inputs.outputs.stable_version }}
    steps:
      - name: Checkout sources
        # Comment fixed: this SHA is v5.0.0 (it was mislabeled v4.1.7).
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Validate inputs
        id: validate_inputs
        run: |
          . ./.github/scripts/common/lib.sh
          if [ -n "${{ inputs.node_version }}" ]; then
            node_version=$(filter_version_from_input "${{ inputs.node_version }}")
            echo "node_version=${node_version}" >> $GITHUB_OUTPUT
          fi
          stable_version=$(validate_stable_tag ${{ inputs.stable_version }})
          echo "stable_version=${stable_version}" >> $GITHUB_OUTPUT
  create-stable-branch:
    if: ${{ inputs.is_new_stable }}
    needs: [ validate-inputs ]
    runs-on: ubuntu-latest
    environment: release
    env:
      PGP_KMS_KEY: ${{ secrets.PGP_KMS_SIGN_COMMITS_KEY }}
      PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
      STABLE_BRANCH_NAME: ${{ needs.validate-inputs.outputs.stable_version }}
    steps:
      - name: Install pgpkkms
        run: |
          # Install pgpkms that is used to sign commits
          pip install git+https://github.com/pezkuwichain-release/pgpkms.git@6cb1cecce1268412189b77e4b130f4fa248c4151
      - name: Generate content write token for the release automation
        id: generate_write_token
        uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        with:
          app-id: ${{ vars.RELEASE_AUTOMATION_APP_ID }}
          private-key: ${{ secrets.RELEASE_AUTOMATION_APP_PRIVATE_KEY }}
          owner: pezkuwichain
      - name: Checkout sources
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          # Fixed: the default branch of this repository is 'main' (the Docs
          # workflow triggers on pushes to 'main'); checking out 'master'
          # would fail after the rebrand.
          ref: main
          token: ${{ steps.generate_write_token.outputs.token }}
      - name: Import gpg keys
        run: |
          . ./.github/scripts/common/lib.sh
          import_gpg_keys
      - name: Config git
        run: |
          git config --global commit.gpgsign true
          git config --global gpg.program /home/runner/.local/bin/pgpkms-git
          # NOTE(review): "ParityReleases" and this signing key look like
          # pre-rebrand leftovers - confirm the release identity for Pezkuwi.
          git config --global user.name "ParityReleases"
          git config --global user.email "release-team@kurdistan-tech.io"
          git config --global user.signingKey "D8018FBB3F534D866A45998293C5FB5F6A367B51"
      - name: Create stable branch
        run: |
          git checkout -b "$STABLE_BRANCH_NAME"
          git show-ref "$STABLE_BRANCH_NAME"
      - name: Bump versions, reorder prdocs and push stable branch
        env:
          GH_TOKEN: ${{ steps.generate_write_token.outputs.token }}
        run: |
          . ./.github/scripts/release/release_lib.sh
          NODE_VERSION="${{ needs.validate-inputs.outputs.node_version }}"
          NODE_VERSION_PATTERN="\(NODE_VERSION[^=]*= \)\".*\""
          set_version "$NODE_VERSION_PATTERN" $NODE_VERSION "pezkuwi/node/primitives/src/lib.rs"
          commit_with_message "Bump node version to $NODE_VERSION in pezkuwi-cli"
          set_version "$NODE_VERSION_PATTERN" $NODE_VERSION "pezcumulus/pezkuwi-omni-node/lib/src/nodes/mod.rs"
          commit_with_message "Bump node version to $NODE_VERSION in pezkuwi-omni-node-lib"
          SPEC_VERSION=$(get_spec_version $NODE_VERSION)
          runtimes_list=$(get_filtered_runtimes_list)
          set_spec_versions $SPEC_VERSION "${runtimes_list[@]}"
          reorder_prdocs $STABLE_BRANCH_NAME
          gh auth setup-git
          git push origin "$STABLE_BRANCH_NAME"
      - name: Tag RC after branch off
        if: ${{ inputs.tag_rc }}
        env:
          GH_TOKEN: ${{ steps.generate_write_token.outputs.token }} # or use a PAT with workflow scope
        run: |
          stable_tag_base=pezkuwi-${{ needs.validate-inputs.outputs.stable_version }}
          gh workflow run release-11_rc-automation.yml \
            --repo ${{ github.repository }} \
            --ref ${{ needs.validate-inputs.outputs.stable_version }} \
            --field version=${stable_tag_base}
  tag-rc-without-branchoff:
    if: ${{ !inputs.is_new_stable && inputs.tag_rc }}
    needs: [ validate-inputs ]
    uses: ./.github/workflows/release-11_rc-automation.yml
    with:
      version: pezkuwi-${{ needs.validate-inputs.outputs.stable_version }}
    secrets: inherit
@@ -0,0 +1,106 @@
# Computes the next rc tag for the given stable version, creates and pushes a
# signed tag, and announces it on the release Matrix channel.
name: Release - RC tagging automation
on:
  workflow_dispatch:
    inputs:
      version:
        description: Current release/rc version in format pezkuwi-stableYYMM
  workflow_call:
    inputs:
      version:
        description: Current release/rc version in format pezkuwi-stableYYMM
        type: string
jobs:
  tag_rc:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        channel:
          - name: "RelEng: Pezkuwi Release Coordination"
            room: '!cqAmzdIcbOFwrdrubV:kurdistan-tech.io'
    environment: release
    env:
      PGP_KMS_KEY: ${{ secrets.PGP_KMS_SIGN_COMMITS_KEY }}
      PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
    steps:
      - name: Install pgpkkms
        run: |
          # Install pgpkms that is used to sign commits
          pip install git+https://github.com/pezkuwichain-release/pgpkms.git@6cb1cecce1268412189b77e4b130f4fa248c4151
      - name: Generate content write token for the release automation
        id: generate_write_token
        uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        with:
          app-id: ${{ vars.RELEASE_AUTOMATION_APP_ID }}
          private-key: ${{ secrets.RELEASE_AUTOMATION_APP_PRIVATE_KEY }}
          owner: pezkuwichain
      - name: Checkout sources
        # Comment fixed: this SHA is v5.0.0 (it was mislabeled v4.1.7).
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          # Full history is needed so existing rc tags can be inspected below.
          fetch-depth: 0
          token: ${{ steps.generate_write_token.outputs.token }}
      - name: Import gpg keys
        run: |
          . ./.github/scripts/common/lib.sh
          import_gpg_keys
      - name: Config git
        run: |
          git config --global commit.gpgsign true
          git config --global gpg.program /home/runner/.local/bin/pgpkms-git
          # NOTE(review): "ParityReleases" and this signing key look like
          # pre-rebrand leftovers - confirm the release identity for Pezkuwi.
          git config --global user.name "ParityReleases"
          git config --global user.email "release-team@kurdistan-tech.io"
          git config --global user.signingKey "D8018FBB3F534D866A45998293C5FB5F6A367B51"
      - name: Compute next rc tag
        # if: ${{ steps.get_rel_product.outputs.product == 'pezkuwi' }}
        id: compute_tag
        shell: bash
        run: |
          . ./.github/scripts/common/lib.sh
          # Get last rc tag if exists, else set it to {version}-rc1
          if [[ -z "${{ inputs.version }}" ]]; then
            version=v$(get_pezkuwi_node_version_from_code)
          else
            version=$(validate_stable_tag ${{ inputs.version }})
          fi
          echo "$version"
          echo "version=$version" >> $GITHUB_OUTPUT
          last_rc=$(get_latest_rc_tag $version pezkuwi)
          if [ -n "$last_rc" ]; then
            suffix=$(increment_rc_tag $last_rc)
            echo "new_tag=$version-rc$suffix" >> $GITHUB_OUTPUT
            echo "first_rc=false" >> $GITHUB_OUTPUT
          else
            echo "new_tag=$version-rc1" >> $GITHUB_OUTPUT
            echo "first_rc=true" >> $GITHUB_OUTPUT
          fi
      - name: Apply new tag
        env:
          GH_TOKEN: ${{ steps.generate_write_token.outputs.token }}
          RC_TAG: ${{ steps.compute_tag.outputs.new_tag }}
        run: |
          git tag -s $RC_TAG -m "new rc tag $RC_TAG"
          git push origin $RC_TAG
      - name: Send Matrix message to ${{ matrix.channel.name }}
        uses: s3krit/matrix-message-action@70ad3fb812ee0e45ff8999d6af11cafad11a6ecf # v0.0.3
        # if: steps.create-issue.outputs.url != ''
        with:
          room_id: ${{ matrix.channel.room }}
          access_token: ${{ secrets.RELEASENOTES_MATRIX_V2_ACCESS_TOKEN }}
          server: m.kurdistan-tech.io
          message: |
            Release process for pezkuwi ${{ steps.compute_tag.outputs.new_tag }} has been started.<br/>
+300
View File
@@ -0,0 +1,300 @@
# Fans out to the reusable rc-build workflow once per (binary, target) pair.
# Each binary is built for x86_64-unknown-linux-gnu and aarch64-apple-darwin;
# selecting "all" dispatches every build job.
# NOTE(review): the per-binary jobs are near-identical and could be collapsed
# into a matrix over binary/package/target - left as-is to preserve behavior.
name: Release - Build node release candidate
on:
  workflow_dispatch:
    inputs:
      binary:
        description: Binary to be build for the release
        default: all
        type: choice
        options:
          - pezkuwi
          - pezkuwi-teyrchain
          - pezkuwi-omni-node
          - pezframe-omni-bencher
          - chain-spec-builder
          - bizinikiwi-node
          - eth-rpc
          - pez-subkey
          - all
      release_tag:
        description: Tag matching the actual release candidate with the format pezkuwi-stableYYMM(-X)-rcX or pezkuwi-stableYYMM(-X)
        type: string
  workflow_call:
    inputs:
      binary:
        description: Binary to be built for the release
        default: all
        type: string
      release_tag:
        description: Tag matching the actual release candidate with the format pezkuwi-stableYYMM(-X)-rcY or pezkuwi-stableYYMM(-X)
        type: string
jobs:
  # DISABLED: Workflow synchronization check
  # check-synchronization:
  #   uses: pezkuwichain-release/sync-workflows/.github/workflows/check-synchronization.yml@main
  #   secrets:
  #     fork_writer_app_key: ${{ secrets.UPSTREAM_CONTENT_SYNC_APP_KEY }}
  # Normalizes/validates the release tag before any build starts.
  validate-inputs:
    runs-on: ubuntu-latest
    outputs:
      release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Validate inputs
        id: validate_inputs
        run: |
          . ./.github/scripts/common/lib.sh
          RELEASE_TAG=$(validate_stable_tag ${{ inputs.release_tag }})
          echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT
  # --- Linux (x86_64-unknown-linux-gnu) builds ---
  build-pezkuwi-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'pezkuwi' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["pezkuwi", "pezkuwi-prepare-worker", "pezkuwi-execute-worker"]'
      package: pezkuwi
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: x86_64-unknown-linux-gnu
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-pezkuwi-teyrchain-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'pezkuwi-teyrchain' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["pezkuwi-teyrchain"]'
      package: "pezkuwi-teyrchain-bin"
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: x86_64-unknown-linux-gnu
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-pezkuwi-omni-node-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'pezkuwi-omni-node' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["pezkuwi-omni-node"]'
      package: "pezkuwi-omni-node"
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: x86_64-unknown-linux-gnu
      features: runtime-benchmarks
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-pezframe-omni-bencher-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'pezframe-omni-bencher' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["pezframe-omni-bencher"]'
      package: "pezframe-omni-bencher"
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: x86_64-unknown-linux-gnu
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-chain-spec-builder-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'chain-spec-builder' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["chain-spec-builder"]'
      package: pez-staging-chain-spec-builder
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: x86_64-unknown-linux-gnu
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-bizinikiwi-node-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'bizinikiwi-node' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["bizinikiwi-node"]'
      package: pez-staging-node-cli
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: x86_64-unknown-linux-gnu
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-eth-rpc-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'eth-rpc' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["eth-rpc"]'
      package: pezpallet-revive-eth-rpc
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: x86_64-unknown-linux-gnu
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-pez-subkey-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'pez-subkey' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["pez-subkey"]'
      package: pez-subkey
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: x86_64-unknown-linux-gnu
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  # --- macOS (aarch64-apple-darwin) builds, mirroring the Linux jobs above ---
  build-pezkuwi-macos-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'pezkuwi' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["pezkuwi", "pezkuwi-prepare-worker", "pezkuwi-execute-worker"]'
      package: pezkuwi
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: aarch64-apple-darwin
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-pezkuwi-teyrchain-macos-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'pezkuwi-teyrchain' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["pezkuwi-teyrchain"]'
      package: pezkuwi-teyrchain-bin
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: aarch64-apple-darwin
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-pezkuwi-omni-node-macos-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'pezkuwi-omni-node' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["pezkuwi-omni-node"]'
      package: pezkuwi-omni-node
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: aarch64-apple-darwin
      features: runtime-benchmarks
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-pezframe-omni-bencher-macos-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'pezframe-omni-bencher' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["pezframe-omni-bencher"]'
      package: pezframe-omni-bencher
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: aarch64-apple-darwin
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-chain-spec-builder-macos-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'chain-spec-builder' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["chain-spec-builder"]'
      package: pez-staging-chain-spec-builder
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: aarch64-apple-darwin
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-bizinikiwi-node-macos-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'bizinikiwi-node' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["bizinikiwi-node"]'
      package: pez-staging-node-cli
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: aarch64-apple-darwin
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-eth-rpc-macos-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'eth-rpc' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["eth-rpc"]'
      package: pezpallet-revive-eth-rpc
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: aarch64-apple-darwin
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
  build-pez-subkey-macos-binary:
    needs: [validate-inputs]
    if: ${{ inputs.binary == 'pez-subkey' || inputs.binary == 'all' }}
    uses: "./.github/workflows/release-reusable-rc-build.yml"
    with:
      binary: '["pez-subkey"]'
      package: pez-subkey
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: aarch64-apple-darwin
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read
@@ -0,0 +1,90 @@
# Validates the release tag and delegates deterministic runtime WASM builds
# to the srtool reusable workflow for the selected chain(s).
name: Release - Build runtimes
on:
  workflow_dispatch:
    inputs:
      chain:
        description: The chain to use
        default: all
        required: true
        type: choice
        options:
          - all
          - zagros
          - asset-hub-zagros
          - bridge-hub-zagros
          - collectives-zagros
          - coretime-zagros
          - glutton-zagros
          - people-zagros
      runtime_dir:
        description: The runtime dir to be used (⚠️ this parameter is optional and needed only in case of the single runtime build, set it accordingly to the runtime you want to build)
        default: pezkuwi/runtime/zagros
        type: choice
        options:
          - pezkuwi/runtime/zagros
          - pezcumulus/teyrchains/runtimes/assets/asset-hub-zagros
          - pezcumulus/teyrchains/runtimes/bridge-hubs/bridge-hub-zagros
          - pezcumulus/teyrchains/runtimes/collectives/collectives-zagros
          - pezcumulus/teyrchains/runtimes/coretime/coretime-zagros
          - pezcumulus/teyrchains/runtimes/people/people-zagros
          - pezcumulus/teyrchains/runtimes/glutton/glutton-zagros
      release_tag:
        description: Tag matching the actual release candidate with the format pezkuwi-stableYYMM(-X)-rcY or pezkuwi-stableYYMM(-X)
        type: string
  workflow_call:
    inputs:
      chain:
        description: The chain to use
        default: all
        required: true
        type: string
      runtime_dir:
        description: The runtime dir to be used (⚠️ this parameter is optional and needed only in case of the single runtime build, set it accordingly to the runtime you want to build)
        default: pezkuwi/runtime/zagros
        type: string
      release_tag:
        description: Tag matching the actual release candidate with the format pezkuwi-stableYYMM(-X)-rcY or pezkuwi-stableYYMM(-X)
        type: string
    outputs:
      published_runtimes:
        value: ${{ jobs.build-runtimes.outputs.published_runtimes }}
jobs:
  # DISABLED: Workflow synchronization check
  # check-synchronization:
  #   uses: pezkuwichain-release/sync-workflows/.github/workflows/check-synchronization.yml@main
  #   secrets:
  #     fork_writer_app_key: ${{ secrets.UPSTREAM_CONTENT_SYNC_APP_KEY }}
  # Normalizes/validates the release tag before the srtool build starts.
  validate-inputs:
    runs-on: ubuntu-latest
    outputs:
      release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Validate inputs
        id: validate_inputs
        run: |
          . ./.github/scripts/common/lib.sh
          RELEASE_TAG=$(validate_stable_tag ${{ inputs.release_tag }})
          echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT
  build-runtimes:
    needs: [validate-inputs]
    uses: "./.github/workflows/release-srtool.yml"
    with:
      # Space-separated crate-name prefixes excluded from the srtool build.
      excluded_runtimes: "asset-hub-pezkuwichain bridge-hub-pezkuwichain coretime-pezkuwichain people-pezkuwichain pezkuwichain pezkuwichain-teyrchain bizinikiwi-test bp pezcumulus-test kitchensink minimal-template teyrchain-template penpal pezkuwi-test seedling shell pezframe-try sp solochain-template pezkuwi-sdk-docs-first pezpallet-staking-async-teyrchain pezpallet-staking-async-rc pezframe-storage-access-test yet-another-teyrchain revive-dev"
      build_opts: "--features on-chain-release-build"
      profile: production
      chain: ${{ inputs.chain }}
      runtime_dir: ${{ inputs.runtime_dir }}
    permissions:
      id-token: write
      attestations: write
      contents: read
@@ -0,0 +1,126 @@
# Top-level orchestrator for a release candidate: validates the tag, builds
# runtimes (unless no_runtimes is set), builds binaries, then opens the
# release draft via the downstream reusable workflows.
name: Release - Combined Builds Flow
on:
  workflow_dispatch:
    inputs:
      chain:
        description: The chain to use for runtime builds
        default: all
        required: true
        type: choice
        options:
          - all
          - zagros
          - asset-hub-zagros
          - bridge-hub-zagros
          - collectives-zagros
          - coretime-zagros
          - glutton-zagros
          - people-zagros
      runtime_dir:
        description: The runtime dir to be used (⚠️ this parameter is optional and needed only in case of the single runtime build, set it accordingly to the runtime you want to build)
        default: pezkuwi/runtime/zagros
        type: choice
        options:
          - pezkuwi/runtime/zagros
          - pezcumulus/teyrchains/runtimes/assets/asset-hub-zagros
          - pezcumulus/teyrchains/runtimes/bridge-hubs/bridge-hub-zagros
          - pezcumulus/teyrchains/runtimes/collectives/collectives-zagros
          - pezcumulus/teyrchains/runtimes/coretime/coretime-zagros
          - pezcumulus/teyrchains/runtimes/people/people-zagros
          - pezcumulus/teyrchains/runtimes/glutton/glutton-zagros
      binary:
        description: Binary to be built for the release candidate
        default: all
        type: choice
        options:
          - pezkuwi
          - pezkuwi-teyrchain
          - pezkuwi-omni-node
          - pezframe-omni-bencher
          - chain-spec-builder
          - bizinikiwi-node
          - eth-rpc
          - pez-subkey
          - all
      release_tag:
        description: Tag matching the actual release candidate with the format pezkuwi-stableYYMM(-X)-rcY or pezkuwi-stableYYMM(-X)
        type: string
        required: true
      no_runtimes:
        description: If true, no runtime build will be triggered and release draft will be published without runtimes (⚠️ use it for the patch releases of the latest stable)
        required: true
        type: boolean
        default: false
jobs:
  # DISABLED: Workflow synchronization check
  # check-synchronization:
  #   uses: pezkuwichain-release/sync-workflows/.github/workflows/check-synchronization.yml@main
  #   secrets:
  #     fork_writer_app_key: ${{ secrets.UPSTREAM_CONTENT_SYNC_APP_KEY }}

  # Normalizes the release tag via lib.sh and re-exposes it as a job output
  # so every downstream flow uses the same validated value.
  validate-inputs:
    runs-on: ubuntu-latest
    outputs:
      release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Validate inputs
        id: validate_inputs
        run: |
          . ./.github/scripts/common/lib.sh
          RELEASE_TAG=$(validate_stable_tag ${{ inputs.release_tag }})
          echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT

  # Builds all (or one) runtime(s); skipped entirely when no_runtimes is true.
  build-runtimes-flow:
    if: ${{ inputs.no_runtimes == false }}
    needs: [validate-inputs]
    uses: "./.github/workflows/release-21_build-runtimes.yml"
    with:
      chain: ${{ inputs.chain }}
      runtime_dir: ${{ inputs.runtime_dir }}
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read

  # Builds the release-candidate binaries; runs regardless of no_runtimes.
  build-rc-flow:
    needs: [validate-inputs]
    uses: "./.github/workflows/release-20_build-rc.yml"
    with:
      binary: ${{ inputs.binary }}
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
    secrets: inherit
    permissions:
      id-token: write
      attestations: write
      contents: read

  # Exactly one of the two draft-trigger jobs below runs, keyed on no_runtimes.
  trigger-release-draft-with-runtimes:
    if: ${{ inputs.no_runtimes == false }}
    needs: [build-runtimes-flow, build-rc-flow, validate-inputs]
    uses: "./.github/workflows/release-30_publish_release_draft.yml"
    with:
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      build_run_id: ${{ github.run_id }}
      runtimes: '${{ needs.build-runtimes-flow.outputs.published_runtimes }}'
      no_runtimes: ${{ inputs.no_runtimes }}
      crates_only: false
    secrets: inherit

  trigger-release-draft-without-runtimes:
    if: ${{ inputs.no_runtimes == true }}
    needs: [build-rc-flow, validate-inputs]
    uses: "./.github/workflows/release-30_publish_release_draft.yml"
    with:
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      build_run_id: ${{ github.run_id }}
      no_runtimes: ${{ inputs.no_runtimes }}
      crates_only: false
    secrets: inherit
@@ -0,0 +1,306 @@
name: Release - Publish draft
# This workflow runs in pezkuwichain-release and creates full release draft with:
# - release notes
# - info about the runtimes
# - attached artifacts:
#   - runtimes
#   - binaries
#   - signatures
on:
  workflow_dispatch:
    inputs:
      release_tag:
        description: Tag matching the actual release candidate with the format pezkuwi-stableYYMM(-X)-rcX or pezkuwi-stableYYMM(-X)
        required: true
        type: string
      build_run_id:
        description: Run ID of the current release workflow run to be used to download the artifacts
        required: true
        type: string
      # NOTE(review): no explicit `type` — workflow_dispatch inputs default to string.
      runtimes:
        description: Runtimes to be published (⚠️ this needs to be provided in case of the complete release, for the crates only release or a patch release without runtimes it is not needed)
      no_runtimes:
        description: If true, release draft will be published without runtimes
        required: true
        type: boolean
        default: false
      crates_only:
        description: If true, release draft will contain only release notes and no artifacts will be published (needed for stable releases that are crates only)
        required: true
        type: boolean
        default: false
  workflow_call:
    inputs:
      release_tag:
        description: Tag matching the actual release candidate with the format pezkuwi-stableYYMM(-X)-rcY or pezkuwi-stableYYMM(-X)
        required: true
        type: string
      build_run_id:
        description: Run ID of the current release workflow run to be used to download the artifacts
        required: true
        type: string
      runtimes:
        description: Runtimes to be published
        type: string
      no_runtimes:
        description: If true, release draft will be published without runtimes
        required: true
        type: boolean
        default: false
      crates_only:
        description: If true, release draft will contain only release notes and no artifacts will be published (needed for stable releases that are crates only)
        required: true
        type: boolean
        default: false
jobs:
  # DISABLED: Workflow synchronization check
  # check-synchronization:
  #   uses: pezkuwichain-release/sync-workflows/.github/workflows/check-synchronization.yml@main
  #   secrets:
  #     fork_writer_app_key: ${{ secrets.UPSTREAM_CONTENT_SYNC_APP_KEY }}

  # Validates/normalizes the release tag once for every downstream job.
  validate-inputs:
    runs-on: ubuntu-latest
    outputs:
      release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Validate inputs
        id: validate_inputs
        run: |
          . ./.github/scripts/common/lib.sh
          RELEASE_TAG=$(validate_stable_tag ${{ inputs.release_tag }})
          echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT
  # Creates the GitHub release draft: downloads runtime artifacts (when any),
  # renders the release notes with tera-cli, and opens the draft on pezkuwi-sdk.
  publish-release-draft:
    runs-on: ubuntu-latest
    environment: release
    needs: [ validate-inputs ]
    outputs:
      release_url: ${{ steps.create-release.outputs.html_url }}
      asset_upload_url: ${{ steps.create-release.outputs.upload_url }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Generate content write token for the release automation
        id: generate_write_token
        uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        with:
          app-id: ${{ vars.PEZKUWI_SDK_RELEASE_RW_APP_ID }}
          private-key: ${{ secrets.PEZKUWI_SDK_RELEASE_RW_APP_KEY }}
          owner: pezkuwichain
          repositories: pezkuwi-sdk
      - name: Download runtimes artifacts
        if: ${{ inputs.no_runtimes == false && inputs.crates_only == false }}
        env:
          GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
        run: |
          mkdir -p ${{ github.workspace}}/runtimes/
          gh run download ${{ inputs.build_run_id }} --dir ${{ github.workspace}}/runtimes
          ls -la ${{ github.workspace}}/runtimes
      - name: Prepare tooling
        run: |
          URL=https://github.com/chevdor/tera-cli/releases/download/v0.4.0/tera-cli_linux_amd64.deb
          wget $URL -O tera.deb
          sudo dpkg -i tera.deb
      - name: Prepare draft
        id: draft
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # Paths to the srtool digests produced by the runtime build run.
          ASSET_HUB_ZAGROS_DIGEST: ${{ github.workspace}}/runtimes/asset-hub-zagros-runtime/asset-hub-zagros-srtool-digest.json
          BRIDGE_HUB_ZAGROS_DIGEST: ${{ github.workspace}}/runtimes/bridge-hub-zagros-runtime/bridge-hub-zagros-srtool-digest.json
          COLLECTIVES_ZAGROS_DIGEST: ${{ github.workspace}}/runtimes/collectives-zagros-runtime/collectives-zagros-srtool-digest.json
          CORETIME_ZAGROS_DIGEST: ${{ github.workspace}}/runtimes/coretime-zagros-runtime/coretime-zagros-srtool-digest.json
          GLUTTON_ZAGROS_DIGEST: ${{ github.workspace}}/runtimes/glutton-zagros-runtime/glutton-zagros-srtool-digest.json
          PEOPLE_ZAGROS_DIGEST: ${{ github.workspace}}/runtimes/people-zagros-runtime/people-zagros-srtool-digest.json
          ZAGROS_DIGEST: ${{ github.workspace}}/runtimes/zagros-runtime/zagros-srtool-digest.json
          RELEASE_TAG: ${{ needs.validate-inputs.outputs.release_tag }}
          NO_RUNTIMES: ${{ inputs.no_runtimes }}
          CRATES_ONLY: ${{ inputs.crates_only }}
        shell: bash
        run: |
          . ./.github/scripts/common/lib.sh
          # Rust toolchain version is parsed out of .github/env (pattern "-X.Y.Z-").
          export RUSTC_STABLE=$(grep -oP '(?<=-)[0-9]+\.[0-9]+\.[0-9]+(?=-)' .github/env)
          export REF1=$(get_latest_release_tag)
          # Fall back to the pushed ref when no explicit tag was provided.
          if [[ -z "$RELEASE_TAG" ]]; then
            export REF2="${{ github.ref_name }}"
            echo "REF2: ${REF2}"
          else
            export REF2="$RELEASE_TAG"
            echo "REF2: ${REF2}"
          fi
          echo "REL_TAG=$REF2" >> $GITHUB_ENV
          # Extract the "stableYYMM(-X)" fragment for the changelog scripts.
          export VERSION=$(echo "$REF2" | sed -E 's/.*(stable[0-9]{4}(-[0-9]+)?).*$/\1/')
          echo "Version: $VERSION"
          ./scripts/release/build-changelogs.sh
      - name: Archive artifact context.json
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: release-notes-context
          path: |
            scripts/release/context.json
            **/*-srtool-digest.json
      - name: Create draft release
        id: create-release
        env:
          GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
        run: |
          gh release create ${{ env.REL_TAG }} \
            --repo pezkuwichain/pezkuwi-sdk \
            --draft \
            --title "Pezkuwi ${{ env.REL_TAG }}" \
            --notes-file ${{ github.workspace}}/scripts/release/RELEASE_DRAFT.md
  # Uploads each built runtime wasm (one matrix leg per chain) to the draft.
  publish-runtimes:
    if: ${{ inputs.crates_only == false && inputs.no_runtimes == false }}
    needs: [ validate-inputs, publish-release-draft ]
    environment: release
    continue-on-error: true
    runs-on: ubuntu-latest
    strategy:
      # `runtimes` is a JSON matrix definition produced by the runtime build flow.
      matrix: ${{ fromJSON(inputs.runtimes) }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Download artifacts
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
      - name: Generate content write token for the release automation
        id: generate_write_token
        uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        with:
          app-id: ${{ vars.PEZKUWI_SDK_RELEASE_RW_APP_ID }}
          private-key: ${{ secrets.PEZKUWI_SDK_RELEASE_RW_APP_KEY }}
          owner: pezkuwichain
          repositories: pezkuwi-sdk
      - name: Download runtimes
        env:
          GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
        run: |
          mkdir -p ${{ github.workspace}}/runtimes/
          gh run download ${{ inputs.build_run_id }} --dir ${{ github.workspace}}/runtimes
          ls -la ${{ github.workspace}}/runtimes
      - name: Get runtime info
        env:
          # NOTE(review): this digest path is under `release-notes-context/`, i.e. the
          # artifact fetched by the bare "Download artifacts" step above — confirm layout.
          JSON: ${{ github.workspace}}/release-notes-context/runtimes/${{ matrix.chain }}-runtime/${{ matrix.chain }}-srtool-digest.json
        run: |
          cd ${{ github.workspace}}/runtimes
          >>$GITHUB_ENV echo ASSET=$(find ${{ matrix.chain }}-runtime -name '*.compact.compressed.wasm')
          >>$GITHUB_ENV echo SPEC=$(<${JSON} jq -r .runtimes.compact.subwasm.core_version.specVersion)
      - name: Upload compressed ${{ matrix.chain }} v${{ env.SPEC }} wasm
        working-directory: ${{ github.workspace}}/runtimes
        env:
          GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
        run: |
          # Attach the wasm under a spec-versioned file name.
          VERSIONED_ASSET="${{ matrix.chain }}_runtime-v${{ env.SPEC }}.compact.compressed.wasm"
          mv "${{ env.ASSET }}" "$VERSIONED_ASSET"
          gh release upload ${{ needs.validate-inputs.outputs.release_tag }} \
            --repo pezkuwichain/pezkuwi-sdk "$VERSIONED_ASSET"
  # Fetches signed binaries from S3 (per binary × target) and attaches them,
  # together with signatures and checksums, to the release draft.
  publish-release-artifacts:
    if: ${{ inputs.crates_only == false }}
    needs: [ validate-inputs, publish-release-draft ]
    environment: release
    continue-on-error: true
    runs-on: ubuntu-latest
    strategy:
      matrix:
        binary: [ pezkuwi, pezkuwi-execute-worker, pezkuwi-prepare-worker, pezkuwi-teyrchain, pezkuwi-omni-node, pezframe-omni-bencher, chain-spec-builder ]
        target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
    steps:
      - name: Checkout sources
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Fetch binaries from s3 based on version
        run: |
          . ./.github/scripts/common/lib.sh
          # NOTE(review): VERSION is not passed as an argument — presumably
          # fetch_release_artifacts_from_s3 reads it from the env; confirm in lib.sh.
          VERSION="${{ needs.validate-inputs.outputs.release_tag }}"
          fetch_release_artifacts_from_s3 ${{ matrix.binary }} ${{ matrix.target }}
      - name: Rename aarch64-apple-darwin binaries
        if: ${{ matrix.target == 'aarch64-apple-darwin' }}
        working-directory: ${{ github.workspace}}/release-artifacts/${{ matrix.target }}/${{ matrix.binary }}
        run: |
          . ../../../.github/scripts/common/lib.sh
          # Suffix macOS binaries so they do not collide with the Linux assets.
          mv ${{ matrix.binary }} ${{ matrix.binary }}-aarch64-apple-darwin
          mv ${{ matrix.binary }}.asc ${{ matrix.binary }}-aarch64-apple-darwin.asc
          sha256sum "${{ matrix.binary }}-aarch64-apple-darwin" | tee "${{ matrix.binary }}-aarch64-apple-darwin.sha256"
          check_sha256 "${{ matrix.binary }}-aarch64-apple-darwin" && echo "OK" || echo "ERR"
      - name: Generate content write token for the release automation
        id: generate_write_token
        uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        with:
          app-id: ${{ vars.PEZKUWI_SDK_RELEASE_RW_APP_ID }}
          private-key: ${{ secrets.PEZKUWI_SDK_RELEASE_RW_APP_KEY }}
          owner: pezkuwichain
          repositories: pezkuwi-sdk
      - name: Upload ${{ matrix.binary }} binary to release draft
        env:
          GITHUB_TOKEN: ${{ steps.generate_write_token.outputs.token }}
        working-directory: ${{ github.workspace}}/release-artifacts/${{ matrix.target }}/${{ matrix.binary }}
        run: |
          if [[ ${{ matrix.target }} == "aarch64-apple-darwin" ]]; then
            gh release upload ${{ needs.validate-inputs.outputs.release_tag }} \
              --repo pezkuwichain/pezkuwi-sdk \
              ${{ matrix.binary }}-aarch64-apple-darwin \
              ${{ matrix.binary }}-aarch64-apple-darwin.asc \
              ${{ matrix.binary }}-aarch64-apple-darwin.sha256
          else
            gh release upload ${{ needs.validate-inputs.outputs.release_tag }} \
              --repo pezkuwichain/pezkuwi-sdk \
              ${{ matrix.binary }} \
              ${{ matrix.binary }}.asc \
              ${{ matrix.binary }}.sha256
          fi
post_to_matrix:
runs-on: ubuntu-latest
needs: [ validate-inputs, publish-release-draft ]
environment: release
strategy:
matrix:
channel:
- name: "Team: RelEng Internal"
room: '!GvAyzgCDgaVrvibaAF:kurdistan-tech.io'
steps:
- name: Send Matrix message to ${{ matrix.channel.name }}
uses: s3krit/matrix-message-action@70ad3fb812ee0e45ff8999d6af11cafad11a6ecf # v0.0.3
with:
room_id: ${{ matrix.channel.room }}
access_token: ${{ secrets.RELEASENOTES_MATRIX_V2_ACCESS_TOKEN }}
server: m.kurdistan-tech.io
message: |
**New version of pezkuwi tagged**: ${{ needs.validate-inputs.outputs.release_tag }}<br/>
And release draft is release created in [pezkuwi-sdk repo](https://github.com/pezkuwichain/pezkuwi-sdk/releases)
@@ -0,0 +1,136 @@
# Promotes already-built rc artifacts on S3 to their final (non-rc) location,
# one job per binary, fanned out over the supported targets.
name: Release - Promote RC to final candidate on S3
on:
  workflow_dispatch:
    inputs:
      binary:
        description: Binary to be build for the release
        default: all
        type: choice
        options:
          - pezkuwi
          - pezkuwi-teyrchain
          - pezkuwi-omni-node
          - pezframe-omni-bencher
          - chain-spec-builder
          - all
      release_tag:
        description: Tag matching the actual release candidate with the format pezkuwi-stableYYMM(-X)-rcX
        type: string
  workflow_call:
    inputs:
      binary:
        description: Binary to be build for the release
        default: all
        type: string
      release_tag:
        description: Tag matching the actual release candidate with the format pezkuwi-stableYYMM(-X)-rcX
        type: string
        required: true
jobs:
  # DISABLED: Workflow synchronization check
  # check-synchronization:
  #   uses: pezkuwichain-release/sync-workflows/.github/workflows/check-synchronization.yml@main
  #   secrets:
  #     fork_writer_app_key: ${{ secrets.UPSTREAM_CONTENT_SYNC_APP_KEY }}

  # Validates the rc tag and derives the corresponding final tag.
  validate-inputs:
    runs-on: ubuntu-latest
    outputs:
      release_tag: ${{ steps.validate_inputs.outputs.release_tag }}
      final_tag: ${{ steps.validate_inputs.outputs.final_tag }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Validate inputs
        id: validate_inputs
        run: |
          . ./.github/scripts/common/lib.sh
          RELEASE_TAG=$(validate_stable_tag ${{ inputs.release_tag }})
          echo "release_tag=${RELEASE_TAG}" >> $GITHUB_OUTPUT
          # Fix: `final_tag` was declared as a job output but never written by
          # this step, so it always evaluated to an empty string. Derive it by
          # stripping the -rcX suffix:
          # pezkuwi-stableYYMM(-X)-rcX -> pezkuwi-stableYYMM(-X).
          echo "final_tag=${RELEASE_TAG%-rc*}" >> $GITHUB_OUTPUT

  # One promotion job per binary; each fans out over the release targets.
  promote-pezkuwi-rc-to-final:
    if: ${{ inputs.binary == 'pezkuwi' || inputs.binary == 'all' }}
    needs: [ validate-inputs ]
    uses: ./.github/workflows/release-reusable-promote-to-final.yml
    strategy:
      matrix:
        target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
    with:
      package: pezkuwi
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: ${{ matrix.target }}
    secrets:
      AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
      AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
      AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}

  promote-pezkuwi-teyrchain-rc-to-final:
    if: ${{ inputs.binary == 'pezkuwi-teyrchain' || inputs.binary == 'all' }}
    needs: [ validate-inputs ]
    uses: ./.github/workflows/release-reusable-promote-to-final.yml
    strategy:
      matrix:
        target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
    with:
      package: pezkuwi-teyrchain
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: ${{ matrix.target }}
    secrets:
      AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
      AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
      AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}

  promote-pezkuwi-omni-node-rc-to-final:
    if: ${{ inputs.binary == 'pezkuwi-omni-node' || inputs.binary == 'all' }}
    needs: [ validate-inputs ]
    uses: ./.github/workflows/release-reusable-promote-to-final.yml
    strategy:
      matrix:
        target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
    with:
      package: pezkuwi-omni-node
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: ${{ matrix.target }}
    secrets:
      AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
      AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
      AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}

  promote-pezframe-omni-bencher-rc-to-final:
    if: ${{ inputs.binary == 'pezframe-omni-bencher' || inputs.binary == 'all' }}
    needs: [ validate-inputs ]
    uses: ./.github/workflows/release-reusable-promote-to-final.yml
    strategy:
      matrix:
        target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
    with:
      package: pezframe-omni-bencher
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: ${{ matrix.target }}
    secrets:
      AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
      AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
      AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}

  promote-chain-spec-builder-rc-to-final:
    if: ${{ inputs.binary == 'chain-spec-builder' || inputs.binary == 'all' }}
    needs: [ validate-inputs ]
    uses: ./.github/workflows/release-reusable-promote-to-final.yml
    strategy:
      matrix:
        target: [ x86_64-unknown-linux-gnu, aarch64-apple-darwin ]
    with:
      package: chain-spec-builder
      release_tag: ${{ needs.validate-inputs.outputs.release_tag }}
      target: ${{ matrix.target }}
    secrets:
      AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
      AWS_RELEASE_ACCESS_KEY_ID: ${{ secrets.AWS_RELEASE_ACCESS_KEY_ID }}
      AWS_RELEASE_SECRET_ACCESS_KEY: ${{ secrets.AWS_RELEASE_SECRET_ACCESS_KEY }}
@@ -0,0 +1,40 @@
name: Release - Publish pezkuwi deb package
# This workflow publishes the pezkuwi Debian package by calling a reusable workflow.
on:
  workflow_dispatch:
    inputs:
      tag:
        description: Current final release tag in the format pezkuwi-stableYYMM or pezkuwi-stable-YYMM-X
        default: pezkuwi-stable2412
        required: true
        type: string
      distribution:
        description: Distribution where to publish deb package (release, staging, stable2407, etc)
        default: staging
        required: true
        type: string
  workflow_call:
    inputs:
      tag:
        description: Current final release tag in the format pezkuwi-stableYYMM or pezkuwi-stable-YYMM-X
        required: true
        type: string
      distribution:
        description: Distribution where to publish deb package (release, staging, stable2407, etc)
        default: staging
        required: true
        type: string
jobs:
  # Delegates the actual apt-repo publishing to the shared reusable workflow.
  call-publish-workflow:
    uses: ./.github/workflows/release-reusable-publish-packages.yml
    with:
      tag: ${{ inputs.tag }}
      distribution: ${{ inputs.distribution }}
      package_type: 'deb'
      aws_repo_base_path: "s3://releases-package-repos"
      cloudfront_distribution_id: "E36FKEYWDXAZYJ"
    secrets: inherit
@@ -0,0 +1,29 @@
name: Release - Publish pezkuwi RPM package
# This workflow publishes the pezkuwi RPM package by calling a reusable workflow.
on:
  workflow_dispatch:
    inputs:
      tag:
        description: Current final release tag in the format pezkuwi-stableYYMM or pezkuwi-stable-YYMM-X
        default: pezkuwi-stable2412
        required: true
        type: string
      # Fix: the job below passes `${{ inputs.distribution }}`, but this input
      # was never declared on either trigger, so the reusable workflow always
      # received an empty string. Declared here mirroring the deb workflow.
      distribution:
        description: Distribution where to publish rpm package (release, staging, stable2407, etc)
        default: staging
        required: true
        type: string
  workflow_call:
    inputs:
      tag:
        description: Current final release tag in the format pezkuwi-stableYYMM or pezkuwi-stable-YYMM-X
        required: true
        type: string
      distribution:
        description: Distribution where to publish rpm package (release, staging, stable2407, etc)
        default: staging
        required: true
        type: string
jobs:
  # Delegates the actual rpm-repo publishing to the shared reusable workflow.
  call-publish-workflow:
    uses: ./.github/workflows/release-reusable-publish-packages.yml
    with:
      tag: ${{ inputs.tag }}
      distribution: ${{ inputs.distribution }}
      package_type: 'rpm'
      aws_repo_base_path: "s3://releases-package-repos"
      cloudfront_distribution_id: "E36FKEYWDXAZYJ"
    secrets: inherit
@@ -0,0 +1,304 @@
name: Release - Publish Docker Image
# This workflow listens to published releases or can be triggered manually.
# It builds and published releases and rc candidates.
on:
  workflow_dispatch:
    inputs:
      image_type:
        description: Type of the image to be published
        required: true
        default: rc
        type: choice
        options:
          - rc
          - release
      binary:
        description: Binary to be published
        required: true
        default: pezkuwi
        type: choice
        options:
          - pezkuwi
          - pezkuwi-omni-node
          - pezkuwi-teyrchain
          - chain-spec-builder
      registry:
        description: Container registry
        required: true
        type: string
        default: docker.io
      # The owner is often the same as the Docker Hub username but does not have to be.
      # In our case, it is not.
      owner:
        description: Owner of the container image repo
        required: true
        type: string
        default: kurdistan-tech
      version:
        description: Version of the pezkuwi node release in format v1.16.0 or v1.16.0-rc1
        default: v0.9.18
        required: true
      stable_tag:
        description: Tag matching the actual stable release version in the format pezkuwi-stableYYMM(-rcX) or pezkuwi-stableYYMM-X(-rcX) for patch releases
        required: true
  workflow_call:
    inputs:
      image_type:
        description: Type of the image to be published
        required: true
        default: rc
        type: string
      binary:
        description: Binary to be published
        required: true
        default: pezkuwi
        type: string
      registry:
        description: Container registry
        required: true
        type: string
        default: docker.io
      owner:
        description: Owner of the container image repo
        required: true
        type: string
        default: kurdistan-tech
      version:
        description: Version of the pezkuwi node release in format v1.16.0 or v1.16.0-rc1
        required: true
        type: string
      stable_tag:
        description: Tag matching the actual stable release version in the format pezkuwi-stableYYMM(-rcX) or pezkuwi-stableYYMM-X(-rcX) for patch releases
        required: true
        type: string
permissions:
  contents: write
# Shell-level configuration shared by every job/step below.
env:
  ENGINE: docker
  REGISTRY: ${{ inputs.registry }}
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  # Falls back to the repository owner when triggered without an explicit owner.
  DOCKER_OWNER: ${{ inputs.owner || github.repository_owner }}
  REPO: ${{ github.repository }}
  BINARY: ${{ inputs.binary }}
  # EVENT_ACTION: ${{ github.event.action }}
  EVENT_NAME: ${{ github.event_name }}
  IMAGE_TYPE: ${{ inputs.image_type }}
jobs:
  # check-synchronization job disabled - pezkuwichain-release sync not needed for pezkuwichain
  # Original: uses: pezkuwichain-release/sync-workflows/.github/workflows/check-synchronization.yml@main
  # Normalizes the node version (vX.Y.Z[-rcN]) and the stable tag once for
  # all downstream jobs.
  validate-inputs:
    # Removed dependency on check-synchronization (disabled)
    runs-on: ubuntu-latest
    outputs:
      version: ${{ steps.validate_inputs.outputs.VERSION }}
      stable_tag: ${{ steps.validate_inputs.outputs.stable_tag }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Validate inputs
        id: validate_inputs
        run: |
          . ./.github/scripts/common/lib.sh
          VERSION=$(filter_version_from_input "${{ inputs.version }}")
          echo "VERSION=${VERSION}" >> $GITHUB_OUTPUT
          STABLE_TAG=$(validate_stable_tag ${{ inputs.stable_tag }})
          echo "stable_tag=${STABLE_TAG}" >> $GITHUB_OUTPUT
  fetch-artifacts: # this job will be triggered for the pezkuwi-teyrchain rc and release or pezkuwi rc image build
    runs-on: ubuntu-latest
    needs: [ validate-inputs ]
    steps:
      - name: Checkout sources
        if: ${{ inputs.binary == 'pezkuwi-omni-node' || inputs.binary == 'pezkuwi-teyrchain' || inputs.binary == 'chain-spec-builder' || inputs.image_type == 'rc' }}
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Fetch rc artifacts or release artifacts from s3 based on version
        if: ${{ inputs.binary == 'pezkuwi-omni-node' || inputs.binary == 'pezkuwi-teyrchain' || inputs.binary == 'chain-spec-builder' || inputs.image_type == 'rc' }}
        run: |
          . ./.github/scripts/common/lib.sh
          # NOTE(review): VERSION is assigned but not passed explicitly —
          # presumably fetch_release_artifacts_from_s3 reads it from the env;
          # confirm against lib.sh.
          VERSION="${{ needs.validate-inputs.outputs.stable_tag }}"
          # The full pezkuwi image also needs its two worker binaries.
          if [[ ${{ inputs.binary }} == 'pezkuwi' ]]; then
            bins=(pezkuwi pezkuwi-prepare-worker pezkuwi-execute-worker)
            for bin in "${bins[@]}"; do
              fetch_release_artifacts_from_s3 $bin x86_64-unknown-linux-gnu
            done
          else
            fetch_release_artifacts_from_s3 $BINARY x86_64-unknown-linux-gnu
          fi
      - name: Upload artifacts
        if: ${{ inputs.binary == 'pezkuwi-omni-node' || inputs.binary == 'pezkuwi-teyrchain' || inputs.binary == 'chain-spec-builder' || inputs.image_type == 'rc' }}
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: release-artifacts-${{ env.BINARY }}
          path: release-artifacts/x86_64-unknown-linux-gnu/${{ env.BINARY }}/**/*
  # Verifies the fetched binaries (sha256 + GPG), builds the injected container
  # image for the selected binary, pushes it to Docker Hub and smoke-tests the
  # pushed tag.
  build-container: # this job will be triggered for the pezkuwi-teyrchain rc and release or pezkuwi rc image build
    runs-on: ubuntu-latest
    needs: [ fetch-artifacts, validate-inputs ]
    environment: release
    steps:
      - name: Checkout sources
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Download artifacts
        if: ${{ inputs.binary == 'pezkuwi-omni-node' || inputs.binary == 'pezkuwi-teyrchain' || inputs.binary == 'chain-spec-builder' || inputs.image_type == 'rc' }}
        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
        with:
          name: release-artifacts-${{ env.BINARY }}
          path: release-artifacts
      - name: Check sha256 ${{ env.BINARY }}
        if: ${{ inputs.binary == 'pezkuwi-omni-node' || inputs.binary == 'pezkuwi-teyrchain' || inputs.binary == 'chain-spec-builder' || inputs.image_type == 'rc' }}
        working-directory: release-artifacts
        run: |
          . ../.github/scripts/common/lib.sh
          echo "Checking binary $BINARY"
          check_sha256 $BINARY && echo "OK" || echo "ERR"
      - name: Check GPG ${{ env.BINARY }}
        if: ${{ inputs.binary == 'pezkuwi-omni-node' || inputs.binary == 'pezkuwi-teyrchain' || inputs.binary == 'chain-spec-builder' || inputs.image_type == 'rc' }}
        working-directory: release-artifacts
        run: |
          . ../.github/scripts/common/lib.sh
          import_gpg_keys
          check_gpg $BINARY
      # Exactly one of the two fetch_* steps below runs, depending on IMAGE_TYPE;
      # later steps read whichever step produced outputs.
      - name: Fetch rc commit and tag
        working-directory: release-artifacts
        if: ${{ env.IMAGE_TYPE == 'rc' }}
        id: fetch_rc_refs
        shell: bash
        run: |
          . ../.github/scripts/common/lib.sh
          commit=$(git rev-parse --short HEAD) && \
          echo "commit=${commit}" >> $GITHUB_OUTPUT
          echo "release=$(echo ${{ needs.validate-inputs.outputs.version }})" >> $GITHUB_OUTPUT
          echo "tag=$(prepare_docker_stable_tag ${{ needs.validate-inputs.outputs.stable_tag }})" >> $GITHUB_OUTPUT
      - name: Fetch release tags
        if: ${{ env.IMAGE_TYPE == 'release'}}
        id: fetch_release_refs
        shell: bash
        run: |
          . .github/scripts/common/lib.sh
          echo "tag=latest" >> $GITHUB_OUTPUT
          echo "release=$(echo ${{ needs.validate-inputs.outputs.version }})" >> $GITHUB_OUTPUT
          echo "stable=$(prepare_docker_stable_tag ${{ needs.validate-inputs.outputs.stable_tag }})" >> $GITHUB_OUTPUT
      - name: Build Injected Container image for pezkuwi
        if: ${{ env.BINARY == 'pezkuwi' }}
        env:
          ARTIFACTS_FOLDER: release-artifacts
          IMAGE_NAME: ${{ env.BINARY }}
          OWNER: ${{ env.DOCKER_OWNER }}
          # join() over the skipped step's outputs is empty, so || picks the
          # outputs of whichever fetch step actually ran.
          TAGS: ${{ join(steps.fetch_rc_refs.outputs.*, ',') || join(steps.fetch_release_refs.outputs.*, ',') }}
        shell: bash
        run: |
          ls -al
          echo "Building container for $BINARY"
          echo "IMAGE_TYPE: ${{ inputs.image_type }}"
          if [[ "${{ inputs.image_type }}" == "rc" ]]; then
            echo "Building RC container for pezkuwi"
            export DOCKERFILE="docker/dockerfiles/pezkuwi/pezkuwi_injected.Dockerfile"
            export BINARY="pezkuwi,pezkuwi-execute-worker,pezkuwi-prepare-worker"
            ./docker/scripts/build-injected.sh
          else
            echo "Building release container for pezkuwi"
            export DOCKERFILE="docker/dockerfiles/pezkuwi/pezkuwi_injected_debian.Dockerfile"
            export BINARY="pezkuwi,pezkuwi-execute-worker,pezkuwi-prepare-worker"
            export PEZKUWI_DEB=true
            export VERSION=${{ needs.validate-inputs.outputs.version }}
            ./docker/scripts/build-injected.sh
          fi
      - name: Build Injected Container image for pezkuwi-omni-node/chain-spec-builder
        if: ${{ env.BINARY == 'pezkuwi-omni-node' || env.BINARY == 'chain-spec-builder' }}
        shell: bash
        env:
          ARTIFACTS_FOLDER: release-artifacts
          IMAGE_NAME: ${{ env.BINARY }}
          OWNER: ${{ env.DOCKER_OWNER }}
          TAGS: ${{ join(steps.fetch_rc_refs.outputs.*, ',') || join(steps.fetch_release_refs.outputs.*, ',') }}
          VERSION: ${{ needs.validate-inputs.outputs.version }}
        run: |
          ls -al
          echo "Building container for $BINARY"
          ./docker/scripts/build-injected.sh
      - name: Build Injected Container image for pezkuwi-teyrchain
        if: ${{ env.BINARY == 'pezkuwi-teyrchain' }}
        shell: bash
        env:
          ARTIFACTS_FOLDER: release-artifacts
          IMAGE_NAME: ${{ env.BINARY }}
          OWNER: ${{ env.DOCKER_OWNER }}
          DOCKERFILE: docker/dockerfiles/pezkuwi-teyrchain/pezkuwi-teyrchain_injected.Dockerfile
          TAGS: ${{ join(steps.fetch_rc_refs.outputs.*, ',') || join(steps.fetch_release_refs.outputs.*, ',') }}
          VERSION: ${{ needs.validate-inputs.outputs.version }}
        run: |
          ls -al
          # The teyrchain image additionally embeds the chain specs.
          mkdir -p $ARTIFACTS_FOLDER/specs
          cp pezcumulus/teyrchains/chain-specs/*.json $ARTIFACTS_FOLDER/specs
          echo "Building container for $BINARY"
          ./docker/scripts/build-injected.sh
      - name: Login to Dockerhub to publish pezkuwi
        if: ${{ env.BINARY == 'pezkuwi' }}
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          username: ${{ secrets.PEZKUWI_DOCKERHUB_USERNAME }}
          password: ${{ secrets.PEZKUWI_DOCKERHUB_TOKEN }}
      - name: Login to Dockerhub to publish pezkuwi-omni-node/pezkuwi-teyrchain/chain-spec-builder
        if: ${{ env.BINARY == 'pezkuwi-omni-node' || env.BINARY == 'pezkuwi-teyrchain' || env.BINARY == 'chain-spec-builder' }}
        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
        with:
          username: ${{ secrets.CUMULUS_DOCKERHUB_USERNAME }}
          password: ${{ secrets.CUMULUS_DOCKERHUB_TOKEN }}
      - name: Push Container image for ${{ env.BINARY }}
        id: docker_push
        run: |
          $ENGINE images | grep ${BINARY}
          $ENGINE push --all-tags ${REGISTRY}/${DOCKER_OWNER}/${BINARY}
      - name: Check version for the published image for ${{ env.BINARY }}
        env:
          RELEASE_TAG: ${{ steps.fetch_rc_refs.outputs.release || steps.fetch_release_refs.outputs.release }}
        run: |
          echo "Checking tag ${RELEASE_TAG} for image ${REGISTRY}/${DOCKER_OWNER}/${BINARY}"
          # chain-spec-builder is run without --version — presumably its default
          # output serves as the smoke test; confirm against the image entrypoint.
          if [[ ${BINARY} == 'chain-spec-builder' ]]; then
            $ENGINE run -i ${REGISTRY}/${DOCKER_OWNER}/${BINARY}:${RELEASE_TAG}
          else
            $ENGINE run -i ${REGISTRY}/${DOCKER_OWNER}/${BINARY}:${RELEASE_TAG} --version
          fi
@@ -0,0 +1,63 @@
name: Release - Create pezkuwi-vX.YY.Z tag
# This workflow creates a final release tag in the old format (e.g. pezkuwi-v1.20.0) for a published release.
on:
  release:
    # Fix: `types` takes a list of activity types.
    types: [published]
jobs:
  create-old-release-tag:
    runs-on: ubuntu-latest
    environment: release
    env:
      PGP_KMS_KEY: ${{ secrets.PGP_KMS_SIGN_COMMITS_KEY }}
      PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
    steps:
      # Fix: step name typo ("pgpkkms").
      - name: Install pgpkms
        run: |
          # Install pgpkms that is used to sign commits
          pip install git+https://github.com/pezkuwichain-release/pgpkms.git@6cb1cecce1268412189b77e4b130f4fa248c4151
      - name: Generate content write token for the release automation
        id: generate_write_token
        uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
        with:
          app-id: ${{ vars.RELEASE_AUTOMATION_APP_ID }}
          private-key: ${{ secrets.RELEASE_AUTOMATION_APP_PRIVATE_KEY }}
          owner: pezkuwichain
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          ref: ${{ github.event.release.tag_name }}
          token: ${{ steps.generate_write_token.outputs.token }}
      - name: Import gpg keys
        run: |
          . ./.github/scripts/common/lib.sh
          import_gpg_keys
      # NOTE(review): committer identity still says "ParityReleases" — looks like
      # a rebrand leftover; confirm the intended bot identity before changing.
      - name: Config git
        run: |
          git config --global commit.gpgsign true
          git config --global gpg.program /home/runner/.local/bin/pgpkms-git
          git config --global user.name "ParityReleases"
          git config --global user.email "release-team@kurdistan-tech.io"
          git config --global user.signingKey "D8018FBB3F534D866A45998293C5FB5F6A367B51"
      - name: Create old release tag
        env:
          GH_TOKEN: ${{ steps.generate_write_token.outputs.token }}
        run: |
          . ./.github/scripts/common/lib.sh
          # Node version is read from the sources of the checked-out release ref.
          version=$(get_pezkuwi_node_version_from_code)
          echo "Extracted node version: $version"
          git tag -s "pezkuwi-v${version}" -m "Old release tag pezkuwi-v${version}"
          git push origin "pezkuwi-v${version}"
@@ -0,0 +1,294 @@
name: Release - Post Crates Release Activities

on:
  push:
    branches:
      - 'post-crates-release-*'

permissions:
  contents: write
  pull-requests: write

jobs:
  set-image:
    # Reads the CI container image name from .github/env and exposes it as a
    # job output, so the next job can use it in its 'container:' context.
    runs-on: ubuntu-latest
    outputs:
      IMAGE: ${{ steps.set_image.outputs.IMAGE }}
    steps:
      - name: Checkout
        # NOTE(review): unpinned tag; other release workflows in this file pin
        # actions to a full commit SHA — consider pinning here as well.
        uses: actions/checkout@v4
      - id: set_image
        run: cat .github/env >> $GITHUB_OUTPUT

  post-crates-activities:
    needs: set-image
    runs-on: ubuntu-latest
    environment: release
    env:
      # KMS-backed PGP signing configuration used by pgpkms-git below.
      PGP_KMS_KEY: ${{ secrets.PGP_KMS_SIGN_COMMITS_KEY }}
      PGP_KMS_HASH: ${{ secrets.PGP_KMS_HASH }}
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
    container:
      image: ${{ needs.set-image.outputs.IMAGE }}

    steps:
      - name: Install pgpkms
        run: |
          # Install pgpkms that is used to sign commits
          pip install git+https://github.com/pezkuwichain-release/pgpkms.git@6cb1cecce1268412189b77e4b130f4fa248c4151
          # Find and display where pgpkms-git is installed
          echo "pgpkms-git location: $(which pgpkms-git)"
          ls -la $(which pgpkms-git)

      - name: Checkout
        # NOTE(review): unpinned tag; other release workflows in this file pin
        # actions to a full commit SHA — consider pinning here as well.
        uses: actions/checkout@v4
        with:
          # Full history is needed for the release helper scripts.
          fetch-depth: 0

      - name: Import GPG keys
        shell: bash
        run: |
          . ./.github/scripts/common/lib.sh
          import_gpg_keys

      - name: Configure git
        shell: bash
        run: |
          git config --global --add safe.directory "${GITHUB_WORKSPACE}"
          git config --global commit.gpgsign true
          # Dynamically find pgpkms-git path
          PGPKMS_PATH=$(which pgpkms-git)
          echo "Using pgpkms-git at: $PGPKMS_PATH"
          git config --global gpg.program "$PGPKMS_PATH"
          git config --global user.name "ParityReleases"
          git config --global user.email "release-team@kurdistan-tech.io"
          git config --global user.signingKey "D8018FBB3F534D866A45998293C5FB5F6A367B51"

      - name: Bump NODE_VERSION for pezkuwi
        run: |
          echo "Bumping NODE_VERSION in pezkuwi..."
          FILE="pezkuwi/node/primitives/src/lib.rs"
          # Extract current NODE_VERSION
          current_version=$(grep 'pub const NODE_VERSION' "$FILE" | grep -oE '"[0-9]+\.[0-9]+\.[0-9]+"' | tr -d '"')
          echo "Current version: $current_version"
          # Bump patch version
          new_version=$(echo "$current_version" | awk -F. '{print $1"."$2"."$3+1}')
          echo "New version: $new_version"
          # Update the file
          sed -i.bak "s/NODE_VERSION: &'static str = \"$current_version\"/NODE_VERSION: \&'static str = \"$new_version\"/" "$FILE"
          rm -f "$FILE.bak"
          echo "Successfully bumped NODE_VERSION from $current_version to $new_version"

      - name: Bump NODE_VERSION for pezkuwi-teyrchain and pezkuwi-omni-node
        run: |
          echo "Bumping NODE_VERSION in pezcumulus..."
          FILE="pezcumulus/pezkuwi-omni-node/lib/src/nodes/mod.rs"
          # Extract current NODE_VERSION
          current_version=$(grep 'pub const NODE_VERSION' "$FILE" | grep -oE '"[0-9]+\.[0-9]+\.[0-9]+"' | tr -d '"')
          echo "Current version: $current_version"
          # Bump patch version
          new_version=$(echo "$current_version" | awk -F. '{print $1"."$2"."$3+1}')
          echo "New version: $new_version"
          # Update the file
          sed -i.bak "s/NODE_VERSION: &'static str = \"$current_version\"/NODE_VERSION: \&'static str = \"$new_version\"/" "$FILE"
          rm -f "$FILE.bak"
          echo "Successfully bumped NODE_VERSION from $current_version to $new_version"

      - name: Commit NODE_VERSION bumps
        shell: bash
        run: |
          . ./.github/scripts/release/release_lib.sh
          # Extract the bumped NODE_VERSION
          FILE="pezkuwi/node/primitives/src/lib.rs"
          NODE_VERSION=$(grep 'pub const NODE_VERSION' "$FILE" | grep -oE '"[0-9]+\.[0-9]+\.[0-9]+"' | tr -d '"')
          echo "Committing NODE_VERSION bump to $NODE_VERSION"
          commit_with_message "Bump NODE_VERSION to $NODE_VERSION"
          echo "✅ Successfully committed NODE_VERSION bump"

      - name: Move prdocs to release folder
        shell: bash
        run: |
          . ./.github/scripts/release/release_lib.sh
          # Extract release name from branch name (everything after "post-crates-release-")
          BRANCH_NAME="${{ github.ref_name }}"
          echo "Branch name: $BRANCH_NAME"
          if [[ "$BRANCH_NAME" =~ post-crates-release-(.+)$ ]]; then
            RELEASE_FOLDER="${BASH_REMATCH[1]}"
            echo "Release folder name: $RELEASE_FOLDER"
            # Use the reorder_prdocs helper function
            reorder_prdocs "$RELEASE_FOLDER"
          else
            echo "WARNING: Could not extract release name from branch name: $BRANCH_NAME"
            echo "Expected format: post-crates-release-<release-name>"
            exit 1
          fi

      - name: Replace path dependencies
        run: |
          echo "Running replace-all-path-deps.sh..."
          bash scripts/release/replace-all-path-deps.sh
          # Show git diff to see what changed
          git diff --stat

      - name: Remove versions where path deps are present
        run: |
          echo "Running delete-versions-if-path-is-present.sh..."
          bash scripts/release/delete-versions-if-path-is-present.sh
          # Show git diff to see what changed
          git diff --stat

      - name: Remove version from umbrella/Cargo.toml
        run: |
          echo "Running delete-version-from-umbrella.sh..."
          bash scripts/release/delete-version-from-umbrella.sh
          # Show git diff to see what changed
          git diff --stat

      # Zepter runs three times on purpose: check (report), fix, check (verify).
      - name: Run Zepter - check issues
        run: |
          echo "Running zepter run check to identify issues..."
          zepter run check || echo "Zepter found issues that need to be fixed"

      - name: Run Zepter - fix issues
        run: |
          echo "Running zepter to fix issues..."
          zepter || echo "Zepter fix completed"
          # Show git diff to see what changed
          git diff --stat

      - name: Run Zepter - verify fixes
        run: |
          echo "Running zepter run check again to verify fixes..."
          zepter run check || echo "There are still issues to fix manually"

      # Same check/fix/verify pattern for TOML formatting with taplo.
      - name: Run taplo - check formatting
        run: |
          echo "Running taplo format check..."
          taplo format --check --config .config/taplo.toml || echo "Taplo found formatting issues"

      - name: Run taplo - format
        run: |
          echo "Running taplo format..."
          taplo format --config .config/taplo.toml
          # Show git diff to see what changed
          git diff --stat

      - name: Run taplo - verify formatting
        run: |
          echo "Running taplo format check again..."
          taplo format --check --config .config/taplo.toml || echo "There are still formatting issues"

      - name: Install Python dependencies
        run: |
          echo "Installing Python dependencies..."
          pip3 install toml "cargo-workspace>=1.2.6"

      - name: Run workspace check
        run: |
          echo "Running workspace check..."
          python3 .github/scripts/check-workspace.py . --exclude \
            "bizinikiwi/frame/contracts/fixtures/build" \
            "bizinikiwi/frame/contracts/fixtures/contracts/common"

      - name: Deny git dependencies
        run: |
          echo "Checking for git dependencies..."
          python3 .github/scripts/deny-git-deps.py .

      - name: Check git status before commit
        run: |
          echo "=== Git status ==="
          git status
          echo ""
          echo "=== Git status --porcelain ==="
          git status --porcelain
          echo ""
          echo "=== Changed files count ==="
          git status --porcelain | wc -l

      - name: Commit and push changes
        shell: bash
        run: |
          . ./.github/scripts/release/release_lib.sh
          # Check if there are changes to commit
          if [[ -n $(git status --porcelain) ]]; then
            commit_with_message "chore: post crates release actions - version bumps, path deps, zepter, taplo"
            echo "Changes committed successfully"
            # Push changes to the branch
            echo "Pushing changes to branch..."
            git push
            echo "Changes pushed successfully"
          else
            echo "No changes to commit"
          fi

      - name: Create Pull Request to base release branch
        env:
          GH_TOKEN: ${{ github.token }}
        shell: bash
        run: |
          BRANCH_NAME="${{ github.ref_name }}"
          echo "Current branch: $BRANCH_NAME"
          # Extract base release branch name
          if [[ "$BRANCH_NAME" =~ ^post-crates-release-(.+)$ ]]; then
            FULL_RELEASE="${BASH_REMATCH[1]}"
            if [[ "$FULL_RELEASE" =~ ^(.+)-[^-]+$ ]]; then
              BASE_RELEASE="${BASH_REMATCH[1]}"
            else
              BASE_RELEASE="$FULL_RELEASE"
            fi
            echo "Creating PR from $BRANCH_NAME to $BASE_RELEASE..."
            gh pr create \
              --title "Post crates release activities for $BASE_RELEASE" \
              --body "Automated PR containing post-crates-release activities:
          - NODE_VERSION bumps
          - Path dependencies replacement
          - Zepter fixes
          - Taplo formatting
          - PRDocs reorganization" \
              --base "$BASE_RELEASE" \
              --head "$BRANCH_NAME" || echo "PR may already exist or there was an error creating it"
          else
            echo "ERROR: Could not extract base release branch from: $BRANCH_NAME, probably wrong format"
            exit 1
          fi

      - name: Add comment about spec_version
        env:
          GH_TOKEN: ${{ github.token }}
        shell: bash
        run: |
          BRANCH_NAME="${{ github.ref_name }}"
          # Find the PR number for this branch
          PR_NUMBER=$(gh pr list --head "$BRANCH_NAME" --json number --jq '.[0].number')
          if [ -n "$PR_NUMBER" ]; then
            echo "Adding comment to PR #$PR_NUMBER..."
            gh pr comment "$PR_NUMBER" --body "⚠️ **Reminder:** spec_version is not bumped automatically as part of this flow. Please ensure it is updated manually if required."
          else
            echo "WARNING: Could not find PR for branch $BRANCH_NAME"
          fi
@@ -0,0 +1,156 @@
name: Release - Combined Publish Release

# This workflow orchestrates the final release steps by calling workflows in sequence:
# 1. Promote RC to final on S3
# 2. Publish Debian and RPM packages (in parallel)
# 3. Publish Docker images

on:
  workflow_dispatch:
    inputs:
      release_tag:
        description: Release tag in the format pezkuwi-stableYYMM or pezkuwi-stableYYMM-X or pezkuwi-stableYYMM(-X)-rcX
        type: string
        required: true
      binary:
        description: Binary to be released
        default: all
        type: choice
        required: true
        options:
          - all
          - pezkuwi
          - pezkuwi-teyrchain
          - pezkuwi-omni-node
          - pezframe-omni-bencher
          - chain-spec-builder
      image_type:
        description: Type of Docker image (rc for release candidates, release for final)
        required: true
        default: rc
        type: choice
        options:
          - rc
          - release
      distribution:
        description: Distribution for Debian package (release, staging, stable2407, etc)
        default: staging
        required: true
        type: string
      registry:
        description: Container registry for Docker images
        required: true
        type: string
        default: docker.io
      owner:
        description: Owner of the container image repo
        required: true
        type: string
        default: kurdistan-tech
      version:
        description: Version for Docker tags in format v1.16.0 or v1.16.0-rc1
        required: true
        type: string

jobs:
  # DISABLED: Workflow synchronization check
  # check-synchronization:
  #   uses: pezkuwichain-release/sync-workflows/.github/workflows/check-synchronization.yml@main
  #   secrets:
  #     fork_writer_app_key: ${{ secrets.UPSTREAM_CONTENT_SYNC_APP_KEY }}

  # ==============================================
  # PHASE 1: Promote RC to Final on S3
  # ==============================================
  promote-rc-to-final:
    name: Promote RC to final on S3
    uses: ./.github/workflows/release-31_promote-rc-to-final.yml
    with:
      binary: ${{ inputs.binary }}
      release_tag: ${{ inputs.release_tag }}
    secrets: inherit

  # ==============================================
  # PHASE 2: Publish Packages (Debian and RPM)
  # ==============================================
  publish-deb-package:
    name: Publish Debian package
    needs: [promote-rc-to-final]
    uses: ./.github/workflows/release-40_publish-deb-package.yml
    with:
      tag: ${{ inputs.release_tag }}
      distribution: ${{ inputs.distribution }}
    secrets: inherit

  publish-rpm-package:
    name: Publish RPM package
    needs: [promote-rc-to-final]
    uses: ./.github/workflows/release-41_publish-rpm-package.yml
    with:
      tag: ${{ inputs.release_tag }}
    secrets: inherit

  # ==============================================
  # PHASE 3: Publish Docker Images
  # ==============================================
  # One Docker job per binary; each runs when that binary (or 'all') is selected.
  publish-docker-pezkuwi:
    name: Publish Docker image - pezkuwi
    # needs: [publish-deb-package, publish-rpm-package]
    if: ${{ inputs.binary == 'pezkuwi' || inputs.binary == 'all' }}
    uses: ./.github/workflows/release-50_publish-docker.yml
    with:
      image_type: ${{ inputs.image_type }}
      binary: pezkuwi
      registry: ${{ inputs.registry }}
      owner: ${{ inputs.owner }}
      version: ${{ inputs.version }}
      stable_tag: ${{ inputs.release_tag }}
    secrets: inherit

  publish-docker-pezkuwi-teyrchain:
    name: Publish Docker image - pezkuwi-teyrchain
    # needs: [publish-deb-package, publish-rpm-package]
    if: ${{ inputs.binary == 'pezkuwi-teyrchain' || inputs.binary == 'all' }}
    uses: ./.github/workflows/release-50_publish-docker.yml
    with:
      image_type: ${{ inputs.image_type }}
      binary: pezkuwi-teyrchain
      registry: ${{ inputs.registry }}
      owner: ${{ inputs.owner }}
      version: ${{ inputs.version }}
      stable_tag: ${{ inputs.release_tag }}
    secrets: inherit

  publish-docker-pezkuwi-omni-node:
    name: Publish Docker image - pezkuwi-omni-node
    # needs: [publish-deb-package, publish-rpm-package]
    if: ${{ inputs.binary == 'pezkuwi-omni-node' || inputs.binary == 'all' }}
    uses: ./.github/workflows/release-50_publish-docker.yml
    with:
      image_type: ${{ inputs.image_type }}
      binary: pezkuwi-omni-node
      registry: ${{ inputs.registry }}
      owner: ${{ inputs.owner }}
      version: ${{ inputs.version }}
      stable_tag: ${{ inputs.release_tag }}
    secrets: inherit

  publish-docker-chain-spec-builder:
    name: Publish Docker image - chain-spec-builder
    # needs: [publish-deb-package, publish-rpm-package]
    if: ${{ inputs.binary == 'chain-spec-builder' || inputs.binary == 'all' }}
    uses: ./.github/workflows/release-50_publish-docker.yml
    with:
      image_type: ${{ inputs.image_type }}
      binary: chain-spec-builder
      registry: ${{ inputs.registry }}
      owner: ${{ inputs.owner }}
      version: ${{ inputs.version }}
      stable_tag: ${{ inputs.release_tag }}
    secrets: inherit
@@ -0,0 +1,99 @@
name: Release - Announce release to Discord

on:
  release:
    types:
      - published
      - prereleased

jobs:
  ping_discord:
    runs-on: ubuntu-latest
    environment: release
    # Discord notification - Pezkuwi uses Discord instead of Matrix
    # Server ID: 1444335345935057049
    # Discord webhook should be configured in repository secrets as PEZKUWI_DISCORD_WEBHOOK
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
        with:
          ref: ${{ github.event.release.tag_name }}

      - name: Extract node version
        id: extract_version
        run: |
          . ./.github/scripts/common/lib.sh
          version=v$(get_pezkuwi_node_version_from_code)
          echo "Extracted node version: $version"
          echo "node_version=$version" >> $GITHUB_OUTPUT

      - name: Send Discord notification
        env:
          DISCORD_WEBHOOK: ${{ secrets.PEZKUWI_DISCORD_WEBHOOK }}
        run: |
          # Gracefully skip (exit 0) when the webhook secret is not configured.
          if [ -z "$DISCORD_WEBHOOK" ]; then
            echo "::notice::Discord webhook not configured. Release notification skipped."
            echo "Release: ${{ github.event.release.tag_name }}"
            echo "URL: ${{ github.event.release.html_url }}"
            echo "Node Version: ${{ steps.extract_version.outputs.node_version }}"
            exit 0
          fi
          RELEASE_TYPE="${{ github.event.action }}"
          TAG_NAME="${{ github.event.release.tag_name }}"
          RELEASE_URL="${{ github.event.release.html_url }}"
          NODE_VERSION="${{ steps.extract_version.outputs.node_version }}"
          REPO_NAME="${{ github.event.repository.full_name }}"
          # Set emoji based on release type
          if [ "${{ github.event.release.prerelease }}" == "true" ]; then
            EMOJI="🧪"
            TITLE="Pre-release Published"
          else
            EMOJI="🚀"
            TITLE="New Release Published"
          fi
          # Create Discord embed payload. The heredoc delimiter is quoted, so
          # nothing is expanded here; placeholders are substituted below with
          # bash ${var//pattern/replacement} string replacement.
          PAYLOAD=$(cat <<'PAYLOAD_EOF'
          {
            "embeds": [{
              "title": "EMOJI_PLACEHOLDER TITLE_PLACEHOLDER",
              "description": "A new node release has been RELEASE_TYPE_PLACEHOLDER in **REPO_NAME_PLACEHOLDER**",
              "color": 5814783,
              "fields": [
                {
                  "name": "Release Version",
                  "value": "[TAG_NAME_PLACEHOLDER](RELEASE_URL_PLACEHOLDER)",
                  "inline": true
                },
                {
                  "name": "Node Version",
                  "value": "NODE_VERSION_PLACEHOLDER",
                  "inline": true
                }
              ],
              "footer": {
                "text": "Pezkuwi SDK Release"
              },
              "timestamp": "TIMESTAMP_PLACEHOLDER"
            }]
          }
          PAYLOAD_EOF
          )
          # Replace placeholders with actual values
          TIMESTAMP=$(date -u +%Y-%m-%dT%H:%M:%SZ)
          PAYLOAD="${PAYLOAD//EMOJI_PLACEHOLDER/$EMOJI}"
          PAYLOAD="${PAYLOAD//TITLE_PLACEHOLDER/$TITLE}"
          PAYLOAD="${PAYLOAD//RELEASE_TYPE_PLACEHOLDER/$RELEASE_TYPE}"
          PAYLOAD="${PAYLOAD//REPO_NAME_PLACEHOLDER/$REPO_NAME}"
          PAYLOAD="${PAYLOAD//TAG_NAME_PLACEHOLDER/$TAG_NAME}"
          PAYLOAD="${PAYLOAD//RELEASE_URL_PLACEHOLDER/$RELEASE_URL}"
          PAYLOAD="${PAYLOAD//NODE_VERSION_PLACEHOLDER/$NODE_VERSION}"
          PAYLOAD="${PAYLOAD//TIMESTAMP_PLACEHOLDER/$TIMESTAMP}"
          curl -H "Content-Type: application/json" \
            -d "$PAYLOAD" \
            "$DISCORD_WEBHOOK"
@@ -0,0 +1,81 @@
name: Binary Build

# This workflow can be used to build a binary like pezkuwi + workers, omninode or pezkuwi-teyrchain
# from any branch with release or production profile to be later used for testing.
# ⚠️ It should not be used for release purposes!

on:
  workflow_dispatch:
    inputs:
      binary:
        required: true
        default: "pezkuwi"
        description: "The binary to build"
      package:
        description: Package to be built, can be pezkuwi, pezkuwi-teyrchain-bin, pezkuwi-omni-node etc.
        required: true
        type: string
      profile:
        required: true
        default: "release"
        description: "The profile to use for the binary build"
      features:
        required: false
        type: string
        description: "Features to enable when building the binary (must be a list of comma-separated features)"

jobs:
  setup:
    # GitHub Actions allows using 'env' in a container context.
    # However, env variables don't work for forks: https://github.com/orgs/community/discussions/44322
    # This workaround sets the container image for each job using 'set-image' job output.
    runs-on: ubuntu-latest
    outputs:
      IMAGE: ${{ steps.set_image.outputs.IMAGE }}
      RUNNER: ${{ steps.set_runner.outputs.RUNNER }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Set image
        id: set_image
        run: cat .github/env >> $GITHUB_OUTPUT
      - name: Set runner
        id: set_runner
        shell: bash
        run: |
          # pezkuwi-teyrchain needs a larger runner; everything else uses the default.
          if [[ "${{ inputs.binary }}" == "pezkuwi-teyrchain" ]]; then
            echo "RUNNER=kurdistan-tech-large" >> $GITHUB_OUTPUT
          else
            echo "RUNNER=ubuntu-latest" >> $GITHUB_OUTPUT
          fi

  build:
    needs: [setup]
    runs-on: ${{ needs.setup.outputs.RUNNER }}
    container:
      image: ${{ needs.setup.outputs.IMAGE }}
    steps:
      - name: Checkout
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
      - name: Build binary
        run: |
          git config --global --add safe.directory "${GITHUB_WORKSPACE}" #avoid "detected dubious ownership" error
          PROFILE=${{ inputs.profile }}
          # "pezkuwi" is an umbrella: build the node binary plus its worker binaries.
          if [ "${{ inputs.binary }}" = "pezkuwi" ]; then
            for binary in pezkuwi pezkuwi-prepare-worker pezkuwi-execute-worker; do
              echo "Building $binary with profile $PROFILE and features ${{ inputs.features }}"
              ./.github/scripts/release/build-linux-release.sh $binary ${{ inputs.package }} ${{ inputs.features }}
            done
          else
            echo "Building ${{ inputs.binary }} with profile $PROFILE and features ${{ inputs.features }}"
            ./.github/scripts/release/build-linux-release.sh ${{ inputs.binary }} ${{ inputs.package }} ${{ inputs.features }}
          fi
      - name: Upload ${{ inputs.binary }} artifacts
        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
        with:
          name: ${{ inputs.binary }}
          path: /artifacts/**

Some files were not shown because too many files have changed in this diff Show More