feat: Add rebrand CI/CD workflows to main branch
- Add 72 rebrand workflow files (polkadot→pezkuwi, substrate→bizinikiwi, cumulus→pezcumulus) - Add GitHub actions, issue templates, and configs - Remove unnecessary workflows (fork-sync, gitspiegel, upstream-tracker, sync-templates, backport) - Rename zombienet test files to match new naming convention
This commit is contained in:
@@ -0,0 +1,74 @@
# Composite action: builds a Docker image, pushes it to the GCP temp registry,
# and — when Docker Hub credentials are provided (i.e. not a fork) — mirrors
# the image to Docker Hub under a PR/branch-specific tag.
name: "build and push image"
description: "Build a Docker image, push it to the GCP temp registry and optionally mirror it to Docker Hub"

inputs:
  dockerfile:
    description: "dockerfile to build"
    required: true
  image-name:
    description: "image name (without registry)"
    required: true
  username:
    description: "Docker Hub username (empty on forks; skips the Docker Hub push)"
    required: false
    default: ""
  password:
    description: "Docker Hub password/token (empty on forks; skips the Docker Hub push)"
    required: false
    default: ""

# NOTE(review): the previous `outputs.branch` referenced `steps.branch.outputs.branch`,
# but no step with id `branch` exists in this action (copy-paste from the
# "install gh" action) — the output always evaluated to the empty string, so it
# has been removed.

runs:
  using: "composite"
  steps:
    # gcloud
    # https://github.com/pezkuwichain/ci_cd/wiki/GitHub:-Push-Docker-image-to-GCP-Registry
    - name: "Set up Cloud SDK"
      uses: "google-github-actions/setup-gcloud@e427ad8a34f8676edf47cf7d7925499adf3eb74f" # v2.2.1
    - name: "gcloud info"
      shell: bash
      run: "gcloud info"
    - name: "Auth in gcloud registry"
      shell: bash
      run: "gcloud auth configure-docker europe-docker.pkg.dev --quiet"

    - name: build
      shell: bash
      env:
        ZOMBIENET_IMAGE: "docker.io/pezkuwichain/zombienet:v1.3.105"
        IMAGE_TAG: europe-docker.pkg.dev/parity-ci-2024/temp-images/${{ inputs.image-name }}
      run: |
        # Tag with the PR head SHA (or 'master'); merge queues use the short GITHUB_SHA.
        export DOCKER_IMAGES_VERSION=${{ github.event.pull_request.head.sha || 'master' }}
        if [[ "${{ github.event_name }}" == "merge_group" ]]; then export DOCKER_IMAGES_VERSION="${GITHUB_SHA::8}"; fi
        docker build \
          --build-arg VCS_REF="${GITHUB_SHA}" \
          --build-arg BUILD_DATE="$(date -u '+%Y-%m-%dT%H:%M:%SZ')" \
          --build-arg IMAGE_NAME="${{ inputs.image-name }}" \
          --build-arg ZOMBIENET_IMAGE="${ZOMBIENET_IMAGE}" \
          -t "${{ env.IMAGE_TAG }}:$DOCKER_IMAGES_VERSION" \
          -f ${{ inputs.dockerfile }} \
          .
        docker push "${{ env.IMAGE_TAG }}:$DOCKER_IMAGES_VERSION"

    - name: login to dockerhub
      id: login
      # fork check: forks have no credentials, so the Docker Hub mirror is skipped
      if: ${{ inputs.username != '' && inputs.password != '' && github.event_name != 'merge_group' }}
      uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
      with:
        username: ${{ inputs.username }}
        password: ${{ inputs.password }}

    - name: push to dockerhub
      shell: bash
      if: ${{ inputs.username != '' && inputs.password != '' && github.event_name != 'merge_group' }}
      env:
        GITHUB_PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
      run: |
        # Docker Hub tag: <pr-number|master>[-<short-sha>] depending on the trigger.
        export DOCKERHUB_TAG=docker.io/paritypr/${{ inputs.image-name }}:${{ github.event.pull_request.number || 'master' }}
        if [[ "${{ github.event_name }}" == "pull_request" ]]; then export DOCKERHUB_TAG=$DOCKERHUB_TAG-${GITHUB_PR_HEAD_SHA::8}; fi
        if [[ "${{ github.event_name }}" == "push" ]]; then export DOCKERHUB_TAG=$DOCKERHUB_TAG-${GITHUB_SHA::8}; fi
        # Re-tag the image built in the GCP registry and mirror it.
        docker tag "europe-docker.pkg.dev/parity-ci-2024/temp-images/${{ inputs.image-name }}:${{ github.event.pull_request.head.sha || 'master' }}" $DOCKERHUB_TAG
        docker push $DOCKERHUB_TAG
||||
@@ -0,0 +1,22 @@
# Composite action: runs `cargo check` in every direct subdirectory of `root`.
name: 'cargo check runtimes'
description: 'Runs `cargo check` for every directory in provided root.'
inputs:
  root:
    description: "Root directory. Expected to contain several cargo packages inside."
    required: true
runs:
  using: "composite"
  steps:
    - name: Check
      shell: bash
      run: |
        # forklift (build-cache wrapper) reads its config from the home directory
        mkdir -p ~/.forklift
        cp .forklift/config.toml ~/.forklift/config.toml
        cd "${{ inputs.root }}"
        # Iterate directly over the glob; the previous `$(echo */)` form relied
        # on word splitting and would break on directory names with spaces.
        for directory in */; do
          echo "_____Running cargo check for ${directory} ______";
          cd "${directory}";
          pwd;
          SKIP_WASM_BUILD=1 forklift cargo check --locked;
          cd ..;
        done
@@ -0,0 +1,98 @@
# Composite action: downloads a workflow artifact, extracts it, and optionally
# copies selected files to a destination directory.
name: "Download and extract artifact"
description: "Downloads an artifact, extracts it, and optionally copies files to a destination"

inputs:
  artifact-name:
    description: "Name of the artifact to download"
    required: true
  gh-token:
    description: "GITHUB_TOKEN to use for downloading artifacts"
    required: true
  run-id:
    description: "Run ID from which to download the artifact"
    required: true
  extract-path:
    description: "Path where to extract the artifact"
    default: "."
    required: false
  files-to-copy:
    description: "Comma-separated (or newline-separated, remember about |) list of files to copy from the extracted artifact"
    required: false
  destination-path:
    description: "Destination path for copied files"
    required: false
  cleanup:
    description: "Whether to remove downloaded artifacts after copying (true/false)"
    required: false
    default: "false"

runs:
  using: "composite"
  steps:
    - name: Download artifact
      shell: bash
      run: |
        echo "::group::📦 Downloading ${{ inputs.artifact-name }}"
        echo "Artifact: ${{ inputs.artifact-name }}"
        echo "Run ID: ${{ inputs.run-id }}"
        echo "::endgroup::"

    - uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
      with:
        name: ${{ inputs.artifact-name }}
        github-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.run-id }}
        path: ${{ inputs.extract-path }}

    - name: Extract artifact
      shell: bash
      working-directory: ${{ inputs.extract-path }}
      run: |
        echo "::group::📂 Extracting ${{ inputs.artifact-name }}"
        # FIX: `[[ -f *.tar ]]` does NOT glob-expand in bash (it tests for a
        # file literally named "*.tar"), so the pattern branches never matched.
        # Expand the globs into arrays (nullglob => empty array on no match).
        shopt -s nullglob
        tars=(*.tar)
        targzs=(*.tar.gz *.tgz)
        zips=(*.zip)
        if [[ -f artifacts.tar ]]; then
          tar -xvf artifacts.tar
        elif (( ${#tars[@]} > 0 )); then
          tar -xvf "${tars[0]}"
        elif (( ${#targzs[@]} > 0 )); then
          tar -xzvf "${targzs[0]}"
        elif (( ${#zips[@]} > 0 )); then
          unzip "${zips[0]}"
        else
          echo "⚠️ No archive file found to extract"
          ls -la
        fi
        echo "::endgroup::"

    - name: Copy files if specified
      if: inputs.files-to-copy != ''
      env:
        FILES_TO_COPY: ${{ inputs.files-to-copy }}
        DESTINATION_PATH: ${{ inputs.destination-path }}
        EXTRACT_PATH: ${{ inputs.extract-path }}
        CLEANUP: ${{ inputs.cleanup }}
      shell: bash
      run: |
        echo "::group::📋 Copying files from ${{ inputs.artifact-name }}"
        # Create destination directory
        mkdir -p "$DESTINATION_PATH"

        # FIX: feed the loop via process substitution instead of a pipeline —
        # `... | while` runs in a subshell, which silently discarded FILE_COUNT.
        FILE_COUNT=0
        while read -r file; do
          # trim leading and trailing whitespaces
          file="$(echo "$file" | xargs)"
          if [[ -n "$file" ]]; then
            echo "✓ Copying $(basename "$file") to $DESTINATION_PATH"
            cp -r "$EXTRACT_PATH/$file" "$DESTINATION_PATH/"
            FILE_COUNT=$((FILE_COUNT + 1))
          fi
        done < <(echo "$FILES_TO_COPY" | tr ',' '\n')
        echo "Copied $FILE_COUNT file(s)"

        # Cleanup if requested
        if [[ "$CLEANUP" == "true" ]]; then
          echo "🧹 Cleaning up temporary files in $EXTRACT_PATH"
          rm -rf "$EXTRACT_PATH"
        fi
        echo "::endgroup::"
@@ -0,0 +1,104 @@
# Composite action: fetches all binaries that zombienet "native" provider tests
# expect on the filesystem, by downloading+extracting a fixed set of build artifacts.
name: "Download binaries for zombienet tests"
description: "Zombienet native tests expects some set of binaries to be available in the filesystem"

inputs:
  build-id:
    # Filled in: previously an empty description. Grounded by usage below,
    # where this value is passed as `run-id` to download-artifact-extract.
    description: "Run ID of the build workflow to download the artifacts from"
    required: true
  ref-slug:
    description: "Ref slug (e.g branch-name-short)"
    required: true
  gh-token:
    description: "GITHUB_TOKEN to use for downloading artifacts"
    required: true
  destination-path:
    description: "Destination path for copied files"
    required: false

runs:
  using: "composite"
  steps:
    # Each step below downloads one artifact into ./tmp, copies the listed
    # binaries into destination-path, and removes the temporary extract dir.
    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-linux-bizinikiwi-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        files-to-copy: |
          artifacts/bizinikiwi/bizinikiwi
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"

    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-linux-stable-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        files-to-copy: |
          artifacts/pezkuwi
          artifacts/pezkuwi-execute-worker
          artifacts/pezkuwi-prepare-worker
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"

    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-linux-stable-pezcumulus-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        files-to-copy: |
          artifacts/pezkuwi-teyrchain
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"

    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-test-teyrchain-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        files-to-copy: |
          artifacts/test-teyrchain
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"

    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-test-collators-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        files-to-copy: |
          artifacts/adder-collator
          artifacts/undying-collator
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"

    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-malus-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        # TODO: should copy pezkuwi-execute-worker and pezkuwi-prepare-worker?
        # if yes then it overlaps with build-linux-stable - address this
        files-to-copy: |
          artifacts/malus
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"

    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-templates-node-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        files-to-copy: |
          artifacts/minimal-template-node
          artifacts/teyrchain-template-node
          artifacts/solochain-template-node
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"
@@ -0,0 +1,49 @@
# Composite action: reclaims disk space on GitHub-hosted Ubuntu runners by
# deleting pre-installed toolchains and caches the build does not need.
# Every removal is best-effort (`|| true`) so a missing path never fails the job.
name: 'Free Disk Space'
description: 'Frees up disk space on GitHub Actions runners by removing unnecessary software'

runs:
  using: 'composite'
  steps:
    - name: Free Disk Space (Ubuntu)
      shell: bash
      run: |
        echo "=== Disk space before cleanup ==="
        df -h /

        echo "=== Removing unnecessary packages ==="
        # Remove Android SDK (12GB+)
        sudo rm -rf /usr/local/lib/android || true

        # Remove .NET SDK (2GB+)
        sudo rm -rf /usr/share/dotnet || true

        # Remove Haskell/GHC (5GB+)
        sudo rm -rf /opt/ghc || true
        sudo rm -rf /usr/local/.ghcup || true

        # Remove Swift (1.5GB+)
        sudo rm -rf /usr/share/swift || true

        # Remove CodeQL (1GB+)
        sudo rm -rf /opt/hostedtoolcache/CodeQL || true

        # Remove unused tool caches
        sudo rm -rf /opt/hostedtoolcache/Python || true
        sudo rm -rf /opt/hostedtoolcache/Ruby || true
        sudo rm -rf /opt/hostedtoolcache/go || true
        sudo rm -rf /opt/hostedtoolcache/node || true

        # Remove large packages
        sudo apt-get remove -y '^dotnet-.*' '^llvm-.*' 'php.*' '^mongodb-.*' '^mysql-.*' azure-cli google-cloud-cli google-chrome-stable firefox powershell mono-devel libgl1-mesa-dri --fix-missing 2>/dev/null || true
        sudo apt-get autoremove -y 2>/dev/null || true
        sudo apt-get clean 2>/dev/null || true

        # Remove Docker images
        docker system prune -af 2>/dev/null || true

        # Remove swap (1GB+)
        sudo swapoff -a || true
        sudo rm -f /swapfile || true

        echo "=== Disk space after cleanup ==="
        df -h /
@@ -0,0 +1,28 @@
# Composite action: checks out the PR branch with `gh` and exposes the branch
# name as an output (used by downstream steps that need the PR's ref).
# NOTE(review): despite the name, no install step is visible here — presumably
# `gh` is preinstalled on the runner image; confirm against callers.
name: "install gh"
description: "Install the gh cli in a debian based distro and switches to the PR branch."
inputs:
  pr-number:
    description: "Number of the PR"
    required: true
  GH_TOKEN:
    description: "GitHub token"
    required: true
outputs:
  branch:
    description: "Branch name for the PR"
    # Populated by the `branch` step below.
    value: ${{ steps.branch.outputs.branch }}
runs:
  using: "composite"
  steps:
    - name: Set up git
      shell: bash
      # Here it would get the script from previous step
      run: git config --global --add safe.directory '*'
    - shell: bash
      env:
        GITHUB_TOKEN: ${{ inputs.GH_TOKEN }}
      run: gh pr checkout ${{ inputs.pr-number }}
    - name: Export branch name
      id: branch
      shell: bash
      run: echo "branch=$(git rev-parse --abbrev-ref HEAD)" >> "$GITHUB_OUTPUT"
@@ -0,0 +1,28 @@
# Composite action: dispatches the external "workflow-stopper" workflow, which
# cancels all running workflows for this PR to save compute resources.
name: "stop all workflows"
description: "Action stops all workflows in a PR to save compute resources."
inputs:
  app-id:
    description: "App id"
    required: true
  app-key:
    description: "App token"
    required: true
runs:
  using: "composite"
  steps:
    # Typo fixed: step name was "Worfklow stopper - Generate token".
    - name: Workflow stopper - Generate token
      uses: actions/create-github-app-token@d72941d797fd3113feb6b93fd0dec494b13a2547 # v1.12.0
      id: app-token
      with:
        app-id: ${{ inputs.app-id }}
        private-key: ${{ inputs.app-key }}
        owner: "pezkuwichain"
        repositories: "workflow-stopper"
    - name: Workflow stopper - Stop all workflows
      # NOTE(review): mutable tag `v2.x`, unlike the SHA-pinned actions used
      # elsewhere in this repo — consider pinning to a commit SHA.
      uses: octokit/request-action@v2.x
      with:
        route: POST /repos/pezkuwichain/workflow-stopper/actions/workflows/stopper.yml/dispatches
        ref: main
        inputs: '${{ format(''{{ "github_sha": "{0}", "github_repository": "{1}", "github_ref_name": "{2}", "github_workflow_id": "{3}", "github_job_name": "{4}" }}'', github.event.pull_request.head.sha, github.repository, github.ref_name, github.run_id, github.job) }}'
      env:
        GITHUB_TOKEN: ${{ steps.app-token.outputs.token }}
@@ -0,0 +1,107 @@
# Composite action: runs zombienet-sdk tests from a pre-built nextest archive,
# supporting both the "native" provider (downloaded binaries on PATH) and "k8s".
name: "Zombienet-sdk test"
description: "Runs zombienet-sdk tests with archived artifacts"
inputs:
  build-id:
    # Filled in: previously an empty description. Grounded by usage below,
    # where this value is forwarded as the artifact download run-id.
    description: "Run ID of the build workflow to download the artifacts from"
    required: true
  ref-slug:
    description: "Ref slug (e.g branch-name-short)"
    required: true
  test-filter:
    description: "test filter to pass to nextest (e.g: functional::spam_statement_distribution_requests::spam_statement_distribution_requests_test)"
    required: true
  job-name:
    description: "Job name to use for artifact uploading"
    required: true
  prefix:
    description: "Archive prefix for tests files (e.g pezkuwi, pezcumulus or bizinikiwi)"
    required: true
  gh-token:
    description: "GITHUB_TOKEN to use for downloading artifacts"
    required: true

runs:
  using: "composite"
  steps:
    # Log the test configuration; relies on ZOMBIE_PROVIDER and the *_IMAGE
    # env vars being set by the calling workflow.
    - name: common_vars
      shell: bash
      env:
        TEST_FILTER: ${{ inputs.test-filter }}
        PREFIX: ${{ inputs.prefix }}
      run: |
        echo "::group::Test Configuration (SDK)"
        echo "Environment Variables:"
        echo "  ZOMBIENET_INTEGRATION_TEST_IMAGE: $ZOMBIENET_INTEGRATION_TEST_IMAGE"
        echo "  ZOMBIE_PROVIDER: $ZOMBIE_PROVIDER"
        echo "  POLKADOT_IMAGE: $POLKADOT_IMAGE"
        echo "  CUMULUS_IMAGE: $CUMULUS_IMAGE"
        echo "  COL_IMAGE: $COL_IMAGE"
        echo "  MALUS_IMAGE: $MALUS_IMAGE"
        echo ""
        echo "Test Parameters:"
        echo "  Test Filter: $TEST_FILTER"
        echo "  Prefix: $PREFIX"
        echo "  Job Name: ${{ inputs.job-name }}"
        echo ""
        # Show flaky tests information if any are disabled
        if [[ -f .github/zombienet-flaky-tests ]]; then
          FLAKY_COUNT=$(grep -v '^#' .github/zombienet-flaky-tests | grep -v '^$' | wc -l | tr -d ' ')
          if [[ $FLAKY_COUNT -gt 0 ]]; then
            echo "⚠️  Flaky/Disabled Tests: $FLAKY_COUNT test(s) currently disabled"
            echo "📄 See: https://github.com/pezkuwichain/pezkuwi-sdk/blob/${{ github.sha }}/.github/zombienet-flaky-tests"
            echo "📖 Docs: https://github.com/pezkuwichain/pezkuwi-sdk/blob/${{ github.sha }}/.github/ZOMBIENET_FLAKY_TESTS.md"
          fi
        fi
        echo "::endgroup::"

    - name: Download binaries for zombienet native tests
      if: env.ZOMBIE_PROVIDER == 'native'
      uses: ./.github/actions/download-binaries-for-zombienet-tests
      with:
        gh-token: ${{ inputs.gh-token }}
        ref-slug: ${{ inputs.ref-slug }}
        build-id: ${{ inputs.build-id }}
        destination-path: ./bin

    # The nextest archive with the compiled test binaries.
    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: prepare-${{ inputs.prefix }}-zombienet-artifacts-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}

    - name: k8s_auth
      if: env.ZOMBIE_PROVIDER == 'k8s'
      shell: bash
      run: |
        . /home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh
        k8s_auth

    - name: zombie_test
      shell: bash
      env:
        # don't retry sdk tests
        NEXTEST_RETRIES: 0
        TEST_FILTER: ${{ inputs.test-filter }}
        PREFIX: ${{ inputs.prefix }}
      run: |
        # RUN_IN_CI=1 shall be set only for k8s provider
        if [[ "$ZOMBIE_PROVIDER" == "native" ]]; then
          export RUN_IN_CI=0
          # set path to downloaded binaries
          export PATH=$(pwd)/bin:$PATH
          chmod +x $(pwd)/bin/*
        else
          export RUN_IN_CI=1
          # no need to check other runner variables. for k8s they shall store the same value
          if [[ $ZOMBIENET_SDK_DEFAULT_RUNNER == "parity-zombienet" ]]; then
            export ZOMBIE_K8S_CI_NAMESPACE=$(cat /data/namespace)
          fi
        fi

        ls -ltr ./artifacts
        # We want to run tests sequentially, '--no-capture' ensures that.
        # If we want to get rid of '--no-capture' some day, please use '--test-threads 1' or NEXTEST_TEST_THREADS=1
        # Both options cannot coexist for cargo-nextest below v0.9.94
        cargo nextest run --archive-file ./artifacts/${PREFIX}-zombienet-tests.tar.zst --no-capture -- ${TEST_FILTER}
@@ -0,0 +1,104 @@
# Composite action: runs a zombienet v1 test (.zndsl definition), supporting
# both the "native" provider (downloaded binaries on PATH) and "k8s".
name: "Zombienet test v1"
description: "Runs zombienet tests"
inputs:
  test-definition:
    description: "test definition (zndsl file)"
    required: true
  job-name:
    description: "Job name to use for artifact uploading"
    required: true
  local-dir:
    # Typo fixed: "tha contains" -> "that contains".
    description: "Path to the directory that contains the test file (.zndsl)"
    required: true
  concurrency:
    description: "Concurrency to spawn nodes"
    # Action inputs are strings; quote so generic YAML tooling agrees.
    default: "4"
    required: false
  build-id:
    # Filled in: previously an empty description. Grounded by usage below,
    # where this value is forwarded as the artifact download run-id.
    description: "Run ID of the build workflow to download the artifacts from"
    required: true
  ref-slug:
    description: "Ref slug (e.g branch-name-short)"
    required: true
  gh-token:
    description: "GITHUB_TOKEN to use for downloading artifacts"
    required: true

runs:
  using: "composite"
  steps:
    # Log the test configuration; relies on ZOMBIENET_PROVIDER and the *_IMAGE
    # env vars being set by the calling workflow.
    - name: common_vars
      shell: bash
      env:
        TEST_DEFINITION: ${{ inputs.test-definition }}
        LOCAL_PATH: ${{ inputs.local-dir }}
        CONCURRENCY: ${{ inputs.concurrency }}
      run: |
        echo "::group::Test Configuration"
        echo "══════════════════════════════════════════════════════════════════"
        echo "Environment Variables:"
        echo "  ZOMBIENET_INTEGRATION_TEST_IMAGE: $ZOMBIENET_INTEGRATION_TEST_IMAGE"
        echo "  ZOMBIENET_PROVIDER: $ZOMBIENET_PROVIDER"
        echo "  COL_IMAGE: $COL_IMAGE"
        echo ""
        echo "Test Parameters:"
        echo "  Test Definition: $TEST_DEFINITION"
        echo "  Job Name: ${{ inputs.job-name }}"
        echo "  Local Directory: $LOCAL_PATH"
        echo "  Concurrency: $CONCURRENCY"
        echo ""
        # Show flaky tests information if any are disabled
        if [[ -f .github/zombienet-flaky-tests ]]; then
          FLAKY_COUNT=$(grep -v '^#' .github/zombienet-flaky-tests | grep -v '^$' | wc -l | tr -d ' ')
          if [[ $FLAKY_COUNT -gt 0 ]]; then
            echo "⚠️  Flaky/Disabled Tests: $FLAKY_COUNT test(s) currently disabled"
            echo "📄 See: https://github.com/pezkuwichain/pezkuwi-sdk/blob/${{ github.sha }}/.github/zombienet-flaky-tests"
            echo "📖 Docs: https://github.com/pezkuwichain/pezkuwi-sdk/blob/${{ github.sha }}/.github/ZOMBIENET_FLAKY_TESTS.md"
          fi
        fi
        echo "════════════════════════════════════════════════════════════════"
        echo "::endgroup::"

    - name: Download binaries for zombienet native tests
      if: env.ZOMBIENET_PROVIDER == 'native'
      uses: ./.github/actions/download-binaries-for-zombienet-tests
      with:
        gh-token: ${{ inputs.gh-token }}
        ref-slug: ${{ inputs.ref-slug }}
        build-id: ${{ inputs.build-id }}
        destination-path: ./bin

    - name: k8s_auth
      if: env.ZOMBIENET_PROVIDER == 'k8s'
      shell: bash
      run: |
        . /home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh
        k8s_auth

    - name: zombie_test
      shell: bash
      env:
        TEST_DEFINITION: ${{ inputs.test-definition }}
        LOCAL_PATH: ${{ inputs.local-dir }}
        CONCURRENCY: ${{ inputs.concurrency }}
      run: |
        if [[ "$ZOMBIENET_PROVIDER" == "native" ]]; then
          # set path to downloaded binaries
          export PATH=$(pwd)/bin:$PATH
          chmod +x $(pwd)/bin/*

          ./.github/scripts/run-zombienet-test.sh \
            "$(pwd)/$LOCAL_PATH" \
            $CONCURRENCY \
            "$TEST_DEFINITION"
        else
          # no need to check other runner variables. for k8s they shall store the same value
          if [[ $ZOMBIENET_DEFAULT_RUNNER == "parity-zombienet" ]]; then
            export ZOMBIE_K8S_CI_NAMESPACE=$(cat /data/namespace)
          fi

          /home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh \
            --local-dir="$(pwd)/$LOCAL_PATH" \
            --concurrency=$CONCURRENCY \
            --test="$TEST_DEFINITION"
        fi
Reference in New Issue
Block a user