ci: remove all zombienet CI infrastructure

Zombienet tests are upstream Polkadot SDK tests with no custom pallet
coverage. Mainnet has 500K+ blocks, 9 successful upgrades, and zero
breakage — these tests provide no value for our project.

Removed 22 files (2293 lines):
- 6 workflow files (zombienet_*.yml, preflight, flaky-tests check)
- 3 custom actions (zombienet, zombienet-sdk, download-binaries)
- 5 scripts (dispatch, run, parse, process-logs, check-flaky)
- 5 config files (zombienet-env, flaky-tests, test definitions)
- 1 doc file (ZOMBIENET_CI.md)
- Remaining comment references in build-publish-images.yml
This commit is contained in:
2026-03-16 17:27:37 +03:00
parent 86e44c151c
commit 2ad475ceef
22 changed files with 0 additions and 2293 deletions
@@ -1,104 +0,0 @@
# Composite action: fetches the pre-built binaries that zombienet "native"
# provider tests expect on the local filesystem. Each step pulls one build
# artifact from an earlier workflow run (identified by build-id) and copies
# the listed files into destination-path via the download-artifact-extract
# helper action.
name: "Download binaries for zombienet tests"
description: "Zombienet native tests expects some set of binaries to be available in the filesystem"

inputs:
  build-id:
    # Run id of the build workflow that produced the artifacts.
    description: ""
    required: true
  ref-slug:
    description: "Ref slug (e.g branch-name-short)"
    required: true
  gh-token:
    description: "GITHUB_TOKEN to use for downloading artifacts"
    required: true
  destination-path:
    description: "Destination path for copied files"
    required: false

runs:
  using: "composite"
  steps:
    # bizinikiwi node binary
    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-linux-bizinikiwi-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        files-to-copy: |
          artifacts/bizinikiwi/bizinikiwi
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"
    # pezkuwi node and its execute/prepare worker binaries
    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-linux-stable-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        files-to-copy: |
          artifacts/pezkuwi
          artifacts/pezkuwi-execute-worker
          artifacts/pezkuwi-prepare-worker
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"
    # pezkuwi-teyrchain binary
    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-linux-stable-pezcumulus-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        files-to-copy: |
          artifacts/pezkuwi-teyrchain
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"
    # test-teyrchain binary
    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-test-teyrchain-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        files-to-copy: |
          artifacts/test-teyrchain
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"
    # test collator binaries
    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-test-collators-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        files-to-copy: |
          artifacts/adder-collator
          artifacts/undying-collator
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"
    # malus binary
    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-malus-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        # TODO: should copy pezkuwi-execute-worker and pezkuwi-prepare-worker?
        # if yes then it overlaps with build-linux-stable - address this
        files-to-copy: |
          artifacts/malus
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"
    # template node binaries
    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: build-templates-node-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
        extract-path: ./tmp
        files-to-copy: |
          artifacts/minimal-template-node
          artifacts/teyrchain-template-node
          artifacts/solochain-template-node
        destination-path: ${{ inputs.destination-path }}
        cleanup: "true"
-107
View File
@@ -1,107 +0,0 @@
# Composite action that runs zombienet-sdk tests from a pre-built nextest
# archive. Depending on ZOMBIE_PROVIDER (set by the caller's environment) it
# either downloads binaries for the native provider or authenticates against
# k8s, then runs the tests selected by test-filter.
name: "Zombienet-sdk test"
description: "Runs zombienet-sdk tests with archived artifacts"

inputs:
  build-id:
    # Run id of the build workflow that produced the artifacts.
    description: ""
    required: true
  ref-slug:
    description: "Ref slug (e.g branch-name-short)"
    required: true
  test-filter:
    description: "test filter to pass to nextest (e.g: functional::spam_statement_distribution_requests::spam_statement_distribution_requests_test)"
    required: true
  job-name:
    description: "Job name to use for artifact uploading"
    required: true
  prefix:
    description: "Archive prefix for tests files (e.g pezkuwi, pezcumulus or bizinikiwi)"
    required: true
  gh-token:
    description: "GITHUB_TOKEN to use for downloading artifacts"
    required: true

runs:
  using: "composite"
  steps:
    # Print the effective configuration and any currently-disabled flaky tests.
    - name: common_vars
      shell: bash
      env:
        TEST_FILTER: ${{ inputs.test-filter }}
        PREFIX: ${{ inputs.prefix }}
      run: |
        echo "::group::Test Configuration (SDK)"
        echo "Environment Variables:"
        echo " ZOMBIENET_INTEGRATION_TEST_IMAGE: $ZOMBIENET_INTEGRATION_TEST_IMAGE"
        echo " ZOMBIE_PROVIDER: $ZOMBIE_PROVIDER"
        echo " POLKADOT_IMAGE: $POLKADOT_IMAGE"
        echo " CUMULUS_IMAGE: $CUMULUS_IMAGE"
        echo " COL_IMAGE: $COL_IMAGE"
        echo " MALUS_IMAGE: $MALUS_IMAGE"
        echo ""
        echo "Test Parameters:"
        echo " Test Filter: $TEST_FILTER"
        echo " Prefix: $PREFIX"
        echo " Job Name: ${{ inputs.job-name }}"
        echo ""
        # Show flaky tests information if any are disabled
        if [[ -f .github/zombienet-flaky-tests ]]; then
          FLAKY_COUNT=$(grep -v '^#' .github/zombienet-flaky-tests | grep -v '^$' | wc -l | tr -d ' ')
          if [[ $FLAKY_COUNT -gt 0 ]]; then
            echo "⚠️ Flaky/Disabled Tests: $FLAKY_COUNT test(s) currently disabled"
            echo "📄 See: https://github.com/pezkuwichain/pezkuwi-sdk/blob/${{ github.sha }}/.github/zombienet-flaky-tests"
            echo "📖 Docs: https://github.com/pezkuwichain/pezkuwi-sdk/blob/${{ github.sha }}/.github/ZOMBIENET_FLAKY_TESTS.md"
          fi
        fi
        echo "::endgroup::"
    # Native provider needs the node binaries on the local filesystem.
    - name: Download binaries for zombienet native tests
      if: env.ZOMBIE_PROVIDER == 'native'
      uses: ./.github/actions/download-binaries-for-zombienet-tests
      with:
        gh-token: ${{ inputs.gh-token }}
        ref-slug: ${{ inputs.ref-slug }}
        build-id: ${{ inputs.build-id }}
        destination-path: ./bin
    # Fetch the nextest archive produced by the prepare job.
    - uses: ./.github/actions/download-artifact-extract
      with:
        artifact-name: prepare-${{ inputs.prefix }}-zombienet-artifacts-${{ inputs.ref-slug }}
        gh-token: ${{ inputs.gh-token }}
        run-id: ${{ inputs.build-id }}
    - name: k8s_auth
      if: env.ZOMBIE_PROVIDER == 'k8s'
      shell: bash
      run: |
        . /home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh
        k8s_auth
    - name: zombie_test
      shell: bash
      env:
        # don't retry sdk tests
        NEXTEST_RETRIES: 0
        TEST_FILTER: ${{ inputs.test-filter }}
        PREFIX: ${{ inputs.prefix }}
      run: |
        # RUN_IN_CI=1 shall be set only for k8s provider
        if [[ "$ZOMBIE_PROVIDER" == "native" ]]; then
          export RUN_IN_CI=0
          # set path to downloaded binaries
          export PATH=$(pwd)/bin:$PATH
          chmod +x $(pwd)/bin/*
        else
          export RUN_IN_CI=1
          # no need to check other runner variables. for k8s they shall store the same value
          if [[ $ZOMBIENET_SDK_DEFAULT_RUNNER == "pezkuwi-runner" ]]; then
            export ZOMBIE_K8S_CI_NAMESPACE=$(cat /data/namespace)
          fi
        fi
        ls -ltr ./artifacts
        # We want to run tests sequentially, '--no-capture' ensures that.
        # If we want to get rid of '--no-capture' some day, please use '--test-threads 1' or NEXTEST_TEST_THREADS=1
        # Both options cannot coexist for cargo-nextest below v0.9.94
        cargo nextest run --archive-file ./artifacts/${PREFIX}-zombienet-tests.tar.zst --no-capture -- ${TEST_FILTER}
-104
View File
@@ -1,104 +0,0 @@
# Composite action that runs a zombienet v1 test (a .zndsl definition).
# Depending on ZOMBIENET_PROVIDER (set by the caller's environment) it either
# downloads binaries and runs the test locally via run-zombienet-test.sh, or
# authenticates against k8s and delegates to run-test-local-env-manager.sh.
name: "Zombienet test v1"
description: "Runs zombienet tests"

inputs:
  test-definition:
    description: "test definition (zndsl file)"
    required: true
  job-name:
    description: "Job name to use for artifact uploading"
    required: true
  local-dir:
    description: "Path to the directory that contains the test file (.zndsl)"
    required: true
  concurrency:
    description: "Concurrency to spawn nodes"
    # Quoted: composite-action inputs are strings; an unquoted 4 is a YAML
    # integer that only works because GitHub stringifies it.
    default: "4"
    required: false
  build-id:
    # Run id of the build workflow that produced the artifacts.
    description: ""
    required: true
  ref-slug:
    description: "Ref slug (e.g branch-name-short)"
    required: true
  gh-token:
    description: "GITHUB_TOKEN to use for downloading artifacts"
    required: true

runs:
  using: "composite"
  steps:
    # Print the effective configuration and any currently-disabled flaky tests.
    - name: common_vars
      shell: bash
      env:
        TEST_DEFINITION: ${{ inputs.test-definition }}
        LOCAL_PATH: ${{ inputs.local-dir }}
        CONCURRENCY: ${{ inputs.concurrency }}
      run: |
        echo "::group::Test Configuration"
        echo "══════════════════════════════════════════════════════════════════"
        echo "Environment Variables:"
        echo " ZOMBIENET_INTEGRATION_TEST_IMAGE: $ZOMBIENET_INTEGRATION_TEST_IMAGE"
        echo " ZOMBIENET_PROVIDER: $ZOMBIENET_PROVIDER"
        echo " COL_IMAGE: $COL_IMAGE"
        echo ""
        echo "Test Parameters:"
        echo " Test Definition: $TEST_DEFINITION"
        echo " Job Name: ${{ inputs.job-name }}"
        echo " Local Directory: $LOCAL_PATH"
        echo " Concurrency: $CONCURRENCY"
        echo ""
        # Show flaky tests information if any are disabled
        if [[ -f .github/zombienet-flaky-tests ]]; then
          FLAKY_COUNT=$(grep -v '^#' .github/zombienet-flaky-tests | grep -v '^$' | wc -l | tr -d ' ')
          if [[ $FLAKY_COUNT -gt 0 ]]; then
            echo "⚠️ Flaky/Disabled Tests: $FLAKY_COUNT test(s) currently disabled"
            echo "📄 See: https://github.com/pezkuwichain/pezkuwi-sdk/blob/${{ github.sha }}/.github/zombienet-flaky-tests"
            echo "📖 Docs: https://github.com/pezkuwichain/pezkuwi-sdk/blob/${{ github.sha }}/.github/ZOMBIENET_FLAKY_TESTS.md"
          fi
        fi
        echo "════════════════════════════════════════════════════════════════"
        echo "::endgroup::"
    # Native provider needs the node binaries on the local filesystem.
    - name: Download binaries for zombienet native tests
      if: env.ZOMBIENET_PROVIDER == 'native'
      uses: ./.github/actions/download-binaries-for-zombienet-tests
      with:
        gh-token: ${{ inputs.gh-token }}
        ref-slug: ${{ inputs.ref-slug }}
        build-id: ${{ inputs.build-id }}
        destination-path: ./bin
    - name: k8s_auth
      if: env.ZOMBIENET_PROVIDER == 'k8s'
      shell: bash
      run: |
        . /home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh
        k8s_auth
    - name: zombie_test
      shell: bash
      env:
        TEST_DEFINITION: ${{ inputs.test-definition }}
        LOCAL_PATH: ${{ inputs.local-dir }}
        CONCURRENCY: ${{ inputs.concurrency }}
      run: |
        if [[ "$ZOMBIENET_PROVIDER" == "native" ]]; then
          # set path to downloaded binaries
          export PATH=$(pwd)/bin:$PATH
          chmod +x $(pwd)/bin/*
          ./.github/scripts/run-zombienet-test.sh \
            "$(pwd)/$LOCAL_PATH" \
            $CONCURRENCY \
            "$TEST_DEFINITION"
        else
          # no need to check other runner variables. for k8s they shall store the same value
          if [[ $ZOMBIENET_DEFAULT_RUNNER == "pezkuwi-runner" ]]; then
            export ZOMBIE_K8S_CI_NAMESPACE=$(cat /data/namespace)
          fi
          /home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh \
            --local-dir="$(pwd)/$LOCAL_PATH" \
            --concurrency=$CONCURRENCY \
            --test="$TEST_DEFINITION"
        fi
@@ -1,93 +0,0 @@
#!/usr/bin/env bash
# Validates the .github/zombienet-flaky-tests file to ensure:
# 1. Each entry has the correct format: <test-name>:<issue-number>
# 2. The referenced number is a GitHub Issue
# 3. The GitHub issue exists
# 4. The issue is OPEN (warns if closed)
#
# errexit (-e) is deliberately NOT enabled so that validation keeps going
# after a bad entry and reports every problem in a single pass.
set -uo pipefail

FLAKY_TESTS_FILE="${1:-.github/zombienet-flaky-tests}"

if [[ ! -f "$FLAKY_TESTS_FILE" ]]; then
  echo "Error: File not found: $FLAKY_TESTS_FILE" >&2
  exit 1
fi

if ! command -v gh &> /dev/null; then
  echo "Error: gh CLI is not installed" >&2
  exit 1
fi

echo "Validating $FLAKY_TESTS_FILE..."
echo

has_errors=false
line_num=0

while IFS= read -r line || [[ -n "$line" ]]; do
  line_num=$((line_num + 1))
  # Skip blank lines AND comment lines: the file format allows '#' comments
  # (other consumers strip them with `grep -v '^#'`), so they must not be
  # reported as malformed entries.
  if [[ -z "$line" || "$line" =~ ^[[:space:]]*# ]]; then
    continue
  fi
  # Parse format: test-name:issue-number
  if [[ ! "$line" =~ ^([^:]+):([0-9]+)$ ]]; then
    echo "❌ Line $line_num: Missing required issue number" >&2
    echo " Entry: '$line'" >&2
    echo " Expected format: <test-name>:<issue-number>" >&2
    echo " Example: zombienet-pezkuwi-test-name:1234" >&2
    has_errors=true
    continue
  fi
  test_name="${BASH_REMATCH[1]}"
  issue_number="${BASH_REMATCH[2]}"
  # errexit is off for the whole script, so a failing gh call is safe to
  # inspect via its exit code. (The previous `set +e` / `set -e` pair
  # wrongly enabled errexit for the remainder of the script.)
  issue_data=$(gh issue view "$issue_number" --json state,title,url 2>&1)
  gh_exit_code=$?
  if [[ $gh_exit_code -ne 0 ]]; then
    echo "❌ Line $line_num: Issue #$issue_number does not exist" >&2
    echo " Test: $test_name" >&2
    has_errors=true
    continue
  fi
  url=$(echo "$issue_data" | jq -r '.url')
  state=$(echo "$issue_data" | jq -r '.state')
  title=$(echo "$issue_data" | jq -r '.title')
  # Check if it's an issue (not a PR) by verifying the URL contains '/issues/'
  if [[ ! "$url" =~ /issues/ ]]; then
    echo "❌ Line $line_num: #$issue_number is a Pull Request, not an Issue" >&2
    echo " Test: $test_name" >&2
    echo " URL: $url" >&2
    echo " Please reference a GitHub Issue, not a PR" >&2
    has_errors=true
    continue
  fi
  if [[ "$state" == "OPEN" ]]; then
    echo "✅ Line $line_num: $test_name -> Issue #$issue_number (open)"
  else
    echo "⚠️ Line $line_num: Issue #$issue_number is closed: '$title'" >&2
    echo " Test: $test_name" >&2
    echo " Consider removing this entry if the issue is resolved." >&2
  fi
done < "$FLAKY_TESTS_FILE"

echo
if [[ "$has_errors" == "true" ]]; then
  echo "❌ Validation failed with errors" >&2
  exit 1
else
  echo "✅ All entries are valid"
  exit 0
fi
@@ -1,123 +0,0 @@
#!/bin/bash
# Zombienet Workflow Dispatcher
#
# This script triggers GitHub Actions workflows for zombienet tests and monitors their execution.
# It can run workflows multiple times for reliability testing and optionally filter tests by pattern.
# Results are automatically saved to a timestamped CSV file for analysis.
#
# Features:
# - Trigger workflows on specific branches
# - Filter tests by pattern (useful for debugging specific tests)
# - Run workflows multiple times for flaky test detection
# - Monitor workflow completion and collect results
# - Export results to CSV with job details (ID, name, conclusion, timing, URLs)
#
# Requirements:
# - GitHub CLI (gh) must be installed and authenticated
# - Must be run from pezkuwi-sdk repository root
# - Target branch must have corresponding PR with CI enabled

# Exit on error
# set -e

# Print a timestamped log line to stdout.
function dbg {
  local msg="$@"
  local tstamp=$(date "+%Y-%m-%d %T")
  printf "%s - %s\n" "$tstamp" "$msg"
}

# Append the zombienet-* job results of a completed run to the CSV file.
# $1 = run id, $2 = branch name, $3 = CSV output path.
function write_job_results_to_csv {
  local run_id="$1"
  local branch="$2"
  local csv_file="$3"
  dbg "Writing job results for run $run_id to $csv_file"
  # Get job details for the completed run, filtering only jobs starting with 'zombienet-' and with success or failure conclusions
  gh run view "$run_id" --json jobs --jq \
    '.jobs[] | select(.name | startswith("zombienet-")) |
     select(.conclusion == "success" or .conclusion == "failure") |
     [.databaseId, .name, .conclusion, .startedAt, "'"$branch"'", .url] | @csv' >> "$csv_file"
}

# Parse command line arguments
WORKFLOW_FILE=""
BRANCH=""
MAX_RESULT_CNT=-1  # -1 means loop forever
TEST_PATTERN=""
while getopts "w:b:m:p:h" opt; do
  case $opt in
    w) WORKFLOW_FILE="$OPTARG" ;;
    b) BRANCH="$OPTARG" ;;
    m) MAX_RESULT_CNT="$OPTARG" ;;
    p) TEST_PATTERN="$OPTARG" ;;
    h) echo "Usage: $0 -w <workflow-file> -b <branch> [-m max-triggers] [-p test-pattern]"
       echo " -w: Workflow file (required)"
       echo " -b: Branch name (required)"
       echo " -m: Maximum number of triggers (optional, default: infinite)"
       echo " -p: Test pattern for workflow input (optional)"
       exit 0 ;;
    \?) echo "Invalid option -$OPTARG" >&2
        echo "Use -h for help"
        exit 1 ;;
  esac
done

if [[ -z "$WORKFLOW_FILE" || -z "$BRANCH" ]]; then
  echo "Error: Both workflow file (-w) and branch (-b) are required"
  echo "Usage: $0 -w <workflow-file> -b <branch> [-m max-triggers] [-p test-pattern]"
  echo "Use -h for help"
  exit 1
fi

# Create CSV file with headers
CSV_FILE="workflow_results_$(date +%Y%m%d_%H%M%S).csv"
echo "job_id,job_name,conclusion,started_at,branch,job_url" > "$CSV_FILE"
dbg "Created CSV file: $CSV_FILE"

dbg "Starting loop for workflow: $WORKFLOW_FILE on branch: $BRANCH"
TRIGGER_CNT=0
RESULT_CNT=0
# Main loop: wait for the previous dispatch to finish, record its results,
# then dispatch again, until MAX_RESULT_CNT result sets are collected.
while [[ $MAX_RESULT_CNT -eq -1 || $RESULT_CNT -lt $MAX_RESULT_CNT ]]; do
  dbg "Waiting until workflow $WORKFLOW_FILE (branch: $BRANCH) jobs are completed"
  while true ; do
    echo ""
    gh run list --workflow=$WORKFLOW_FILE -e workflow_dispatch -b $BRANCH -L 5
    sleep 2
    # if job is completed it should have non-empty conclusion field
    ALL_JOBS_COMPLETED=$(gh run list --workflow=$WORKFLOW_FILE -e workflow_dispatch -b $BRANCH --json conclusion --jq 'all(.[]; .conclusion != "")')
    if [[ "$ALL_JOBS_COMPLETED" == "true" ]]; then
      break
    fi
    sleep 60
  done
  dbg "Workflow $WORKFLOW_FILE (branch: $BRANCH) jobs completed"
  # Skip the first iteration - latest run id is not the one we triggered here
  if [ $TRIGGER_CNT -gt 0 ]; then
    # Get the most recent completed run ID and write job results to CSV
    LATEST_RUN_ID=$(gh run list --workflow=$WORKFLOW_FILE -e workflow_dispatch -b $BRANCH -L 1 --json databaseId --jq '.[0].databaseId')
    write_job_results_to_csv "$LATEST_RUN_ID" "$BRANCH" "$CSV_FILE"
    RESULT_CNT=$(( RESULT_CNT + 1 ))
  fi
  TRIGGER_CNT=$(( TRIGGER_CNT + 1 ))
  dbg "Triggering #$TRIGGER_CNT workflow $WORKFLOW_FILE (branch: $BRANCH)"
  if [[ -n "$TEST_PATTERN" ]]; then
    gh workflow run "$WORKFLOW_FILE" --ref "$BRANCH" -f test_pattern="$TEST_PATTERN"
  else
    gh workflow run "$WORKFLOW_FILE" --ref "$BRANCH"
  fi
  dbg "Sleeping 60s"
  sleep 60
done
-63
View File
@@ -1,63 +0,0 @@
#!/usr/bin/env python3
"""
Zombienet Test Matrix Parser
This script parses YAML test definition files and converts them to JSON format
for use as GitHub Actions matrix jobs. It provides filtering capabilities to:
1. Exclude flaky tests (unless a specific test pattern is provided)
2. Filter tests by name pattern for targeted execution
3. Convert YAML test definitions to JSON matrix format
The script is used by GitHub Actions workflows to dynamically generate
test matrices based on YAML configuration files, enabling flexible
test execution and maintenance.
Usage:
python parse-zombienet-tests.py --matrix tests.yml [--flaky-tests flaky.txt] [--test-pattern pattern]
Output:
JSON array of test job objects suitable for GitHub Actions matrix strategy
"""
import argparse
import yaml
import json
import re
def parse_args():
    """Parse command-line arguments for the matrix parser.

    Returns:
        argparse.Namespace with ``matrix`` (path to the YAML matrix file,
        required), ``flaky_tests`` (newline-separated job names, default "")
        and ``test_pattern`` (regex for job names, default "").
    """
    parser = argparse.ArgumentParser(description="Parse test matrix YAML file with optional filtering")
    parser.add_argument("--matrix", required=True, help="Path to the YAML matrix file")
    parser.add_argument("--flaky-tests", default="", help="Newline-separated list of flaky job names")
    parser.add_argument("--test-pattern", default="", help="Regex pattern to match job_name")
    return parser.parse_args()
def load_jobs(matrix_path):
    """Load the job definitions from a YAML matrix file.

    Args:
        matrix_path: Path to the YAML file; expected to contain a list of
            job mappings (each with a "job-name" key — see filter_jobs).

    Returns:
        The parsed YAML document (a list of dicts for well-formed input).
    """
    with open(matrix_path, "r") as f:
        return yaml.safe_load(f)
def filter_jobs(jobs, flaky_tests, test_pattern):
    """Filter matrix jobs by flakiness and an optional name pattern.

    Args:
        jobs: List of job dicts, each identified by its "job-name" key
            (missing key is treated as the empty name).
        flaky_tests: Newline-separated job names to exclude; blank lines
            and surrounding whitespace are ignored.
        test_pattern: Optional regex. When non-empty, ONLY jobs whose name
            matches are returned and the flaky list is ignored — this lets
            a flaky test still be run on demand.

    Returns:
        The filtered list of job dicts, original order preserved.
    """
    if test_pattern:
        # Explicit pattern overrides the flaky list: run everything matching.
        return [job for job in jobs if re.search(test_pattern, job.get("job-name", ""))]
    flaky_set = {name.strip() for name in flaky_tests.splitlines() if name.strip()}
    return [job for job in jobs if job.get("job-name", "") not in flaky_set]
def main():
    """Entry point: parse args, load the matrix, filter it, emit JSON."""
    args = parse_args()
    jobs = load_jobs(args.matrix)
    result = filter_jobs(jobs, args.flaky_tests, args.test_pattern)
    # Printed to stdout so a workflow step can capture it as a matrix input.
    print(json.dumps(result))

if __name__ == "__main__":
    main()
-214
View File
@@ -1,214 +0,0 @@
#!/bin/bash
set -euo pipefail

# This script processes logs produced by nodes spawned using the zombienet-sdk framework.
# The logs are prepared for upload as GitHub artifacts.
# If Loki logging is available, the corresponding log URLs are also printed.

# NOTE: Loki URL disabled - Pezkuwi does not use external Grafana.
# Zombienet logs are available as GitHub Actions artifacts.
LOKI_URL_FOR_NODE=''
LOKI_DIR_FOR_NATIVE_LOGS="/tmp/zombienet"

# JQ queries (v1 and sdk zombie.json files have different layouts)
JQ_QUERY_RELAY_V1='.relay[].name'
JQ_QUERY_RELAY_SDK='.relay.nodes[].name'
JQ_QUERY_PARA_NODES_V1='.paras[$pid].nodes[].name'
JQ_QUERY_PARA_NODES_SDK='.teyrchains[$pid][] .collators[].name'

# current time in milliseconds + 60 secs to allow loki to ingest logs
TO=$(($(date +%s%3N) + 60000))

# Substitute the namespace/pod/time placeholders into the Loki URL template.
# Reads globals NS and FROM (set by the process_logs_* callers).
# $1 = node name, $2 = "to" timestamp (ms).
make_url() {
  local name="$1"
  local to="$2"
  local url="${LOKI_URL_FOR_NODE//\{\{namespace\}\}/$NS}"
  url="${url//\{\{podName\}\}/$name}"
  url="${url//\{\{from\}\}/$FROM}"
  url="${url//\{\{to\}\}/$to}"
  echo "$url"
}
# Since we don't have the zombie.json file, we will make the best-effort to send the logs.
# Discovers log files by glob (both sdk and v1 layouts), copies them into
# $2 (the target/logs dir) and appends them to the Loki spool when present.
# $1 = base zombie-* directory, $2 = target logs directory.
# Returns 1 when no log files are found at all.
process_logs_from_fallback() {
  local BASE_DIR="$1"
  local TARGET_DIR="$2"
  # Extract namespace from BASE_DIR (e.g., /tmp/zombie-abc123 -> zombie-abc123)
  NS=$(basename "$BASE_DIR")
  echo "Using fallback mode for namespace: $NS"
  # Use current time as FROM since we don't have zombie.json
  FROM=$(($(date +%s%3N) - 600000)) # 10 minutes ago
  # Find all logs with glob patterns
  local log_files=()
  # Search for SDK pattern: BASE_DIR/<name>/<name>.log
  if [[ -d "$BASE_DIR" ]]; then
    for node_dir in "$BASE_DIR"/*; do
      if [[ -d "$node_dir" && "$node_dir" != "$TARGET_DIR" ]]; then
        local node_name=$(basename "$node_dir")
        if [[ -f "$node_dir/$node_name.log" ]]; then
          log_files+=("$node_dir/$node_name.log")
        fi
      fi
    done
  fi
  # Search for v1 pattern: BASE_DIR/logs/<name>.log
  if [[ -d "$TARGET_DIR" ]]; then
    for log_file in "$TARGET_DIR"/*.log; do
      if [[ -f "$log_file" ]]; then
        log_files+=("$log_file")
      fi
    done
  fi
  if [[ ${#log_files[@]} -eq 0 ]]; then
    echo "::warning ::No log files found in $BASE_DIR using glob patterns"
    return 1
  fi
  echo "Found ${#log_files[@]} log file(s) using glob patterns"
  echo "Nodes:"
  for log_file in "${log_files[@]}"; do
    # Extract node name from log file path
    local name=$(basename "$log_file" .log)
    local_to=$TO
    # Copy log to target directory if not already there
    if [[ "$log_file" != "$TARGET_DIR/$name.log" ]]; then
      if ! cp "$log_file" "$TARGET_DIR/$name.log" 2>/dev/null; then
        echo "::warning ::Failed to copy log for $name"
        continue
      fi
    fi
    # Send logs to loki
    if [[ -d "$LOKI_DIR_FOR_NATIVE_LOGS" ]]; then
      if [[ -f "$TARGET_DIR/$name.log" ]]; then
        awk -v NS="$NS" -v NAME="$name" '{print NS" "NAME" " $0}' "$TARGET_DIR/$name.log" >> "$LOKI_DIR_FOR_NATIVE_LOGS/to-loki.log"
        local_to=$(($(date +%s%3N) + 60000))
      fi
    fi
    echo -e "\t$name: $(make_url "$name" "$local_to")"
  done
  echo ""
}
# Collects node logs using the metadata in zombie.json: resolves the node
# names via jq (v1 vs sdk layout is detected from the namespace format),
# fetches logs from k8s or copies them from the native provider's layout,
# spools them for Loki when available, and prints per-node URLs.
# $1 = base zombie-* directory, $2 = target logs directory, $3 = zombie.json path.
process_logs_from_zombie_file() {
  local BASE_DIR="$1"
  local TARGET_DIR="$2"
  local ZOMBIE_JSON="$3"
  # Extract namespace (ns in sdk / namespace in v1)
  NS=$(jq -r '.ns // .namespace' "$ZOMBIE_JSON")
  # test start time in milliseconds
  FROM=$(jq -r '.start_time_ts' "$ZOMBIE_JSON")
  echo "Relay nodes:"
  JQ_QUERY_RELAY=$JQ_QUERY_RELAY_V1
  JQ_QUERY_PARA_NODES=$JQ_QUERY_PARA_NODES_V1
  # sdk namespaces look like zombie-<hex>-...; switch to the sdk queries then
  if [[ $(echo "$NS" | grep -E "zombie-[A-Fa-f0-9]+-") ]]; then
    JQ_QUERY_RELAY=$JQ_QUERY_RELAY_SDK
    JQ_QUERY_PARA_NODES=$JQ_QUERY_PARA_NODES_SDK
  fi;
  jq -r $JQ_QUERY_RELAY "$ZOMBIE_JSON" | while read -r name; do
    [[ -z "$name" ]] && continue
    local_to=$TO
    if [[ "${ZOMBIE_PROVIDER:-}" == "k8s" ]]; then
      # Fetching logs from k8s
      if ! kubectl logs "$name" -c "$name" -n "$NS" > "$TARGET_DIR/$name.log" 2>&1; then
        echo "::warning ::Failed to fetch logs for $name"
      fi
    else
      # zombienet v1 dump the logs to the `/logs` directory
      if [[ ! -f "$TARGET_DIR/$name.log" ]]; then
        # `sdk` use this pattern to store the logs in native provider
        if [[ -f "$BASE_DIR/$name/$name.log" ]]; then
          cp "$BASE_DIR/$name/$name.log" "$TARGET_DIR/$name.log"
        else
          echo "::warning ::Log file not found: $BASE_DIR/$name/$name.log"
          continue
        fi
      fi
      # send logs to loki
      if [[ -d "$LOKI_DIR_FOR_NATIVE_LOGS" && -f "$TARGET_DIR/$name.log" ]]; then
        awk -v NS="$NS" -v NAME="$name" '{print NS" "NAME" " $0}' "$TARGET_DIR/$name.log" >> "$LOKI_DIR_FOR_NATIVE_LOGS/to-loki.log"
        local_to=$(($(date +%s%3N) + 60000))
      fi
    fi
    echo -e "\t$name: $(make_url "$name" "$local_to")"
  done
  echo ""
  # Handle teyrchains grouped by paraId
  jq -r '.paras // .teyrchains | to_entries[] | "\(.key)"' "$ZOMBIE_JSON" | while read -r para_id; do
    echo "ParaId: $para_id"
    jq -r --arg pid "$para_id" "$JQ_QUERY_PARA_NODES" "$ZOMBIE_JSON" | while read -r name; do
      [[ -z "$name" ]] && continue
      local_to=$TO
      if [[ "${ZOMBIE_PROVIDER:-}" == "k8s" ]]; then
        # Fetching logs from k8s
        if ! kubectl logs "$name" -c "$name" -n "$NS" > "$TARGET_DIR/$name.log" 2>&1; then
          echo "::warning ::Failed to fetch logs for $name"
        fi
      else
        # zombienet v1 dump the logs to the `/logs` directory
        if [[ ! -f "$TARGET_DIR/$name.log" ]]; then
          # `sdk` use this pattern to store the logs in native provider
          if [[ -f "$BASE_DIR/$name/$name.log" ]]; then
            cp "$BASE_DIR/$name/$name.log" "$TARGET_DIR/$name.log"
          else
            echo "::warning ::Log file not found: $BASE_DIR/$name/$name.log"
            continue
          fi
        fi
        # send logs to loki
        if [[ -d "$LOKI_DIR_FOR_NATIVE_LOGS" && -f "$TARGET_DIR/$name.log" ]]; then
          awk -v NS="$NS" -v NAME="$name" '{print NS" "NAME" " $0}' "$TARGET_DIR/$name.log" >> "$LOKI_DIR_FOR_NATIVE_LOGS/to-loki.log"
          local_to=$(($(date +%s%3N) + 60000))
        fi
      fi
      echo -e "\t$name: $(make_url "$name" "$local_to")"
    done
    echo ""
  done
}
# Main execution - Process all zombie-* directories (supports rstest with multiple tests per job)
BASE_DIRS=$(ls -dt /tmp/zombie-* 2>/dev/null || true)
if [[ -z "$BASE_DIRS" ]]; then
  echo "No zombie directories found in /tmp/zombie-*"
  exit 0
fi

for BASE_DIR in $BASE_DIRS; do
  echo "Processing directory: $BASE_DIR"
  # Make sure target directory exists
  TARGET_DIR="$BASE_DIR/logs"
  mkdir -p "$TARGET_DIR"
  ZOMBIE_JSON="$BASE_DIR/zombie.json"
  if [[ ! -f "$ZOMBIE_JSON" ]]; then
    echo "Zombie file $ZOMBIE_JSON not present, calling fallback"
    # `|| true` guards against `set -e`: the fallback returns 1 when it
    # finds no logs, which would otherwise abort the whole script instead
    # of moving on to the next zombie-* directory.
    process_logs_from_fallback "$BASE_DIR" "$TARGET_DIR" || true
  else
    # we have a zombie.json file, let process it
    echo "Processing logs from zombie.json"
    process_logs_from_zombie_file "$BASE_DIR" "$TARGET_DIR" "$ZOMBIE_JSON"
  fi
  echo ""
done

# sleep for a minute to give alloy time to forward logs
sleep 60
View File
@@ -1,85 +0,0 @@
#!/usr/bin/env bash
# This script executes a given zombienet test for the `native` provider.
# It is equivalent to running run-test-local-env-manager.sh for the `k8s` provider.
#
# Args:
#   $1 - LOCAL_DIR: directory containing the test files (.zndsl)
#   $2 - CONCURRENCY: concurrency passed to zombie via -c
#   $3 - TEST_TO_RUN: test definition file name to execute
#   $4 - ZOMBIE_BASE_DIR (optional): base dir passed to zombie via -d

# Locate the requested test inside OUTPUT_DIR and run it with the native provider.
# Sets EXIT_STATUS for the final `exit` below.
function run_test {
  cd "${OUTPUT_DIR}"
  # Initialize explicitly: the check after the loop previously relied on an
  # unset variable coercing to 0 inside the arithmetic comparison.
  TEST_FOUND=0
  for i in $(find ${OUTPUT_DIR} -name "${TEST_TO_RUN}"| head -1); do
    TEST_FOUND=1
    # in order to let native provider work properly we need
    # to unset ZOMBIENET_IMAGE, which controls 'inCI' internal flag.
    # ZOMBIENET_IMAGE not set && RUN_IN_CONTAINER=0 => inCI=false
    # Apparently inCI=true works properly only with k8s provider
    unset ZOMBIENET_IMAGE
    if [ -z "$ZOMBIE_BASE_DIR" ]; then
      ${ZOMBIE_COMMAND} -p native -c $CONCURRENCY test $i
    else
      ${ZOMBIE_COMMAND} -p native -c $CONCURRENCY -d $ZOMBIE_BASE_DIR -f test $i
    fi;
    EXIT_STATUS=$?
  done;
  # No matching test definition found: report failure.
  if [[ $TEST_FOUND -lt 1 ]]; then
    EXIT_STATUS=1
  fi;
}

# Create a timestamped working directory so parallel runs don't collide.
function create_isolated_dir {
  TS=$(date +%s)
  ISOLATED=${OUTPUT_DIR}/${TS}
  mkdir -p ${ISOLATED}
  OUTPUT_DIR="${ISOLATED}"
}

# Copy the test inputs into the isolated working directory.
function copy_to_isolated {
  cd "${SCRIPT_PATH}"
  echo $(pwd)
  cp -r "${LOCAL_DIR}"/* "${OUTPUT_DIR}"
}

function rm_isolated_dir {
  echo "Removing ${OUTPUT_DIR}"
  rm -rf "${OUTPUT_DIR}"
}

# Timestamped logger; level "DIE" logs as ERROR and exits 1.
function log {
  local lvl msg fmt
  lvl=$1 msg=$2
  fmt='+%Y-%m-%d %H:%M:%S'
  lg_date=$(date "${fmt}")
  if [[ "${lvl}" = "DIE" ]] ; then
    lvl="ERROR"
    echo -e "\n${lg_date} - ${lvl} - ${msg}"
    exit 1
  else
    echo -e "\n${lg_date} - ${lvl} - ${msg}"
  fi
}

set -x
SCRIPT_NAME="$0"
SCRIPT_PATH=$(dirname "$0")               # relative
SCRIPT_PATH=$(cd "${SCRIPT_PATH}" && pwd) # absolutized and normalized
ZOMBIE_COMMAND=zombie
EXIT_STATUS=0

# args
LOCAL_DIR="$1"
CONCURRENCY="$2"
TEST_TO_RUN="$3"
# Default to empty so the -z check in run_test works even when $4 is omitted.
ZOMBIE_BASE_DIR="${4:-}"

cd "${SCRIPT_PATH}"
OUTPUT_DIR="${SCRIPT_PATH}"
create_isolated_dir
copy_to_isolated
run_test
rm_isolated_dir
log INFO "Exit status is ${EXIT_STATUS}"
exit "${EXIT_STATUS}"
@@ -344,13 +344,6 @@ jobs:
retention-days: 1
# Zombienet test artifact builds removed from automatic CI pipeline.
# Zombienet tests can be triggered manually via workflow_dispatch:
# gh workflow run zombienet_pezkuwi.yml
# gh workflow run zombienet_pezcumulus.yml
# gh workflow run zombienet_bizinikiwi.yml
# gh workflow run zombienet_teyrchain-template.yml
### Publish ########################
#
@@ -518,9 +511,6 @@ jobs:
username: ${{ secrets.PEZKUWI_DOCKERHUB_USERNAME }}
password: ${{ secrets.PEZKUWI_DOCKERHUB_PASSWORD }}
# build-push-image-bridges-zombienet-tests: removed from automatic CI
# (depends on zombienet artifacts which are now manual-only)
#
#
#
@@ -584,9 +574,3 @@ jobs:
echo "build_success=true" >> $GITHUB_OUTPUT
fi
# Zombienet test triggers removed from automatic CI pipeline.
# Use manual workflow_dispatch to run zombienet tests when needed:
# gh workflow run zombienet_pezkuwi.yml --field build_run_id=<RUN_ID>
# gh workflow run zombienet_pezcumulus.yml --field build_run_id=<RUN_ID>
# gh workflow run zombienet_bizinikiwi.yml --field build_run_id=<RUN_ID>
# gh workflow run zombienet_teyrchain-template.yml --field build_run_id=<RUN_ID>
@@ -1,39 +0,0 @@
name: Check Zombienet Flaky Tests
concurrency:
group: check-zombienet-flaky-tests-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
on:
pull_request:
types: [opened, synchronize, reopened]
paths:
- '.github/zombienet-flaky-tests'
- '.github/scripts/check-zombienet-flaky-tests.sh'
- '.github/workflows/check-zombienet-flaky-tests.yml'
merge_group:
permissions:
contents: read
jobs:
check-flaky-tests:
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- name: Checkout repo
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Validate zombienet-flaky-tests
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
.github/scripts/check-zombienet-flaky-tests.sh .github/zombienet-flaky-tests
- name: Check results
if: failure()
run: |
echo "::error::Validation failed. Please ensure all entries in .github/zombienet-flaky-tests have valid format and reference existing GitHub issues."
echo "Format: <test-name>:<issue-number>"
echo "See .github/ZOMBIENET_FLAKY_TESTS.md for more information."
exit 1
@@ -1,346 +0,0 @@
# Reusable workflow to set various useful variables
# and to perform checks and generate conditions for other workflows.
# Currently it checks if any Rust (build-related) file is changed
# and if the current (caller) workflow file is changed.
# Example:
#
# jobs:
# preflight:
# uses: ./.github/workflows/reusable-preflight.yml
# some-job:
#     needs: preflight
# if: ${{ needs.preflight.outputs.changes_rust }}
# .......
name: Zombienet Preflight
on:
workflow_call:
inputs:
tests_yaml:
required: true
type: string
test_pattern:
required: false
type: string
build_run_id:
required: true
type: string
description: "Build run ID from the build workflow."
ref_slug:
required: false
type: string
# Map the workflow outputs to job outputs
outputs:
changes_bizinikiwi:
value: ${{ jobs.preflight.outputs.changes_bizinikiwi }}
description: |
True iff there are changes in bizinikiwi directory or the current workflow
changes_pezcumulus:
value: ${{ jobs.preflight.outputs.changes_pezcumulus }}
description: |
True iff there are changes in pezcumulus directory or the current workflow
changes_pezkuwi:
value: ${{ jobs.preflight.outputs.changes_pezkuwi }}
description: |
True iff there are changes in pezkuwi directory or the current workflow
changes_bridges:
value: ${{ jobs.preflight.outputs.changes_bridges }}
description: |
True iff there are changes in bridges directory or the current workflow
changes_templates:
value: ${{ jobs.preflight.outputs.changes_templates }}
description: |
True iff there are changes in templates directory or the current workflow
changes_zombienet:
value: ${{ jobs.preflight.outputs.changes_zombienet }}
description: |
True iff there are changes in zombienet tests/actions/scripts or the current workflow
CI_IMAGE:
value: ${{ jobs.preflight.outputs.CI_IMAGE }}
description: "CI image"
DOCKER_IMAGES_VERSION:
value: ${{ jobs.preflight.outputs.DOCKER_IMAGES_VERSION }}
description: |
Version for temp docker images.
SOURCE_REF_SLUG:
value: ${{ jobs.preflight.outputs.SOURCE_REF_SLUG }}
BUILD_RUN_ID:
value: ${{ inputs.build_run_id }}
description: |
Id of the build run, needed to download the artifacts.
# zombienet related vars
ZOMBIENET_PROVIDER:
value: ${{ jobs.preflight.outputs.ZOMBIENET_PROVIDER }}
description: "Provider to use in zombienet tests."
ZOMBIENET_IMAGE:
value: ${{ jobs.preflight.outputs.ZOMBIENET_IMAGE }}
description: "ZOMBIENET CI image"
ZOMBIENET_DEFAULT_RUNNER:
value: ${{ jobs.preflight.outputs.ZOMBIENET_DEFAULT_RUNNER }}
description: |
Main runner for zombienet tests.
ZOMBIENET_LARGE_RUNNER:
value: ${{ jobs.preflight.outputs.ZOMBIENET_LARGE_RUNNER }}
description: |
Large runner for zombienet tests.
DEBUG:
value: ${{ jobs.preflight.outputs.DEBUG }}
description: "Debug value to zombienet v1 tests."
# zombienet-sdk related vars
ZOMBIE_PROVIDER:
value: ${{ jobs.preflight.outputs.ZOMBIE_PROVIDER }}
description: "Provider to use in zombienet-sdk tests."
RUST_LOG:
value: ${{ jobs.preflight.outputs.RUST_LOG }}
    description: "Log value to use in zombienet-sdk tests."
ZOMBIENET_SDK_DEFAULT_RUNNER:
value: ${{ jobs.preflight.outputs.ZOMBIENET_SDK_DEFAULT_RUNNER }}
description: |
Main runner for zombienet-sdk tests.
ZOMBIENET_SDK_LARGE_RUNNER:
value: ${{ jobs.preflight.outputs.ZOMBIENET_SDK_LARGE_RUNNER }}
description: |
Large runner for zombienet-sdk tests.
ZOMBIENET_SDK_IMAGE:
value: ${{ jobs.preflight.outputs.ZOMBIENET_SDK_IMAGE }}
description: "zombienet-sdk CI image"
# common vars
PUSHGATEWAY_URL:
value: ${{ jobs.preflight.outputs.PUSHGATEWAY_URL }}
description: "Gateway (url) to push metrics related to test."
KUBERNETES_CPU_REQUEST:
value: ${{ jobs.preflight.outputs.KUBERNETES_CPU_REQUEST }}
description: "Base cpu (request) for pod runner."
KUBERNETES_MEMORY_REQUEST:
value: ${{ jobs.preflight.outputs.KUBERNETES_MEMORY_REQUEST }}
description: "Base memory (request) for pod runner."
TEMP_IMAGES_BASE:
value: ${{ jobs.preflight.outputs.TEMP_IMAGES_BASE }}
description: |
Base location for 'temp' images used in tests.
FLAKY_TESTS:
value: ${{ jobs.preflight.outputs.FLAKY_TESTS }}
description: |
comma separated list of flaky tests to skip.
TEST_MATRIX:
value: ${{ jobs.preflight.outputs.TEST_MATRIX }}
description: |
JSON formatted test matrix parsed from test yaml
jobs:
#
#
#
preflight:
runs-on: ubuntu-latest
if: github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' || ! contains(github.event.pull_request.labels.*.name, 'T19-skip-zombienet_tests')
outputs:
changes_bizinikiwi: ${{ steps.set_changes.outputs.bizinikiwi_any_changed == 'true' || steps.set_changes.outputs.currentWorkflow_any_changed == 'true' }}
changes_pezcumulus: ${{ steps.set_changes.outputs.pezcumulus_any_changed == 'true' || steps.set_changes.outputs.currentWorkflow_any_changed == 'true' }}
changes_pezkuwi: ${{ steps.set_changes.outputs.pezkuwi_any_changed == 'true' || steps.set_changes.outputs.currentWorkflow_any_changed == 'true' }}
changes_bridges: ${{ steps.set_changes.outputs.bridges_any_changed == 'true' || steps.set_changes.outputs.currentWorkflow_any_changed == 'true' }}
changes_templates: ${{ steps.set_changes.outputs.templates_any_changed == 'true' || steps.set_changes.outputs.currentWorkflow_any_changed == 'true' }}
changes_zombienet: ${{ steps.set_changes.outputs.zombienet_any_changed == 'true' || steps.set_changes.outputs.currentWorkflow_any_changed == 'true' }}
CI_IMAGE: ${{ steps.set_vars.outputs.IMAGE }}
# images versions
DOCKER_IMAGES_VERSION: ${{ steps.set_images_version.outputs.DOCKER_IMAGES_VERSION }}
SOURCE_REF_SLUG: ${{ steps.set_vars.outputs.SOURCE_REF_SLUG }}
# zombienet-env vars
ZOMBIENET_PROVIDER: ${{ steps.set_vars.outputs.ZOMBIENET_PROVIDER }}
ZOMBIENET_IMAGE: ${{ steps.set_vars.outputs.ZOMBIENET_IMAGE }}
ZOMBIENET_DEFAULT_RUNNER: ${{ steps.set_vars.outputs.ZOMBIENET_DEFAULT_RUNNER }}
ZOMBIENET_LARGE_RUNNER: ${{ steps.set_vars.outputs.ZOMBIENET_LARGE_RUNNER }}
PUSHGATEWAY_URL: ${{ steps.set_vars.outputs.PUSHGATEWAY_URL }}
DEBUG: ${{ steps.set_vars.outputs.DEBUG }}
KUBERNETES_CPU_REQUEST: ${{ steps.set_vars.outputs.KUBERNETES_CPU_REQUEST }}
KUBERNETES_MEMORY_REQUEST: ${{ steps.set_vars.outputs.KUBERNETES_MEMORY_REQUEST }}
TEMP_IMAGES_BASE: ${{ steps.set_vars.outputs.TEMP_IMAGES_BASE }}
FLAKY_TESTS: ${{ steps.set_vars.outputs.FLAKY_TESTS }}
TEST_MATRIX: ${{ steps.generate_test_matrix.outputs.TEST_MATRIX }}
# zombienet-sdk vars
RUST_LOG: ${{ steps.set_vars.outputs.RUST_LOG }}
ZOMBIE_PROVIDER: ${{ steps.set_vars.outputs.ZOMBIE_PROVIDER }}
ZOMBIENET_SDK_IMAGE: ${{ steps.set_vars.outputs.ZOMBIENET_SDK_IMAGE }}
ZOMBIENET_SDK_DEFAULT_RUNNER: ${{ steps.set_vars.outputs.ZOMBIENET_SDK_DEFAULT_RUNNER }}
ZOMBIENET_SDK_LARGE_RUNNER: ${{ steps.set_vars.outputs.ZOMBIENET_SDK_LARGE_RUNNER }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
#
# Set changes
#
- name: Current file
id: current_file
shell: bash
run: |
echo "currentWorkflowFile=$(echo ${{ github.workflow_ref }} | sed -nE "s/.*(\.github\/workflows\/[a-zA-Z0-9_-]*\.y[a]?ml)@refs.*/\1/p")" >> $GITHUB_OUTPUT
echo "currentActionDir=$(echo ${{ github.action_path }} | sed -nE "s/.*(\.github\/actions\/[a-zA-Z0-9_-]*)/\1/p")" >> $GITHUB_OUTPUT
- name: Set changes
id: set_changes
uses: tj-actions/changed-files@22103cc46bda19c2b464ffe86db46df6922fd323 #v47.0.5
with:
files_yaml: |
bizinikiwi:
- 'bizinikiwi/**/*'
pezcumulus:
- 'pezcumulus/**/*'
pezkuwi:
- 'pezkuwi/**/*'
bridges:
- 'bridges/**/*'
templates:
- 'templates/**/*'
zombienet:
- '.github/zombienet-tests/**/*'
- '.github/actions/zombienet/**'
- '.github/actions/zombienet-sdk/**'
- '.github/scripts/parse-zombienet-tests.py'
- '.github/scripts/process-logs-zombienet.sh'
- '.github/workflows/zombienet*.yml'
currentWorkflow:
- '${{ steps.current_file.outputs.currentWorkflowFile }}'
- '.github/workflows/zombienet-reusable-preflight.yml'
- '.github/zombienet-env'
- '.github/zombienet-flaky-tests'
#
# Set environment vars (including runner/image)
#
- name: Set vars
id: set_vars
shell: bash
env:
INPUT_REF_SLUG: ${{ inputs.ref_slug }}
run: |
# Determine SOURCE_REF_SLUG
if [[ -n "${INPUT_REF_SLUG}" ]]; then
echo "Using provided ref_slug: ${INPUT_REF_SLUG}"
SOURCE_REF_SLUG="${INPUT_REF_SLUG}"
else
echo "Calculating ref_slug from current context"
export SOURCE_REF_NAME=${{ github.head_ref || github.ref_name }}
SOURCE_REF_SLUG="${SOURCE_REF_NAME//\//_}"
fi
{
echo "SOURCE_REF_SLUG=${SOURCE_REF_SLUG}"
# filter out comments and empty lines
cat .github/zombienet-env | grep -Ev '^\s*#|^\s*$'
. .github/zombienet-env
# Determine if we should use persistent runners (for merge queues)
RUNNER_SUFFIX=""
if [[ $GITHUB_REF_NAME == *"gh-readonly-queue"* ]]; then
RUNNER_SUFFIX="_PERSISTENT"
fi
# Set zombienet v1 configuration
if [[ "$ZOMBIENET_PROVIDER" == "native" ]]; then
echo "ZOMBIENET_IMAGE=${ZOMBIENET_IMAGE_FOR_NATIVE}"
DEFAULT_RUNNER_VAR="ZOMBIENET_DEFAULT_RUNNER_FOR_NATIVE${RUNNER_SUFFIX}"
LARGE_RUNNER_VAR="ZOMBIENET_LARGE_RUNNER_FOR_NATIVE${RUNNER_SUFFIX}"
echo "ZOMBIENET_DEFAULT_RUNNER=${!DEFAULT_RUNNER_VAR}"
echo "ZOMBIENET_LARGE_RUNNER=${!LARGE_RUNNER_VAR}"
else
echo "ZOMBIENET_IMAGE=${ZOMBIENET_IMAGE_FOR_K8S}"
# runner size for k8s is not relevant, it "only" spawns pods and runs the test
echo "ZOMBIENET_DEFAULT_RUNNER=${ZOMBIENET_RUNNER_FOR_K8S}"
echo "ZOMBIENET_LARGE_RUNNER=${ZOMBIENET_RUNNER_FOR_K8S}"
fi
if [[ "$ZOMBIE_PROVIDER" == "native" ]]; then
echo "ZOMBIENET_SDK_IMAGE=${ZOMBIENET_SDK_IMAGE_FOR_NATIVE}"
SDK_DEFAULT_RUNNER_VAR="ZOMBIENET_SDK_DEFAULT_RUNNER_FOR_NATIVE${RUNNER_SUFFIX}"
SDK_LARGE_RUNNER_VAR="ZOMBIENET_SDK_LARGE_RUNNER_FOR_NATIVE${RUNNER_SUFFIX}"
echo "ZOMBIENET_SDK_DEFAULT_RUNNER=${!SDK_DEFAULT_RUNNER_VAR}"
echo "ZOMBIENET_SDK_LARGE_RUNNER=${!SDK_LARGE_RUNNER_VAR}"
else
echo "ZOMBIENET_SDK_IMAGE=${ZOMBIENET_SDK_IMAGE_FOR_K8S}"
# runner size for k8s is not relevant, it "only" spawns pods and runs the test
echo "ZOMBIENET_SDK_DEFAULT_RUNNER=${ZOMBIENET_SDK_RUNNER_FOR_K8S}"
echo "ZOMBIENET_SDK_LARGE_RUNNER=${ZOMBIENET_SDK_RUNNER_FOR_K8S}"
fi
# Trick for multline strings: https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/workflow-commands-for-github-actions#example-of-a-multiline-string
echo 'FLAKY_TESTS<<EOF'
cat .github/zombienet-flaky-tests | sed 's/:[0-9]*$//'
echo EOF
# global img from ci
cat .github/env
} >> $GITHUB_OUTPUT
cat .github/zombienet-env
cat .github/env
echo "FLAKY_TESTS:"
cat .github/zombienet-flaky-tests
#
#
#
- name: Set docker images version
id: set_images_version
shell: bash
run: |
export DOCKER_IMAGES_VERSION=${{ github.event.pull_request.head.sha }}
if [[ ${{ github.event_name }} == "merge_group" || ${{ github.event_name }} == "workflow_dispatch" || ${{ github.event_name }} == "push" ]]; then
export DOCKER_IMAGES_VERSION="${GITHUB_SHA}";
fi
echo "DOCKER_IMAGES_VERSION=${DOCKER_IMAGES_VERSION}" >> $GITHUB_OUTPUT
- name: log
shell: bash
run: |
echo "workflow file: ${{ steps.current_file.outputs.currentWorkflowFile }}"
echo "Modified: ${{ steps.set_changes.outputs.modified_keys }}"
echo "CI_IMAGE: ${{ steps.set_vars.outputs.IMAGE }}"
echo "ZOMBIENET_IMAGE: ${{ steps.set_vars.outputs.ZOMBIENET_IMAGE }}"
echo "ZOMBIENET_SDK_IMAGE: ${{ steps.set_vars.outputs.ZOMBIENET_SDK_IMAGE }}"
- name: Generate test matrix
id: generate_test_matrix
shell: bash
env:
TESTS_YAML: ${{ inputs.tests_yaml }}
TEST_PATTERN: ${{ inputs.test_pattern || '' }}
run: |
python3 .github/scripts/parse-zombienet-tests.py \
--matrix ${TESTS_YAML} \
--flaky-tests "${{ steps.set_vars.outputs.FLAKY_TESTS }}" \
--test-pattern "${TEST_PATTERN}" > matrix.json
echo "TEST_MATRIX=$(cat matrix.json)" >> $GITHUB_OUTPUT
echo "TEST_MATRIX:"
cat matrix.json | jq '.'
-122
View File
@@ -1,122 +0,0 @@
name: Zombienet Bizinikiwi
on:
workflow_call:
inputs:
build_run_id:
type: string
description: "Build run ID from the build workflow."
required: true
ref_slug:
type: string
description: "Source ref slug from the build workflow."
required: false
test_pattern:
type: string
description: "Run tests which names match this pattern (also flaky)"
default: ""
required: false
workflow_dispatch:
inputs:
test_pattern:
type: string
description: "Run tests which names match this pattern (also flaky)"
default: ""
required: false
concurrency:
group: bizinikiwi-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-${{ github.run_id }}
cancel-in-progress: true
permissions: read-all
env:
FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: 1
GHA_CLUSTER_SERVER_ADDR: "https://kubernetes.default:443"
# use spot by default
X_INFRA_INSTANCE: "spot"
# don't retry sdk tests
NEXTEST_RETRIES: 0
KUBECONFIG: "/data/config"
ZOMBIE_CLEANER_DISABLED: 1
# DB generated from commit: https://github.com/pezkuwichain/pezkuwi-sdk/commit/868788a5bff3ef94869bd36432726703fe3b4e96
# TODO: As a workaround for https://github.com/pezkuwichain/pezkuwi-sdk/issues/2568 the DB was generated in archive mode.
# After the issue is fixed, we should replace it with a pruned version of the DB.
DB_SNAPSHOT: "https://storage.googleapis.com/zombienet-db-snaps/bizinikiwi/0001-basic-warp-sync/chains-9677807d738b951e9f6c82e5fd15518eb0ae0419.tgz"
DB_BLOCK_HEIGHT: 56687
jobs:
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/zombienet-reusable-preflight.yml
with:
tests_yaml: .github/zombienet-tests/zombienet_bizinikiwi_tests.yml
test_pattern: ${{ inputs.test_pattern }}
build_run_id: ${{ inputs.build_run_id }}
ref_slug: ${{ inputs.ref_slug }}
zombienet-bizinikiwi-tests:
name: ${{ matrix.test.job-name }}
runs-on: ${{ matrix.test.runner-type == 'large' && (matrix.test.use-zombienet-sdk && needs.preflight.outputs.ZOMBIENET_SDK_LARGE_RUNNER || needs.preflight.outputs.ZOMBIENET_LARGE_RUNNER) || (matrix.test.use-zombienet-sdk && needs.preflight.outputs.ZOMBIENET_SDK_DEFAULT_RUNNER || needs.preflight.outputs.ZOMBIENET_DEFAULT_RUNNER) }}
timeout-minutes: 70 # 60 for test + 10 for send logs
needs: [preflight]
if: ${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' || github.event_name == 'merge_group' || (needs.preflight.outputs.changes_bizinikiwi == 'true' || needs.preflight.outputs.changes_pezkuwi == 'true' || needs.preflight.outputs.changes_zombienet == 'true') }}
container:
image: ${{ matrix.test.use-zombienet-sdk && needs.preflight.outputs.ZOMBIENET_SDK_IMAGE || needs.preflight.outputs.ZOMBIENET_IMAGE }}
options: -v /tmp/zombienet:/tmp/zombienet
env:
ZOMBIENET_INTEGRATION_TEST_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/pezkuwi-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
DEBUG: ${{ needs.preflight.outputs.DEBUG }}
ZOMBIENET_PROVIDER: ${{ needs.preflight.outputs.ZOMBIENET_PROVIDER }}
strategy:
fail-fast: false
matrix:
test: ${{ fromJson(needs.preflight.outputs.TEST_MATRIX) }}
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Additional setup
if: ${{ matrix.test.additional-setup }}
shell: bash
run: ${{ matrix.test.additional-setup }}
- name: zombienet_test (v1)
timeout-minutes: 60
uses: ./.github/actions/zombienet
with:
test-definition: ${{ matrix.test.test-definition }}
job-name: ${{ matrix.test.job-name }}
local-dir: ${{ matrix.test.local-dir }}
concurrency: ${{ matrix.test.concurrency || 1 }}
gh-token: ${{ secrets.GITHUB_TOKEN }}
build-id: ${{ needs.preflight.outputs.BUILD_RUN_ID }}
ref-slug: ${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: process_logs
if: ${{ always() }}
shell: bash
run: |
echo "Processing log files"
echo "::group::Logs"
# do not fail the whole run if this step fails
if ! ./.github/scripts/process-logs-zombienet.sh ; then
echo "::endgroup::"
echo "::warning ::WARNING: Failed to process logs"
else
echo "::endgroup::"
fi
- name: upload_logs
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
if: ${{ always() }}
with:
name: zombienet-logs-${{ matrix.test.job-name }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
-133
View File
@@ -1,133 +0,0 @@
name: Zombienet Pezcumulus
on:
workflow_call:
inputs:
build_run_id:
type: string
description: "Build run ID from the build workflow."
required: true
ref_slug:
type: string
description: "Source ref slug from the build workflow."
required: false
test_pattern:
type: string
description: "Run tests which names match this pattern (also flaky)"
default: ""
required: false
workflow_dispatch:
inputs:
test_pattern:
type: string
description: "Run tests which names match this pattern (also flaky)"
default: ""
required: false
concurrency:
group: pezcumulus-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-${{ github.run_id }}
cancel-in-progress: true
permissions: read-all
env:
FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: 1
LOCAL_DIR: "./pezcumulus/zombienet/tests"
GHA_CLUSTER_SERVER_ADDR: "https://kubernetes.default:443"
# use spot by default
X_INFRA_INSTANCE: "spot"
# don't retry sdk tests
NEXTEST_RETRIES: 0
KUBECONFIG: "/data/config"
ZOMBIE_CLEANER_DISABLED: 1
# only run if we have changes in [bizinikiwi, pezcumulus, pezkuwi] directories or this workflow.
jobs:
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/zombienet-reusable-preflight.yml
with:
tests_yaml: .github/zombienet-tests/zombienet_pezcumulus_tests.yml
test_pattern: ${{ inputs.test_pattern }}
build_run_id: ${{ inputs.build_run_id }}
ref_slug: ${{ inputs.ref_slug }}
zombienet-pezcumulus-tests:
name: ${{ matrix.test.job-name }}
runs-on: ${{ matrix.test.runner-type == 'large' && needs.preflight.outputs.ZOMBIENET_SDK_LARGE_RUNNER || needs.preflight.outputs.ZOMBIENET_SDK_DEFAULT_RUNNER }}
timeout-minutes: 70 # 60 for test + 10 for send logs
needs: [preflight]
# Run if: called from build workflow OR merge_group OR (changes detected)
if: ${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' || github.event_name == 'merge_group' || (needs.preflight.outputs.changes_bizinikiwi == 'true' || needs.preflight.outputs.changes_pezcumulus == 'true' || needs.preflight.outputs.changes_pezkuwi == 'true' || needs.preflight.outputs.changes_zombienet == 'true') }}
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_SDK_IMAGE }}
options: -v /tmp/zombienet:/tmp/zombienet
env:
PEZKUWI_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/pezkuwi-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
CUMULUS_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/${{ matrix.test.pezcumulus-image }}:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
RUST_LOG: ${{ needs.preflight.outputs.RUST_LOG }}
ZOMBIE_PROVIDER: ${{ needs.preflight.outputs.ZOMBIE_PROVIDER }}
strategy:
fail-fast: false
matrix:
test: ${{ fromJson(needs.preflight.outputs.TEST_MATRIX) }}
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
if: ${{ matrix.test.needs-wasm-binary }}
with:
name: build-test-teyrchain-${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
github-token: ${{ secrets.GITHUB_TOKEN }}
run-id: ${{ needs.preflight.outputs.BUILD_RUN_ID }}
- name: provide_wasm_binary
if: ${{ matrix.test.needs-wasm-binary }}
run: |
tar -xvf artifacts.tar
ls -ltr artifacts/*
cp ./artifacts/zombienet/wasm_binary_spec_version_incremented.rs.compact.compressed.wasm /tmp/
cp ./artifacts/zombienet/wasm_binary_elastic_scaling.rs.compact.compressed.wasm /tmp/
cp ./artifacts/zombienet/wasm_binary_elastic_scaling_12s_slot.rs.compact.compressed.wasm /tmp/
ls -ltr /tmp
rm -rf artifacts
- name: zombienet_test
timeout-minutes: 60
uses: ./.github/actions/zombienet-sdk
with:
test-filter: ${{ matrix.test.test-filter }}
job-name: ${{ matrix.test.job-name }}
prefix: "pezcumulus"
gh-token: ${{ secrets.GITHUB_TOKEN }}
build-id: ${{ needs.preflight.outputs.BUILD_RUN_ID }}
ref-slug: ${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: process_logs
if: ${{ always() }}
shell: bash
run: |
echo "Processing log files"
echo "::group::Logs"
# do not fail the whole run if this step fails
if ! ./.github/scripts/process-logs-zombienet.sh ; then
echo "::endgroup::"
echo "::warning ::WARNING: Failed to process logs"
else
echo "::endgroup::"
fi
- name: upload_logs
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
if: ${{ always() }}
with:
name: zombienet-logs-${{ matrix.test.job-name }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
-141
View File
@@ -1,141 +0,0 @@
name: Zombienet Pezkuwi
on:
workflow_call:
inputs:
build_run_id:
type: string
description: "Build run ID from the build workflow."
required: true
ref_slug:
type: string
description: "Source ref slug from the build workflow."
required: false
test_pattern:
type: string
description: "Run tests which names match this pattern (also flaky)"
default: ""
required: false
workflow_dispatch:
inputs:
test_pattern:
type: string
description: "Run tests which names match this pattern (also flaky)"
default: ""
required: false
concurrency:
group: pezkuwi-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-${{ github.run_id }}
cancel-in-progress: true
permissions: read-all
env:
FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: 1
LOCAL_DIR: "./pezkuwi/zombienet_tests"
GHA_CLUSTER_SERVER_ADDR: "https://kubernetes.default:443"
# use spot by default
X_INFRA_INSTANCE: "spot"
# don't retry sdk tests
NEXTEST_RETRIES: 0
KUBECONFIG: "/data/config"
ZOMBIE_CLEANER_DISABLED: 1
# only run if we have changes in [bizinikiwi, pezkuwi] directories or this workflow.
jobs:
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/zombienet-reusable-preflight.yml
with:
tests_yaml: .github/zombienet-tests/zombienet_pezkuwi_tests.yml
test_pattern: ${{ inputs.test_pattern }}
build_run_id: ${{ inputs.build_run_id }}
ref_slug: ${{ inputs.ref_slug }}
zombienet-pezkuwi-tests:
name: ${{ matrix.test.job-name }}
runs-on: ${{ matrix.test.runner-type == 'large' && (matrix.test.use-zombienet-sdk && needs.preflight.outputs.ZOMBIENET_SDK_LARGE_RUNNER || needs.preflight.outputs.ZOMBIENET_LARGE_RUNNER) || (matrix.test.use-zombienet-sdk && needs.preflight.outputs.ZOMBIENET_SDK_DEFAULT_RUNNER || needs.preflight.outputs.ZOMBIENET_DEFAULT_RUNNER) }}
timeout-minutes: 70 # 60 for test + 10 for send logs
needs: [preflight]
if: ${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' || github.event_name == 'merge_group' || (needs.preflight.outputs.changes_bizinikiwi == 'true' || needs.preflight.outputs.changes_pezkuwi == 'true' || needs.preflight.outputs.changes_zombienet == 'true') }}
container:
image: ${{ matrix.test.use-zombienet-sdk && needs.preflight.outputs.ZOMBIENET_SDK_IMAGE || needs.preflight.outputs.ZOMBIENET_IMAGE }}
options: -v /tmp/zombienet:/tmp/zombienet
env:
ZOMBIENET_INTEGRATION_TEST_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/pezkuwi-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
PEZKUWI_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/pezkuwi-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
COL_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/colander:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
CUMULUS_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/${{ matrix.test.pezcumulus-image || 'pezkuwi-teyrchain-debug' }}:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
MALUS_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/malus:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
DEBUG: ${{ needs.preflight.outputs.DEBUG }}
ZOMBIENET_PROVIDER: ${{ needs.preflight.outputs.ZOMBIENET_PROVIDER }}
RUST_LOG: ${{ needs.preflight.outputs.RUST_LOG }}
ZOMBIE_PROVIDER: ${{ needs.preflight.outputs.ZOMBIE_PROVIDER }}
strategy:
fail-fast: false
matrix:
test: ${{ fromJson(needs.preflight.outputs.TEST_MATRIX) }}
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Set additional environment variables
if: ${{ matrix.test.additional-env }}
shell: bash
run: |
echo '${{ toJson(matrix.test.additional-env) }}' | jq -r 'to_entries[] | "\(.key)=\(.value)"' >> $GITHUB_ENV
- name: Additional setup
if: ${{ matrix.test.additional-setup }}
shell: bash
run: ${{ matrix.test.additional-setup }}
- name: zombienet_test (v1)
if: ${{ !matrix.test.use-zombienet-sdk }}
timeout-minutes: 60
uses: ./.github/actions/zombienet
with:
test-definition: ${{ matrix.test.test-definition }}
job-name: ${{ matrix.test.job-name }}
local-dir: ${{ matrix.test.local-dir }}
concurrency: ${{ matrix.test.concurrency || 1 }}
gh-token: ${{ secrets.GITHUB_TOKEN }}
build-id: ${{ needs.preflight.outputs.BUILD_RUN_ID }}
ref-slug: ${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: zombienet_test (sdk)
if: ${{ matrix.test.use-zombienet-sdk }}
uses: ./.github/actions/zombienet-sdk
with:
test-filter: ${{ matrix.test.test-filter }}
job-name: ${{ matrix.test.job-name }}
prefix: "pezkuwi"
gh-token: ${{ secrets.GITHUB_TOKEN }}
build-id: ${{ needs.preflight.outputs.BUILD_RUN_ID }}
ref-slug: ${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: process_logs
if: ${{ always() }}
shell: bash
run: |
echo "Processing log files"
echo "::group::Logs"
# do not fail the whole run if this step fails
if ! ./.github/scripts/process-logs-zombienet.sh ; then
echo "::endgroup::"
echo "::warning ::WARNING: Failed to process logs"
else
echo "::endgroup::"
fi
- name: upload_logs
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
if: ${{ always() }}
with:
name: zombienet-logs-${{ matrix.test.job-name }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
@@ -1,110 +0,0 @@
name: Zombienet Teyrchain Templates
on:
workflow_call:
inputs:
build_run_id:
type: string
description: "Build run ID from the build workflow."
required: true
ref_slug:
type: string
description: "Source ref slug from the build workflow."
required: false
test_pattern:
type: string
description: "Run tests which names match this pattern (also flaky)"
default: ""
required: false
workflow_dispatch:
inputs:
test_pattern:
type: string
description: "Run tests which names match this pattern (also flaky)"
default: ""
required: false
concurrency:
group: teyrchain-template-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}-${{ github.run_id }}
cancel-in-progress: true
permissions: read-all
env:
FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: 1
GHA_CLUSTER_SERVER_ADDR: "https://kubernetes.default:443"
# use spot by default
X_INFRA_INSTANCE: "spot"
# only run if we have changes in [bizinikiwi, pezkuwi] directories or this workflow.
jobs:
isdraft:
uses: ./.github/workflows/reusable-isdraft.yml
preflight:
needs: isdraft
uses: ./.github/workflows/zombienet-reusable-preflight.yml
with:
tests_yaml: .github/zombienet-tests/zombienet_teyrchain-template_tests.yml
test_pattern: ${{ inputs.test_pattern }}
build_run_id: ${{ inputs.build_run_id }}
ref_slug: ${{ inputs.ref_slug }}
zombienet-teyrchain-template-tests:
name: ${{ matrix.test.job-name }}
runs-on: ${{ matrix.test.runner-type == 'large' && needs.preflight.outputs.ZOMBIENET_SDK_LARGE_RUNNER || needs.preflight.outputs.ZOMBIENET_SDK_DEFAULT_RUNNER }}
timeout-minutes: 40 # 30 for test + 10 for send logs
needs: [preflight]
# Run if: called from build workflow OR merge_group OR (changes detected)
if: ${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch' || github.event_name == 'merge_group' || (needs.preflight.outputs.changes_bizinikiwi == 'true' || needs.preflight.outputs.changes_pezcumulus == 'true' || needs.preflight.outputs.changes_pezkuwi == 'true' || needs.preflight.outputs.changes_zombienet == 'true') }}
container:
image: ${{ needs.preflight.outputs.ZOMBIENET_SDK_IMAGE }}
options: -v /tmp/zombienet:/tmp/zombienet
env:
PEZKUWI_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/pezkuwi-debug:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
CUMULUS_IMAGE: "${{ needs.preflight.outputs.TEMP_IMAGES_BASE }}/${{ matrix.test.pezcumulus-image }}:${{ needs.preflight.outputs.DOCKER_IMAGES_VERSION }}"
RUST_LOG: ${{ needs.preflight.outputs.RUST_LOG }}
ZOMBIE_PROVIDER: ${{ needs.preflight.outputs.ZOMBIE_PROVIDER }}
strategy:
fail-fast: false
matrix:
test: ${{ fromJson(needs.preflight.outputs.TEST_MATRIX) }}
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: zombienet_test
timeout-minutes: 30
uses: ./.github/actions/zombienet-sdk
with:
test-filter: ${{ matrix.test.test-filter }}
job-name: ${{ matrix.test.job-name }}
prefix: "teyrchain-templates"
gh-token: ${{ secrets.GITHUB_TOKEN }}
build-id: ${{ needs.preflight.outputs.BUILD_RUN_ID }}
ref-slug: ${{ needs.preflight.outputs.SOURCE_REF_SLUG }}
- name: process_logs
if: ${{ always() }}
shell: bash
run: |
echo "Processing log files"
echo "::group::Logs"
# do not fail the whole run if this step fails
if ! ./.github/scripts/process-logs-zombienet.sh ; then
echo "::endgroup::"
echo "::warning ::WARNING: Failed to process logs"
else
echo "::endgroup::"
fi
- name: upload_logs
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
if: ${{ always() }}
with:
name: zombienet-logs-${{ matrix.test.job-name }}-${{ github.sha }}
path: |
/tmp/zombie*/logs/*
-27
View File
@@ -1,27 +0,0 @@
# zombienet settings
ZOMBIENET_PROVIDER=native
ZOMBIENET_IMAGE_FOR_NATIVE=docker.io/paritytech/zombienet:v1.3.133
ZOMBIENET_DEFAULT_RUNNER_FOR_NATIVE=pezkuwi-runner
ZOMBIENET_LARGE_RUNNER_FOR_NATIVE=pezkuwi-runner
ZOMBIENET_DEFAULT_RUNNER_FOR_NATIVE_PERSISTENT=pezkuwi-runner
ZOMBIENET_LARGE_RUNNER_FOR_NATIVE_PERSISTENT=pezkuwi-runner
ZOMBIENET_IMAGE_FOR_K8S=docker.io/paritytech/zombienet:v1.3.133
ZOMBIENET_RUNNER_FOR_K8S=pezkuwi-runner
DEBUG=zombie,zombie::network-node,zombie::kube::client::logs
# zombienet-sdk settings
ZOMBIE_PROVIDER=native
ZOMBIENET_SDK_IMAGE_FOR_NATIVE=ghcr.io/pezkuwichain/ci-unified:bullseye-1.88.0-2025-06-27-v202511141243
ZOMBIENET_SDK_DEFAULT_RUNNER_FOR_NATIVE=pezkuwi-runner
ZOMBIENET_SDK_LARGE_RUNNER_FOR_NATIVE=pezkuwi-runner
ZOMBIENET_SDK_DEFAULT_RUNNER_FOR_NATIVE_PERSISTENT=pezkuwi-runner
ZOMBIENET_SDK_LARGE_RUNNER_FOR_NATIVE_PERSISTENT=pezkuwi-runner
ZOMBIENET_SDK_IMAGE_FOR_K8S=docker.io/paritytech/zombienet:v1.3.133
ZOMBIENET_SDK_RUNNER_FOR_K8S=pezkuwi-runner
RUST_LOG=info,zombienet_orchestrator=trace,cumulus_zombienet_sdk_helpers=debug
# common settings
PUSHGATEWAY_URL=http://prometheus-pushgateway.monitoring.svc.cluster.local:9091/metrics/job/zombie-metrics
KUBERNETES_CPU_REQUEST=512m
KUBERNETES_MEMORY_REQUEST=1Gi
TEMP_IMAGES_BASE=ghcr.io/pezkuwichain
View File
-75
View File
@@ -1,75 +0,0 @@
# Zombienet Tests
This folder contains zombienet test definitions for CI execution.
## Structure
- **Test definitions**: YAML files defining test matrices (e.g., `zombienet_substrate_tests.yml`)
- **Flaky tests**: Listed in `.github/zombienet-flaky-tests` - tests with non-deterministic behavior
- **Parser**: `.github/scripts/parse-zombienet-tests.py` converts YAML to GitHub Actions matrix
## Benefits
- Easy test maintenance (add/remove tests)
- Efficient flaky test handling
- Pattern-based test execution for debugging
## Manual Workflow Triggering
### Prerequisites
Before using the dispatch script, you must:
1. **Create a branch** with your changes
2. **Create a Pull Request** for that branch
3. **Ensure CI starts building images** - the PR triggers image builds that the `preflight / wait_build_images` step depends on
4. [OPTIONAL] **Wait for image builds to complete** - zombienet tests require these images.
But if we don't wait then the job triggered by the script will wait for images if their building is in progress.
**Important**: When you push new changes to the PR, CI will rebuild the images. Any jobs triggered after the rebuild will use the updated images.
**Image Retention**: CI images have a 1-day retention period by default. For long-term testing (e.g., over weekends) without pushing changes, temporarily extend the retention by updating the `retention-days` value in `.github/workflows/build-publish-images.yml` to the required number of days.
### Usage
The dispatch script triggers GitHub Actions workflows remotely and monitors their execution.
The script should be executed on developer's machine.
Use `.github/scripts/dispatch-zombienet-workflow.sh`:
```bash
Usage: .github/scripts/dispatch-zombienet-workflow.sh -w <workflow-file> -b <branch> [-m max-triggers] [-p test-pattern]
-w: Workflow file (required)
-b: Branch name (required)
-m: Max triggers (optional, default: infinite)
-p: Test pattern (optional, supports regex)
```
The script automatically creates a CSV file (`workflow_results_YYYYMMDD_HHMMSS.csv`) containing job results with columns: job_id, job_name, conclusion, started_at, branch, job_url.
### Examples
**Run workflow 5 times (respects flaky test exclusions):**
```bash
.github/scripts/dispatch-zombienet-workflow.sh -w zombienet_substrate.yml -b "my-branch" -m 5
```
**Run specific test infinitely (includes flaky tests):**
```bash
.github/scripts/dispatch-zombienet-workflow.sh -w zombienet_substrate.yml -b "my-branch" -p zombienet-bizinikiwi-0000-block-building
```
**Run multiple specific tests using regex pattern:**
```bash
.github/scripts/dispatch-zombienet-workflow.sh -w zombienet_cumulus.yml -b "my-branch" -p "zombienet-pezcumulus-0002-pov_recovery|zombienet-pezcumulus-0006-rpc_collator_builds_blocks"
```
### Requirements
- Run from `pezkuwi-sdk` repository root
- Requires `gh` CLI (will prompt for login on first use)
## Flaky Tests
Flaky tests should have corresponding issues in the [Zombienet CI reliability project](https://github.com/orgs/pezkuwichain/projects/216/views/1).
@@ -1,32 +0,0 @@
- job-name: "zombienet-bizinikiwi-0000-block-building"
test-definition: "block-building.zndsl"
local-dir: "./bizinikiwi/zombienet/0000-block-building"
runner-type: "default"
concurrency: 4
use-zombienet-sdk: false
- job-name: "zombienet-bizinikiwi-0001-basic-warp-sync"
test-definition: "test-warp-sync.zndsl"
local-dir: "./bizinikiwi/zombienet/0001-basic-warp-sync"
runner-type: "default"
concurrency: 4
use-zombienet-sdk: false
# TODO: Disabled, fails 1 in 50 runs
- job-name: "zombienet-bizinikiwi-0002-validators-warp-sync"
test-definition: "test-validators-warp-sync.zndsl"
local-dir: "./bizinikiwi/zombienet/0002-validators-warp-sync"
runner-type: "default"
concurrency: 4
use-zombienet-sdk: false
additional-setup: |
cp --remove-destination ./bizinikiwi/zombienet/0001-basic-warp-sync/chain-spec.json ./bizinikiwi/zombienet/0002-validators-warp-sync
- job-name: "zombienet-bizinikiwi-0003-block-building-warp-sync"
test-definition: "test-block-building-warp-sync.zndsl"
local-dir: "./bizinikiwi/zombienet/0003-block-building-warp-sync"
runner-type: "default"
concurrency: 4
use-zombienet-sdk: false
additional-setup: |
cp --remove-destination ./bizinikiwi/zombienet/0001-basic-warp-sync/chain-spec.json ./bizinikiwi/zombienet/0003-block-building-warp-sync
@@ -1,88 +0,0 @@
- job-name: "zombienet-pezcumulus-0001-sync_blocks_from_tip_without_connected_collator"
test-filter: "zombie_ci::sync_blocks::sync_blocks_from_tip_without_connected_collator"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
use-zombienet-sdk: true
- job-name: "zombienet-pezcumulus-0002-pov_recovery"
test-filter: "zombie_ci::pov_recovery::pov_recovery"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
use-zombienet-sdk: true
- job-name: "zombienet-pezcumulus-0003-full_node_catching_up"
test-filter: "zombie_ci::full_node_catching_up::full_node_catching_up"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
- job-name: "zombienet-pezcumulus-0004-runtime_upgrade"
test-filter: "zombie_ci::runtime_upgrade::runtime_upgrade"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
needs-wasm-binary: true
use-zombienet-sdk: true
- job-name: "zombienet-pezcumulus-0005-migrate_solo_to_para"
test-filter: "zombie_ci::migrate_solo::migrate_solo_to_para"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
use-zombienet-sdk: true
- job-name: "zombienet-pezcumulus-0006-rpc_collator_builds_blocks"
test-filter: "zombie_ci::rpc_collator_build_blocks::rpc_collator_builds_blocks"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
use-zombienet-sdk: true
- job-name: "zombienet-pezcumulus-0007-full_node_warp_sync"
test-filter: "zombie_ci::full_node_warp_sync::full_node_warp_sync"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
use-zombienet-sdk: true
- job-name: "zombienet-pezcumulus-0008-elastic_authoring"
test-filter: "zombie_ci::elastic_scaling::slot_based_authoring::elastic_scaling_slot_based_authoring"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
use-zombienet-sdk: true
# Disabled, occasionally fails
# See https://github.com/pezkuwichain/pezkuwi-sdk/issues/8986
- job-name: "zombienet-pezcumulus-0009-elastic_scaling_pov_recovery"
test-filter: "zombie_ci::elastic_scaling::pov_recovery::elastic_scaling_pov_recovery"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
use-zombienet-sdk: true
# Disabled, occasionally fails.
# See https://github.com/pezkuwichain/pezkuwi-sdk/issues/8999
- job-name: "zombienet-pezcumulus-0010-elastic_scaling_multiple_block_per_slot"
test-filter: "zombie_ci::elastic_scaling::multiple_blocks_per_slot::elastic_scaling_multiple_blocks_per_slot"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
use-zombienet-sdk: true
- job-name: "zombienet-pezcumulus-0011-dht-bootnodes"
test-filter: "zombie_ci::bootnodes::dht_bootnodes_test"
runner-type: "default"
pezcumulus-image: "pezkuwi-teyrchain-debug"
use-zombienet-sdk: true
- job-name: "zombienet-pezcumulus-0012-teyrchain_extrinsic_gets_finalized"
test-filter: "zombie_ci::teyrchain_extrinsic_get_finalized::teyrchain_extrinsic_gets_finalized"
runner-type: "default"
pezcumulus-image: "pezkuwi-teyrchain-debug"
use-zombienet-sdk: true
- job-name: "zombienet-pezcumulus-0013-elastic_scaling_slot_based_rp_offset"
test-filter: "zombie_ci::elastic_scaling::slot_based_rp_offset::elastic_scaling_slot_based_relay_parent_offset_test"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
use-zombienet-sdk: true
- job-name: "zombienet-pezcumulus-0014-elastic_scaling_upgrade_to_3_cores"
test-filter: "zombie_ci::elastic_scaling::upgrade_to_3_cores::elastic_scaling_upgrade_to_3_cores"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
use-zombienet-sdk: true
needs-wasm-binary: true
@@ -1,254 +0,0 @@
# Functional tests using traditional zombienet
- job-name: "zombienet-pezkuwi-functional-0001-teyrchains-pvf"
test-definition: "0001-teyrchains-pvf.zndsl"
local-dir: "./pezkuwi/zombienet_tests/functional"
runner-type: "default"
concurrency: 1
use-zombienet-sdk: false
- job-name: "zombienet-pezkuwi-functional-0002-teyrchains-disputes"
test-definition: "0002-teyrchains-disputes.zndsl"
local-dir: "./pezkuwi/zombienet_tests/functional"
runner-type: "default"
concurrency: 1
use-zombienet-sdk: false
- job-name: "zombienet-pezkuwi-functional-0003-beefy-and-mmr"
test-definition: "0003-beefy-and-mmr.zndsl"
local-dir: "./pezkuwi/zombienet_tests/functional"
runner-type: "default"
use-zombienet-sdk: false
- job-name: "zombienet-pezkuwi-functional-0004-teyrchains-disputes-garbage-candidate"
test-definition: "0004-teyrchains-garbage-candidate.zndsl"
local-dir: "./pezkuwi/zombienet_tests/functional"
runner-type: "default"
use-zombienet-sdk: false
- job-name: "zombienet-pezkuwi-functional-0006-teyrchains-max-tranche0"
test-definition: "0006-teyrchains-max-tranche0.zndsl"
local-dir: "./pezkuwi/zombienet_tests/functional"
runner-type: "default"
use-zombienet-sdk: false
- job-name: "zombienet-pezkuwi-functional-0007-dispute-freshly-finalized"
test-definition: "0007-dispute-freshly-finalized.zndsl"
local-dir: "./pezkuwi/zombienet_tests/functional"
runner-type: "default"
use-zombienet-sdk: false
- job-name: "zombienet-pezkuwi-functional-0013-systematic-chunk-recovery"
test-definition: "0013-systematic-chunk-recovery.zndsl"
local-dir: "./pezkuwi/zombienet_tests/functional"
runner-type: "default"
use-zombienet-sdk: false
- job-name: "zombienet-pezkuwi-functional-0014-chunk-fetching-network-compatibility"
test-definition: "0014-chunk-fetching-network-compatibility.zndsl"
local-dir: "./pezkuwi/zombienet_tests/functional"
runner-type: "default"
use-zombienet-sdk: false
additional-setup: |
BIN_DIR="$(pwd)/bin_old"
mkdir -p $BIN_DIR
for bin in pezkuwi pezkuwi-teyrchain; do
OLD_NAME="$bin-old"
echo "downloading $bin as $OLD_NAME in $BIN_DIR";
curl -L -o $BIN_DIR/$OLD_NAME https://github.com/pezkuwichain/pezkuwi-sdk/releases/download/pezkuwi-v1.7.0/$bin
chmod 755 $BIN_DIR/$OLD_NAME;
done
for bin in pezkuwi-execute-worker pezkuwi-prepare-worker; do
OLD_NAME="$bin"
echo "downloading $bin as $OLD_NAME in $BIN_DIR";
curl -L -o $BIN_DIR/$OLD_NAME https://github.com/pezkuwichain/pezkuwi-sdk/releases/download/pezkuwi-v1.7.0/$bin
chmod 755 $BIN_DIR/$OLD_NAME;
done
ls -ltr $BIN_DIR
export PATH=$BIN_DIR:$PATH
echo "PATH=$PATH" >> $GITHUB_ENV
echo "OLD_SUFFIX=-old" >> $GITHUB_ENV
additional-env:
OLD_SUFFIX: "-old"
- job-name: "zombienet-pezkuwi-functional-0015-coretime-shared-core"
test-definition: "0015-coretime-shared-core.zndsl"
local-dir: "./pezkuwi/zombienet_tests/functional"
runner-type: "default"
use-zombienet-sdk: false
additional-setup: |
cp --remove-destination ./pezkuwi/zombienet_tests/assign-core.js ./pezkuwi/zombienet_tests/functional
- job-name: "zombienet-pezkuwi-functional-0019-coretime-collation-fetching-fairness"
test-definition: "0019-coretime-collation-fetching-fairness.zndsl"
local-dir: "./pezkuwi/zombienet_tests/functional"
runner-type: "default"
use-zombienet-sdk: false
additional-setup: |
cp --remove-destination ./pezkuwi/zombienet_tests/assign-core.js ./pezkuwi/zombienet_tests/functional
# Smoke tests using traditional zombienet
- job-name: "zombienet-pezkuwi-smoke-0001-teyrchains-smoke-test"
test-definition: "0001-teyrchains-smoke-test.zndsl"
local-dir: "./pezkuwi/zombienet_tests/smoke"
runner-type: "default"
use-zombienet-sdk: false
- job-name: "zombienet-pezkuwi-smoke-0002-teyrchains-teyrchains-upgrade-smoke"
test-definition: "0002-teyrchains-upgrade-smoke-test.zndsl"
local-dir: "./pezkuwi/zombienet_tests/smoke"
runner-type: "default"
use-zombienet-sdk: false
# TODO: Disabled
- job-name: "zombienet-pezkuwi-smoke-0003-deregister-register-validator"
test-definition: "0003-deregister-register-validator-smoke.zndsl"
local-dir: "./pezkuwi/zombienet_tests/smoke"
runner-type: "default"
use-zombienet-sdk: false
- job-name: "zombienet-pezkuwi-smoke-0004-coretime-smoke-test"
test-definition: "0004-coretime-smoke-test.zndsl"
local-dir: "./pezkuwi/zombienet_tests/smoke"
runner-type: "default"
use-zombienet-sdk: false
- job-name: "zombienet-pezkuwi-smoke-0005-precompile-pvf-smoke"
test-definition: "0005-precompile-pvf-smoke.zndsl"
local-dir: "./pezkuwi/zombienet_tests/smoke"
runner-type: "default"
use-zombienet-sdk: false
# Misc tests using traditional zombienet
# TODO: Disabled, occasionally (1 on ~50-70 runs) fails
- job-name: "zombienet-pezkuwi-misc-0001-teyrchains-paritydb"
test-definition: "0001-paritydb.zndsl"
local-dir: "./pezkuwi/zombienet_tests/misc"
runner-type: "default"
use-zombienet-sdk: false
# TODO: needs to resolve how to pass the GH_TOKEN to pods
# - job-name: "zombienet-pezkuwi-misc-0002-upgrade-node"
# test-definition: "0002-upgrade-node.zndsl"
# local-dir: "./pezkuwi/zombienet_tests/misc"
# runner-type: "large"
# additional-env:
# ZOMBIENET_INTEGRATION_TEST_IMAGE: "docker.io/parity/pezkuwi:latest"
# POLKADOT_PR_ARTIFACTS_URL: ${{ needs.preflight.outputs.POLKADOT_PR_ARTIFACTS_URL }}
# use-zombienet-sdk: false
# Malus tests using traditional zombienet
- job-name: "zombienet-pezkuwi-malus-0001-dispute-valid"
test-definition: "0001-dispute-valid-block.zndsl"
local-dir: "./pezkuwi/node/malus/integrationtests"
runner-type: "default"
use-zombienet-sdk: false
# SDK tests using zombienet-sdk
# TODO: Disabled
- job-name: "zombienet-pezkuwi-coretime-revenue"
test-filter: "smoke::coretime_revenue::coretime_revenue_test"
runner-type: "default"
use-zombienet-sdk: true
pezcumulus-image: "colander"
- job-name: "zombienet-pezkuwi-elastic-scaling-slot-based-3cores"
test-filter: "elastic_scaling::slot_based_3cores::slot_based_3cores_test"
runner-type: "default"
use-zombienet-sdk: true
pezcumulus-image: "test-teyrchain"
# TODO: Disabled, fails very often with zombienet native provider
- job-name: "zombienet-pezkuwi-elastic-scaling-slot-based-12cores"
test-filter: "elastic_scaling::slot_based_12cores::slot_based_12cores_test"
runner-type: "default"
use-zombienet-sdk: true
pezcumulus-image: "test-teyrchain"
- job-name: "zombienet-pezkuwi-elastic-scaling-doesnt-break-teyrchains"
test-filter: "elastic_scaling::doesnt_break_teyrchains::doesnt_break_teyrchains_test"
runner-type: "default"
use-zombienet-sdk: true
- job-name: "zombienet-pezkuwi-elastic-scaling-basic-3cores"
test-filter: "elastic_scaling::basic_3cores::basic_3cores_test"
runner-type: "default"
use-zombienet-sdk: true
pezcumulus-image: "colander"
- job-name: "zombienet-pezkuwi-functional-sync-backing"
test-filter: "functional::sync_backing::sync_backing_test"
runner-type: "default"
use-zombienet-sdk: true
pezcumulus-image: "test-teyrchain"
- job-name: "zombienet-pezkuwi-functional-async-backing-6-seconds-rate"
test-filter: "functional::async_backing_6_seconds_rate::async_backing_6_seconds_rate_test"
runner-type: "default"
use-zombienet-sdk: true
# TODO: Disabled, occasionally (1 on ~50-100 runs) fails
- job-name: "zombienet-pezkuwi-functional-duplicate-collations"
test-filter: "functional::duplicate_collations::duplicate_collations_test"
runner-type: "default"
use-zombienet-sdk: true
# TODO: Disabled, occasionally (2 on ~50-70 runs) fails
- job-name: "zombienet-pezkuwi-disputes-slashing"
test-filter: "disabling::slashing"
runner-type: "default"
use-zombienet-sdk: true
# TODO: Disabled, occasionally (1 on ~50-100 runs) fails
- job-name: "zombienet-pezkuwi-functional-spam-statement-distribution-requests"
test-filter: "functional::spam_statement_distribution_requests::spam_statement_distribution_requests_test"
runner-type: "default"
use-zombienet-sdk: true
pezcumulus-image: "colander"
- job-name: "zombienet-pezkuwi-approval-voting-coalescing"
test-filter: "functional::approval_voting_coalescing::approval_voting_coalescing_test"
runner-type: "default"
use-zombienet-sdk: true
pezcumulus-image: "colander"
- job-name: "zombienet-pezkuwi-approved-peer-mixed-validators"
test-filter: "functional::approved_peer_mixed_validators::approved_peer_mixed_validators_test"
runner-type: "default"
use-zombienet-sdk: true
pezcumulus-image: "colander"
additional-setup: |
BIN_DIR="$(pwd)/bin_old"
mkdir -p $BIN_DIR
for bin in pezkuwi pezkuwi-teyrchain; do
OLD_NAME="$bin-old"
echo "downloading $bin as $OLD_NAME in $BIN_DIR";
curl -L -o $BIN_DIR/$OLD_NAME https://github.com/pezkuwichain/pezkuwi-sdk/releases/download/pezkuwi-stable2503/$bin
chmod 755 $BIN_DIR/$OLD_NAME;
done
for bin in pezkuwi-execute-worker pezkuwi-prepare-worker; do
OLD_NAME="$bin"
echo "downloading $bin as $OLD_NAME in $BIN_DIR";
curl -L -o $BIN_DIR/$OLD_NAME https://github.com/pezkuwichain/pezkuwi-sdk/releases/download/pezkuwi-stable2503/$bin
chmod 755 $BIN_DIR/$OLD_NAME;
done
ls -ltr $BIN_DIR
export PATH=$BIN_DIR:$PATH
echo "PATH=$PATH" >> $GITHUB_ENV
additional-env:
OLD_POLKADOT_IMAGE: "ghcr.io/pezkuwichain/pezkuwi-debug:latest"
OLD_POLKADOT_COMMAND: "pezkuwi-old"
- job-name: "zombienet-pezkuwi-functional-validator-disabling"
test-filter: "functional::validator_disabling::validator_disabling_test"
runner-type: "default"
use-zombienet-sdk: true
- job-name: "zombienet-pezkuwi-dispute-old-finalized"
test-filter: "functional::dispute_old_finalized::dispute_old_finalized"
runner-type: "default"
use-zombienet-sdk: true
- job-name: "zombienet-pezkuwi-shared-core-idle-teyrchain"
test-filter: "functional::shared_core_idle_teyrchain::shared_core_idle_teyrchain_test"
runner-type: "default"
use-zombienet-sdk: true
@@ -1,17 +0,0 @@
- job-name: "zombienet-teyrchain-template-0001-minimal_template_block_production"
test-filter: "smoke::minimal_template_block_production_test"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
use-zombienet-sdk: true
- job-name: "zombienet-teyrchain-template-0002-teyrchain_template_block_production"
test-filter: "smoke::teyrchain_template_block_production_test"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
use-zombienet-sdk: true
- job-name: "zombienet-teyrchain-template-0003-solochain_template_block_production"
test-filter: "smoke::solochain_template_block_production_test"
runner-type: "default"
pezcumulus-image: "test-teyrchain"
use-zombienet-sdk: true