feat: initialize Kurdistan SDK - independent fork of Polkadot SDK
This commit is contained in:
@@ -0,0 +1,231 @@
|
||||
"""
|
||||
|
||||
Creates the Pezkuwi-SDK umbrella crate that re-exports all other crates.
|
||||
|
||||
This re-creates the `umbrella/` folder. Ensure that it does not contain any changes you want to keep.
|
||||
|
||||
Usage:
|
||||
python3 generate-umbrella.py --sdk <path> --version <version>
|
||||
|
||||
Example:
|
||||
python3 generate-umbrella.py --sdk .. --version 1.11.0
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import toml
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
from cargo_workspace import Workspace
|
||||
|
||||
"""
|
||||
Crate names that should be excluded from the umbrella crate.
|
||||
"""
|
||||
def exclude(crate):
|
||||
name = crate.name
|
||||
if crate.metadata.get("pezkuwi-sdk.exclude-from-umbrella", False):
|
||||
return True
|
||||
|
||||
# No fuzzers or examples:
|
||||
if "example" in name or name.endswith("fuzzer"):
|
||||
return True
|
||||
|
||||
# No runtime crates:
|
||||
if name.endswith("-runtime"):
|
||||
# Note: this is a bit hacky. We should use custom crate metadata instead.
|
||||
return name != "sp-runtime" and name != "bp-runtime" and name != "frame-try-runtime"
|
||||
|
||||
# Exclude snowbridge crates.
|
||||
if name.startswith("snowbridge-"):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def main(path, version):
    """Generate the `pezkuwi-sdk` umbrella crate.

    Deletes any existing `umbrella/` folder, indexes the workspace at
    `path`, classifies all published crates as std or no_std, writes a fresh
    `umbrella/Cargo.toml` and `umbrella/src/lib.rs` that re-export them
    behind feature flags, and finally re-registers the umbrella crate in the
    workspace manifest.

    Args:
        path: Path to the SDK workspace root.
        version: Version string for the generated umbrella crate.
    """
    delete_umbrella(path)
    workspace = Workspace.from_path(path)
    print(f'Indexed {workspace}')

    # (crate, rel_dir) pairs; lists (not dicts) so they can be sorted below.
    std_crates = []
    nostd_crates = []
    for crate in workspace.crates:
        if crate.name == 'pezkuwi-sdk':
            continue
        if not crate.publish:
            print(f"Skipping {crate.name} as it is not published")
            continue

        lib_path = os.path.dirname(crate.abs_path)
        manifest_path = os.path.join(lib_path, "Cargo.toml")
        lib_path = os.path.join(lib_path, "src", "lib.rs")
        # Renamed from `path` — the original shadowed the function argument.
        rel_dir = os.path.dirname(crate.rel_path)

        # Guess which crates support no_std. Proc-macro crates are always no_std:
        with open(manifest_path, "r") as f:
            manifest = toml.load(f)
            if 'lib' in manifest and 'proc-macro' in manifest['lib']:
                if manifest['lib']['proc-macro']:
                    nostd_crates.append((crate, rel_dir))
                    continue

        # Crates without a lib.rs cannot be no_std
        if not os.path.exists(lib_path):
            print(f"Skipping {crate.name} as it does not have a 'src/lib.rs'")
            continue
        if exclude(crate):
            print(f"Skipping {crate.name} as it is in the exclude list")
            continue

        # Now search for a no_std attribute:
        with open(lib_path, "r") as f:
            nostd_crate = False
            for line in f:
                line = line.strip()
                if line == "#![no_std]" or line == '#![cfg_attr(not(feature = "std"), no_std)]':
                    nostd_crate = True
                    break
                elif "no_std" in line:
                    # Surface unrecognized no_std spellings for manual review.
                    print(line)

        if nostd_crate:
            nostd_crates.append((crate, rel_dir))
        else:
            std_crates.append((crate, rel_dir))

    # Sort by name
    std_crates.sort(key=lambda x: x[0].name)
    nostd_crates.sort(key=lambda x: x[0].name)

    runtime_crates = [crate for crate in nostd_crates if 'frame' in crate[0].name or crate[0].name.startswith('sp-')]
    all_crates = std_crates + nostd_crates
    all_crates.sort(key=lambda x: x[0].name)

    # All crates become optional path dependencies (nostd first, then std).
    dependencies = {}
    for (crate, rel_dir) in nostd_crates + std_crates:
        dependencies[crate.name] = {"path": f"../{rel_dir}", "default-features": False, "optional": True}

    # The empty features are filled by Zepter
    features = {
        "default": ["std"],
        "std": [],
        "runtime-benchmarks": [],
        "try-runtime": [],
        "serde": [],
        "experimental": [],
        "with-tracing": [],
        "runtime-full": [d.name for d, _ in nostd_crates],
        "runtime": [d.name for d, _ in runtime_crates],
        "node": ["std"] + [d.name for d, _ in std_crates],
        "tuples-96": [],
    }

    manifest = {
        "package": {
            "name": "pezkuwi-sdk",
            "version": version,
            "edition": {"workspace": True},
            "authors": {"workspace": True},
            "description": "Pezkuwi SDK umbrella crate.",
            "homepage": {"workspace": True},
            "repository": {"workspace": True},
            "license": "Apache-2.0",
            "metadata": {"docs": {"rs": {
                "features": ["runtime-full", "node"],
                "targets": ["x86_64-unknown-linux-gnu"]
            }}}
        },
        "dependencies": dependencies,
        "features": features,
    }

    umbrella_dir = os.path.join(workspace.path, "umbrella")
    manifest_path = os.path.join(umbrella_dir, "Cargo.toml")
    lib_path = os.path.join(umbrella_dir, "src", "lib.rs")
    # create all dirs
    os.makedirs(os.path.dirname(lib_path), exist_ok=True)

    # Write the manifest
    with open(manifest_path, "w") as f:
        f.write(toml.dumps(manifest))
    print(f"Wrote {manifest_path}")

    # Format with taplo to match CI expectations (best effort).
    taplo_config = os.path.join(workspace.path, ".config", "taplo.toml")
    if os.path.exists(taplo_config):
        try:
            subprocess.run(["taplo", "format", "--config", taplo_config, manifest_path], check=True, capture_output=True)
            print(f"Formatted {manifest_path} with taplo")
        except (subprocess.CalledProcessError, FileNotFoundError) as e:
            print(f"Warning: Could not format with taplo: {e}")

    # ... and the lib.rs
    with open(lib_path, "w") as f:
        f.write('''// Copyright (C) Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0

#![cfg_attr(not(feature = "std"), no_std)]

//! Pezkuwi SDK umbrella crate re-exporting all other published crates.
//!
//! This helps to set a single version number for all your dependencies. Docs are in the
//! `pezkuwi-sdk-docs` crate.

// This file is auto-generated and checked by the CI. You can edit it manually, but it must be
// exactly the way that the CI expects it.
''')

        for crate, _ in all_crates:
            use = crate.name.replace("-", "_")
            # Ensure the doc comment ends in a period.
            # NOTE(review): assumes crate.description is always a str — confirm.
            desc = crate.description if crate.description.endswith(".") else crate.description + "."
            f.write(f'\n/// {desc}')
            f.write(f'\n#[cfg(feature = "{crate.name}")]\n')
            f.write(f"pub use {use};\n")

    print(f"Wrote {lib_path}")

    add_to_workspace(workspace.path)
|
||||
"""
|
||||
Delete the umbrella folder and remove the umbrella crate from the workspace.
|
||||
"""
|
||||
def delete_umbrella(path):
|
||||
# remove the umbrella crate from the workspace
|
||||
manifest = os.path.join(path, "Cargo.toml")
|
||||
manifest = open(manifest, "r").read()
|
||||
manifest = re.sub(r'\s+"umbrella",\n', "", manifest)
|
||||
with open(os.path.join(path, "Cargo.toml"), "w") as f:
|
||||
f.write(manifest)
|
||||
umbrella_dir = os.path.join(path, "umbrella")
|
||||
if os.path.exists(umbrella_dir):
|
||||
print(f"Deleting {umbrella_dir}")
|
||||
os.remove(os.path.join(umbrella_dir, "Cargo.toml"))
|
||||
shutil.rmtree(os.path.join(umbrella_dir, "src"))
|
||||
|
||||
"""
|
||||
Create the umbrella crate and add it to the workspace.
|
||||
"""
|
||||
def add_to_workspace(path):
|
||||
manifest = os.path.join(path, "Cargo.toml")
|
||||
manifest = open(manifest, "r").read()
|
||||
manifest = re.sub(r'^members = \[', 'members = [\n "umbrella",', manifest, flags=re.M)
|
||||
with open(os.path.join(path, "Cargo.toml"), "w") as f:
|
||||
f.write(manifest)
|
||||
|
||||
os.chdir(path) # hack
|
||||
os.system("cargo metadata --format-version 1 > /dev/null") # update the lockfile
|
||||
os.system(f"zepter") # enable the features
|
||||
os.system(f"taplo format --config .config/taplo.toml Cargo.toml umbrella/Cargo.toml")
|
||||
os.system(f"cargo fmt -- umbrella/src/lib.rs") # format lib.rs for rustfmt compliance
|
||||
|
||||
def parse_args(argv=None):
    """Parse command-line arguments.

    `--version` is now required: previously a missing version flowed as
    `None` into the generated manifest and failed only later, inside
    `toml.dumps`.

    Args:
        argv: Optional argument list; defaults to `sys.argv[1:]`.

    Returns:
        argparse.Namespace with `sdk` and `version` attributes.
    """
    parser = argparse.ArgumentParser(description="Create a pezkuwi-sdk crate")
    parser.add_argument("--sdk", type=str, default="pezkuwi-sdk", help="Path to the pezkuwi-sdk crate")
    parser.add_argument("--version", type=str, required=True, help="Version of the pezkuwi-sdk crate")
    return parser.parse_args(argv)
|
||||
|
||||
if __name__ == "__main__":
|
||||
args = parse_args()
|
||||
main(args.sdk, args.version)
|
||||
Executable
+179
@@ -0,0 +1,179 @@
|
||||
#!/usr/bin/env sh
|
||||
|
||||
set -e
|
||||
|
||||
# Ask a yes/no question; defaults to "No" on plain Enter.
# $1: question text (may contain backslash escapes such as \n).
# Returns 0 for yes, 1 for no.
prompt() {
    while true; do
        # Use %b so escapes in the question still expand, but a stray '%'
        # in the text cannot be misinterpreted as a printf format directive.
        printf '%b [y/N]\n' "$1"
        read -r yn
        case $yn in
            [Yy]* ) return 0;; # Yes, return 0 (true)
            [Nn]* ) return 1;; # No, return 1 (false)
            "" ) return 1;;    # Default to no if user just presses Enter
            * ) printf 'Please answer yes or no.\n';;
        esac
    done
}
|
||||
|
||||
# Ask a yes/no question; defaults to "Yes" on plain Enter.
# $1: question text (may contain backslash escapes such as \n).
# Returns 0 for yes, 1 for no.
prompt_default_yes() {
    while true; do
        # %b: expand escapes without treating the question as a format string.
        printf '%b [Y/n]\n' "$1"
        read -r yn
        case $yn in
            [Yy]* ) return 0;; # Yes, return 0 (true)
            [Nn]* ) return 1;; # No, return 1 (false)
            "" ) return 0;;    # Default to yes if user just presses Enter
            * ) printf 'Please answer yes or no.\n';;
        esac
    done
}
|
||||
|
||||
# Clone the requested template repository (minimal, solochain, or teyrchain)
# unless a local checkout already exists, then cd into it.
# $1: template name.
clone_and_enter_template() {
    template="$1"
    if [ -d "${template}-template" ]; then
        printf '\n✅︎ %s-template directory already exists. -> Entering.\n' "$template"
    else
        printf "\n↓ Let's grab the %s template from github.\n" "$template"
        # Quote the URL and target dir so an unexpected name cannot split.
        git clone --quiet "https://github.com/pezkuwichain/${template}-template.git" "${template}-template"
    fi
    cd "${template}-template"
}
|
||||
|
||||
cat <<EOF

Welcome to the

 ____ _ _ ____ ____ _ __
| _ \ ___ ____| | ___ ___ _(_) / ___|| _ \| |/ /
| |_) / _ \_ /| |/ / | | \ \ / / | \___ \| | | | ' /
| __/ __// /_| <| |_| |\ V /| | ___) | |_| | . \
|_| \___/____|_|\_\\__,_| \_/ |_| |____/|____/|_|\_\
quickstart!

⚡ We will help setting up the environment for you to experiment with.
EOF

# ---- Platform detection and dependency installation ----
# Determine OS
os_name=$(uname -s)
if [ "$os_name" = "Darwin" ]; then
    printf "🍎 Detected macOS. Installing dependencies via Homebrew.\n"

    # Check if brew is installed
    if command -v brew >/dev/null 2>&1; then
        printf "\n✅︎🍺 Homebrew already installed.\n"
    else
        if prompt_default_yes "\n🍺 Homebrew is not installed. Install it?\n"; then
            printf "🍺 Installing Homebrew.\n"
            /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)"
        else
            printf "❌ Cannot continue without homebrew. Aborting.\n"
            exit 1
        fi
    fi

    brew update
    if command -v git >/dev/null 2>&1; then
        printf "\n✅︎🍺 git already installed.\n"
    else
        if prompt_default_yes "\n🍺 git seems to be missing but we will need it; install git?\n"; then
            brew install git
        else
            printf "❌ Cannot continue without git. Aborting.\n"
            exit 1
        fi
    fi

    if prompt "\n🍺 Install cmake, openssl and protobuf?"; then
        brew install cmake openssl protobuf
    else
        printf "🍺 Assuming cmake, openssl and protobuf are present.\n"
    fi
elif [ "$os_name" = "Linux" ]; then
    # find the distro name in the release files
    distro=$( cat /etc/*-release | tr '[:upper:]' '[:lower:]' | grep -Poi '(debian|ubuntu|arch|fedora|opensuse)' | uniq | head -n 1 )

    if [ "$distro" = "ubuntu" ]; then
        printf "\n🐧 Detected Ubuntu. Using apt to install dependencies.\n"
        sudo apt -qq update
        sudo apt -qq install --assume-yes git clang curl libssl-dev protobuf-compiler make
    elif [ "$distro" = "debian" ]; then
        printf "\n🐧 Detected Debian. Using apt to install dependencies.\n"
        sudo apt -qq update
        sudo apt -qq install --assume-yes git clang curl libssl-dev llvm libudev-dev make protobuf-compiler
    elif [ "$distro" = "arch" ]; then
        printf "\n🐧 Detected Arch Linux. Using pacman to install dependencies.\n"
        # NOTE(review): pacman is invoked without sudo, unlike the other
        # distro branches — confirm this is intended (root-only usage?).
        pacman -Syu --needed --noconfirm curl git clang make protobuf
    elif [ "$distro" = "fedora" ]; then
        printf "\n🐧 Detected Fedora. Using dnf to install dependencies.\n"
        sudo dnf update --assumeyes
        sudo dnf install --assumeyes clang curl git openssl-devel make protobuf-compiler perl
    elif [ "$distro" = "opensuse" ]; then
        printf "\n🐧 Detected openSUSE. Using zypper to install dependencies.\n"
        sudo zypper install --no-confirm clang gcc gcc-c++ curl git openssl-devel llvm-devel libudev-devel make awk protobuf-devel
    else
        if prompt "\n🐧 Unknown Linux distribution. Unable to install dependencies. Continue anyway?\n"; then
            printf "\n🐧 Proceeding with unknown linux distribution...\n"
        else
            exit 1
        fi
    fi
else
    printf "❌ Unknown operating system. Aborting.\n"
    exit 1
fi

# ---- Rust toolchain setup ----
# Check if rust is installed
[ -f "$HOME/.cargo/env" ] && . "$HOME/.cargo/env"
if command -v rustc >/dev/null 2>&1; then
    printf "\n✅︎🦀 Rust already installed.\n"
else
    if prompt_default_yes "\n🦀 Rust is not installed. Install it?"; then
        printf "🦀 Installing via rustup.\n"
        curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
        . "$HOME/.cargo/env"
    else
        printf "Aborting.\n"
        exit 1
    fi
fi

# Ensure that we have wasm support
if prompt_default_yes "\n🦀 Setup the Rust environment (e.g. WASM support)?"; then
    printf "🦀 Setting up Rust environment.\n"
    rustup default stable
    rustup update
    rustup target add wasm32-unknown-unknown
    rustup component add rust-src
fi

# ---- Optional template checkout and build ----
if ! prompt "\nWould you like to start with one of the templates?"; then
    printf "⚡ All done, the environment is ready for hacking.\n"
    exit 0
fi

while true; do
    printf "\nWhich template would you like to start with?\n"
    printf "1) minimal template\n"
    printf "2) teyrchain template\n"
    printf "3) solochain template\n"
    printf "q) cancel\n"
    # NOTE(review): 'read -p' is a bashism but the shebang is /usr/bin/env sh —
    # confirm the target shells (e.g. dash) accept it.
    read -p "#? " template
    case $template in
        [1]* ) clone_and_enter_template minimal; break;;
        [2]* ) clone_and_enter_template teyrchain; break;;
        [3]* ) clone_and_enter_template solochain; break;;
        [qQ]* ) printf "Canceling, not using a template.\n"; exit 0;;
        * ) printf "Selection not recognized.\n";;
    esac
done

if ! prompt_default_yes "\n⚙️ Let's compile the node? It might take a while."; then
    printf "⚡ Script finished, you can continue in the ${template}-template directory.\n"
    exit 0
fi

cargo build --release

if prompt_default_yes "\n🚀 Everything ready to go, let's run the node?\n"; then
    cargo run --release -- --dev
fi
|
||||
Executable
+105
@@ -0,0 +1,105 @@
|
||||
#!/usr/bin/env bash

# Build per-audience release notes and a commit changelog from prdoc files
# and srtool digests, rendering the tera templates under
# scripts/release/templates into a RELEASE_DRAFT.md.
#
# Tunables (environment variables): VERSION, ENGINE (container engine),
# REF1/REF2 (commit range), RUSTC_STABLE, NO_RUNTIMES, CRATES_ONLY.
export PRODUCT=pezkuwi
export VERSION=${VERSION:-stable2409}
export ENGINE=${ENGINE:-podman}
export REF1=${REF1:-'HEAD'}
export REF2=${REF2}
export RUSTC_STABLE=${RUSTC_STABLE:-'1.0'}
export NO_RUNTIMES=${NO_RUNTIMES:-'false'}
export CRATES_ONLY=${CRATES_ONLY:-'false'}

PROJECT_ROOT=`git rev-parse --show-toplevel`
echo $PROJECT_ROOT

TMP=${TMP:-$(mktemp -d)}
TEMPLATE_AUDIENCE="${PROJECT_ROOT}/scripts/release/templates/audience.md.tera"
TEMPLATE_CHANGELOG="${PROJECT_ROOT}/scripts/release/templates/changelog.md.tera"

DATA_JSON="${TMP}/data.json"
CONTEXT_JSON="${TMP}/context.json"
echo -e "TEMPLATE_AUDIENCE: \t$TEMPLATE_AUDIENCE"
echo -e "DATA_JSON: \t\t$DATA_JSON"
echo -e "CONTEXT_JSON: \t\t$CONTEXT_JSON"

# Create output folder
OUTPUT="${TMP}/changelogs/$PRODUCT/$VERSION"
echo -e "OUTPUT: \t\t$OUTPUT"
mkdir -p $OUTPUT

# Load all prdoc files for this version as JSON via the prdoc container image.
$ENGINE run --rm -v ${PROJECT_ROOT}:/repo paritytech/prdoc load -d "prdoc/$VERSION" --json > $DATA_JSON

# Wrap the prdoc array under a "prdoc" key for the tera context.
cat $DATA_JSON | jq ' { "prdoc" : .}' > $CONTEXT_JSON

# Fetch the list of valid audiences and their descriptions
# NOTE(review): the schema is fetched from upstream polkadot-sdk master, not
# from this fork — confirm that is the intended source of truth.
SCHEMA_URL=https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json
SCHEMA=$(curl -s $SCHEMA_URL | sed 's|^//.*||')
aud_desc_array=()
while IFS= read -r line; do
    audience=$(jq -r '.const' <<< "$line" )
    description=$(jq -r '.description' <<< "$line")
    if [ -n "$audience" ] && [ -n "$description" ]; then
        aud_desc_array+=("($audience; $description)")
    fi
done < <(jq -c '."$defs".audience_id.oneOf[]' <<< "$SCHEMA")

# Generate a release notes doc per audience
for tuple in "${aud_desc_array[@]}"; do
    # Split the "(audience; description)" tuple back apart.
    audience=$(echo "$tuple" | cut -d ';' -f 1 | sed 's/(//')
    audience_id="$(tr [A-Z] [a-z] <<< "$audience")"
    audience_id="$(tr ' ' '_' <<< "$audience_id")"

    description=$(echo "$tuple" | cut -d ';' -f 2 | sed 's/)//')

    echo "Processing audience: $audience ($audience_id)"
    export TARGET_AUDIENCE="$audience"
    export AUDIENCE_DESC="**ℹ️ These changes are relevant to:** $description"

    tera -t "${TEMPLATE_AUDIENCE}" --env --env-key env "${CONTEXT_JSON}" > "$OUTPUT/relnote_${audience_id}.md"
    cat "$OUTPUT/relnote_${audience_id}.md" >> "$PROJECT_ROOT/scripts/release/templates/changelog.md"
done

# Generate a changelog containing list of the commits
echo "Generating changelog..."
tera -t "${TEMPLATE_CHANGELOG}" --env --env-key env "${CONTEXT_JSON}" > "$OUTPUT/relnote_commits.md"
echo "Changelog ready in $OUTPUT/relnote_commits.md"

# Show the files
tree -s -h -c $OUTPUT/

# Build the srtool runtime context unless runtimes are excluded.
if [[ "$NO_RUNTIMES" == "false" && "$CRATES_ONLY" == "false" ]]; then
    ASSET_HUB_ZAGROS_DIGEST=${ASSET_HUB_ZAGROS_DIGEST:-"$PROJECT_ROOT/scripts/release/digests/asset-hub-zagros-srtool-digest.json"}
    BRIDGE_HUB_ZAGROS_DIGEST=${BRIDGE_HUB_ZAGROS_DIGEST:-"$PROJECT_ROOT/scripts/release/digests/bridge-hub-zagros-srtool-digest.json"}
    COLLECTIVES_ZAGROS_DIGEST=${COLLECTIVES_ZAGROS_DIGEST:-"$PROJECT_ROOT/scripts/release/digests/collectives-zagros-srtool-digest.json"}
    CORETIME_ZAGROS_DIGEST=${CORETIME_ZAGROS_DIGEST:-"$PROJECT_ROOT/scripts/release/digests/coretime-zagros-srtool-digest.json"}
    GLUTTON_ZAGROS_DIGEST=${GLUTTON_ZAGROS_DIGEST:-"$PROJECT_ROOT/scripts/release/digests/glutton-zagros-srtool-digest.json"}
    PEOPLE_ZAGROS_DIGEST=${PEOPLE_ZAGROS_DIGEST:-"$PROJECT_ROOT/scripts/release/digests/people-zagros-srtool-digest.json"}
    ZAGROS_DIGEST=${ZAGROS_DIGEST:-"$PROJECT_ROOT/scripts/release/digests/zagros-srtool-digest.json"}

    jq \
        --slurpfile srtool_asset_hub_zagros $ASSET_HUB_ZAGROS_DIGEST \
        --slurpfile srtool_bridge_hub_zagros $BRIDGE_HUB_ZAGROS_DIGEST \
        --slurpfile srtool_collectives_zagros $COLLECTIVES_ZAGROS_DIGEST \
        --slurpfile srtool_coretime_zagros $CORETIME_ZAGROS_DIGEST \
        --slurpfile srtool_glutton_zagros $GLUTTON_ZAGROS_DIGEST \
        --slurpfile srtool_people_zagros $PEOPLE_ZAGROS_DIGEST \
        --slurpfile srtool_zagros $ZAGROS_DIGEST \
        -n '{
        srtool: [
            { order: 10, name: "Zagros", data: $srtool_zagros[0] },
            { order: 11, name: "Zagros AssetHub", data: $srtool_asset_hub_zagros[0] },
            { order: 12, name: "Zagros BridgeHub", data: $srtool_bridge_hub_zagros[0] },
            { order: 13, name: "Zagros Collectives", data: $srtool_collectives_zagros[0] },
            { order: 14, name: "Zagros Coretime", data: $srtool_coretime_zagros[0] },
            { order: 15, name: "Zagros Glutton", data: $srtool_glutton_zagros[0] },
            { order: 16, name: "Zagros People", data: $srtool_people_zagros[0] }
        ] }' > "$PROJECT_ROOT/scripts/release/context.json"
else
    echo '{}' > "$PROJECT_ROOT/scripts/release/context.json"
fi

# Render the final release draft from the template entry point.
RELEASE_DIR="$PROJECT_ROOT/scripts/release/"
pushd $RELEASE_DIR >/dev/null
tera --env --env-key env --include-path templates --template templates/template.md.tera context.json > RELEASE_DRAFT.md
popd >/dev/null
|
||||
+34
@@ -0,0 +1,34 @@
|
||||
#!/bin/bash

# Strip `version = "..."` lines from [dependencies.<crate>] sections of the
# umbrella crate manifest, leaving path-only dependency entries.

TARGET_FILE="umbrella/Cargo.toml"
TMP_FILE="${TARGET_FILE}.tmp"

echo "Processing $TARGET_FILE..."

# Find and remove version lines in [dependencies.*] sections only
awk '
# Match [dependencies.<crate>] section
/^\[dependencies\.[^]]+\]/ {
    in_dependencies_section = 1
    print
    next
}

# Any new section turns off the flag
/^\[.*\]/ {
    in_dependencies_section = 0
    print
    next
}

# Skip version = "..." if in a [dependencies.*] section
{
    if (in_dependencies_section && $0 ~ /^[ \t]*version[ \t]*=[ \t]*".*"/) {
        next
    } else {
        print
    }
}
' "$TARGET_FILE" > "$TMP_FILE" && mv "$TMP_FILE" "$TARGET_FILE"

echo "✅ Done: Removed version lines inside [dependencies.*] sections."
|
||||
@@ -0,0 +1,14 @@
|
||||
#!/bin/bash

# Remove `version = "..."` from local `path = "..."` dependency entries in the
# root workspace manifest.

ROOT_TOML="./Cargo.toml"

echo "Processing $ROOT_TOML..."

# Find lines that have path = "..." and version = "..."
# and remove only the version = "..." part, regardless of other fields
# NOTE(review): `\s` is a GNU sed extension and `*?` is NOT a lazy quantifier
# in sed ERE — confirm this pattern matches as intended on the target
# manifests (and that the script only runs where GNU sed is available,
# since `-i.bak` semantics also differ on BSD sed).
sed -i.bak -E 's/(path\s*=\s*"[^"]*"\s*(,\s*[^,]*?)*)\s*,\s*version\s*=\s*"[^"]*"/\1/g' "$ROOT_TOML"

# Clean up backup
rm -f "${ROOT_TOML}.bak"

echo "Done. Removed version fields from local path dependencies."
|
||||
@@ -0,0 +1 @@
|
||||
*.json
|
||||
Executable
+33
@@ -0,0 +1,33 @@
|
||||
#!/bin/bash

# Rewrite `path = "..."` entries to `workspace = true` in the [dependencies]
# and [dev-dependencies] sections of every member crate manifest, so crates
# inherit their dependencies from the workspace root.

# Find all Cargo.toml files excluding the root,umbrella/Cargo.toml,
# substrate/frame/contracts/fixtures/build/Cargo.toml,
# substrate/frame/contracts/fixtures/contracts/common/Cargo.toml
find . -name "Cargo.toml" \
    ! -path "./Cargo.toml" \
    ! -path "./umbrella/Cargo.toml" \
    ! -path "./substrate/frame/contracts/fixtures/build/Cargo.toml" \
    ! -path "./substrate/frame/contracts/fixtures/contracts/common/Cargo.toml"| while read -r file; do

    echo "Processing $file..."

    # Find and replace path dependencies with "workspace = true"
    # in_section: 1 = [dependencies], 2 = [dev-dependencies], 0 = elsewhere.
    awk '
    BEGIN { in_section = 0 }
    /^\[dependencies\]/ { in_section = 1; print; next }
    /^\[dev-dependencies\]/ { in_section = 2; print; next }
    /^\[.*\]/ { in_section = 0; print; next }

    {
        if (in_section == 1 || in_section == 2) {
            if ($0 ~ /path *= *".*"/) {
                gsub(/path *= *".*"/, "workspace = true")
            }
        }
        print
    }
    ' "$file" > "${file}.tmp" && mv "${file}.tmp" "$file"

done

echo "All applicable Cargo.toml files updated."
|
||||
@@ -0,0 +1,10 @@
|
||||
|
||||
{# This file uses the Markdown format with additional templating such as this comment. -#}
|
||||
{# Such a comment will not show up in the rendered release notes. -#}
|
||||
{# The content of this file (if any) will be inserted at the top of the release notes -#}
|
||||
{# and generated for each new release candidate. -#}
|
||||
{# Ensure you leave an empty line at both top and bottom of this file. -#}
|
||||
|
||||
<!-- Such a comment will be rendered but remain invisible in the rendered markdown -->
|
||||
<!-- Edit below this line -->
|
||||
<!-- Edit above this line -->
|
||||
@@ -0,0 +1,13 @@
|
||||
### Changelog for `{{ env.TARGET_AUDIENCE }}`
|
||||
|
||||
{{ env.AUDIENCE_DESC }}
|
||||
|
||||
{% for file in prdoc -%}
|
||||
{% for doc_item in file.content.doc %}
|
||||
{%- if doc_item.audience is containing(env.TARGET_AUDIENCE) %}
|
||||
#### [#{{file.doc_filename.number}}]: {{ file.content.title }}
|
||||
{{ doc_item.description }}
|
||||
{% endif -%}
|
||||
|
||||
{%- endfor %}
|
||||
{%- endfor %}
|
||||
@@ -0,0 +1,7 @@
|
||||
## Changelog for `{{ env.PRODUCT | capitalize }} {{ env.VERSION }}`
|
||||
|
||||
{% for file in prdoc | sort(attribute="doc_filename.number") -%}
|
||||
{%- set author= file.content.author | default(value="n/a") -%}
|
||||
{%- set topic= file.content.topic | default(value="n/a") -%}
|
||||
- #{{file.doc_filename.number}}: {{ file.content.title }} (@{{ author }}) [{{ topic | capitalize }}]
|
||||
{% endfor -%}
|
||||
@@ -0,0 +1,4 @@
|
||||
{# This include generates the section showing the changes #}
|
||||
## Changelog
|
||||
|
||||
{% include "changelog.md" -%}
|
||||
@@ -0,0 +1,6 @@
|
||||
## Rust compiler versions
|
||||
|
||||
This release was built and tested against the following versions of `rustc`.
|
||||
Other versions may work.
|
||||
|
||||
- Rust Stable: `{{ env.RUSTC_STABLE }}`
|
||||
@@ -0,0 +1,19 @@
|
||||
|
||||
## Docker images
|
||||
|
||||
The docker images for the `polkadot` node binary and the `polkadot-parachain` binary can be found at Docker hub (will be available a few minutes after the release has been published):
|
||||
- [Polkadot image](https://hub.docker.com/r/parity/polkadot/tags?page=1&ordering=last_updated)
|
||||
- [Polkadot-Parachain image](https://hub.docker.com/r/parity/polkadot-parachain/tags?page=1&ordering=last_updated)
|
||||
|
||||
|
||||
You may also pull it with:
|
||||
|
||||
```
|
||||
docker pull parity/polkadot:{{ env.VERSION }}
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```
|
||||
docker pull parity/polkadot-parachain:{{ env.VERSION }}
|
||||
```
|
||||
@@ -0,0 +1,26 @@
|
||||
{# This macro shows one runtime #}
|
||||
{%- macro runtime(runtime) -%}
|
||||
|
||||
### {{ runtime.name | title }}
|
||||
|
||||
{%- if runtime.data.runtimes.compressed.subwasm.compression.compressed %}
|
||||
{%- set compressed = "Yes" %}
|
||||
{%- else %}
|
||||
{%- set compressed = "No" %}
|
||||
{%- endif %}
|
||||
|
||||
{%- set comp_ratio = 100 - (runtime.data.runtimes.compressed.subwasm.compression.size_compressed /
|
||||
runtime.data.runtimes.compressed.subwasm.compression.size_decompressed *100) %}
|
||||
|
||||
```
|
||||
🏋️ Runtime Size: {{ runtime.data.runtimes.compressed.subwasm.size | filesizeformat }} ({{
|
||||
runtime.data.runtimes.compressed.subwasm.size }} bytes)
|
||||
🔥 Core Version: {{ runtime.data.runtimes.compressed.subwasm.core_version.specName }}-{{runtime.data.runtimes.compressed.subwasm.core_version.specVersion }} ({{runtime.data.runtimes.compressed.subwasm.core_version.implName }}-{{runtime.data.runtimes.compressed.subwasm.core_version.implVersion }}.tx{{runtime.data.runtimes.compressed.subwasm.core_version.transactionVersion }}.au{{runtime.data.runtimes.compressed.subwasm.core_version.authoringVersion }})
|
||||
🗜 Compressed: {{ compressed }}: {{ comp_ratio | round(method="ceil", precision=2) }}%
|
||||
🎁 Metadata version: V{{ runtime.data.runtimes.compressed.subwasm.metadata_version }}
|
||||
🗳️ system.setCode hash: {{ runtime.data.runtimes.compressed.subwasm.proposal_hash }}
|
||||
🗳️ authorizeUpgrade hash: {{ runtime.data.runtimes.compressed.subwasm.parachain_authorize_upgrade_hash }}
|
||||
🗳️ Blake2-256 hash: {{ runtime.data.runtimes.compressed.subwasm.blake2_256 }}
|
||||
📦 IPFS: {{ runtime.data.runtimes.compressed.subwasm.ipfs_hash }}
|
||||
```
|
||||
{%- endmacro runtime %}
|
||||
@@ -0,0 +1,19 @@
|
||||
{# This include shows the list and details of the runtimes #}
|
||||
{%- import "runtime.md.tera" as m_r -%}
|
||||
|
||||
{# --- #}
|
||||
|
||||
## Runtimes
|
||||
|
||||
{% set rtm = srtool[0] -%}
|
||||
|
||||
The information about the runtimes included in this release can be found below.
|
||||
The runtimes have been built using [{{ rtm.data.gen }}](https://github.com/paritytech/srtool) and `{{ rtm.data.rustc }}`.
|
||||
|
||||
{%- for runtime in srtool | sort(attribute="name") %}
|
||||
{%- set HIDE_VAR = "HIDE_SRTOOL_" ~ runtime.name | upper %}
|
||||
{%- if not env is containing(HIDE_VAR) %}
|
||||
|
||||
{{ m_r::runtime(runtime=runtime) }}
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
@@ -0,0 +1,19 @@
|
||||
{# This is the entry point of the template -#}
|
||||
|
||||
This release contains the changes from `{{ env.REF1 | replace(from="refs/tags/", to="") }}` to `{{ env.REF2 | replace(from="refs/tags/", to="") }}`.
|
||||
|
||||
{# -- Manual free notes section -- #}
|
||||
{% include "_free_notes.md.tera" -%}
|
||||
|
||||
{# -- Automatic section -- #}
|
||||
{% include "changes.md.tera" -%}
|
||||
|
||||
{% include "compiler.md.tera" -%}
|
||||
|
||||
{% if env.NO_RUNTIMES == "false" and env.CRATES_ONLY == "false" -%}
|
||||
{% include "runtimes.md.tera" -%}
|
||||
{% endif -%}
|
||||
|
||||
{% if env.CRATES_ONLY == "false" -%}
|
||||
{% include "docker_image.md.tera" -%}
|
||||
{% endif -%}
|
||||
Executable
+66
@@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env bash

# A script to update bridges repo as subtree to Cumulus
# Usage:
#       ./scripts/update_subtree_snowbridge.sh fetch
#       ./scripts/update_subtree_snowbridge.sh create_patch

set -e

# Branch and directory configuration, overridable via environment variables.
SNOWBRIDGE_BRANCH="${SNOWBRIDGE_BRANCH:-main}"
PEZKUWI_SDK_BRANCH="${PEZKUWI_SDK_BRANCH:-master}"
SNOWBRIDGE_TARGET_DIR="${TARGET_DIR:-bridges/snowbridge}"
||||
|
||||
# Sync/update the snowbridge subtree from the upstream Snowfork repository.
# Requires a clean git working copy; adds a 'snowbridge' remote if none of
# the existing remotes point at snowbridge.git.
function fetch() {
    # the script is able to work only on clean git copy
    [[ -z "$(git status --porcelain)" ]] || {
        echo >&2 "The git copy must be clean (stash all your changes):";
        git status --porcelain
        exit 1;
    }

    # Reuse the first existing remote that fetches from snowbridge.git.
    local snowbridge_remote=$(git remote -v | grep "snowbridge.git (fetch)" | head -n1 | awk '{print $1;}')
    if [ -z "$snowbridge_remote" ]; then
        echo "Adding new remote: 'snowbridge' repo..."
        git remote add -f snowbridge https://github.com/Snowfork/snowbridge.git
        snowbridge_remote="snowbridge"
    else
        echo "Fetching remote: '${snowbridge_remote}' repo..."
        git fetch https://github.com/Snowfork/snowbridge.git --prune
    fi

    echo "Syncing/updating subtree with remote branch '${snowbridge_remote}/$SNOWBRIDGE_BRANCH' to target directory: '$SNOWBRIDGE_TARGET_DIR'"
    git subtree pull --prefix=$SNOWBRIDGE_TARGET_DIR ${snowbridge_remote} $SNOWBRIDGE_BRANCH --squash
}
|
||||
|
||||
# Strip unneeded files from the freshly pulled subtree by delegating to the
# snowbridge-provided verify-pallets-build.sh script.
function clean() {
    echo "Patching/removing unneeded stuff from subtree in target directory: '$SNOWBRIDGE_TARGET_DIR'"
    chmod +x $SNOWBRIDGE_TARGET_DIR/teyrchain/scripts/verify-pallets-build.sh
    $SNOWBRIDGE_TARGET_DIR/teyrchain/scripts/verify-pallets-build.sh --ignore-git-state --no-revert
}
|
||||
|
||||
# Produce snowbridge.patch: the diff of local bridges/snowbridge changes
# against the upstream snowbridge branch, excluding Cargo.toml files.
# Requires a clean git working copy.
function create_patch() {
    [[ -z "$(git status --porcelain)" ]] || {
        echo >&2 "The git copy must be clean (stash all your changes):";
        git status --porcelain
        exit 1;
    }
    echo "Creating diff patch file to apply to snowbridge. No Cargo.toml files will be included in the patch."
    git diff snowbridge/$SNOWBRIDGE_BRANCH $PEZKUWI_SDK_BRANCH:bridges/snowbridge --diff-filter=ACM -- . ':(exclude)*/Cargo.toml' > snowbridge.patch
}
|
||||
|
||||
case "$1" in
|
||||
fetch)
|
||||
fetch
|
||||
;;
|
||||
clean)
|
||||
clean
|
||||
;;
|
||||
create_patch)
|
||||
create_patch
|
||||
;;
|
||||
update)
|
||||
fetch
|
||||
clean
|
||||
;;
|
||||
esac
|
||||
Executable
+43
@@ -0,0 +1,43 @@
|
||||
#!/usr/bin/env bash
# Script for updating the UI tests for a new rust stable version.
# Exit on error
set -e

# by default current rust stable will be used
RUSTUP_RUN=""
# check if we have a parameter
# ./scripts/update-ui-tests.sh 1.70
if [ ! -z "$1" ]; then
    echo "RUST_VERSION: $1"
    # This will run all UI tests with the given rust stable version.
    # The script requires that rustup is installed.
    RUST_VERSION=$1
    RUSTUP_RUN="rustup run $RUST_VERSION"

    echo "installing rustup $RUST_VERSION"
    if ! command -v rustup &> /dev/null
    then
        echo "rustup needs to be installed"
        # Fix: a bare `exit` here returned the status of the preceding
        # `echo` (0), reporting success even though rustup is missing.
        exit 1
    fi

    rustup install $RUST_VERSION
    rustup component add rust-src --toolchain $RUST_VERSION
fi

# Ensure we run the ui tests
export RUN_UI_TESTS=1
# We don't need any wasm files for ui tests
export SKIP_WASM_BUILD=1
# Let trybuild overwrite the .stderr files
export TRYBUILD=overwrite
# Warnings are part of our UI and the CI also sets this.
export RUSTFLAGS="-C debug-assertions -D warnings"

# ./substrate
$RUSTUP_RUN cargo test -q --locked --manifest-path substrate/primitives/runtime-interface/Cargo.toml ui
$RUSTUP_RUN cargo test -q --locked -p sp-api-test ui
$RUSTUP_RUN cargo test -q --locked -p frame-election-provider-solution-type ui
$RUSTUP_RUN cargo test -q --locked -p frame-support-test --features=no-metadata-docs,try-runtime,experimental ui
$RUSTUP_RUN cargo test -q --locked -p xcm-procedural ui
||||
Reference in New Issue
Block a user