feat: initialize Kurdistan SDK - independent fork of Polkadot SDK

This commit is contained in:
2025-12-13 15:44:15 +03:00
commit e4778b4576
6838 changed files with 1847450 additions and 0 deletions
+344
View File
@@ -0,0 +1,344 @@
#!/usr/bin/env bash
# Resolve the substrate-relay binary location.
# Honours SUBSTRATE_RELAY_BINARY when set and non-empty; otherwise falls
# back to the conventional local bridge-testing path. Prints the result
# on stdout.
function relayer_path() {
  local fallback=~/local_bridge_testing/bin/substrate-relay
  printf '%s\n' "${SUBSTRATE_RELAY_BINARY:-$fallback}"
}
# Verify the substrate-relay binary exists at the resolved path.
# On success prints the path on stdout; otherwise prints build/deploy
# instructions and terminates the whole script.
function ensure_relayer() {
  # Split declaration from assignment so the substitution's exit status
  # is not masked by `local`.
  local path
  path=$(relayer_path)
  if [[ ! -f "$path" ]]; then
    echo " Required substrate-relay binary '$path' does not exist!"
    echo " You need to build it and copy to this location!"
    echo " Please, check ./teyrchains/runtimes/bridge-hubs/README.md (Prepare/Build/Deploy)"
    exit 1
  fi
  # BUGFIX: quoted — previously an unquoted $path was word-split/globbed.
  echo "$path"
}
# Ensure the `pezkuwi-js-api` CLI and `jq` are installed, and that the
# bundled nodejs helper (generate_hex_encoded_call) has its npm
# dependencies installed. Exits the script when a required tool is missing.
function ensure_pezkuwi_js_api() {
  # `command -v` is the portable, builtin replacement for `which`.
  if ! command -v pezkuwi-js-api &> /dev/null; then
    echo ''
    echo 'Required command `pezkuwi-js-api` not in PATH, please, install, e.g.:'
    echo "npm install -g @pezkuwi/api-cli@beta"
    echo " or"
    echo "yarn global add @pezkuwi/api-cli"
    echo ''
    exit 1
  fi
  if ! command -v jq &> /dev/null; then
    echo ''
    echo 'Required command `jq` not in PATH, please, install, e.g.:'
    echo "apt install -y jq"
    echo ''
    exit 1
  fi
  # Probe the nodejs helper; a non-zero status means its npm dependencies
  # are missing, so install them next to this script.
  generate_hex_encoded_call_data "check" "--"
  local retVal=$?
  if [ $retVal -ne 0 ]; then
    echo ""
    echo ""
    echo "-------------------"
    echo "Installing (nodejs) sub module: ${BASH_SOURCE%/*}/generate_hex_encoded_call"
    # Quoted + guarded: a path with spaces or a failed cd previously went
    # unnoticed and `npm install` ran in the wrong directory.
    pushd "${BASH_SOURCE%/*}/generate_hex_encoded_call" || exit 1
    npm install
    popd || exit 1
  fi
}
# Thin wrapper around the `pezkuwi-js-api` CLI.
#
# --nonce -1: compute the transaction nonce via the
#   `system_accountNextIndex` RPC, which also counts transactions still
#   sitting in the tx pool.
# Note: without `--noWait` the CLI blocks until the transaction is included
#   in a block; with it, the CLI only submits to the tx pool and exits.
# Failures are deliberately swallowed so one rejected transaction does not
# abort a longer bootstrap sequence.
function call_pezkuwi_js_api() {
  if ! pezkuwi-js-api --nonce -1 "$@"; then
    : # best-effort by design — ignore submission failures
  fi
}
# Generate SCALE/hex-encoded call data via the nodejs helper.
#
# Arguments:
#   $1 - call type understood by the helper (e.g. "force-xcm-version",
#        or "check" to merely probe the nodejs installation)
#   $2 - RPC endpoint of the target runtime
#   $3 - output file the helper writes the encoded bytes to
#   $@ - remaining args forwarded verbatim to the helper
# Returns the helper's exit status.
function generate_hex_encoded_call_data() {
  local type=$1
  local endpoint=$2
  local output=$3
  # Drop the (up to) three captured args in one step; the arithmetic
  # tolerates the two-argument "check" probe.
  shift $(( $# < 3 ? $# : 3 ))
  echo "Input params: $*"
  # Quoted helper path — an unquoted ${BASH_SOURCE%/*} broke on spaces.
  node "${BASH_SOURCE%/*}/../utils/generate_hex_encoded_call" "$type" "$endpoint" "$output" "$@"
  local retVal=$?
  # The "check" probe writes nothing, so only echo real payloads.
  if [[ "$type" != "check" ]]; then
    local hex_encoded_data
    hex_encoded_data=$(cat "$output")
    echo "Generated hex-encoded bytes to file '$output': $hex_encoded_data"
  fi
  return $retVal
}
# Transfer native balance to an account on the given chain via the
# `balances.transferAllowDeath` extrinsic.
#   $1 - websocket endpoint of the runtime
#   $2 - seed of the sending (funding) account
#   $3 - beneficiary account id
#   $4 - amount to send
function transfer_balance() {
  local endpoint=$1
  local signer_seed=$2
  local beneficiary=$3
  local value=$4
  echo " calling transfer_balance:"
  echo " runtime_para_endpoint: ${endpoint}"
  echo " seed: ${signer_seed}"
  echo " target_account: ${beneficiary}"
  echo " amount: ${value}"
  echo "--------------------------------------------------"
  call_pezkuwi_js_api \
    --ws "${endpoint}" \
    --seed "${signer_seed?}" \
    tx.balances.transferAllowDeath \
    "${beneficiary}" \
    "${value}"
}
# Submit a sudo XCM `Transact` from the relay chain to a teyrchain,
# carrying a pre-encoded runtime call.
#   $1 - relay chain websocket endpoint
#   $2 - seed of the relay-chain sudo account
#   $3 - destination teyrchain id
#   $4 - encoded call data (JSON byte array produced by
#        generate_hex_encoded_call_data)
#   $5 - Transact require_weight_at_most: ref_time
#   $6 - Transact require_weight_at_most: proof_size
function send_governance_transact() {
  local relay_url=$1
  local relay_chain_seed=$2
  local para_id=$3
  local hex_encoded_data=$4
  local require_weight_at_most_ref_time=$5
  local require_weight_at_most_proof_size=$6
  echo " calling send_governance_transact:"
  echo " relay_url: ${relay_url}"
  echo " relay_chain_seed: ${relay_chain_seed}"
  echo " para_id: ${para_id}"
  echo " hex_encoded_data: ${hex_encoded_data}"
  echo " require_weight_at_most_ref_time: ${require_weight_at_most_ref_time}"
  echo " require_weight_at_most_proof_size: ${require_weight_at_most_proof_size}"
  echo " params:"
  local dest
  dest=$(jq --null-input \
    --arg para_id "$para_id" \
    '{ "V4": { "parents": 0, "interior": { "X1": [{ "Teyrchain": $para_id }] } } }')
  local message
  # BUGFIX: the payload is a JSON byte array such as "[1,2,3]"; unquoted it
  # underwent shell glob expansion ([...] is a pattern). Also dropped the
  # trailing comma after "proof_size" — jq's object syntax rejects it.
  message=$(jq --null-input \
    --argjson hex_encoded_data "$hex_encoded_data" \
    --arg require_weight_at_most_ref_time "$require_weight_at_most_ref_time" \
    --arg require_weight_at_most_proof_size "$require_weight_at_most_proof_size" \
    '
    {
      "V4": [
        {
          "UnpaidExecution": {
            "weight_limit": "Unlimited"
          }
        },
        {
          "Transact": {
            "origin_kind": "Superuser",
            "require_weight_at_most": {
              "ref_time": $require_weight_at_most_ref_time,
              "proof_size": $require_weight_at_most_proof_size
            },
            "call": {
              "encoded": $hex_encoded_data
            }
          }
        }
      ]
    }
    ')
  echo ""
  echo " dest:"
  echo "${dest}"
  echo ""
  echo " message:"
  echo "${message}"
  echo ""
  echo "--------------------------------------------------"
  call_pezkuwi_js_api \
    --ws "${relay_url?}" \
    --seed "${relay_chain_seed?}" \
    --sudo \
    tx.xcmPallet.send \
    "${dest}" \
    "${message}"
}
# Force-open an HRMP channel between two teyrchains via relay-chain sudo.
#   $1 - relay chain websocket endpoint
#   $2 - seed of the relay-chain sudo account
#   $3 - sender teyrchain id
#   $4 - recipient teyrchain id
#   $5 - channel max capacity
#   $6 - channel max message size
function open_hrmp_channels() {
  local relay_url=$1
  local relay_chain_seed=$2
  local sender_para_id=$3
  local recipient_para_id=$4
  local max_capacity=$5
  local max_message_size=$6
  echo " calling open_hrmp_channels:"
  echo " relay_url: ${relay_url}"
  echo " relay_chain_seed: ${relay_chain_seed}"
  echo " sender_para_id: ${sender_para_id}"
  echo " recipient_para_id: ${recipient_para_id}"
  echo " max_capacity: ${max_capacity}"
  echo " max_message_size: ${max_message_size}"
  echo " params:"
  echo "--------------------------------------------------"
  # BUGFIX: extrinsic arguments are now quoted — unquoted, an empty value
  # silently dropped its positional argument and shifted the rest.
  call_pezkuwi_js_api \
    --ws "${relay_url?}" \
    --seed "${relay_chain_seed?}" \
    --sudo \
    tx.hrmp.forceOpenHrmpChannel \
    "${sender_para_id}" \
    "${recipient_para_id}" \
    "${max_capacity}" \
    "${max_message_size}"
}
# Force-set the supported XCM version for a destination on a teyrchain,
# going through relay-chain governance (sudo + XCM Transact).
#   $1 - relay chain websocket endpoint
#   $2 - seed of the relay-chain sudo account
#   $3 - id of the teyrchain whose runtime executes the call
#   $4 - websocket endpoint of that teyrchain (used only to encode the call)
#   $5 - destination multilocation (JSON)
#   $6 - XCM version to force
function force_xcm_version() {
  local relay_url=$1
  local relay_chain_seed=$2
  local runtime_para_id=$3
  local runtime_para_endpoint=$4
  local dest=$5
  local xcm_version=$6
  echo " calling force_xcm_version:"
  echo " relay_url: ${relay_url}"
  echo " relay_chain_seed: ${relay_chain_seed}"
  echo " runtime_para_id: ${runtime_para_id}"
  echo " runtime_para_endpoint: ${runtime_para_endpoint}"
  echo " dest: ${dest}"
  echo " xcm_version: ${xcm_version}"
  echo " params:"
  # 1. generate data for Transact (PezkuwiXcm::force_xcm_version)
  # Declaration split from assignment so mktemp's exit status isn't
  # masked by `local`; tmp file path quoted throughout.
  local tmp_output_file
  tmp_output_file=$(mktemp)
  generate_hex_encoded_call_data "force-xcm-version" "${runtime_para_endpoint}" "${tmp_output_file}" "$dest" "$xcm_version"
  local hex_encoded_data
  hex_encoded_data=$(cat "$tmp_output_file")
  # 2. trigger governance call
  send_governance_transact "${relay_url}" "${relay_chain_seed}" "${runtime_para_id}" "${hex_encoded_data}" 200000000 12000
}
# Force-create a foreign asset on a teyrchain via relay-chain governance
# (sudo + XCM Transact encoding ForeignAssets::force_create).
#   $1 - relay chain websocket endpoint
#   $2 - seed of the relay-chain sudo account
#   $3 - id of the teyrchain whose runtime executes the call
#   $4 - websocket endpoint of that teyrchain (used only to encode the call)
#   $5 - asset multilocation (JSON)
#   $6 - asset owner account id
#   $7 - asset minimum balance
#   $8 - "true"/"false": whether the asset is sufficient
function force_create_foreign_asset() {
  local relay_url=$1
  local relay_chain_seed=$2
  local runtime_para_id=$3
  local runtime_para_endpoint=$4
  local asset_multilocation=$5
  local asset_owner_account_id=$6
  local min_balance=$7
  local is_sufficient=$8
  echo " calling force_create_foreign_asset:"
  echo " relay_url: ${relay_url}"
  echo " relay_chain_seed: ${relay_chain_seed}"
  echo " runtime_para_id: ${runtime_para_id}"
  echo " runtime_para_endpoint: ${runtime_para_endpoint}"
  echo " asset_multilocation: ${asset_multilocation}"
  echo " asset_owner_account_id: ${asset_owner_account_id}"
  echo " min_balance: ${min_balance}"
  echo " is_sufficient: ${is_sufficient}"
  echo " params:"
  # 1. generate data for Transact (ForeignAssets::force_create)
  # NOTE: the helper expects is_sufficient BEFORE min_balance; now quoted so
  # an empty value cannot silently shift the argument positions.
  local tmp_output_file
  tmp_output_file=$(mktemp)
  generate_hex_encoded_call_data "force-create-asset" "${runtime_para_endpoint}" "${tmp_output_file}" "$asset_multilocation" "$asset_owner_account_id" "$is_sufficient" "$min_balance"
  local hex_encoded_data
  hex_encoded_data=$(cat "$tmp_output_file")
  # 2. trigger governance call
  send_governance_transact "${relay_url}" "${relay_chain_seed}" "${runtime_para_id}" "${hex_encoded_data}" 200000000 12000
}
# Create an asset-conversion pool pairing the native asset with a
# foreign asset.
#   $1 - websocket endpoint of the runtime
#   $2 - seed of the signing account
#   $3 - native asset id
#   $4 - foreign asset id
function create_pool() {
  local endpoint=$1
  local signer_seed=$2
  local native_id=$3
  local foreign_id=$4
  call_pezkuwi_js_api \
    --ws "${endpoint?}" \
    --seed "${signer_seed?}" \
    tx.assetConversion.createPool \
    "${native_id}" \
    "${foreign_id}"
}
# Add liquidity to an existing asset-conversion pool.
#   $1 - websocket endpoint of the runtime
#   $2 - seed of the signing account
#   $3 - native asset id
#   $4 - foreign asset id
#   $5 - amount of the native asset to deposit
#   $6 - amount of the foreign asset to deposit
#   $7 - account receiving the LP tokens
function add_liquidity() {
  local endpoint=$1
  local signer_seed=$2
  local native_id=$3
  local foreign_id=$4
  local native_amount=$5
  local foreign_amount=$6
  local lp_recipient=$7
  # The two literal "1"s are the minimum accepted amounts for each side
  # (i.e. effectively no slippage protection).
  call_pezkuwi_js_api \
    --ws "${endpoint?}" \
    --seed "${signer_seed?}" \
    tx.assetConversion.addLiquidity \
    "${native_id}" \
    "${foreign_id}" \
    "${native_amount}" \
    "${foreign_amount}" \
    "1" \
    "1" \
    "${lp_recipient}"
}
# Submit a `pezkuwiXcm.limitedReserveTransferAssets` extrinsic.
#   $1 - websocket endpoint to submit to
#   $2 - seed of the signing account
#   $3 - destination multilocation (JSON)
#   $4 - beneficiary multilocation (JSON)
#   $5 - assets to transfer (JSON)
#   $6 - index of the fee-paying asset
#   $7 - weight limit (JSON)
function limited_reserve_transfer_assets() {
  local ws_url=$1
  local signer_seed=$2
  local dest=$3
  local beneficiary=$4
  local assets=$5
  local fee_asset=$6
  local weight_limit=$7
  echo " calling limited_reserve_transfer_assets:"
  echo " url: ${ws_url}"
  echo " seed: ${signer_seed}"
  echo " destination: ${dest}"
  echo " beneficiary: ${beneficiary}"
  echo " assets: ${assets}"
  echo " fee_asset_id: ${fee_asset}"
  echo " weight_limit: ${weight_limit}"
  echo ""
  echo "--------------------------------------------------"
  call_pezkuwi_js_api \
    --ws "${ws_url?}" \
    --seed "${signer_seed?}" \
    tx.pezkuwiXcm.limitedReserveTransferAssets \
    "${dest}" \
    "${beneficiary}" \
    "${assets}" \
    "${fee_asset}" \
    "${weight_limit}"
}
# Claim accumulated relayer rewards via `bridgeRelayers.claimRewards`.
#   $1 - websocket endpoint of the runtime
#   $2 - seed of the claiming (relayer) account
#   $3 - lane id
#   $4 - bridged chain id
#   $5 - rewards owner
function claim_rewards() {
  local endpoint=$1
  local signer_seed=$2
  local lane=$3
  local chain_id=$4
  local rewards_owner=$5
  echo " calling claim_rewards:"
  echo " runtime_para_endpoint: ${endpoint}"
  echo " seed: ${signer_seed}"
  echo " lane_id: ${lane}"
  echo " bridged_chain_id: ${chain_id}"
  echo " owner: ${rewards_owner}"
  echo ""
  # Build the RewardsAccountParams struct the pallet expects.
  local rewards_account_params
  rewards_account_params=$(jq --null-input \
    --arg lane_id "$lane" \
    --arg bridged_chain_id "$chain_id" \
    --arg owner "$rewards_owner" \
    '{
    "laneId": $lane_id,
    "bridgedChainId": $bridged_chain_id,
    "owner": $owner
    }')
  echo " rewards_account_params:"
  echo "${rewards_account_params}"
  echo "--------------------------------------------------"
  call_pezkuwi_js_api \
    --ws "${endpoint}" \
    --seed "${signer_seed?}" \
    tx.bridgeRelayers.claimRewards \
    "${rewards_account_params}"
}
+45
View File
@@ -0,0 +1,45 @@
#!/usr/bin/env bash
# Launch a command in the background, redirecting stdout+stderr to a log
# file, and store its PID in the variable named by $3 (out-param).
#   $1 - full command line (word-split on purpose: program + arguments)
#   $2 - log file path
#   $3 - NAME of the variable that receives the background PID
function start_background_process() {
  local command=$1
  local log_file=$2
  local __pid_var=$3
  # $command is intentionally unquoted so a multi-word command line is
  # split into program and arguments; the log path is quoted.
  $command > "$log_file" 2>&1 &
  # printf -v assigns the out-param without eval's quoting pitfalls.
  printf -v "$__pid_var" '%s' "$!"
}
# Wait (up to $3 seconds) for a file to appear while a process stays alive.
# Stores "1" in the out-param named by $4 when the file shows up, "0"
# otherwise (timeout, or the watched process died first).
#   $1 - PID to watch
#   $2 - file to wait for
#   $3 - timeout in seconds
#   $4 - NAME of the result variable (out-param)
function wait_for_process_file() {
  local pid=$1
  local file=$2
  local timeout=$3
  local __found_var=$4
  local elapsed=0
  until [ -e "$file" ]; do
    if ! kill -0 "$pid" 2> /dev/null; then
      echo "Process finished unsuccessfully"
      # BUGFIX: previously the out-param was left untouched on this path,
      # so a stale "1" from an earlier call could mask the failure.
      printf -v "$__found_var" '0'
      return
    fi
    if (( elapsed++ >= timeout )); then
      echo "Timeout waiting for file $file: $timeout seconds"
      printf -v "$__found_var" '0'
      return
    fi
    sleep 1
  done
  echo "File $file found after $elapsed seconds"
  printf -v "$__found_var" '1'
}
# Wait for process $1 to produce file $2 within $3 seconds; abort the
# whole script when it does not appear.
function ensure_process_file() {
  local pid=$1
  local file=$2
  local timeout=$3
  wait_for_process_file "$pid" "$file" "$timeout" file_found
  [ "$file_found" = "1" ] || exit 1
}
@@ -0,0 +1,165 @@
const fs = require("fs");
const { exit } = require("process");
const { WsProvider, ApiPromise } = require("@polkadot/api");
const util = require("@polkadot/util");
// connect to a substrate chain and return the api object
// Connect to a substrate chain and resolve with the ApiPromise instance.
// `types` lets callers register additional custom types.
async function connect(endpoint, types = {}) {
	const provider = new WsProvider(endpoint);
	// throwOnConnect: false — do not reject on a failed initial connection.
	return ApiPromise.create({ provider, types, throwOnConnect: false });
}
function writeHexEncodedBytesToOutput(method, outputFile) {
console.log("Payload (hex): ", method.toHex());
console.log("Payload (bytes): ", Array.from(method.toU8a()));
console.log("Payload (plain): ", JSON.stringify(method));
fs.writeFileSync(outputFile, JSON.stringify(Array.from(method.toU8a())));
}
// Encode a `system.remarkWithEvent("Hello")` call and dump its bytes to
// `outputFile`. Terminates the process: 0 on success, 1 on any failure.
function remarkWithEvent(endpoint, outputFile) {
	console.log(`Generating remarkWithEvent from RPC endpoint: ${endpoint} to outputFile: ${outputFile}`);
	(async () => {
		const api = await connect(endpoint);
		const call = api.tx.system.remarkWithEvent("Hello");
		writeHexEncodedBytesToOutput(call.method, outputFile);
		exit(0);
	})().catch((err) => {
		console.error(err);
		exit(1);
	});
}
// Encode a `bridgeTransfer.addExporterConfig` call (bridgeConfig is a JSON
// string) and dump its bytes to `outputFile`. Exits 0 on success, 1 on error.
function addExporterConfig(endpoint, outputFile, bridgedNetwork, bridgeConfig) {
	console.log(`Generating addExporterConfig from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on bridgedNetwork: ${bridgedNetwork}, bridgeConfig: ${bridgeConfig}`);
	(async () => {
		const api = await connect(endpoint);
		const call = api.tx.bridgeTransfer.addExporterConfig(bridgedNetwork, JSON.parse(bridgeConfig));
		writeHexEncodedBytesToOutput(call.method, outputFile);
		exit(0);
	})().catch((err) => {
		console.error(err);
		exit(1);
	});
}
// Encode a `bridgeTransfer.addUniversalAlias` call (both args are JSON
// strings) and dump its bytes to `outputFile`. Exits 0 on success, 1 on error.
function addUniversalAlias(endpoint, outputFile, location, junction) {
	console.log(`Generating addUniversalAlias from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on location: ${location}, junction: ${junction}`);
	(async () => {
		const api = await connect(endpoint);
		const call = api.tx.bridgeTransfer.addUniversalAlias(JSON.parse(location), JSON.parse(junction));
		writeHexEncodedBytesToOutput(call.method, outputFile);
		exit(0);
	})().catch((err) => {
		console.error(err);
		exit(1);
	});
}
// Encode a `bridgeTransfer.addReserveLocation` call (reserve_location is a
// JSON string) and dump its bytes to `outputFile`. Exits 0 on success, 1 on error.
function addReserveLocation(endpoint, outputFile, reserve_location) {
	console.log(`Generating addReserveLocation from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on reserve_location: ${reserve_location}`);
	(async () => {
		const api = await connect(endpoint);
		const call = api.tx.bridgeTransfer.addReserveLocation(JSON.parse(reserve_location));
		writeHexEncodedBytesToOutput(call.method, outputFile);
		exit(0);
	})().catch((err) => {
		console.error(err);
		exit(1);
	});
}
// Encode a `bridgeTransfer.removeExporterConfig` call and dump its bytes to
// `outputFile`. Exits 0 on success, 1 on error.
function removeExporterConfig(endpoint, outputFile, bridgedNetwork) {
	console.log(`Generating removeExporterConfig from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on bridgedNetwork: ${bridgedNetwork}`);
	(async () => {
		const api = await connect(endpoint);
		const call = api.tx.bridgeTransfer.removeExporterConfig(bridgedNetwork);
		writeHexEncodedBytesToOutput(call.method, outputFile);
		exit(0);
	})().catch((err) => {
		console.error(err);
		exit(1);
	});
}
// Encode a `foreignAssets.forceCreate` call and dump its bytes to `outputFile`.
// `assetId` is a JSON-encoded multilocation; `isSufficient` arrives from the
// shell as the string "true"/"false" and is coerced to a real boolean.
// Exits the process: 0 on success, 1 on any failure.
function forceCreateAsset(endpoint, outputFile, assetId, assetOwnerAccountId, isSufficient, minBalance) {
	// Drop the shadowing `var isSufficient = ...` redeclaration of the
	// parameter; a plain reassignment with strict comparison does the same.
	isSufficient = isSufficient === "true";
	console.log(`Generating forceCreateAsset from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on assetId: ${assetId}, assetOwnerAccountId: ${assetOwnerAccountId}, isSufficient: ${isSufficient}, minBalance: ${minBalance}`);
	connect(endpoint)
		.then((api) => {
			const call = api.tx.foreignAssets.forceCreate(JSON.parse(assetId), assetOwnerAccountId, isSufficient, minBalance);
			writeHexEncodedBytesToOutput(call.method, outputFile);
			exit(0);
		})
		.catch((e) => {
			console.error(e);
			exit(1);
		});
}
// Encode a `pezkuwiXcm.forceXcmVersion` call (dest is a JSON multilocation)
// and dump its bytes to `outputFile`. Exits 0 on success, 1 on any failure.
function forceXcmVersion(endpoint, outputFile, dest, xcm_version) {
	console.log(`Generating forceXcmVersion from RPC endpoint: ${endpoint} to outputFile: ${outputFile}, dest: ${dest}, xcm_version: ${xcm_version}`);
	connect(endpoint)
		.then((api) => {
			// CONSISTENCY FIX: the rest of this fork's tooling targets the renamed
			// `pezkuwiXcm` pallet (e.g. tx.pezkuwiXcm.limitedReserveTransferAssets
			// in the shell helpers); `polkadotXcm` looks like an upstream leftover.
			// NOTE(review): confirm against the runtime metadata pallet name.
			const call = api.tx.pezkuwiXcm.forceXcmVersion(JSON.parse(dest), xcm_version);
			writeHexEncodedBytesToOutput(call.method, outputFile);
			exit(0);
		})
		.catch((e) => {
			console.error(e);
			exit(1);
		});
}
// --- CLI entry point --------------------------------------------------------
// usage: node ./generate_hex_encoded_call <type> <endpoint> <output file> [args...]
if (!process.argv[2] || !process.argv[3]) {
	console.log("usage: node ./script/generate_hex_encoded_call <type> <endpoint> <output hex-encoded data file> <input message>");
	exit(1);
}
const type = process.argv[2];
const rpcEndpoint = process.argv[3];
const output = process.argv[4];
const inputArgs = process.argv.slice(5, process.argv.length);
console.log(`Generating hex-encoded call data for:`);
console.log(` type: ${type}`);
console.log(` rpcEndpoint: ${rpcEndpoint}`);
console.log(` output: ${output}`);
console.log(` inputArgs: ${inputArgs}`);
// Dispatch table mapping each supported <type> to its generator.
const handlers = {
	"remark-with-event": () => remarkWithEvent(rpcEndpoint, output),
	"add-exporter-config": () => addExporterConfig(rpcEndpoint, output, inputArgs[0], inputArgs[1]),
	"remove-exporter-config": () => removeExporterConfig(rpcEndpoint, output, inputArgs[0], inputArgs[1]),
	"add-universal-alias": () => addUniversalAlias(rpcEndpoint, output, inputArgs[0], inputArgs[1]),
	"add-reserve-location": () => addReserveLocation(rpcEndpoint, output, inputArgs[0]),
	"force-create-asset": () => forceCreateAsset(rpcEndpoint, output, inputArgs[0], inputArgs[1], inputArgs[2], inputArgs[3]),
	"force-xcm-version": () => forceXcmVersion(rpcEndpoint, output, inputArgs[0], inputArgs[1]),
	"check": () => console.log(`Checking nodejs installation, if you see this everything is ready!`),
};
const handler = handlers[type];
if (handler) {
	handler();
} else {
	console.log(`Sorry, we are out of ${type} - not yet supported!`);
}
@@ -0,0 +1,11 @@
{
"name": "generate-hex-encoded-call",
"version": "0.1.0",
"description": "create a scale hex-encoded call values from given message",
"main": "index.js",
"license": "MIT",
"dependencies": {
"@pezkuwi/api": "^14.0",
"@pezkuwi/util": "^13.1"
}
}
@@ -0,0 +1,39 @@
#!/usr/bin/env bash
source "${BASH_SOURCE%/*}/common.sh"
# Spawn a zombienet from a toml definition and wait until it is up.
#   $1 - test directory (scratch space for network state and logs)
#   $2 - path to the zombienet .toml definition
#   $3 - NAME of the variable receiving the zombienet state dir (out-param)
#   $4 - NAME of the variable receiving the zombienet PID (out-param)
# Aborts the script (via ensure_process_file) if zombie.json does not
# appear within 180 seconds.
function start_zombienet() {
  local test_dir=$1
  local definition_path=$2
  local __zombienet_dir_var=$3
  local __zombienet_pid_var=$4
  local zombienet_name
  # $(...) replaces the legacy backticks; args quoted.
  zombienet_name=$(basename "$definition_path" .toml)
  local zombienet_dir=$test_dir/$zombienet_name
  printf -v "$__zombienet_dir_var" '%s' "$zombienet_dir"
  # Create parents, then remove the leaf so it starts absent — presumably
  # zombienet wants a fresh state dir; TODO confirm. The :? guard aborts
  # instead of running `rm -rf` on an empty path.
  mkdir -p "$zombienet_dir"
  rm -rf "${zombienet_dir:?}"
  local logs_dir=$test_dir/logs
  mkdir -p "$logs_dir"
  local zombienet_log=$logs_dir/$zombienet_name.log
  echo "Starting $zombienet_name zombienet. Logs available at: $zombienet_log"
  start_background_process \
    "$ZOMBIENET_BINARY spawn --dir $zombienet_dir --provider native $definition_path" \
    "$zombienet_log" zombienet_pid
  ensure_process_file "$zombienet_pid" "$zombienet_dir/zombie.json" 180
  echo "$zombienet_name zombienet started successfully"
  printf -v "$__zombienet_pid_var" '%s' "$zombienet_pid"
}
# Run a single .zndsl test against an already-running zombienet.
#   $1 - path to the .zndsl test file
#   $2 - zombienet state dir (must contain zombie.json)
function run_zndsl() {
  local zndsl_file=$1
  local zombienet_dir=$2
  echo "Running $zndsl_file."
  # $ZOMBIENET_BINARY stays unquoted on purpose (it may carry arguments);
  # all file/dir arguments are now quoted.
  $ZOMBIENET_BINARY test --dir "$zombienet_dir" --provider native "$zndsl_file" "$zombienet_dir/zombie.json"
  echo
}