feat: initialize Kurdistan SDK - independent fork of Polkadot SDK

2025-12-13 15:44:15 +03:00
commit e4778b4576
6838 changed files with 1847450 additions and 0 deletions
@@ -0,0 +1,25 @@
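// Wait until the bridged chain's best finalized header, as reported by the
// `<bridgedChainName>FinalityApi_best_finalized` runtime API, is above the expected header number.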
async function run(nodeName, networkInfo, args) {
const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName];
const api = await zombie.connect(wsUri, userDefinedTypes);
// TODO: could be replaced with https://github.com/polkadot-js/api/issues/4930 (depends on metadata v15) later
const bridgedChainName = args[0];
const expectedBridgedChainHeaderNumber = Number(args[1]);
const runtimeApiMethod = bridgedChainName + "FinalityApi_best_finalized";
while (true) {
const encodedBestFinalizedHeaderId = await api.rpc.state.call(runtimeApiMethod, []);
const bestFinalizedHeaderId = api.createType("Option<BpRuntimeHeaderId>", encodedBestFinalizedHeaderId);
if (bestFinalizedHeaderId.isSome) {
const bestFinalizedHeaderNumber = Number(bestFinalizedHeaderId.unwrap().toHuman()[0]);
if (bestFinalizedHeaderNumber > expectedBridgedChainHeaderNumber) {
return bestFinalizedHeaderNumber;
}
}
// else sleep and retry
await new Promise((resolve) => setTimeout(resolve, 6000));
}
}
module.exports = { run }
@@ -0,0 +1,6 @@
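// Names of the bridge pallet instances that track Pezkuwichain, plus the para id of the bridged bridge hub.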
module.exports = {
grandpaPalletName: "bridgePezkuwichainGrandpa",
teyrchainsPalletName: "bridgePezkuwichainTeyrchains",
messagesPalletName: "bridgePezkuwichainMessages",
bridgedBridgeHubParaId: 1013,
}
@@ -0,0 +1,6 @@
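// Names of the bridge pallet instances that track Zagros, plus the para id of the bridged bridge hub.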
module.exports = {
grandpaPalletName: "bridgeZagrosGrandpa",
teyrchainsPalletName: "bridgeZagrosTeyrchains",
messagesPalletName: "bridgeZagrosMessages",
bridgedBridgeHubParaId: 1002,
}
@@ -0,0 +1,34 @@
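// Subscribe to new heads for `exitAfterSeconds` and count imported bridged GRANDPA and teyrchain
// headers; fail unless at least two of each have been seen.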
const utils = require("./utils");
async function run(nodeName, networkInfo, args) {
const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName];
const api = await zombie.connect(wsUri, userDefinedTypes);
// parse arguments
const exitAfterSeconds = Number(args[0]);
const bridgedChain = require("./chains/" + args[1]);
// start listening to new blocks
let totalGrandpaHeaders = 0;
let totalTeyrchainHeaders = 0;
api.rpc.chain.subscribeNewHeads(async function (header) {
const apiAtCurrent = await api.at(header.hash);
const currentEvents = await apiAtCurrent.query.system.events();
totalGrandpaHeaders += utils.countGrandpaHeaderImports(bridgedChain, currentEvents);
totalTeyrchainHeaders += utils.countTeyrchainHeaderImports(bridgedChain, currentEvents);
});
// wait given time
await new Promise(resolve => setTimeout(resolve, exitAfterSeconds * 1000));
// fail unless we have seen more than one new GRANDPA header and more than one new teyrchain header
if (totalGrandpaHeaders <= 1) {
throw new Error("Too few bridged relay chain headers imported: " + totalGrandpaHeaders);
}
if (totalTeyrchainHeaders <= 1) {
throw new Error("Too few bridged teyrchain headers imported: " + totalTeyrchainHeaders);
}
}
module.exports = { run }
@@ -0,0 +1,12 @@
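// Log and return the free native balance of the given account.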
async function run(nodeName, networkInfo, args) {
const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName];
const api = await zombie.connect(wsUri, userDefinedTypes);
const accountAddress = args[0];
const accountData = await api.query.system.account(accountAddress);
const accountBalance = accountData.data['free'];
console.log("Balance of " + accountAddress + ": " + accountBalance);
return accountBalance;
}
module.exports = {run}
@@ -0,0 +1,21 @@
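// Poll the free balance of the given account until it has grown by more than `expectedIncrease`.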
async function run(nodeName, networkInfo, args) {
const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName];
const api = await zombie.connect(wsUri, userDefinedTypes);
const accountAddress = args[0];
const expectedIncrease = BigInt(args[1]);
const initialAccountData = await api.query.system.account(accountAddress);
const initialAccountBalance = initialAccountData.data['free'].toBigInt();
while (true) {
const accountData = await api.query.system.account(accountAddress);
const accountBalance = accountData.data['free'].toBigInt();
if (accountBalance > initialAccountBalance + expectedIncrease) {
return accountBalance;
}
// else sleep and retry
await new Promise((resolve) => setTimeout(resolve, 6000));
}
}
module.exports = {run}
@@ -0,0 +1,85 @@
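// Watch new blocks and check that, while no message transactions are included, only mandatory
// GRANDPA headers and the initial teyrchain header are imported; in blocks with message
// transactions, allow at most one GRANDPA and one teyrchain header. Succeeds once at least one
// message has been received and one delivered, fails on timeout otherwise.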
const utils = require("./utils");
async function run(nodeName, networkInfo, args) {
const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName];
const api = await zombie.connect(wsUri, userDefinedTypes);
// parse arguments
const exitAfterSeconds = Number(args[0]);
const bridgedChain = require("./chains/" + args[1]);
// start listening to new blocks
let atLeastOneMessageReceived = false;
let atLeastOneMessageDelivered = false;
const unsubscribe = await api.rpc.chain.subscribeNewHeads(async function (header) {
const apiAtParent = await api.at(header.parentHash);
const apiAtCurrent = await api.at(header.hash);
const currentEvents = await apiAtCurrent.query.system.events();
const messagesReceived = currentEvents.find((record) => {
return record.event.section == bridgedChain.messagesPalletName
&& record.event.method == "MessagesReceived";
}) != undefined;
const messagesDelivered = currentEvents.find((record) => {
return record.event.section == bridgedChain.messagesPalletName &&
record.event.method == "MessagesDelivered";
}) != undefined;
const hasMessageUpdates = messagesReceived || messagesDelivered;
atLeastOneMessageReceived = atLeastOneMessageReceived || messagesReceived;
atLeastOneMessageDelivered = atLeastOneMessageDelivered || messagesDelivered;
if (!hasMessageUpdates) {
// if there aren't any message update transactions, we only expect mandatory GRANDPA
// headers and the initial teyrchain headers
await utils.ensureOnlyMandatoryGrandpaHeadersImported(
bridgedChain,
apiAtParent,
apiAtCurrent,
currentEvents,
);
await utils.ensureOnlyInitialTeyrchainHeaderImported(
bridgedChain,
apiAtParent,
apiAtCurrent,
currentEvents,
);
} else {
const messageTransactions = (messagesReceived ? 1 : 0) + (messagesDelivered ? 1 : 0);
// otherwise we only accept at most one GRANDPA header
const newGrandpaHeaders = utils.countGrandpaHeaderImports(bridgedChain, currentEvents);
if (newGrandpaHeaders > 1) {
utils.logEvents(currentEvents);
throw new Error("Unexpected relay chain header import: " + newGrandpaHeaders + " / " + messageTransactions);
}
// ...and at most one teyrchain header
const newTeyrchainHeaders = utils.countTeyrchainHeaderImports(bridgedChain, currentEvents);
if (newTeyrchainHeaders > 1) {
utils.logEvents(currentEvents);
throw new Error("Unexpected teyrchain header import: " + newTeyrchainHeaders + " / " + messageTransactions);
}
}
});
// wait until we have received + delivered messages OR until timeout
await utils.pollUntil(
exitAfterSeconds,
() => {
return atLeastOneMessageReceived && atLeastOneMessageDelivered;
},
() => {
unsubscribe();
},
() => {
if (!atLeastOneMessageReceived) {
throw new Error("No messages received from bridged chain");
}
if (!atLeastOneMessageDelivered) {
throw new Error("No messages delivered to bridged chain");
}
},
);
}
module.exports = {run}
@@ -0,0 +1,24 @@
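// Poll the `bridgeRelayers.relayerRewards` storage until the relayer's accumulated reward exceeds the expected amount.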
async function run(nodeName, networkInfo, args) {
const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName];
const api = await zombie.connect(wsUri, userDefinedTypes);
// TODO: could be replaced with https://github.com/polkadot-js/api/issues/4930 (depends on metadata v15) later
const relayerAccountAddress = args.relayerAccountAddress;
const reward = args.reward;
const expectedRelayerReward = BigInt(args.expectedRelayerReward);
console.log("Waiting rewards for relayerAccountAddress: " + relayerAccountAddress + " expecting minimal rewards at least: " + expectedRelayerReward + " for " + JSON.stringify(reward));
while (true) {
const relayerReward = await api.query.bridgeRelayers.relayerRewards(relayerAccountAddress, reward);
if (relayerReward.isSome) {
const relayerRewardBalance = relayerReward.unwrap().toBigInt();
if (relayerRewardBalance > expectedRelayerReward) {
return relayerRewardBalance;
}
}
// else sleep and retry
await new Promise((resolve) => setTimeout(resolve, 6000));
}
}
module.exports = { run }
@@ -0,0 +1,103 @@
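// Shared helpers used by the bridge zombienet JS test scripts.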
module.exports = {
logEvents: function(events) {
let stringifiedEvents = "";
events.forEach((record) => {
if (stringifiedEvents != "") {
stringifiedEvents += ", ";
}
stringifiedEvents += record.event.section + "::" + record.event.method;
});
console.log("Block events: " + stringifiedEvents);
},
countGrandpaHeaderImports: function(bridgedChain, events) {
return events.reduce(
(count, record) => {
const { event } = record;
if (event.section == bridgedChain.grandpaPalletName && event.method == "UpdatedBestFinalizedHeader") {
count += 1;
}
return count;
},
0,
);
},
countTeyrchainHeaderImports: function(bridgedChain, events) {
return events.reduce(
(count, record) => {
const { event } = record;
if (event.section == bridgedChain.teyrchainsPalletName && event.method == "UpdatedTeyrchainHead") {
count += 1;
}
return count;
},
0,
);
},
pollUntil: async function(
timeoutInSecs,
predicate,
cleanup,
onFailure,
) {
const begin = new Date().getTime();
const end = begin + timeoutInSecs * 1000;
while (new Date().getTime() < end) {
if (predicate()) {
cleanup();
return;
}
await new Promise(resolve => setTimeout(resolve, 100));
}
cleanup();
onFailure();
},
ensureOnlyMandatoryGrandpaHeadersImported: async function(
bridgedChain,
apiAtParent,
apiAtCurrent,
currentEvents,
) {
// remember id of bridged relay chain GRANDPA authorities set at parent block
const authoritySetAtParent = await apiAtParent.query[bridgedChain.grandpaPalletName].currentAuthoritySet();
const authoritySetIdAtParent = authoritySetAtParent["setId"];
// now read the id of bridged relay chain GRANDPA authorities set at current block
const authoritySetAtCurrent = await apiAtCurrent.query[bridgedChain.grandpaPalletName].currentAuthoritySet();
const authoritySetIdAtCurrent = authoritySetAtCurrent["setId"];
// we expect to see no more than `authoritySetIdAtCurrent - authoritySetIdAtParent` new GRANDPA headers
const maxNewGrandpaHeaders = authoritySetIdAtCurrent - authoritySetIdAtParent;
const newGrandpaHeaders = module.exports.countGrandpaHeaderImports(bridgedChain, currentEvents);
// check that our assumptions are correct
if (newGrandpaHeaders > maxNewGrandpaHeaders) {
module.exports.logEvents(currentEvents);
throw new Error("Unexpected relay chain header import: " + newGrandpaHeaders + " / " + maxNewGrandpaHeaders);
}
return newGrandpaHeaders;
},
ensureOnlyInitialTeyrchainHeaderImported: async function(
bridgedChain,
apiAtParent,
apiAtCurrent,
currentEvents,
) {
// remember whether we already know bridged teyrchain header at a parent block
const bestBridgedTeyrchainHeader = await apiAtParent.query[bridgedChain.teyrchainsPalletName].parasInfo(bridgedChain.bridgedBridgeHubParaId);
const hasBestBridgedTeyrchainHeader = bestBridgedTeyrchainHeader.isSome;
// we expect at most one new bridged teyrchain header, and only if no teyrchain header was known before
const maxNewTeyrchainHeaders = hasBestBridgedTeyrchainHeader ? 0 : 1;
const newTeyrchainHeaders = module.exports.countTeyrchainHeaderImports(bridgedChain, currentEvents);
// check that our assumptions are correct
if (newTeyrchainHeaders > maxNewTeyrchainHeaders) {
module.exports.logEvents(currentEvents);
throw new Error("Unexpected teyrchain header import: " + newTeyrchainHeaders + " / " + maxNewTeyrchainHeaders);
}
return hasBestBridgedTeyrchainHeader;
},
}
@@ -0,0 +1,22 @@
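// Wait until an HRMP egress channel to the given sibling teyrchain appears in the relevant messaging state.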
async function run(nodeName, networkInfo, args) {
const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName];
const api = await zombie.connect(wsUri, userDefinedTypes);
const sibling = args[0];
while (true) {
const messagingStateAsObj = await api.query.teyrchainSystem.relevantMessagingState();
const messagingState = api.createType("Option<CumulusPalletTeyrchainSystemRelayStateSnapshotMessagingStateSnapshot>", messagingStateAsObj);
if (messagingState.isSome) {
const egressChannels = messagingState.unwrap().egressChannels;
if (egressChannels.find(x => x[0] == sibling)) {
return;
}
}
// else sleep and retry
await new Promise((resolve) => setTimeout(resolve, 6000));
}
}
module.exports = { run }
@@ -0,0 +1,24 @@
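// Poll the `foreignAssets.account` storage until the account's balance of the expected asset exceeds the expected amount.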
async function run(nodeName, networkInfo, args) {
const {wsUri, userDefinedTypes} = networkInfo.nodesByName[nodeName];
const api = await zombie.connect(wsUri, userDefinedTypes);
// TODO: could be replaced with https://github.com/polkadot-js/api/issues/4930 (depends on metadata v15) later
const accountAddress = args.accountAddress;
const expectedAssetId = args.expectedAssetId;
const expectedAssetBalance = BigInt(args.expectedAssetBalance);
while (true) {
const foreignAssetAccount = await api.query.foreignAssets.account(expectedAssetId, accountAddress);
if (foreignAssetAccount.isSome) {
const foreignAssetAccountBalance = foreignAssetAccount.unwrap().balance.toBigInt();
if (foreignAssetAccountBalance > expectedAssetBalance) {
return foreignAssetAccountBalance;
}
}
// else sleep and retry
await new Promise((resolve) => setTimeout(resolve, 6000));
}
}
module.exports = { run }
@@ -0,0 +1,344 @@
#!/usr/bin/env bash
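# Shared helpers for the bridge zombienet tests: locating the substrate-relay binary, submitting
# extrinsics via `pezkuwi-js-api`, wrapping governance calls into XCM `Transact` messages, opening
# HRMP channels and preparing foreign assets and conversion pools.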
function relayer_path() {
local default_path=~/local_bridge_testing/bin/substrate-relay
local path="${SUBSTRATE_RELAY_BINARY:-$default_path}"
echo "$path"
}
function ensure_relayer() {
local path=$(relayer_path)
if [[ ! -f "$path" ]]; then
echo " Required substrate-relay binary '$path' does not exist!"
echo " You need to build it and copy to this location!"
echo " Please, check ./teyrchains/runtimes/bridge-hubs/README.md (Prepare/Build/Deploy)"
exit 1
fi
echo $path
}
function ensure_pezkuwi_js_api() {
if ! which pezkuwi-js-api &> /dev/null; then
echo ''
echo 'Required command `pezkuwi-js-api` not in PATH. Please install it, e.g.:'
echo "npm install -g @pezkuwi/api-cli@beta"
echo " or"
echo "yarn global add @pezkuwi/api-cli"
echo ''
exit 1
fi
if ! which jq &> /dev/null; then
echo ''
echo 'Required command `jq` not in PATH. Please install it, e.g.:'
echo "apt install -y jq"
echo ''
exit 1
fi
generate_hex_encoded_call_data "check" "--"
local retVal=$?
if [ $retVal -ne 0 ]; then
echo ""
echo ""
echo "-------------------"
echo "Installing (nodejs) sub module: ${BASH_SOURCE%/*}/generate_hex_encoded_call"
pushd ${BASH_SOURCE%/*}/generate_hex_encoded_call
npm install
popd
fi
}
function call_pezkuwi_js_api() {
# --noWait: without this argument `pezkuwi-js-api` waits until the transaction is included in a block;
# with it, the transaction is only submitted to the tx pool before the command exits.
# --nonce -1: compute the transaction nonce via the `system_accountNextIndex` RPC, which accounts for
# all transactions already sitting in the tx pool.
pezkuwi-js-api --nonce -1 "$@" || true
}
function generate_hex_encoded_call_data() {
local type=$1
local endpoint=$2
local output=$3
shift
shift
shift
echo "Input params: $@"
node ${BASH_SOURCE%/*}/../utils/generate_hex_encoded_call "$type" "$endpoint" "$output" "$@"
local retVal=$?
if [ $type != "check" ]; then
local hex_encoded_data=$(cat $output)
echo "Generated hex-encoded bytes to file '$output': $hex_encoded_data"
fi
return $retVal
}
function transfer_balance() {
local runtime_para_endpoint=$1
local seed=$2
local target_account=$3
local amount=$4
echo " calling transfer_balance:"
echo " runtime_para_endpoint: ${runtime_para_endpoint}"
echo " seed: ${seed}"
echo " target_account: ${target_account}"
echo " amount: ${amount}"
echo "--------------------------------------------------"
call_pezkuwi_js_api \
--ws "${runtime_para_endpoint}" \
--seed "${seed?}" \
tx.balances.transferAllowDeath \
"${target_account}" \
"${amount}"
}
function send_governance_transact() {
local relay_url=$1
local relay_chain_seed=$2
local para_id=$3
local hex_encoded_data=$4
local require_weight_at_most_ref_time=$5
local require_weight_at_most_proof_size=$6
echo " calling send_governance_transact:"
echo " relay_url: ${relay_url}"
echo " relay_chain_seed: ${relay_chain_seed}"
echo " para_id: ${para_id}"
echo " hex_encoded_data: ${hex_encoded_data}"
echo " require_weight_at_most_ref_time: ${require_weight_at_most_ref_time}"
echo " require_weight_at_most_proof_size: ${require_weight_at_most_proof_size}"
echo " params:"
local dest=$(jq --null-input \
--arg para_id "$para_id" \
'{ "V4": { "parents": 0, "interior": { "X1": [{ "Teyrchain": $para_id }] } } }')
local message=$(jq --null-input \
--argjson hex_encoded_data $hex_encoded_data \
--arg require_weight_at_most_ref_time "$require_weight_at_most_ref_time" \
--arg require_weight_at_most_proof_size "$require_weight_at_most_proof_size" \
'
{
"V4": [
{
"UnpaidExecution": {
"weight_limit": "Unlimited"
}
},
{
"Transact": {
"origin_kind": "Superuser",
"require_weight_at_most": {
"ref_time": $require_weight_at_most_ref_time,
"proof_size": $require_weight_at_most_proof_size,
},
"call": {
"encoded": $hex_encoded_data
}
}
}
]
}
')
echo ""
echo " dest:"
echo "${dest}"
echo ""
echo " message:"
echo "${message}"
echo ""
echo "--------------------------------------------------"
call_pezkuwi_js_api \
--ws "${relay_url?}" \
--seed "${relay_chain_seed?}" \
--sudo \
tx.xcmPallet.send \
"${dest}" \
"${message}"
}
function open_hrmp_channels() {
local relay_url=$1
local relay_chain_seed=$2
local sender_para_id=$3
local recipient_para_id=$4
local max_capacity=$5
local max_message_size=$6
echo " calling open_hrmp_channels:"
echo " relay_url: ${relay_url}"
echo " relay_chain_seed: ${relay_chain_seed}"
echo " sender_para_id: ${sender_para_id}"
echo " recipient_para_id: ${recipient_para_id}"
echo " max_capacity: ${max_capacity}"
echo " max_message_size: ${max_message_size}"
echo " params:"
echo "--------------------------------------------------"
call_pezkuwi_js_api \
--ws "${relay_url?}" \
--seed "${relay_chain_seed?}" \
--sudo \
tx.hrmp.forceOpenHrmpChannel \
${sender_para_id} \
${recipient_para_id} \
${max_capacity} \
${max_message_size}
}
function force_xcm_version() {
local relay_url=$1
local relay_chain_seed=$2
local runtime_para_id=$3
local runtime_para_endpoint=$4
local dest=$5
local xcm_version=$6
echo " calling force_xcm_version:"
echo " relay_url: ${relay_url}"
echo " relay_chain_seed: ${relay_chain_seed}"
echo " runtime_para_id: ${runtime_para_id}"
echo " runtime_para_endpoint: ${runtime_para_endpoint}"
echo " dest: ${dest}"
echo " xcm_version: ${xcm_version}"
echo " params:"
# 1. generate data for Transact (PezkuwiXcm::force_xcm_version)
local tmp_output_file=$(mktemp)
generate_hex_encoded_call_data "force-xcm-version" "${runtime_para_endpoint}" "${tmp_output_file}" "$dest" "$xcm_version"
local hex_encoded_data=$(cat $tmp_output_file)
# 2. trigger governance call
send_governance_transact "${relay_url}" "${relay_chain_seed}" "${runtime_para_id}" "${hex_encoded_data}" 200000000 12000
}
function force_create_foreign_asset() {
local relay_url=$1
local relay_chain_seed=$2
local runtime_para_id=$3
local runtime_para_endpoint=$4
local asset_multilocation=$5
local asset_owner_account_id=$6
local min_balance=$7
local is_sufficient=$8
echo " calling force_create_foreign_asset:"
echo " relay_url: ${relay_url}"
echo " relay_chain_seed: ${relay_chain_seed}"
echo " runtime_para_id: ${runtime_para_id}"
echo " runtime_para_endpoint: ${runtime_para_endpoint}"
echo " asset_multilocation: ${asset_multilocation}"
echo " asset_owner_account_id: ${asset_owner_account_id}"
echo " min_balance: ${min_balance}"
echo " is_sufficient: ${is_sufficient}"
echo " params:"
# 1. generate data for Transact (ForeignAssets::force_create)
local tmp_output_file=$(mktemp)
generate_hex_encoded_call_data "force-create-asset" "${runtime_para_endpoint}" "${tmp_output_file}" "$asset_multilocation" "$asset_owner_account_id" $is_sufficient $min_balance
local hex_encoded_data=$(cat $tmp_output_file)
# 2. trigger governance call
send_governance_transact "${relay_url}" "${relay_chain_seed}" "${runtime_para_id}" "${hex_encoded_data}" 200000000 12000
}
function create_pool() {
local runtime_para_endpoint=$1
local seed=$2
local native_asset_id=$3
local foreign_asset_id=$4
call_pezkuwi_js_api \
--ws "${runtime_para_endpoint?}" \
--seed "${seed?}" \
tx.assetConversion.createPool \
"${native_asset_id}" \
"${foreign_asset_id}"
}
function add_liquidity() {
local runtime_para_endpoint=$1
local seed=$2
local native_asset_id=$3
local foreign_asset_id=$4
local native_asset_amount=$5
local foreign_asset_amount=$6
local pool_owner_account_id=$7
call_pezkuwi_js_api \
--ws "${runtime_para_endpoint?}" \
--seed "${seed?}" \
tx.assetConversion.addLiquidity \
"${native_asset_id}" \
"${foreign_asset_id}" \
"${native_asset_amount}" \
"${foreign_asset_amount}" \
"1" \
"1" \
"${pool_owner_account_id}"
}
function limited_reserve_transfer_assets() {
local url=$1
local seed=$2
local destination=$3
local beneficiary=$4
local assets=$5
local fee_asset_id=$6
local weight_limit=$7
echo " calling limited_reserve_transfer_assets:"
echo " url: ${url}"
echo " seed: ${seed}"
echo " destination: ${destination}"
echo " beneficiary: ${beneficiary}"
echo " assets: ${assets}"
echo " fee_asset_id: ${fee_asset_id}"
echo " weight_limit: ${weight_limit}"
echo ""
echo "--------------------------------------------------"
call_pezkuwi_js_api \
--ws "${url?}" \
--seed "${seed?}" \
tx.pezkuwiXcm.limitedReserveTransferAssets \
"${destination}" \
"${beneficiary}" \
"${assets}" \
"${fee_asset_id}" \
"${weight_limit}"
}
function claim_rewards() {
local runtime_para_endpoint=$1
local seed=$2
local lane_id=$3
local bridged_chain_id=$4
local owner=$5
echo " calling claim_rewards:"
echo " runtime_para_endpoint: ${runtime_para_endpoint}"
echo " seed: ${seed}"
echo " lane_id: ${lane_id}"
echo " bridged_chain_id: ${bridged_chain_id}"
echo " owner: ${owner}"
echo ""
local rewards_account_params=$(jq --null-input \
--arg lane_id "$lane_id" \
--arg bridged_chain_id "$bridged_chain_id" \
--arg owner "$owner" \
'{
"laneId": $lane_id,
"bridgedChainId": $bridged_chain_id,
"owner": $owner
}')
echo " rewards_account_params:"
echo "${rewards_account_params}"
echo "--------------------------------------------------"
call_pezkuwi_js_api \
--ws "${runtime_para_endpoint}" \
--seed "${seed?}" \
tx.bridgeRelayers.claimRewards \
"${rewards_account_params}"
}
@@ -0,0 +1,45 @@
#!/usr/bin/env bash
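# Helpers for starting background processes and waiting for files they are expected to create.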
function start_background_process() {
local command=$1
local log_file=$2
local __pid=$3
$command > $log_file 2>&1 &
eval $__pid="'$!'"
}
function wait_for_process_file() {
local pid=$1
local file=$2
local timeout=$3
local __found=$4
local time=0
until [ -e $file ]; do
if ! kill -0 $pid; then
echo "Process finished unsuccessfully"
return
fi
if (( time++ >= timeout )); then
echo "Timeout waiting for file $file: $timeout seconds"
eval $__found=0
return
fi
sleep 1
done
echo "File $file found after $time seconds"
eval $__found=1
}
function ensure_process_file() {
local pid=$1
local file=$2
local timeout=$3
wait_for_process_file $pid $file $timeout file_found
if [ "$file_found" != "1" ]; then
exit 1
fi
}
@@ -0,0 +1,165 @@
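// Build extrinsics against a running node and write their SCALE-encoded call bytes to a file, so
// the bash helpers can embed them into governance `Transact` XCM messages.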
const fs = require("fs");
const { exit } = require("process");
const { WsProvider, ApiPromise } = require("@polkadot/api");
const util = require("@polkadot/util");
// connect to a substrate chain and return the api object
async function connect(endpoint, types = {}) {
const provider = new WsProvider(endpoint);
const api = await ApiPromise.create({
provider,
types,
throwOnConnect: false,
});
return api;
}
function writeHexEncodedBytesToOutput(method, outputFile) {
console.log("Payload (hex): ", method.toHex());
console.log("Payload (bytes): ", Array.from(method.toU8a()));
console.log("Payload (plain): ", JSON.stringify(method));
fs.writeFileSync(outputFile, JSON.stringify(Array.from(method.toU8a())));
}
function remarkWithEvent(endpoint, outputFile) {
console.log(`Generating remarkWithEvent from RPC endpoint: ${endpoint} to outputFile: ${outputFile}`);
connect(endpoint)
.then((api) => {
const call = api.tx.system.remarkWithEvent("Hello");
writeHexEncodedBytesToOutput(call.method, outputFile);
exit(0);
})
.catch((e) => {
console.error(e);
exit(1);
});
}
function addExporterConfig(endpoint, outputFile, bridgedNetwork, bridgeConfig) {
console.log(`Generating addExporterConfig from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on bridgedNetwork: ${bridgedNetwork}, bridgeConfig: ${bridgeConfig}`);
connect(endpoint)
.then((api) => {
const call = api.tx.bridgeTransfer.addExporterConfig(bridgedNetwork, JSON.parse(bridgeConfig));
writeHexEncodedBytesToOutput(call.method, outputFile);
exit(0);
})
.catch((e) => {
console.error(e);
exit(1);
});
}
function addUniversalAlias(endpoint, outputFile, location, junction) {
console.log(`Generating addUniversalAlias from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on location: ${location}, junction: ${junction}`);
connect(endpoint)
.then((api) => {
const call = api.tx.bridgeTransfer.addUniversalAlias(JSON.parse(location), JSON.parse(junction));
writeHexEncodedBytesToOutput(call.method, outputFile);
exit(0);
})
.catch((e) => {
console.error(e);
exit(1);
});
}
function addReserveLocation(endpoint, outputFile, reserve_location) {
console.log(`Generating addReserveLocation from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on reserve_location: ${reserve_location}`);
connect(endpoint)
.then((api) => {
const call = api.tx.bridgeTransfer.addReserveLocation(JSON.parse(reserve_location));
writeHexEncodedBytesToOutput(call.method, outputFile);
exit(0);
})
.catch((e) => {
console.error(e);
exit(1);
});
}
function removeExporterConfig(endpoint, outputFile, bridgedNetwork) {
console.log(`Generating removeExporterConfig from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on bridgedNetwork: ${bridgedNetwork}`);
connect(endpoint)
.then((api) => {
const call = api.tx.bridgeTransfer.removeExporterConfig(bridgedNetwork);
writeHexEncodedBytesToOutput(call.method, outputFile);
exit(0);
})
.catch((e) => {
console.error(e);
exit(1);
});
}
function forceCreateAsset(endpoint, outputFile, assetId, assetOwnerAccountId, isSufficient, minBalance) {
isSufficient = isSufficient === "true";
console.log(`Generating forceCreateAsset from RPC endpoint: ${endpoint} to outputFile: ${outputFile} based on assetId: ${assetId}, assetOwnerAccountId: ${assetOwnerAccountId}, isSufficient: ${isSufficient}, minBalance: ${minBalance}`);
connect(endpoint)
.then((api) => {
const call = api.tx.foreignAssets.forceCreate(JSON.parse(assetId), assetOwnerAccountId, isSufficient, minBalance);
writeHexEncodedBytesToOutput(call.method, outputFile);
exit(0);
})
.catch((e) => {
console.error(e);
exit(1);
});
}
function forceXcmVersion(endpoint, outputFile, dest, xcm_version) {
console.log(`Generating forceXcmVersion from RPC endpoint: ${endpoint} to outputFile: ${outputFile}, dest: ${dest}, xcm_version: ${xcm_version}`);
connect(endpoint)
.then((api) => {
const call = api.tx.polkadotXcm.forceXcmVersion(JSON.parse(dest), xcm_version);
writeHexEncodedBytesToOutput(call.method, outputFile);
exit(0);
})
.catch((e) => {
console.error(e);
exit(1);
});
}
if (!process.argv[2] || !process.argv[3]) {
console.log("usage: node ./script/generate_hex_encoded_call <type> <endpoint> <output hex-encoded data file> <input message>");
exit(1);
}
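// example invocation (paths and endpoint are illustrative):
//   node ./script/generate_hex_encoded_call remark-with-event ws://127.0.0.1:9944 /tmp/remark-call.json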
const type = process.argv[2];
const rpcEndpoint = process.argv[3];
const output = process.argv[4];
const inputArgs = process.argv.slice(5, process.argv.length);
console.log(`Generating hex-encoded call data for:`);
console.log(` type: ${type}`);
console.log(` rpcEndpoint: ${rpcEndpoint}`);
console.log(` output: ${output}`);
console.log(` inputArgs: ${inputArgs}`);
switch (type) {
case 'remark-with-event':
remarkWithEvent(rpcEndpoint, output);
break;
case 'add-exporter-config':
addExporterConfig(rpcEndpoint, output, inputArgs[0], inputArgs[1]);
break;
case 'remove-exporter-config':
removeExporterConfig(rpcEndpoint, output, inputArgs[0], inputArgs[1]);
break;
case 'add-universal-alias':
addUniversalAlias(rpcEndpoint, output, inputArgs[0], inputArgs[1]);
break;
case 'add-reserve-location':
addReserveLocation(rpcEndpoint, output, inputArgs[0]);
break;
case 'force-create-asset':
forceCreateAsset(rpcEndpoint, output, inputArgs[0], inputArgs[1], inputArgs[2], inputArgs[3]);
break;
case 'force-xcm-version':
forceXcmVersion(rpcEndpoint, output, inputArgs[0], inputArgs[1]);
break;
case 'check':
console.log(`Checking nodejs installation; if you see this, everything is ready!`);
break;
default:
console.log(`Sorry, we are out of ${type} - not yet supported!`);
}
@@ -0,0 +1,11 @@
{
"name": "y",
"version": "y",
"description": "create a scale hex-encoded call values from given message",
"main": "index.js",
"license": "MIT",
"dependencies": {
"@pezkuwi/api": "^14.0",
"@pezkuwi/util": "^13.1"
}
}
@@ -0,0 +1,39 @@
#!/usr/bin/env bash
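# Helpers for spawning a zombienet network from a TOML definition and running ZNDSL test files against it.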
source "${BASH_SOURCE%/*}/common.sh"
function start_zombienet() {
local test_dir=$1
local definition_path=$2
local __zombienet_dir=$3
local __zombienet_pid=$4
local zombienet_name=`basename $definition_path .toml`
local zombienet_dir=$test_dir/$zombienet_name
eval $__zombienet_dir="'$zombienet_dir'"
mkdir -p $zombienet_dir
rm -rf $zombienet_dir
local logs_dir=$test_dir/logs
mkdir -p $logs_dir
local zombienet_log=$logs_dir/$zombienet_name.log
echo "Starting $zombienet_name zombienet. Logs available at: $zombienet_log"
start_background_process \
"$ZOMBIENET_BINARY spawn --dir $zombienet_dir --provider native $definition_path" \
"$zombienet_log" zombienet_pid
ensure_process_file $zombienet_pid "$zombienet_dir/zombie.json" 180
echo "$zombienet_name zombienet started successfully"
eval $__zombienet_pid="'$zombienet_pid'"
}
function run_zndsl() {
local zndsl_file=$1
local zombienet_dir=$2
echo "Running $zndsl_file."
$ZOMBIENET_BINARY test --dir $zombienet_dir --provider native $zndsl_file $zombienet_dir/zombie.json
echo
}