feat: initial Pezkuwi Apps rebrand from polkadot-apps

Rebranded terminology:
- Polkadot → Pezkuwi
- Kusama → Dicle
- Westend → Zagros
- Rococo → PezkuwiChain
- Substrate → Bizinikiwi
- parachain → teyrchain

Custom logos with Kurdistan brand colors (#e6007a → #86e62a):
- bizinikiwi-hexagon.svg
- sora-bizinikiwi.svg
- hezscanner.svg
- heztreasury.svg
- pezkuwiscan.svg
- pezkuwistats.svg
- pezkuwiassembly.svg
- pezkuwiholic.svg
This commit is contained in:
2026-01-07 13:05:27 +03:00
commit d21bfb1320
5867 changed files with 329019 additions and 0 deletions
+27
View File
@@ -0,0 +1,27 @@
// Copyright 2017-2025 @polkadot/apps authors & contributors
// SPDX-License-Identifier: Apache-2.0
const fs = require('node:fs');
const path = require('node:path');
module.exports = function findPackages () {
const pkgRoot = path.join(__dirname, '..', 'packages');
return fs
.readdirSync(pkgRoot)
.filter((entry) => {
const pkgPath = path.join(pkgRoot, entry);
return !['.', '..'].includes(entry) &&
fs.lstatSync(pkgPath).isDirectory() &&
fs.existsSync(path.join(pkgPath, 'package.json'));
})
.map((dir) => {
const jsonPath = path.join(pkgRoot, dir, 'package.json');
const { name } = JSON.parse(
fs.readFileSync(jsonPath).toString('utf-8')
);
return { dir, name };
});
};
+102
View File
@@ -0,0 +1,102 @@
// Copyright 2017-2025 @polkadot/apps authors & contributors
// SPDX-License-Identifier: Apache-2.0
const fs = require('node:fs');
const path = require('node:path');
// Keys present in each English (reference) namespace file, keyed by file
// name. Populated once by initDefault() and read-only afterwards.
/** @type {Record<string, string[]>} */
const defaults = {};
// Root folder containing one sub-directory per supported language.
const i18nRoot = path.join(__dirname, '../packages/apps/public/locales');
/**
 * List the translatable namespace files inside one locale directory.
 *
 * @param {string} langRoot - absolute path of a single locale folder
 * @returns {string[]} sorted *.json file names, excluding the generated
 *   index.json and the i18next-internal translation.json
 */
function getEntries (langRoot) {
  const skipped = ['index.json', 'translation.json'];
  const results = [];

  for (const entry of fs.readdirSync(langRoot)) {
    if (['.', '..'].includes(entry)) {
      continue;
    }

    if (!fs.lstatSync(path.join(langRoot, entry)).isFile()) {
      continue;
    }

    if (entry.endsWith('.json') && !skipped.includes(entry)) {
      results.push(entry);
    }
  }

  return results.sort();
}
/**
 * Compare one language against the English defaults, logging any
 * namespace files or per-file keys that are missing or extra.
 *
 * @param {string} lang - locale directory name, e.g. 'fr'
 */
function checkLanguage (lang) {
  console.log(`*** Checking ${lang}`);

  const langRoot = path.join(i18nRoot, lang);
  const entries = getEntries(langRoot);

  // Namespace files present in English but absent from this language.
  const absent = Object.keys(defaults).filter((entry) => !entries.includes(entry));

  if (absent.length) {
    console.log(`\ttop-level missing ${absent.length}: ${absent.join(', ')}`);
  }

  for (const entry of entries) {
    const keys = Object.keys(require(path.join(langRoot, entry)));
    const root = defaults[entry];

    if (!root) {
      console.log(`\t> ${entry} not found in default, not checking`);
      continue;
    }

    const missing = root.filter((key) => !keys.includes(key));
    const extra = keys.filter((key) => !root.includes(key));

    if (missing.length) {
      console.log(`\t> ${entry} ${missing.length} keys missing`);
      missing.forEach((key) => console.log(`\t\t${key}`));
    }

    if (extra.length) {
      console.log(`\t> ${entry} ${extra.length} keys extra`);
      extra.forEach((key) => console.log(`\t\t${key}`));
    }
  }
}
// Run checkLanguage over every locale directory except the English
// reference locale itself.
function checkLanguages () {
  const langs = fs
    .readdirSync(i18nRoot)
    .filter((entry) =>
      entry !== 'en' &&
      !['.', '..'].includes(entry) &&
      fs.lstatSync(path.join(i18nRoot, entry)).isDirectory()
    );

  for (const lang of langs.sort()) {
    checkLanguage(lang);
  }
}
// Load every English namespace file and record its key set as the
// reference used by checkLanguage.
function initDefault () {
  const enRoot = path.join(i18nRoot, 'en');

  for (const entry of getEntries(enRoot)) {
    defaults[entry] = Object.keys(require(path.join(enRoot, entry)));
  }
}
// Build the English reference first, then validate every other locale.
initDefault();
checkLanguages();
+79
View File
@@ -0,0 +1,79 @@
// Copyright 2017-2025 @polkadot/apps authors & contributors
// SPDX-License-Identifier: Apache-2.0
const fs = require('node:fs');
const path = require('node:path');
// Root folder containing one sub-directory per supported language.
const i18nRoot = path.join(__dirname, '../packages/apps/public/locales');
// Namespace files that are never listed in the generated en/index.json.
const SKIP_NS = ['translation'].map((f) => `${f}.json`);
/**
 * List the sortable namespace files inside one locale directory.
 *
 * @param {string} langRoot - absolute path of a single locale folder
 * @returns {string[]} sorted *.json file names, excluding the generated
 *   index.json
 */
function getEntries (langRoot) {
  const isWanted = (entry) =>
    entry !== 'index.json' &&
    entry.endsWith('.json') &&
    !['.', '..'].includes(entry) &&
    fs.lstatSync(path.join(langRoot, entry)).isFile();

  return fs.readdirSync(langRoot).filter(isWanted).sort();
}
/**
 * Rewrite every namespace file of one language with its keys sorted
 * alphabetically. For the English reference locale, also regenerate
 * index.json with the list of non-empty namespaces.
 *
 * @param {string} lang - locale directory name, e.g. 'en'
 */
function sortLanguage (lang) {
  const langRoot = path.join(i18nRoot, lang);
  const entries = getEntries(langRoot);
  /** @type {Record<string, boolean>} */
  const hasKeys = {};

  for (const entry of entries) {
    const filename = path.join(langRoot, entry);
    const json = require(filename);
    const sorted = Object.fromEntries(
      Object.keys(json).sort().map((key) => [key, json[key]])
    );

    hasKeys[entry] = Object.keys(sorted).length !== 0;
    fs.writeFileSync(filename, JSON.stringify(sorted, null, 2));
  }

  if (lang !== 'en') {
    return;
  }

  // Only the English locale drives the generated namespace index.
  const filtered = entries.filter((entry) =>
    !SKIP_NS.includes(entry) && hasKeys[entry]
  );

  fs.writeFileSync(
    path.join(langRoot, 'index.json'),
    JSON.stringify(filtered, null, 2)
  );
}
// Sort every locale in-place, then regenerate the top-level index.json
// listing all known languages.
function checkLanguages () {
  const languages = fs
    .readdirSync(i18nRoot)
    .filter((entry) =>
      !['.', '..'].includes(entry) &&
      fs.lstatSync(path.join(i18nRoot, entry)).isDirectory()
    )
    .sort();

  for (const lang of languages) {
    sortLanguage(lang);
  }

  fs.writeFileSync(path.join(i18nRoot, 'index.json'), JSON.stringify(languages, null, 2));
}

checkLanguages();
+157
View File
@@ -0,0 +1,157 @@
// Copyright 2017-2025 @polkadot/apps authors & contributors
// SPDX-License-Identifier: Apache-2.0
import fs from 'node:fs';
import path from 'node:path';
import { formatNumber, stringCamelCase } from '@polkadot/util';
// Hard limit for an individual source image (before base64 expansion).
const MAX_SIZE = 48 * 1024;
// FIXME The sorting here and the sorting from linting seems like a mismatch...
const HEADER = '// Copyright 2017-2025 @polkadot/apps authors & contributors\n// SPDX-License-Identifier: Apache-2.0\n\n// Do not edit. Auto-generated via node scripts/imgConvert.mjs\n\n';
// Known image extensions mapped to their data-URI mime types. Unknown
// extensions make the conversion loop throw.
/** @type {Record<string, string>} */
const MIME = {
  gif: 'image/gif',
  jpeg: 'image/jpeg',
  // common alternate spelling; previously such files hit the fatal
  // "Unable to determine mime" path
  jpg: 'image/jpeg',
  png: 'image/png',
  svg: 'image/svg+xml'
};
/**
 * Build the source of one generated logo module exporting a single
 * base64 data-URI constant.
 *
 * @param {string} k - exported constant name
 * @param {string} contents - the data URI payload
 * @returns {string} complete contents for the generated .ts file
 */
function makeContents (k, contents) {
  const body = `export const ${k} = '${contents}';`;

  return `${HEADER}${body}\n`;
}
// All generated data URIs keyed by export name (for dupe detection below).
/** @type {Record<string, string>} */
const all = {};
// Exports whose source image exceeds MAX_SIZE, mapped to byte length.
/** @type {Record<string, number>} */
const oversized = {};
for (const dir of ['extensions', 'external', 'chains', 'nodes']) {
  const sub = path.join('packages/apps-config/src/ui/logos', dir);
  const generated = path.join(sub, 'generated');
  /** @type {Record<string, string>} */
  const result = {};
  // Start from a clean generated/ folder on every run.
  if (fs.existsSync(generated)) {
    fs.rmSync(generated, { force: true, recursive: true });
  }
  fs.mkdirSync(generated);
  fs
    .readdirSync(sub)
    .forEach((file) => {
      const full = path.join(sub, file);
      // Only raw image files; skip sources and dot-files.
      if (fs.lstatSync(full).isFile() && !(file.endsWith('.ts') || file.startsWith('.'))) {
        const parts = file.split('.');
        const ext = parts[parts.length - 1];
        const nameParts = parts.slice(0, parts.length - 1);
        const mime = MIME[ext];
        if (!mime) {
          throw new Error(`Unable to determine mime for ${file}`);
        } else {
          // Inline the image as a base64 data URI in a generated .ts module.
          const buf = fs.readFileSync(full);
          const data = `data:${mime};base64,${buf.toString('base64')}`;
          const k = `${stringCamelCase(`${dir}_${nameParts.join('_')}`)}${ext.toUpperCase()}`;
          const fileprefix = `generated/${nameParts.join('.')}${ext.toUpperCase()}`;
          fs.writeFileSync(path.join(sub, `${fileprefix}.ts`), makeContents(k, data));
          result[k] = fileprefix;
          all[k] = data;
          if (buf.length > MAX_SIZE) {
            oversized[k] = buf.length;
          }
        }
      }
    });
  if (Object.keys(result).length) {
    // Concatenate every config source that may reference the generated keys.
    let srcs = '';
    for (const dir of ['endpoints', 'extensions', 'links']) {
      const srcroot = path.join('packages/apps-config/src', dir);
      fs
        .readdirSync(srcroot)
        .forEach((file) => {
          const full = path.join(srcroot, file);
          if (fs.lstatSync(full).isFile() && file.endsWith('.ts')) {
            srcs += fs.readFileSync(full).toString();
          }
        });
    }
    // Warn (non-fatal) about generated logos nothing references.
    const notfound = Object
      .keys(result)
      .filter((k) => !srcs.includes(k));
    if (notfound.length) {
      console.log('\n', notfound.length.toString().padStart(3), 'not referenced in', dir, '::\n\n\t', notfound.join(', '), '\n');
    }
    // Barrel file re-exporting every generated constant, sorted by path.
    fs.writeFileSync(path.join(sub, 'index.ts'), `${HEADER}${
      Object
        .keys(result)
        .sort((a, b) => result[a].localeCompare(result[b]))
        .map((k) => `export { ${k} } from './${result[k]}.js';`)
        .join('\n')
    }\n`);
  }
}
const allKeys = Object.keys(all);
// Group export keys by their data URI in a single O(n) pass instead of
// comparing every pair (the previous filter-inside-forEach was O(n^2)).
/** @type {Map<string, string[]>} */
const byData = new Map();
for (const k of allKeys) {
  const group = byData.get(all[k]);
  if (group) {
    group.push(k);
  } else {
    byData.set(all[k], [k]);
  }
}
// For every key that shares its image with others, record those others
// (same shape as before: key -> list of duplicate keys).
/** @type {Record<string, string[]>} */
const dupes = {};
for (const k of allKeys) {
  const others = byData.get(all[k]).filter((b) => b !== k);
  if (others.length) {
    dupes[k] = others;
  }
}
if (Object.keys(dupes).length) {
  const errMsg = `${Object.keys(dupes).length.toString().padStart(3)} dupes found`;
  console.log('\n', errMsg, '::\n');
  for (const [k, d] of Object.entries(dupes)) {
    console.log('\t', k.padStart(30), ' >> ', d.join(', '));
  }
  console.log();
  throw new Error(`FATAL: ${errMsg}. Please remove the duplicates.`);
}
// Fail the build when any source image is larger than MAX_SIZE bytes.
const numOversized = Object.keys(oversized).length;
if (numOversized) {
  const errMsg = `${numOversized.toString().padStart(3)} files with byte sizes > 48K`;
  console.log('\n', errMsg, '::\n');
  Object.entries(oversized).forEach(([k, v]) => {
    console.log('\t', k.padStart(30), formatNumber(v).padStart(15), `(+${formatNumber(v - MAX_SIZE)} bytes)`);
  });
  console.log();
  throw new Error(`FATAL: ${errMsg}. Please resize the images.`);
}
+215
View File
@@ -0,0 +1,215 @@
// Copyright 2017-2025 @polkadot/apps authors & contributors
// SPDX-License-Identifier: Apache-2.0
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-nocheck Currently we have a bit too many of these
import CrustPinner from '@crustio/crust-pin';
import PinataSDK from '@pinata/sdk';
// @ts-expect-error No definition file
import cloudflare from 'dnslink-cloudflare';
import fs from 'node:fs';
// @ts-expect-error No definition file
import { execSync } from '@polkadot/dev/scripts/util.mjs';
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore Using ignore since the file won't be there in dev
import { createWsEndpoints } from '../packages/apps-config/build/endpoints/index.js';
// Echo the invocation (useful in CI logs).
console.log('$ scripts/ipfsUpload.mjs', process.argv.slice(2).join(' '));
// Public IPFS gateway used for the redirect page; an alternative is
// https://gateway.pinata.cloud/ipfs/
const GATEWAY = 'https://ipfs.io/ipfs/';
// DNS zone whose _dnslink records are pointed at the pinned build.
const DOMAIN = 'dotapps.io';
// Build output (uploaded) and static sources (both receive the ipfs/ files).
const DST = 'packages/apps/build';
const SRC = 'packages/apps/public';
// writeFileSync options: overwrite, UTF-8.
const WOPTS = { encoding: 'utf8', flag: 'w' };
// Metadata attached to every pin so old pins can be located and removed.
const PINMETA = { name: DOMAIN };
// Token-authenticated push URL; NOTE(review): embeds GH_PAT — must never
// be logged.
const repo = `https://${process.env.GH_PAT}@github.com/${process.env.GITHUB_REPOSITORY}.git`;
/**
 * Resolve after the given delay; used to throttle successive API calls.
 *
 * @param {number} [delay=2500] - milliseconds to sleep
 */
async function wait (delay = 2500) {
  return new Promise((resolve) => setTimeout(resolve, delay));
}
// Create the Pinata client, or null when the SDK or credentials are
// unusable (the script then degrades to logging and skipping).
function createPinata () {
  try {
    // For 1.2.1
    return PinataSDK(process.env.PINATA_API_KEY, process.env.PINATA_SECRET_KEY);
    // For 2.1.0+
    // return new PinataSDK({
    //   pinataApiKey: process.env.PINATA_API_KEY,
    //   pinataSecretApiKey: process.env.PINATA_SECRET_KEY
    // });
  } catch {
    console.error('Unable to create Pinata');

    return null;
  }
}
// Create the Crust pinner, or null when the SDK or seeds are unusable.
function createCrust () {
  try {
    // eslint-disable-next-line new-cap
    return new CrustPinner.default(process.env.CRUST_SEEDS);
  } catch {
    console.error('Unable to create Crust');

    return null;
  }
}
// Shared clients; either may be null when credentials are unavailable.
const pinata = createPinata();
const crust = createCrust();
/**
 * Mirror a small file into the ipfs/ folder of both the build output
 * and the static public sources.
 *
 * @param {string} name - file name inside ipfs/
 * @param {string} content - file contents
 */
function writeFiles (name, content) {
  for (const root of [DST, SRC]) {
    fs.writeFileSync(`${root}/ipfs/${name}`, content, WOPTS);
  }
}
/**
 * Commit the regenerated ipfs/ files and push them to the current ref
 * via the token-authenticated remote.
 *
 * @param {string} hash - the freshly pinned IPFS hash
 */
function updateGh (hash) {
  execSync('git add --all .');
  // "[CI Skip]" in the message prevents the push from re-triggering CI.
  execSync(`git commit --no-status --quiet -m "[CI Skip] publish/ipfs ${hash}
skip-checks: true"`);
  // NOTE(review): the trailing `true` is @polkadot/dev execSync's second
  // argument — presumably suppresses echoing the command (the URL embeds
  // GH_PAT); confirm against @polkadot/dev.
  execSync(`git push ${repo} HEAD:${process.env.GITHUB_REF}`, true);
}
/**
 * Pin the build output on Pinata, write the gateway redirect files,
 * commit them back to git and (best-effort) pin the same hash on Crust.
 *
 * @returns {Promise<string | undefined>} the pinned IPFS hash, or
 *   undefined when no Pinata client is available
 */
async function pin () {
  if (!pinata) {
    console.error('Pinata not available, cannot pin');
    return;
  }
  // 1. Pin on pinata
  const result = await pinata.pinFromFS(DST, { pinataMetadata: PINMETA });
  const url = `${GATEWAY}${result.IpfsHash}/`;
  // Static redirect page pointing at the newly pinned content.
  const html = `<!DOCTYPE html>
<html>
<head>
<title>Redirecting to ipfs gateway</title>
<meta http-equiv="refresh" content="0; url=${url}" />
<style>
body { font-family: 'Nunito Sans',sans-serif; line-height: 1.5rem; padding: 2rem; text-align: center }
p { margin: 0 }
</style>
</head>
<body>
<p>Redirecting to</p>
<p><a href="${url}">${url}</a></p>
</body>
</html>`;
  writeFiles('index.html', html);
  writeFiles('pin.json', JSON.stringify(result));
  updateGh(result.IpfsHash);
  // 2. Decentralized pin on Crust (failures are logged, not fatal)
  if (crust) {
    await crust.pin(result.IpfsHash).catch(console.error);
  }
  console.log(`Pinned ${result.IpfsHash}`);
  // Throttle before the caller's next Pinata API call.
  await wait();
  return result.IpfsHash;
}
/**
 * Remove all older pins carrying our metadata, keeping only `exclude`.
 *
 * @param {string} exclude - the hash that must stay pinned
 */
async function unpin (exclude) {
  if (!pinata) {
    console.error('Pinata not available, cannot unpin');

    return;
  }

  const result = await pinata.pinList({ metadata: PINMETA, status: 'pinned' });

  await wait();
  console.log('Available Pinata pins', result.rows);

  if (result.count > 1) {
    const hashes = result.rows
      .map((r) => r.ipfs_pin_hash)
      .filter((hash) => hash !== exclude);

    // Sequential on purpose: wait() throttles between unpin calls.
    for (const hash of hashes) {
      try {
        await pinata.unpin(hash);
        console.log(`Unpinned ${hash}`);
      } catch (error) {
        console.error(`Failed unpinning ${hash}`, error);
      }

      await wait();
    }
  }
}
/**
 * Point every _dnslink record (the bare domain plus one per unique
 * endpoint dnslink sub-domain) at the given IPFS hash via Cloudflare.
 *
 * @param {string} hash - the IPFS hash to publish
 */
async function dnslink (hash) {
  // Collect unique dnslink sub-domains from the endpoint config; null
  // stands in for the bare domain itself.
  /** @type {(string | null)[]} */
  const subs = [null];

  for (const { dnslink } of createWsEndpoints(() => '')) {
    if (dnslink && !subs.includes(dnslink)) {
      subs.push(dnslink);
    }
  }

  const records = subs.map((sub) =>
    ['_dnslink', sub, DOMAIN]
      .filter((entry) => !!entry)
      .join('.')
  );

  // Sequential on purpose: wait() throttles between Cloudflare calls.
  for (const record of records) {
    try {
      await cloudflare(
        { token: process.env.CF_API_TOKEN },
        { link: `/ipfs/${hash}`, record, zone: DOMAIN }
      );
      console.log(`Updated dnslink ${record}`);
    } catch (error) {
      console.error(`Failed updating dnslink ${record}`, error);
    }

    await wait();
  }

  console.log(`Dnslink ${hash} for ${records.join(', ')}`);
}
/**
 * Entry point: pin the build, publish dnslink records and clean up old
 * pins — but only for non-beta (no '-' suffix) package versions.
 */
async function main () {
  const pkgJson = JSON.parse(fs.readFileSync('package.json', 'utf-8'));

  // only run on non-beta versions
  if (pkgJson.version.includes('-')) {
    console.log('Skipping');
    return;
  }

  console.log('Pinning');

  const hash = await pin();

  // pin() returns undefined when no Pinata client could be created; in
  // that case do not publish /ipfs/undefined dnslink records or unpin.
  if (!hash) {
    console.error('No pinned hash available, aborting');
    return;
  }

  await dnslink(hash);
  await unpin(hash);
  console.log('Completed');
}

main()
  .catch(console.error)
  .finally(() => process.exit(0));
+310
View File
@@ -0,0 +1,310 @@
#!/bin/bash
# PezkuwiChain Apps Rebrand Script
# Converts polkadot-apps to pezkuwi-apps
# Abort on the first failing command.
set -e
# Checkout to rebrand. NOTE(review): hard-coded per-machine path — adjust
# (or take from $1) before running elsewhere.
APPS_DIR="/home/mamostehp/pezkuwi-apps"
echo "=== PezkuwiChain Apps Rebrand Script ==="
echo ""
# Step 1: Update package names in all package.json files
echo "[1/6] Updating package.json files..."
# Root package.json
sed -i 's/"name": "polkadot-apps"/"name": "pezkuwi-apps"/g' "$APPS_DIR/package.json"
sed -i 's|https://github.com/polkadot-js/apps|https://github.com/pezkuwichain/pezkuwi-apps|g' "$APPS_DIR/package.json"
sed -i 's|An Apps portal into the Polkadot network|An Apps portal into the Pezkuwi network|g' "$APPS_DIR/package.json"
# Update all @polkadot dependencies to @pezkuwi in package.json files.
# `IFS= read -r` keeps backslashes and leading whitespace in paths intact
# (plain `read` would mangle them).
find "$APPS_DIR" -name "package.json" -type f ! -path "*/node_modules/*" | while IFS= read -r file; do
  # Package names: @polkadot-apps/* -> @pezkuwi/*
  sed -i 's/@polkadot-apps\//@pezkuwi\//g' "$file"
  # Dependencies: @polkadot/* -> @pezkuwi/*
  sed -i 's/"@polkadot\//"@pezkuwi\//g' "$file"
  # URLs
  sed -i 's|polkadot-js/apps|pezkuwichain/pezkuwi-apps|g' "$file"
  sed -i 's|polkadot-js|pezkuwichain|g' "$file"
done
echo " Package.json files updated."
# Step 2: Update TypeScript/JavaScript imports
echo "[2/6] Updating TypeScript/JavaScript imports..."
# `IFS= read -r` keeps backslashes and leading whitespace in paths intact.
find "$APPS_DIR/packages" -type f \( -name "*.ts" -o -name "*.tsx" -o -name "*.js" -o -name "*.jsx" \) ! -path "*/node_modules/*" | while IFS= read -r file; do
  # @polkadot/* imports -> @pezkuwi/*
  sed -i "s|from '@polkadot/|from '@pezkuwi/|g" "$file"
  sed -i "s|from \"@polkadot/|from \"@pezkuwi/|g" "$file"
  # @polkadot-apps/* imports -> @pezkuwi/*
  sed -i "s|from '@polkadot-apps/|from '@pezkuwi/|g" "$file"
  sed -i "s|from \"@polkadot-apps/|from \"@pezkuwi/|g" "$file"
  # require('@polkadot/*') -> require('@pezkuwi/*')
  sed -i "s|require('@polkadot/|require('@pezkuwi/|g" "$file"
  sed -i "s|require(\"@polkadot/|require(\"@pezkuwi/|g" "$file"
done
echo " TypeScript/JavaScript imports updated."
# Step 3: Terminology replacements
echo "[3/6] Applying terminology replacements..."
# `IFS= read -r` keeps backslashes and leading whitespace in paths intact.
find "$APPS_DIR" -type f \( -name "*.ts" -o -name "*.tsx" -o -name "*.js" -o -name "*.jsx" -o -name "*.json" -o -name "*.md" -o -name "*.html" -o -name "*.css" -o -name "*.scss" \) ! -path "*/node_modules/*" | while IFS= read -r file; do
  # Case-sensitive replacements
  sed -i 's/Polkadot/Pezkuwi/g' "$file"
  sed -i 's/polkadot/pezkuwi/g' "$file"
  sed -i 's/POLKADOT/PEZKUWI/g' "$file"
  # Kusama -> Dicle
  sed -i 's/Kusama/Dicle/g' "$file"
  sed -i 's/kusama/dicle/g' "$file"
  sed -i 's/KUSAMA/DICLE/g' "$file"
  # Westend -> Zagros
  sed -i 's/Westend/Zagros/g' "$file"
  sed -i 's/westend/zagros/g' "$file"
  sed -i 's/WESTEND/ZAGROS/g' "$file"
  # Rococo -> PezkuwiChain (test network)
  sed -i 's/Rococo/PezkuwiChain/g' "$file"
  sed -i 's/rococo/pezkuwichain/g' "$file"
  sed -i 's/ROCOCO/PEZKUWICHAIN/g' "$file"
  # Token tickers. NOTE: \b word boundaries are a GNU sed extension —
  # fine on Linux, not portable to BSD/macOS sed.
  # DOT -> HEZ
  sed -i 's/\bDOT\b/HEZ/g' "$file"
  # WND -> ZGR (Westend token)
  sed -i 's/\bWND\b/ZGR/g' "$file"
  # ROC -> TYR (Rococo token)
  sed -i 's/\bROC\b/TYR/g' "$file"
  # KSM -> DCL (Kusama token -> Dicle)
  sed -i 's/\bKSM\b/DCL/g' "$file"
  # Substrate -> Bizinikiwi
  sed -i 's/Substrate/Bizinikiwi/g' "$file"
  sed -i 's/substrate/bizinikiwi/g' "$file"
  sed -i 's/SUBSTRATE/BIZINIKIWI/g' "$file"
  # Parachain -> Teyrchain
  sed -i 's/Parachain/Teyrchain/g' "$file"
  sed -i 's/parachain/teyrchain/g' "$file"
  sed -i 's/PARACHAIN/TEYRCHAIN/g' "$file"
  # Paseo -> remove or comment (Parity test network)
  # We'll keep Paseo references for now but they can be cleaned up later
done
echo " Terminology replacements applied."
# Step 4: Update URLs and domains
echo "[4/6] Updating URLs and domains..."
# `IFS= read -r` keeps backslashes and leading whitespace in paths intact.
find "$APPS_DIR" -type f \( -name "*.ts" -o -name "*.tsx" -o -name "*.js" -o -name "*.jsx" -o -name "*.json" -o -name "*.md" -o -name "*.html" -o -name "*.cjs" -o -name "*.mjs" \) ! -path "*/node_modules/*" | while IFS= read -r file; do
  # Dots are escaped so an unrelated string such as 'polkadotXjsYorg'
  # cannot accidentally match.
  # polkadot.js.org -> pezkuwichain.app
  sed -i 's|polkadot\.js\.org|pezkuwichain.app|g' "$file"
  # apps.polkadot.io -> pezkuwichain.app
  sed -i 's|apps\.polkadot\.io|pezkuwichain.app|g' "$file"
  # polkadot.network -> pezkuwichain.io
  sed -i 's|polkadot\.network|pezkuwichain.io|g' "$file"
  # kusama.network -> dicle.pezkuwichain.io
  sed -i 's|kusama\.network|dicle.pezkuwichain.io|g' "$file"
  # GitHub URLs
  sed -i 's|github.com/polkadot-js|github.com/pezkuwichain|g' "$file"
  sed -i 's|github.com/paritytech|github.com/pezkuwichain|g' "$file"
done
echo " URLs and domains updated."
# Step 5: Rename files with polkadot/kusama in names
echo "[5/6] Renaming files..."
# Find and rename files
cd "$APPS_DIR"

# rename_matching <old> <new>
# Rename every file whose name contains <old>, replacing every occurrence
# of <old> anywhere in the path with <new>. Collapses the four previous
# copy-pasted loops into one helper; `IFS= read -r` keeps odd path
# characters intact.
rename_matching() {
  local old="$1"
  local new="$2"
  find . -type f -name "*${old}*" ! -path "*/node_modules/*" | while IFS= read -r file; do
    local newname="${file//${old}/${new}}"
    if [ "$file" != "$newname" ]; then
      mkdir -p "$(dirname "$newname")"
      # Best-effort: a failed move (e.g. target exists) must not kill the
      # whole run under `set -e`.
      mv "$file" "$newname" 2>/dev/null || true
    fi
  done
}

rename_matching polkadot pezkuwi
rename_matching kusama dicle
rename_matching westend zagros
rename_matching rococo pezkuwichain
echo " Files renamed."
# Step 6: Update resolutions in root package.json with @pezkuwi versions
echo "[6/6] Updating package.json resolutions..."
# Overwrite the root package.json wholesale with the rebranded manifest.
# The heredoc delimiter is quoted ('PACKAGEJSON'), so the content below is
# written verbatim with no shell expansion.
cat > "$APPS_DIR/package.json" << 'PACKAGEJSON'
{
  "author": "Dijital Kurdistan Tech Institute <info@pezkuwichain.io>",
  "bugs": "https://github.com/pezkuwichain/pezkuwi-apps/issues",
  "description": "An Apps portal into the Pezkuwi network",
  "engines": {
    "node": ">=18.14"
  },
  "homepage": "https://github.com/pezkuwichain/pezkuwi-apps#readme",
  "license": "Apache-2.0",
  "name": "pezkuwi-apps",
  "packageManager": "yarn@4.6.0",
  "private": true,
  "repository": {
    "type": "git",
    "url": "https://github.com/pezkuwichain/pezkuwi-apps.git"
  },
  "sideEffects": false,
  "type": "module",
  "version": "1.0.0",
  "versions": {
    "git": "1.0.0",
    "npm": "1.0.0"
  },
  "workspaces": [
    "packages/*"
  ],
  "main": "packages/apps-electron/build/electron.js",
  "scripts": {
    "analyze": "yarn clean && BUILD_ANALYZE=1 yarn run build:code && yarn source-map-explorer packages/apps/build/app.*.js",
    "build": "yarn run build:i18n && yarn run build:code",
    "build:before": "yarn build:images && yarn build:typesBundle",
    "build:code": "NODE_ENV=production yarn pezkuwi-dev-build-ts",
    "build:devElectronMain": "cd packages/apps-electron && yarn pezkuwi-exec-webpack --config webpack.main.cjs",
    "build:devElectronRenderer": "cd packages/apps-electron && yarn pezkuwi-exec-webpack --config webpack.renderer.cjs",
    "build:electron": "yarn clean:electronBuild && yarn build:electronMain && yarn build:electronRenderer",
    "build:electronMain": "cd packages/apps-electron && NODE_ENV=production yarn pezkuwi-exec-webpack --config webpack.main.cjs",
    "build:electronRenderer": "cd packages/apps-electron && NODE_ENV=production yarn pezkuwi-exec-webpack --config webpack.renderer.cjs",
    "build:i18n": "i18next-scanner --config i18next-scanner.config.cjs && node ./scripts/i18nSort.cjs",
    "build:images": "node scripts/imgConvert.mjs",
    "build:release:electron": "yarn build && yarn build:electron && yarn postinstall:electron",
    "build:release:ghpages": "yarn pezkuwi-ci-ghact-docs",
    "build:release:ipfs": "node scripts/ipfsUpload.mjs",
    "build:release:www": "yarn pezkuwi-ci-ghact-build && yarn build:release:ghpages && yarn build:release:ipfs",
    "build:robohash": "node scripts/robohash.cjs",
    "build:typesBundle": "pezkuwi-dev-run-test --env node --loader extensionless typesBundle",
    "build:www": "rm -rf packages/apps/build && mkdir -p packages/apps/build && yarn run build:i18n && cd packages/apps && yarn pezkuwi-exec-webpack --config webpack.config.cjs",
    "ci:chainEndpoints": "pezkuwi-dev-run-test --env node --logfile .github/chain-endpoints.md packages/apps-config/src/ci/chainEndpoints",
    "ci:chainTypes": "echo ok",
    "clean": "pezkuwi-dev-clean-build",
    "clean:electronBuild": "cd packages/apps-electron && pezkuwi-dev-clean-build",
    "clean:electronRelease": "cd packages/apps-electron && rm -rf release",
    "clean:i18n": "rm -rf packages/apps/public/locales/en && mkdir -p packages/apps/public/locales/en",
    "docs": "echo \"skipping docs\"",
    "lint": "pezkuwi-dev-run-lint",
    "packElectron": "yarn build:release:electron && yarn clean:electronRelease && electron-builder build -mwl",
    "packElectron:linux": "yarn build:release:electron && electron-builder build --linux --project packages/apps-electron",
    "packElectron:mac": "yarn build:release:electron && electron-builder build --mac --project packages/apps-electron",
    "packElectron:test": "yarn build:release:electron && electron-builder --dir --project packages/apps-electron",
    "packElectron:win": "yarn build:release:electron && electron-builder build --win --project packages/apps-electron",
    "postinstall": "pezkuwi-dev-yarn-only",
    "postinstall:electron": "electron-builder install-app-deps",
    "start": "yarn clean && cd packages/apps && yarn pezkuwi-exec-webpack serve --config webpack.serve.cjs --port 3000",
    "start:electron": "yarn clean:electronBuild && concurrently 'yarn build:devElectronMain && cd packages/apps-electron && electron ./build/electron.js' 'yarn build:devElectronRenderer'",
    "test": "pezkuwi-dev-run-test --env browser ^typesBundle ^chainEndpoints ^chainTypes ^page- ^react- ^apps-electron",
    "test:all": "pezkuwi-dev-run-test --env browser ^chainEndpoints ^chainTypes",
    "test:one": "pezkuwi-dev-run-test --env browser",
    "test:skipped": "echo 'tests skipped'"
  },
  "devDependencies": {
    "@crustio/crust-pin": "^1.0.0",
    "@pinata/sdk": "^1.2.1",
    "@pezkuwi/dev": "^0.85.2",
    "@types/chart.js": "^2.9.41",
    "@types/file-saver": "^2.0.7",
    "@types/react-beautiful-dnd": "^13.1.7",
    "@types/react-copy-to-clipboard": "^5.0.7",
    "@types/react-dom": "^18.2.18",
    "@types/react-router-dom": "^5.3.3",
    "@types/store": "^2.0.5",
    "concurrently": "^8.2.2",
    "devtron": "^1.4.0",
    "dnslink-cloudflare": "^3.0.0",
    "electron": "28.0.0",
    "electron-builder": "24.10.0",
    "electron-builder-notarize": "^1.5.1",
    "extensionless": "^1.9.6",
    "i18next-scanner": "^4.4.0",
    "react": "^18.2.0",
    "react-dom": "^18.2.0",
    "react-is": "^18.2.0",
    "source-map-explorer": "^2.5.3"
  },
  "resolutions": {
    "@pezkuwi/api": "^16.5.6",
    "@pezkuwi/api-augment": "^16.5.6",
    "@pezkuwi/api-base": "^16.5.6",
    "@pezkuwi/api-contract": "^16.5.6",
    "@pezkuwi/api-derive": "^16.5.6",
    "@pezkuwi/hw-ledger": "^14.0.7",
    "@pezkuwi/keyring": "^14.0.7",
    "@pezkuwi/networks": "^14.0.7",
    "@pezkuwi/phishing": "^0.25.22",
    "@pezkuwi/rpc-augment": "^16.5.6",
    "@pezkuwi/rpc-core": "^16.5.6",
    "@pezkuwi/rpc-provider": "^16.5.6",
    "@pezkuwi/typegen": "^16.5.6",
    "@pezkuwi/types": "^16.5.6",
    "@pezkuwi/types-augment": "^16.5.6",
    "@pezkuwi/types-codec": "^16.5.6",
    "@pezkuwi/types-create": "^16.5.6",
    "@pezkuwi/types-known": "^16.5.6",
    "@pezkuwi/types-support": "^16.5.6",
    "@pezkuwi/util": "^14.0.7",
    "@pezkuwi/util-crypto": "^14.0.7",
    "@pezkuwi/wasm-crypto": "^7.5.4",
    "@pezkuwi/x-bigint": "^14.0.7",
    "@pezkuwi/x-fetch": "^14.0.7",
    "@pezkuwi/x-global": "^14.0.7",
    "@pezkuwi/x-randomvalues": "^14.0.7",
    "@pezkuwi/x-textdecoder": "^14.0.7",
    "@pezkuwi/x-textencoder": "^14.0.7",
    "@pezkuwi/x-ws": "^14.0.7",
    "@zondax/ledger-substrate": "1.1.1",
    "typescript": "^5.5.4"
  }
}
PACKAGEJSON
# NOTE(review): "@zondax/ledger-substrate" keeps its upstream name —
# presumably intentional since it is an external npm package, not part of
# the rebrand; confirm it is not hit by the step-3 substrate replacement
# elsewhere.
echo " Resolutions updated."
echo ""
echo "=== Rebrand Phase 1 Complete ==="
echo ""
echo "Next steps:"
echo " 1. Review and fix any broken imports"
echo " 2. Update chain logos"
echo " 3. Update endpoint configurations"
echo " 4. Run yarn install and build"
+119
View File
@@ -0,0 +1,119 @@
// Copyright 2017-2025 @polkadot/react-components authors & contributors
// SPDX-License-Identifier: Apache-2.0
const fs = require('node:fs');
const path = require('node:path');
// Header prepended to every generated file; the eslint-disable keeps the
// import sorter away from the auto-generated import lists.
const HEADER = `// Copyright 2017-2025 @polkadot/react-components authors & contributors
// SPDX-License-Identifier: Apache-2.0
// Automatically generated, do not edit
/* eslint-disable simple-import-sort/imports */`;
// Location of the RoboHash image parts, relative to the repo root.
const PATH = 'packages/react-components/src/IdentityIcon/RoboHash';
/**
 * Zero-pad an index to three digits, keeping only the last three
 * characters — e.g. 7 -> '007', 42 -> '042', 1234 -> '234'.
 *
 * @param {number} index
 * @returns {string}
 */
function getCounter (index) {
  const padded = '000' + index;

  return padded.slice(padded.length - 3);
}
/**
 * Prepare one image directory: drop stale .ts files, normalise '#' in
 * file names to '-', mirror every .png as a base64 .ts module under
 * generated/, and return the remaining entry names.
 *
 * @param {string} dir - absolute directory to process
 * @returns {string[]} remaining entry names (files and sub-directories)
 */
function getFiles (dir) {
  const genpath = path.join(dir, 'generated');
  if (!fs.existsSync(genpath)) {
    fs.mkdirSync(genpath, { recursive: true });
  }
  const all = fs
    .readdirSync(dir)
    .filter((entry) => {
      // Stale generated sources at this level are removed on the fly.
      if (entry.endsWith('.ts')) {
        fs.rmSync(path.join(dir, entry), { force: true });
        return false;
      }
      return !entry.startsWith('.') && entry !== 'generated';
    })
    .map((entry) => {
      // '#' breaks import paths, so such files are renamed to use '-'.
      if (entry.includes('#')) {
        const newName = entry.replace(/#/g, '-');
        fs.renameSync(path.join(dir, entry), path.join(dir, newName));
        return newName;
      }
      return entry;
    })
    .sort((a, b) =>
      // Sort by the part after the first '-' when both names have one;
      // otherwise keep the original relative order.
      (a.includes('-') && b.includes('-'))
        ? a.split('-')[1].localeCompare(b.split('-')[1])
        : 0
    );
  for (const f of all) {
    if (f.endsWith('.png')) {
      fs.writeFileSync(path.join(dir, `generated/${f}`).replace('.png', '.ts'), `${HEADER}\n\nexport default 'data:image/png;base64,${fs.readFileSync(path.join(dir, f)).toString('base64')}';\n`);
    }
  }
  return all;
}
// Generate backgrounds/index.ts, importing every generated background
// module (b000, b001, ...) and default-exporting them as one array.
function extractBg () {
  const root = path.join(__dirname, '..', PATH, 'backgrounds');
  /** @type {string[]} */
  const files = [];

  for (const sub of getFiles(root)) {
    for (const entry of getFiles(path.join(root, sub))) {
      files.push(`./${sub}/generated/${entry}`);
    }
  }

  const imports = files
    .map((file, index) => `import b${getCounter(index)} from '${file.replace('.png', '')}';`)
    .join('\n');
  const names = files
    .map((_, index) => `b${getCounter(index)}`)
    .join(', ');

  fs.writeFileSync(path.join(root, 'index.ts'), `${HEADER}\n\n${imports}\n\nexport default [${names}];\n`);
}
// Generate sets/index.ts: a 3-level nested array (set -> area -> parts)
// of imported image modules. Each import is named s<set><area><file>
// using 3-digit zero-padded counters.
function extractSets () {
  const root = path.join(__dirname, '..', PATH, 'sets');
  const sets = getFiles(root).map((sub) =>
    getFiles(path.join(root, sub)).map((dir) =>
      getFiles(path.join(root, sub, dir)).map((entry) => `./${sub}/${dir}/generated/${entry}`)
    )
  );
  /** @type {string[]} */
  const imports = [];
  // The nested array literal is built by hand so the generated file keeps
  // a stable layout; imports are collected as a side effect of the walk.
  let list = '[';
  sets.forEach((areas, sindex) => {
    list = `${list}${sindex ? ',' : ''}\n [`;
    areas.forEach((files, aindex) => {
      const indexes = files.map((file, findex) => {
        const index = `s${getCounter(sindex)}${getCounter(aindex)}${getCounter(findex)}`;
        imports.push(`import ${index} from '${file.replace('.png', '')}';`);
        return index;
      });
      list = `${list}${aindex ? ',' : ''}\n [${indexes.join(', ')}]`;
    });
    list = `${list}\n ]`;
  });
  list = `${list}\n];`;
  fs.writeFileSync(path.join(root, 'index.ts'), `${HEADER}\n\n${imports.join('\n')}\n\nexport default ${list}\n`);
}
// Regenerate both generated index files.
extractBg();
extractSets();