Initial rebrand: @polkadot -> @pezkuwi (3 packages)

- Package namespace: @polkadot/dev -> @pezkuwi/dev
- Repository: polkadot-js/dev -> pezkuwichain/pezkuwi-dev
- Author: Pezkuwi Team <team@pezkuwichain.io>

Packages:
- @pezkuwi/dev (build tools, linting, CI scripts)
- @pezkuwi/dev-test (test runner)
- @pezkuwi/dev-ts (TypeScript build)

Upstream: polkadot-js/dev v0.83.3
This commit is contained in:
2026-01-05 14:22:47 +03:00
commit 8d28b36f9c
135 changed files with 19232 additions and 0 deletions
+540
View File
@@ -0,0 +1,540 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import process from 'node:process';
import yargs from 'yargs';
import { copyDirSync, copyFileSync, denoCreateDir, execGit, execPm, execSync, exitFatal, GITHUB_REPO, GITHUB_TOKEN_URL, gitSetup, logBin, mkdirpSync, rimrafSync, topoSort } from './util.mjs';
/** @typedef {Record<string, any>} ChangelogMap */
logBin('polkadot-ci-ghact-build');

// Target repositories for the generated Deno & browser-bundle outputs
const DENO_REPO = 'polkadot-js/build-deno.land';
const BUND_REPO = 'polkadot-js/build-bundle';

// Push URLs (GITHUB_TOKEN_URL presumably embeds the auth token - see util.mjs)
const repo = `${GITHUB_TOKEN_URL}/${GITHUB_REPO}.git`;
const denoRepo = `${GITHUB_TOKEN_URL}/${DENO_REPO}.git`;
const bundRepo = `${GITHUB_TOKEN_URL}/${BUND_REPO}.git`;

// Local clone directory names for the two publish targets
const bundClone = 'build-bundle-clone';
const denoClone = 'build-deno-clone';

// Publish flags, set from the .123* trigger files (getFlags) and verBump
let withDeno = false;
let withBund = false;
let withNpm = false;

/** @type {string[]} accumulated [version, ...dirName] changelog parts for Deno */
const shouldDeno = [];
/** @type {string[]} accumulated [version, ...dirName] changelog parts for bundles */
const shouldBund = [];
// CLI options (yargs may return a promise, hence the await)
const argv = await yargs(process.argv.slice(2))
  .options({
    'skip-beta': {
      description: 'Do not increment as beta',
      type: 'boolean'
    }
  })
  .strict()
  .argv;
/**
 * Deletes the given file if it exists
 *
 * @param {string} file
 * @returns {boolean} true when the file was present (and removed)
 */
function rmFile (file) {
  const found = fs.existsSync(file);

  if (found) {
    rimrafSync(file);
  }

  return found;
}
/**
 * Resolves the location of the package.json in the current working directory
 *
 * @returns {string}
 */
function npmGetJsonPath () {
  const cwd = process.cwd();

  return path.resolve(cwd, 'package.json');
}
/**
 * Reads and parses the root package.json
 *
 * @returns {{ name: string; version: string; versions?: { npm?: string; git?: string } }}
 */
function npmGetJson () {
  const raw = fs.readFileSync(npmGetJsonPath(), 'utf8');

  return JSON.parse(raw);
}
/**
 * Serializes and writes the root package.json (2-space indent, trailing newline)
 *
 * @param {any} json
 */
function npmSetJson (json) {
  const contents = JSON.stringify(json, null, 2);

  fs.writeFileSync(npmGetJsonPath(), `${contents}\n`);
}
/**
 * Extracts the version field from the root package.json
 *
 * @returns {string}
 */
function npmGetVersion () {
  const { version } = npmGetJson();

  return version;
}
/**
 * Ensures the package version carries the -x (beta) suffix
 */
function npmAddVersionX () {
  const json = npmGetJson();

  if (json.version.endsWith('-x')) {
    return;
  }

  json.version = `${json.version}-x`;
  npmSetJson(json);
}
/**
 * Removes the -x (beta) suffix from the package version, if present
 */
function npmDelVersionX () {
  const json = npmGetJson();

  if (json.version.endsWith('-x')) {
    // strip the suffix only - a plain replace('-x', '') would remove the
    // first '-x' occurrence anywhere in the string, not the trailing one
    json.version = json.version.slice(0, -2);
    npmSetJson(json);
  }
}
/**
 * Updates versions.git (always) and versions.npm (stable only) in the root
 * package.json, removing any leftover .123current trigger file
 */
function npmSetVersionFields () {
  const json = npmGetJson();
  const versions = json.versions || (json.versions = {});

  versions.git = json.version;

  // beta (-x suffixed) versions are never recorded as the npm version
  if (!json.version.endsWith('-x')) {
    versions.npm = json.version;
  }

  npmSetJson(json);
  rmFile('.123current');
}
/**
 * Writes an .npmrc into the home directory carrying the npm auth token
 * (read from the NPM_TOKEN environment variable)
 */
function npmSetup () {
  const npmrcPath = path.join(os.homedir(), '.npmrc');

  fs.writeFileSync(npmrcPath, `//registry.npmjs.org/:_authToken=${process.env['NPM_TOKEN']}`);
}
/**
 * Publishes the current package to npm, retrying up to 5 times on failure
 * (betas, i.e. versions with a '-', are published under the beta dist-tag)
 *
 * @returns {void}
 */
function npmPublish () {
  if (fs.existsSync('.skip-npm') || !withNpm) {
    return;
  }

  // ensure the build output also contains the license & manifest
  ['LICENSE', 'package.json']
    .filter((file) => !fs.existsSync(path.join(process.cwd(), 'build', file)))
    .forEach((file) => copyFileSync(file, 'build'));

  process.chdir('build');

  const tag = npmGetVersion().includes('-') ? '--tag beta' : '';
  let count = 1;

  try {
    while (true) {
      try {
        execSync(`npm publish --quiet --access public ${tag}`);

        break;
      } catch (error) {
        // FIX: previously attempts past the 5th fell through the catch and
        // the while (true) retried forever - now we re-throw on exhaustion
        if (count >= 5) {
          throw error;
        }

        const end = Date.now() + 15000;

        console.error(`Publish failed on attempt ${count}/5. Retrying in 15s`);
        count++;

        while (Date.now() < end) {
          // just spin our wheels
        }
      }
    }
  } finally {
    // always restore the working directory, even when publish failed
    process.chdir('..');
  }
}
/**
 * Builds a nested map from dash-split name parts; a leaf level is
 * marked by an empty-string key
 *
 * @param {string[][]} parts
 * @param {ChangelogMap} result
 * @returns {ChangelogMap}
 */
function createChangelogMap (parts, result = {}) {
  for (const part of parts) {
    const [top, ...rest] = part;
    const existing = result[top];

    if (existing) {
      if (rest.length) {
        createChangelogMap([rest], existing);
      } else {
        existing[''] = {};
      }
    } else {
      result[top] = rest.length
        ? createChangelogMap([rest])
        : { '': {} };
    }
  }

  return result;
}
/**
 * Flattens a changelog map back into display entries, rendering a single
 * child as prefix-child and multiple children as prefix-{a, b}
 *
 * @param {ChangelogMap} map
 * @returns {string[]}
 */
function createChangelogArr (map) {
  /** @type {string[]} */
  const result = [];

  for (const [name, imap] of Object.entries(map)) {
    if (!name) {
      continue;
    }

    // an empty-string key marks the name itself as an entry
    if (imap['']) {
      result.push(name);
    }

    const inner = createChangelogArr(imap);

    if (inner.length === 1) {
      result.push(`${name}-${inner[0]}`);
    } else if (inner.length) {
      result.push(`${name}-{${inner.join(', ')}}`);
    }
  }

  return result;
}
/**
 * Prepends a generated entry for [version, ...names] to CHANGELOG.md,
 * creating the file when it does not exist yet
 *
 * @param {string[]} changelog
 * @returns {string} the generated entry text
 */
function addChangelog (changelog) {
  const [version, ...names] = changelog;
  const grouped = createChangelogArr(
    createChangelogMap(
      names
        .sort()
        .map((n) => n.split('-'))
    )
  );
  const entry = `${grouped.join(', ')} ${version}`;
  const newInfo = `## master\n\n- ${entry}\n`;

  if (fs.existsSync('CHANGELOG.md')) {
    const md = fs.readFileSync('CHANGELOG.md', 'utf-8');

    // insert under the master heading, or directly under the title
    fs.writeFileSync('CHANGELOG.md', md.includes('## master\n\n')
      ? md.replace('## master\n\n', newInfo)
      : md.replace('# CHANGELOG\n\n', `# CHANGELOG\n\n${newInfo}\n`)
    );
  } else {
    fs.writeFileSync('CHANGELOG.md', `# CHANGELOG\n\n${newInfo}`);
  }

  return entry;
}
/**
 * Inside the given clone directory, records the changelog entry, then
 * commits and pushes all changes (no-op when there is nothing to record)
 *
 * @param {string} repo
 * @param {string} clone
 * @param {string[]} names
 */
function commitClone (repo, clone, names) {
  if (!names.length) {
    return;
  }

  process.chdir(clone);

  const entry = addChangelog(names);

  gitSetup();
  execGit('add --all .');
  execGit(`commit --no-status --quiet -m "${entry}"`);
  execGit(`push ${repo}`, true);
  process.chdir('..');
}
/**
 * Copies this package's browser bundle (when built) into the build-bundle
 * clone, recording the package for the changelog commit
 *
 * @returns {void}
 */
function bundlePublishPkg () {
  const { name, version } = npmGetJson();
  const dirName = name.split('/')[1];
  const bundName = `bundle-polkadot-${dirName}.js`;
  const srcPath = path.join('build', bundName);
  const dstDir = path.join('../..', bundClone);

  if (!fs.existsSync(srcPath)) {
    return;
  }

  console.log(`\n *** bundle ${name}`);

  // the first recorded entry is always the version itself
  if (!shouldBund.length) {
    shouldBund.push(version);
  }

  shouldBund.push(dirName);
  rimrafSync(path.join(dstDir, bundName));
  copyFileSync(srcPath, dstDir);
}
/**
 * Clones the bundle repository and publishes all package bundles into it
 *
 * @returns {void}
 */
function bundlePublish () {
  const { version } = npmGetJson();
  const isBeta = version.includes('-');

  // betas are only bundled when explicitly triggered via .123bundle
  if (isBeta && !withBund) {
    return;
  }

  execGit(`clone ${bundRepo} ${bundClone}`, true);
  loopFunc(bundlePublishPkg);
  commitClone(bundRepo, bundClone, shouldBund);
}
/**
 * Copies this package's build-deno output (when present) into the
 * deno.land clone, recording the package for the changelog commit
 *
 * @returns {void}
 */
function denoPublishPkg () {
  const { name, version } = npmGetJson();

  if (fs.existsSync('.skip-deno') || !fs.existsSync('build-deno')) {
    return;
  }

  console.log(`\n *** deno ${name}`);

  const dirName = denoCreateDir(name);
  const denoPath = `../../${denoClone}/${dirName}`;

  // the first recorded entry is always the version itself
  if (!shouldDeno.length) {
    shouldDeno.push(version);
  }

  shouldDeno.push(dirName);
  rimrafSync(denoPath);
  mkdirpSync(denoPath);
  copyDirSync('build-deno', denoPath);
}
/**
 * Clones the deno.land build repository and publishes all packages into it
 *
 * @returns {void}
 */
function denoPublish () {
  const { version } = npmGetJson();
  const isBeta = version.includes('-');

  // betas are only published to Deno when explicitly triggered via .123deno
  if (isBeta && !withDeno) {
    return;
  }

  execGit(`clone ${denoRepo} ${denoClone}`, true);
  loopFunc(denoPublishPkg);
  commitClone(denoRepo, denoClone, shouldDeno);
}
/**
 * Reads the publish-trigger flags: each .123* file is removed (when
 * present) and its existence recorded in the corresponding with* flag
 */
function getFlags () {
  withDeno = rmFile('.123deno');
  withBund = rmFile('.123bundle');
  withNpm = rmFile('.123npm');
}
/**
 * Bumps the version (root and, via polkadot-dev-version, all sub-packages),
 * deciding between a real release and a continued beta (-x) stream.
 *
 * Side effects: may toggle withNpm, rewrites package.json files, refreshes
 * CONTRIBUTORS (polkadot-dev-contrib) and stages all changes in git.
 */
function verBump () {
  const { version: currentVersion, versions } = npmGetJson();
  const [version, tag] = currentVersion.split('-');
  const [,, patch] = version.split('.');
  const lastVersion = versions?.npm || currentVersion;

  if (argv['skip-beta'] || patch === '0') {
    // don't allow beta versions
    execPm('polkadot-dev-version patch');
    withNpm = true;
  } else if (tag || currentVersion === lastVersion) {
    // if we don't want to publish, add an X before passing
    if (!withNpm) {
      npmAddVersionX();
    } else {
      npmDelVersionX();
    }

    // beta version, just continue the stream of betas
    execPm('polkadot-dev-version pre');
  } else {
    // manually set, go for publish
    withNpm = true;
  }

  // always ensure we have made some changes, so we can commit
  npmSetVersionFields();
  rmFile('.123trigger');
  execPm('polkadot-dev-contrib');
  execGit('add --all .');
}
/**
 * Commits the release (with CI-skip markers in the message), rebases onto
 * the latest master, pushes and - when a matching CHANGELOG entry exists -
 * creates a draft GitHub release
 */
function gitPush () {
  const version = npmGetVersion();
  let doGHRelease = false;

  if (process.env['GH_RELEASE_GITHUB_API_TOKEN']) {
    const changes = fs.readFileSync('CHANGELOG.md', 'utf8');

    if (changes.includes(`## ${version}`)) {
      doGHRelease = true;
    } else if (version.endsWith('.1')) {
      // NOTE(review): presumably an x.y.1 marks the first release of a new
      // cycle, which must be documented - confirm against release process
      exitFatal(`Unable to release, no CHANGELOG entry for ${version}`);
    }
  }

  execGit('add --all .');

  if (fs.existsSync('docs/README.md')) {
    // force-add since docs may be ignored
    execGit('add --all -f docs');
  }

  // add the skip checks for GitHub ...
  execGit(`commit --no-status --quiet -m "[CI Skip] ${version.includes('-x') ? 'bump' : 'release'}/${version.includes('-') ? 'beta' : 'stable'} ${version}
skip-checks: true"`);

  // Make sure the release commit is on top of the latest master
  execGit(`pull --rebase ${repo} master`);

  // Now push normally
  execGit(`push ${repo} HEAD:${process.env['GITHUB_REF']}`, true);

  if (doGHRelease) {
    const files = process.env['GH_RELEASE_FILES']
      ? `--assets ${process.env['GH_RELEASE_FILES']}`
      : '';

    execPm(`polkadot-exec-ghrelease --draft ${files} --yes`);
  }
}
/**
 * Runs fn once per buildable package under packages/* (in topological
 * order), or once in the root when no packages directory exists. Each
 * invocation happens with cwd set to the package directory.
 *
 * @param {() => unknown} fn
 */
function loopFunc (fn) {
  if (!fs.existsSync('packages')) {
    fn();

    return;
  }

  /** @param {string} dir */
  const isBuildable = (dir) => {
    const pkgDir = path.join(process.cwd(), 'packages', dir);

    return fs.statSync(pkgDir).isDirectory() &&
      fs.existsSync(path.join(pkgDir, 'package.json')) &&
      fs.existsSync(path.join(pkgDir, 'build'));
  };

  for (const dir of topoSort(fs.readdirSync('packages').filter(isBuildable))) {
    process.chdir(path.join('packages', dir));
    fn();
    process.chdir('../..');
  }
}
// first do infrastructure setup (git identity, npm auth token)
gitSetup();
npmSetup();

// get flags immediate, then adjust (verBump may still toggle withNpm)
getFlags();
verBump();

// perform the actual CI build
execPm('polkadot-dev-clean-build');
execPm('lint');
execPm('test');
execPm('build');

// publish to all GH repos
gitPush();
denoPublish();
bundlePublish();

// publish to npm (per-package when a packages/ dir exists)
loopFunc(npmPublish);
+14
View File
@@ -0,0 +1,14 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import { execPm, GITHUB_REPO, GITHUB_TOKEN_URL, gitSetup, logBin } from './util.mjs';
// token-authenticated push URL for this repository
const repo = `${GITHUB_TOKEN_URL}/${GITHUB_REPO}.git`;

logBin('polkadot-ci-ghact-docs');

gitSetup();

// generate the docs, then publish the GH_PAGES_SRC output via gh-pages
execPm('run docs');
execPm(`polkadot-exec-ghpages --dotfiles --repo ${repo} --dist ${process.env['GH_PAGES_SRC']} --dest .`, true);
+43
View File
@@ -0,0 +1,43 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import fs from 'node:fs';
import { execGit, logBin } from './util.mjs';
logBin('polkadot-ci-ghpages-force');

// ensure we are on master
execGit('checkout master');

// checkout latest
execGit('fetch');
execGit('checkout gh-pages');
execGit('pull');

// start a fresh history-free branch from the current gh-pages contents
execGit('checkout --orphan gh-pages-temp');

// ignore relevant files
fs.writeFileSync('.gitignore', `
.github/
.vscode/
.yarn/
build/
coverage/
node_modules/
packages/
test/
NOTES.md
`);

// add
execGit('add -A');
execGit('commit -am "refresh history"');

// danger, force new: replace gh-pages with the single-commit branch
execGit('branch -D gh-pages');
execGit('branch -m gh-pages');
execGit('push -f origin gh-pages');

// switch to master
execGit('checkout master');
+19
View File
@@ -0,0 +1,19 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import fs from 'node:fs';
import path from 'node:path';
import { copyDirSync, logBin, rimrafSync } from './util.mjs';
logBin('polkadot-dev-build-docs');

// when a docs/ folder exists, mirror it into a freshly-cleaned build-docs/
let docRoot = path.join(process.cwd(), 'docs');

if (fs.existsSync(docRoot)) {
  docRoot = path.join(process.cwd(), 'build-docs');
  rimrafSync(docRoot);
  copyDirSync(path.join(process.cwd(), 'docs'), docRoot);
}
File diff suppressed because it is too large Load Diff
+29
View File
@@ -0,0 +1,29 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
// @ts-expect-error For scripts we don't include @types/* definitions
import madge from 'madge';
import { exitFatal, logBin } from './util.mjs';
logBin('polkadot-dev-circular');

// analyze all TS sources from the current directory downwards
const res = await madge('./', { fileExtensions: ['ts', 'tsx'] });
/** @type {string[][]} each entry is one cycle, as a list of files */
const circular = res.circular();

if (!circular.length) {
  process.stdout.write('No circular dependency found!\n');
  process.exit(0);
}

const err = `Failed with ${circular.length} circular dependencies`;
// one numbered line per detected cycle
const all = circular
  .map((files, idx) => `${(idx + 1).toString().padStart(4)}: ${files.join(' > ')}`)
  .join('\n');

process.stdout.write(`\n${err}:\n\n${all}\n\n`);
exitFatal(err);
+61
View File
@@ -0,0 +1,61 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import fs from 'node:fs';
import path from 'node:path';
import { logBin, PATHS_BUILD, rimrafSync } from './util.mjs';
// packages/ root and the build* output directory names to be removed
const PKGS = path.join(process.cwd(), 'packages');
const DIRS = PATHS_BUILD.map((d) => `build${d}`);

logBin('polkadot-dev-clean-build');
/**
 * @internal
 *
 * Collects the removable build artifacts directly within dir: every
 * build* output directory plus any tsconfig.*.tsbuildinfo file found
 *
 * @param {string} dir
 * @returns {string[]}
 */
function getPaths (dir) {
  if (!fs.existsSync(dir)) {
    return [];
  }

  const result = DIRS.map((d) => path.join(dir, d));

  for (const entry of fs.readdirSync(dir)) {
    if (entry.startsWith('tsconfig.') && entry.endsWith('.tsbuildinfo')) {
      result.push(path.join(dir, entry));
    }
  }

  return result;
}
/**
 * @internal
 *
 * Removes each of the supplied directories
 *
 * @param {string[]} dirs
 */
function cleanDirs (dirs) {
  for (const dir of dirs) {
    rimrafSync(dir);
  }
}
// clean the root itself ...
cleanDirs(getPaths(process.cwd()));

// ... and then the packages/ root plus every package directory within it
if (fs.existsSync(PKGS)) {
  cleanDirs(getPaths(PKGS));
  cleanDirs(
    fs
      .readdirSync(PKGS)
      .map((f) => path.join(PKGS, f))
      .filter((f) => fs.statSync(f).isDirectory())
      .reduce((/** @type {string[]} */ res, d) => res.concat(getPaths(d)), [])
  );
}
+74
View File
@@ -0,0 +1,74 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import fs from 'node:fs';
import { execGit, logBin, mkdirpSync } from './util.mjs';
const tmpDir = 'packages/build';
const tmpFile = `${tmpDir}/CONTRIBUTORS`;

logBin('polkadot-dev-contrib');

mkdirpSync(tmpDir);

// per-author commit counts on master, one "<count>\t<name> <email>" per line
execGit(`shortlog master -e -n -s > ${tmpFile}`);

fs.writeFileSync(
  'CONTRIBUTORS',
  Object
    .entries(
      fs
        .readFileSync(tmpFile, 'utf-8')
        .split('\n')
        .map((l) => l.trim())
        .filter((l) => !!l)
        // aggregate counts by email, dropping CI/bot accounts and merging
        // different emails that share the same multi-word author name
        .reduce((/** @type {Record<string, { count: number; name: string; }>} */ all, line) => {
          const [c, e] = line.split('\t');
          const count = parseInt(c, 10);
          const [name, rest] = e.split(' <');
          const isExcluded = (
            ['GitHub', 'Travis CI'].some((n) => name.startsWith(n)) ||
            ['>', 'action@github.com>'].some((e) => rest === e) ||
            [name, rest].some((n) => n.includes('[bot]'))
          );

          if (!isExcluded) {
            let [email] = rest.split('>');

            if (!all[email]) {
              // new email, but possibly a known (multi-word) name - fold
              // into the first email already recorded for that name
              email = Object.keys(all).find((k) =>
                name.includes(' ') &&
                all[k].name === name
              ) || email;
            }

            if (all[email]) {
              all[email].count += count;
            } else {
              all[email] = { count, name };
            }
          }

          return all;
        }, {})
    )
    // most commits first, ties broken alphabetically by name
    .sort((a, b) => {
      const diff = b[1].count - a[1].count;

      return diff === 0
        ? a[1].name.localeCompare(b[1].name)
        : diff;
    })
    // append line 4 of each author's latest commit log (presumably the
    // commit subject for standard non-merge commits - verify format)
    .map(([email, { count, name }], i) => {
      execGit(`log master -1 --author=${email} > ${tmpFile}-${i}`);

      const commit = fs
        .readFileSync(`${tmpFile}-${i}`, 'utf-8')
        .split('\n')[4]
        .trim();

      return `${`${count}`.padStart(8)}\t${name.padEnd(30)}\t${commit}`;
    })
    .join('\n')
);
+44
View File
@@ -0,0 +1,44 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import { copyDirSync, exitFatal, logBin } from './util.mjs';
const argv = process.argv.slice(2);
/** @type {string[]} positional arguments: <source>... <destination> */
const args = [];
let cd = '';
let flatten = false;

// split the flags (--cd <dir>, --flatten) from the positional arguments
for (let i = 0; i < argv.length; i++) {
  switch (argv[i]) {
    case '--cd':
      cd = argv[++i];
      break;

    case '--flatten':
      flatten = true;
      break;

    default:
      args.push(argv[i]);
      break;
  }
}

const sources = args.slice(0, args.length - 1);
const dest = args[args.length - 1];

logBin('polkadot-dev-copy-dir');

// FIX: sources is an array and therefore always truthy - the previous
// (!sources || !dest) guard let zero/one positional arguments through
if (!sources.length || !dest) {
  exitFatal('Expected at least one <source>... and one <destination> argument');
}

// copy each source into dest (retaining the source dir name unless --flatten)
sources.forEach((src) =>
  copyDirSync(
    cd
      ? `${cd}/${src}`
      : src,
    cd
      ? `${cd}/${dest}${flatten ? '' : `/${src}`}`
      : `${dest}${flatten ? '' : `/${src}`}`
  )
);
+53
View File
@@ -0,0 +1,53 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import fs from 'node:fs';
import path from 'node:path';
import { copyDirSync, execPm, exitFatal, logBin, mkdirpSync, rimrafSync } from './util.mjs';
const args = process.argv.slice(2);

logBin('polkadot-dev-copy-to');

if (args.length !== 1) {
  exitFatal('Expected one <destination> argument');
}

// target is the node_modules folder of a sibling project
const dest = path.join(process.cwd(), '..', args[0], 'node_modules');

if (!fs.existsSync(dest)) {
  exitFatal('Destination node_modules folder does not exist');
}

// build to ensure we actually have latest
execPm('build');

// map across what is available and copy it
fs
  .readdirSync('packages')
  .map((dir) => {
    const pkgPath = path.join(process.cwd(), 'packages', dir);

    return [pkgPath, path.join(pkgPath, 'package.json')];
  })
  .filter(([, jsonPath]) => fs.existsSync(jsonPath))
  .map(([pkgPath, json]) => [JSON.parse(fs.readFileSync(json, 'utf8')).name, pkgPath])
  .forEach(([name, pkgPath]) => {
    console.log(`*** Copying ${name} to ${dest}`);

    const outDest = path.join(dest, name);

    // remove the destination
    rimrafSync(outDest);
    // create the root
    mkdirpSync(outDest);
    // copy the build output
    copyDirSync(path.join(pkgPath, 'build'), outDest);
    // copy node_modules, as available
    copyDirSync(path.join(pkgPath, 'node_modules'), path.join(outDest, 'node_modules'));
  });
+35
View File
@@ -0,0 +1,35 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import fs from 'node:fs';
import { DENO_POL_PRE } from './util.mjs';
// build the top-level Deno re-exports (e) and the import map (i) from
// every package exposing a src/mod.ts entry point
const [e, i] = fs
  .readdirSync('packages')
  .filter((p) => fs.existsSync(`packages/${p}/src/mod.ts`))
  .sort()
  .reduce((/** @type {[string[], Record<string, string>]} */ [e, i], p) => {
    e.push(`export * as ${p.replace(/-/g, '_')} from '${DENO_POL_PRE}/${p}/mod.ts';`);
    i[`${DENO_POL_PRE}/${p}/`] = `./packages/${p}/build-deno/`;

    return [e, i];
  }, [[], {}]);

// only scaffold mod.ts when it does not exist yet
if (!fs.existsSync('mod.ts')) {
  fs.writeFileSync('mod.ts', `// Copyright 2017-${new Date().getFullYear()} @polkadot/dev authors & contributors\n// SPDX-License-Identifier: Apache-2.0\n\n// auto-generated via polkadot-dev-deno-map, do not edit\n\n// This is a Deno file, so we can allow .ts imports
/* eslint-disable import/extensions */\n\n${e.join('\n')}\n`);
}

// merge any hand-maintained import_map.in.json entries over the generated map
if (fs.existsSync('import_map.in.json')) {
  const o = JSON.parse(fs.readFileSync('import_map.in.json', 'utf-8'));

  Object
    .entries(o.imports)
    .forEach(([k, v]) => {
      i[k] = v;
    });
}

fs.writeFileSync('import_map.json', JSON.stringify({ imports: i }, null, 2));
+40
View File
@@ -0,0 +1,40 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import process from 'node:process';
import yargs from 'yargs';
import { __dirname, execPm, GITHUB_REPO, logBin } from './util.mjs';
// when set, tsc runs against tsconfig.build.json (always enabled here)
const TS_CONFIG_BUILD = true;

logBin('polkadot-dev-run-lint');

// Since yargs can also be a promise, we just relax the type here completely
const argv = await yargs(process.argv.slice(2))
  .options({
    'skip-eslint': {
      description: 'Skips running eslint',
      type: 'boolean'
    },
    'skip-tsc': {
      description: 'Skips running tsc',
      type: 'boolean'
    }
  })
  .strict()
  .argv;

if (!argv['skip-eslint']) {
  // We don't want to run with fix on CI (GITHUB_REPO set means CI)
  const extra = GITHUB_REPO
    ? ''
    : '--fix';

  execPm(`polkadot-exec-eslint ${extra} ${process.cwd()}`);
}

if (!argv['skip-tsc']) {
  // type-check only, no output files
  execPm(`polkadot-exec-tsc --noEmit --emitDeclarationOnly false --pretty${TS_CONFIG_BUILD ? ' --project tsconfig.build.json' : ''}`);
}
+9
View File
@@ -0,0 +1,9 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import { execNodeTs, logBin } from './util.mjs';
logBin('polkadot-run-node-ts');

// pass all CLI arguments straight through to the TS-enabled node executor
execNodeTs(process.argv.slice(2).join(' '));
+163
View File
@@ -0,0 +1,163 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import process from 'node:process';
import { execNodeTs, exitFatal, exitFatalEngine, importPath, logBin, readdirSync } from './util.mjs';
// A & B are just helpers here and in the errors below
const EXT_A = ['spec', 'test'];
const EXT_B = ['ts', 'tsx', 'js', 'jsx', 'cjs', 'mjs'];
// The actual extensions we are looking for, i.e. .spec.ts, .test.js, ...
const EXTS = EXT_A.reduce((/** @type {string[]} */ exts, s) => exts.concat(...EXT_B.map((e) => `.${s}.${e}`)), []);

logBin('polkadot-dev-run-test');

exitFatalEngine();

// flags passed through to the node-test wrapper
const cmd = [];
// flags passed through to node itself (--import/--loader/--require)
const nodeFlags = [];
// raw positional filters as supplied (used for error messages)
const filters = [];
/** @type {Record<string, string[]>} exclusion filters (^-prefixed), split on path separators */
const filtersExcl = {};
/** @type {Record<string, string[]>} inclusion filters, split on path separators */
const filtersIncl = {};
const args = process.argv.slice(2);
let testEnv = 'node';
let isDev = false;
for (let i = 0; i < args.length; i++) {
  switch (args[i]) {
    // when running inside a dev environment, specifically @polkadot/dev
    case '--dev-build':
      isDev = true;
      break;

    // environment, not passed-through
    case '--env':
      if (!['browser', 'node'].includes(args[++i])) {
        throw new Error(`Invalid --env ${args[i]}, expected 'browser' or 'node'`);
      }

      testEnv = args[i];
      break;

    // internal flags with no params
    case '--bail':
    case '--console':
      cmd.push(args[i]);
      break;

    // internal flags, with params
    case '--logfile':
      cmd.push(args[i]);
      cmd.push(args[++i]);
      break;

    // node flags that could have additional params
    case '--import':
    case '--loader':
    case '--require':
      nodeFlags.push(args[i]);
      nodeFlags.push(args[++i]);
      break;

    // any other non-flag arguments are passed-through
    default:
      if (args[i].startsWith('-')) {
        throw new Error(`Unknown flag ${args[i]} found`);
      }

      filters.push(args[i]);

      // a ^ prefix marks an exclusion filter; supplying the same filter
      // both included and excluded cancels the earlier one out
      if (args[i].startsWith('^')) {
        const key = args[i].slice(1);

        if (filtersIncl[key]) {
          delete filtersIncl[key];
        } else {
          filtersExcl[key] = key.split(/[\\/]/);
        }
      } else {
        const key = args[i];

        if (filtersExcl[key]) {
          delete filtersExcl[key];
        } else {
          filtersIncl[key] = key.split(/[\\/]/);
        }
      }

      break;
  }
}
/**
 * Tests whether the path segments match any of the supplied filters.
 * Within a filter a ':'-prefixed segment matches by inclusion, the last
 * segment matches by prefix and any other segment requires equality.
 *
 * @param {string[]} parts
 * @param {Record<string, string[]>} filters
 * @returns {boolean}
 */
function applyFilters (parts, filters) {
  /**
   * @param {string} part
   * @param {string} f
   * @param {boolean} isLast
   */
  const matchSeg = (part, f, isLast) =>
    f.startsWith(':')
      ? part.includes(f.slice(1))
      : isLast
        ? part.startsWith(f)
        : part === f;

  return Object.values(filters).some((filter) => {
    // try every segment position as a potential start of the filter
    for (let start = 0; start < parts.length; start++) {
      if (!matchSeg(parts[start], filter[0], filter.length === 1)) {
        continue;
      }

      const isFull = filter.every((f, i) =>
        parts[start + i] && matchSeg(parts[start + i], f, i === filter.length - 1)
      );

      if (isFull) {
        return true;
      }
    }

    return false;
  });
}
// all discovered test files, narrowed by the include/exclude path filters
const files = readdirSync('packages', EXTS).filter((file) => {
  const parts = file.split(/[\\/]/);
  let isIncluded = true;

  if (Object.keys(filtersIncl).length) {
    isIncluded = applyFilters(parts, filtersIncl);
  }

  // exclusions apply on top of whatever is included
  if (isIncluded && Object.keys(filtersExcl).length) {
    isIncluded = !applyFilters(parts, filtersExcl);
  }

  return isIncluded;
});

if (files.length === 0) {
  exitFatal(`No files matching *.{${EXT_A.join(', ')}}.{${EXT_B.join(', ')}} found${filters.length ? ` (filtering on ${filters.join(', ')})` : ''}`);
}

try {
  // run the files through our node:test wrapper, requiring the test
  // environment shim (built paths in dev, published packages otherwise)
  const allFlags = `${importPath('@polkadot/dev/scripts/polkadot-exec-node-test.mjs')} ${[...cmd, ...files].join(' ')}`;

  nodeFlags.push('--require');
  nodeFlags.push(
    isDev
      ? `./packages/dev-test/build/cjs/${testEnv}.js`
      : `@polkadot/dev-test/${testEnv}`
  );

  execNodeTs(allFlags, nodeFlags, false, isDev ? './packages/dev-ts/build/testCached.js' : '@polkadot/dev-ts/testCached');
} catch {
  process.exit(1);
}
+143
View File
@@ -0,0 +1,143 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import fs from 'node:fs';
import path from 'node:path';
import yargs from 'yargs';
import { execPm, exitFatal, logBin } from './util.mjs';
/** @typedef {{ dependencies?: Record<string, string>; devDependencies?: Record<string, string>; peerDependencies?: Record<string, string>; optionalDependencies?: Record<string, string>; resolutions?: Record<string, string>; name?: string; stableVersion?: string; version: string; }} PkgJson */

// the supported version bump types
const TYPES = ['major', 'minor', 'patch', 'pre'];

// first positional argument is the bump type
const [type] = (
  await yargs(process.argv.slice(2))
    .demandCommand(1)
    .argv
)._;

if (typeof type !== 'string' || !TYPES.includes(type)) {
  exitFatal(`Invalid version bump "${type}", expected one of ${TYPES.join(', ')}`);
}
/**
 * Returns a name-sorted copy of the dependency map where every entry
 * referencing a sibling package (except '*' wildcards) is re-pointed at
 * the new version, keeping any existing ^ range prefix
 *
 * @param {Record<string, string>} dependencies
 * @param {string[]} others - names of the sibling packages in this repo
 * @param {string} version
 * @returns {Record<string, string>}
 */
function updateDependencies (dependencies, others, version) {
  /** @type {Record<string, string>} */
  const result = {};
  const names = Object.keys(dependencies).sort((a, b) => a.localeCompare(b));

  for (const key of names) {
    const value = dependencies[key];

    result[key] = (others.includes(key) && value !== '*')
      ? (value.startsWith('^') ? `^${version}` : version)
      : value;
  }

  return result;
}
/**
 * Reads ./package.json, returning both its path and parsed contents
 *
 * @returns {[string, PkgJson]}
 */
function readCurrentPkgJson () {
  const jsonPath = path.join(process.cwd(), 'package.json');

  return [jsonPath, JSON.parse(fs.readFileSync(jsonPath, 'utf8'))];
}
/**
 * Writes the supplied JSON to path, 2-space indented with trailing newline
 *
 * @param {string} path
 * @param {unknown} json
 */
function writePkgJson (path, json) {
  const contents = `${JSON.stringify(json, null, 2)}\n`;

  fs.writeFileSync(path, contents);
}
/**
 * Rewrites a single package.json with the new version: sets the version
 * field, re-points sibling entries in every dependency section and drops
 * any stableVersion field (all other keys pass through in order)
 *
 * @param {string} version
 * @param {string[]} others
 * @param {string} pkgPath
 * @param {Record<String, any>} json
 */
function updatePackage (version, others, pkgPath, json) {
  const DEP_KEYS = ['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies', 'resolutions'];
  /** @type {Record<String, unknown>} */
  const updated = {};

  for (const key of Object.keys(json)) {
    if (key === 'version') {
      updated[key] = version;
    } else if (DEP_KEYS.includes(key)) {
      updated[key] = updateDependencies(json[key], others, version);
    } else if (key !== 'stableVersion') {
      updated[key] = json[key];
    }
  }

  writePkgJson(pkgPath, updated);
}
/**
 * Removes the -x suffix from the root package version (when present),
 * writing the result back
 *
 * @returns {boolean} true when a suffix was removed
 */
function removeX () {
  const [rootPath, json] = readCurrentPkgJson();

  if (!json.version?.endsWith('-x')) {
    return false;
  }

  // strip the suffix only - replace('-x', '') would remove the first
  // '-x' occurrence anywhere in the version string, not the trailing one
  json.version = json.version.slice(0, -2);
  writePkgJson(rootPath, json);

  return true;
}
/**
 * Appends the -x suffix to the root package version (when not already
 * present), writing the result back
 *
 * @returns {boolean} true when the suffix was added
 */
function addX () {
  const [rootPath, json] = readCurrentPkgJson();

  if (json.version.endsWith('-x')) {
    return false;
  }

  json.version = `${json.version}-x`;
  writePkgJson(rootPath, json);

  return true;
}
logBin('polkadot-dev-version');

// drop any -x suffix before bumping (NOTE(review): presumably so the
// underlying version command operates on clean semver - confirm), then
// re-add it below when continuing a prerelease stream
const isX = removeX();

execPm(`version ${type === 'pre' ? 'prerelease' : type}`);

if (isX && type === 'pre') {
  addX();
}

const [rootPath, rootJson] = readCurrentPkgJson();

updatePackage(rootJson.version, [], rootPath, rootJson);

// yarn workspaces does an OOM, manual looping takes ages
if (fs.existsSync('packages')) {
  const packages = fs
    .readdirSync('packages')
    .map((dir) => path.join(process.cwd(), 'packages', dir, 'package.json'))
    .filter((pkgPath) => fs.existsSync(pkgPath))
    .map((pkgPath) => [pkgPath, JSON.parse(fs.readFileSync(pkgPath, 'utf8'))]);
  const others = packages.map(([, json]) => json.name);

  // every sub-package gets the root version, with sibling deps re-pointed
  packages.forEach(([pkgPath, json]) => {
    updatePackage(rootJson.version, others, pkgPath, json);
  });
}

execPm('install');
+11
View File
@@ -0,0 +1,11 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import process from 'node:process';
import { exitFatalYarn } from './util.mjs';
// exitFatalYarn presumably aborts when not invoked via yarn (see util.mjs);
// reaching the next line means the check passed
exitFatalYarn();
process.exit(0);
+7
View File
@@ -0,0 +1,7 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import { importRelative } from './util.mjs';
// resolve the locally-installed eslint and execute its CLI entry point
await importRelative('eslint', 'eslint/bin/eslint.js');
+11
View File
@@ -0,0 +1,11 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import { importRelative } from './util.mjs';
// resolve the locally-installed gh-pages CLI and run it with our argv
const ghp = await importRelative('gh-pages', 'gh-pages/bin/gh-pages.js');

await ghp.default(process.argv);
console.log('Published');
+7
View File
@@ -0,0 +1,7 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import { importRelative } from './util.mjs';
// resolve the locally-installed gh-release and execute its CLI entry point
await importRelative('gh-release', 'gh-release/bin/cli.js');
+368
View File
@@ -0,0 +1,368 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
// For Node 18, earliest usable is 18.14:
//
// - node:test added in 18.0,
// - run method exposed in 18.9,
// - mock in 18.13,
// - diagnostics changed in 18.14
//
// Node 16 is not supported:
//
// - node:test added is 16.17,
// - run method exposed in 16.19,
// - mock not available
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import process from 'node:process';
import { run } from 'node:test';
import { isMainThread, parentPort, Worker, workerData } from 'node:worker_threads';
// NOTE error should be defined as "Error", however the @types/node definitions doesn't include all
/** @typedef {{ file?: string; message?: string; }} DiagStat */
/** @typedef {{ details: { type: string; duration_ms: number; error: { message: string; failureType: unknown; stack: string; cause: { code: number; message: string; stack: string; generatedMessage?: any; }; code: number; } }; file?: string; name: string; testNumber: number; nesting: number; }} FailStat */
/** @typedef {{ details: { duration_ms: number }; name: string; }} PassStat */
/** @typedef {{ diag: DiagStat[]; fail: FailStat[]; pass: PassStat[]; skip: unknown[]; todo: unknown[]; total: number; [key: string]: any; }} Stats */
console.time('\t elapsed :');

// when enabled, full failure records are dumped as JSON (debugging aid)
const WITH_DEBUG = false;

const args = process.argv.slice(2);
/** @type {string[]} test files to execute */
const files = [];
/** @type {Stats} accumulated per-category results for the whole run */
const stats = {
  diag: [],
  fail: [],
  pass: [],
  skip: [],
  todo: [],
  total: 0
};
/** @type {string | null} file to append failure details to (--logfile) */
let logFile = null;
/** @type {number} */
let startAt = 0;
/** @type {boolean} --bail flag (behavior applied elsewhere in this file) */
let bail = false;
/** @type {boolean} --console flag (behavior applied elsewhere in this file) */
let toConsole = false;
/** @type {number} */
let progressRowCount = 0;
// split our own flags from the list of test files to run
for (let i = 0; i < args.length; i++) {
  switch (args[i]) {
    case '--bail':
      bail = true;
      break;

    case '--console':
      toConsole = true;
      break;

    case '--logfile':
      logFile = args[++i];
      break;

    default:
      files.push(args[i]);
      break;
  }
}
/**
 * @internal
 *
 * Re-flows str onto new lines, each prefixed by count tab-stops (capped
 * at 3) plus a space; start decorates the first line and subsequent
 * lines are padded to align beneath it
 *
 * @param {number} count
 * @param {string} str
 * @param {string} start
 * @returns {string}
 */
function indent (count, str = '', start = '') {
  const tabs = (count === 0 || count === 1 || count === 2)
    ? '\t'.repeat(count)
    : '\t\t\t';
  const pre = `\n${tabs} `;
  const cont = `${pre}${start ? ' '.padStart(start.length, ' ') : ''}`;
  const body = str
    .split('\n')
    .map((l) => l.trim())
    .join(cont);

  return `${pre}${start}${body}\n`;
}
/**
 * Determines the most useful source filename for a failed test result.
 *
 * Prefers the file reported directly on the result when it is a spec/test
 * file, otherwise walks the error cause stack for the first frame inside
 * a .spec. or .test. file and extracts its file:line:col location.
 *
 * @param {FailStat} r
 * @return {string | undefined}
 */
function getFilename (r) {
  if (r.file?.includes('.spec.') || r.file?.includes('.test.')) {
    return r.file;
  }

  if (r.details.error.cause.stack) {
    const stack = r.details.error.cause.stack
      .split('\n')
      .map((l) => l.trim())
      .filter((l) => l.startsWith('at ') && (l.includes('.spec.') || l.includes('.test.')))
      // FIX: was /\(.*:\d\d?:\d\d?\)$/ which silently failed to match
      // line/column numbers with 3+ digits (i.e. any file >= 100 lines)
      .map((l) => l.match(/\(.*:\d+:\d+\)$/)?.[0])
      // FIX: drop frames where no location could be extracted, so an
      // unmatched first frame no longer yields undefined
      .filter((l) => !!l)
      .map((l) => l.replace('(', '').replace(')', ''));

    if (stack.length) {
      return stack[0];
    }
  }

  return r.file;
}
/**
 * Finalizes the run: prints per-failure detail, the summary counts and
 * any collected diagnostics, then exits the process with the number of
 * failures as the exit code (0 when everything passed).
 */
function complete () {
  process.stdout.write('\n');
  let logError = '';
  // Per-failure reporting: filename/test-name header, failure codes, cause
  stats.fail.forEach((r) => {
    WITH_DEBUG && console.error(JSON.stringify(r, null, 2));
    let item = '';
    item += indent(1, [getFilename(r), r.name].filter((s) => !!s).join('\n'), 'x ');
    item += indent(2, `${r.details.error.failureType} / ${r.details.error.code}${r.details.error.cause.code && r.details.error.cause.code !== r.details.error.code ? ` / ${r.details.error.cause.code}` : ''}`);
    if (r.details.error.cause.message) {
      item += indent(2, r.details.error.cause.message);
    }
    // NOTE(review): the logfile entry is captured here, before the stack
    // is appended below (console output only) - confirm this is intentional
    logError += item;
    if (r.details.error.cause.stack) {
      item += indent(2, r.details.error.cause.stack);
    }
    process.stdout.write(item);
  });
  // best-effort append of the failure summaries to the requested logfile
  if (logFile && logError) {
    try {
      fs.appendFileSync(path.join(process.cwd(), logFile), logError);
    } catch (e) {
      console.error(e);
    }
  }
  console.log();
  console.log('\t passed ::', stats.pass.length);
  console.log('\t failed ::', stats.fail.length);
  console.log('\t skipped ::', stats.skip.length);
  console.log('\t todo ::', stats.todo.length);
  console.log('\t total ::', stats.total);
  console.timeEnd('\t elapsed :');
  console.log();
  // The full error information can be quite useful in the case of overall failures
  if ((stats.fail.length || toConsole) && stats.diag.length) {
    /** @type {string | undefined} */
    let lastFilename = '';
    stats.diag.forEach((r) => {
      WITH_DEBUG && console.error(JSON.stringify(r, null, 2));
      if (typeof r === 'string') {
        console.log(r); // Node.js <= 18.14
      } else if (r.file && r.file.includes('@polkadot/dev/scripts')) {
        // Ignore internal diagnostics
      } else {
        // print a filename header once per group of diagnostics
        if (lastFilename !== r.file) {
          lastFilename = r.file;
          console.log(lastFilename ? `\n${lastFilename}::\n` : '\n');
        }
        // Edge case: We don't need additional noise that is not useful.
        if (!r.message?.split(' ').includes('tests')) {
          console.log(`\t${r.message?.split('\n').join('\n\t')}`);
        }
      }
    });
  }
  // with --console, also dump a flat pass/fail list with durations
  if (toConsole) {
    stats.pass.forEach((r) => {
      console.log(`pass ${r.name} ${r.details.duration_ms} ms`);
    });
    console.log();
    stats.fail.forEach((r) => {
      console.log(`fail ${r.name}`);
    });
    console.log();
  }
  // a run that executed zero tests is treated as a configuration error
  if (stats.total === 0) {
    console.error('FATAL: No tests executed');
    console.error();
    process.exit(1);
  }
  // exit code is the failure count (0 = success)
  process.exit(stats.fail.length);
}
/**
 * Prints the progress in real-time as data is passed from the worker.
 *
 * Emits one symbol per completed test, clustered in groups of 5/10 and
 * wrapped at 100 symbols per row, with the elapsed m:ss.mmm time
 * prefixed to each new row.
 *
 * @param {string} symbol
 */
function printProgress (symbol) {
  // FIX: removed the dead `if (!progressRowCount) { progressRowCount = 0; }`
  // no-op (it only assigned 0 when the value was already falsy)
  // Lazily capture the start time on the first symbol printed
  if (!startAt) {
    startAt = performance.now();
  }

  // If starting a new row, calculate and print the elapsed time
  if (progressRowCount === 0) {
    const now = performance.now();
    const elapsed = (now - startAt) / 1000;
    const minutes = Math.floor(elapsed / 60);
    const seconds = elapsed - minutes * 60;

    process.stdout.write(
      `${`${minutes}:${seconds.toFixed(3).padStart(6, '0')}`.padStart(11)} `
    );
  }

  // Print the symbol with formatting
  process.stdout.write(symbol);
  progressRowCount++;

  // Add spaces for readability
  if (progressRowCount % 10 === 0) {
    process.stdout.write(' '); // Double space every 10 symbols
  } else if (progressRowCount % 5 === 0) {
    process.stdout.write(' '); // Single space every 5 symbols
  }

  // If the row reaches 100 symbols, start a new row
  if (progressRowCount >= 100) {
    process.stdout.write('\n');
    progressRowCount = 0;
  }
}
/**
 * Splits the file list across worker threads (one chunk per CPU core,
 * capped at the number of files), waits for all workers, aggregates
 * their stats into the module-level accumulator and calls complete().
 */
async function runParallel () {
  // never spawn more workers than there are files (or cores)
  const MAX_WORKERS = Math.min(os.cpus().length, files.length);
  const chunks = Math.ceil(files.length / MAX_WORKERS);
  try {
    // Create and manage worker threads
    const results = await Promise.all(
      Array.from({ length: MAX_WORKERS }, (_, i) => {
        // contiguous slice of the file list assigned to this worker
        const fileSubset = files.slice(i * chunks, (i + 1) * chunks);
        return new Promise((resolve, reject) => {
          // re-invoke this same script as the worker entrypoint
          const worker = new Worker(new URL(import.meta.url), {
            workerData: { files: fileSubset }
          });
          worker.on('message', (message) => {
            if (message.type === 'progress') {
              printProgress(message.data);
            } else if (message.type === 'result') {
              // NOTE(review): resolves on the first 'result' message;
              // the worker may still be running at that point - confirm
              resolve(message.data);
            }
          });
          worker.on('error', reject);
          worker.on('exit', (code) => {
            if (code !== 0) {
              reject(new Error(`Worker stopped with exit code ${code}`));
            }
          });
        });
      })
    );
    // Aggregate results from workers (arrays concatenated, counters summed)
    results.forEach((result) => {
      Object.keys(stats).forEach((key) => {
        if (Array.isArray(stats[key])) {
          stats[key] = stats[key].concat(result[key]);
        } else if (typeof stats[key] === 'number') {
          stats[key] += result[key];
        }
      });
    });
    complete();
  } catch (err) {
    console.error('Error during parallel execution:', err);
    process.exit(1);
  }
}
if (isMainThread) {
  // Main thread: distribute the files across workers and aggregate.
  // NOTE(review): this timer label never gets a matching console.timeEnd -
  // complete() ends the '\t elapsed :' timer started at module load
  console.time('\tElapsed:');
  runParallel().catch((err) => console.error(err));
} else {
  // Worker thread: run the assigned file subset through node:test and
  // stream progress/result messages back to the parent via postMessage
  run({ files: workerData.files, timeout: 3_600_000 })
    .on('data', () => undefined)
    // NOTE(review): this posts the bare stats object (no { type } wrapper),
    // which the parent's message handler ignores - confirm whether it
    // should be { data: stats, type: 'result' }
    .on('end', () => parentPort && parentPort.postMessage(stats))
    .on('test:coverage', () => undefined)
    .on('test:diagnostic', (/** @type {DiagStat} */data) => {
      stats.diag.push(data);
      // NOTE(review): a 'result' message is posted per diagnostic event,
      // resolving the parent's promise with the stats collected so far
      parentPort && parentPort.postMessage({ data: stats, type: 'result' });
    })
    .on('test:fail', (/** @type {FailStat} */ data) => {
      const statFail = structuredClone(data);
      // the stack is re-assigned explicitly onto the clone
      // NOTE(review): presumably because structuredClone does not carry
      // it over on some Node versions - confirm
      if (data.details.error.cause?.stack) {
        statFail.details.error.cause.stack = data.details.error.cause.stack;
      }
      stats.fail.push(statFail);
      stats.total++;
      parentPort && parentPort.postMessage({ data: 'x', type: 'progress' });
      // with --bail, report and exit on the first failure
      if (bail) {
        complete();
      }
    })
    .on('test:pass', (data) => {
      // the pass event also covers skip ('>') and todo ('!') outcomes
      const symbol = typeof data.skip !== 'undefined' ? '>' : typeof data.todo !== 'undefined' ? '!' : '·';
      if (symbol === '>') {
        stats.skip.push(data);
      } else if (symbol === '!') {
        stats.todo.push(data);
      } else {
        stats.pass.push(data);
      }
      stats.total++;
      parentPort && parentPort.postMessage({ data: symbol, type: 'progress' });
    })
    .on('test:plan', () => undefined)
    .on('test:start', () => undefined);
}
+7
View File
@@ -0,0 +1,7 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
// CLI shim: runs the rollup binary out of node_modules, forwarding argv
import { execViaNode } from './util.mjs';
execViaNode('rollup', 'rollup/dist/bin/rollup');
+7
View File
@@ -0,0 +1,7 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
// CLI shim: imports the TypeScript compiler entrypoint from node_modules
import { importDirect } from './util.mjs';
await importDirect('tsc', 'typescript/lib/tsc.js');
+7
View File
@@ -0,0 +1,7 @@
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
// CLI shim: imports the webpack-cli entrypoint from node_modules
import { importDirect } from './util.mjs';
await importDirect('webpack', 'webpack-cli/bin/cli.js');
+540
View File
@@ -0,0 +1,540 @@
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0
import cp from 'node:child_process';
import fs from 'node:fs';
import path from 'node:path';
import process from 'node:process';
import url from 'node:url';
/** @internal logging */
const BLANK = ''.padStart(75);
/** CJS/ESM compatible __dirname */
export const __dirname = path.dirname(url.fileURLToPath(import.meta.url));
/** Deno prefix for externals */
export const DENO_EXT_PRE = 'https://esm.sh';
/** Deno prefix for built-ins */
export const DENO_LND_PRE = 'https://deno.land';
/** Deno prefix for the polkadot package */
export const DENO_POL_PRE = `${DENO_LND_PRE}/x/polkadot`;
/** The GH user that we use for actions */
export const GITHUB_USER = 'github-actions[bot]';
/** The GH email for actions */
export const GITHUB_MAIL = '41898282+github-actions[bot]@users.noreply.github.com';
/** The GH repo link */
export const GITHUB_REPO = process.env['GITHUB_REPOSITORY'];
/** The GH token */
export const GITHUB_TOKEN = process.env['GH_PAT'];
/** The GH repo URL */
export const GITHUB_TOKEN_URL = `https://${GITHUB_TOKEN}@github.com`;
/** Paths that we generally building to (catch-all for possible usages) */
export const PATHS_BUILD = ['', '-cjs', '-esm'].reduce((r, a) => r.concat(['', '-babel', '-esbuild', '-swc', '-tsc'].map((b) => `${b}${a}`)), ['-deno', '-docs', '-loader', '-wasm']).sort();
/** Paths that are generally excluded from source operations */
export const PATHS_EXCL = ['node_modules', ...PATHS_BUILD.map((e) => `build${e}`)];
/**
 * Copy a file (or each file in a list) into a target directory,
 * keeping the original basename.
 *
 * @param {string | string[]} src
 * @param {string} destDir
 **/
export function copyFileSync (src, destDir) {
  if (Array.isArray(src)) {
    for (const file of src) {
      copyFileSync(file, destDir);
    }

    return;
  }

  const target = path.join(destDir, path.basename(src));

  fs.copyFileSync(src, target);
}
/**
 * Recursively copies a directory (or each of a list of directories) into
 * a target directory, optionally filtering files by include/exclude
 * extension suffixes. Missing sources are silently skipped; a non-dir
 * source is a fatal error.
 *
 * @param {string | string[]} src
 * @param {string} dest
 * @param {string[]} [include]
 * @param {string[]} [exclude]
 **/
export function copyDirSync (src, dest, include, exclude) {
  if (Array.isArray(src)) {
    src.forEach((s) => copyDirSync(s, dest, include, exclude));

    return;
  }

  if (!fs.existsSync(src)) {
    // nothing exists at the source, so there is nothing to copy
    return;
  }

  if (!fs.statSync(src).isDirectory()) {
    exitFatal(`Source ${src} should be a directory`);
  }

  mkdirpSync(dest);

  for (const file of fs.readdirSync(src)) {
    const srcPath = path.join(src, file);

    if (fs.statSync(srcPath).isDirectory()) {
      copyDirSync(srcPath, path.join(dest, file), include, exclude);
      continue;
    }

    // empty/missing include list means "everything"; exclude wins last
    const isIncluded = !include?.length || include.some((e) => file.endsWith(e));
    const isExcluded = !!exclude && exclude.some((e) => file.endsWith(e));

    if (isIncluded && !isExcluded) {
      copyFileSync(srcPath, dest);
    }
  }
}
/**
 * Creates a deno directory name from a scoped package name.
 *
 * @param {string} name
 * @returns {string}
 **/
export function denoCreateDir (name) {
  const NPM_SCOPE = '@polkadot/';

  // aligns with the package name - since we have sub-paths, we only
  // return the actual path inside packages/* (the last part of the name)
  return name.replace(NPM_SCOPE, '');
}
/**
 * @internal
 *
 * Compares two engine version strings component-by-component
 * (major, then minor, then patch).
 *
 * @param {string} [a]
 * @param {string} [b]
 * @returns {number} -1 when a < b, 1 when a > b, 0 when equal
 */
export function engineVersionCmp (a, b) {
  const aVer = engineVersionSplit(a);
  const bVer = engineVersionSplit(b);

  for (let i = 0; i < 3; i++) {
    const diff = aVer[i] - bVer[i];

    if (diff !== 0) {
      return diff < 0 ? -1 : 1;
    }
  }

  return 0;
}
/**
 * @internal
 *
 * Splits a version specifier, i.e. >=xx(.yy) or vxx.yy.zz, into its
 * major/minor/patch number parts (missing parts default to 0).
 *
 * @param {string} [ver]
 * @returns {[number, number, number]}
 */
export function engineVersionSplit (ver) {
  const parts = (ver || '>=0')
    .replace('v', '') // process.version returns v18.14.0
    .replace('>=', '') // engines have >= prefix
    .split('.')
    .map((part) => part.trim());
  // missing or empty components are treated as 0
  const at = (/** @type {number} */ i) => parseInt(parts[i] || '0', 10);

  return [at(0), at(1), at(2)];
}
/**
 * Executes a shell command with inherited stdio, optionally logging the
 * (whitespace-normalized) command first.
 *
 * @param {string} cmd
 * @param {boolean} [noLog]
 **/
export function execSync (cmd, noLog) {
  // collapse double-spaces (left behind by template interpolation) and trim
  const exec = cmd.replace(/ {2}/g, ' ').trim();

  if (!noLog) {
    logBin(exec, true);
  }

  cp.execSync(exec, { stdio: 'inherit' });
}
/**
 * Node execution with ts support, splitting any loader-type flags around
 * the dev-ts loader.
 *
 * @param {string} cmd
 * @param {string[]} [nodeFlags]
 * @param {boolean} [noLog]
 * @param {string} [loaderPath]
 **/
export function execNodeTs (cmd, nodeFlags = [], noLog, loaderPath = '@polkadot/dev-ts/cached') {
  const loadersGlo = [];
  const loadersLoc = [];
  const otherFlags = [];
  let i = 0;

  while (i < nodeFlags.length) {
    const flag = nodeFlags[i++];

    if (!['--import', '--loader', '--require'].includes(flag)) {
      otherFlags.push(flag);
      continue;
    }

    // loader-type flags consume the next argument as their target
    const arg = nodeFlags[i++];

    // We split the loader arguments based on type in execSync. The
    // split here is to extract the various provided types:
    //
    // 1. Global loaders are added first, then
    // 2. Our specific dev-ts loader is added, then
    // 3. Any provided local loaders are added
    //
    // The ordering requirement here is driven from the use of global
    // loaders inside the apps repo (specifically extensionless), while
    // ensuring we don't break local loader usage in the wasm repo
    const target = arg.startsWith('.')
      ? loadersLoc
      : loadersGlo;

    target.push(flag, arg);
  }

  execSync(`${process.execPath} ${otherFlags.join(' ')} --no-warnings --enable-source-maps ${loadersGlo.join(' ')} --loader ${loaderPath} ${loadersLoc.join(' ')} ${cmd}`, noLog);
}
/**
 * Execute the git command, i.e. runs `git <cmd>` via execSync
 * (inherited stdio, optional logging)
 *
 * @param {string} cmd
 * @param {boolean} [noLog]
 **/
export function execGit (cmd, noLog) {
  execSync(`git ${cmd}`, noLog);
}
/**
 * Execute the package manager (yarn by default), i.e. runs `yarn <cmd>`
 *
 * @param {string} cmd
 * @param {boolean} [noLog]
 **/
export function execPm (cmd, noLog) {
  // It could be possible to extends this to npm/pnpm, but the package manager
  // arguments are not quite the same between them, so we may need to do mangling
  // and adjust to convert yarn-isms to the specific target.
  //
  // Instead of defaulting here, we could possibly use process.env['npm_execpath']
  // to determine the package manager which would work in most (???) cases where the
  // top-level has been executed via a package manager and the env is set - no bets
  // atm for what happens when execSync/fork is used
  //
  // TL;DR Not going to spend effort on this, but quite possibly there is an avenue
  // to support other package managers, aka pick-your-poison
  execSync(`yarn ${cmd}`, noLog);
}
/**
 * Node binary execution: logs the binary name, then runs the resolved
 * node_modules script with the caller's CLI arguments appended
 *
 * @param {string} name - display name for logging
 * @param {string} cmd - path inside node_modules to execute
 **/
export function execViaNode (name, cmd) {
  logBin(name);
  execSync(`${importPath(cmd)} ${process.argv.slice(2).join(' ')}`, true);
}
/** A consistent setup for git variables: sets the bot identity, simple
 * pushes, the ours merge driver, and checks out master */
export function gitSetup () {
  execGit(`config user.name "${GITHUB_USER}"`);
  execGit(`config user.email "${GITHUB_MAIL}"`);
  execGit('config push.default simple');
  execGit('config merge.ours.driver true');
  execGit('checkout master');
}
/**
 * Create an absolute import path into node_modules from a
 * <this module> module name
 *
 * @param {string} req
 * @returns {string}
 **/
export function importPath (req) {
  const nodeModules = path.join(process.cwd(), 'node_modules');

  return path.join(nodeModules, req);
}
/**
 * Do an async import of the given module, logging the binary name first
 * and exiting fatally when the import fails.
 *
 * @param {string} bin
 * @param {string} req
 * @returns {Promise<any>}
 **/
export async function importDirect (bin, req) {
  logBin(bin);

  try {
    return await import(req);
  } catch (/** @type {any} */ error) {
    exitFatal(`Error importing ${req}`, error);
  }
}
/**
 * Do a relative async import, i.e. resolves req against the current
 * working directory's node_modules before importing
 *
 * @param {string} bin
 * @param {string} req
 * @returns {Promise<any>}
 **/
export function importRelative (bin, req) {
  return importDirect(bin, importPath(req));
}
/**
 * Logs the binary name with the calling args (unless suppressed),
 * normalizing double-spaces and trailing whitespace.
 *
 * @param {string} bin
 * @param {boolean} [noArgs] - when set, argv is not appended
 */
export function logBin (bin, noArgs) {
  const extra = noArgs
    ? ''
    : process.argv.slice(2).join(' ');
  const line = `$ ${bin} ${extra}`.replace(/ {2}/g, ' ').trim();

  console.log(line);
}
/**
 * Do a mkdirp (no glob support, native), i.e. recursively creates the
 * directory and any missing parents (no-op if it already exists)
 *
 * @param {string} dir
 **/
export function mkdirpSync (dir) {
  fs.mkdirSync(dir, { recursive: true });
}
/**
 * Delete the full path recursively (no glob support); silently does
 * nothing when the path does not exist
 *
 * @param {string} dir
 **/
export function rimrafSync (dir) {
  // FIX: dropped the redundant existsSync pre-check (a TOCTOU race) -
  // rmSync with force: true is already a no-op for missing paths
  fs.rmSync(dir, { force: true, recursive: true });
}
/**
 * Recursively reads a directory, making a list of the files that match
 * one of the supplied extensions (build outputs and node_modules are
 * skipped during recursion).
 *
 * @param {string} src
 * @param {string[]} extensions
 * @param {string[]} [files]
 * @returns {string[]} the accumulated matching file paths
 **/
export function readdirSync (src, extensions, files = []) {
  if (!fs.statSync(src).isDirectory()) {
    exitFatal(`Source ${src} should be a directory`);
  }

  for (const entry of fs.readdirSync(src)) {
    const full = path.join(src, entry);

    if (fs.statSync(full).isDirectory()) {
      // recurse, skipping excluded (build/node_modules) directories
      if (!PATHS_EXCL.includes(entry)) {
        readdirSync(full, extensions, files);
      }
    } else if (extensions.some((ext) => entry.endsWith(ext))) {
      files.push(full);
    }
  }

  return files;
}
/**
 * Prints the fatal error message and exit with a non-zero return code
 *
 * @param {string} message
 * @param {Error} [error] - optional underlying error, printed in full
 * @returns {never} never returns - always exits the process with code 1
 **/
export function exitFatal (message, error) {
  console.error();
  console.error('FATAL:', message);
  if (error) {
    console.error();
    console.error(error);
  }
  console.error();
  process.exit(1);
}
/**
 * Checks that the running Node version satisfies the engines.node field
 * in the cwd package.json, exiting fatally (code 1) when too old.
 */
export function exitFatalEngine () {
  const pkg = JSON.parse(fs.readFileSync(path.join(process.cwd(), 'package.json'), 'utf-8'));

  // a missing engines.node compares as >=0 and never triggers the exit
  if (engineVersionCmp(process.version, pkg.engines?.node) === -1) {
    console.error(
      `${BLANK}\n FATAL: At least Node version ${pkg.engines.node} is required for development.\n${BLANK}`
    );
    // FIX: corrected typos in the user-facing message ("famility" ->
    // "family", "uses" -> "use", "first first" -> "first")
    console.error(`
    Technical explanation: For a development environment all projects in
    the @polkadot family use node:test in their operation. Currently the
    minimum required version of Node is thus set at the first version
    with operational support, hence this limitation. Additionally only LTS
    Node versions are supported.
    LTS Node versions are detailed on https://nodejs.dev/en/about/releases/
`);
    process.exit(1);
  }
}
/**
 * Checks that the current install/run was performed via yarn (based on
 * the npm_execpath env value), exiting fatally (code 1) otherwise.
 */
export function exitFatalYarn () {
  if (!process.env['npm_execpath']?.includes('yarn')) {
    console.error(
      `${BLANK}\n FATAL: The use of yarn is required, install via npm is not supported.\n${BLANK}`
    );
    // FIX: removed the stray apostrophe in "@polkadot' family"
    console.error(`
    Technical explanation: All the projects in the @polkadot family use
    yarn specific configs and assume yarn for build operations and locks.
    If yarn is not available, you can get it from https://yarnpkg.com/
  `);
    process.exit(1);
  }
}
/**
 * Topological sort of dependencies. It handles circular deps by placing them at the end
 * of the sorted array from circular dep with the smallest vertices to the greatest vertices.
 *
 * Credit to: https://gist.github.com/shinout/1232505 (Parts of this were used as a starting point for the structure of the topoSort)
 *
 * Dependency edges are discovered by reading packages/<dir>/package.json
 * for each entry and matching @polkadot/<other-dir> dependencies.
 *
 * @param {string[]} dirs
 * @returns {string[]} dirs ordered dependencies-first, with circular and
 * stand-alone (edge-less) packages appended at the end
 */
export function topoSort (dirs) {
  /** @type {Record<string, Node>} */
  const nodes = {};
  /** @type {string[]} */
  const sorted = [];
  /** @type {Record<string, boolean>} */
  const visited = {};
  /** @type {Record<string, Node>} */
  const circular = {};
  // a single package needs no sorting (and no package.json reads)
  if (dirs.length === 1) {
    return dirs;
  }
  // graph node: a package dir with its outgoing dependency edges
  class Node {
    /** @param {string} id */
    constructor (id) {
      this.id = id;
      /** @type {string[]} */
      this.vertices = [];
    }
  }
  /**
   * Depth-first visit: pushes non-circular nodes to sorted in post-order
   * (dependencies first); nodes found on the current ancestor chain are
   * recorded in circular instead
   *
   * @param {*} key
   * @param {string[]} ancestors
   * @returns
   */
  function cb (key, ancestors) {
    const node = nodes[key];
    const id = node.id;
    if (visited[key]) {
      return;
    }
    ancestors.push(id);
    visited[key] = true;
    node.vertices.forEach((i) => {
      if (ancestors.indexOf(i) >= 0) {
        console.log('CIRCULAR: closed chain : ' + i + ' is in ' + id);
        if (nodes[id].vertices.includes(i)) {
          circular[id] = nodes[id];
        }
        circular[i] = nodes[i];
      }
      // each branch receives its own copy of the ancestor chain
      cb(i.toString(), ancestors.map((v) => v));
    });
    if (!circular[id]) {
      sorted.push(id);
    }
  }
  // Build edges: [dir, d] means dir depends on @polkadot/<d>
  const edges = dirs.map((dir) => {
    const json = fs.readFileSync(path.join('packages', dir, 'package.json'), 'utf8');
    const deps = JSON.parse(json).dependencies;
    return dirs
      .filter((d) => d !== dir && deps && Object.keys(deps).includes(`@polkadot/${d}`))
      .map((d) => [dir, d]);
  }).flat();
  edges.forEach((v) => {
    const from = v[0]; const to = v[1];
    if (!nodes[from]) {
      nodes[from] = new Node(from);
    }
    if (!nodes[to]) {
      nodes[to] = new Node(to);
    }
    nodes[from].vertices.push(to);
  });
  const keys = Object.keys(nodes);
  for (const key of keys) {
    cb(key, []);
  }
  // circular packages appended smallest-dependency-count first
  const circularSorted = Object.keys(circular)
    .sort((a, b) => circular[a].vertices.length < circular[b].vertices.length ? -1 : 1);
  const flattenedEdges = edges.flat();
  // Packages that have no edges
  /** @type {string[]} */
  const standAlones = dirs.filter((d) => !flattenedEdges.includes(d));
  return sorted.concat(circularSorted).concat(standAlones);
}