mirror of
https://github.com/pezkuwichain/pezkuwi-dev.git
synced 2026-04-21 22:37:58 +00:00
Initial rebrand: @polkadot -> @pezkuwi (3 packages)
- Package namespace: @polkadot/dev -> @pezkuwi/dev - Repository: polkadot-js/dev -> pezkuwichain/pezkuwi-dev - Author: Pezkuwi Team <team@pezkuwichain.io> Packages: - @pezkuwi/dev (build tools, linting, CI scripts) - @pezkuwi/dev-test (test runner) - @pezkuwi/dev-ts (TypeScript build) Upstream: polkadot-js/dev v0.83.3
This commit is contained in:
@@ -0,0 +1,10 @@
|
||||
root = true
|
||||
[*]
|
||||
indent_style=space
|
||||
indent_size=2
|
||||
tab_width=2
|
||||
end_of_line=lf
|
||||
charset=utf-8
|
||||
trim_trailing_whitespace=true
|
||||
max_line_length=120
|
||||
insert_final_newline=true
|
||||
@@ -0,0 +1,16 @@
|
||||
name: bot
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [labeled]
|
||||
|
||||
jobs:
|
||||
approve:
|
||||
if: "! startsWith(github.event.head_commit.message, '[CI Skip]') && (!github.event.pull_request || github.event.pull_request.head.repo.full_name == github.repository)"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: jacogr/action-approve@795afd1dd096a2071d7ec98740661af4e853b7da
|
||||
with:
|
||||
authors: jacogr, TarikGul
|
||||
labels: -auto
|
||||
token: ${{ secrets.GH_PAT_BOT }}
|
||||
@@ -0,0 +1,16 @@
|
||||
name: bot
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [labeled]
|
||||
|
||||
jobs:
|
||||
merge:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: jacogr/action-merge@d2d64b4545acd93b0a9575177d3d215ae3f92029
|
||||
with:
|
||||
checks: pr (build),pr (docs),pr (lint),pr (test)
|
||||
labels: -auto
|
||||
strategy: squash
|
||||
token: ${{ secrets.GH_PAT_BOT }}
|
||||
@@ -0,0 +1,25 @@
|
||||
name: 'Lock Threads'
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '30 1/3 * * *'
|
||||
|
||||
jobs:
|
||||
lock:
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
YARN_ENABLE_SCRIPTS: false
|
||||
steps:
|
||||
- uses: dessant/lock-threads@c1b35aecc5cdb1a34539d14196df55838bb2f836
|
||||
with:
|
||||
github-token: ${{ secrets.GH_PAT_BOT }}
|
||||
issue-inactive-days: '7'
|
||||
issue-comment: >
|
||||
This thread has been automatically locked since there has not been
|
||||
any recent activity after it was closed. Please open a new issue
|
||||
if you think you have a related problem or query.
|
||||
pr-inactive-days: '2'
|
||||
pr-comment: >
|
||||
This pull request has been automatically locked since there
|
||||
has not been any recent activity after it was closed.
|
||||
Please open a new issue for related bugs.
|
||||
@@ -0,0 +1,21 @@
|
||||
name: PR
|
||||
on: [pull_request]
|
||||
|
||||
jobs:
|
||||
pr:
|
||||
continue-on-error: true
|
||||
strategy:
|
||||
matrix:
|
||||
step: ['lint', 'test', 'build', 'docs']
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
YARN_ENABLE_SCRIPTS: false
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 'lts/*'
|
||||
- name: ${{ matrix.step }}
|
||||
run: |
|
||||
yarn install --immutable
|
||||
yarn ${{ matrix.step }}
|
||||
@@ -0,0 +1,31 @@
|
||||
name: Master
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
master:
|
||||
if: "! startsWith(github.event.head_commit.message, '[CI Skip]')"
|
||||
strategy:
|
||||
matrix:
|
||||
step: ['build:release']
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
YARN_ENABLE_SCRIPTS: false
|
||||
CC_TEST_REPORTER_ID: ${{ secrets.CC_TEST_REPORTER_ID }}
|
||||
GH_PAT: ${{ secrets.GH_PAT_BOT }}
|
||||
GH_RELEASE_GITHUB_API_TOKEN: ${{ secrets.GH_PAT_BOT }}
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
token: ${{ secrets.GH_PAT_BOT }}
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 'lts/*'
|
||||
- name: ${{ matrix.step }}
|
||||
run: |
|
||||
yarn install --immutable
|
||||
yarn ${{ matrix.step }}
|
||||
+23
@@ -0,0 +1,23 @@
|
||||
.DS_Store
|
||||
.env.local
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.npmrc
|
||||
.pnp.*
|
||||
.yarn/*
|
||||
!.yarn/releases
|
||||
!.yarn/plugins
|
||||
!.yarn/sdks
|
||||
/mod.ts
|
||||
/import_map.json
|
||||
build/
|
||||
build-*/
|
||||
cc-test-reporter
|
||||
coverage/
|
||||
node_modules/
|
||||
npm-debug.log*
|
||||
tmp/
|
||||
tsconfig.*buildinfo
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
@@ -0,0 +1,3 @@
|
||||
Jaco <jacogr@gmail.com>
|
||||
github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> <action@github.com>
|
||||
github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Github Actions <action@github.com>
|
||||
@@ -0,0 +1,4 @@
|
||||
build
|
||||
coverage
|
||||
docs
|
||||
src
|
||||
@@ -0,0 +1,3 @@
|
||||
build
|
||||
coverage
|
||||
packages
|
||||
@@ -0,0 +1,4 @@
|
||||
// Copyright 2017-2025 @polkadot/dev authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
module.exports = require('@polkadot/dev/config/prettier.cjs');
|
||||
Vendored
+4
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"eslint.enable": true,
|
||||
"eslint.experimental.useFlatConfig": true
|
||||
}
|
||||
Vendored
+934
File diff suppressed because one or more lines are too long
+15
@@ -0,0 +1,15 @@
|
||||
compressionLevel: mixed
|
||||
|
||||
enableGlobalCache: false
|
||||
|
||||
enableImmutableInstalls: false
|
||||
|
||||
enableProgressBars: false
|
||||
|
||||
logFilters:
|
||||
- code: YN0013
|
||||
level: discard
|
||||
|
||||
nodeLinker: node-modules
|
||||
|
||||
yarnPath: .yarn/releases/yarn-4.6.0.cjs
|
||||
+252
@@ -0,0 +1,252 @@
|
||||
# CHANGELOG
|
||||
|
||||
## 0.84.2
|
||||
|
||||
- Write the Buffer Import to necessary Deno build files
|
||||
- build(dev): ignore removing private fields
|
||||
|
||||
|
||||
## 0.83.1
|
||||
|
||||
- Parallelize the tests in `polkadot-exec-node-test`
|
||||
|
||||
|
||||
## 0.82.1
|
||||
|
||||
- Create separate testLoader and testCached for Node v22 compatibility with JSON assertions
|
||||
|
||||
|
||||
## 0.81.1
|
||||
|
||||
- Duplicate .d.ts files into cjs
|
||||
|
||||
|
||||
## 0.80.1
|
||||
|
||||
- Typescript 5.5.4
|
||||
|
||||
|
||||
## 0.79.4
|
||||
|
||||
- fix: Add check for `deps` in topo sort
|
||||
|
||||
|
||||
## 0.79.1
|
||||
|
||||
- feat: add topological sorting to the packages when releasing
|
||||
|
||||
|
||||
## 0.78.1
|
||||
|
||||
- Update internal scripts to dedupe git & yarn commands
|
||||
- Update to latest yarn berry 4.0.2
|
||||
|
||||
|
||||
## 0.77.1
|
||||
|
||||
- Drop support for Node 16
|
||||
|
||||
|
||||
## 0.76.1
|
||||
|
||||
- Update to latest typescript-eslint (incl. new rulesets)
|
||||
|
||||
|
||||
## 0.75.1
|
||||
|
||||
- Swap eslint to flat config
|
||||
|
||||
|
||||
## 0.74.1
|
||||
|
||||
- Ensure correct structures for `tsconfig.*.json`
|
||||
|
||||
|
||||
## 0.73.1
|
||||
|
||||
- Drop support for Node 14
|
||||
|
||||
|
||||
## 0.72.1
|
||||
|
||||
- Split `@pezkuwi/dev-ts` & `@pezkuwi/dev-test` packages
|
||||
|
||||
|
||||
## 0.71.1
|
||||
|
||||
- Ensure all `src/*` has `.js` extensions (as per ESM, eslint rules, build updates)
|
||||
|
||||
|
||||
## 0.70.1
|
||||
|
||||
- Remove Babel (all compilation via tsc)
|
||||
|
||||
|
||||
## 0.69.1
|
||||
|
||||
- Remove Jest
|
||||
|
||||
|
||||
## 0.68.1
|
||||
|
||||
- 2023
|
||||
- Cleanup internally used script dependencies
|
||||
|
||||
|
||||
## 0.67.1
|
||||
|
||||
- Default to `esModuleInterop: false`
|
||||
|
||||
|
||||
## 0.66.1
|
||||
|
||||
- Output commonjs into `cjs/*`
|
||||
|
||||
|
||||
## 0.65.1
|
||||
|
||||
- 2022
|
||||
- Generate `detectPackage` template (with cjs `__dirname`)
|
||||
|
||||
|
||||
## 0.64.1
|
||||
|
||||
- Use tsconfig references and per-package TS build/lint
|
||||
|
||||
|
||||
## 0.63.1
|
||||
|
||||
- eslint 8
|
||||
|
||||
|
||||
## 0.62.1
|
||||
|
||||
- Swap default package build to esm with type: module
|
||||
|
||||
|
||||
## 0.61.1
|
||||
|
||||
- Build & publish both esm and cjs
|
||||
|
||||
|
||||
## 0.60.1
|
||||
|
||||
- Allow for both esm & cjs Babel config
|
||||
|
||||
|
||||
## 0.59.1
|
||||
|
||||
- Default to new React runtime preset (after React 16.14)
|
||||
|
||||
|
||||
## 0.58.1
|
||||
|
||||
- Drop vuepress dependency completely
|
||||
|
||||
|
||||
## 0.57.1
|
||||
|
||||
- Drop lerna dependency completely
|
||||
|
||||
|
||||
## 0.56.1
|
||||
|
||||
- Optional lerna in publish
|
||||
|
||||
|
||||
## 0.55.3
|
||||
|
||||
- Publish draft release
|
||||
|
||||
|
||||
## 0.54.1
|
||||
|
||||
- typescript-eslint 3
|
||||
|
||||
|
||||
## 0.53.1
|
||||
|
||||
- TypeScript 3.9
|
||||
|
||||
|
||||
## 0.52.1
|
||||
|
||||
- Stricter JSX rules
|
||||
|
||||
|
||||
## 0.51.1
|
||||
|
||||
- Arrow functions with ()
|
||||
- JSX sample tests
|
||||
|
||||
|
||||
## 0.50.1
|
||||
|
||||
- Yarn 2
|
||||
|
||||
|
||||
## 0.41.1
|
||||
|
||||
- TypeScript 3.8.2
|
||||
- Extend Babel plugins with latest TS features
|
||||
|
||||
|
||||
## 0.40.1
|
||||
|
||||
- Remove `@pezkuwi/dev-react`, combine into `@pezkuwi/dev`
|
||||
- Move all user-facing (non-CI scripts) to JS, which makes cross-platform easier
|
||||
- Add `polkadot-dev-circular` script to extract circular deps
|
||||
|
||||
|
||||
## 0.34.1
|
||||
|
||||
- Bump deps
|
||||
|
||||
|
||||
## 0.33.1
|
||||
|
||||
- Package scoping checks, build & pre-publish
|
||||
- Allow `.skip-{npm,build}` files to control build
|
||||
- Bump deps
|
||||
|
||||
|
||||
## 0.32.1
|
||||
|
||||
- GitHub workflows
|
||||
- Don't publish this package as beta
|
||||
- Bump deps
|
||||
|
||||
|
||||
## 0.31.1
|
||||
|
||||
- TypeScript eslint 2
|
||||
- Bump deps
|
||||
|
||||
|
||||
## 0.30.1
|
||||
|
||||
- Swap to TypeScript eslint
|
||||
- Bump deps
|
||||
|
||||
|
||||
## 0.29.1
|
||||
|
||||
- Split deploy & build steps
|
||||
- Rename `yarn run check` to `yarn lint`
|
||||
- Bump deps
|
||||
|
||||
|
||||
## 0.28.1
|
||||
|
||||
- Remove `useBuiltins` from babel config (corejs)
|
||||
- Bump deps
|
||||
|
||||
|
||||
## 0.27.1
|
||||
|
||||
- Beta versions now publish with a `beta` tag
|
||||
|
||||
|
||||
## 0.26.1
|
||||
|
||||
- Publish `<major>.<minor>.<patch>-beta.x` versions from CI. This helps a lot with the stream of versions that arise from merging.
|
||||
@@ -0,0 +1,45 @@
|
||||
# Contributing
|
||||
|
||||
## What?
|
||||
|
||||
Individuals making significant and valuable contributions are given commit-access to a project to contribute as they see fit.
|
||||
A project is more like an open wiki than a standard guarded open source project.
|
||||
|
||||
## Rules
|
||||
|
||||
There are a few basic ground-rules for contributors (including the maintainer(s) of the project):
|
||||
|
||||
1. **No `--force` pushes** or modifying the Git history in any way. If you need to rebase, ensure you do it in your own repo.
|
||||
2. **Non-master branches**, prefixed with a short name moniker (e.g. `<initials>-<feature>`) must be used for ongoing work.
|
||||
3. **All modifications** must be made in a **pull-request** to solicit feedback from other contributors.
|
||||
4. A pull-request *must not be merged until CI* has finished successfully.
|
||||
|
||||
#### Merging pull requests once CI is successful:
|
||||
- A pull request with no large change to logic that is an urgent fix may be merged after a non-author contributor has reviewed it well.
|
||||
- No PR should be merged until all reviews' comments are addressed.
|
||||
|
||||
#### Reviewing pull requests:
|
||||
When reviewing a pull request, the end-goal is to suggest useful changes to the author. Reviews should finish with approval unless there are issues that would result in:
|
||||
|
||||
- Buggy behaviour.
|
||||
- Undue maintenance burden.
|
||||
- Breaking with house coding style.
|
||||
- Pessimisation (i.e. reduction of speed as measured in the projects benchmarks).
|
||||
- Feature reduction (i.e. it removes some aspect of functionality that a significant minority of users rely on).
|
||||
- Uselessness (i.e. it does not strictly add a feature or fix a known issue).
|
||||
|
||||
#### Reviews may not be used as an effective veto for a PR because:
|
||||
- There exists a somewhat cleaner/better/faster way of accomplishing the same feature/fix.
|
||||
- It does not fit well with some other contributors' longer-term vision for the project.
|
||||
|
||||
## Releases
|
||||
|
||||
Declaring formal releases remains the prerogative of the project maintainer(s).
|
||||
|
||||
## Changes to this arrangement
|
||||
|
||||
This is an experiment and feedback is welcome! This document may also be subject to pull-requests or changes by contributors where you believe you have something valuable to add or change.
|
||||
|
||||
## Heritage
|
||||
|
||||
These contributing guidelines are modified from the "OPEN Open Source Project" guidelines for the Level project: [https://github.com/Level/community/blob/master/CONTRIBUTING.md](https://github.com/Level/community/blob/master/CONTRIBUTING.md)
|
||||
@@ -0,0 +1,13 @@
|
||||
1382 Jaco Bump deps (#1134)
|
||||
19 Tarik Gul Write the Buffer Import to necessary Deno build files (#1156)
|
||||
2 Arjun Porwal Fix CI issue (#1159)
|
||||
2 Nikos Kontakis Add rollup dynamic import variables plugin (#789)
|
||||
1 Alex Saft Fix Vite build bundling error about EISDIR on `new URL('.', import.meta.url)` (#637)
|
||||
1 Alex Wang support build when using lerna with on package (#214)
|
||||
1 Amaury Martiny Allow .spec.ts files (#143)
|
||||
1 Axel Chalon Include dotfiles when pushing to gh-pages (#268)
|
||||
1 Ewa Kowalska Add fix flag to eslint script (#419)
|
||||
1 Nazar Mokrynskyi Remove redundant babel plugins (#501)
|
||||
1 rajk93 build(dev): ignore removing private fields (#1157)
|
||||
1 Stefanie Doll Added react-hot-loader to 'dev-react' config (#191)
|
||||
1 StefansArya Change bundle configuration to UMD (#557)
|
||||
@@ -0,0 +1,201 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
@@ -0,0 +1,10 @@
|
||||
# @pezkuwi/dev
|
||||
|
||||
A collection of shared CI scripts and development environment (configuration, dependencies) used by all [@pezkuwi](https://pezkuwi.js.org) projects.
|
||||
|
||||
Included here -
|
||||
|
||||
- [@pezkuwi/dev](packages/dev/) Common base configurations for our TypeScript projects
|
||||
- [Scripts](packages/dev/README.md)
|
||||
- [@pezkuwi/dev-test](packages/dev-test/) A Jest-like global environment for usage alongside `node:test`
|
||||
- [@pezkuwi/dev-ts](packages/dev-ts/) An ESM loader for TS environments
|
||||
@@ -0,0 +1,12 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="initial-scale=0.5, maximum-scale=1">
|
||||
<meta http-equiv="refresh" content="0;URL='https://polkadot.js.org/docs/'" />
|
||||
<title>Redirecting to https://polkadot.js.org/docs/</title>
|
||||
</head>
|
||||
<body>
|
||||
Redirecting to <a href="https://polkadot.js.org/docs/">https://polkadot.js.org/docs/</a>
|
||||
</body>
|
||||
</html>
|
||||
@@ -0,0 +1,12 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="initial-scale=0.5, maximum-scale=1">
|
||||
<meta http-equiv="refresh" content="0;URL='https://polkadot.js.org/docs/'" />
|
||||
<title>Redirecting to https://polkadot.js.org/docs/</title>
|
||||
</head>
|
||||
<body>
|
||||
Redirecting to <a href="https://polkadot.js.org/docs/">https://polkadot.js.org/docs/</a>
|
||||
</body>
|
||||
</html>
|
||||
@@ -0,0 +1,8 @@
|
||||
// Copyright 2017-2025 @pezkuwi/dev authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import baseConfig from '@pezkuwi/dev/config/eslint';
|
||||
|
||||
export default [
|
||||
...baseConfig
|
||||
];
|
||||
@@ -0,0 +1,43 @@
|
||||
{
|
||||
"author": "Pezkuwi Team <team@pezkuwichain.io>",
|
||||
"bugs": "https://github.com/pezkuwi/dev/issues",
|
||||
"engines": {
|
||||
"node": ">=18.14"
|
||||
},
|
||||
"homepage": "https://github.com/pezkuwichain/pezkuwi-dev#readme",
|
||||
"license": "Apache-2.0",
|
||||
"packageManager": "yarn@4.6.0",
|
||||
"private": true,
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/pezkuwi/dev.git"
|
||||
},
|
||||
"sideEffects": false,
|
||||
"type": "module",
|
||||
"version": "0.84.2",
|
||||
"versions": {
|
||||
"git": "0.84.2",
|
||||
"npm": "0.84.2"
|
||||
},
|
||||
"workspaces": [
|
||||
"packages/*"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "polkadot-dev-build-ts",
|
||||
"build:before": "polkadot-dev-copy-dir --cd packages/dev config scripts build",
|
||||
"build:release": "polkadot-ci-ghact-build --skip-beta",
|
||||
"clean": "polkadot-dev-clean-build",
|
||||
"docs": "polkadot-dev-build-docs",
|
||||
"lint": "polkadot-dev-run-lint",
|
||||
"postinstall": "./packages/dev/scripts/polkadot-dev-yarn-only.mjs",
|
||||
"test": "yarn build && polkadot-dev-run-test --dev-build --env browser"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@pezkuwi/dev": "workspace:packages/dev",
|
||||
"@pezkuwi/dev-test": "workspace:packages/dev-test",
|
||||
"@pezkuwi/dev-ts": "workspace:packages/dev-ts"
|
||||
},
|
||||
"resolutions": {
|
||||
"typescript": "^5.5.4"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,32 @@
|
||||
# @pezkuwi/dev-test
|
||||
|
||||
This is a very basic Jest-compatible environment that could be used alongside tests. The need for this came from replacing Jest with `node --test` without rewriting all assertions.
|
||||
|
||||
It provides the following -
|
||||
|
||||
1. Browser `window`, `document`, `navigator` (see usage for browser-specific path)
|
||||
2. `jest` functions, specifically `spyOn` (not comprehensive, some will error, some will no-op)
|
||||
3. `expect` functions (not comprehensive, caters for specific polkadot-js usage)
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
One thing to note is that `node:test` is still rapidly evolving - this includes the APIs and features. As such this requires at least Node 18.14, however 18.15+ is recommended.
|
||||
|
||||
The entry points are different based on the environment you would like to operate in. For a browser-like environment,
|
||||
|
||||
```
|
||||
node --require @pezkuwi/dev-test/browser ...
|
||||
```
|
||||
|
||||
or for a basic describe/expect/jest-only global environment
|
||||
|
||||
```
|
||||
node --require @pezkuwi/dev-test/node ...
|
||||
```
|
||||
|
||||
The `...` above indicates any additional Node options, for instance a full command could be -
|
||||
|
||||
```
|
||||
node --require @pezkuwi/dev-test/node --test something.test.js
|
||||
```
|
||||
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"author": "Jaco Greeff <jacogr@gmail.com>",
|
||||
"bugs": "https://github.com/pezkuwi/dev/issues",
|
||||
"description": "A basic test-functions-as-global library on top of node:test",
|
||||
"engines": {
|
||||
"node": ">=18.14"
|
||||
},
|
||||
"homepage": "https://github.com/pezkuwi/dev/tree/master/packages/dev-test#readme",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@pezkuwi/dev-test",
|
||||
"repository": {
|
||||
"directory": "packages/dev-test",
|
||||
"type": "git",
|
||||
"url": "https://github.com/pezkuwi/dev.git"
|
||||
},
|
||||
"sideEffects": false,
|
||||
"type": "module",
|
||||
"version": "0.84.2",
|
||||
"main": "./index.js",
|
||||
"exports": {
|
||||
"./globals.d.ts": "./src/globals.d.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"jsdom": "^24.0.0",
|
||||
"tslib": "^2.7.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/jsdom": "^21.1.6"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import { exposeEnv } from './env/index.js';
|
||||
|
||||
exposeEnv(true);
|
||||
+26
@@ -0,0 +1,26 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import { browser } from './browser.js';
|
||||
|
||||
const all = browser();
|
||||
|
||||
describe('browser', () => {
|
||||
it('contains window', () => {
|
||||
expect(all.window).toBeDefined();
|
||||
});
|
||||
|
||||
it('contains a crypto implementation', () => {
|
||||
expect(all.crypto).toBeTruthy();
|
||||
expect(typeof all.crypto.getRandomValues).toBe('function');
|
||||
});
|
||||
|
||||
it('contains the top-level objects', () => {
|
||||
expect(all.document).toBeDefined();
|
||||
expect(all.navigator).toBeDefined();
|
||||
});
|
||||
|
||||
it('contains HTML*Element', () => {
|
||||
expect(typeof all.HTMLElement).toBe('function');
|
||||
});
|
||||
});
|
||||
Vendored
+100
@@ -0,0 +1,100 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import { JSDOM } from 'jsdom';
|
||||
|
||||
/**
|
||||
* Export a very basic JSDom environment - this is just enough so we have
|
||||
* @testing-environment/react tests passing in this repo
|
||||
*
|
||||
* FIXME: This approach is actually _explicitly_ discouraged by JSDOM - when
|
||||
* using window you should run the tests inside that context, instead of just
|
||||
* blindly relying on the globals as we do here
|
||||
*/
|
||||
export function browser () {
|
||||
const { window } = new JSDOM('', { url: 'http://localhost' });
|
||||
|
||||
return {
|
||||
// All HTML Elements that are defined on the JSDOM window object.
|
||||
// (we copied as-is from the types definition). We cannot get this
|
||||
// via Object.keys(window).filter(...) so we have to specify explicitly
|
||||
HTMLAnchorElement: window.HTMLAnchorElement,
|
||||
HTMLAreaElement: window.HTMLAreaElement,
|
||||
HTMLAudioElement: window.HTMLAudioElement,
|
||||
HTMLBRElement: window.HTMLBRElement,
|
||||
HTMLBaseElement: window.HTMLBaseElement,
|
||||
HTMLBodyElement: window.HTMLBodyElement,
|
||||
HTMLButtonElement: window.HTMLButtonElement,
|
||||
HTMLCanvasElement: window.HTMLCanvasElement,
|
||||
HTMLDListElement: window.HTMLDListElement,
|
||||
HTMLDataElement: window.HTMLDataElement,
|
||||
HTMLDataListElement: window.HTMLDataListElement,
|
||||
HTMLDetailsElement: window.HTMLDetailsElement,
|
||||
HTMLDialogElement: window.HTMLDialogElement,
|
||||
HTMLDirectoryElement: window.HTMLDirectoryElement,
|
||||
HTMLDivElement: window.HTMLDivElement,
|
||||
HTMLElement: window.HTMLElement,
|
||||
HTMLEmbedElement: window.HTMLEmbedElement,
|
||||
HTMLFieldSetElement: window.HTMLFieldSetElement,
|
||||
HTMLFontElement: window.HTMLFontElement,
|
||||
HTMLFormElement: window.HTMLFormElement,
|
||||
HTMLFrameElement: window.HTMLFrameElement,
|
||||
HTMLFrameSetElement: window.HTMLFrameSetElement,
|
||||
HTMLHRElement: window.HTMLHRElement,
|
||||
HTMLHeadElement: window.HTMLHeadElement,
|
||||
HTMLHeadingElement: window.HTMLHeadingElement,
|
||||
HTMLHtmlElement: window.HTMLHtmlElement,
|
||||
HTMLIFrameElement: window.HTMLIFrameElement,
|
||||
HTMLImageElement: window.HTMLImageElement,
|
||||
HTMLInputElement: window.HTMLInputElement,
|
||||
HTMLLIElement: window.HTMLLIElement,
|
||||
HTMLLabelElement: window.HTMLLabelElement,
|
||||
HTMLLegendElement: window.HTMLLegendElement,
|
||||
HTMLLinkElement: window.HTMLLinkElement,
|
||||
HTMLMapElement: window.HTMLMapElement,
|
||||
HTMLMarqueeElement: window.HTMLMarqueeElement,
|
||||
HTMLMediaElement: window.HTMLMediaElement,
|
||||
HTMLMenuElement: window.HTMLMenuElement,
|
||||
HTMLMetaElement: window.HTMLMetaElement,
|
||||
HTMLMeterElement: window.HTMLMeterElement,
|
||||
HTMLModElement: window.HTMLModElement,
|
||||
HTMLOListElement: window.HTMLOListElement,
|
||||
HTMLObjectElement: window.HTMLObjectElement,
|
||||
HTMLOptGroupElement: window.HTMLOptGroupElement,
|
||||
HTMLOptionElement: window.HTMLOptionElement,
|
||||
HTMLOutputElement: window.HTMLOutputElement,
|
||||
HTMLParagraphElement: window.HTMLParagraphElement,
|
||||
HTMLParamElement: window.HTMLParamElement,
|
||||
HTMLPictureElement: window.HTMLPictureElement,
|
||||
HTMLPreElement: window.HTMLPreElement,
|
||||
HTMLProgressElement: window.HTMLProgressElement,
|
||||
HTMLQuoteElement: window.HTMLQuoteElement,
|
||||
HTMLScriptElement: window.HTMLScriptElement,
|
||||
HTMLSelectElement: window.HTMLSelectElement,
|
||||
HTMLSlotElement: window.HTMLSlotElement,
|
||||
HTMLSourceElement: window.HTMLSourceElement,
|
||||
HTMLSpanElement: window.HTMLSpanElement,
|
||||
HTMLStyleElement: window.HTMLStyleElement,
|
||||
HTMLTableCaptionElement: window.HTMLTableCaptionElement,
|
||||
HTMLTableCellElement: window.HTMLTableCellElement,
|
||||
HTMLTableColElement: window.HTMLTableColElement,
|
||||
HTMLTableElement: window.HTMLTableElement,
|
||||
HTMLTableRowElement: window.HTMLTableRowElement,
|
||||
HTMLTableSectionElement: window.HTMLTableSectionElement,
|
||||
HTMLTemplateElement: window.HTMLTemplateElement,
|
||||
HTMLTextAreaElement: window.HTMLTextAreaElement,
|
||||
HTMLTimeElement: window.HTMLTimeElement,
|
||||
HTMLTitleElement: window.HTMLTitleElement,
|
||||
HTMLTrackElement: window.HTMLTrackElement,
|
||||
HTMLUListElement: window.HTMLUListElement,
|
||||
HTMLUnknownElement: window.HTMLUnknownElement,
|
||||
HTMLVideoElement: window.HTMLVideoElement,
|
||||
// normal service resumes, the base top-level names
|
||||
crypto: window.crypto,
|
||||
document: window.document,
|
||||
localStorage: window.localStorage,
|
||||
navigator: window.navigator,
|
||||
// window...
|
||||
window
|
||||
};
|
||||
}
|
||||
+222
@@ -0,0 +1,222 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
describe('expect', () => {
|
||||
it('has been decorated', () => {
|
||||
expect(expect(true).not).toBeDefined();
|
||||
});
|
||||
|
||||
it('throws on unimplemented', () => {
|
||||
expect(
|
||||
() => expect(true).not.toHaveReturnedWith()
|
||||
).toThrow('expect(...).not.toHaveReturnedWith has not been implemented');
|
||||
});
|
||||
|
||||
it('throws on unimplemented (with alternative)', () => {
|
||||
expect(
|
||||
() => expect(true).not.toBeFalsy()
|
||||
).toThrow('expect(...).not.toBeFalsy has not been implemented (Use expect(...).toBeTruthy instead)');
|
||||
});
|
||||
|
||||
describe('rejects', () => {
|
||||
it('matches a rejection via .toThrow', async () => {
|
||||
await expect(
|
||||
Promise.reject(new Error('this is a rejection message'))
|
||||
).rejects.toThrow(/rejection/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('.toBeDefined', () => {
|
||||
it('does not throw on null', () => {
|
||||
expect(null).toBeDefined();
|
||||
});
|
||||
|
||||
it('throws on undefined', () => {
|
||||
expect(
|
||||
() => expect(undefined).toBeDefined()
|
||||
).toThrow();
|
||||
});
|
||||
|
||||
it('.not does not throw on undefined', () => {
|
||||
expect(undefined).not.toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('.toThrow', () => {
|
||||
const thrower = () => {
|
||||
throw new Error('some error');
|
||||
};
|
||||
|
||||
it('matches error with empty throw', () => {
|
||||
expect(thrower).toThrow();
|
||||
});
|
||||
|
||||
it('matches error with exact message', () => {
|
||||
expect(thrower).toThrow('some error');
|
||||
});
|
||||
|
||||
it('matches error with regex message', () => {
|
||||
expect(thrower).toThrow(/me er/);
|
||||
});
|
||||
|
||||
it('handles .not correctly (no throw, empty message)', () => {
|
||||
expect(() => undefined).not.toThrow();
|
||||
});
|
||||
|
||||
it('handles .not correctly (no throw, regex match)', () => {
|
||||
expect(() => undefined).not.toThrow(/me er/);
|
||||
});
|
||||
|
||||
it('handles .not correctly (throw, string match)', () => {
|
||||
expect(() => undefined).not.toThrow('no match');
|
||||
});
|
||||
|
||||
it('handles .not correctly (throw, regex match)', () => {
|
||||
expect(() => undefined).not.toThrow(/no match/);
|
||||
});
|
||||
});
|
||||
|
||||
describe('.toMatch', () => {
|
||||
it('fails matching when non-object passed in', () => {
|
||||
expect(
|
||||
() => expect(undefined).toMatch(/match/)
|
||||
).toThrow(/Expected string/);
|
||||
});
|
||||
|
||||
it('fails matching when non-matching string passed in', () => {
|
||||
expect(
|
||||
() => expect('some').toMatch(/match/)
|
||||
).toThrow(/did not match/);
|
||||
});
|
||||
|
||||
it('matches string passed', () => {
|
||||
expect(
|
||||
() => expect('matching').toMatch(/match/)
|
||||
).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('.toMatchObject', () => {
|
||||
it('fails matching when non-object passed in', () => {
|
||||
expect(
|
||||
() => expect(undefined).toMatchObject({ foo: 'bar' })
|
||||
).toThrow(/Expected object/);
|
||||
});
|
||||
|
||||
it('matches empty object', () => {
|
||||
expect({
|
||||
a: 'foo',
|
||||
b: 'bar'
|
||||
}).toMatchObject({});
|
||||
});
|
||||
|
||||
it('matches object with some fields', () => {
|
||||
expect({
|
||||
a: 'foo',
|
||||
b: 'bar',
|
||||
c: 123,
|
||||
d: [456, 789]
|
||||
}).toMatchObject({
|
||||
a: 'foo',
|
||||
c: 123,
|
||||
d: [456, 789]
|
||||
});
|
||||
});
|
||||
|
||||
it('matches an object with some expect.stringMatching supplied', () => {
|
||||
expect({
|
||||
a: 'foo bar',
|
||||
b: 'baz',
|
||||
c: 'zaz'
|
||||
}).toMatchObject({
|
||||
a: expect.stringMatching(/o b/),
|
||||
b: expect.stringMatching('baz'),
|
||||
c: 'zaz'
|
||||
});
|
||||
});
|
||||
|
||||
it('matches an object with expect.any supplied', () => {
|
||||
expect({
|
||||
a: 123,
|
||||
b: Boolean(true),
|
||||
c: 'foo'
|
||||
}).toMatchObject({
|
||||
a: expect.any(Number),
|
||||
b: expect.any(Boolean),
|
||||
c: 'foo'
|
||||
});
|
||||
});
|
||||
|
||||
it('does not match an object with non instance value for expect.any', () => {
|
||||
expect(
|
||||
() => expect({
|
||||
a: true,
|
||||
b: 'foo'
|
||||
}).toMatchObject({
|
||||
a: expect.any(Number),
|
||||
b: 'foo'
|
||||
})
|
||||
).toThrow(/not an instance of Number/);
|
||||
});
|
||||
|
||||
it('matches an object with expect.anything supplied', () => {
|
||||
expect({
|
||||
a: 123,
|
||||
b: 'foo'
|
||||
}).toMatchObject({
|
||||
a: expect.anything(),
|
||||
b: 'foo'
|
||||
});
|
||||
});
|
||||
|
||||
it('does not match an object with undefined value for expect.anything', () => {
|
||||
expect(
|
||||
() => expect({
|
||||
b: 'foo'
|
||||
}).toMatchObject({
|
||||
a: expect.anything(),
|
||||
b: 'foo'
|
||||
})
|
||||
).toThrow(/non-nullish/);
|
||||
});
|
||||
|
||||
it('does not match an object with non-array value', () => {
|
||||
expect(
|
||||
() => expect({
|
||||
a: 'foo',
|
||||
b: 'bar'
|
||||
}).toMatchObject({
|
||||
a: 'foo',
|
||||
b: [123, 456]
|
||||
})
|
||||
).toThrow(/Expected array value/);
|
||||
});
|
||||
|
||||
it('allows for deep matching', () => {
|
||||
expect({
|
||||
a: 123,
|
||||
b: {
|
||||
c: 456,
|
||||
d: {
|
||||
e: 'foo',
|
||||
f: 'bar',
|
||||
g: {
|
||||
h: [789, { z: 'baz' }]
|
||||
}
|
||||
}
|
||||
}
|
||||
}).toMatchObject({
|
||||
a: 123,
|
||||
b: {
|
||||
c: expect.any(Number),
|
||||
d: {
|
||||
f: 'bar',
|
||||
g: {
|
||||
h: [expect.any(Number), { z: 'baz' }]
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
Vendored
+248
@@ -0,0 +1,248 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import type { AnyFn, WithMock } from '../types.js';
|
||||
|
||||
import { strict as assert } from 'node:assert';
|
||||
|
||||
import { enhanceObj, stubObj } from '../util.js';
|
||||
|
||||
type AssertMatchFn = (value: unknown) => void;
|
||||
|
||||
type Mocked = Partial<WithMock<AnyFn>>;
|
||||
|
||||
// logged via Object.keys(expect).sort()
|
||||
const EXPECT_KEYS = ['addEqualityTesters', 'addSnapshotSerializer', 'any', 'anything', 'arrayContaining', 'assertions', 'closeTo', 'extend', 'extractExpectedAssertionsErrors', 'getState', 'hasAssertions', 'not', 'objectContaining', 'setState', 'stringContaining', 'stringMatching', 'toMatchInlineSnapshot', 'toMatchSnapshot', 'toThrowErrorMatchingInlineSnapshot', 'toThrowErrorMatchingSnapshot'] as const;
|
||||
|
||||
// logged via Object.keys(expect(0)).sort()
|
||||
const EXPECT_KEYS_FN = ['lastCalledWith', 'lastReturnedWith', 'not', 'nthCalledWith', 'nthReturnedWith', 'rejects', 'resolves', 'toBe', 'toBeCalled', 'toBeCalledTimes', 'toBeCalledWith', 'toBeCloseTo', 'toBeDefined', 'toBeFalsy', 'toBeGreaterThan', 'toBeGreaterThanOrEqual', 'toBeInstanceOf', 'toBeLessThan', 'toBeLessThanOrEqual', 'toBeNaN', 'toBeNull', 'toBeTruthy', 'toBeUndefined', 'toContain', 'toContainEqual', 'toEqual', 'toHaveBeenCalled', 'toHaveBeenCalledTimes', 'toHaveBeenCalledWith', 'toHaveBeenLastCalledWith', 'toHaveBeenNthCalledWith', 'toHaveLastReturnedWith', 'toHaveLength', 'toHaveNthReturnedWith', 'toHaveProperty', 'toHaveReturned', 'toHaveReturnedTimes', 'toHaveReturnedWith', 'toMatch', 'toMatchInlineSnapshot', 'toMatchObject', 'toMatchSnapshot', 'toReturn', 'toReturnTimes', 'toReturnWith', 'toStrictEqual', 'toThrow', 'toThrowError', 'toThrowErrorMatchingInlineSnapshot', 'toThrowErrorMatchingSnapshot'] as const;
|
||||
|
||||
const stubExpect = stubObj('expect', EXPECT_KEYS);
|
||||
const stubExpectFn = stubObj('expect(...)', EXPECT_KEYS_FN, {
|
||||
toThrowError: 'expect(...).toThrow'
|
||||
});
|
||||
const stubExpectFnRejects = stubObj('expect(...).rejects', EXPECT_KEYS_FN, {
|
||||
toThrowError: 'expect(...).rejects.toThrow'
|
||||
});
|
||||
const stubExpectFnResolves = stubObj('expect(...).resolves', EXPECT_KEYS_FN);
|
||||
const stubExpectFnNot = stubObj('expect(...).not', EXPECT_KEYS_FN, {
|
||||
toBeFalsy: 'expect(...).toBeTruthy',
|
||||
toBeTruthy: 'expect(...).toBeFalsy',
|
||||
toThrowError: 'expect(...).not.toThrow'
|
||||
});
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* A helper that wraps a matching function in an ExpectMatcher. This is (currently)
|
||||
* only used/checked for in the calledWith* helpers
|
||||
*
|
||||
* TODO We don't use it in polkadot-js, but a useful enhancement could be for
|
||||
* any of the to* expectations to detect and use those. An example of useful code
|
||||
* in that case:
|
||||
*
|
||||
* ```js
|
||||
* expect({
|
||||
* a: 'blah',
|
||||
* b: 3
|
||||
* }).toEqual(
|
||||
* expect.objectContaining({ b: 3 })
|
||||
* )
|
||||
* ```
|
||||
*
|
||||
* An example of matcher use can be seen in the isCalledWith loops
|
||||
*/
|
||||
class Matcher {
|
||||
assertMatch: AssertMatchFn;
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
constructor (assertFn: (value: any, check: any) => void, check?: unknown) {
|
||||
this.assertMatch = (value) => assertFn(value, check);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* Asserts that the input value is non-nullish
|
||||
*/
|
||||
function assertNonNullish (value: unknown): void {
|
||||
assert.ok(value !== null && value !== undefined, `Expected non-nullish value, found ${value as string}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* A helper that checks a single call arguments, which may include the
|
||||
* use of matchers. This is used in finding any call or checking a specific
|
||||
* call
|
||||
*/
|
||||
function assertCallHasArgs (call: { arguments: unknown[] } | undefined, args: unknown[]): void {
|
||||
assert.ok(call && args.length === call.arguments?.length, 'Number of arguments does not match');
|
||||
|
||||
args.forEach((arg, i) => assertMatch(call.arguments[i], arg));
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* A helper that checks for the first instance of a match on the actual call
|
||||
* arguments (this extracts the toHaveBeenCalledWith logic)
|
||||
*/
|
||||
function assertSomeCallHasArgs (value: Mocked | undefined, args: unknown[]) {
|
||||
assert.ok(value?.mock?.calls.some((call) => {
|
||||
try {
|
||||
assertCallHasArgs(call, args);
|
||||
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}), 'No call found matching arguments');
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* Asserts that the value is either (equal deep) or matches the matcher (if supplied)
|
||||
*/
|
||||
function assertMatch (value: unknown, check: unknown): void {
|
||||
check instanceof Matcher
|
||||
? check.assertMatch(value)
|
||||
: Array.isArray(check)
|
||||
? assertMatchArr(value, check)
|
||||
: check && typeof check === 'object'
|
||||
? assertMatchObj(value, check)
|
||||
: assert.deepStrictEqual(value, check);
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* A helper to match the supplied array check against the resulting array
|
||||
*
|
||||
* @param {unknown} value
|
||||
* @param {unknown[]} check
|
||||
*/
|
||||
function assertMatchArr (value: unknown, check: unknown[]): void {
|
||||
assert.ok(value && Array.isArray(value), `Expected array value, found ${typeof value}`);
|
||||
assert.ok(value.length === check.length, `Expected array with ${check.length} entries, found ${value.length}`);
|
||||
|
||||
check.forEach((other, i) => assertMatch(value[i], other));
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* A helper to match the supplied fields against the resulting object
|
||||
*/
|
||||
function assertMatchObj (value: unknown, check: object): void {
|
||||
assert.ok(value && typeof value === 'object', `Expected object value, found ${typeof value}`);
|
||||
|
||||
Object
|
||||
.entries(check)
|
||||
.forEach(([key, other]) => assertMatch((value as Record<string, unknown>)[key], other));
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* A helper to match a string value against another string or regex
|
||||
*/
|
||||
function assertMatchStr (value: unknown, check: string | RegExp): void {
|
||||
assert.ok(typeof value === 'string', `Expected string value, found ${typeof value}`);
|
||||
|
||||
typeof check === 'string'
|
||||
? assert.strictEqual(value, check)
|
||||
: assert.match(value, check);
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* A helper to check the type of a specific value as used in the expect.any(Clazz) matcher
|
||||
*
|
||||
* @see https://github.com/facebook/jest/blob/a49c88610e49a3242576160740a32a2fe11161e1/packages/expect/src/asymmetricMatchers.ts#L103-L133
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
function assertInstanceOf (value: unknown, Clazz: Function): void {
|
||||
assert.ok(
|
||||
(Clazz === Array && Array.isArray(value)) ||
|
||||
(Clazz === BigInt && typeof value === 'bigint') ||
|
||||
(Clazz === Boolean && typeof value === 'boolean') ||
|
||||
(Clazz === Function && typeof value === 'function') ||
|
||||
(Clazz === Number && typeof value === 'number') ||
|
||||
(Clazz === Object && typeof value === 'object') ||
|
||||
(Clazz === String && typeof value === 'string') ||
|
||||
(Clazz === Symbol && typeof value === 'symbol') ||
|
||||
(value instanceof Clazz),
|
||||
`${value as string} is not an instance of ${Clazz.name}`
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* A helper to ensure that the supplied string/array does include the checker string.
|
||||
*
|
||||
* @param {string | unknown[]} value
|
||||
* @param {string} check
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
function assertIncludes (value: string | unknown[], [check, Clazz]: [string, Function]): void {
|
||||
assertInstanceOf(value, Clazz);
|
||||
assert.ok(value?.includes(check), `${value as string} does not include ${check}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up the shimmed expect(...) function, including all .to* and .not.to*
|
||||
* functions. This is not comprehensive, rather is contains what we need to
|
||||
* make all polkadot-js usages pass
|
||||
**/
|
||||
export function expect () {
|
||||
const rootMatchers = {
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
any: (Clazz: Function) => new Matcher(assertInstanceOf, Clazz),
|
||||
anything: () => new Matcher(assertNonNullish),
|
||||
arrayContaining: (check: string) => new Matcher(assertIncludes, [check, Array]),
|
||||
objectContaining: (check: object) => new Matcher(assertMatchObj, check),
|
||||
stringContaining: (check: string) => new Matcher(assertIncludes, [check, String]),
|
||||
stringMatching: (check: string | RegExp) => new Matcher(assertMatchStr, check)
|
||||
};
|
||||
|
||||
return {
|
||||
expect: enhanceObj(enhanceObj((value: unknown) =>
|
||||
enhanceObj({
|
||||
not: enhanceObj({
|
||||
toBe: (other: unknown) => assert.notStrictEqual(value, other),
|
||||
toBeDefined: () => assert.ok(value === undefined),
|
||||
toBeNull: (value: unknown) => assert.ok(value !== null),
|
||||
toBeUndefined: () => assert.ok(value !== undefined),
|
||||
toEqual: (other: unknown) => assert.notDeepEqual(value, other),
|
||||
toHaveBeenCalled: () => assert.ok(!(value as Mocked | undefined)?.mock?.calls.length),
|
||||
toThrow: (message?: RegExp | Error | string) => assert.doesNotThrow(value as () => unknown, message && { message } as Error)
|
||||
}, stubExpectFnNot),
|
||||
rejects: enhanceObj({
|
||||
toThrow: (message?: RegExp | Error | string) => assert.rejects(value as Promise<unknown>, message && { message } as Error)
|
||||
}, stubExpectFnRejects),
|
||||
resolves: enhanceObj({}, stubExpectFnResolves),
|
||||
toBe: (other: unknown) => assert.strictEqual(value, other),
|
||||
toBeDefined: () => assert.ok(value !== undefined),
|
||||
toBeFalsy: () => assert.ok(!value),
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
toBeInstanceOf: (Clazz: Function) => assertInstanceOf(value, Clazz),
|
||||
toBeNull: (value: unknown) => assert.ok(value === null),
|
||||
toBeTruthy: () => assert.ok(value),
|
||||
toBeUndefined: () => assert.ok(value === undefined),
|
||||
toEqual: (other: unknown) => assert.deepEqual(value, other),
|
||||
toHaveBeenCalled: () => assert.ok((value as Mocked | undefined)?.mock?.calls.length),
|
||||
toHaveBeenCalledTimes: (count: number) => assert.equal((value as Mocked | undefined)?.mock?.calls.length, count),
|
||||
toHaveBeenCalledWith: (...args: unknown[]) => assertSomeCallHasArgs((value as Mocked | undefined), args),
|
||||
toHaveBeenLastCalledWith: (...args: unknown[]) => assertCallHasArgs((value as Mocked | undefined)?.mock?.calls.at(-1), args),
|
||||
toHaveLength: (length: number) => assert.equal((value as unknown[] | undefined)?.length, length),
|
||||
toMatch: (check: string | RegExp) => assertMatchStr(value, check),
|
||||
toMatchObject: (check: object) => assertMatchObj(value, check),
|
||||
toThrow: (message?: RegExp | Error | string) => assert.throws(value as () => unknown, message && { message } as Error)
|
||||
}, stubExpectFn), rootMatchers), stubExpect)
|
||||
};
|
||||
}
|
||||
Vendored
+21
@@ -0,0 +1,21 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import { browser } from './browser.js';
|
||||
import { expect } from './expect.js';
|
||||
import { jest } from './jest.js';
|
||||
import { lifecycle } from './lifecycle.js';
|
||||
import { suite } from './suite.js';
|
||||
|
||||
/**
|
||||
* Exposes the jest-y environment via globals.
|
||||
*/
|
||||
export function exposeEnv (isBrowser: boolean): void {
|
||||
[expect, jest, lifecycle, suite, isBrowser && browser].forEach((env) => {
|
||||
env && Object
|
||||
.entries(env())
|
||||
.forEach(([key, fn]) => {
|
||||
globalThis[key as 'undefined'] ??= fn;
|
||||
});
|
||||
});
|
||||
}
|
||||
+168
@@ -0,0 +1,168 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
describe('jest', () => {
|
||||
it('has been enhanced', () => {
|
||||
expect(jest.setTimeout).toBeDefined();
|
||||
});
|
||||
|
||||
describe('.fn', () => {
|
||||
it('works on .toHaveBeenCalled', () => {
|
||||
const mock = jest.fn(() => 3);
|
||||
|
||||
expect(mock).not.toHaveBeenCalled();
|
||||
expect(mock()).toBe(3);
|
||||
expect(mock).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('works on .toHaveBeenCalledTimes', () => {
|
||||
const mock = jest.fn(() => 3);
|
||||
|
||||
expect(mock()).toBe(3);
|
||||
expect(mock()).toBe(3);
|
||||
|
||||
expect(mock).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('works with .toHaveBeenCalledWith', () => {
|
||||
const sum = jest.fn((a: number, b: number) => a + b);
|
||||
|
||||
expect(sum(1, 2)).toBe(3);
|
||||
|
||||
expect(sum).toHaveBeenCalledWith(1, 2);
|
||||
|
||||
expect(sum(2, 3)).toBe(5);
|
||||
expect(sum(4, 5)).toBe(9);
|
||||
|
||||
expect(sum).toHaveBeenCalledWith(1, 2);
|
||||
expect(sum).toHaveBeenCalledWith(2, 3);
|
||||
expect(sum).toHaveBeenCalledWith(4, 5);
|
||||
|
||||
expect(sum).toHaveBeenLastCalledWith(4, 5);
|
||||
});
|
||||
|
||||
it('works with .toHaveBeenCalledWith & expect.objectContaining', () => {
|
||||
const test = jest.fn((a: unknown, b: unknown) => !!a && !!b);
|
||||
|
||||
test({ a: 123, b: 'test' }, null);
|
||||
|
||||
expect(test).toHaveBeenLastCalledWith({ a: 123, b: 'test' }, null);
|
||||
expect(test).toHaveBeenLastCalledWith(expect.objectContaining({}), null);
|
||||
expect(test).toHaveBeenLastCalledWith(expect.objectContaining({ a: 123 }), null);
|
||||
expect(test).toHaveBeenLastCalledWith(expect.objectContaining({ b: 'test' }), null);
|
||||
});
|
||||
|
||||
it('allows .mockImplementation', () => {
|
||||
const mock = jest.fn(() => 3);
|
||||
|
||||
expect(mock()).toBe(3);
|
||||
|
||||
mock.mockImplementation(() => 4);
|
||||
|
||||
expect(mock()).toBe(4);
|
||||
expect(mock()).toBe(4);
|
||||
});
|
||||
|
||||
it('allows .mockImplementationOnce', () => {
|
||||
const mock = jest.fn(() => 3);
|
||||
|
||||
expect(mock()).toBe(3);
|
||||
|
||||
mock.mockImplementationOnce(() => 4);
|
||||
|
||||
expect(mock()).toBe(4);
|
||||
expect(mock()).toBe(3);
|
||||
});
|
||||
|
||||
it('allows resets', () => {
|
||||
const mock = jest.fn(() => 3);
|
||||
|
||||
expect(mock).not.toHaveBeenCalled();
|
||||
expect(mock()).toBe(3);
|
||||
expect(mock).toHaveBeenCalled();
|
||||
|
||||
mock.mockReset();
|
||||
|
||||
expect(mock).not.toHaveBeenCalled();
|
||||
expect(mock()).toBe(3);
|
||||
expect(mock).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('.spyOn', () => {
|
||||
it('works on .toHaveBeenCalled', () => {
|
||||
const obj = {
|
||||
add: (a: number, b: number) => a + b
|
||||
};
|
||||
const spy = jest.spyOn(obj, 'add');
|
||||
|
||||
expect(spy).not.toHaveBeenCalled();
|
||||
expect(obj.add(1, 2)).toBe(3);
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('allows .mockImplementation', () => {
|
||||
const obj = {
|
||||
add: (a: number, b: number) => a + b
|
||||
};
|
||||
const spy = jest.spyOn(obj, 'add');
|
||||
|
||||
expect(obj.add(1, 2)).toBe(3);
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
|
||||
spy.mockImplementation(() => 4);
|
||||
|
||||
expect(obj.add(1, 2)).toBe(4);
|
||||
expect(spy).toHaveBeenCalledTimes(2);
|
||||
expect(obj.add(1, 2)).toBe(4);
|
||||
expect(spy).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
|
||||
it('allows .mockImplementationOnce', () => {
|
||||
const obj = {
|
||||
add: (a: number, b: number) => a + b
|
||||
};
|
||||
const spy = jest.spyOn(obj, 'add');
|
||||
|
||||
expect(obj.add(1, 2)).toBe(3);
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
|
||||
spy.mockImplementationOnce(() => 4);
|
||||
|
||||
expect(obj.add(1, 2)).toBe(4);
|
||||
expect(spy).toHaveBeenCalledTimes(2);
|
||||
expect(obj.add(1, 2)).toBe(3);
|
||||
expect(spy).toHaveBeenCalledTimes(3);
|
||||
});
|
||||
|
||||
it('allows resets', () => {
|
||||
const obj = {
|
||||
add: (a: number, b: number) => a + b
|
||||
};
|
||||
const spy = jest.spyOn(obj, 'add');
|
||||
|
||||
expect(spy).not.toHaveBeenCalled();
|
||||
expect(obj.add(1, 2)).toBe(3);
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
|
||||
spy.mockReset();
|
||||
|
||||
expect(spy).not.toHaveBeenCalled();
|
||||
expect(obj.add(1, 2)).toBe(3);
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('allows restores', () => {
|
||||
const obj = {
|
||||
add: (a: number, b: number) => a + b
|
||||
};
|
||||
const spy = jest.spyOn(obj, 'add');
|
||||
|
||||
expect(spy).not.toHaveBeenCalled();
|
||||
expect(obj.add(1, 2)).toBe(3);
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
|
||||
spy.mockRestore();
|
||||
});
|
||||
});
|
||||
});
|
||||
Vendored
+68
@@ -0,0 +1,68 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import type { AnyFn, WithMock } from '../types.js';
|
||||
|
||||
import { mock } from 'node:test';
|
||||
|
||||
import { enhanceObj, stubObj, warnObj } from '../util.js';
|
||||
|
||||
// logged via Object.keys(jest).sort()
|
||||
const JEST_KEYS_STUB = ['advanceTimersByTime', 'advanceTimersToNextTimer', 'autoMockOff', 'autoMockOn', 'clearAllMocks', 'clearAllTimers', 'createMockFromModule', 'deepUnmock', 'disableAutomock', 'doMock', 'dontMock', 'enableAutomock', 'fn', 'genMockFromModule', 'getRealSystemTime', 'getSeed', 'getTimerCount', 'isEnvironmentTornDown', 'isMockFunction', 'isolateModules', 'isolateModulesAsync', 'mock', 'mocked', 'now', 'replaceProperty', 'requireActual', 'requireMock', 'resetAllMocks', 'resetModules', 'restoreAllMocks', 'retryTimes', 'runAllImmediates', 'runAllTicks', 'runAllTimers', 'runOnlyPendingTimers', 'setMock', 'setSystemTime', 'setTimeout', 'spyOn', 'unmock', 'unstable_mockModule', 'useFakeTimers', 'useRealTimers'] as const;
|
||||
|
||||
const JEST_KEYS_WARN = ['setTimeout'] as const;
|
||||
|
||||
// logged via Object.keys(jest.fn()).sort()
|
||||
const MOCK_KEYS_STUB = ['_isMockFunction', 'getMockImplementation', 'getMockName', 'mock', 'mockClear', 'mockImplementation', 'mockImplementationOnce', 'mockName', 'mockRejectedValue', 'mockRejectedValueOnce', 'mockReset', 'mockResolvedValue', 'mockResolvedValueOnce', 'mockRestore', 'mockReturnThis', 'mockReturnValue', 'mockReturnValueOnce', 'withImplementation'] as const;
|
||||
|
||||
const jestStub = stubObj('jest', JEST_KEYS_STUB);
|
||||
const jestWarn = warnObj('jest', JEST_KEYS_WARN);
|
||||
const mockStub = stubObj('jest.fn()', MOCK_KEYS_STUB);
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* This adds the mockReset and mockRestore functionality to any
|
||||
* spy or mock function
|
||||
**/
|
||||
function extendMock <F extends AnyFn> (mocked: WithMock<F>) {
|
||||
// We use the node:test mock here for casting below - however we
|
||||
// don't want this in any method signature since this is a private
|
||||
// types export, which could get us in "some" trouble
|
||||
//
|
||||
// Effectively the casts below ensure that our WithMock<*> aligns
|
||||
// on a high-level to what we use via private type...
|
||||
const spy = (mocked as unknown as ReturnType<typeof mock['fn']>);
|
||||
|
||||
return enhanceObj(enhanceObj(mocked, {
|
||||
mockImplementation: <F extends AnyFn> (fn: F): void => {
|
||||
spy.mock.mockImplementation(fn);
|
||||
},
|
||||
mockImplementationOnce: <F extends AnyFn> (fn: F): void => {
|
||||
spy.mock.mockImplementationOnce(fn);
|
||||
},
|
||||
mockReset: (): void => {
|
||||
spy.mock.resetCalls();
|
||||
},
|
||||
mockRestore: (): void => {
|
||||
spy.mock.restore();
|
||||
}
|
||||
}), mockStub);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up the jest object. This is certainly not extensive, and probably
|
||||
* not quite meant to be (never say never). Rather this adds the functionality
|
||||
* that we use in the polkadot-js projects.
|
||||
**/
|
||||
export function jest () {
|
||||
return {
|
||||
jest: enhanceObj(enhanceObj({
|
||||
fn: <F extends AnyFn> (fn?: F) => extendMock<F>(mock.fn(fn)),
|
||||
restoreAllMocks: () => {
|
||||
mock.reset();
|
||||
},
|
||||
spyOn: <F extends AnyFn> (obj: object, key: string) => extendMock<F>(mock.method(obj, key as keyof typeof obj))
|
||||
}, jestWarn), jestStub)
|
||||
};
|
||||
}
|
||||
+18
@@ -0,0 +1,18 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import { after, afterEach, before, beforeEach } from 'node:test';
|
||||
|
||||
/**
|
||||
* This ensures that the before/after functions are exposed
|
||||
**/
|
||||
export function lifecycle () {
|
||||
return {
|
||||
after,
|
||||
afterAll: after,
|
||||
afterEach,
|
||||
before,
|
||||
beforeAll: before,
|
||||
beforeEach
|
||||
};
|
||||
}
|
||||
+61
@@ -0,0 +1,61 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
describe('describe()', () => {
|
||||
// eslint-disable-next-line jest/no-focused-tests
|
||||
describe.only('.only', () => {
|
||||
it('runs this one', () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('.skip', () => {
|
||||
// eslint-disable-next-line jest/no-disabled-tests
|
||||
describe.skip('.only (.skip)', () => {
|
||||
it('skips inside .only', () => {
|
||||
expect(true).toBe(true);
|
||||
|
||||
throw new Error('FATAL: This should not run');
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('it()', () => {
|
||||
it('has been enhanced', () => {
|
||||
expect(it.todo).toBeDefined();
|
||||
});
|
||||
|
||||
it('allows promises', async () => {
|
||||
expect(await Promise.resolve(true)).toBe(true);
|
||||
});
|
||||
|
||||
describe('.only', () => {
|
||||
// eslint-disable-next-line jest/no-focused-tests
|
||||
it.only('runs this test when .only is used', () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
|
||||
// eslint-disable-next-line jest/no-disabled-tests
|
||||
it.skip('skips when .skip is used', () => {
|
||||
expect(true).toBe(true);
|
||||
|
||||
throw new Error('FATAL: This should not run');
|
||||
});
|
||||
});
|
||||
|
||||
describe('.skip', () => {
|
||||
// eslint-disable-next-line jest/no-disabled-tests
|
||||
it.skip('skips when .skip is used', () => {
|
||||
expect(true).toBe(true);
|
||||
|
||||
throw new Error('FATAL: This should not run');
|
||||
});
|
||||
});
|
||||
|
||||
describe('.todo', () => {
|
||||
it.todo('marks as a todo when .todo is used', () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
Vendored
+48
@@ -0,0 +1,48 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
|
||||
import { enhanceObj } from '../util.js';
|
||||
|
||||
interface WrapOpts {
|
||||
only?: boolean;
|
||||
skip?: boolean;
|
||||
todo?: boolean;
|
||||
}
|
||||
|
||||
type WrapFn = (name: string, options: { only?: boolean; skip?: boolean; timeout?: number; todo?: boolean; }, fn: () => void | Promise<void>) => void | Promise<void>;
|
||||
|
||||
const MINUTE = 60 * 1000;
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* Wraps either describe or it with relevant .only, .skip, .todo & .each helpers,
|
||||
* shimming it into a Jest-compatible environment.
|
||||
*
|
||||
* @param {} fn
|
||||
*/
|
||||
function createWrapper <T extends WrapFn> (fn: T, defaultTimeout: number) {
|
||||
const wrap = (opts: WrapOpts) => (name: string, exec: () => void | Promise<void>, timeout?: number) => fn(name, { ...opts, timeout: (timeout || defaultTimeout) }, exec) as unknown as void;
|
||||
|
||||
// Ensure that we have consistent helpers on the function. These are not consistently
|
||||
// applied accross all node:test versions, latest has all, so always apply ours.
|
||||
// Instead of node:test options for e.g. timeout, we provide a Jest-compatible signature
|
||||
return enhanceObj(wrap({}), {
|
||||
only: wrap({ only: true }),
|
||||
skip: wrap({ skip: true }),
|
||||
todo: wrap({ todo: true })
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* This ensures that the describe and it functions match our actual usages.
|
||||
* This includes .only, .skip and .todo helpers (.each is not applied)
|
||||
**/
|
||||
export function suite () {
|
||||
return {
|
||||
describe: createWrapper(describe, 60 * MINUTE),
|
||||
it: createWrapper(it, 2 * MINUTE)
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,4 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
module.exports = {};
|
||||
Vendored
+32
@@ -0,0 +1,32 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
/* eslint-disable no-var */
|
||||
|
||||
import type { expect } from './env/expect.js';
|
||||
import type { jest } from './env/jest.js';
|
||||
import type { lifecycle } from './env/lifecycle.js';
|
||||
import type { suite } from './env/suite.js';
|
||||
|
||||
type Expect = ReturnType<typeof expect>;
|
||||
|
||||
type Jest = ReturnType<typeof jest>;
|
||||
|
||||
type Lifecycle = ReturnType<typeof lifecycle>;
|
||||
|
||||
type Suite = ReturnType<typeof suite>;
|
||||
|
||||
declare global {
|
||||
var after: Lifecycle['after'];
|
||||
var afterAll: Lifecycle['afterAll'];
|
||||
var afterEach: Lifecycle['afterEach'];
|
||||
var before: Lifecycle['before'];
|
||||
var beforeAll: Lifecycle['beforeAll'];
|
||||
var beforeEach: Lifecycle['beforeEach'];
|
||||
var describe: Suite['describe'];
|
||||
var expect: Expect['expect'];
|
||||
var it: Suite['it'];
|
||||
var jest: Jest['jest'];
|
||||
}
|
||||
|
||||
export {};
|
||||
@@ -0,0 +1,4 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
throw new Error('Use node --require @polkadot/dev-test/{node, browser} depending on the required environment');
|
||||
@@ -0,0 +1,6 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import { exposeEnv } from './env/index.js';
|
||||
|
||||
exposeEnv(false);
|
||||
@@ -0,0 +1,6 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
// Do not edit, auto-generated by @polkadot/dev
|
||||
|
||||
export const packageInfo = { name: '@polkadot/dev-test', path: 'auto', type: 'auto', version: '0.84.2' };
|
||||
@@ -0,0 +1,28 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
export type AnyFn = (...args: any[]) => any;
|
||||
|
||||
export type BaseObj = Record<string, unknown>;
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
export type BaseFn = Function;
|
||||
|
||||
export type StubFn = (...args: unknown[]) => unknown;
|
||||
|
||||
// These basically needs to align with the ReturnType<typeof node:test:mock['fn']>
|
||||
// functions at least for the functionality that we are using: accessing calls &
|
||||
// managing the mock interface with resets and restores
|
||||
export type WithMock<F extends AnyFn> = F & {
|
||||
mock: {
|
||||
calls: {
|
||||
arguments: unknown[];
|
||||
}[];
|
||||
|
||||
mockImplementation: (fn: AnyFn) => void;
|
||||
mockImplementationOnce: (fn: AnyFn) => void;
|
||||
resetCalls: () => void;
|
||||
restore: () => void;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,74 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
/// <reference types="@polkadot/dev-test/globals.d.ts" />
|
||||
|
||||
import { enhanceObj, stubObj, warnObj } from './util.js';
|
||||
|
||||
describe('enhanceObj', () => {
|
||||
it('extends objects with non-existing values', () => {
|
||||
const test = enhanceObj(
|
||||
enhanceObj(
|
||||
{ a: () => 1 },
|
||||
{ b: () => 2 }
|
||||
),
|
||||
{ c: () => 3 }
|
||||
);
|
||||
|
||||
expect(test.a()).toBe(1);
|
||||
expect(test.b()).toBe(2);
|
||||
expect(test.c()).toBe(3);
|
||||
});
|
||||
|
||||
it('does not override existing values', () => {
|
||||
const test = enhanceObj(
|
||||
enhanceObj(
|
||||
{ a: 0, b: () => 1 },
|
||||
{ a: () => 0, b: () => 2 }
|
||||
),
|
||||
{ c: () => 2 }
|
||||
);
|
||||
|
||||
expect(test.a).toBe(0);
|
||||
expect(test.b()).toBe(1);
|
||||
expect(test.c()).toBe(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('stubObj', () => {
|
||||
it('has entries throwing for unimplemented values', () => {
|
||||
const test = stubObj('obj', ['a', 'b'] as const);
|
||||
|
||||
expect(
|
||||
() => test.b()
|
||||
).toThrow('obj.b has not been implemented');
|
||||
});
|
||||
|
||||
it('has entries throwing for unimplemented values (w/ alternatives)', () => {
|
||||
const test = stubObj('obj', ['a', 'b'] as const, { b: 'obj.a' });
|
||||
|
||||
expect(
|
||||
() => test.b()
|
||||
).toThrow('obj.b has not been implemented (Use obj.a instead)');
|
||||
});
|
||||
});
|
||||
|
||||
describe('warnObj', () => {
|
||||
let spy: ReturnType<typeof jest.spyOn>;
|
||||
|
||||
beforeEach(() => {
|
||||
spy = jest.spyOn(console, 'warn');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
spy.mockRestore();
|
||||
});
|
||||
|
||||
it('has entries warning on unimplemented', () => {
|
||||
const test = warnObj('obj', ['a', 'b'] as const);
|
||||
|
||||
test.b();
|
||||
|
||||
expect(spy).toHaveBeenCalledWith('obj.b has been implemented as a noop');
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,57 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-test authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import type { BaseFn, BaseObj, StubFn } from './types.js';
|
||||
|
||||
/**
|
||||
* Extends an existing object with the additional function if they
|
||||
* are not already existing.
|
||||
*/
|
||||
export function enhanceObj <T extends BaseObj | BaseFn, X> (obj: T, extra: X) {
|
||||
Object
|
||||
.entries(extra as Record<string, unknown>)
|
||||
.forEach(([key, value]) => {
|
||||
(obj as Record<string, unknown>)[key] ??= value;
|
||||
});
|
||||
|
||||
return obj as T & Omit<X, keyof T>;
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* A helper to create a stub object based wite the stub creator supplied
|
||||
*/
|
||||
function createStub <N extends readonly string[]> (keys: N, creator: (key: string) => StubFn) {
|
||||
return keys.reduce<Record<string, StubFn>>((obj, key) => {
|
||||
obj[key] ??= creator(key);
|
||||
|
||||
return obj;
|
||||
}, {}) as unknown as { [K in N[number]]: StubFn };
|
||||
}
|
||||
|
||||
/**
|
||||
* Extends a given object with the named functions if they do not
|
||||
* already exist on the object.
|
||||
*
|
||||
* @type {StubObjFn}
|
||||
*/
|
||||
export function stubObj <N extends readonly string[]> (objName: string, keys: N, alts?: Record<string, string>) {
|
||||
return createStub(keys, (key) => () => {
|
||||
const alt = alts?.[key];
|
||||
|
||||
throw new Error(`${objName}.${key} has not been implemented${alt ? ` (Use ${alt} instead)` : ''}`);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Extends a given object with the named functions if they do not
|
||||
* already exist on the object.
|
||||
*
|
||||
* @type {StubObjFn}
|
||||
*/
|
||||
export function warnObj <N extends readonly string[]> (objName: string, keys: N) {
|
||||
return createStub(keys, (key) => () => {
|
||||
console.warn(`${objName}.${key} has been implemented as a noop`);
|
||||
});
|
||||
}
|
||||
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"extends": "../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"baseUrl": "..",
|
||||
"outDir": "./build",
|
||||
"rootDir": "./src"
|
||||
},
|
||||
"exclude": [
|
||||
"**/mod.ts",
|
||||
"src/**/*.spec.ts"
|
||||
],
|
||||
"include": [
|
||||
"src/**/*"
|
||||
],
|
||||
"references": []
|
||||
}
|
||||
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"extends": "../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"baseUrl": "..",
|
||||
"outDir": "./build",
|
||||
"rootDir": "./src",
|
||||
"emitDeclarationOnly": false,
|
||||
"noEmit": true
|
||||
},
|
||||
"include": [
|
||||
"src/**/*.spec.ts"
|
||||
],
|
||||
"references": [
|
||||
{ "path": "../dev-test/tsconfig.build.json" }
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,47 @@
|
||||
# @pezkuwi/dev-ts
|
||||
|
||||
This is a Node TS loader, specifically written to cater for the polkadot-js needs, aka it is meant to be used inside polkadot-js projects. It doesn't aim to be a catch-all resolver, although it does cover quite a large spectrum of functionality.
|
||||
|
||||
It caters for -
|
||||
|
||||
1. Pass through resolution and compiling of .ts & .tsx sources
|
||||
2. Resolution of TS aliases
|
||||
3. Resolution of .json files (alongside aliases)
|
||||
4. Resolution of extensionless imports (basic, best-effort)
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
Just add the loader via the Node.js `--loader` option. The API supported here is only for Node 16.12+, so ensure a new-ish LTS version is used.
|
||||
|
||||
```
|
||||
node --loader @pezkuwi/dev-ts ...
|
||||
```
|
||||
|
||||
Internally to the polkadot-js libraries, loader caching is used. This means that compiled files are stored on-disk alongside the `/src/` folder in `/build-loader/`. To enable caching behavior, the loader endpoint is changed slightly,
|
||||
|
||||
```
|
||||
node --loader @pezkuwi/dev-ts/cached ...
|
||||
```
|
||||
|
||||
This is generally the suggested default, but it is only exposed via a different loader endpoint to ensure that users explicitly opt-in and are not surprised by "random output folders" being created.
|
||||
|
||||
|
||||
## Caveats
|
||||
|
||||
The Node.js loader API could change in the future (as it has in the Node.js 16.12 version), so it _may_ break or stop working on newer versions, and obviously won't work at all on older versions. As of this writing (Node.js 18.14 being the most-recent LTS), using the `--loader` option will print a warning.
|
||||
|
||||
With all that said, it is used as-is for the polkadot-js test infrastructure and currently operates without issues in _that_ environment.
|
||||
|
||||
TL;DR Different configs could yield some issues.
|
||||
|
||||
|
||||
## Why
|
||||
|
||||
Yes, there are other options available - [@babel/register](https://babeljs.io/docs/babel-register), [@esbuild-kit/esm-loader](https://github.com/esbuild-kit/esm-loader), [@swc/register](https://github.com/swc-project/register), [@swc-node/loader](https://github.com/swc-project/swc-node/tree/master/packages/loader), [ts-node/esm](https://github.com/TypeStrong/ts-node), ...
|
||||
|
||||
We started off with a basic `swc` loader (after swapping the infrastructure from Jest & Babel), just due to the fact that (at that time, and as of writing still) the base swc loader is still a WIP against the newer loader APIs. Since we didn't want to add more dependencies (and compile differently to our internal compiler infrastructure), we [adapted our own](https://nodejs.org/api/esm.html#esm_transpiler_loader).
|
||||
|
||||
Since then we just swapped to using base `tsc` everywhere (for all builds) and may look at changing again (swc, esbuild. etc...) in the future. So effectively having a single loader, while re-inventing the wheel somewhat (since there seems to be a _lot_ of options available) allows us to just keep the loader compiling options fully aligned with what TS -> JS output approach we take.
|
||||
|
||||
It meets our requirements: it aligns fully with the overall configs we use across polkadot-js, compiles to ESM (no CJS used when testing/running) and has minimal dependencies that don't add bloat. In most cases you would probably be better off with one of the loaders/registration approaches linked in the first paragraph.
|
||||
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"author": "Jaco Greeff <jacogr@gmail.com>",
|
||||
"bugs": "https://github.com/pezkuwi/dev/issues",
|
||||
"description": "An TS -> ESM loader for Node >= 16.12",
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"homepage": "https://github.com/pezkuwi/dev/tree/master/packages/dev-ts#readme",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@pezkuwi/dev-ts",
|
||||
"repository": {
|
||||
"directory": "packages/dev-ts",
|
||||
"type": "git",
|
||||
"url": "https://github.com/pezkuwi/dev.git"
|
||||
},
|
||||
"sideEffects": false,
|
||||
"type": "module",
|
||||
"version": "0.84.2",
|
||||
"main": "./index.js",
|
||||
"exports": {
|
||||
"./globals.d.ts": "./src/globals.d.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"json5": "^2.2.3",
|
||||
"tslib": "^2.7.0",
|
||||
"typescript": "^5.5.4"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,8 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-ts authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import { loaderOptions } from './common.js';
|
||||
|
||||
loaderOptions.isCached = true;
|
||||
|
||||
export * from './index.js';
|
||||
@@ -0,0 +1,32 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-ts authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import type { LoaderOptions } from './types.js';
|
||||
|
||||
import path from 'node:path';
|
||||
import process from 'node:process';
|
||||
import { pathToFileURL } from 'node:url';
|
||||
|
||||
/** The path we are being executed from */
|
||||
export const CWD_PATH = process.cwd();
|
||||
|
||||
/** The cwd path we are being executed from in URL form */
|
||||
export const CWD_URL = pathToFileURL(`${CWD_PATH}/`);
|
||||
|
||||
/** The root path to node_modules (assuming it is in the root) */
|
||||
export const MOD_PATH = path.join(CWD_PATH, 'node_modules');
|
||||
|
||||
/** List of allowed extensions for mappings */
|
||||
export const EXT_TS_ARRAY = ['.ts', '.tsx'];
|
||||
|
||||
/** RegEx for files that we support via this loader */
|
||||
export const EXT_TS_REGEX = /\.tsx?$/;
|
||||
|
||||
/** RegEx for matching JS files (imports map to TS) */
|
||||
export const EXT_JS_REGEX = /\.jsx?$/;
|
||||
|
||||
/** RegEx for json files (as actually aliassed in polkadot-js) */
|
||||
export const EXT_JSON_REGEX = /\.json$/;
|
||||
|
||||
/** Options for loader config */
|
||||
export const loaderOptions: LoaderOptions = {};
|
||||
@@ -0,0 +1,13 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-ts authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
// Adapted from: https://nodejs.org/api/esm.html#esm_transpiler_loader
|
||||
//
|
||||
// NOTE: This assumes the loader implementation for Node.js >= 18
|
||||
|
||||
import { loaderOptions } from './common.js';
|
||||
|
||||
loaderOptions.isCached = new URL(import.meta.url).searchParams.get('isCached') === 'true';
|
||||
|
||||
export { load } from './loader.js';
|
||||
export { resolve } from './resolver.js';
|
||||
@@ -0,0 +1,89 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-ts authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import crypto from 'node:crypto';
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import ts from 'typescript';
|
||||
|
||||
import { EXT_TS_REGEX, loaderOptions } from './common.js';
|
||||
|
||||
interface Loaded {
|
||||
format: 'commonjs' | 'module';
|
||||
shortCircuit?: boolean;
|
||||
source: string;
|
||||
}
|
||||
|
||||
type NexLoad = (url: string, context: Record<string, unknown>) => Promise<Loaded>;
|
||||
|
||||
/**
|
||||
* Load all TypeScript files, compile via tsc on-the-fly
|
||||
**/
|
||||
export async function load (url: string, context: Record<string, unknown>, nextLoad: NexLoad): Promise<Loaded> {
|
||||
if (EXT_TS_REGEX.test(url)) {
|
||||
// used the chained loaders to retrieve
|
||||
const { source } = await nextLoad(url, {
|
||||
...context,
|
||||
format: 'module'
|
||||
});
|
||||
|
||||
// we use a hash of the source to determine caching
|
||||
const sourceHash = `//# sourceHash=${crypto.createHash('sha256').update(source).digest('hex')}`;
|
||||
const compiledFile = url.includes('/src/')
|
||||
? fileURLToPath(
|
||||
url
|
||||
.replace(/\.tsx?$/, '.js')
|
||||
.replace('/src/', '/build-loader/')
|
||||
)
|
||||
: null;
|
||||
|
||||
if (loaderOptions.isCached && compiledFile && fs.existsSync(compiledFile)) {
|
||||
const compiled = fs.readFileSync(compiledFile, 'utf-8');
|
||||
|
||||
if (compiled.includes(sourceHash)) {
|
||||
return {
|
||||
format: 'module',
|
||||
source: compiled
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// compile via typescript
|
||||
const { outputText } = ts.transpileModule(source.toString(), {
|
||||
compilerOptions: {
|
||||
...(
|
||||
url.endsWith('.tsx')
|
||||
? { jsx: ts.JsxEmit.ReactJSX }
|
||||
: {}
|
||||
),
|
||||
esModuleInterop: true,
|
||||
importHelpers: true,
|
||||
inlineSourceMap: true,
|
||||
module: ts.ModuleKind.ESNext,
|
||||
moduleResolution: ts.ModuleResolutionKind.NodeNext,
|
||||
skipLibCheck: true,
|
||||
// Aligns with packages/dev/scripts/polkadot-dev-build-ts & packages/dev/config/tsconfig
|
||||
target: ts.ScriptTarget.ES2022
|
||||
},
|
||||
fileName: fileURLToPath(url)
|
||||
});
|
||||
|
||||
if (loaderOptions.isCached && compiledFile) {
|
||||
const compiledDir = path.dirname(compiledFile);
|
||||
|
||||
if (!fs.existsSync(compiledDir)) {
|
||||
fs.mkdirSync(compiledDir, { recursive: true });
|
||||
}
|
||||
|
||||
fs.writeFileSync(compiledFile, `${outputText}\n${sourceHash}`, 'utf-8');
|
||||
}
|
||||
|
||||
return {
|
||||
format: 'module',
|
||||
source: outputText
|
||||
};
|
||||
}
|
||||
|
||||
return nextLoad(url, context);
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-ts authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
// Do not edit, auto-generated by @polkadot/dev
|
||||
|
||||
export const packageInfo = { name: '@polkadot/dev-ts', path: 'auto', type: 'auto', version: '0.84.2' };
|
||||
@@ -0,0 +1,134 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-ts authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
/// <reference types="@polkadot/dev-test/globals.d.ts" />
|
||||
|
||||
import path from 'node:path';
|
||||
import { pathToFileURL } from 'node:url';
|
||||
|
||||
import { CWD_PATH } from './common.js';
|
||||
import { resolveAlias, resolveExtBare, resolveExtJs, resolveExtJson, resolveExtTs } from './resolver.js';
|
||||
|
||||
const ROOT_URL = pathToFileURL(`${CWD_PATH}/`);
|
||||
const SRC_PATH = 'packages/dev/src';
|
||||
const SRC_URL = pathToFileURL(`${CWD_PATH}/${SRC_PATH}/`);
|
||||
const INDEX_PATH = `${SRC_PATH}/index.ts`;
|
||||
const INDEX_URL = pathToFileURL(INDEX_PATH);
|
||||
|
||||
describe('resolveExtTs', () => {
|
||||
it('returns no value for a non .{ts, tsx} extension', () => {
|
||||
expect(
|
||||
resolveExtTs(`./${SRC_PATH}/cjs/sample.js`, ROOT_URL)
|
||||
).not.toBeDefined();
|
||||
});
|
||||
|
||||
it('returns a correct object for a .ts extension', () => {
|
||||
expect(
|
||||
resolveExtTs(INDEX_PATH, ROOT_URL)
|
||||
).toEqual({
|
||||
format: 'module',
|
||||
shortCircuit: true,
|
||||
url: INDEX_URL.href
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('resolveExtJs', () => {
|
||||
const modFound = {
|
||||
format: 'module',
|
||||
shortCircuit: true,
|
||||
url: pathToFileURL(`${CWD_PATH}/${SRC_PATH}/mod.ts`).href
|
||||
};
|
||||
|
||||
it('returns the correct value for ./mod.js resolution', () => {
|
||||
expect(
|
||||
resolveExtJs('./mod.js', SRC_URL)
|
||||
).toEqual(modFound);
|
||||
});
|
||||
|
||||
it('returns the correct value for ../mod.js resolution', () => {
|
||||
expect(
|
||||
resolveExtJs('../mod.js', pathToFileURL(`${CWD_PATH}/${SRC_PATH}/rootJs/index.ts`))
|
||||
).toEqual(modFound);
|
||||
});
|
||||
|
||||
it('returns a correct object for a .jsx extension', () => {
|
||||
expect(
|
||||
resolveExtJs(`./${SRC_PATH}/rootJs/Jsx.jsx`, ROOT_URL)
|
||||
).toEqual({
|
||||
format: 'module',
|
||||
shortCircuit: true,
|
||||
url: pathToFileURL(`${SRC_PATH}/rootJs/Jsx.tsx`).href
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('resolveExtJson', () => {
|
||||
it('resolves .json files', () => {
|
||||
expect(
|
||||
resolveExtJson('../package.json', SRC_URL)
|
||||
).toEqual({
|
||||
format: 'json',
|
||||
shortCircuit: true,
|
||||
url: pathToFileURL(path.join(SRC_PATH, '../package.json')).href
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('resolveExtBare', () => {
|
||||
const indexFound = {
|
||||
format: 'module',
|
||||
shortCircuit: true,
|
||||
url: INDEX_URL.href
|
||||
};
|
||||
|
||||
it('does not resolve non-relative paths', () => {
|
||||
expect(
|
||||
resolveExtBare(INDEX_PATH, ROOT_URL)
|
||||
).not.toBeDefined();
|
||||
});
|
||||
|
||||
it('resolves to the index via .', () => {
|
||||
expect(
|
||||
resolveExtBare('.', SRC_URL)
|
||||
).toEqual(indexFound);
|
||||
});
|
||||
|
||||
it('resolves to the index via ./index', () => {
|
||||
expect(
|
||||
resolveExtBare('./index', SRC_URL)
|
||||
).toEqual(indexFound);
|
||||
});
|
||||
|
||||
it('resolves to the sub-directory via ./rootJs', () => {
|
||||
expect(
|
||||
resolveExtBare('./rootJs', SRC_URL)
|
||||
).toEqual({
|
||||
format: 'module',
|
||||
shortCircuit: true,
|
||||
url: pathToFileURL(`${SRC_PATH}/rootJs/index.ts`).href
|
||||
});
|
||||
});
|
||||
|
||||
it('resolves to extensionless path', () => {
|
||||
expect(
|
||||
resolveExtBare('./packageInfo', SRC_URL)
|
||||
).toEqual({
|
||||
format: 'module',
|
||||
shortCircuit: true,
|
||||
url: pathToFileURL(`${SRC_PATH}/packageInfo.ts`).href
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('resolveAliases', () => {
|
||||
it('resolves packageInfo', () => {
|
||||
expect(
|
||||
resolveAlias('@polkadot/dev-ts/packageInfo', ROOT_URL)
|
||||
).toEqual({
|
||||
format: 'module',
|
||||
shortCircuit: true,
|
||||
url: pathToFileURL('packages/dev-ts/src/packageInfo.ts').href
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,224 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-ts authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { fileURLToPath, pathToFileURL, URL } from 'node:url';
|
||||
|
||||
import { CWD_URL, EXT_JS_REGEX, EXT_JSON_REGEX, EXT_TS_ARRAY, EXT_TS_REGEX } from './common.js';
|
||||
import { tsAliases } from './tsconfig.js';
|
||||
|
||||
interface Resolved {
|
||||
format: 'commonjs' | 'json' | 'module';
|
||||
shortCircuit?: boolean;
|
||||
url: string;
|
||||
}
|
||||
|
||||
interface ResolverContext {
|
||||
parentURL?: string;
|
||||
}
|
||||
|
||||
type Resolver = (specifier: string, context: ResolverContext) => Resolved | undefined;
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* From a specified URL, extract the actual full path as well as the
|
||||
* directory that this path reflects (either equivalent to path or the
|
||||
* root of the file being referenced)
|
||||
*/
|
||||
function getParentPath (parentUrl: URL | string): { parentDir: string; parentPath: string; } {
|
||||
const parentPath = fileURLToPath(parentUrl);
|
||||
|
||||
return {
|
||||
parentDir: fs.existsSync(parentPath) && fs.lstatSync(parentPath).isDirectory()
|
||||
? parentPath
|
||||
: path.dirname(parentPath),
|
||||
parentPath
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* Resolve fully-specified imports with extensions.
|
||||
**/
|
||||
export function resolveExtTs (specifier: string, parentUrl: URL | string): Resolved | void {
|
||||
// handle .ts extensions directly
|
||||
if (EXT_TS_REGEX.test(specifier)) {
|
||||
return {
|
||||
format: 'module',
|
||||
shortCircuit: true,
|
||||
url: new URL(specifier, parentUrl).href
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* Resolve fully-specified imports with extensions. Here we cater for the TS
|
||||
* mapping of import foo from './bar.js' where only './bar.ts' exists
|
||||
**/
|
||||
export function resolveExtJs (specifier: string, parentUrl: URL | string): Resolved | void {
|
||||
// handle ts imports where import *.js -> *.ts
|
||||
// (unlike the ts resolution, we only cater for relative paths)
|
||||
if (specifier.startsWith('.') && EXT_JS_REGEX.test(specifier)) {
|
||||
const full = fileURLToPath(new URL(specifier, parentUrl));
|
||||
|
||||
// when it doesn't exist, we try and see if a source replacement helps
|
||||
if (!fs.existsSync(full)) {
|
||||
const found = EXT_TS_ARRAY
|
||||
.map((e) => full.replace(EXT_JS_REGEX, e))
|
||||
.find((f) => fs.existsSync(f) && fs.lstatSync(f).isFile());
|
||||
|
||||
if (found) {
|
||||
return {
|
||||
format: 'module',
|
||||
shortCircuit: true,
|
||||
url: pathToFileURL(found).href
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* Resolution for Json files. Generally these would be via path aliasing.
|
||||
*/
|
||||
export function resolveExtJson (specifier: string, parentUrl: URL | string): Resolved | void {
|
||||
if (specifier.startsWith('.') && EXT_JSON_REGEX.test(specifier)) {
|
||||
const { parentDir } = getParentPath(parentUrl);
|
||||
const jsonPath = path.join(parentDir, specifier);
|
||||
|
||||
if (fs.existsSync(jsonPath)) {
|
||||
return {
|
||||
// .json needs to be in 'json' format for the loader, for the
|
||||
// the rest (it should only be TS) we use the 'module' format
|
||||
format: 'json',
|
||||
shortCircuit: true,
|
||||
url: pathToFileURL(jsonPath).href
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* Resolve relative (extensionless) paths.
|
||||
*
|
||||
* At some point we probably might need to extend this to cater for the
|
||||
* ts (recommended) approach for using .js extensions inside the sources.
|
||||
* However, since we don't use this in the polkadot-js code, can kick this
|
||||
* down the line
|
||||
**/
|
||||
export function resolveExtBare (specifier: string, parentUrl: URL | string): Resolved | void {
|
||||
if (specifier.startsWith('.')) {
|
||||
const { parentDir, parentPath } = getParentPath(parentUrl);
|
||||
const found = specifier === '.'
|
||||
? (
|
||||
// handle . imports for <dir>/index.ts
|
||||
EXT_TS_ARRAY
|
||||
.map((e) => path.join(parentDir, `index${e}`))
|
||||
.find((f) => fs.existsSync(f)) ||
|
||||
// handle the case where parentUrl needs an extension (generally via alias)
|
||||
EXT_TS_ARRAY
|
||||
.map((e) => `${parentPath}${e}`)
|
||||
.find((f) => fs.existsSync(f))
|
||||
)
|
||||
: (
|
||||
// tests to see if this is a file (without extension)
|
||||
EXT_TS_ARRAY
|
||||
.map((e) => path.join(parentDir, `${specifier}${e}`))
|
||||
.find((f) => fs.existsSync(f)) ||
|
||||
// test to see if this is a directory
|
||||
EXT_TS_ARRAY
|
||||
.map((e) => path.join(parentDir, `${specifier}/index${e}`))
|
||||
.find((f) => fs.existsSync(f))
|
||||
);
|
||||
|
||||
if (found) {
|
||||
return {
|
||||
format: 'module',
|
||||
shortCircuit: true,
|
||||
url: pathToFileURL(found).href
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* Resolve anything that is not an alias
|
||||
**/
|
||||
export function resolveNonAlias (specifier: string, parentUrl: URL | string): Resolved | void {
|
||||
return (
|
||||
resolveExtTs(specifier, parentUrl) ||
|
||||
resolveExtJs(specifier, parentUrl) ||
|
||||
resolveExtJson(specifier, parentUrl) ||
|
||||
resolveExtBare(specifier, parentUrl)
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * @internal
 *
 * Resolve TS alias mappings as defined in the tsconfig.json file
 *
 * @param specifier - the import specifier being resolved
 * @param _parentUrl - unused; an aliased specifier resolves against the alias' own base url
 * @param aliases - the alias definitions (defaults to those extracted from tsconfig.json)
 **/
export function resolveAlias (specifier: string, _parentUrl: URL | string, aliases = tsAliases): Resolved | void {
  // split the specifier on either posix or windows separators
  const parts = specifier.split(/[\\/]/);
  const found = aliases
    // return a [filter, [...partIndex]] mapping
    .map((alias) => ({
      alias,
      // every index in parts at which this alias' filter matches
      indexes: parts
        .map((_, i) => i)
        .filter((start) =>
          (
            alias.isWildcard
              // parts should have more entries than the wildcard
              ? parts.length > alias.filter.length
              // or the same amount in case of a non-wildcard match
              : parts.length === alias.filter.length
          ) &&
          // match all parts of the alias
          alias.filter.every((f, i) =>
            parts[start + i] &&
            parts[start + i] === f
          )
        )
    }))
    // we only return the first
    .find(({ indexes }) => indexes.length);

  if (found) {
    // do the actual un-aliased resolution: substitute the wildcard tail of the
    // mapped path with the remaining specifier parts, relative to the alias url
    return resolveNonAlias(
      `./${found.alias.path.replace('*', path.join(...parts.slice(found.alias.filter.length)))}`,
      found.alias.url
    );
  }
}
|
||||
|
||||
/**
|
||||
* Resolves a path using our logic.
|
||||
*
|
||||
* 1. First we attempt to directly resolve if .ts/.tsx extension is found
|
||||
* 2. Then we do relative resolves (this is for extension-less .ts files)
|
||||
* 3. The we try to do resolution via TS aliases
|
||||
*
|
||||
* ... finally, try the next loader in the chain
|
||||
*/
|
||||
export function resolve (specifier: string, context: ResolverContext, nextResolve: Resolver) {
|
||||
const parentUrl = context.parentURL || CWD_URL;
|
||||
|
||||
return (
|
||||
resolveNonAlias(specifier, parentUrl) ||
|
||||
resolveAlias(specifier, parentUrl) ||
|
||||
nextResolve(specifier, context)
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,13 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-ts authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Adapted from: https://nodejs.org/api/esm.html#esm_transpiler_loader
//
// NOTE: This assumes the loader implementation for Node.js >= 18

import { loaderOptions } from './common.js';

// Flip the shared loader options into cached mode before re-exporting the
// hooks - the load hook consults this flag to read/write on-disk output.
loaderOptions.isCached = true;

export { resolve } from './resolver.js';
export { load } from './testLoader.js';
|
||||
@@ -0,0 +1,92 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-ts authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import crypto from 'node:crypto';
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
import ts from 'typescript';
|
||||
|
||||
import { EXT_TS_REGEX, loaderOptions } from './common.js';
|
||||
|
||||
/** The result shape returned from a loader load hook */
interface Loaded {
  /** The module format of the returned source */
  format: 'commonjs' | 'module';
  /** When true, no further load hooks in the chain are invoked (per the Node.js loader API) */
  shortCircuit?: boolean;
  /** The (possibly transpiled) source text */
  source: string;
}

/** The signature of the next load hook in the loader chain */
type NexLoad = (url: string, context: Record<string, unknown>) => Promise<Loaded>;
|
||||
|
||||
/**
 * Load all TypeScript files, compile via tsc on-the-fly
 *
 * @param url - the fully-resolved URL of the module being loaded
 * @param context - the loader context passed down the chain
 * @param nextLoad - the next load hook in the loader chain
 **/
export async function load (url: string, context: Record<string, unknown>, nextLoad: NexLoad): Promise<Loaded> {
  if (EXT_TS_REGEX.test(url)) {
    // use the chained loaders to retrieve the raw source
    const { source } = await nextLoad(url, {
      ...context,
      format: 'module'
    });

    // Rewrite legacy `assert { type: 'json' }` import assertions into the newer
    // `with { type: 'json' }` form.
    // This ensures there is support for Node v22 while also maintaining backwards compatibility for testing.
    const modifiedSrc = Buffer.from(source.toString().replace(/assert\s*\{\s*type:\s*'json'\s*\}/g, 'with { type: \'json\' }'), 'utf-8');

    // we use a hash of the source to determine caching
    const sourceHash = `//# sourceHash=${crypto.createHash('sha256').update(modifiedSrc as unknown as string).digest('hex')}`;
    // only sources under a /src/ directory get an on-disk cache location
    // (mirrored into a sibling /build-loader/ tree with a .js extension)
    const compiledFile = url.includes('/src/')
      ? fileURLToPath(
        url
          .replace(/\.tsx?$/, '.js')
          .replace('/src/', '/build-loader/')
      )
      : null;

    // re-use a previously cached compilation when its embedded hash still
    // matches the current source
    if (loaderOptions.isCached && compiledFile && fs.existsSync(compiledFile)) {
      const compiled = fs.readFileSync(compiledFile, 'utf-8');

      if (compiled.includes(sourceHash)) {
        return {
          format: 'module',
          source: compiled
        };
      }
    }

    // compile via typescript
    const { outputText } = ts.transpileModule(modifiedSrc.toString(), {
      compilerOptions: {
        ...(
          // .tsx sources additionally get the automatic React JSX transform
          url.endsWith('.tsx')
            ? { jsx: ts.JsxEmit.ReactJSX }
            : {}
        ),
        esModuleInterop: true,
        importHelpers: true,
        inlineSourceMap: true,
        module: ts.ModuleKind.ESNext,
        moduleResolution: ts.ModuleResolutionKind.NodeNext,
        skipLibCheck: true,
        // Aligns with packages/dev/scripts/polkadot-dev-build-ts & packages/dev/config/tsconfig
        target: ts.ScriptTarget.ES2022
      },
      fileName: fileURLToPath(url)
    });

    // persist the compiled output (tagged with its source hash) to the cache
    if (loaderOptions.isCached && compiledFile) {
      const compiledDir = path.dirname(compiledFile);

      if (!fs.existsSync(compiledDir)) {
        fs.mkdirSync(compiledDir, { recursive: true });
      }

      fs.writeFileSync(compiledFile, `${outputText}\n${sourceHash}`, 'utf-8');
    }

    return {
      format: 'module',
      source: outputText
    };
  }

  // not a TypeScript source, defer to the next loader in the chain
  return nextLoad(url, context);
}
|
||||
@@ -0,0 +1,121 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-ts authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import type { TsAlias } from './types.js';
|
||||
|
||||
import JSON5 from 'json5';
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { pathToFileURL } from 'node:url';
|
||||
|
||||
import { CWD_PATH, CWD_URL, MOD_PATH } from './common.js';
|
||||
|
||||
/** The (relevant) subset of a parsed tsconfig.json file */
interface JsonConfig {
  compilerOptions?: {
    /** The base directory that path mappings are resolved against */
    baseUrl?: string;
    /** The alias-to-target(s) path mappings */
    paths?: Record<string, string[]>;
  };
  /** Parent config(s) this one extends (string[] supported in later TS versions) */
  extends?: string | string[];
}

/** The path/baseUrl information extracted from a config (and its parents) */
interface PartialConfig {
  paths: Record<string, string[]>;
  url?: URL;
}
|
||||
|
||||
/**
 * @internal
 *
 * Extracts the (relevant) tsconfig info, also using extends
 *
 * @param currentPath - the directory containing the config (defaults to the cwd)
 * @param tsconfig - the config filename to read (defaults to tsconfig.json)
 * @param fromFile - the child config that extended into this one (only used for warnings)
 **/
function readConfigFile (currentPath = CWD_PATH, tsconfig = 'tsconfig.json', fromFile?: string): PartialConfig {
  const configFile = path.join(currentPath, tsconfig);

  if (!fs.existsSync(configFile)) {
    console.warn(`No ${configFile}${fromFile ? ` (extended from ${fromFile})` : ''} found, assuming defaults`);

    return { paths: {} };
  }

  try {
    // JSON5 parsing, since tsconfig files may contain comments/trailing commas
    const { compilerOptions, extends: parentConfig } = JSON5.parse<JsonConfig>(fs.readFileSync(configFile, 'utf8'));
    let url: URL | undefined;

    if (compilerOptions?.baseUrl) {
      const configDir = path.dirname(configFile);

      // the baseParentUrl is relative to the actual config file
      url = pathToFileURL(path.join(configDir, `${compilerOptions.baseUrl}/`));
    }

    // empty paths if none are found
    let paths = compilerOptions?.paths || {};

    if (parentConfig) {
      // extends may be a single entry or an array of entries
      const allExtends = Array.isArray(parentConfig)
        ? parentConfig
        : [parentConfig];

      for (const extendsPath of allExtends) {
        // relative extends resolve against this config's directory; bare
        // specifiers resolve from MOD_PATH (presumably node_modules - confirm in common.js)
        const extRoot = extendsPath.startsWith('.')
          ? currentPath
          : MOD_PATH;
        const extSubs = extendsPath.split(/[\\/]/);
        const extPath = path.join(extRoot, ...extSubs.slice(0, -1));
        const extConfig = readConfigFile(extPath, extSubs.at(-1), configFile);

        // base configs are overridden by later configs, order here matters
        // FIXME The paths would be relative to the baseUrl at that point... for
        // now we don't care much since we define these 2 together in all @polkadot
        // configs, but it certainly _may_ create an issue at some point (for others)
        paths = { ...extConfig.paths, ...paths };
        url = url || extConfig.url;
      }
    }

    return url
      ? { paths, url }
      : { paths };
  } catch (error) {
    console.error(`FATAL: Error parsing ${configFile}:: ${(error as Error).message}`);

    throw error;
  }
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
*
|
||||
* Retrieves all TS aliases definitions
|
||||
**/
|
||||
function extractAliases (): TsAlias[] {
|
||||
const { paths, url = CWD_URL } = readConfigFile();
|
||||
|
||||
return Object
|
||||
.entries(paths)
|
||||
.filter((kv): kv is [string, [string, ...string[]]] => !!kv[1].length)
|
||||
// TODO The path value is an array - we only handle the first entry in there,
|
||||
// this is a possible fix into the future if it is ever an issue... (may have
|
||||
// some impacts on the actual loader where only 1 alias is retrieved)
|
||||
.map(([key, [path]]) => {
|
||||
const filter = key.split(/[\\/]/);
|
||||
const isWildcard = filter.at(-1) === '*';
|
||||
|
||||
// ensure that when we have wilcards specified, they always occur in the last position
|
||||
if (filter.filter((f) => f.includes('*')).length !== (isWildcard ? 1 : 0)) {
|
||||
throw new Error(`FATAL: Wildcards in tsconfig.json path entries are only supported in the last position. Invalid ${key}: ${path} mapping`);
|
||||
}
|
||||
|
||||
return {
|
||||
filter: isWildcard
|
||||
? filter.slice(0, -1)
|
||||
: filter,
|
||||
isWildcard,
|
||||
path,
|
||||
url
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/** We only export the aliases from the config */
|
||||
export const tsAliases = extractAliases();
|
||||
@@ -0,0 +1,13 @@
|
||||
// Copyright 2017-2025 @polkadot/dev-ts authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
/** A single TS path-alias mapping extracted from tsconfig.json */
export interface TsAlias {
  /** The alias key split into path segments (with any trailing wildcard removed) */
  filter: string[];
  /** True when the alias key ends in a '*' wildcard */
  isWildcard?: boolean;
  /** The mapped target path (may contain a '*' placeholder) */
  path: string;
  /** The base URL the target path is resolved against */
  url: URL;
}

/** Run-time options shared between the loader entry points */
export interface LoaderOptions {
  /** When true, compiled output is cached on-disk and re-used */
  isCached?: boolean;
}
|
||||
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"extends": "../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"baseUrl": "..",
|
||||
"outDir": "./build",
|
||||
"rootDir": "./src"
|
||||
},
|
||||
"exclude": [
|
||||
"**/mod.ts",
|
||||
"src/**/*.spec.ts"
|
||||
],
|
||||
"include": [
|
||||
"src/**/*"
|
||||
],
|
||||
"references": []
|
||||
}
|
||||
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"extends": "../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"baseUrl": "..",
|
||||
"outDir": "./build",
|
||||
"rootDir": "./src",
|
||||
"emitDeclarationOnly": false,
|
||||
"noEmit": true
|
||||
},
|
||||
"include": [
|
||||
"src/**/*.spec.ts"
|
||||
],
|
||||
"references": [
|
||||
{ "path": "../dev-ts/tsconfig.build.json" },
|
||||
{ "path": "../dev-test/tsconfig.build.json" }
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,547 @@
|
||||
# @pezkuwi/dev
|
||||
|
||||
A collection of shared CI scripts and development environment (configuration, dependencies) used by [@pezkuwi](https://pezkuwi.js.org) projects.
|
||||
|
||||
# Scripts
|
||||
|
||||
## polkadot-ci-ghact-build
|
||||
|
||||
**Summary**:
|
||||
This script automates the continuous integration (CI) process for building, testing, versioning, and publishing packages in the repository. It handles tasks like cleaning the workspace, running tests, updating versions, publishing to npm, GitHub repositories, and Deno, and generating changelogs.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
- **`--skip-beta`**:
|
||||
Prevents incrementing the version to a beta release.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-ci-ghact-build [options]
|
||||
```
|
||||
|
||||
## polkadot-ci-ghact-docs
|
||||
|
||||
**Summary**:
|
||||
This script generates documentation for the repository and deploys it to GitHub Pages. It ensures the documentation is built and published with the correct configuration.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
This script does not accept any CLI arguments.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-ci-ghact-docs
|
||||
```
|
||||
|
||||
## polkadot-ci-ghpages-force
|
||||
|
||||
**Summary**:
|
||||
This script force-refreshes the `gh-pages` branch of the repository by creating a new orphan branch, resetting its contents, and pushing it to GitHub. It ensures a clean state for GitHub Pages deployment.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
This script does not accept any CLI arguments.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-ci-ghpages-force
|
||||
```
|
||||
|
||||
## polkadot-dev-build-docs
|
||||
|
||||
**Summary**:
|
||||
This script prepares the documentation for building by copying the `docs` directory to a `build-docs` directory. If the `build-docs` directory already exists, it is cleared before copying.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
This script does not accept any CLI arguments.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-dev-build-docs
|
||||
```
|
||||
|
||||
## polkadot-dev-build-ts
|
||||
|
||||
**Summary**:
|
||||
This script compiles TypeScript source files into JavaScript outputs using the specified compiler (`tsc`), prepares the build artifacts, rewrites imports for compatibility (e.g., for Deno), lints dependencies, and updates package metadata for distribution. It supports CommonJS, ESM, and Deno outputs, along with configuration validation and export mapping.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
- **`--compiler <type>`**: Specifies the compiler to use for TypeScript compilation.
|
||||
- Acceptable values: `tsc`
|
||||
- Default: `tsc`
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-dev-build-ts [options]
|
||||
```
|
||||
|
||||
## polkadot-dev-circular
|
||||
|
||||
**Summary**:
|
||||
This script checks the project for circular dependencies in TypeScript (`.ts`, `.tsx`) files using the `madge` library. It reports any detected circular dependencies and exits with an error if any are found.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
This script does not accept any CLI arguments.
|
||||
|
||||
```bash
|
||||
yarn polkadot-dev-circular
|
||||
```
|
||||
|
||||
## polkadot-dev-clean-build
|
||||
|
||||
**Summary**:
|
||||
This script removes build artifacts and temporary files from the repository. It targets directories like `build` and files such as `tsconfig.*.tsbuildinfo`, ensuring a clean workspace for fresh builds.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
This script does not accept any CLI arguments.
|
||||
|
||||
```bash
|
||||
yarn polkadot-dev-clean-build
|
||||
```
|
||||
|
||||
## polkadot-dev-contrib
|
||||
|
||||
**Summary**:
|
||||
This script generates a `CONTRIBUTORS` file by aggregating and listing all contributors to the repository based on the Git commit history. It excludes bot accounts and service-related commits (e.g., GitHub Actions, Travis CI). The output includes the number of contributions, contributor names, and their most recent commit hash.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
This script does not accept any CLI arguments.
|
||||
|
||||
```bash
|
||||
yarn polkadot-dev-contrib
|
||||
```
|
||||
|
||||
## polkadot-dev-copy-dir
|
||||
|
||||
**Summary**:
|
||||
This script copies directories from specified source paths to a destination path. It supports options to change the working directory and to flatten the directory structure during copying.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
- **`--cd <path>`**:
|
||||
Specifies a working directory to prepend to the source and destination paths.
|
||||
|
||||
- **`--flatten`**:
|
||||
Copies all files directly to the destination without preserving the source directory structure.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-dev-copy-dir [options] <source>... <destination>
|
||||
```
|
||||
- `<source>`: One or more source directories to copy.
|
||||
- `<destination>`: Destination directory for the copied files.
|
||||
|
||||
## polkadot-dev-copy-to
|
||||
|
||||
**Summary**:
|
||||
This script copies the `build` output and `node_modules` of all packages in the repository to a specified destination directory. It ensures the destination `node_modules` folder exists and is up-to-date.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
- **`<destination>`**:
|
||||
Specifies the target directory where the `node_modules` folder resides.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-dev-copy-to <destination>
|
||||
```
|
||||
|
||||
## polkadot-dev-deno-map
|
||||
|
||||
**Summary**:
|
||||
This script generates a `mod.ts` file and an `import_map.json` file for Deno compatibility. It exports all packages with a `mod.ts` file in their `src` directory and maps their paths for use in Deno.
|
||||
|
||||
### Outputs
|
||||
|
||||
- **`mod.ts`**:
|
||||
An auto-generated TypeScript module exporting all packages for Deno. If the file does not exist, it is created.
|
||||
|
||||
- **`import_map.json`**:
|
||||
A JSON file mapping package paths to their corresponding Deno-compatible build paths. If an `import_map.in.json` file exists, its mappings are merged into the output.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
This script does not accept any CLI arguments.
|
||||
|
||||
## polkadot-dev-run-lint
|
||||
|
||||
**Summary**:
|
||||
This script runs linting and TypeScript checks on the repository. It uses `eslint` for code linting and `tsc` for TypeScript type checking. Specific checks can be skipped using CLI arguments.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
- **`--skip-eslint`**:
|
||||
Skips running `eslint` during the linting process.
|
||||
|
||||
- **`--skip-tsc`**:
|
||||
Skips running the TypeScript (`tsc`) type checker.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-dev-run-lint [options]
|
||||
```
|
||||
|
||||
## polkadot-dev-run-node-ts
|
||||
|
||||
**Summary**:
|
||||
This script executes a Node.js script with TypeScript support, using the `@pezkuwi/dev-ts/cached` loader by default. It dynamically handles global and local loaders and allows for additional Node.js flags to be passed.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
- `<script>`: The TypeScript file to execute.
|
||||
- `[args...]`: Arguments to pass to the executed script.
|
||||
- Node.js flags, such as `--require`, `--loader`, and `--import`, are also supported and processed as follows:
|
||||
- **Global loaders** (e.g., absolute or non-relative paths) are prioritized.
|
||||
- The TypeScript loader is inserted after global loaders.
|
||||
- **Local loaders** (e.g., relative paths starting with `.`) are appended last.
|
||||
|
||||
### Default Behavior
|
||||
|
||||
- Suppresses warnings using the `--no-warnings` flag.
|
||||
- Enables source maps with `--enable-source-maps`.
|
||||
- Uses the `@pezkuwi/dev-ts/cached` loader for TypeScript execution.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-dev-run-node-ts <script> [nodeFlags...] [args...]
|
||||
```
|
||||
|
||||
Notes:
|
||||
|
||||
- The execNodeTs function ensures correct ordering of loaders:
|
||||
1. Global loaders are added first.
|
||||
2. The default TypeScript loader is included.
|
||||
3. Local loaders are appended.
|
||||
- Global and local loaders can be mixed for flexible runtime configurations.
|
||||
|
||||
## polkadot-dev-run-test
|
||||
|
||||
**Summary**:
|
||||
This script runs test files in the repository, filtering by file extensions and optional path-based filters. It supports both Node.js and browser environments, custom flags, and development-specific configurations.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
- **`--dev-build`**:
|
||||
Enables development mode, using local development builds for dependencies and loaders.
|
||||
|
||||
- **`--env <environment>`**:
|
||||
Specifies the test environment.
|
||||
- Acceptable values: `node`, `browser`
|
||||
- Default: `node`
|
||||
|
||||
- **`--bail`**:
|
||||
Stops the test suite on the first failure.
|
||||
|
||||
- **`--console`**:
|
||||
Enables console output during tests.
|
||||
|
||||
- **`--logfile <file>`**:
|
||||
Specifies a log file to capture test output.
|
||||
|
||||
- **`--import <module>`**:
|
||||
Imports the specified module.
|
||||
|
||||
- **`--loader <loader>`**:
|
||||
Specifies a custom Node.js loader.
|
||||
|
||||
- **`--require <module>`**:
|
||||
Preloads the specified module.
|
||||
|
||||
- **Filters**:
|
||||
You can include or exclude specific test files by specifying path-based filters:
|
||||
- Include: `filter` (e.g., `utils`)
|
||||
- Exclude: `^filter` (e.g., `^utils`)
|
||||
|
||||
### Supported Test Files
|
||||
|
||||
The script searches for test files with the following extensions:
|
||||
- **File Types**: `.spec`, `.test`
|
||||
- **Languages**: `.ts`, `.tsx`, `.js`, `.jsx`, `.cjs`, `.mjs`
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-dev-run-test [options] [filters...]
|
||||
```
|
||||
|
||||
### Behavior
|
||||
|
||||
- **Filters:**
|
||||
Filters are applied to include or exclude test files based on their paths. Included filters take precedence, and excluded filters are applied afterward.
|
||||
|
||||
- **Execution:**
|
||||
The script dynamically loads the appropriate environment setup (node or browser) and runs the tests using @pezkuwi/dev-test.
|
||||
|
||||
- **Errors:**
|
||||
If no matching files are found, the script exits with a fatal error.
|
||||
|
||||
- **Development Mode:**
|
||||
In development mode, local build paths are used for test and TypeScript loaders.
|
||||
|
||||
## polkadot-dev-version
|
||||
|
||||
**Summary**:
|
||||
This script automates the version bump process for a package or a monorepo. It updates the `version` field in `package.json` files and synchronizes dependency versions across workspaces. It supports major, minor, patch, and pre-release version bumps.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
- `<type>`: The type of version bump to apply.
|
||||
- Acceptable values: `major`, `minor`, `patch`, `pre`
|
||||
- Required.
|
||||
|
||||
### Behavior
|
||||
|
||||
1. **Version Bump**:
|
||||
- Uses `yarn version` to bump the root package version based on the specified `<type>`.
|
||||
|
||||
2. **Synchronizes Dependencies**:
|
||||
- Updates all `dependencies`, `devDependencies`, `peerDependencies`, `optionalDependencies`, and `resolutions` across all workspace packages to match the new version where applicable.
|
||||
|
||||
3. **Handles `-x` Suffix**:
|
||||
- If the root package's version ends with `-x`, it is temporarily removed before the version bump and re-added afterward for pre-releases.
|
||||
|
||||
4. **Updates Workspaces**:
|
||||
- Loops through all `packages/*` directories to update their `package.json` files with the new version and aligned dependencies.
|
||||
|
||||
5. **Installs Updated Dependencies**:
|
||||
- Runs `yarn install` to apply dependency updates after bumping versions.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-dev-version <type>
|
||||
```
|
||||
|
||||
## polkadot-dev-yarn-only
|
||||
|
||||
|
||||
**Summary**:
|
||||
This script ensures that `yarn` is being used as the package manager. It exits with an error if a different package manager (e.g., `npm`) is detected.
|
||||
|
||||
### Behavior
|
||||
|
||||
1. **Check for Yarn**:
|
||||
- Verifies that the `yarn` package manager is being used by inspecting the `npm_execpath` environment variable.
|
||||
|
||||
2. **Exit on Failure**:
|
||||
- If `yarn` is not detected, the script exits with a fatal error message explaining that `yarn` is required.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-dev-yarn-only
|
||||
```
|
||||
|
||||
## polkadot-exec-eslint
|
||||
|
||||
**Summary**:
|
||||
This script runs the ESLint binary to lint JavaScript and TypeScript files in the project. It uses the ESLint installation local to the project.
|
||||
|
||||
### Behavior
|
||||
|
||||
1. **Import ESLint**:
|
||||
- Dynamically imports and executes the `eslint` binary from the local project's `node_modules`.
|
||||
|
||||
2. **Delegates to ESLint**:
|
||||
- The script acts as a wrapper around the `eslint` command, passing any arguments to it.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-exec-eslint [eslint-arguments]
|
||||
```
|
||||
|
||||
Notes
|
||||
- This script ensures that the locally installed version of ESLint is used, avoiding conflicts with global installations.
|
||||
- All standard ESLint CLI options can be passed directly to the script.
|
||||
|
||||
## polkadot-exec-ghpages
|
||||
|
||||
**Summary**:
|
||||
This script acts as a wrapper for the `gh-pages` tool, which is used to publish content to a project's GitHub Pages branch.
|
||||
|
||||
### Behavior
|
||||
|
||||
1. **Import `gh-pages`**:
|
||||
- Dynamically imports the `gh-pages` binary from the local project's `node_modules`.
|
||||
|
||||
2. **Run `gh-pages`**:
|
||||
- Passes command-line arguments directly to the `gh-pages` tool to execute the desired publishing tasks.
|
||||
|
||||
3. **Output on Success**:
|
||||
- Logs `Published` to the console upon successful completion.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-exec-ghpages [gh-pages-arguments]
|
||||
```
|
||||
|
||||
## polkadot-exec-ghrelease
|
||||
|
||||
**Summary**:
|
||||
This script is a wrapper for the `gh-release` tool, used to create GitHub releases directly from the command line.
|
||||
|
||||
### Behavior
|
||||
|
||||
1. **Import `gh-release`**:
|
||||
- Dynamically imports the `gh-release` binary from the local project's `node_modules`.
|
||||
|
||||
2. **Run `gh-release`**:
|
||||
- Executes the `gh-release` CLI with any provided arguments.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-exec-ghrelease [gh-release-arguments]
|
||||
```
|
||||
|
||||
## polkadot-exec-node-test
|
||||
|
||||
**Summary**:
|
||||
This script is designed to execute Node.js tests using the `node:test` module. It includes support for diagnostic reporting, customizable logging, and execution controls like bail and timeout.
|
||||
|
||||
### Key Features:
|
||||
|
||||
1. **Custom Test Execution**:
|
||||
- Executes tests using the `node:test` framework.
|
||||
- Handles test results, diagnostic messages, and statistics.
|
||||
|
||||
2. **Real-time Feedback**:
|
||||
- Displays progress updates on the console with formatted outputs:
|
||||
- `·` for passed tests.
|
||||
- `x` for failed tests.
|
||||
- `>` for skipped tests.
|
||||
- `!` for todo tests.
|
||||
|
||||
3. **Logging and Debugging**:
|
||||
- Optionally logs errors to a specified file (`--logfile <filename>`).
|
||||
- Outputs detailed diagnostic information when `--console` is used.
|
||||
|
||||
4. **Command-line Options**:
|
||||
- `--bail`: Stops execution after the first test failure.
|
||||
- `--console`: Outputs diagnostic and error details to the console.
|
||||
- `--logfile <file>`: Appends error logs to the specified file.
|
||||
|
||||
5. **Error Reporting**:
|
||||
- Provides structured error output, including filenames, stack traces, and failure types.
|
||||
|
||||
6. **Timeout**:
|
||||
- Configures a default timeout of 1 hour to avoid indefinite hangs.
|
||||
|
||||
### CLI Options:
|
||||
|
||||
- `--bail`: Exit after the first test failure.
|
||||
- `--console`: Print diagnostic details to the console.
|
||||
- `--logfile <file>`: Write failure details to the specified log file.
|
||||
- `<files>`: Specify test files to run (supports glob patterns).
|
||||
|
||||
### Usage:
|
||||
|
||||
```bash
|
||||
yarn polkadot-exec-node-test [options] <files...>
|
||||
```
|
||||
|
||||
## polkadot-exec-rollup
|
||||
|
||||
**Summary**:
|
||||
This script serves as a wrapper for the Rollup CLI, allowing users to execute Rollup commands via Node.js. It simplifies access to the Rollup binary and forwards all provided arguments directly to the Rollup CLI.
|
||||
|
||||
### CLI Arguments
|
||||
|
||||
- **`--config <file>`**:
|
||||
Specifies the Rollup configuration file to use.
|
||||
|
||||
- **`--watch`**:
|
||||
Enables watch mode, automatically rebuilding the bundle on file changes.
|
||||
|
||||
- **`--input <file>`**:
|
||||
Specifies the input file for the build.
|
||||
|
||||
- **`--output <file>`**:
|
||||
Specifies the output file or directory for the build.
|
||||
|
||||
- **`--silent`**:
|
||||
Suppresses Rollup output logs.
|
||||
|
||||
Refer to the [Rollup CLI documentation](https://rollupjs.org/guide/en/#command-line-interface) for a full list of available options.
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-exec-rollup [options]
|
||||
```
|
||||
|
||||
## polkadot-exec-tsc
|
||||
|
||||
**Summary**:
|
||||
This script executes the TypeScript Compiler (TSC) directly by importing the TypeScript library, enabling developers to compile TypeScript files with the same options available in the native `tsc` CLI.
|
||||
|
||||
### Common Options
|
||||
|
||||
- **`--project <file>`**
|
||||
Use a specific `tsconfig.json` file for compilation.
|
||||
|
||||
- **`--watch`**
|
||||
Watch for file changes and recompile automatically.
|
||||
|
||||
- **`--outDir <directory>`**
|
||||
Specify an output directory for compiled files.
|
||||
|
||||
- **`--declaration`**
|
||||
Generate TypeScript declaration files (`.d.ts`).
|
||||
|
||||
- **`--strict`**
|
||||
Enable strict type-checking options.
|
||||
|
||||
Refer to the official [TypeScript Compiler Options](https://www.typescriptlang.org/tsconfig) for a complete list of supported options.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-exec-tsc [options]
|
||||
```
|
||||
|
||||
## polkadot-exec-webpack
|
||||
|
||||
**Summary**:
|
||||
This script directly imports and executes the Webpack CLI, allowing developers to bundle JavaScript applications using Webpack with access to all CLI options provided by the `webpack-cli`.
|
||||
|
||||
### Common Options
|
||||
|
||||
- **`--config <path>`**
|
||||
Specify a path to the Webpack configuration file.
|
||||
|
||||
- **`--mode <mode>`**
|
||||
Set the mode for Webpack. Valid values are `development`, `production`, or `none`.
|
||||
|
||||
- **`--watch`**
|
||||
Watch files for changes and rebuild the bundle automatically.
|
||||
|
||||
- **`--entry <file>`**
|
||||
Specify the entry file for the application.
|
||||
|
||||
- **`--output <path>`**
|
||||
Set the directory or filename for the output bundle.
|
||||
|
||||
Refer to the official [Webpack CLI Options](https://webpack.js.org/api/cli/) for a complete list of supported options.
|
||||
|
||||
### CLI Usage
|
||||
|
||||
```bash
|
||||
yarn polkadot-exec-webpack [options]
|
||||
```
|
||||
@@ -0,0 +1,160 @@
|
||||
// Copyright 2017-2025 @polkadot/dev authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
// @ts-expect-error No definition for this one
|
||||
import eslintJs from '@eslint/js';
|
||||
import tsPlugin from '@typescript-eslint/eslint-plugin';
|
||||
import tsParser from '@typescript-eslint/parser';
|
||||
// @ts-expect-error No definition for this one
|
||||
import standardConfig from 'eslint-config-standard';
|
||||
import deprecationPlugin from 'eslint-plugin-deprecation';
|
||||
// @ts-expect-error No definition for this one
|
||||
import headerPlugin from 'eslint-plugin-header';
|
||||
// @ts-expect-error No definition for this one
|
||||
import importPlugin from 'eslint-plugin-import';
|
||||
// @ts-expect-error No definition for this one
|
||||
import importNewlinesPlugin from 'eslint-plugin-import-newlines';
|
||||
// @ts-expect-error No definition for this one
|
||||
import jestPlugin from 'eslint-plugin-jest';
|
||||
// @ts-expect-error No definition for this one
|
||||
import nPlugin from 'eslint-plugin-n';
|
||||
// @ts-expect-error No definition for this one
|
||||
import promisePlugin from 'eslint-plugin-promise';
|
||||
// @ts-expect-error No definition for this one
|
||||
import reactPlugin from 'eslint-plugin-react';
|
||||
// @ts-expect-error No definition for this one
|
||||
import reactHooksPlugin from 'eslint-plugin-react-hooks';
|
||||
// @ts-expect-error No definition for this one
|
||||
import simpleImportSortPlugin from 'eslint-plugin-simple-import-sort';
|
||||
// @ts-expect-error No definition for this one
|
||||
import sortDestructureKeysPlugin from 'eslint-plugin-sort-destructure-keys';
|
||||
import globals from 'globals';
|
||||
|
||||
import { overrideAll, overrideJs, overrideJsx, overrideSpec } from './eslint.rules.js';
|
||||
|
||||
const EXT_JS = ['.cjs', '.js', '.mjs'];
|
||||
const EXT_TS = ['.ts', '.tsx'];
|
||||
const EXT_ALL = [...EXT_JS, ...EXT_TS];
|
||||
|
||||
/**
 * @internal
 * Converts a list of EXT_* defined above to globs
 * @param {string[]} exts
 * @returns {string[]}
 */
function extsToGlobs (exts) {
  const globs = [];

  for (const ext of exts) {
    globs.push(`**/*${ext}`);
  }

  return globs;
}
|
||||
|
||||
/**
 * Flat ESLint config, applied in array order:
 * 1. global ignores, 2. shared language/plugin/resolver setup,
 * 3. rules for all files, 4. JS-only relaxations,
 * 5. React/JSX additions (tsx + use*.ts hooks), 6. jest spec additions
 */
export default [
  {
    // never lint build output, caches or editor/CI metadata
    ignores: [
      '**/.github/',
      '**/.vscode/',
      '**/.yarn/',
      '**/build/',
      '**/build-*/',
      '**/coverage/'
    ]
  },
  {
    languageOptions: {
      globals: {
        ...globals.browser,
        ...globals.node
      },
      // the TS parser is used for all files (espree only via import/parsers below)
      parser: tsParser,
      parserOptions: {
        ecmaVersion: 'latest',
        project: './tsconfig.eslint.json',
        sourceType: 'module',
        warnOnUnsupportedTypeScriptVersion: false
      }
    },
    plugins: {
      '@typescript-eslint': tsPlugin,
      deprecation: deprecationPlugin,
      header: headerPlugin,
      import: importPlugin,
      'import-newlines': importNewlinesPlugin,
      n: nPlugin,
      promise: promisePlugin,
      'simple-import-sort': simpleImportSortPlugin,
      'sort-destructure-keys': sortDestructureKeysPlugin
    },
    settings: {
      'import/extensions': EXT_ALL,
      // per-extension parser mapping for eslint-plugin-import
      'import/parsers': {
        '@typescript-eslint/parser': EXT_TS,
        espree: EXT_JS
      },
      'import/resolver': {
        node: {
          extensions: EXT_ALL
        },
        typescript: {
          project: './tsconfig.eslint.json'
        }
      }
    }
  },
  {
    // base rule-set for every JS & TS file
    files: extsToGlobs(EXT_ALL),
    rules: {
      ...eslintJs.configs.recommended.rules,
      ...standardConfig.rules,
      ...tsPlugin.configs['recommended-type-checked'].rules,
      ...tsPlugin.configs['stylistic-type-checked'].rules,
      ...overrideAll
    }
  },
  {
    // relax type-aware rules for plain-JS files
    files: extsToGlobs(EXT_JS),
    rules: {
      ...overrideJs
    }
  },
  {
    // React rules for JSX files and hook-style use*.ts files
    files: [
      '**/*.tsx',
      '**/use*.ts'
    ],
    plugins: {
      react: reactPlugin,
      'react-hooks': reactHooksPlugin
    },
    rules: {
      ...reactPlugin.configs.recommended.rules,
      ...reactHooksPlugin.configs.recommended.rules,
      ...overrideJsx
    },
    settings: {
      react: {
        version: 'detect'
      }
    }
  },
  {
    // jest globals & rules for spec files only
    files: [
      '**/*.spec.ts',
      '**/*.spec.tsx'
    ],
    languageOptions: {
      globals: {
        ...globals.jest
      }
    },
    plugins: {
      jest: jestPlugin
    },
    rules: {
      ...jestPlugin.configs.recommended.rules,
      ...overrideSpec
    },
    settings: {
      jest: {
        version: 27
      }
    }
  }
];
|
||||
@@ -0,0 +1,214 @@
|
||||
// Copyright 2017-2025 @polkadot/dev authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import JSON5 from 'json5';
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import process from 'node:process';
|
||||
|
||||
// Rules we know about but intentionally keep off for now
const FIXME = {
  // This is in the new 6.0.0 and we should switch this on
  // at some point. For a first iteration we keep as-is
  '@typescript-eslint/prefer-nullish-coalescing': 'off'
};

/**
 * Returns a copyright header pattern (using tsconfig.base.json)
 *
 * Reads compilerOptions.paths from tsconfig.base.json in the current
 * working directory and builds a regex source matching headers such as
 * " Copyright 2017-2025 @polkadot/<pkg>" for each mapped package.
 *
 * @returns {string}
 */
function getHeaderPattern () {
  const tsPath = path.join(process.cwd(), 'tsconfig.base.json');

  if (!fs.existsSync(tsPath)) {
    throw new Error(`Unable to load ${tsPath}`);
  }

  // JSON5, since the tsconfig may contain comments & trailing commas
  const tsConfig = JSON5.parse(fs.readFileSync(tsPath, 'utf-8'));

  if (!tsConfig?.compilerOptions?.paths) {
    throw new Error(`Unable to extract compilerOptions.paths structure from ${tsPath}`);
  }

  const paths = Object.keys(tsConfig.compilerOptions.paths);

  if (!paths.length) {
    throw new Error(`No keys found in compilerOptions.paths from ${tsPath}`);
  }

  // build an alternation of package names, e.g. "util|util-crypto"
  const packages = paths.reduce((packages, k) => {
    const [pd, pk] = k.split('/');

    if (pd !== '@polkadot' || !pk) {
      throw new Error(`Non @polkadot path in ${tsPath}`);
    }

    return packages.length
      ? `${packages}|${pk}`
      : pk;
  }, '');
  const fullyear = new Date().getFullYear();
  const years = [];

  // two-digit start years 17 .. (fullyear - 2001), i.e. 2017 through the
  // year before the current one; the current year only appears via the
  // optional "-<fullyear>" range suffix below.
  // NOTE(review): a header whose *start* year is the current year (e.g.
  // "Copyright 2025") would not match — confirm this is intended
  for (let i = 17, last = fullyear - 2000; i < last; i++) {
    years.push(`${i}`);
  }

  return ` Copyright 20(${years.join('|')})(-${fullyear})? @polkadot/(${packages})`;
}
|
||||
|
||||
/**
 * Rule overrides applied to every linted file (JS & TS alike); spread
 * last over the recommended/standard presets in eslint.js
 */
export const overrideAll = {
  ...FIXME,
  // the next 2 enforce isolatedModules & verbatimModuleSyntax
  '@typescript-eslint/consistent-type-exports': 'error',
  '@typescript-eslint/consistent-type-imports': 'error',
  '@typescript-eslint/dot-notation': 'error',
  '@typescript-eslint/indent': ['error', 2],
  '@typescript-eslint/no-non-null-assertion': 'error',
  // ts itself checks and ignores those starting with _, align the linting
  '@typescript-eslint/no-unused-vars': ['error', {
    args: 'all',
    argsIgnorePattern: '^_',
    caughtErrors: 'all',
    caughtErrorsIgnorePattern: '^_',
    destructuredArrayIgnorePattern: '^_',
    vars: 'all',
    varsIgnorePattern: '^_'
  }],
  '@typescript-eslint/type-annotation-spacing': 'error',
  'arrow-parens': ['error', 'always'],
  'brace-style': ['error', '1tbs'],
  curly: ['error', 'all'],
  'default-param-last': 'off', // conflicts with TS version
  'deprecation/deprecation': 'error',
  'dot-notation': 'off', // conflicts with TS version
  'func-style': ['error', 'declaration', {
    allowArrowFunctions: true
  }],
  // this does help with declarations, but also
  // applies to invocations, which is an issue...
  // 'function-paren-newline': ['error', 'never'],
  'function-call-argument-newline': ['error', 'consistent'],
  // enforces the 2-line copyright + SPDX header (pattern built at lint
  // startup from tsconfig.base.json, see getHeaderPattern)
  'header/header': ['error', 'line', [
    { pattern: getHeaderPattern() },
    ' SPDX-License-Identifier: Apache-2.0'
  ], 2],
  // single-line imports only (items threshold is effectively "never wrap")
  'import-newlines/enforce': ['error', {
    forceSingleLine: true,
    items: 2048
  }],
  'import/export': 'error',
  'import/extensions': ['error', 'ignorePackages', {
    cjs: 'always',
    js: 'always',
    json: 'always',
    jsx: 'never',
    mjs: 'always',
    ts: 'never',
    tsx: 'never'
  }],
  'import/first': 'error',
  'import/newline-after-import': 'error',
  'import/no-duplicates': 'error',
  'import/order': 'off', // conflicts with simple-import-sort
  indent: 'off', // required as 'off' since typescript-eslint has own versions
  'no-extra-semi': 'error',
  'no-unused-vars': 'off',
  'no-use-before-define': 'off',
  // minProperties 2048 effectively means "never force multi-line"
  'object-curly-newline': ['error', {
    ExportDeclaration: { minProperties: 2048 },
    ImportDeclaration: { minProperties: 2048 },
    ObjectPattern: { minProperties: 2048 }
  }],
  'padding-line-between-statements': [
    'error',
    { blankLine: 'always', next: '*', prev: ['const', 'let', 'var'] },
    { blankLine: 'any', next: ['const', 'let', 'var'], prev: ['const', 'let', 'var'] },
    { blankLine: 'always', next: 'block-like', prev: '*' },
    { blankLine: 'always', next: '*', prev: 'block-like' },
    { blankLine: 'always', next: 'function', prev: '*' },
    { blankLine: 'always', next: '*', prev: 'function' },
    { blankLine: 'always', next: 'try', prev: '*' },
    { blankLine: 'always', next: '*', prev: 'try' },
    { blankLine: 'always', next: 'return', prev: '*' },
    { blankLine: 'always', next: 'import', prev: '*' },
    { blankLine: 'always', next: '*', prev: 'import' },
    { blankLine: 'any', next: 'import', prev: 'import' }
  ],
  semi: ['error', 'always'],
  'simple-import-sort/exports': 'error',
  // \u0000 is the sentinel simple-import-sort uses for side-effect (at
  // start) and type (at end) imports
  'simple-import-sort/imports': ['error', {
    groups: [
      ['^\u0000'], // all side-effects (0 at start)
      ['\u0000$', '^@polkadot.*\u0000$', '^\\..*\u0000$'], // types (0 at end)
      // ['^node:'], // node
      ['^[^/\\.]'], // non-polkadot
      ['^@polkadot'], // polkadot
      ['^\\.\\.(?!/?$)', '^\\.\\./?$', '^\\./(?=.*/)(?!/?$)', '^\\.(?!/?$)', '^\\./?$'] // local (. last)
    ]
  }],
  'sort-destructure-keys/sort-destructure-keys': ['error', {
    caseSensitive: true
  }],
  'sort-keys': 'error',
  'spaced-comment': ['error', 'always', {
    block: {
      // pure export helpers
      markers: ['#__PURE__']
    },
    line: {
      // TS reference types
      markers: ['/ <reference']
    }
  }]
};
|
||||
|
||||
/**
 * React/JSX-specific rule overrides, applied to *.tsx and use*.ts files
 * on top of the react & react-hooks recommended presets
 */
export const overrideJsx = {
  'jsx-quotes': ['error', 'prefer-single'],
  // swap from recommended warning to error
  'react-hooks/exhaustive-deps': 'error',
  'react/jsx-closing-bracket-location': ['warn', 'tag-aligned'],
  'react/jsx-first-prop-new-line': ['warn', 'multiline-multiprop'],
  'react/jsx-fragments': 'error',
  // one prop per line, always
  'react/jsx-max-props-per-line': ['warn', {
    maximum: 1,
    when: 'always'
  }],
  'react/jsx-newline': ['error', {
    prevent: true
  }],
  'react/jsx-no-bind': 'error',
  'react/jsx-props-no-multi-spaces': 'error',
  'react/jsx-sort-props': ['warn', {
    noSortAlphabetically: false
  }],
  'react/jsx-tag-spacing': ['error', {
    afterOpening: 'never',
    beforeClosing: 'never',
    beforeSelfClosing: 'always',
    closingSlash: 'never'
  }],
  'react/prop-types': 'off' // this is a completely broken rule
};
|
||||
|
||||
/**
 * Relaxations for plain-JS files (.cjs/.js/.mjs): the type-aware
 * "unsafe" checks cannot be satisfied without TS type information
 */
export const overrideJs = {
  '@typescript-eslint/explicit-function-return-type': 'off',
  '@typescript-eslint/no-unsafe-argument': 'off',
  '@typescript-eslint/no-unsafe-assignment': 'off',
  '@typescript-eslint/no-unsafe-call': 'off',
  '@typescript-eslint/no-unsafe-member-access': 'off',
  '@typescript-eslint/no-unsafe-return': 'off',
  '@typescript-eslint/no-var-requires': 'off',
  '@typescript-eslint/restrict-plus-operands': 'off',
  '@typescript-eslint/restrict-template-expressions': 'off'
};
|
||||
|
||||
/**
 * Relaxations for *.spec.ts(x) test files, applied on top of the
 * jest recommended preset
 */
export const overrideSpec = {
  // in the specs we are a little less worried about
  // specific correctness, i.e. we can have dangling bits
  '@typescript-eslint/no-unsafe-call': 'off',
  '@typescript-eslint/no-unsafe-member-access': 'off',
  // also accept bare assert(...) calls as expectations
  'jest/expect-expect': ['warn', {
    assertFunctionNames: ['assert', 'expect']
  }]
};
|
||||
@@ -0,0 +1,22 @@
|
||||
// Copyright 2017-2025 @polkadot/dev authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
/**
 * Shared prettier configuration. With requirePragma enabled, it only
 * formats files carrying an explicit @prettier / @format pragma.
 */
module.exports = {
  arrowParens: 'always',
  bracketSpacing: true,
  embeddedLanguageFormatting: 'off',
  endOfLine: 'lf',
  htmlWhitespaceSensitivity: 'ignore',
  // NOTE(review): deprecated since prettier 2.4 (renamed to bracketSameLine)
  // and removed in prettier 3 — confirm the prettier version in use
  jsxBracketSameLine: false,
  jsxSingleQuote: true,
  parser: 'typescript',
  // effectively disables prettier's line wrapping
  printWidth: 2048,
  proseWrap: 'preserve',
  quoteProps: 'as-needed',
  requirePragma: true, // only on those files explicitly asked for
  semi: true,
  singleQuote: true,
  tabWidth: 2,
  trailingComma: 'none',
  useTabs: false
};
|
||||
@@ -0,0 +1,113 @@
|
||||
// Copyright 2017-2025 @polkadot/dev authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import pluginAlias from '@rollup/plugin-alias';
|
||||
import pluginCommonjs from '@rollup/plugin-commonjs';
|
||||
import pluginDynamicImportVars from '@rollup/plugin-dynamic-import-vars';
|
||||
import pluginInject from '@rollup/plugin-inject';
|
||||
import pluginJson from '@rollup/plugin-json';
|
||||
import { nodeResolve as pluginResolve } from '@rollup/plugin-node-resolve';
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import pluginCleanup from 'rollup-plugin-cleanup';
|
||||
|
||||
/** @typedef {{ entries?: Record<string, string>; external: string[]; globals?: Record<string, string>; index?: string; inject?: Record<string, string>; pkg: string; }} BundleDef */
|
||||
/** @typedef {{ file: string; format: 'umd'; generatedCode: Record<string, unknown>; globals: Record<string, string>; inlineDynamicImports: true; intro: string; name: string; }} BundleOutput */
|
||||
/** @typedef {{ context: 'global'; external: string[]; input: string; output: BundleOutput; plugins: any[]; }} Bundle */
|
||||
|
||||
/**
 * Strips the @polkadot/ scope prefix from a package name
 *
 * @param {string} pkg
 * @returns {string}
 */
function sanitizePkg (pkg) {
  // non-global regex: only the first occurrence is removed,
  // exactly like the equivalent string replace
  return pkg.replace(/@polkadot\//, '');
}
|
||||
|
||||
/**
 * Creates the camelCase UMD global name for a package,
 * e.g. @polkadot/api-derive -> polkadotApiDerive
 *
 * @param {string} input
 * @returns {string}
 */
function createName (input) {
  const flat = `polkadot-${sanitizePkg(input)}`.toLowerCase();

  // uppercase any character that follows a run of non-alphanumerics
  return flat.replace(/[^a-zA-Z0-9]+(.)/g, (_, c) => c.toUpperCase());
}
|
||||
|
||||
/**
 * Determines the rollup input path for a package, preferring (in order)
 * the explicitly-supplied index, a built bundle.js, the package.json
 * "browser" field and finally index.js
 *
 * @param {string} pkg
 * @param {string} [index]
 * @returns {string}
 */
export function createInput (pkg, index) {
  const partialPath = `packages/${sanitizePkg(pkg)}/build`;

  // NOTE The parentheses around the fallback are required: without them
  // the || binds tighter than ?: and a supplied index merely becomes part
  // of the ternary condition, always yielding 'bundle.js' instead of the
  // index that was passed in
  return `${partialPath}/${
    index || (
      fs.existsSync(path.join(process.cwd(), partialPath, 'bundle.js'))
        ? 'bundle.js'
        : (
          JSON.parse(fs.readFileSync(path.join(process.cwd(), partialPath, 'package.json'), 'utf8')).browser ||
          'index.js'
        )
    )
  }`;
}
|
||||
|
||||
/**
 * Creates the UMD output configuration for a package bundle
 *
 * @param {string} pkg
 * @param {string[]} external
 * @param {Record<string, string>} globals
 * @returns {BundleOutput}
 */
export function createOutput (pkg, external, globals) {
  const name = sanitizePkg(pkg);
  const allGlobals = {};

  // map each external package to its generated UMD global name ...
  for (const ext of external) {
    allGlobals[ext] = createName(ext);
  }

  // ... with any explicitly-supplied globals taking precedence
  Object.assign(allGlobals, globals);

  return {
    file: `packages/${name}/build/bundle-polkadot-${name}.js`,
    format: 'umd',
    generatedCode: {
      constBindings: true
    },
    globals: allGlobals,
    // combine multi-chunk builds with dynamic imports
    inlineDynamicImports: true,
    // this is a mini x-global, determine where our context lies
    intro: 'const global = typeof globalThis !== "undefined" ? globalThis : typeof self !== "undefined" ? self : window;',
    name: createName(pkg)
  };
}
|
||||
|
||||
/**
 * Builds a complete rollup configuration object for a single package
 * bundle (input, UMD output and the shared plugin pipeline)
 *
 * @param {BundleDef} param0
 * @returns {Bundle}
 */
export function createBundle ({ entries = {}, external, globals = {}, index, inject = {}, pkg }) {
  return {
    // specify this (we define global in the output intro as globalThis || self || window)
    context: 'global',
    external,
    input: createInput(pkg, index),
    output: createOutput(pkg, external, globals),
    // NOTE The expect-error directives are due to rollup plugins, see
    // - https://github.com/rollup/plugins/issues/1488
    // - https://github.com/rollup/plugins/issues/1329
    // (plugin order is significant: alias/json/commonjs run before
    // resolution, cleanup runs last)
    plugins: [
      // @ts-expect-error See the linked rollup issues above
      pluginAlias({ entries }),
      // @ts-expect-error See the linked rollup issues above
      pluginJson(),
      // @ts-expect-error See the linked rollup issues above
      pluginCommonjs(),
      // @ts-expect-error See the linked rollup issues above
      pluginDynamicImportVars(),
      // @ts-expect-error See the linked rollup issues above
      pluginInject(inject),
      pluginResolve({ browser: true }),
      pluginCleanup()
    ]
  };
}
|
||||
@@ -0,0 +1,32 @@
|
||||
{
|
||||
/**
|
||||
   * This uses the strictest configs as the base
|
||||
* https://github.com/tsconfig/bases/blob/f674fa6cbca17062ff02511b02872f8729a597ec/bases/strictest.json
|
||||
*/
|
||||
"extends": "@tsconfig/strictest/tsconfig.json",
|
||||
"compilerOptions": {
|
||||
/**
|
||||
* Aligns with packages/dev/scripts/polkadot-dev-build-ts & packages/dev-ts/src/loader
|
||||
* (target here is specifically tied to the minimum supported Node version)
|
||||
*/
|
||||
"module": "nodenext",
|
||||
"moduleResolution": "nodenext",
|
||||
"target": "es2022",
|
||||
|
||||
/**
|
||||
* Specific compilation configs for polkadot-js projects as it is used
|
||||
* (we only compile *.d.ts via the tsc command-line)
|
||||
*/
|
||||
"declaration": true,
|
||||
"emitDeclarationOnly": true,
|
||||
"jsx": "preserve",
|
||||
"verbatimModuleSyntax": true,
|
||||
|
||||
/**
|
||||
* These appear in strictest, however we don't (yet) use them. For the most part it means
|
||||
* that we actually do have a large number of these lurking (especially on index checks)
|
||||
*/
|
||||
"exactOptionalPropertyTypes": false,
|
||||
"noUncheckedIndexedAccess": false,
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
// Copyright 2017-2025 @polkadot/dev authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
/**
 * Typedoc configuration for docs generation (markdown output into docs/)
 */
module.exports = {
  // skip index, e2e, spec & type-only files from the generated docs
  exclude: '**/*+(index|e2e|spec|types).ts',
  excludeExternals: true,
  // NOTE(review): includeDeclarations & excludeNotExported are legacy
  // typedoc options — verify against the typedoc version actually in use
  excludeNotExported: true,
  excludePrivate: true,
  excludeProtected: true,
  hideGenerator: true,
  includeDeclarations: false,
  module: 'commonjs',
  moduleResolution: 'node',
  name: 'polkadot{.js}',
  out: 'docs',
  // NOTE(review): passed as the string 'true' rather than a boolean —
  // confirm this is what the consuming typedoc version expects
  stripInternal: 'true',
  theme: 'markdown'
};
|
||||
@@ -0,0 +1,107 @@
|
||||
{
|
||||
"author": "Jaco Greeff <jacogr@gmail.com>",
|
||||
"bugs": "https://github.com/pezkuwi/dev/issues",
|
||||
"description": "A collection of shared CI scripts and development environment used by @pezkuwi projects",
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
},
|
||||
"homepage": "https://github.com/pezkuwi/dev/tree/master/packages/dev#readme",
|
||||
"license": "Apache-2.0",
|
||||
"name": "@pezkuwi/dev",
|
||||
"repository": {
|
||||
"directory": "packages/dev",
|
||||
"type": "git",
|
||||
"url": "https://github.com/pezkuwi/dev.git"
|
||||
},
|
||||
"sideEffects": false,
|
||||
"type": "module",
|
||||
"version": "0.84.2",
|
||||
"bin": {
|
||||
"polkadot-ci-ghact-build": "./scripts/polkadot-ci-ghact-build.mjs",
|
||||
"polkadot-ci-ghact-docs": "./scripts/polkadot-ci-ghact-docs.mjs",
|
||||
"polkadot-ci-ghpages-force": "./scripts/polkadot-ci-ghpages-force.mjs",
|
||||
"polkadot-dev-build-docs": "./scripts/polkadot-dev-build-docs.mjs",
|
||||
"polkadot-dev-build-ts": "./scripts/polkadot-dev-build-ts.mjs",
|
||||
"polkadot-dev-circular": "./scripts/polkadot-dev-circular.mjs",
|
||||
"polkadot-dev-clean-build": "./scripts/polkadot-dev-clean-build.mjs",
|
||||
"polkadot-dev-contrib": "./scripts/polkadot-dev-contrib.mjs",
|
||||
"polkadot-dev-copy-dir": "./scripts/polkadot-dev-copy-dir.mjs",
|
||||
"polkadot-dev-copy-to": "./scripts/polkadot-dev-copy-to.mjs",
|
||||
"polkadot-dev-deno-map": "./scripts/polkadot-dev-deno-map.mjs",
|
||||
"polkadot-dev-run-lint": "./scripts/polkadot-dev-run-lint.mjs",
|
||||
"polkadot-dev-run-node-ts": "./scripts/polkadot-dev-run-node-ts.mjs",
|
||||
"polkadot-dev-run-test": "./scripts/polkadot-dev-run-test.mjs",
|
||||
"polkadot-dev-version": "./scripts/polkadot-dev-version.mjs",
|
||||
"polkadot-dev-yarn-only": "./scripts/polkadot-dev-yarn-only.mjs",
|
||||
"polkadot-exec-eslint": "./scripts/polkadot-exec-eslint.mjs",
|
||||
"polkadot-exec-ghpages": "./scripts/polkadot-exec-ghpages.mjs",
|
||||
"polkadot-exec-ghrelease": "./scripts/polkadot-exec-ghrelease.mjs",
|
||||
"polkadot-exec-node-test": "./scripts/polkadot-exec-node-test.mjs",
|
||||
"polkadot-exec-rollup": "./scripts/polkadot-exec-rollup.mjs",
|
||||
"polkadot-exec-tsc": "./scripts/polkadot-exec-tsc.mjs",
|
||||
"polkadot-exec-webpack": "./scripts/polkadot-exec-webpack.mjs"
|
||||
},
|
||||
"exports": {
|
||||
"./config/eslint": "./config/eslint.js",
|
||||
"./config/prettier.cjs": "./config/prettier.cjs",
|
||||
"./config/tsconfig.json": "./config/tsconfig.json",
|
||||
"./rootJs/dynamic.mjs": "./src/rootJs/dynamic.mjs",
|
||||
"./rootJs/testJson.json": "./src/rootJs/testJson.json"
|
||||
},
|
||||
"dependencies": {
|
||||
"@eslint/js": "^8.56.0",
|
||||
"@pezkuwi/dev-test": "^0.84.2",
|
||||
"@pezkuwi/dev-ts": "^0.84.2",
|
||||
"@rollup/plugin-alias": "^5.1.1",
|
||||
"@rollup/plugin-commonjs": "^25.0.8",
|
||||
"@rollup/plugin-dynamic-import-vars": "^2.1.5",
|
||||
"@rollup/plugin-inject": "^5.0.5",
|
||||
"@rollup/plugin-json": "^6.1.0",
|
||||
"@rollup/plugin-node-resolve": "^15.3.1",
|
||||
"@tsconfig/strictest": "^2.0.2",
|
||||
"@typescript-eslint/eslint-plugin": "^6.19.1",
|
||||
"@typescript-eslint/parser": "^6.19.1",
|
||||
"eslint": "^8.56.0",
|
||||
"eslint-config-standard": "^17.1.0",
|
||||
"eslint-import-resolver-node": "^0.3.9",
|
||||
"eslint-import-resolver-typescript": "^3.6.1",
|
||||
"eslint-plugin-deprecation": "^2.0.0",
|
||||
"eslint-plugin-header": "^3.1.1",
|
||||
"eslint-plugin-import": "^2.29.1",
|
||||
"eslint-plugin-import-newlines": "^1.3.4",
|
||||
"eslint-plugin-jest": "^27.6.3",
|
||||
"eslint-plugin-n": "^16.6.2",
|
||||
"eslint-plugin-promise": "^6.1.1",
|
||||
"eslint-plugin-react": "^7.33.2",
|
||||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"eslint-plugin-simple-import-sort": "^10.0.0",
|
||||
"eslint-plugin-sort-destructure-keys": "^1.5.0",
|
||||
"espree": "^9.6.1",
|
||||
"gh-pages": "^6.1.1",
|
||||
"gh-release": "^7.0.2",
|
||||
"globals": "^13.24.0",
|
||||
"json5": "^2.2.3",
|
||||
"madge": "^6.1.0",
|
||||
"rollup": "^4.9.6",
|
||||
"rollup-plugin-cleanup": "^3.2.1",
|
||||
"tslib": "^2.7.0",
|
||||
"typescript": "^5.5.4",
|
||||
"webpack": "^5.89.0",
|
||||
"webpack-cli": "^5.1.4",
|
||||
"webpack-dev-server": "^4.15.1",
|
||||
"webpack-merge": "^5.10.0",
|
||||
"webpack-subresource-integrity": "^5.2.0-rc.1",
|
||||
"yargs": "^17.7.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@testing-library/react": "^14.1.2",
|
||||
"@types/node": "^20.11.5",
|
||||
"@types/react": "^18.2.48",
|
||||
"@types/react-dom": "^18.2.18",
|
||||
"@types/yargs": "^17.0.32",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-is": "^18.2.0",
|
||||
"styled-components": "^6.1.8"
|
||||
}
|
||||
}
|
||||
+540
@@ -0,0 +1,540 @@
|
||||
#!/usr/bin/env node
|
||||
// Copyright 2017-2025 @polkadot/dev authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import fs from 'node:fs';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import process from 'node:process';
|
||||
import yargs from 'yargs';
|
||||
|
||||
import { copyDirSync, copyFileSync, denoCreateDir, execGit, execPm, execSync, exitFatal, GITHUB_REPO, GITHUB_TOKEN_URL, gitSetup, logBin, mkdirpSync, rimrafSync, topoSort } from './util.mjs';
|
||||
|
||||
/** @typedef {Record<string, any>} ChangelogMap */

logBin('polkadot-ci-ghact-build');

// additional publish targets (alongside npm), each maintained as a git repo
const DENO_REPO = 'polkadot-js/build-deno.land';
const BUND_REPO = 'polkadot-js/build-bundle';

// token-authenticated clone URLs for the source & publish repositories
const repo = `${GITHUB_TOKEN_URL}/${GITHUB_REPO}.git`;
const denoRepo = `${GITHUB_TOKEN_URL}/${DENO_REPO}.git`;
const bundRepo = `${GITHUB_TOKEN_URL}/${BUND_REPO}.git`;
const bundClone = 'build-bundle-clone';
const denoClone = 'build-deno-clone';

// publish flags, toggled via one-shot .123* marker files (see getFlags)
// and version checks in verBump
let withDeno = false;
let withBund = false;
let withNpm = false;

// accumulators of [version, ...dirNames] for the changelog commits
/** @type {string[]} */
const shouldDeno = [];
/** @type {string[]} */
const shouldBund = [];

const argv = await yargs(process.argv.slice(2))
  .options({
    'skip-beta': {
      description: 'Do not increment as beta',
      type: 'boolean'
    }
  })
  .strict()
  .argv;
|
||||
|
||||
/**
 * Removes a specific file, returning true if found, false otherwise
 *
 * @param {string} file
 * @returns {boolean}
 */
function rmFile (file) {
  if (!fs.existsSync(file)) {
    return false;
  }

  rimrafSync(file);

  return true;
}
|
||||
|
||||
/**
 * Retrieves the path of the root package.json
 *
 * @returns {string}
 */
function npmGetJsonPath () {
  const cwd = process.cwd();

  return path.resolve(cwd, 'package.json');
}
|
||||
|
||||
/**
 * Retrieves the contents of the root package.json
 *
 * @returns {{ name: string; version: string; versions?: { npm?: string; git?: string } }}
 */
function npmGetJson () {
  const raw = fs.readFileSync(npmGetJsonPath(), 'utf8');

  return JSON.parse(raw);
}
|
||||
|
||||
/**
 * Writes the contents of the root package.json
 * (2-space indented, with a trailing newline)
 *
 * @param {any} json
 */
function npmSetJson (json) {
  const contents = JSON.stringify(json, null, 2);

  fs.writeFileSync(npmGetJsonPath(), contents + '\n');
}
|
||||
|
||||
/**
 * Retrieves the current version included in package.json
 *
 * @returns {string}
 */
function npmGetVersion () {
  const { version } = npmGetJson();

  return version;
}
|
||||
|
||||
/**
 * Sets the current to have an -x version specifier (aka beta);
 * no-op when the suffix is already present
 */
function npmAddVersionX () {
  const json = npmGetJson();

  if (json.version.endsWith('-x')) {
    return;
  }

  json.version = `${json.version}-x`;
  npmSetJson(json);
}
|
||||
|
||||
/**
 * Removes the current -x version specifier (aka beta);
 * no-op when the suffix is not present
 */
function npmDelVersionX () {
  const json = npmGetJson();

  if (!json.version.endsWith('-x')) {
    return;
  }

  json.version = json.version.replace('-x', '');
  npmSetJson(json);
}
|
||||
|
||||
/**
 * Sets the { versions: { npm, git } } fields in package.json and
 * removes the one-shot .123current marker file
 */
function npmSetVersionFields () {
  const json = npmGetJson();
  const versions = json.versions || (json.versions = {});

  // git always tracks the current (possibly -x suffixed) version
  versions.git = json.version;

  // npm only tracks non-beta versions
  if (!json.version.endsWith('-x')) {
    versions.npm = json.version;
  }

  npmSetJson(json);
  rmFile('.123current');
}
|
||||
|
||||
/**
 * Sets the npm token in the home directory (~/.npmrc),
 * read from the NPM_TOKEN environment variable
 */
function npmSetup () {
  const registry = 'registry.npmjs.org';
  const npmrcPath = path.join(os.homedir(), '.npmrc');
  const contents = `//${registry}/:_authToken=${process.env['NPM_TOKEN']}`;

  fs.writeFileSync(npmrcPath, contents);
}
|
||||
|
||||
/**
 * Publishes the current package to npm (from the build/ output), retrying
 * up to 5 times with a 15s pause between attempts
 *
 * @returns {void}
 * @throws {Error} when all publish attempts fail (the original code fell
 * into an infinite retry loop here: after the 5th failure the catch block
 * became a no-op and `while (true)` spun forever with no delay)
 */
function npmPublish () {
  // skip when the package opts out, or npm publishing is not enabled
  if (fs.existsSync('.skip-npm') || !withNpm) {
    return;
  }

  // ensure LICENSE & package.json are part of the published build output
  ['LICENSE', 'package.json']
    .filter((file) => !fs.existsSync(path.join(process.cwd(), 'build', file)))
    .forEach((file) => copyFileSync(file, 'build'));

  process.chdir('build');

  try {
    const tag = npmGetVersion().includes('-') ? '--tag beta' : '';
    const MAX_ATTEMPTS = 5;

    for (let count = 1; count <= MAX_ATTEMPTS; count++) {
      try {
        execSync(`npm publish --quiet --access public ${tag}`);

        return;
      } catch (error) {
        if (count === MAX_ATTEMPTS) {
          // give up after the final attempt instead of looping forever
          throw new Error(`Unable to publish after ${MAX_ATTEMPTS} attempts`, { cause: error });
        }

        console.error(`Publish failed on attempt ${count}/${MAX_ATTEMPTS}. Retrying in 15s`);

        // synchronous sleep (this script is fully synchronous) that does
        // not spin the CPU, unlike the previous Date.now() busy-wait
        Atomics.wait(new Int32Array(new SharedArrayBuffer(4)), 0, 0, 15000);
      }
    }
  } finally {
    // always restore the working directory, even on failure
    process.chdir('..');
  }
}
|
||||
|
||||
/**
 * Creates a nested map of changelog entries from hyphen-split names,
 * marking complete names with an empty-string leaf key
 *
 * @param {string[][]} parts
 * @param {ChangelogMap} result
 * @returns {ChangelogMap}
 */
function createChangelogMap (parts, result = {}) {
  for (const [name, ...rest] of parts) {
    const existing = result[name];

    if (rest.length) {
      // descend into (or create) the nested map for the remaining parts
      if (existing) {
        createChangelogMap([rest], existing);
      } else {
        result[name] = createChangelogMap([rest]);
      }
    } else if (existing) {
      // terminal part of a name: mark with the empty-string leaf
      existing[''] = {};
    } else {
      result[name] = { '': {} };
    }
  }

  return result;
}
|
||||
|
||||
/**
 * Flattens a nested changelog map back into an array of entries,
 * grouping shared prefixes, e.g. { api: { derive, util } } ->
 * 'api-{derive, util}'
 *
 * @param {ChangelogMap} map
 * @returns {string[]}
 */
function createChangelogArr (map) {
  const out = [];

  for (const [name, inner] of Object.entries(map)) {
    // the empty-string key is only a completion marker, never an entry
    if (!name) {
      continue;
    }

    if (inner['']) {
      out.push(name);
    }

    const nested = createChangelogArr(inner);

    if (nested.length === 1) {
      out.push(`${name}-${nested[0]}`);
    } else if (nested.length) {
      out.push(`${name}-{${nested.join(', ')}}`);
    }
  }

  return out;
}
|
||||
|
||||
/**
 * Adds changelog entries
 *
 * Prepends an entry to the "## master" section of CHANGELOG.md in the
 * current directory (creating file/section when missing). The first
 * array element is the version, the rest are package dir names which get
 * grouped by hyphenated prefix, e.g. "api-{derive, util} 1.2.3".
 *
 * @param {string[]} changelog
 * @returns {string} the single-line entry that was written
 */
function addChangelog (changelog) {
  const [version, ...names] = changelog;
  const entry = `${
    createChangelogArr(
      createChangelogMap(
        names
          .sort()
          .map((n) => n.split('-'))
      )
    ).join(', ')
  } ${version}`;
  const newInfo = `## master\n\n- ${entry}\n`;

  if (!fs.existsSync('CHANGELOG.md')) {
    fs.writeFileSync('CHANGELOG.md', `# CHANGELOG\n\n${newInfo}`);
  } else {
    const md = fs.readFileSync('CHANGELOG.md', 'utf-8');

    // extend an existing master section, or insert a fresh one after the title
    fs.writeFileSync('CHANGELOG.md', md.includes('## master\n\n')
      ? md.replace('## master\n\n', newInfo)
      : md.replace('# CHANGELOG\n\n', `# CHANGELOG\n\n${newInfo}\n`)
    );
  }

  return entry;
}
|
||||
|
||||
/**
 * Writes the changelog, then commits & pushes the changes inside a
 * publish clone; no-op when no package names were accumulated
 *
 * @param {string} repo
 * @param {string} clone
 * @param {string[]} names
 */
function commitClone (repo, clone, names) {
  if (!names.length) {
    return;
  }

  process.chdir(clone);

  const entry = addChangelog(names);

  gitSetup();
  execGit('add --all .');
  execGit(`commit --no-status --quiet -m "${entry}"`);
  execGit(`push ${repo}`, true);

  process.chdir('..');
}
|
||||
|
||||
/**
 * Adds this package's UMD bundle (when one was built) to the
 * bundle publish clone
 *
 * @returns {void}
 */
function bundlePublishPkg () {
  const { name, version } = npmGetJson();
  const [, dirName] = name.split('/');
  const bundName = `bundle-polkadot-${dirName}.js`;
  const srcPath = path.join('build', bundName);
  const dstDir = path.join('../..', bundClone);

  // only packages that actually produced a UMD bundle are published
  if (!fs.existsSync(srcPath)) {
    return;
  }

  console.log(`\n *** bundle ${name}`);

  // the first accumulator entry is the version, followed by dir names
  if (!shouldBund.length) {
    shouldBund.push(version);
  }

  shouldBund.push(dirName);

  rimrafSync(path.join(dstDir, bundName));
  copyFileSync(srcPath, dstDir);
}
|
||||
|
||||
/**
 * Publishes all packages to polkadot-js bundles; pre-release versions
 * are only pushed when explicitly flagged via withBund
 *
 * @returns {void}
 */
function bundlePublish () {
  const { version } = npmGetJson();

  if (!withBund && version.includes('-')) {
    return;
  }

  execGit(`clone ${bundRepo} ${bundClone}`, true);
  loopFunc(bundlePublishPkg);
  commitClone(bundRepo, bundClone, shouldBund);
}
|
||||
|
||||
/**
 * Copies this package's build-deno output into the Deno publish clone
 * (skipped when the package opts out or has no deno build)
 *
 * @returns {void}
 */
function denoPublishPkg () {
  const { name, version } = npmGetJson();

  if (fs.existsSync('.skip-deno') || !fs.existsSync('build-deno')) {
    return;
  }

  console.log(`\n *** deno ${name}`);

  const dirName = denoCreateDir(name);
  const denoPath = `../../${denoClone}/${dirName}`;

  // the first accumulator entry is the version, followed by dir names
  if (!shouldDeno.length) {
    shouldDeno.push(version);
  }

  shouldDeno.push(dirName);

  // replace the previous contents wholesale
  rimrafSync(denoPath);
  mkdirpSync(denoPath);

  copyDirSync('build-deno', denoPath);
}
|
||||
|
||||
/**
 * Publishes all packages to Deno
 *
 * Pre-release versions are skipped unless the deno flag was explicitly
 * triggered. Otherwise the Deno repo is cloned, each package's build-deno
 * output is copied across and the result is committed and pushed.
 *
 * @returns {void}
 */
function denoPublish () {
  const { version } = npmGetJson();

  // only publish pre-releases when explicitly requested
  if (!withDeno && version.includes('-')) {
    return;
  }

  execGit(`clone ${denoRepo} ${denoClone}`, true);

  loopFunc(denoPublishPkg);

  commitClone(denoRepo, denoClone, shouldDeno);
}
|
||||
|
||||
/**
 * Retrieves flags based on current specifications
 *
 * Each `.123*` trigger file enables the corresponding publish target.
 * NOTE(review): presumably rmFile deletes the file and returns whether it
 * existed — confirm against util.mjs.
 */
function getFlags () {
  withDeno = rmFile('.123deno');
  withBund = rmFile('.123bundle');
  withNpm = rmFile('.123npm');
}
|
||||
|
||||
/**
 * Bumps the current version, also applying to all sub-packages
 *
 * Decides between a patch bump (stable) and a pre-release bump based on
 * the current version's tag, its patch component and the last-published
 * npm version, toggling the `-x` (do-not-publish) marker as needed.
 */
function verBump () {
  const { version: currentVersion, versions } = npmGetJson();
  // split "1.2.3-beta.4" into "1.2.3" and "beta.4"
  const [version, tag] = currentVersion.split('-');
  const [,, patch] = version.split('.');
  const lastVersion = versions?.npm || currentVersion;

  if (argv['skip-beta'] || patch === '0') {
    // don't allow beta versions
    execPm('polkadot-dev-version patch');
    withNpm = true;
  } else if (tag || currentVersion === lastVersion) {
    // if we don't want to publish, add an X before passing
    if (!withNpm) {
      npmAddVersionX();
    } else {
      npmDelVersionX();
    }

    // beta version, just continue the stream of betas
    execPm('polkadot-dev-version pre');
  } else {
    // manually set, go for publish
    withNpm = true;
  }

  // always ensure we have made some changes, so we can commit
  npmSetVersionFields();
  rmFile('.123trigger');

  execPm('polkadot-dev-contrib');
  execGit('add --all .');
}
|
||||
|
||||
/**
 * Commits and pushes the current version on git
 *
 * Also creates a (draft) GitHub release when an API token is available and
 * the CHANGELOG contains an entry for this version; a missing CHANGELOG
 * entry on a `.1` patch release is fatal.
 */
function gitPush () {
  const version = npmGetVersion();
  let doGHRelease = false;

  if (process.env['GH_RELEASE_GITHUB_API_TOKEN']) {
    const changes = fs.readFileSync('CHANGELOG.md', 'utf8');

    if (changes.includes(`## ${version}`)) {
      doGHRelease = true;
    } else if (version.endsWith('.1')) {
      exitFatal(`Unable to release, no CHANGELOG entry for ${version}`);
    }
  }

  execGit('add --all .');

  // docs output may be gitignored, force-add it when present
  if (fs.existsSync('docs/README.md')) {
    execGit('add --all -f docs');
  }

  // add the skip checks for GitHub ...
  execGit(`commit --no-status --quiet -m "[CI Skip] ${version.includes('-x') ? 'bump' : 'release'}/${version.includes('-') ? 'beta' : 'stable'} ${version}


skip-checks: true"`);

  // Make sure the release commit is on top of the latest master
  execGit(`pull --rebase ${repo} master`);

  // Now push normally
  execGit(`push ${repo} HEAD:${process.env['GITHUB_REF']}`, true);

  if (doGHRelease) {
    const files = process.env['GH_RELEASE_FILES']
      ? `--assets ${process.env['GH_RELEASE_FILES']}`
      : '';

    execPm(`polkadot-exec-ghrelease --draft ${files} --yes`);
  }
}
|
||||
|
||||
/**
 * Executes the supplied function once per publishable package.
 *
 * In a mono-repo (a `packages/` folder exists) every package directory
 * containing both a package.json and a build output is visited in
 * topologically-sorted order, with the cwd switched to the package for
 * the duration of the call. Otherwise the function runs once in the root.
 *
 * @param {() => unknown} fn
 */
function loopFunc (fn) {
  if (!fs.existsSync('packages')) {
    fn();

    return;
  }

  // a package is publishable when it is a directory with both a
  // package.json and a build output folder
  const isPublishable = (dir) => {
    const pkgDir = path.join(process.cwd(), 'packages', dir);

    return (
      fs.statSync(pkgDir).isDirectory() &&
      fs.existsSync(path.join(pkgDir, 'package.json')) &&
      fs.existsSync(path.join(pkgDir, 'build'))
    );
  };

  for (const dir of topoSort(fs.readdirSync('packages').filter(isPublishable))) {
    process.chdir(path.join('packages', dir));
    fn();
    process.chdir('../..');
  }
}
|
||||
|
||||
// first do infrastructure setup
gitSetup();
npmSetup();

// get flags immediate, then adjust (flags may toggle publish targets)
getFlags();
verBump();

// perform the actual CI build (clean, lint, test, build — fail fast)
execPm('polkadot-dev-clean-build');
execPm('lint');
execPm('test');
execPm('build');

// publish to all GH repos (git first, then the mirror repositories)
gitPush();
denoPublish();
bundlePublish();

// publish to npm, package by package
loopFunc(npmPublish);
|
||||
+14
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Builds the documentation and publishes it to GitHub pages via the
// token-authenticated repository URL.

import { execPm, GITHUB_REPO, GITHUB_TOKEN_URL, gitSetup, logBin } from './util.mjs';

const repo = `${GITHUB_TOKEN_URL}/${GITHUB_REPO}.git`;

logBin('polkadot-ci-ghact-docs');

gitSetup();

// generate the docs, then push the GH_PAGES_SRC folder to gh-pages
execPm('run docs');
execPm(`polkadot-exec-ghpages --dotfiles --repo ${repo} --dist ${process.env['GH_PAGES_SRC']} --dest .`, true);
|
||||
+43
@@ -0,0 +1,43 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Rewrites the gh-pages branch as a single-commit orphan branch, dropping
// its (potentially huge) history, then force-pushes it.

import fs from 'node:fs';

import { execGit, logBin } from './util.mjs';

logBin('polkadot-ci-ghpages-force');

// ensure we are on master
execGit('checkout master');

// checkout latest gh-pages, then branch an orphan (history-free) copy
execGit('fetch');
execGit('checkout gh-pages');
execGit('pull');
execGit('checkout --orphan gh-pages-temp');

// ignore files that should never be published to pages
fs.writeFileSync('.gitignore', `
.github/
.vscode/
.yarn/
build/
coverage/
node_modules/
packages/
test/
NOTES.md
`);

// add everything else as the single fresh commit
execGit('add -A');
execGit('commit -am "refresh history"');

// danger: delete the old branch, rename the orphan over it, force-push
execGit('branch -D gh-pages');
execGit('branch -m gh-pages');
execGit('push -f origin gh-pages');

// switch back to master
execGit('checkout master');
|
||||
+19
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Copies the static docs/ folder (when present) into a fresh build-docs/
// output folder.

import fs from 'node:fs';
import path from 'node:path';

import { copyDirSync, logBin, rimrafSync } from './util.mjs';

logBin('polkadot-dev-build-docs');

let docRoot = path.join(process.cwd(), 'docs');

if (fs.existsSync(docRoot)) {
  // docs/ exists — repoint docRoot at the output folder and rebuild it
  docRoot = path.join(process.cwd(), 'build-docs');

  rimrafSync(docRoot);
  copyDirSync(path.join(process.cwd(), 'docs'), docRoot);
}
|
||||
+1518
File diff suppressed because it is too large
Load Diff
+29
@@ -0,0 +1,29 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Scans the TypeScript sources for circular dependencies via madge,
// printing each cycle and failing the run when any are found.

// @ts-expect-error For scripts we don't include @types/* definitions
import madge from 'madge';

import { exitFatal, logBin } from './util.mjs';

logBin('polkadot-dev-circular');

const res = await madge('./', { fileExtensions: ['ts', 'tsx'] });

/** @type {string[][]} */
const circular = res.circular();

if (!circular.length) {
  process.stdout.write('No circular dependency found!\n');
  process.exit(0);
}

// format each cycle as "   n: a > b > c" for the failure report
const err = `Failed with ${circular.length} circular dependencies`;
const all = circular
  .map((files, idx) => `${(idx + 1).toString().padStart(4)}: ${files.join(' > ')}`)
  .join('\n');

process.stdout.write(`\n${err}:\n\n${all}\n\n`);

exitFatal(err);
|
||||
+61
@@ -0,0 +1,61 @@
|
||||
#!/usr/bin/env node
|
||||
// Copyright 2017-2025 @polkadot/dev authors & contributors
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
import fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
|
||||
import { logBin, PATHS_BUILD, rimrafSync } from './util.mjs';
|
||||
|
||||
const PKGS = path.join(process.cwd(), 'packages');
|
||||
const DIRS = PATHS_BUILD.map((d) => `build${d}`);
|
||||
|
||||
logBin('polkadot-dev-clean-build');
|
||||
|
||||
/**
 * @internal
 *
 * Collects the removal candidates inside a directory: every build output
 * folder (as named by DIRS) plus any tsconfig.*.tsbuildinfo
 * incremental-build state file found there.
 *
 * @param {string} dir
 * @returns {string[]}
 */
function getPaths (dir) {
  if (!fs.existsSync(dir)) {
    return [];
  }

  const buildInfos = fs
    .readdirSync(dir)
    .filter((p) => p.startsWith('tsconfig.') && p.endsWith('.tsbuildinfo'))
    .map((p) => path.join(dir, p));

  // build folders first (matching the seeded-reduce ordering), then infos
  return DIRS.map((p) => path.join(dir, p)).concat(buildInfos);
}
|
||||
|
||||
/**
 * @internal
 *
 * Removes all the specified directories
 *
 * @param {string[]} dirs
 */
function cleanDirs (dirs) {
  for (const dir of dirs) {
    rimrafSync(dir);
  }
}
|
||||
|
||||
// clean the repository root first
cleanDirs(getPaths(process.cwd()));

if (fs.existsSync(PKGS)) {
  // clean the packages/ folder itself, then every package directory in it
  cleanDirs(getPaths(PKGS));
  cleanDirs(
    fs
      .readdirSync(PKGS)
      .map((f) => path.join(PKGS, f))
      .filter((f) => fs.statSync(f).isDirectory())
      .reduce((/** @type {string[]} */ res, d) => res.concat(getPaths(d)), [])
  );
}
|
||||
+74
@@ -0,0 +1,74 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Regenerates the CONTRIBUTORS file from `git shortlog`, merging duplicate
// identities, excluding bots/CI authors and annotating each contributor
// with the date line of their most recent commit.

import fs from 'node:fs';

import { execGit, logBin, mkdirpSync } from './util.mjs';

const tmpDir = 'packages/build';
const tmpFile = `${tmpDir}/CONTRIBUTORS`;

logBin('polkadot-dev-contrib');

// capture "count\tName <email>" lines into a scratch file
mkdirpSync(tmpDir);
execGit(`shortlog master -e -n -s > ${tmpFile}`);

fs.writeFileSync(
  'CONTRIBUTORS',
  Object
    .entries(
      fs
        .readFileSync(tmpFile, 'utf-8')
        .split('\n')
        .map((l) => l.trim())
        .filter((l) => !!l)
        .reduce((/** @type {Record<string, { count: number; name: string; }>} */ all, line) => {
          // each line is "<count>\t<name> <email>" from shortlog -e -n -s
          const [c, e] = line.split('\t');
          const count = parseInt(c, 10);
          const [name, rest] = e.split(' <');
          // drop CI/bot identities
          const isExcluded = (
            ['GitHub', 'Travis CI'].some((n) => name.startsWith(n)) ||
            ['>', 'action@github.com>'].some((e) => rest === e) ||
            [name, rest].some((n) => n.includes('[bot]'))
          );

          if (!isExcluded) {
            let [email] = rest.split('>');

            // merge a new email into an existing entry with the same
            // (multi-word) display name
            if (!all[email]) {
              email = Object.keys(all).find((k) =>
                name.includes(' ') &&
                all[k].name === name
              ) || email;
            }

            if (all[email]) {
              all[email].count += count;
            } else {
              all[email] = { count, name };
            }
          }

          return all;
        }, {})
    )
    // sort by commit count descending, ties broken by name
    .sort((a, b) => {
      const diff = b[1].count - a[1].count;

      return diff === 0
        ? a[1].name.localeCompare(b[1].name)
        : diff;
    })
    .map(([email, { count, name }], i) => {
      // grab the latest commit for this author; line 4 of `git log -1`
      // output is used as the annotation (presumably the Date: line —
      // confirm against git log's default format)
      execGit(`log master -1 --author=${email} > ${tmpFile}-${i}`);

      const commit = fs
        .readFileSync(`${tmpFile}-${i}`, 'utf-8')
        .split('\n')[4]
        .trim();

      return `${`${count}`.padStart(8)}\t${name.padEnd(30)}\t${commit}`;
    })
    .join('\n')
);
|
||||
+44
@@ -0,0 +1,44 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Copies one or more <source> directories into a <destination>, optionally
// re-rooted under --cd and (unless --flatten) nested under the source name.

import { copyDirSync, exitFatal, logBin } from './util.mjs';

const argv = process.argv.slice(2);
const args = [];
let cd = '';
let flatten = false;

// split flags from the positional <source>.../<destination> arguments
for (let i = 0; i < argv.length; i++) {
  switch (argv[i]) {
    case '--cd':
      cd = argv[++i];
      break;
    case '--flatten':
      flatten = true;
      break;
    default:
      args.push(argv[i]);
      break;
  }
}

const sources = args.slice(0, args.length - 1);
const dest = args[args.length - 1];

logBin('polkadot-dev-copy-dir');

// FIX: the previous check used `!sources`, which is always false since
// Array.prototype.slice always returns a (truthy) array — so a missing
// <source> argument was silently accepted; check the length instead
if (sources.length === 0 || !dest) {
  exitFatal('Expected at least one <source>... and one <destination> argument');
}

sources.forEach((src) =>
  copyDirSync(
    cd
      ? `${cd}/${src}`
      : src,
    cd
      ? `${cd}/${dest}${flatten ? '' : `/${src}`}`
      : `${dest}${flatten ? '' : `/${src}`}`
  )
);
|
||||
+53
@@ -0,0 +1,53 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Builds the current mono-repo and copies every package's build output
// (plus its node_modules) into a sibling project's node_modules, for local
// cross-project testing.

import fs from 'node:fs';
import path from 'node:path';

import { copyDirSync, execPm, exitFatal, logBin, mkdirpSync, rimrafSync } from './util.mjs';

const args = process.argv.slice(2);

logBin('polkadot-dev-copy-to');

if (args.length !== 1) {
  exitFatal('Expected one <destination> argument');
}

// destination is <cwd>/../<arg>/node_modules — i.e. a sibling checkout
const dest = path.join(process.cwd(), '..', args[0], 'node_modules');

if (!fs.existsSync(dest)) {
  exitFatal('Destination node_modules folder does not exist');
}

// build to ensure we actually have latest
execPm('build');

// map across what is available and copy it
fs
  .readdirSync('packages')
  .map((dir) => {
    const pkgPath = path.join(process.cwd(), 'packages', dir);

    return [pkgPath, path.join(pkgPath, 'package.json')];
  })
  .filter(([, jsonPath]) => fs.existsSync(jsonPath))
  .map(([pkgPath, json]) => [JSON.parse(fs.readFileSync(json, 'utf8')).name, pkgPath])
  .forEach(([name, pkgPath]) => {
    console.log(`*** Copying ${name} to ${dest}`);

    const outDest = path.join(dest, name);

    // remove the destination
    rimrafSync(outDest);

    // create the root
    mkdirpSync(outDest);

    // copy the build output
    copyDirSync(path.join(pkgPath, 'build'), outDest);

    // copy node_modules, as available
    copyDirSync(path.join(pkgPath, 'node_modules'), path.join(outDest, 'node_modules'));
  });
|
||||
+35
@@ -0,0 +1,35 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Generates the Deno entry points: a root mod.ts re-exporting every
// package that ships a src/mod.ts, and an import_map.json mapping each
// package prefix to its build-deno output (merged with any
// import_map.in.json overrides).

import fs from 'node:fs';

import { DENO_POL_PRE } from './util.mjs';

// e: export lines for mod.ts; i: import-map entries
const [e, i] = fs
  .readdirSync('packages')
  .filter((p) => fs.existsSync(`packages/${p}/src/mod.ts`))
  .sort()
  .reduce((/** @type {[string[], Record<string, string>]} */ [e, i], p) => {
    e.push(`export * as ${p.replace(/-/g, '_')} from '${DENO_POL_PRE}/${p}/mod.ts';`);
    i[`${DENO_POL_PRE}/${p}/`] = `./packages/${p}/build-deno/`;

    return [e, i];
  }, [[], {}]);

// only scaffold mod.ts once — an existing (possibly hand-edited) file wins
if (!fs.existsSync('mod.ts')) {
  fs.writeFileSync('mod.ts', `// Copyright 2017-${new Date().getFullYear()} @polkadot/dev authors & contributors\n// SPDX-License-Identifier: Apache-2.0\n\n// auto-generated via polkadot-dev-deno-map, do not edit\n\n// This is a Deno file, so we can allow .ts imports
/* eslint-disable import/extensions */\n\n${e.join('\n')}\n`);
}

// merge user-supplied mappings over the generated ones
if (fs.existsSync('import_map.in.json')) {
  const o = JSON.parse(fs.readFileSync('import_map.in.json', 'utf-8'));

  Object
    .entries(o.imports)
    .forEach(([k, v]) => {
      i[k] = v;
    });
}

fs.writeFileSync('import_map.json', JSON.stringify({ imports: i }, null, 2));
|
||||
+40
@@ -0,0 +1,40 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Runs eslint (with --fix locally, read-only on CI) and a no-emit tsc
// type-check, each individually skippable via flags.

import process from 'node:process';
import yargs from 'yargs';

import { __dirname, execPm, GITHUB_REPO, logBin } from './util.mjs';

const TS_CONFIG_BUILD = true;

logBin('polkadot-dev-run-lint');

// Since yargs can also be a promise, we just relax the type here completely
const argv = await yargs(process.argv.slice(2))
  .options({
    'skip-eslint': {
      description: 'Skips running eslint',
      type: 'boolean'
    },
    'skip-tsc': {
      description: 'Skips running tsc',
      type: 'boolean'
    }
  })
  .strict()
  .argv;

if (!argv['skip-eslint']) {
  // We don't want to run with fix on CI (GITHUB_REPO is set there)
  const extra = GITHUB_REPO
    ? ''
    : '--fix';

  execPm(`polkadot-exec-eslint ${extra} ${process.cwd()}`);
}

if (!argv['skip-tsc']) {
  // type-check only: no output, against the build tsconfig when enabled
  execPm(`polkadot-exec-tsc --noEmit --emitDeclarationOnly false --pretty${TS_CONFIG_BUILD ? ' --project tsconfig.build.json' : ''}`);
}
|
||||
+9
@@ -0,0 +1,9 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Thin wrapper: forwards all CLI arguments to the TS-aware node executor.

import { execNodeTs, logBin } from './util.mjs';

logBin('polkadot-run-node-ts');

execNodeTs(process.argv.slice(2).join(' '));
|
||||
+163
@@ -0,0 +1,163 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Discovers *.spec/*.test files under packages/, applies include/exclude
// path filters from the CLI and launches them through the node:test-based
// runner with the requested environment shims.

import process from 'node:process';

import { execNodeTs, exitFatal, exitFatalEngine, importPath, logBin, readdirSync } from './util.mjs';

// A & B are just helpers here and in the errors below
const EXT_A = ['spec', 'test'];
const EXT_B = ['ts', 'tsx', 'js', 'jsx', 'cjs', 'mjs'];

// The actual extensions we are looking for (".spec.ts", ".test.js", ...)
const EXTS = EXT_A.reduce((/** @type {string[]} */ exts, s) => exts.concat(...EXT_B.map((e) => `.${s}.${e}`)), []);

logBin('polkadot-dev-run-test');

exitFatalEngine();

const cmd = [];
const nodeFlags = [];
const filters = [];

// path filters: keys starting with '^' on the CLI go to excl, others incl;
// specifying the same key twice toggles it off again
/** @type {Record<string, string[]>} */
const filtersExcl = {};
/** @type {Record<string, string[]>} */
const filtersIncl = {};

const args = process.argv.slice(2);
let testEnv = 'node';
let isDev = false;

for (let i = 0; i < args.length; i++) {
  switch (args[i]) {
    // when running inside a dev environment, specifically @polkadot/dev
    case '--dev-build':
      isDev = true;
      break;

    // environment, not passed-through
    case '--env':
      if (!['browser', 'node'].includes(args[++i])) {
        throw new Error(`Invalid --env ${args[i]}, expected 'browser' or 'node'`);
      }

      testEnv = args[i];
      break;

    // internal flags with no params
    case '--bail':
    case '--console':
      cmd.push(args[i]);
      break;

    // internal flags, with params
    case '--logfile':
      cmd.push(args[i]);
      cmd.push(args[++i]);
      break;

    // node flags that could have additional params
    case '--import':
    case '--loader':
    case '--require':
      nodeFlags.push(args[i]);
      nodeFlags.push(args[++i]);
      break;

    // any other non-flag arguments are passed-through
    default:
      if (args[i].startsWith('-')) {
        throw new Error(`Unknown flag ${args[i]} found`);
      }

      filters.push(args[i]);

      // '^'-prefixed filters exclude; repeating a key removes it from the
      // opposite set (acts as a toggle between include/exclude)
      if (args[i].startsWith('^')) {
        const key = args[i].slice(1);

        if (filtersIncl[key]) {
          delete filtersIncl[key];
        } else {
          filtersExcl[key] = key.split(/[\\/]/);
        }
      } else {
        const key = args[i];

        if (filtersExcl[key]) {
          delete filtersExcl[key];
        } else {
          filtersIncl[key] = key.split(/[\\/]/);
        }
      }

      break;
  }
}
|
||||
|
||||
/**
 * Tests a path (pre-split into parts) against a set of path filters.
 *
 * A filter matches when its sequence of segments can be anchored at some
 * index of the path: a segment starting with ':' is a substring match,
 * the final segment is a prefix match and every other segment must match
 * exactly. Returns true when any filter matches.
 *
 * @param {string[]} parts
 * @param {Record<string, string[]>} filters
 * @returns {boolean}
 */
function applyFilters (parts, filters) {
  // one segment comparison: ':' prefix => contains, last => startsWith,
  // otherwise exact equality
  const matchSegment = (part, seg, isLast) =>
    seg.startsWith(':')
      ? part.includes(seg.slice(1))
      : isLast
        ? part.startsWith(seg)
        : part === seg;

  return Object.values(filters).some((filter) => {
    for (let start = 0; start < parts.length; start++) {
      // candidate anchors are positions where the first segment matches
      if (!matchSegment(parts[start], filter[0], filter.length === 1)) {
        continue;
      }

      const isFullMatch = filter.every((seg, idx) =>
        // a missing or empty path part never matches
        !!parts[start + idx] &&
        matchSegment(parts[start + idx], seg, idx === filter.length - 1)
      );

      if (isFullMatch) {
        return true;
      }
    }

    return false;
  });
}
|
||||
|
||||
// collect all spec/test files, honouring include filters first and then
// exclude filters
const files = readdirSync('packages', EXTS).filter((file) => {
  const parts = file.split(/[\\/]/);
  let isIncluded = true;

  if (Object.keys(filtersIncl).length) {
    isIncluded = applyFilters(parts, filtersIncl);
  }

  if (isIncluded && Object.keys(filtersExcl).length) {
    isIncluded = !applyFilters(parts, filtersExcl);
  }

  return isIncluded;
});

if (files.length === 0) {
  exitFatal(`No files matching *.{${EXT_A.join(', ')}}.{${EXT_B.join(', ')}} found${filters.length ? ` (filtering on ${filters.join(', ')})` : ''}`);
}

try {
  // hand the file list to the node-test wrapper script
  const allFlags = `${importPath('@polkadot/dev/scripts/polkadot-exec-node-test.mjs')} ${[...cmd, ...files].join(' ')}`;

  // pre-load the environment shim (browser/node globals); when developing
  // @polkadot/dev itself use the local build outputs instead
  nodeFlags.push('--require');
  nodeFlags.push(
    isDev
      ? `./packages/dev-test/build/cjs/${testEnv}.js`
      : `@polkadot/dev-test/${testEnv}`
  );

  execNodeTs(allFlags, nodeFlags, false, isDev ? './packages/dev-ts/build/testCached.js' : '@polkadot/dev-ts/testCached');
} catch {
  // the runner already reported the failures; just flag the exit code
  process.exit(1);
}
|
||||
+143
@@ -0,0 +1,143 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Bumps the root package version (major/minor/patch/pre) and mirrors the
// new version into every package under packages/.

import fs from 'node:fs';
import path from 'node:path';
import yargs from 'yargs';

import { execPm, exitFatal, logBin } from './util.mjs';

/** @typedef {{ dependencies?: Record<string, string>; devDependencies?: Record<string, string>; peerDependencies?: Record<string, string>; optionalDependencies?: Record<string, string>; resolutions?: Record<string, string>; name?: string; stableVersion?: string; version: string; }} PkgJson */

const TYPES = ['major', 'minor', 'patch', 'pre'];

// the bump type is the first positional argument
const [type] = (
  await yargs(process.argv.slice(2))
    .demandCommand(1)
    .argv
)._;

if (typeof type !== 'string' || !TYPES.includes(type)) {
  exitFatal(`Invalid version bump "${type}", expected one of ${TYPES.join(', ')}`);
}
|
||||
|
||||
/**
 * Rewrites a dependency map so that every in-repo sibling package (listed
 * in `others`) is pinned to the supplied version — preserving a leading
 * caret and leaving '*' ranges and external packages untouched. Entries
 * are emitted in locale-sorted key order.
 *
 * @param {Record<string, string>} dependencies
 * @param {string[]} others
 * @param {string} version
 * @returns {Record<string, string>}
 */
function updateDependencies (dependencies, others, version) {
  /** @type {Record<string, string>} */
  const result = {};
  const sortedKeys = Object.keys(dependencies).sort((a, b) => a.localeCompare(b));

  for (const key of sortedKeys) {
    const value = dependencies[key];

    if (!others.includes(key) || value === '*') {
      result[key] = value;
    } else if (value.startsWith('^')) {
      result[key] = `^${version}`;
    } else {
      result[key] = version;
    }
  }

  return result;
}
|
||||
|
||||
/**
 * Reads the package.json in the current working directory.
 *
 * @returns {[string, PkgJson]} the absolute path and the parsed contents
 */
function readCurrentPkgJson () {
  const rootPath = path.join(process.cwd(), 'package.json');
  const rootJson = JSON.parse(fs.readFileSync(rootPath, 'utf8'));

  return [rootPath, rootJson];
}
|
||||
|
||||
/**
 * Serializes a package.json structure to disk as 2-space-indented JSON
 * with a trailing newline.
 *
 * @param {string} filePath
 * @param {unknown} json
 */
function writePkgJson (filePath, json) {
  const contents = JSON.stringify(json, null, 2);

  fs.writeFileSync(filePath, contents + '\n');
}
|
||||
|
||||
/**
 * Rewrites a single package.json with the new version: sets `version`,
 * re-pins all dependency sections via updateDependencies, drops any
 * `stableVersion` field and preserves every other key (in original order).
 *
 * @param {string} version
 * @param {string[]} others - names of sibling packages to pin
 * @param {string} pkgPath - path of the package.json to rewrite
 * @param {Record<String, any>} json - its current parsed contents
 */
function updatePackage (version, others, pkgPath, json) {
  const updated = Object
    .keys(json)
    .reduce((/** @type {Record<String, unknown>} */ result, key) => {
      if (key === 'version') {
        result[key] = version;
      } else if (['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies', 'resolutions'].includes(key)) {
        result[key] = updateDependencies(json[key], others, version);
      } else if (key !== 'stableVersion') {
        // stableVersion is intentionally dropped; everything else is kept
        result[key] = json[key];
      }

      return result;
    }, {});

  writePkgJson(pkgPath, updated);
}
|
||||
|
||||
/**
 * Strips a trailing `-x` (do-not-publish marker) from the root package
 * version, writing the result back to disk.
 *
 * @returns {boolean} true when a marker was present and removed
 */
function removeX () {
  const [rootPath, json] = readCurrentPkgJson();
  const hasMarker = !!json.version?.endsWith('-x');

  if (hasMarker) {
    json.version = json.version.replace('-x', '');
    writePkgJson(rootPath, json);
  }

  return hasMarker;
}
|
||||
|
||||
/**
 * Appends the `-x` (do-not-publish marker) to the root package version,
 * writing the result back to disk.
 *
 * @returns {boolean} true when the marker was added (false if present)
 */
function addX () {
  const [rootPath, json] = readCurrentPkgJson();

  // already marked — nothing to do
  if (json.version.endsWith('-x')) {
    return false;
  }

  json.version += '-x';
  writePkgJson(rootPath, json);

  return true;
}
|
||||
|
||||
logBin('polkadot-dev-version');

// temporarily drop the -x marker so the package-manager bump works on a
// clean semver; restore it afterwards for pre-releases
const isX = removeX();

execPm(`version ${type === 'pre' ? 'prerelease' : type}`);

if (isX && type === 'pre') {
  addX();
}

const [rootPath, rootJson] = readCurrentPkgJson();

updatePackage(rootJson.version, [], rootPath, rootJson);

// yarn workspaces does an OOM, manual looping takes ages
if (fs.existsSync('packages')) {
  const packages = fs
    .readdirSync('packages')
    .map((dir) => path.join(process.cwd(), 'packages', dir, 'package.json'))
    .filter((pkgPath) => fs.existsSync(pkgPath))
    .map((pkgPath) => [pkgPath, JSON.parse(fs.readFileSync(pkgPath, 'utf8'))]);
  const others = packages.map(([, json]) => json.name);

  packages.forEach(([pkgPath, json]) => {
    updatePackage(rootJson.version, others, pkgPath, json);
  });
}

// refresh the lockfile against the rewritten manifests
execPm('install');
|
||||
+11
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Guard script: fails the run when not executed under yarn.

import process from 'node:process';

import { exitFatalYarn } from './util.mjs';

exitFatalYarn();

process.exit(0);
|
||||
+7
@@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Proxies to the locally-installed eslint CLI.

import { importRelative } from './util.mjs';

await importRelative('eslint', 'eslint/bin/eslint.js');
|
||||
+11
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Proxies to the locally-installed gh-pages CLI, passing argv through.

import { importRelative } from './util.mjs';

const ghp = await importRelative('gh-pages', 'gh-pages/bin/gh-pages.js');

await ghp.default(process.argv);

console.log('Published');
|
||||
+7
@@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Proxies to the locally-installed gh-release CLI.

import { importRelative } from './util.mjs';

await importRelative('gh-release', 'gh-release/bin/cli.js');
|
||||
+368
@@ -0,0 +1,368 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// For Node 18, earliest usable is 18.14:
//
// - node:test added in 18.0,
// - run method exposed in 18.9,
// - mock in 18.13,
// - diagnostics changed in 18.14
//
// Node 16 is not supported:
//
// - node:test added is 16.17,
// - run method exposed in 16.19,
// - mock not available

import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import process from 'node:process';
import { run } from 'node:test';
import { isMainThread, parentPort, Worker, workerData } from 'node:worker_threads';

// NOTE error should be defined as "Error", however the @types/node definitions doesn't include all
/** @typedef {{ file?: string; message?: string; }} DiagStat */
/** @typedef {{ details: { type: string; duration_ms: number; error: { message: string; failureType: unknown; stack: string; cause: { code: number; message: string; stack: string; generatedMessage?: any; }; code: number; } }; file?: string; name: string; testNumber: number; nesting: number; }} FailStat */
/** @typedef {{ details: { duration_ms: number }; name: string; }} PassStat */
/** @typedef {{ diag: DiagStat[]; fail: FailStat[]; pass: PassStat[]; skip: unknown[]; todo: unknown[]; total: number; [key: string]: any; }} Stats */

// started immediately so the final summary reports wall-clock time
console.time('\t elapsed :');

const WITH_DEBUG = false;

const args = process.argv.slice(2);
/** @type {string[]} */
const files = [];

// accumulated per-outcome results, filled as the runner streams events
/** @type {Stats} */
const stats = {
  diag: [],
  fail: [],
  pass: [],
  skip: [],
  todo: [],
  total: 0
};
/** @type {string | null} */
let logFile = null;
/** @type {number} */
let startAt = 0;
/** @type {boolean} */
let bail = false;
/** @type {boolean} */
let toConsole = false;
/** @type {number} */
let progressRowCount = 0;

// split flags from the list of test files
for (let i = 0; i < args.length; i++) {
  if (args[i] === '--bail') {
    bail = true;
  } else if (args[i] === '--console') {
    toConsole = true;
  } else if (args[i] === '--logfile') {
    logFile = args[++i];
  } else {
    files.push(args[i]);
  }
}
|
||||
|
||||
/**
 * @internal
 *
 * Re-flows a (possibly multi-line) string onto fresh lines at the given
 * nesting depth (0 to 3+ maps to 0 to 3 tabs). Each line is trimmed; the
 * first carries the `start` prefix and continuation lines are aligned
 * under it with spaces.
 *
 * @param {number} count
 * @param {string} str
 * @param {string} start
 * @returns {string}
 */
function indent (count, str = '', start = '') {
  // any depth other than 0/1/2 is clamped to 3 tabs (mirrors the original
  // switch default)
  const depth = [0, 1, 2].includes(count) ? count : 3;
  const pre = `\n${'\t'.repeat(depth)} `;
  const contAlign = start ? ' '.padStart(start.length, ' ') : '';
  const body = str
    .split('\n')
    .map((line) => line.trim())
    .join(`${pre}${contAlign}`);

  return `${pre}${start}${body}\n`;
}
|
||||
|
||||
/**
 * Derives the most useful source filename for a failed test result:
 * prefer the reported file when it is itself a spec/test file, otherwise
 * walk the error cause's stack for the first `(path:line:col)` location
 * inside a spec/test file, falling back to the reported file.
 *
 * @param {FailStat} r
 * @return {string | undefined}
 */
function getFilename (r) {
  if (r.file?.includes('.spec.') || r.file?.includes('.test.')) {
    return r.file;
  }

  if (r.details.error.cause.stack) {
    // extract "(path:line:col)" locations from spec/test stack frames
    const stack = r.details.error.cause.stack
      .split('\n')
      .map((l) => l.trim())
      .filter((l) => l.startsWith('at ') && (l.includes('.spec.') || l.includes('.test.')))
      .map((l) => l.match(/\(.*:\d\d?:\d\d?\)$/)?.[0])
      .map((l) => l?.replace('(', '')?.replace(')', ''));

    if (stack.length) {
      return stack[0];
    }
  }

  return r.file;
}
|
||||
|
||||
/**
 * Prints the final summary for the whole run: per-failure details,
 * category counts, collected diagnostics and (optionally, --console)
 * per-test listings - then ALWAYS terminates the process, using the
 * number of failed tests as the exit code (0 when everything passed).
 *
 * Side effects: writes to stdout/stderr, appends failure details to
 * logFile when --logfile was supplied, and calls process.exit().
 */
function complete () {
  process.stdout.write('\n');

  // accumulates the failure details that are appended to the logfile
  let logError = '';

  stats.fail.forEach((r) => {
    WITH_DEBUG && console.error(JSON.stringify(r, null, 2));

    let item = '';

    // header: filename + test name, marked with an 'x'
    item += indent(1, [getFilename(r), r.name].filter((s) => !!s).join('\n'), 'x ');
    // failure type/code, plus the cause code when it differs from the outer code
    item += indent(2, `${r.details.error.failureType} / ${r.details.error.code}${r.details.error.cause.code && r.details.error.cause.code !== r.details.error.code ? ` / ${r.details.error.cause.code}` : ''}`);

    if (r.details.error.cause.message) {
      item += indent(2, r.details.error.cause.message);
    }

    // the logfile gets the details captured so far, without the (noisy)
    // stack that is only appended for the console output below
    logError += item;

    if (r.details.error.cause.stack) {
      item += indent(2, r.details.error.cause.stack);
    }

    process.stdout.write(item);
  });

  if (logFile && logError) {
    try {
      fs.appendFileSync(path.join(process.cwd(), logFile), logError);
    } catch (e) {
      // a logging failure must never mask the actual test result
      console.error(e);
    }
  }

  console.log();
  console.log('\t passed ::', stats.pass.length);
  console.log('\t failed ::', stats.fail.length);
  console.log('\t skipped ::', stats.skip.length);
  console.log('\t todo ::', stats.todo.length);
  console.log('\t total ::', stats.total);
  console.timeEnd('\t elapsed :');
  console.log();

  // The full error information can be quite useful in the case of overall failures
  if ((stats.fail.length || toConsole) && stats.diag.length) {
    /** @type {string | undefined} */
    let lastFilename = '';

    stats.diag.forEach((r) => {
      WITH_DEBUG && console.error(JSON.stringify(r, null, 2));

      if (typeof r === 'string') {
        console.log(r); // Node.js <= 18.14
      } else if (r.file && r.file.includes('@polkadot/dev/scripts')) {
        // Ignore internal diagnostics
      } else {
        // print a per-file header whenever the source file changes
        if (lastFilename !== r.file) {
          lastFilename = r.file;

          console.log(lastFilename ? `\n${lastFilename}::\n` : '\n');
        }

        // Edge case: We don't need additional noise that is not useful.
        if (!r.message?.split(' ').includes('tests')) {
          console.log(`\t${r.message?.split('\n').join('\n\t')}`);
        }
      }
    });
  }

  if (toConsole) {
    stats.pass.forEach((r) => {
      console.log(`pass ${r.name} ${r.details.duration_ms} ms`);
    });

    console.log();

    stats.fail.forEach((r) => {
      console.log(`fail ${r.name}`);
    });

    console.log();
  }

  // an empty run is treated as a hard failure (misconfiguration)
  if (stats.total === 0) {
    console.error('FATAL: No tests executed');
    console.error();
    process.exit(1);
  }

  process.exit(stats.fail.length);
}
|
||||
|
||||
/**
 * Prints the progress in real-time as data is passed from the worker.
 *
 * One symbol is written per test; symbols are grouped by extra spacing
 * every 5/10 symbols and rows wrap at 100 symbols, with the elapsed
 * time printed at the start of every row.
 *
 * @param {string} symbol
 */
function printProgress (symbol) {
  // FIX: removed the dead `if (!progressRowCount) { progressRowCount = 0; }`
  // guard - assigning 0 when the value is already falsy is a no-op

  // capture the start time on the very first tick
  if (!startAt) {
    startAt = performance.now();
  }

  // If starting a new row, calculate and print the elapsed time
  if (progressRowCount === 0) {
    const now = performance.now();
    const elapsed = (now - startAt) / 1000;
    const minutes = Math.floor(elapsed / 60);
    const seconds = elapsed - minutes * 60;

    process.stdout.write(
      `${`${minutes}:${seconds.toFixed(3).padStart(6, '0')}`.padStart(11)} `
    );
  }

  // Print the symbol with formatting
  process.stdout.write(symbol);

  progressRowCount++;

  // Add spaces for readability
  // NOTE(review): the comment says "double space" but the literal is a
  // single space (possibly lost in extraction) - confirm against upstream
  if (progressRowCount % 10 === 0) {
    process.stdout.write(' '); // Double space every 10 symbols
  } else if (progressRowCount % 5 === 0) {
    process.stdout.write(' '); // Single space every 5 symbols
  }

  // If the row reaches 100 symbols, start a new row
  if (progressRowCount >= 100) {
    process.stdout.write('\n');
    progressRowCount = 0;
  }
}
|
||||
|
||||
/**
 * Splits the collected test files evenly over (at most) one worker per
 * CPU, spawning each worker as a re-invocation of this same script
 * (see the isMainThread branch below), streaming progress symbols as
 * they arrive and aggregating the per-worker stats before printing the
 * final summary via complete().
 */
async function runParallel () {
  const MAX_WORKERS = Math.min(os.cpus().length, files.length);
  // number of files handed to each worker (last worker may get fewer)
  const chunks = Math.ceil(files.length / MAX_WORKERS);

  try {
    // Create and manage worker threads
    const results = await Promise.all(
      Array.from({ length: MAX_WORKERS }, (_, i) => {
        const fileSubset = files.slice(i * chunks, (i + 1) * chunks);

        return new Promise((resolve, reject) => {
          // worker entry is this very script - the worker branch below runs
          const worker = new Worker(new URL(import.meta.url), {
            workerData: { files: fileSubset }
          });

          worker.on('message', (message) => {
            // NOTE(review): only { type: 'progress' | 'result' } messages
            // are handled here; the worker's 'end' handler posts a raw
            // stats object without a type wrapper, which is ignored -
            // confirm the intended handshake against the worker branch
            if (message.type === 'progress') {
              printProgress(message.data);
            } else if (message.type === 'result') {
              resolve(message.data);
            }
          });

          worker.on('error', reject);
          worker.on('exit', (code) => {
            if (code !== 0) {
              reject(new Error(`Worker stopped with exit code ${code}`));
            }
          });
        });
      })
    );

    // Aggregate results from workers: arrays are concatenated,
    // numeric counters (total) are summed
    results.forEach((result) => {
      Object.keys(stats).forEach((key) => {
        if (Array.isArray(stats[key])) {
          stats[key] = stats[key].concat(result[key]);
        } else if (typeof stats[key] === 'number') {
          stats[key] += result[key];
        }
      });
    });

    complete();
  } catch (err) {
    console.error('Error during parallel execution:', err);
    process.exit(1);
  }
}
|
||||
|
||||
// Entry dispatch: the main thread orchestrates the workers; each worker
// thread runs the node:test runner over its workerData file subset and
// reports back via parentPort messages.
if (isMainThread) {
  // NOTE(review): this timer label differs from the '\t elapsed :' timer
  // started at module load (which complete() ends) and is never
  // timeEnd'ed - confirm whether it is intentional
  console.time('\tElapsed:');
  runParallel().catch((err) => console.error(err));
} else {
  // run this worker's files through node:test with a 1h overall timeout
  run({ files: workerData.files, timeout: 3_600_000 })
    .on('data', () => undefined)
    // NOTE(review): this posts the raw stats object without a `type`
    // wrapper, so the main thread's message handler (which matches on
    // { type: 'result' }) will ignore it - verify against runParallel
    .on('end', () => parentPort && parentPort.postMessage(stats))
    .on('test:coverage', () => undefined)
    .on('test:diagnostic', (/** @type {DiagStat} */data) => {
      stats.diag.push(data);
      // NOTE(review): posting a 'result' on every diagnostic resolves
      // the main thread's promise for this worker early - confirm
      parentPort && parentPort.postMessage({ data: stats, type: 'result' });
    })
    .on('test:fail', (/** @type {FailStat} */ data) => {
      // clone the payload; the stack is re-attached explicitly since it
      // may not survive the clone of the error cause as-is
      const statFail = structuredClone(data);

      if (data.details.error.cause?.stack) {
        statFail.details.error.cause.stack = data.details.error.cause.stack;
      }

      stats.fail.push(statFail);
      stats.total++;
      parentPort && parentPort.postMessage({ data: 'x', type: 'progress' });

      // --bail: report and exit on the first failure
      if (bail) {
        complete();
      }
    })
    .on('test:pass', (data) => {
      // '>' skipped, '!' todo, '·' actual pass
      const symbol = typeof data.skip !== 'undefined' ? '>' : typeof data.todo !== 'undefined' ? '!' : '·';

      if (symbol === '>') {
        stats.skip.push(data);
      } else if (symbol === '!') {
        stats.todo.push(data);
      } else {
        stats.pass.push(data);
      }

      stats.total++;
      parentPort && parentPort.postMessage({ data: symbol, type: 'progress' });
    })
    .on('test:plan', () => undefined)
    .on('test:start', () => undefined);
}
|
||||
+7
@@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Thin CLI shim: resolves the rollup binary from the consuming
// project's node_modules and executes it, forwarding all CLI arguments
import { execViaNode } from './util.mjs';

execViaNode('rollup', 'rollup/dist/bin/rollup');
|
||||
Executable
+7
@@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Thin CLI shim: loads the TypeScript compiler CLI entry in-process
// via a dynamic import (the entry runs as a side effect of the import)
import { importDirect } from './util.mjs';

await importDirect('tsc', 'typescript/lib/tsc.js');
|
||||
+7
@@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env node
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// Thin CLI shim: loads the webpack-cli entry in-process via a dynamic
// import (the entry runs as a side effect of the import)
import { importDirect } from './util.mjs';

await importDirect('webpack', 'webpack-cli/bin/cli.js');
|
||||
@@ -0,0 +1,540 @@
|
||||
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

import cp from 'node:child_process';
import fs from 'node:fs';
import path from 'node:path';
import process from 'node:process';
import url from 'node:url';

/** @internal Padded spacer line used to visually offset fatal errors in logs */
const BLANK = ''.padStart(75);

/** CJS/ESM compatible __dirname */
export const __dirname = path.dirname(url.fileURLToPath(import.meta.url));

/** Deno prefix for externals */
export const DENO_EXT_PRE = 'https://esm.sh';

/** Deno prefix for built-ins */
export const DENO_LND_PRE = 'https://deno.land';

/** Deno prefix for the polkadot package */
export const DENO_POL_PRE = `${DENO_LND_PRE}/x/polkadot`;

/** The GH user that we use for actions */
export const GITHUB_USER = 'github-actions[bot]';

/** The GH email for actions */
export const GITHUB_MAIL = '41898282+github-actions[bot]@users.noreply.github.com';

/** The GH repo link (from the actions environment; undefined outside CI) */
export const GITHUB_REPO = process.env['GITHUB_REPOSITORY'];

/** The GH token (undefined when GH_PAT is not set in the environment) */
export const GITHUB_TOKEN = process.env['GH_PAT'];

/** The GH repo URL with embedded auth token (only meaningful when GITHUB_TOKEN is set) */
export const GITHUB_TOKEN_URL = `https://${GITHUB_TOKEN}@github.com`;

/** Paths that we generally build to (catch-all for possible usages) */
export const PATHS_BUILD = ['', '-cjs', '-esm'].reduce((r, a) => r.concat(['', '-babel', '-esbuild', '-swc', '-tsc'].map((b) => `${b}${a}`)), ['-deno', '-docs', '-loader', '-wasm']).sort();

/** Paths that are generally excluded from source operations (node_modules + every build* output dir) */
export const PATHS_EXCL = ['node_modules', ...PATHS_BUILD.map((e) => `build${e}`)];
|
||||
|
||||
/**
 * Copy a file to a target dir
 *
 * Accepts either a single source path or an array of them; each file
 * keeps its basename inside destDir.
 *
 * @param {string | string[]} src
 * @param {string} destDir
 **/
export function copyFileSync (src, destDir) {
  const sources = Array.isArray(src)
    ? src
    : [src];

  for (const source of sources) {
    fs.copyFileSync(source, path.join(destDir, path.basename(source)));
  }
}
|
||||
|
||||
/**
 * Recursively copies a directory to a target dir
 *
 * A missing source is silently skipped; a non-directory source is a
 * fatal error. Files are filtered by the optional include/exclude
 * suffix lists (include wins only when the file is not excluded).
 *
 * @param {string | string[]} src
 * @param {string} dest
 * @param {string[]} [include]
 * @param {string[]} [exclude]
 **/
export function copyDirSync (src, dest, include, exclude) {
  if (Array.isArray(src)) {
    for (const s of src) {
      copyDirSync(s, dest, include, exclude);
    }

    return;
  }

  if (!fs.existsSync(src)) {
    // it doesn't exist, so we have nothing to copy
    return;
  }

  if (!fs.statSync(src).isDirectory()) {
    exitFatal(`Source ${src} should be a directory`);
  }

  mkdirpSync(dest);

  for (const file of fs.readdirSync(src)) {
    const srcPath = path.join(src, file);

    if (fs.statSync(srcPath).isDirectory()) {
      copyDirSync(srcPath, path.join(dest, file), include, exclude);
      continue;
    }

    const isIncluded = !include?.length || include.some((e) => file.endsWith(e));
    const isExcluded = !!exclude && exclude.some((e) => file.endsWith(e));

    if (isIncluded && !isExcluded) {
      copyFileSync(srcPath, dest);
    }
  }
}
|
||||
|
||||
/**
 * Creates a deno directory name
 *
 * Strips the first '@polkadot/' occurrence from the supplied package
 * name, e.g. '@polkadot/dev' -> 'dev'.
 *
 * @param {string} name
 * @returns {string}
 **/
export function denoCreateDir (name) {
  // aligns with name above - since we have sub-paths, we only return
  // the actual path inside packages/* (i.e. the last part of the name)
  return name.replace('@polkadot/', '');
}
|
||||
|
||||
/**
 * @internal
 *
 * Compares two engine version strings numerically by major/minor/patch
 *
 * @param {string} [a]
 * @param {string} [b]
 * @returns {number} -1 when a < b, 1 when a > b, 0 when equal
 */
export function engineVersionCmp (a, b) {
  const left = engineVersionSplit(a);
  const right = engineVersionSplit(b);

  for (let part = 0; part < 3; part++) {
    const diff = left[part] - right[part];

    if (diff !== 0) {
      return diff < 0 ? -1 : 1;
    }
  }

  return 0;
}

/**
 * @internal
 *
 * Splits a engines version, i.e. >=xx(.yy) into
 * the major/minor/patch parts (missing parts default to 0)
 *
 * @param {string} [ver]
 * @returns {[number, number, number]}
 */
export function engineVersionSplit (ver) {
  const cleaned = (ver || '>=0')
    .replace('v', '') // process.version returns v18.14.0
    .replace('>=', '') // engines have >= prefix
    .split('.')
    .map((e) => e.trim());

  return [parseInt(cleaned[0] || '0', 10), parseInt(cleaned[1] || '0', 10), parseInt(cleaned[2] || '0', 10)];
}
|
||||
|
||||
/**
 * Process execution
 *
 * Collapses doubled spaces in the command, optionally logs it and then
 * runs it synchronously with stdio inherited from this process.
 *
 * @param {string} cmd
 * @param {boolean} [noLog]
 **/
export function execSync (cmd, noLog) {
  const normalized = cmd
    .replace(/ {2}/g, ' ')
    .trim();

  if (!noLog) {
    logBin(normalized, true);
  }

  cp.execSync(normalized, { stdio: 'inherit' });
}
|
||||
|
||||
/**
 * Node execution with ts support
 *
 * Splits the supplied node flags into global loaders, local
 * (relative-path) loaders and everything else, then invokes node with
 * the dev-ts loader wedged between the global and local loader groups.
 *
 * @param {string} cmd
 * @param {string[]} [nodeFlags]
 * @param {boolean} [noLog]
 * @param {string} [loaderPath]
 **/
export function execNodeTs (cmd, nodeFlags = [], noLog, loaderPath = '@polkadot/dev-ts/cached') {
  const LOADER_FLAGS = ['--import', '--loader', '--require'];
  /** @type {string[]} */
  const globalLoaders = [];
  /** @type {string[]} */
  const localLoaders = [];
  /** @type {string[]} */
  const passThrough = [];

  let i = 0;

  while (i < nodeFlags.length) {
    const flag = nodeFlags[i++];

    if (!LOADER_FLAGS.includes(flag)) {
      passThrough.push(flag);
      continue;
    }

    // the loader flag's argument is the next entry
    const arg = nodeFlags[i++];

    // Loader ordering:
    //
    // 1. Global loaders are added first, then
    // 2. Our specific dev-ts loader is added, then
    // 3. Any provided local loaders are added
    //
    // This ordering is driven from the use of global loaders inside the
    // apps repo (specifically extensionless), while ensuring we don't
    // break local loader usage in the wasm repo
    const target = arg.startsWith('.')
      ? localLoaders
      : globalLoaders;

    target.push(flag, arg);
  }

  execSync(`${process.execPath} ${passThrough.join(' ')} --no-warnings --enable-source-maps ${globalLoaders.join(' ')} --loader ${loaderPath} ${localLoaders.join(' ')} ${cmd}`, noLog);
}
|
||||
|
||||
/**
 * Execute the git command
 *
 * Shells out to `git <cmd>` via execSync: output is inherited and a
 * non-zero git exit code surfaces as a thrown error.
 *
 * @param {string} cmd
 * @param {boolean} [noLog]
 **/
export function execGit (cmd, noLog) {
  execSync(`git ${cmd}`, noLog);
}
|
||||
|
||||
/**
 * Execute the package manager (yarn by default)
 *
 * Runs `yarn <cmd>` via execSync, inheriting stdio.
 *
 * @param {string} cmd
 * @param {boolean} [noLog]
 **/
export function execPm (cmd, noLog) {
  // It could be possible to extend this to npm/pnpm, but the package manager
  // arguments are not quite the same between them, so we may need to do mangling
  // and adjust to convert yarn-isms to the specific target.
  //
  // Instead of defaulting here, we could possibly use process.env['npm_execpath']
  // to determine the package manager which would work in most (???) cases where the
  // top-level has been executed via a package manager and the env is set - no bets
  // atm for what happens when execSync/fork is used
  //
  // TL;DR Not going to spend effort on this, but quite possibly there is an avenue
  // to support other package managers, aka pick-your-poison
  execSync(`yarn ${cmd}`, noLog);
}
|
||||
|
||||
/**
 * Node binary execution
 *
 * Logs the binary name (with the caller's CLI args) and then executes
 * the script resolved from node_modules, forwarding all CLI arguments.
 *
 * @param {string} name - display name used for logging
 * @param {string} cmd - node_modules-relative script path
 **/
export function execViaNode (name, cmd) {
  logBin(name);

  execSync(`${importPath(cmd)} ${process.argv.slice(2).join(' ')}`, true);
}
|
||||
|
||||
/**
 * A consistent setup for git variables
 *
 * Configures the bot identity plus push/merge defaults, then checks
 * out the master branch.
 */
export function gitSetup () {
  const configs = [
    `user.name "${GITHUB_USER}"`,
    `user.email "${GITHUB_MAIL}"`,
    'push.default simple',
    'merge.ours.driver true'
  ];

  for (const cfg of configs) {
    execGit(`config ${cfg}`);
  }

  execGit('checkout master');
}
|
||||
|
||||
/**
 * Create an absolute import path into node_modules from a
 * <this module> module name
 *
 * @param {string} req - node_modules-relative path, e.g. 'typescript/lib/tsc.js'
 * @returns {string} absolute path under the current working dir's node_modules
 **/
export function importPath (req) {
  return path.join(process.cwd(), 'node_modules', req);
}
|
||||
|
||||
/**
 * Do an async import
 *
 * Logs the binary name and dynamically imports the requested module,
 * exiting fatally when the import fails.
 *
 * @param {string} bin
 * @param {string} req
 * @returns {Promise<any>}
 **/
export async function importDirect (bin, req) {
  logBin(bin);

  try {
    return await import(req);
  } catch (/** @type {any} */ error) {
    // exitFatal terminates the process, so nothing is returned here
    exitFatal(`Error importing ${req}`, error);
  }
}
|
||||
|
||||
/**
 * Do a relative async import
 *
 * Resolves the request against the current working dir's node_modules
 * before delegating to importDirect.
 *
 * @param {string} bin
 * @param {string} req
 * @returns {Promise<any>}
 **/
export function importRelative (bin, req) {
  return importDirect(bin, importPath(req));
}
|
||||
|
||||
/**
 * Logs the binary name with the calling args
 *
 * Output shape is `$ <bin> <args>`, with doubled spaces collapsed and
 * trailing whitespace trimmed.
 *
 * @param {string} bin
 * @param {boolean} [noArgs] - when set, the CLI args are omitted
 */
export function logBin (bin, noArgs) {
  let extra = '';

  if (!noArgs) {
    extra = process.argv.slice(2).join(' ');
  }

  console.log(`$ ${bin} ${extra}`.replace(/ {2}/g, ' ').trim());
}
|
||||
|
||||
/**
 * Do a mkdirp (no glob support, native)
 *
 * Creates the directory and any missing parents; a no-op when the
 * directory already exists (recursive mkdir does not throw for that).
 *
 * @param {string} dir
 **/
export function mkdirpSync (dir) {
  fs.mkdirSync(dir, { recursive: true });
}
|
||||
|
||||
/**
 * Delete the full path (no glob support)
 *
 * @param {string} dir
 **/
export function rimrafSync (dir) {
  // force: true makes a missing path a no-op, so the previous
  // existsSync pre-check was redundant (and racy between the check
  // and the delete)
  fs.rmSync(dir, { force: true, recursive: true });
}
|
||||
|
||||
/**
 * Recursively reads a directory, making a list of the matched extensions
 *
 * Directories named in PATHS_EXCL (node_modules, build outputs) are
 * skipped; matched file paths are accumulated into `files`.
 *
 * @param {string} src
 * @param {string[]} extensions
 * @param {string[]} [files]
 * @returns {string[]} the accumulated list of matching file paths
 **/
export function readdirSync (src, extensions, files = []) {
  if (!fs.statSync(src).isDirectory()) {
    exitFatal(`Source ${src} should be a directory`);
  }

  for (const entry of fs.readdirSync(src)) {
    const full = path.join(src, entry);

    if (fs.statSync(full).isDirectory()) {
      if (!PATHS_EXCL.includes(entry)) {
        readdirSync(full, extensions, files);
      }
    } else if (extensions.some((ext) => entry.endsWith(ext))) {
      files.push(full);
    }
  }

  return files;
}
|
||||
|
||||
/**
 * Prints the fatal error message and exit with a non-zero return code
 *
 * @param {string} message - short human-readable description
 * @param {Error} [error] - optional underlying error, printed in full
 * @returns {never} - always terminates the process with exit code 1
 **/
export function exitFatal (message, error) {
  console.error();
  console.error('FATAL:', message);

  if (error) {
    console.error();
    console.error(error);
  }

  console.error();
  process.exit(1);
}
|
||||
|
||||
/**
 * Checks for Node version with a fatal exit code
 *
 * Reads engines.node from the cwd package.json and exits when the
 * running Node version is older than required.
 */
export function exitFatalEngine () {
  const pkg = JSON.parse(fs.readFileSync(path.join(process.cwd(), 'package.json'), 'utf-8'));

  if (engineVersionCmp(process.version, pkg.engines?.node) === -1) {
    console.error(
      `${BLANK}\n FATAL: At least Node version ${pkg.engines.node} is required for development.\n${BLANK}`
    );

    // FIX: message typos corrected ("famility" -> "family",
    // "first first" -> "first", "uses" -> "use")
    console.error(`
    Technical explanation: For a development environment all projects in
    the @polkadot family use node:test in their operation. Currently the
    minimum required version of Node is thus set at the first version
    with operational support, hence this limitation. Additionally only LTS
    Node versions are supported.

    LTS Node versions are detailed on https://nodejs.dev/en/about/releases/

  `);

    process.exit(1);
  }
}
|
||||
|
||||
/**
 * Checks for yarn usage with a fatal exit code
 *
 * Inspects npm_execpath (set by the invoking package manager) and
 * exits when it does not point at yarn.
 */
export function exitFatalYarn () {
  if (!process.env['npm_execpath']?.includes('yarn')) {
    console.error(
      `${BLANK}\n FATAL: The use of yarn is required, install via npm is not supported.\n${BLANK}`
    );
    // FIX: stray apostrophe removed from "@polkadot' family"
    console.error(`
    Technical explanation: All the projects in the @polkadot family use
    yarn specific configs and assume yarn for build operations and locks.

    If yarn is not available, you can get it from https://yarnpkg.com/

  `);

    process.exit(1);
  }
}
|
||||
|
||||
/**
 * Topological sort of dependencies. It handles circular deps by placing them at the end
 * of the sorted array from circular dep with the smallest vertices to the greatest vertices.
 *
 * Edges are derived by reading packages/<dir>/package.json from disk and
 * matching dependency names of the form @polkadot/<dir>. The result is
 * the topologically sorted dirs, followed by circular participants,
 * followed by packages with no edges at all.
 *
 * Credit to: https://gist.github.com/shinout/1232505 (Parts of this were used as a starting point for the structure of the topoSort)
 *
 * @param {string[]} dirs
 */
export function topoSort (dirs) {
  // dependency graph nodes, keyed by package dir
  /** @type {Record<string, Node>} */
  const nodes = {};
  // topologically sorted output (non-circular nodes only)
  /** @type {string[]} */
  const sorted = [];
  // DFS visit markers
  /** @type {Record<string, boolean>} */
  const visited = {};
  // participants in circular chains, kept out of `sorted`
  /** @type {Record<string, Node>} */
  const circular = {};

  // a single package needs no ordering
  if (dirs.length === 1) {
    return dirs;
  }

  class Node {
    /** @param {string} id */
    constructor (id) {
      this.id = id;
      // outgoing edges: the package dirs this node depends on
      /** @type {string[]} */
      this.vertices = [];
    }
  }

  /**
   * Depth-first visit: pushes `key`'s id onto `sorted` once all of its
   * dependencies have been emitted, tracking circular chains via the
   * ancestor list.
   *
   * @param {*} key
   * @param {string[]} ancestors
   * @returns
   */
  function cb (key, ancestors) {
    const node = nodes[key];
    const id = node.id;

    if (visited[key]) {
      return;
    }

    ancestors.push(id);
    visited[key] = true;

    node.vertices.forEach((i) => {
      // an edge back into the ancestor chain closes a cycle
      if (ancestors.indexOf(i) >= 0) {
        console.log('CIRCULAR: closed chain : ' + i + ' is in ' + id);

        if (nodes[id].vertices.includes(i)) {
          circular[id] = nodes[id];
        }

        circular[i] = nodes[i];
      }

      // recurse with a copy of the ancestor chain
      cb(i.toString(), ancestors.map((v) => v));
    });

    // circular participants are appended separately at the end
    if (!circular[id]) {
      sorted.push(id);
    }
  }

  // Build edges: [from, to] pairs where `from` depends on `to`
  const edges = dirs.map((dir) => {
    const json = fs.readFileSync(path.join('packages', dir, 'package.json'), 'utf8');
    const deps = JSON.parse(json).dependencies;

    return dirs
      .filter((d) => d !== dir && deps && Object.keys(deps).includes(`@polkadot/${d}`))
      .map((d) => [dir, d]);
  }).flat();

  // materialize the nodes and attach the edges
  edges.forEach((v) => {
    const from = v[0]; const to = v[1];

    if (!nodes[from]) {
      nodes[from] = new Node(from);
    }

    if (!nodes[to]) {
      nodes[to] = new Node(to);
    }

    nodes[from].vertices.push(to);
  });

  const keys = Object.keys(nodes);

  for (const key of keys) {
    cb(key, []);
  }

  // circular participants, fewest outgoing edges first
  const circularSorted = Object.keys(circular)
    .sort((a, b) => circular[a].vertices.length < circular[b].vertices.length ? -1 : 1);

  const flattenedEdges = edges.flat();
  // Packages that have no edges
  /** @type {string[]} */
  const standAlones = dirs.filter((d) => !flattenedEdges.includes(d));

  return sorted.concat(circularSorted).concat(standAlones);
}
|
||||
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"module": "commonjs"
|
||||
}
|
||||
@@ -0,0 +1,4 @@
|
||||
// Copyright 2017-2025 @polkadot/dev authors & contributors
// SPDX-License-Identifier: Apache-2.0

// CommonJS test fixture: a minimal module exporting a single known value
module.exports = { foo: 'bar' };
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user