Bump dev deps (#3371)

This commit is contained in:
Jaco
2023-05-18 11:21:13 +03:00
committed by GitHub
parent 35c0439f06
commit 80c71a5938
15 changed files with 569 additions and 222 deletions
+2 -2
View File
@@ -17,7 +17,7 @@ jobs:
node-version: 'lts/*'
- name: check
run: |
yarn install --immutable | grep -v 'YN0013'
yarn install --immutable
yarn phishing:crosscheck
- name: issue
if: ${{ failure() }}
@@ -39,7 +39,7 @@ jobs:
node-version: 'lts/*'
- name: check
run: |
yarn install --immutable | grep -v 'YN0013'
yarn install --immutable
yarn phishing:addrcheck
- name: issue
if: ${{ failure() }}
+1 -1
View File
@@ -22,5 +22,5 @@ jobs:
- name: ${{ matrix.step }}
if: always()
run: |
yarn install --immutable | grep -v 'YN0013'
yarn install --immutable
yarn ${{ matrix.step }}
+2 -2
View File
@@ -27,7 +27,7 @@ jobs:
node-version: 'lts/*'
- name: ${{ matrix.step }}
run: |
yarn install --immutable | grep -v 'YN0013'
yarn install --immutable
yarn ${{ matrix.step }}
# publish to ipfs when a release is detected
@@ -50,6 +50,6 @@ jobs:
node-version: 'lts/*'
- name: ${{ matrix.step }}
run: |
yarn install --immutable | grep -v 'YN0013'
yarn install --immutable
./scripts/ipfsPrep.sh
yarn ${{ matrix.step }}
+5
View File
@@ -2,6 +2,11 @@ enableImmutableInstalls: false
enableProgressBars: false
logFilters:
# Discard any "cannot be found in cache" messages
- code: YN0013
level: discard
nodeLinker: node-modules
plugins:
+2 -2
View File
@@ -41,8 +41,8 @@
"dnslink-cloudflare": "^3.0.0"
},
"devDependencies": {
"@polkadot/dev": "^0.73.17",
"@types/node": "^20.1.3"
"@polkadot/dev": "^0.74.1",
"@types/node": "^20.2.0"
},
"resolutions": {
"typescript": "^5.0.4"
+1 -1
View File
@@ -24,7 +24,7 @@
"@polkadot/util": "^12.2.1",
"@polkadot/util-crypto": "^12.2.1",
"@polkadot/x-fetch": "^12.2.1",
"tslib": "^2.5.0"
"tslib": "^2.5.1"
},
"devDependencies": {
"@types/js-yaml": "^4.0.5",
+4
View File
@@ -5,5 +5,9 @@
"outDir": "./build",
"rootDir": "./src"
},
"exclude": [
"**/*.spec.ts",
"**/mod.ts"
],
"references": []
}
+16
View File
@@ -0,0 +1,16 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"baseUrl": "..",
"outDir": "./build",
"rootDir": "./src",
"emitDeclarationOnly": false,
"noEmit": true
},
"include": [
"**/*.spec.ts"
],
"references": [
{ "path": "../phishing/tsconfig.build.json" }
]
}
+3
View File
@@ -3,6 +3,8 @@
import fs from 'node:fs';
/** @typedef {{ allow: string[]; deny: string[]; }} AllList */
// multi-part domain codes and (hopefully) valid top-levels (all manual from list)
const DOMS = [
// country stuff
@@ -15,6 +17,7 @@ const DOMS = [
const MEDS = DOMS.map((d) => `medium.${d}`);
// get allow/deny lists
/** @type {AllList} */
const all = JSON.parse(fs.readFileSync('all.json', 'utf-8'));
// break the allow list into usable parts
+31 -16
View File
@@ -2,13 +2,20 @@
// SPDX-License-Identifier: Apache-2.0
import PinataSDK from '@pinata/sdk';
// @ts-expect-error We are not defining our own types for this
import cloudflare from 'dnslink-cloudflare';
/** @typedef {import('@pinata/sdk').default} PinataClient */
const SUB_DOMAIN = 'phishing';
const DOMAIN = 'dotapps.io';
const DST = 'build';
const PINMETA = { name: `${SUB_DOMAIN}.${DOMAIN}` };
const PINMETA = {
name: `${SUB_DOMAIN}.${DOMAIN}`
};
/** @type {PinataClient} */
// @ts-expect-error For some reason we have issues here...
const pinata = new PinataSDK({
pinataApiKey: process.env.PINATA_API_KEY,
pinataSecretApiKey: process.env.PINATA_SECRET_KEY
@@ -16,10 +23,13 @@ const pinata = new PinataSDK({
async function wait (delay = 2500) {
return new Promise((resolve) => {
setTimeout(() => resolve(), delay);
setTimeout(() => resolve(undefined), delay);
});
}
/**
* @returns {Promise<string>}
*/
async function pin () {
const result = await pinata.pinFromFS(DST, { pinataMetadata: PINMETA });
@@ -30,32 +40,37 @@ async function pin () {
return result.IpfsHash;
}
/**
* @param {string} exclude
*/
async function unpin (exclude) {
// @ts-expect-error We can forgo the keyvalues field
const result = await pinata.pinList({ metadata: PINMETA, status: 'pinned' });
await wait();
if (result.count > 1) {
const hashes = result.rows
.map((r) => r.ipfs_pin_hash)
.filter((hash) => hash !== exclude);
const hashes = result.rows
.map((r) => r.ipfs_pin_hash)
    .filter((/** @type {string} */ hash) => hash !== exclude);
for (let i = 0; i < hashes.length; i++) {
const hash = hashes[i];
for (let i = 0; i < hashes.length; i++) {
const hash = hashes[i];
try {
await pinata.unpin(hash);
try {
await pinata.unpin(hash);
console.log(`Unpinned ${hash}`);
} catch (error) {
console.error(`Failed unpinning ${hash}`, error);
}
await wait();
console.log(`Unpinned ${hash}`);
} catch (error) {
console.error(`Failed unpinning ${hash}`, error);
}
await wait();
}
}
/**
* @param {string} hash
*/
async function dnslink (hash) {
const records = [`_dnslink.${SUB_DOMAIN}.${DOMAIN}`];
+61 -4
View File
@@ -3,10 +3,17 @@
import fs from 'node:fs';
// @ts-expect-error @polkadot/dev scripts don't have .d.ts files
import { mkdirpSync, rimrafSync } from '@polkadot/dev/scripts/util.mjs';
/** @typedef {{ allow: string[]; deny: string[]; }} AllList */
const KNOWN_URLS = ['telegra.ph', 'twitter.com', 'youtube.com'];
/**
* @param {string} url
* @returns {string}
*/
function sanitizeUrl (url) {
return (
url.includes('://')
@@ -15,10 +22,14 @@ function sanitizeUrl (url) {
).split('/')[0];
}
/**
* @param {string[]} list
* @returns {string[]}
*/
function filterSection (list) {
return list
.map((entry) => sanitizeUrl(entry))
.reduce((filtered, entry) => {
.reduce((/** @type {string[]} */ filtered, entry) => {
!filtered.includes(entry) &&
filtered.push(entry);
@@ -26,10 +37,19 @@ function filterSection (list) {
}, []);
}
/**
* @param {string[]} list
* @returns {string[]}
*/
function sortSection (list) {
return filterSection(list).sort((a, b) => a.localeCompare(b));
}
/**
* @param {string[]} list
* @param {string} url
* @returns {boolean}
*/
function isSubdomain (list, url) {
const parts = url.split('.');
@@ -43,6 +63,10 @@ function isSubdomain (list, url) {
return false;
}
/**
* @param {string} url
* @returns {string}
*/
function flattenUrl (url) {
// currently we only check for plesk-page to flatten
if (!url.endsWith('plesk.page')) {
@@ -56,6 +80,10 @@ function flattenUrl (url) {
: url;
}
/**
* @param {string[]} list
* @returns {string[]}
*/
function rewriteSubs (list) {
return filterSection(
list
@@ -64,12 +92,19 @@ function rewriteSubs (list) {
);
}
/**
* @param {Record<string, string[]>} values
* @returns {Record<string, string[]>}
*/
function sortAddresses (values) {
return Object
.entries(values)
.map(([key, address]) => [sanitizeUrl(key), address])
.map(
/** @returns {[string, string[]]} */
([key, addresses]) => [sanitizeUrl(key), addresses]
)
.sort(([a], [b]) => a.localeCompare(b))
.reduce((all, [key, addresses]) => {
.reduce((/** @type {Record<string, string[]>} */ all, [key, addresses]) => {
if (!all[key]) {
all[key] = [];
}
@@ -83,6 +118,11 @@ function sortAddresses (values) {
}, {});
}
/**
* @param {AllList} param0
* @param {Record<string, string>} values
* @returns
*/
function addSites ({ allow, deny }, values) {
return Object
.keys(values)
@@ -94,10 +134,18 @@ function addSites ({ allow, deny }, values) {
}, deny);
}
/**
* @param {string} file
* @returns {any}
*/
function readJson (file) {
return JSON.parse(fs.readFileSync(file, 'utf-8'));
}
/**
* @param {string} file
* @param {unknown} contents
*/
function writeJson (file, contents) {
fs.writeFileSync(file, `${JSON.stringify(contents, null, '\t')}\n`);
}
@@ -117,8 +165,14 @@ function readMeta () {
return meta;
}
/**
* @param {{ date: string; url: string; }[]} meta
*/
export function writeMeta (meta) {
/** @type {Record<string, { date: string; url: string; }[]>} */
const months = {};
/** @type {string[]} */
const index = [];
for (const item of meta) {
@@ -139,11 +193,14 @@ export function writeMeta (meta) {
writeJson('meta/index.json', index.sort((a, b) => b.localeCompare(a)));
}
/**
* @param {string[]} deny
*/
function writeAllList (deny) {
rimrafSync('all');
mkdirpSync('all');
const avail = deny.reduce((avail, url) => {
const avail = deny.reduce((/** @type {Record<string, string[]>} */ avail, url) => {
const [top] = url.split('.').reverse();
if (!avail[top]) {
+2 -1
View File
@@ -4,6 +4,7 @@
},
"files": [],
"references": [
{ "path": "./packages/phishing/tsconfig.build.json" }
{ "path": "./packages/phishing/tsconfig.build.json" },
{ "path": "./packages/phishing/tsconfig.spec.json" }
]
}
+7 -2
View File
@@ -4,6 +4,11 @@
"baseUrl": "./packages",
"composite": false
},
"include": ["packages/**/src/**/*", "scripts/*"],
"exclue": ["**/node_modules/**/*"]
"include": [
"packages/**/src/**/*",
"scripts/*"
],
"exclue": [
"**/node_modules/**/*"
]
}
+2 -1
View File
@@ -5,7 +5,8 @@
"composite": false
},
"include": [
"packages/**/src/**/*"
"packages/**/src/**/*",
"scripts/*"
],
"exclude": [
"**/node_modules/**/*"
+430 -190
View File
File diff suppressed because it is too large Load Diff