Split addition tests, expand non-url checks (#885)

* Split addition tests, expand non-url checks

* Spelling

* Additional valid url check

* Meta changes
This commit is contained in:
Jaco
2022-01-02 09:20:34 +01:00
committed by GitHub
parent 080afd8fa5
commit 60bd4b6d64
8 changed files with 88 additions and 97 deletions
+1 -1
View File
@@ -236,7 +236,7 @@
"12HR9ciHKMmcs7MJhykPUEhSk7ssA1eu7VWZyoLqyZ1os4Uf",
"167u8YLEEBwENxQnKw8GJwbWWREoWxMGMnVuXW6HcYBXbVc2"
],
"social enginnering": [
"social engineering": [
"16JeATU8HazNbaMkpNW2niozWgNu1A6dx5u2kKs7pZaS4Vze"
],
"telegra.ph": [
+1 -4
View File
@@ -1270,7 +1270,6 @@
"paxfuls.xyz",
"pcakecoins.finance",
"peercoinwallet.com",
"phishing Polkadot extension",
"phuture.medium.cn.com",
"phuture.medium.com.ru",
"phuture.polkastarter.com.es",
@@ -1628,7 +1627,6 @@
"saitamatokendrop.com",
"scaleswap.medium.com.ru",
"scaleswap.polkastarter.com.es",
"scam-check",
"scopebtc.com",
"secdappwal.live",
"securecryptomining.org",
@@ -1679,7 +1677,6 @@
"smartwalletvalidator.com",
"smetamask.com",
"smoothsncsecure.online",
"social enginnering",
"solanium.live",
"solanium.medium.com.ru",
"solutiondapps.live",
@@ -1963,7 +1960,7 @@
"wallenmexico.com",
"wallerhof.com",
"wallet-api.link",
"wallet-api.online ",
"wallet-api.online",
"wallet-authenticatordapps.com",
"wallet-authorizations.com",
"wallet-autosync.art",
+1 -5
View File
@@ -311,10 +311,6 @@
"date": "2021-10-19",
"url": "decentralizedintegration.com"
},
{
"date": "2021-10-19",
"url": "social enginnering"
},
{
"date": "2021-10-19",
"url": "tokenmainnet.org"
@@ -757,7 +753,7 @@
},
{
"date": "2021-10-08",
"url": "wallet-api.online "
"url": "wallet-api.online"
},
{
"date": "2021-10-08",
-4
View File
@@ -899,10 +899,6 @@
"date": "2021-11-12",
"url": "xn--unswap-4va.app"
},
{
"date": "2021-11-11",
"url": "scam-check"
},
{
"date": "2021-11-10",
"url": "conn-we.online"
-4
View File
@@ -1231,10 +1231,6 @@
"date": "2021-12-20",
"url": "mainsaffixsync.online"
},
{
"date": "2021-12-20",
"url": "phishing Polkadot extension"
},
{
"date": "2021-12-20",
"url": "polkadot-bonusevents.network"
+82
View File
@@ -0,0 +1,82 @@
// Copyright 2020-2022 @polkadot/phishing authors & contributors
// SPDX-License-Identifier: Apache-2.0
import fs from 'fs';
import { decodeAddress } from '@polkadot/util-crypto';
// address.json: site/category -> list of reported scam addresses (the additions under test)
const addresses = JSON.parse(fs.readFileSync('address.json', 'utf-8')) as Record<string, string[]>;
// known.json: site -> known-good addresses; presumably the allow list — additions must not collide with it
const allowed = JSON.parse(fs.readFileSync('known.json', 'utf-8')) as Record<string, string[]>;
// all.json: aggregated deny list of phishing domains (domain-only entries)
const all = JSON.parse(fs.readFileSync('all.json', 'utf8')) as { deny: string[] };
// Validates newly added scam addresses: every entry must decode to a
// 32-byte ss58 public key and must not also appear on the known
// (allowed) addresses list.
describe('added addresses', (): void => {
it('has no malformed addresses', (): void => {
// For each site, keep only addresses that fail to decode
// (decodeAddress throws) or decode to a non-32-byte key
const invalids = Object
.entries(addresses)
.map(([url, addrs]): [string, string[]] => {
return [url, addrs.filter((a) => {
try {
return decodeAddress(a).length !== 32;
} catch (error) {
// log the decode failure for context, then flag the address as invalid
console.error(url, (error as Error).message);
return true;
}
})];
})
.filter(([, addrs]) => addrs.length);
if (invalids.length) {
// throw (rather than expect) so the message lists every offending site and address
throw new Error(`Invalid ss58 checksum addresses found: ${invalids.map(([url, addrs]) => `\n\t${url}: ${addrs.join(', ')}`).join('')}`);
}
});
it('has no entries on the known addresses list', (): void => {
// flatten all newly-denied addresses into one array
const added = Object
.values(addresses)
.reduce<string[]>((all, addrs) => all.concat(addrs), []);
// any [site, address] pair from the allow list that re-appears in the additions is a conflict
const dupes = Object
.entries(allowed)
.reduce<[string, string][]>((all, [site, addrs]) => all.concat(addrs.map((a) => [site, a])), [])
.filter(([, a]) => added.includes(a));
expect(dupes).toEqual([]);
});
});
// Validates the deny list in all.json: entries must be bare domains
// (no paths, query params, spaces, or www. prefix) and must be unique.
describe('added urls', (): void => {
it('has no malformed domain-only entries', (): void => {
const invalids = all.deny.filter((u) =>
u.includes('/') || // don't allow paths
u.includes('?') || // don't allow query params
u.includes(' ') || // no spaces
!u.includes('.') // need at least a domain
);
expect(invalids).toEqual([]);
});
it('has no urls starting with www. (domain-only inclusions)', (): void => {
// entries are matched domain-only, so a www. prefix is redundant noise
const invalids = all.deny.filter((u) =>
u.startsWith('www.')
);
expect(invalids).toEqual([]);
});
it('has no duplicate entries', (): void => {
// single pass: first occurrence goes into checks, any repeat into dupes
const checks: string[] = [];
const dupes = all.deny.reduce<string[]>((dupes, url) => {
if (!checks.includes(url)) {
checks.push(url);
} else {
dupes.push(url);
}
return dupes;
}, []);
expect(dupes).toEqual([]);
});
});
-76
View File
@@ -1,16 +1,8 @@
// Copyright 2020-2022 @polkadot/phishing authors & contributors
// SPDX-License-Identifier: Apache-2.0
import fs from 'fs';
import { decodeAddress } from '@polkadot/util-crypto';
import { checkAddress, checkIfDenied } from '.';
const addresses = JSON.parse(fs.readFileSync('address.json', 'utf-8')) as Record<string, string[]>;
const allowed = JSON.parse(fs.readFileSync('known.json', 'utf-8')) as Record<string, string[]>;
const all = JSON.parse(fs.readFileSync('all.json', 'utf8')) as { deny: string[] };
describe('checkIfDenied', (): void => {
it('returns false when host is not listed', async (): Promise<void> => {
expect(
@@ -74,71 +66,3 @@ describe('checkAddress', (): void => {
).toEqual('polkadots.network');
});
});
describe('check additions', (): void => {
it('has no malformed addresses', (): void => {
const invalids = Object
.entries(addresses)
.map(([url, addrs]): [string, string[]] => {
return [url, addrs.filter((a) => {
try {
return decodeAddress(a).length !== 32;
} catch (error) {
console.error(url, (error as Error).message);
return true;
}
})];
})
.filter(([, addrs]) => addrs.length);
if (invalids.length) {
throw new Error(`Invalid ss58 checksum addresses found: ${invalids.map(([url, addrs]) => `\n\t${url}: ${addrs.join(', ')}`).join('')}`);
}
});
it('has no entries on the known addresses list', (): void => {
const added = Object
.values(addresses)
.reduce<string[]>((all, addrs) => all.concat(addrs), []);
const dupes = Object
.entries(allowed)
.reduce<[string, string][]>((all, [site, addrs]) => all.concat(addrs.map((a) => [site, a])), [])
.filter(([, a]) => added.includes(a));
expect(dupes).toEqual([]);
});
it('has no malformed domain-only entries', (): void => {
const invalids = all.deny.filter((u) =>
u.includes('/') ||
u.includes('?')
);
expect(invalids).toEqual([]);
});
it('has no urls starting with www. (domain-only inclusions)', (): void => {
const invalids = all.deny.filter((u) =>
u.startsWith('www.')
);
expect(invalids).toEqual([]);
});
it('has no duplicate entries', (): void => {
const checks: string[] = [];
const dupes = all.deny.reduce<string[]>((dupes, url) => {
if (!checks.includes(url)) {
checks.push(url);
} else {
dupes.push(url);
}
return dupes;
}, []);
expect(dupes).toEqual([]);
});
});
+3 -3
View File
@@ -44,11 +44,11 @@ function sortAddresses (values) {
}, {});
}
function addSites (deny, values) {
function addSites ({ allow, deny }, values) {
return Object
.keys(values)
.reduce((filtered, url) => {
!filtered.includes(url) && !KNOWN_URLS.includes(url) &&
url.includes('.') && !url.includes(' ') && !url.includes('/') && !allow.includes(url) && !filtered.includes(url) && !KNOWN_URLS.includes(url) &&
filtered.push(url);
return filtered;
@@ -103,7 +103,7 @@ export function writeMeta (meta) {
const addr = readJson('address.json');
const all = readJson('all.json');
const meta = readMeta();
const deny = sortSection(addSites(all.deny, addr));
const deny = sortSection(addSites(all, addr));
// rewrite with all our entries (newline included)
writeJson('address.json', sortAddresses(addr));