Files
pezkuwi-api/packages/bizinikiwi-bindings/dist/index.js
T
pezkuwichain 31467f90d4 feat: add PAPI rebrand packages
- @pezkuwi/papi-utils (rebrand of @polkadot-api/utils)
- @pezkuwi/bizinikiwi-bindings (rebrand of @polkadot-api/substrate-bindings)
- @pezkuwi/metadata-builders (rebrand of @polkadot-api/metadata-builders)
- @pezkuwi/merkleize-metadata (rebrand of @polkadot-api/merkleize-metadata)

All @polkadot-api references replaced with @pezkuwi equivalents.
2026-01-22 15:40:12 +03:00

1328 lines
39 KiB
JavaScript

'use strict';
var scaleTs = require('scale-ts');
var base = require('@scure/base');
var blake2_js = require('@noble/hashes/blake2.js');
var utils = require('@pezkuwi/papi-utils');
var blake3_js = require('@noble/hashes/blake3.js');
var sha3_js = require('@noble/hashes/sha3.js');
// Domain-separation prefix mixed into every SS58 checksum pre-image.
const SS58_PREFIX = new TextEncoder().encode("SS58PRE");
// SS58 addresses carry the first 2 bytes of a blake2b-512 checksum.
const CHECKSUM_LENGTH = 2;
/**
 * Decodes an SS58 address string into its format and public key, verifying
 * the blake2b checksum. Never throws: any malformed input yields
 * `{ isValid: false }`.
 */
const getSs58AddressInfo = (address) => {
  try {
    const raw = base.base58.decode(address);
    // Bit 6 of the first byte marks a two-byte format prefix.
    const prefixLen = raw[0] & 64 ? 2 : 1;
    const prefix = raw.subarray(0, prefixLen);
    const pubKey = raw.subarray(prefixLen, raw.length - CHECKSUM_LENGTH);
    const checksum = raw.subarray(prefixLen + pubKey.length);
    const expected = blake2_js
      .blake2b(Uint8Array.of(...SS58_PREFIX, ...prefix, ...pubKey), {
        dkLen: 64
      })
      .subarray(0, CHECKSUM_LENGTH);
    if (checksum[0] !== expected[0] || checksum[1] !== expected[1])
      return { isValid: false };
    return {
      isValid: true,
      ss58Format: prefixBytesToNumber(prefix),
      publicKey: pubKey.slice()
    };
  } catch (_) {
    return { isValid: false };
  }
};
// Recovers the numeric SS58 format from its prefix byte(s). One-byte
// prefixes (formats 0-63) are the byte itself. Two-byte prefixes use the
// packing produced by `fromBufferToBase58` below:
//   b0 = 0b0100_0000 | format[7:2],  b1 = format[13:8] | (format[1:0] << 6)
// so decoding must unscramble those bit groups — a plain big-endian uint16
// read (the previous implementation) returned the wrong format for every
// two-byte prefix and did not round-trip with the encoder in this file.
const prefixBytesToNumber = (bytes) => {
  if (bytes.length === 1) return bytes[0];
  return ((bytes[0] & 63) << 2) | (bytes[1] >> 6) | ((bytes[1] & 63) << 8);
};
/**
 * Memoizes an SS58 encoder keyed by public-key bytes. The cache is flushed
 * after a tick with no activity (the setTimeout(0) chain keeps running only
 * while new lookups arrive, so idle periods release all cached strings).
 *
 * The previous implementation stored results in a nested per-byte object
 * trie, but iterated `i <= lastIdx` and then indexed `publicKey[lastIdx]`
 * again: when one key was a byte-prefix of another, a cache *node* object
 * could be returned in place of a cached result (or a property assignment
 * onto a cached string would throw under 'use strict'). A flat Map keyed by
 * the joined byte string has none of those collisions.
 */
const withSs58Cache = (fn) => {
  let cache = new Map();
  let activityCount = 0;
  let latestCount = 0;
  const checkActivity = () => {
    if (activityCount === latestCount) {
      // No lookups since the last tick: drop everything and stop polling.
      cache = new Map();
      activityCount = latestCount = 0;
    } else {
      latestCount = activityCount;
      setTimeout(checkActivity, 0);
    }
  };
  return (publicKey) => {
    // First activity after an idle period restarts the flush watcher.
    if (++activityCount === 1) checkActivity();
    const key = publicKey.join(",");
    let result = cache.get(key);
    if (result === undefined) {
      result = fn(publicKey);
      cache.set(key, result);
    }
    return result;
  };
};
/**
 * Builds a cached `publicKey -> SS58 string` encoder for one format.
 * Formats below 64 use a single prefix byte; larger formats are packed
 * into two bytes (bit 6 of the first byte set as the two-byte marker).
 */
const fromBufferToBase58 = (ss58Format) => {
  const prefixBytes =
    ss58Format < 64
      ? Uint8Array.of(ss58Format)
      : Uint8Array.of(
          ((ss58Format & 252) >> 2) | 64,
          (ss58Format >> 8) | ((ss58Format & 3) << 6)
        );
  return withSs58Cache((publicKey) => {
    const preimage = Uint8Array.of(...SS58_PREFIX, ...prefixBytes, ...publicKey);
    const checksum = blake2_js
      .blake2b(preimage, { dkLen: 64 })
      .subarray(0, CHECKSUM_LENGTH);
    return base.base58.encode(
      Uint8Array.of(...prefixBytes, ...publicKey, ...checksum)
    );
  });
};
/**
 * Builds an `SS58 address -> publicKey` decoder that enforces an exact key
 * length. The format argument is intentionally unused: addresses of any
 * SS58 format are accepted as long as checksum and length are valid.
 * @throws {Error} "Invalid checksum" / "Invalid public key length"
 */
function fromBase58ToBuffer(nBytes, _ss58Format) {
  return (address) => {
    const info = getSs58AddressInfo(address);
    if (!info.isValid) throw new Error("Invalid checksum");
    if (info.publicKey.length !== nBytes)
      throw new Error("Invalid public key length");
    return info.publicKey;
  };
}
// Codec for account ids: SCALE side is the raw fixed-size public key,
// JS side is the SS58 address string (default format 42 = generic
// Substrate, 32-byte keys).
const AccountId = (ss58Format = 42, nBytes = 32) => scaleTs.enhanceCodec(
  scaleTs.Bytes(nBytes),
  fromBase58ToBuffer(nBytes),
  fromBufferToBase58(ss58Format)
);
// esbuild-generated runtime helpers: class `#private` fields lowered onto
// WeakMaps, plus public-field definition. Generated code — do not edit.
var __defProp = Object.defineProperty;
var __typeError = (msg) => {
  throw TypeError(msg);
};
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
// WeakMap handles backing Binary's lowered private fields (assigned below).
var _bytes, _opaqueBytes, _hex, _opaqueHex, _str;
const textEncoder$3 = new TextEncoder();
const textDecoder$2 = new TextDecoder();
// Decoder for length-prefixed ("opaque") blobs: compact length + raw bytes.
const opaqueBytesDec = scaleTs.Tuple(scaleTs.compact, scaleTs.Bytes(Infinity))[1];
// Wrapper around a byte payload with lazily computed (and cached) text, hex
// and "opaque" (length-prefixed) representations.
class Binary {
  constructor(data, opaque = false) {
    __privateAdd(this, _bytes);
    __privateAdd(this, _opaqueBytes, null);
    __privateAdd(this, _hex, null);
    __privateAdd(this, _opaqueHex, null);
    __privateAdd(this, _str, null);
    // UTF-8 decode of the payload, computed once on first use.
    __publicField(this, "asText", () => __privateGet(this, _str) ?? __privateSet(this, _str, textDecoder$2.decode(__privateGet(this, _bytes))));
    __publicField(this, "asHex", () => __privateGet(this, _hex) ?? __privateSet(this, _hex, utils.toHex(__privateGet(this, _bytes))));
    __publicField(this, "asOpaqueHex", () => __privateGet(this, _opaqueHex) ?? __privateSet(this, _opaqueHex, utils.toHex(this.asOpaqueBytes())));
    __publicField(this, "asBytes", () => __privateGet(this, _bytes));
    // Opaque form = compact-encoded length followed by the raw bytes.
    __publicField(this, "asOpaqueBytes", () => __privateGet(this, _opaqueBytes) ?? __privateSet(this, _opaqueBytes, utils.mergeUint8([
      scaleTs.compact[0](__privateGet(this, _bytes).length),
      __privateGet(this, _bytes)
    ])));
    if (opaque) {
      try {
        const [len, bytes] = opaqueBytesDec(data);
        // The declared length must cover exactly the remaining bytes.
        if (len === bytes.length) {
          __privateSet(this, _bytes, bytes);
          __privateSet(this, _opaqueBytes, data);
          return;
        }
      } catch (_) {
      }
      throw new Error("Invalid opaque bytes");
    } else __privateSet(this, _bytes, data);
  }
  static fromText(input) {
    return new this(textEncoder$3.encode(input));
  }
  static fromHex(input) {
    return new this(utils.fromHex(input));
  }
  static fromOpaqueHex(input) {
    return new this(utils.fromHex(input), true);
  }
  static fromBytes(input) {
    return new this(input);
  }
  static fromOpaqueBytes(input) {
    return new this(input, true);
  }
}
// Backing stores for the lowered private fields declared above.
_bytes = new WeakMap();
_opaqueBytes = new WeakMap();
_hex = new WeakMap();
_opaqueHex = new WeakMap();
_str = new WeakMap();
// Default AccountId encoder (format 42): SS58 address string -> raw bytes.
const [accountIdEncoder] = AccountId();
// Binary whose length is fixed at the type level (e.g. H256 values).
class FixedSizeBinary extends Binary {
  constructor(data) {
    super(data);
  }
  static fromArray(input) {
    return new this(new Uint8Array(input));
  }
  // Builds the 32-byte value from an SS58 address string.
  static fromAccountId32(input) {
    return new this(accountIdEncoder(input));
  }
}
// Codec for Binary / FixedSizeBinary values backed by scale-ts Bytes.
const enc$2 = (nBytes) => {
  const encodeBytes = scaleTs.Bytes.enc(nBytes);
  return (value) => encodeBytes(value.asBytes());
};
const dec$2 = (nBytes) => {
  const decodeBytes = scaleTs.Bytes.dec(nBytes);
  // Fixed-length payloads decode to FixedSizeBinary, open-ended to Binary.
  const Ctor = nBytes == null ? Binary : FixedSizeBinary;
  return (value) => Ctor.fromBytes(decodeBytes(value));
};
const Bin = (nBytes) => scaleTs.createCodec(enc$2(nBytes), dec$2(nBytes));
Bin.enc = enc$2;
Bin.dec = dec$2;
// SCALE compact integers surfaced as plain JS number / bigint.
const compactNumber = scaleTs.enhanceCodec(scaleTs.compact, (v) => v, Number);
const compactBn = scaleTs.enhanceCodec(scaleTs.compact, (v) => v, BigInt);
// Raw bit sequence: compact bit-length followed by ceil(bitsLen / 8) bytes.
const bitSequenceDecoder = scaleTs.createDecoder((data) => {
  const bitsLen = compactNumber.dec(data);
  const bytesLen = Math.ceil(bitsLen / 8);
  const bytes = scaleTs.Bytes(bytesLen).dec(data);
  return { bytes, bitsLen };
});
const bitSequenceEncoder = (input) => {
  // The byte payload must be able to hold the declared number of bits.
  if (input.bitsLen > input.bytes.length * 8)
    throw new Error(
      `Not enough bytes. (bitsLen:${input.bitsLen}, bytesLen:${input.bytes.length})`
    );
  const lenEncoded = compactNumber.enc(input.bitsLen);
  const result = new Uint8Array(input.bytes.length + lenEncoded.length);
  result.set(lenEncoded, 0);
  result.set(input.bytes, lenEncoded.length);
  return result;
};
const bitSequence$1 = scaleTs.createCodec(bitSequenceEncoder, bitSequenceDecoder);
// Single-character codec backed by u8 (char codes 0-255).
const char = scaleTs.enhanceCodec(
  scaleTs.u8,
  (str) => str.charCodeAt(0),
  String.fromCharCode
);
// Codec mapping "0x…" hex strings to/from raw SCALE bytes.
const enc$1 = (nBytes) => {
  const encodeBytes = scaleTs.Bytes.enc(nBytes);
  return (value) => encodeBytes(utils.fromHex(value));
};
const dec$1 = (nBytes) => {
  const decodeBytes = scaleTs.Bytes.dec(nBytes);
  return (value) => utils.toHex(decodeBytes(value));
};
const Hex = (nBytes) => scaleTs.createCodec(enc$1(nBytes), dec$1(nBytes));
Hex.enc = enc$1;
Hex.dec = dec$1;
const textEncoder$2 = new TextEncoder();
const textDecoder$1 = new TextDecoder();
// Fixed-size UTF-8 string codec: exactly nBytes bytes, no length prefix.
const fixedStr = (nBytes) => scaleTs.enhanceCodec(
  scaleTs.Bytes(nBytes),
  (str) => textEncoder$2.encode(str),
  (bytes) => textDecoder$1.decode(bytes)
);
// Lazily resolves an encoder on first use and memoizes it; allows codecs
// that reference themselves (see `Self` below).
const selfEncoder = (value) => {
  let resolved = null;
  return (x) => (resolved ?? (resolved = value()))(x);
};
// Decoder counterpart of selfEncoder: resolve once, on first invocation.
const selfDecoder = (value) => {
  let resolved = null;
  return (x) => (resolved ?? (resolved = value()))(x);
};
// Codec for recursive shapes: `value` is evaluated lazily on the first
// encode/decode, so its body may reference the codec being defined.
const Self = (value) => scaleTs.createCodec(
  selfEncoder(() => value().enc),
  selfDecoder(() => value().dec)
);
// Tag-discrimination helpers shared by Enum values ({ type, value }).
const discriminant = {
  // True when the enum value carries the given tag.
  is(value, type) {
    return type === value.type;
  },
  // Narrowing assertion: returns the value, or throws on a tag mismatch.
  as(value, type) {
    if (value.type !== type)
      throw new Error(
        `Enum.as(enum, ${type}) used with actual type ${value.type}`
      );
    return value;
  }
};
// Constructs a tagged enum value; `is`/`as` are exposed as statics.
const Enum = Object.assign((type, value) => ({ type, value }), discriminant);
// Sugar: `_Enum.Foo(v)` is `Enum("Foo", v)` for any property name.
const _Enum = new Proxy(
  {},
  {
    get: (_, prop) => (value) => Enum(prop, value)
  }
);
// Attaches the source sub-codec definition(s) to a codec/function as
// `.inner` metadata and returns the same object.
const withInner = (codec, inner) => Object.assign(codec, { inner });
// Variant: like scale-ts Enum, but values are surfaced using this
// library's { type, value } shape instead of scale-ts's { tag, value }.
const VariantEnc = (...args) => {
  const enc = scaleTs.Enum.enc(...args);
  return withInner((v) => enc({ tag: v.type, value: v.value }), args[0]);
};
const VariantDec = (...args) => {
  const dec = scaleTs.Enum.dec(...args);
  return withInner((v) => {
    const { tag, value } = dec(v);
    return Enum(tag, value);
  }, args[0]);
};
// `inner` maps variant name -> [encoder, decoder] codec pair.
const Variant = (inner, ...args) => withInner(
  scaleTs.createCodec(
    VariantEnc(
      utils.mapObject(inner, ([encoder]) => encoder),
      ...args
    ),
    VariantDec(
      utils.mapObject(inner, ([, decoder]) => decoder),
      ...args
    )
  ),
  inner
);
Variant.enc = VariantEnc;
Variant.dec = VariantDec;
// scale-ts Enum re-exported with its inner definition attached.
const ScaleEnum = (inner, ...args) => withInner(scaleTs.Enum(inner, ...args), inner);
ScaleEnum.enc = (inner, ...rest) => withInner(scaleTs.Enum.enc(inner, ...rest), inner);
ScaleEnum.dec = (inner, ...rest) => withInner(scaleTs.Enum.dec(inner, ...rest), inner);
// Storage-key hashers. The "Concat" variants append the pre-image to the
// digest, which is what makes their key segments reversible (see Storage).
const len32$1 = { dkLen: 32 };
const Blake2256 = (encoded) => blake2_js.blake2b(encoded, len32$1);
const len16 = { dkLen: 16 };
const Blake2128 = (encoded) => blake2_js.blake2b(encoded, len16);
const Blake2128Concat = (encoded) => utils.mergeUint8([Blake2128(encoded), encoded]);
const len32 = { dkLen: 32 };
const Blake3256 = (encoded) => blake3_js.blake3(encoded, len32);
const Blake3256Concat = (encoded) => utils.mergeUint8([Blake3256(encoded), encoded]);
// Identity "hasher": the encoded key itself.
const Identity = (encoded) => encoded;
// Packs four little-endian u16 lanes into one u64 BigInt.
const bigintFromU16 = (v0, v1, v2, v3) =>
  new DataView(new Uint16Array([v0, v1, v2, v3]).buffer).getBigUint64(0, true);
// All xxhash64 arithmetic is performed modulo 2^64.
const MASK_64 = (1n << 64n) - 1n;
// 64-bit rotate-left.
const rotl = (input, nBits) =>
  ((input << nBits) & MASK_64) | (input >> (64n - nBits));
// 64-bit wrapping multiply / add.
const multiply = (a, b) => (a * b) & MASK_64;
const add = (a, b) => (a + b) & MASK_64;
// The five xxhash64 prime constants.
const PRIME64_1 = 11400714785074694791n;
const PRIME64_2 = 14029467366897019727n;
const PRIME64_3 = 1609587929392839161n;
const PRIME64_4 = 9650029242287828579n;
const PRIME64_5 = 2870177450012600261n;
// Pure-BigInt port of xxhash64 (XXH64): `input` is a Uint8Array, the
// result is the 64-bit digest as a BigInt.
function h64(input, seed = 0n) {
  // The four accumulator lanes, initialized per the XXH64 spec.
  let v1 = add(add(seed, PRIME64_1), PRIME64_2);
  let v2 = add(seed, PRIME64_2);
  let v3 = seed;
  let v4 = seed - PRIME64_1;
  let totalLen = input.length;
  let memsize = 0;
  let memory = null;
  // Single-shot "update" pass: consumes all full 32-byte stripes into
  // v1..v4 and copies the (< 32 byte) tail into `memory` for finalization.
  (function update() {
    let p2 = 0;
    let bEnd = p2 + totalLen;
    if (!totalLen) return;
    memory = new Uint8Array(32);
    if (totalLen < 32) {
      memory.set(input.subarray(0, totalLen), memsize);
      memsize += totalLen;
      return;
    }
    if (p2 <= bEnd - 32) {
      const limit = bEnd - 32;
      do {
        // Each lane consumes one little-endian u64 per stripe.
        let other;
        other = bigintFromU16(
          input[p2 + 1] << 8 | input[p2],
          input[p2 + 3] << 8 | input[p2 + 2],
          input[p2 + 5] << 8 | input[p2 + 4],
          input[p2 + 7] << 8 | input[p2 + 6]
        );
        v1 = multiply(rotl(add(v1, multiply(other, PRIME64_2)), 31n), PRIME64_1);
        p2 += 8;
        other = bigintFromU16(
          input[p2 + 1] << 8 | input[p2],
          input[p2 + 3] << 8 | input[p2 + 2],
          input[p2 + 5] << 8 | input[p2 + 4],
          input[p2 + 7] << 8 | input[p2 + 6]
        );
        v2 = multiply(rotl(add(v2, multiply(other, PRIME64_2)), 31n), PRIME64_1);
        p2 += 8;
        other = bigintFromU16(
          input[p2 + 1] << 8 | input[p2],
          input[p2 + 3] << 8 | input[p2 + 2],
          input[p2 + 5] << 8 | input[p2 + 4],
          input[p2 + 7] << 8 | input[p2 + 6]
        );
        v3 = multiply(rotl(add(v3, multiply(other, PRIME64_2)), 31n), PRIME64_1);
        p2 += 8;
        other = bigintFromU16(
          input[p2 + 1] << 8 | input[p2],
          input[p2 + 3] << 8 | input[p2 + 2],
          input[p2 + 5] << 8 | input[p2 + 4],
          input[p2 + 7] << 8 | input[p2 + 6]
        );
        v4 = multiply(rotl(add(v4, multiply(other, PRIME64_2)), 31n), PRIME64_1);
        p2 += 8;
      } while (p2 <= limit);
    }
    // Stash the remaining tail bytes for the finalization loops below.
    if (p2 < bEnd) {
      memory.set(input.subarray(p2, bEnd), memsize);
      memsize = bEnd - p2;
    }
  })();
  // From here on only the buffered tail is read.
  input = memory || input;
  let result;
  let p = 0;
  if (totalLen >= 32) {
    // Merge the four lanes into the accumulator.
    result = rotl(v1, 1n);
    result = add(result, rotl(v2, 7n));
    result = add(result, rotl(v3, 12n));
    result = add(result, rotl(v4, 18n));
    v1 = multiply(rotl(multiply(v1, PRIME64_2), 31n), PRIME64_1);
    result = result ^ v1;
    result = add(multiply(result, PRIME64_1), PRIME64_4);
    v2 = multiply(rotl(multiply(v2, PRIME64_2), 31n), PRIME64_1);
    result = result ^ v2;
    result = add(multiply(result, PRIME64_1), PRIME64_4);
    v3 = multiply(rotl(multiply(v3, PRIME64_2), 31n), PRIME64_1);
    result = result ^ v3;
    result = add(multiply(result, PRIME64_1), PRIME64_4);
    v4 = multiply(rotl(multiply(v4, PRIME64_2), 31n), PRIME64_1);
    result = result ^ v4;
    result = add(multiply(result, PRIME64_1), PRIME64_4);
  } else {
    // Short inputs skip the striping phase entirely.
    result = add(seed, PRIME64_5);
  }
  result = add(result, BigInt(totalLen));
  // Finalize: fold remaining 8-byte, 4-byte, then single-byte chunks.
  while (p <= memsize - 8) {
    let temp2 = bigintFromU16(
      input[p + 1] << 8 | input[p],
      input[p + 3] << 8 | input[p + 2],
      input[p + 5] << 8 | input[p + 4],
      input[p + 7] << 8 | input[p + 6]
    );
    temp2 = multiply(rotl(multiply(temp2, PRIME64_2), 31n), PRIME64_1);
    result = add(multiply(rotl(result ^ temp2, 27n), PRIME64_1), PRIME64_4);
    p += 8;
  }
  if (p + 4 <= memsize) {
    let temp2 = multiply(
      bigintFromU16(
        input[p + 1] << 8 | input[p],
        input[p + 3] << 8 | input[p + 2],
        0,
        0
      ),
      PRIME64_1
    );
    result = add(multiply(rotl(result ^ temp2, 23n), PRIME64_2), PRIME64_3);
    p += 4;
  }
  while (p < memsize) {
    const temp2 = multiply(bigintFromU16(input[p++], 0, 0, 0), PRIME64_5);
    result = multiply(rotl(result ^ temp2, 11n), PRIME64_1);
  }
  // Final avalanche mix.
  let temp = result >> 33n;
  result = multiply(result ^ temp, PRIME64_2);
  temp = result >> 29n;
  result = multiply(result ^ temp, PRIME64_3);
  temp = result >> 32n;
  result ^= temp;
  return result;
}
// Twox128/Twox256: concatenation of xxhash64 runs with seeds 0,1(,2,3),
// each digest written little-endian.
const Twox128 = (input) => {
  const result = new Uint8Array(16);
  const dv = new DataView(result.buffer);
  dv.setBigUint64(0, h64(input), true);
  dv.setBigUint64(8, h64(input, 1n), true);
  return result;
};
const Twox256 = (input) => {
  const result = new Uint8Array(32);
  const dv = new DataView(result.buffer);
  dv.setBigUint64(0, h64(input), true);
  dv.setBigUint64(8, h64(input, 1n), true);
  dv.setBigUint64(16, h64(input, 2n), true);
  dv.setBigUint64(24, h64(input, 3n), true);
  return result;
};
// Reversible storage hasher: 8-byte xxhash64 prefix + the original bytes.
const Twox64Concat = (encoded) => utils.mergeUint8([scaleTs.u64.enc(h64(encoded)), encoded]);
const Keccak256 = sha3_js.keccak_256;
/**
 * Applies EIP-55 mixed-case checksum formatting to a lowercase "0x…" hex
 * address: each hex character is uppercased when the corresponding nibble
 * of keccak256(address-without-0x) is greater than 7.
 */
const getFormattedAddress = (hexAddress) => {
  const body = hexAddress.slice(2);
  const hashed = utils.toHex(Keccak256(textEncoder$2.encode(body))).slice(2);
  let formatted = "";
  for (let i = 0; i < 40; i++) {
    const ch = body[i];
    formatted += parseInt(hashed[i], 16) > 7 ? ch.toUpperCase() : ch;
  }
  return `0x${formatted}`;
};
const bytes20Dec = scaleTs.Bytes(20)[1];
// Codec for 20-byte Ethereum addresses. Encoding accepts all-lowercase,
// all-uppercase, or EIP-55 checksummed hex input; decoding always emits
// the EIP-55 checksummed form.
const ethAccount = scaleTs.createCodec(
  (input) => {
    const bytes = utils.fromHex(input);
    if (bytes.length !== 20)
      throw new Error(`Invalid length found on EthAddress(${input})`);
    const hexAddress = utils.toHex(bytes);
    // NOTE(review): `hexAddress.toUpperCase()` also uppercases the "0x"
    // prefix, so the all-uppercase branch only matches "0X…" inputs —
    // confirm this is the intended behavior.
    if (input === hexAddress || input === hexAddress.toUpperCase()) return bytes;
    if (getFormattedAddress(hexAddress) !== input)
      throw new Error(`Invalid checksum found on EthAddress(${input})`);
    return bytes;
  },
  scaleTs.createDecoder((bytes) => getFormattedAddress(utils.toHex(bytes20Dec(bytes))))
);
// scale-ts composite codecs re-exported with their definitions attached
// as `.inner` (consumed by metadata builders downstream).
const Struct = (codecs) => withInner(scaleTs.Struct(codecs), codecs);
Struct.enc = (x) => withInner(scaleTs.Struct.enc(x), x);
Struct.dec = (x) => withInner(scaleTs.Struct.dec(x), x);
const Tuple = (...inner) => withInner(scaleTs.Tuple(...inner), inner);
Tuple.enc = (...inner) => withInner(scaleTs.Tuple.enc(...inner), inner);
Tuple.dec = (...inner) => withInner(scaleTs.Tuple.dec(...inner), inner);
const Vector = (inner, ...rest) => withInner(scaleTs.Vector(inner, ...rest), inner);
Vector.enc = (inner, ...rest) => withInner(scaleTs.Vector.enc(inner, ...rest), inner);
Vector.dec = (inner, ...rest) => withInner(scaleTs.Vector.dec(inner, ...rest), inner);
const Result = (ok, ko) => withInner(scaleTs.Result(ok, ko), { ok, ko });
Result.enc = (ok, ko) => withInner(scaleTs.Result.enc(ok, ko), { ok, ko });
Result.dec = (ok, ko) => withInner(scaleTs.Result.dec(ok, ko), { ok, ko });
const Option = (inner) => withInner(scaleTs.Option(inner), inner);
Option.enc = (inner) => withInner(scaleTs.Option.enc(inner), inner);
Option.dec = (inner) => withInner(scaleTs.Option.dec(inner), inner);
// Bit sequence codec that surfaces each bit as a 0/1 array entry; `isLsb`
// selects least-significant-bit-first (default) vs msb-first bit order.
const dec = (isLsb = true) => scaleTs.createDecoder((data) => {
  const bitsLen = compactNumber.dec(data);
  const bytesLen = Math.ceil(bitsLen / 8);
  const bytes = scaleTs.Bytes(bytesLen).dec(data);
  const result = new Array(bitsLen);
  let resultIdx = 0;
  bytes.forEach((val) => {
    for (let i = 0; i < 8 && resultIdx < bitsLen; i++) {
      const actualIdx = isLsb ? i : 7 - i;
      result[resultIdx++] = val >> actualIdx & 1;
    }
  });
  return result;
});
const enc = (isLsb = true) => (input) => {
  const lenEncoded = compactNumber.enc(input.length);
  const nBytes = Math.ceil(input.length / 8);
  const bytes = new Uint8Array(nBytes);
  for (let byteIdx = 0; byteIdx < nBytes; byteIdx++) {
    let inputIdx = byteIdx * 8;
    let byte = 0;
    for (let i = 0; i < 8 && inputIdx < input.length; i++, inputIdx++)
      byte |= input[inputIdx] << (isLsb ? i : 7 - i);
    bytes[byteIdx] = byte;
  }
  return utils.mergeUint8([lenEncoded, bytes]);
};
const BitSeq = (isLsb) => scaleTs.createCodec(enc(isLsb), dec(isLsb));
BitSeq.enc = enc;
BitSeq.dec = dec;
const textEncoder$1 = new TextEncoder();
const textDecoder = new TextDecoder();
// 4-byte ASCII tag codec (e.g. consensus-engine ids).
const fourChars = scaleTs.enhanceCodec(
  scaleTs.Bytes(4),
  textEncoder$1.encode.bind(textEncoder$1),
  textDecoder.decode.bind(textDecoder)
);
// Block-header digest item payload: engine id + opaque hex payload.
const diggestVal = Struct({
  engine: fourChars,
  payload: Hex()
});
const diggest = Variant(
  {
    other: scaleTs.Bytes(),
    consensus: diggestVal,
    seal: diggestVal,
    preRuntime: diggestVal,
    runtimeUpdated: scaleTs._void
  },
  // Explicit discriminant indices of the runtime's DigestItem enum.
  [0, 4, 5, 6, 8]
);
const hex32$1 = Hex(32);
// Substrate block-header codec.
const blockHeader = Struct({
  parentHash: hex32$1,
  number: compactNumber,
  stateRoot: hex32$1,
  extrinsicRoot: hex32$1,
  digests: Vector(diggest)
});
// ---- metadata type-registry ("lookup") codecs ----
const docs = scaleTs.Vector(scaleTs.str);
const oStr = scaleTs.Option(scaleTs.str);
const primitive = scaleTs.Enum({
  bool: scaleTs._void,
  char: scaleTs._void,
  str: scaleTs._void,
  u8: scaleTs._void,
  u16: scaleTs._void,
  u32: scaleTs._void,
  u64: scaleTs._void,
  u128: scaleTs._void,
  u256: scaleTs._void,
  i8: scaleTs._void,
  i16: scaleTs._void,
  i32: scaleTs._void,
  i64: scaleTs._void,
  i128: scaleTs._void,
  i256: scaleTs._void
});
const fields = scaleTs.Vector(
  scaleTs.Struct({
    name: oStr,
    type: compactNumber,
    typeName: oStr,
    docs
  })
);
const arr = scaleTs.Struct({
  len: scaleTs.u32,
  type: compactNumber
});
const bitSequence = scaleTs.Struct({
  bitStoreType: compactNumber,
  bitOrderType: compactNumber
});
const variant = scaleTs.Vector(
  scaleTs.Struct({
    name: scaleTs.str,
    fields,
    index: scaleTs.u8,
    docs
  })
);
// Type-definition union (scale-info TypeDef).
const def = scaleTs.Enum({
  composite: fields,
  variant,
  sequence: compactNumber,
  array: arr,
  tuple: scaleTs.Vector(compactNumber),
  primitive,
  compact: compactNumber,
  bitSequence
});
const param = scaleTs.Struct({
  name: scaleTs.str,
  type: scaleTs.Option(compactNumber)
});
const params = scaleTs.Vector(param);
// One registry entry: id + path + generic params + definition + docs.
const entry = scaleTs.Struct({
  id: compactNumber,
  path: docs,
  params,
  def,
  docs
});
const lookup = scaleTs.Vector(entry);
// Deprecation metadata introduced in metadata v16.
const itemDeprecation = scaleTs.Enum({
  NotDeprecated: scaleTs._void,
  DeprecatedWithoutNote: scaleTs._void,
  Deprecated: scaleTs.Struct({
    note: scaleTs.str,
    since: scaleTs.Option(scaleTs.str)
  })
});
// Per-variant deprecation list; discriminants [1, 2] match the
// DeprecatedWithoutNote/Deprecated indices of itemDeprecation.
const variantDeprecation = scaleTs.Vector(
  scaleTs.Struct({
    index: scaleTs.u8,
    deprecation: scaleTs.Enum(
      {
        DeprecatedWithoutNote: scaleTs._void,
        Deprecated: scaleTs.Struct({
          note: scaleTs.str,
          since: scaleTs.Option(scaleTs.str)
        })
      },
      [1, 2]
    )
  })
);
// Shared shape of one runtime-API method entry.
const runtimeApiMethod = {
  name: scaleTs.str,
  inputs: scaleTs.Vector(
    scaleTs.Struct({
      name: scaleTs.str,
      type: compactNumber
    })
  ),
  output: compactNumber,
  docs
};
const runtimeApiV15 = scaleTs.Struct({
  name: scaleTs.str,
  methods: scaleTs.Vector(scaleTs.Struct(runtimeApiMethod)),
  docs
});
// v16 runtime API adds version and deprecation information.
const runtimeApi = scaleTs.Struct({
  name: scaleTs.str,
  methods: scaleTs.Vector(
    scaleTs.Struct({ ...runtimeApiMethod, deprecationInfo: itemDeprecation })
  ),
  docs,
  version: compactNumber,
  deprecationInfo: itemDeprecation
});
// v16 view function: method shape plus a 32-byte id.
const viewFunction = scaleTs.Struct({
  id: Hex(32),
  ...runtimeApiMethod,
  deprecationInfo: itemDeprecation
});
// Storage hasher discriminants as they appear in pallet storage metadata.
const hashType = scaleTs.Enum({
  Blake2128: scaleTs._void,
  Blake2256: scaleTs._void,
  Blake2128Concat: scaleTs._void,
  Twox128: scaleTs._void,
  Twox256: scaleTs._void,
  Twox64Concat: scaleTs._void,
  Identity: scaleTs._void
});
const hashers$1 = scaleTs.Vector(hashType);
const storageMap = scaleTs.Struct({
  hashers: hashers$1,
  key: compactNumber,
  value: compactNumber
});
// One storage entry: either a plain value or a (possibly multi-key) map.
const storageItem = {
  name: scaleTs.str,
  modifier: scaleTs.u8,
  type: scaleTs.Enum({
    plain: compactNumber,
    map: storageMap
  }),
  fallback: Hex(),
  docs
};
// v14/v15 reference calls/events/errors enums by optional type id.
const enumRefV14 = scaleTs.Option(compactNumber);
// Pallet shape for metadata v14.
const v14Pallet = {
  name: scaleTs.str,
  storage: scaleTs.Option(
    scaleTs.Struct({
      prefix: scaleTs.str,
      items: scaleTs.Vector(scaleTs.Struct(storageItem))
    })
  ),
  calls: enumRefV14,
  events: enumRefV14,
  constants: scaleTs.Vector(
    scaleTs.Struct({
      name: scaleTs.str,
      type: compactNumber,
      value: Hex(),
      docs
    })
  ),
  errors: enumRefV14,
  index: scaleTs.u8
};
// v15 adds pallet-level docs.
const v15Pallet = {
  ...v14Pallet,
  docs
};
// v16 enum references carry deprecation info alongside the type id.
const enumRef = scaleTs.Option(
  scaleTs.Struct({ type: compactNumber, deprecationInfo: variantDeprecation })
);
// Pallet shape for metadata v16.
const v16Pallet = {
  name: scaleTs.str,
  storage: scaleTs.Option(
    scaleTs.Struct({
      prefix: scaleTs.str,
      items: scaleTs.Vector(
        scaleTs.Struct({
          ...storageItem,
          deprecationInfo: itemDeprecation
        })
      )
    })
  ),
  calls: enumRef,
  events: enumRef,
  constants: scaleTs.Vector(
    scaleTs.Struct({
      name: scaleTs.str,
      type: compactNumber,
      value: Hex(),
      docs,
      deprecationInfo: itemDeprecation
    })
  ),
  errors: enumRef,
  associatedTypes: scaleTs.Vector(
    scaleTs.Struct({
      name: scaleTs.str,
      type: compactNumber,
      docs
    })
  ),
  viewFns: scaleTs.Vector(viewFunction),
  index: scaleTs.u8,
  docs,
  deprecationInfo: itemDeprecation
};
const empty = new Uint8Array();
// Codec that encodes to nothing and always decodes to a constant; used to
// backfill fields that are missing from older metadata versions.
const Always = (value) => scaleTs.createCodec(
  () => empty,
  () => value
);
// v14 extrinsic metadata.
const extrinsic$2 = scaleTs.Struct({
  type: compactNumber,
  version: scaleTs.u8,
  signedExtensions: scaleTs.Vector(
    scaleTs.Struct({
      identifier: scaleTs.str,
      type: compactNumber,
      additionalSigned: compactNumber
    })
  )
});
const v14 = scaleTs.Struct({
  lookup,
  pallets: scaleTs.Vector(scaleTs.Struct({ ...v14Pallet, docs: Always([]) })),
  extrinsic: extrinsic$2,
  type: compactNumber,
  apis: Always([])
});
// v15 extrinsic metadata: adds address/call/signature/extra type ids.
const extrinsic$1 = scaleTs.Struct({
  version: scaleTs.u8,
  address: compactNumber,
  call: compactNumber,
  signature: compactNumber,
  extra: compactNumber,
  signedExtensions: scaleTs.Vector(
    scaleTs.Struct({
      identifier: scaleTs.str,
      type: compactNumber,
      additionalSigned: compactNumber
    })
  )
});
const v15 = scaleTs.Struct({
  lookup,
  pallets: scaleTs.Vector(scaleTs.Struct(v15Pallet)),
  extrinsic: extrinsic$1,
  type: compactNumber,
  apis: scaleTs.Vector(runtimeApiV15),
  outerEnums: scaleTs.Struct({
    call: compactNumber,
    event: compactNumber,
    error: compactNumber
  }),
  custom: scaleTs.Vector(scaleTs.Tuple(scaleTs.str, scaleTs.Struct({ type: compactNumber, value: Hex() })))
});
// v16 extrinsic metadata: multiple supported versions + per-version
// signed-extension index lists.
const extrinsic = scaleTs.Struct({
  version: scaleTs.Vector(scaleTs.u8),
  address: compactNumber,
  call: compactNumber,
  signature: compactNumber,
  signedExtensionsByVersion: scaleTs.Vector(scaleTs.Tuple(scaleTs.u8, scaleTs.Vector(compactNumber))),
  signedExtensions: scaleTs.Vector(
    scaleTs.Struct({
      identifier: scaleTs.str,
      type: compactNumber,
      additionalSigned: compactNumber
    })
  )
});
const v16 = scaleTs.Struct({
  lookup,
  pallets: scaleTs.Vector(scaleTs.Struct(v16Pallet)),
  extrinsic,
  apis: scaleTs.Vector(runtimeApi),
  outerEnums: scaleTs.Struct({
    call: compactNumber,
    event: compactNumber,
    error: compactNumber
  }),
  custom: scaleTs.Vector(scaleTs.Tuple(scaleTs.str, scaleTs.Struct({ type: compactNumber, value: Hex() })))
});
// Both encode and decode reject metadata versions before v14.
const unsupportedFn = () => {
  throw new Error("Unsupported metadata version!");
};
const unsupported = scaleTs.createCodec(
  unsupportedFn,
  unsupportedFn
);
// Top-level metadata codec: magic number + versioned enum.
const metadata = scaleTs.Struct({
  magicNumber: scaleTs.u32,
  metadata: scaleTs.Enum({
    v0: unsupported,
    v1: unsupported,
    v2: unsupported,
    v3: unsupported,
    v4: unsupported,
    v5: unsupported,
    v6: unsupported,
    v7: unsupported,
    v8: unsupported,
    v9: unsupported,
    v10: unsupported,
    v11: unsupported,
    v12: unsupported,
    v13: unsupported,
    v14,
    v15,
    v16
  })
});
const opaqueBytes = scaleTs.Bytes();
const optionOpaque = scaleTs.Option(opaqueBytes);
const opaqueOpaqueBytes = scaleTs.Tuple(scaleTs.compact, opaqueBytes);
// Decodes metadata from any of the common wrappings, trying in order:
// raw, Option<opaque>, opaque, and doubly-opaque bytes. The empty catches
// are deliberate best-effort fallthroughs between strategies.
const decAnyMetadata = (input) => {
  try {
    return metadata.dec(input);
  } catch (_) {
  }
  try {
    return metadata.dec(optionOpaque.dec(input));
  } catch (_) {
  }
  try {
    return metadata.dec(opaqueBytes.dec(input));
  } catch (_) {
  }
  try {
    return metadata.dec(opaqueOpaqueBytes.dec(input)[1]);
  } catch (_) {
  }
  // NOTE(review): throws `null` (not an Error) when all strategies fail —
  // downstream callers may rely on this exact value; confirm before
  // changing it.
  throw null;
};
/**
 * Normalizes decoded metadata (v14/v15/v16, optionally still wrapped in
 * { magicNumber, metadata } and/or a { tag, value } enum) into one unified
 * shape tagged with a numeric `version` field.
 * @throws {Error} for metadata versions other than 14, 15 and 16.
 */
const unifyMetadata = (metadata) => {
  if ("magicNumber" in metadata) metadata = metadata.metadata;
  if ("tag" in metadata) {
    if (metadata.tag !== "v14" && metadata.tag !== "v15" && metadata.tag !== "v16")
      throw new Error("Only metadata 14, 15, and 16 are supported");
    metadata = metadata.value;
  }
  // v16 already has the unified shape.
  if ("signedExtensionsByVersion" in metadata.extrinsic)
    return { version: 16, ...metadata };
  // Upgrades a v14/v15 pallet entry: bare enum type ids become { type }
  // records, and the v16-only collections default to empty.
  const upgradePallet = (p) => ({
    ...p,
    calls: p.calls != null ? { type: p.calls } : void 0,
    events: p.events != null ? { type: p.events } : void 0,
    errors: p.errors != null ? { type: p.errors } : void 0,
    viewFns: [],
    associatedTypes: []
  });
  // Only v15 carries a `custom` section.
  if ("custom" in metadata) {
    return {
      version: 15,
      lookup: metadata.lookup,
      pallets: metadata.pallets.map(upgradePallet),
      extrinsic: { ...metadata.extrinsic, version: [metadata.extrinsic.version] },
      apis: metadata.apis,
      outerEnums: metadata.outerEnums,
      custom: metadata.custom
    };
  }
  return {
    version: 14,
    lookup: metadata.lookup,
    pallets: metadata.pallets.map(upgradePallet),
    extrinsic: { ...metadata.extrinsic, version: [metadata.extrinsic.version] },
    apis: []
  };
};
// Bidirectional map between extrinsic "type" names and the top-2-bit
// values of the extrinsic format byte.
const TYPES = {
  bare: 0,
  0: "bare",
  general: 1,
  1: "general",
  signed: 2,
  2: "signed"
};
// Packs/unpacks the extrinsic format byte: low 6 bits = version, high
// 2 bits = type. Only v4 bare/signed and v5 bare/general are accepted.
const extrinsicFormat = scaleTs.enhanceCodec(
  scaleTs.u8,
  ({ version, type }) => version + (TYPES[type] << 6),
  (v) => {
    const version = v & 63;
    const type = v >> 6;
    if (version === 4 && (type === TYPES.bare || type === TYPES.signed))
      return { version, type: TYPES[type] };
    if (version === 5 && (type === TYPES.bare || type === TYPES.general))
      return { version, type: TYPES[type] };
    throw new Error(`ExtrinsicFormat ${v} not valid`);
  }
);
const textEncoder = new TextEncoder();
// Bytes each hasher contributes to a storage key. Negative values mark
// irreversible digests (the key segment can only be surfaced as hex);
// positive values are the digest-prefix length of "Concat" hashers whose
// pre-image follows the digest and can be fully decoded; 0 = Identity.
const hashers = /* @__PURE__ */ new Map([
  [Identity, 0],
  [Twox64Concat, 8],
  [Blake2128Concat, 16],
  [Blake2128, -16],
  [Blake2256, -32],
  [Twox128, -16],
  [Twox256, -32]
]);
// Storage-key codec factory: Storage(pallet)(name, ...[codec, hasher])
// yields { enc, dec } mapping key arguments to/from the full hex storage
// key (twox128(pallet) ++ twox128(name) ++ hashed args).
const Storage = (pallet) => {
  const palledEncoded = Twox128(textEncoder.encode(pallet));
  return (name, ...encoders) => {
    const palletItemEncoded = utils.mergeUint8([
      palledEncoded,
      Twox128(textEncoder.encode(name))
    ]);
    const palletItemEncodedHex = utils.toHex(palletItemEncoded);
    // Decodes a full hex storage key back into its argument values
    // (hex strings for irreversible hashers).
    const dec = (key) => {
      if (!key.startsWith(palletItemEncodedHex))
        throw new Error(`key does not match this storage (${pallet}.${name})`);
      if (encoders.length === 0) return [];
      const argsKey = utils.fromHex(key.slice(palletItemEncodedHex.length));
      const result = new Array(encoders.length);
      for (let i = 0, cur = 0; i < encoders.length; i++) {
        const [codec, hasher] = encoders[i];
        const hBytes = hashers.get(hasher);
        if (hBytes == null) throw new Error("Unknown hasher");
        if (hBytes < 0) {
          // Irreversible digest: surface the raw digest bytes as hex.
          const opaqueBytes = hBytes * -1;
          result[i] = utils.toHex(argsKey.slice(cur, cur + opaqueBytes));
          cur += opaqueBytes;
        } else {
          // Concat hasher: skip the digest, decode the appended pre-image.
          cur += hBytes;
          result[i] = codec.dec(argsKey.slice(cur));
          // Re-encode to learn how many bytes the value consumed.
          cur += codec.enc(result[i]).length;
        }
      }
      return result;
    };
    const fns = encoders.map(
      ([{ enc: enc2 }, hash]) => (val) => hash(enc2(val))
    );
    const enc = (...args) => utils.toHex(
      utils.mergeUint8([
        palletItemEncoded,
        ...args.map((val, idx) => fns[idx](val))
      ])
    );
    return {
      enc,
      dec
    };
  };
};
// Module prefix used by Substrate for multisig account derivation.
const PREFIX = Binary.fromText("modlpy/utilisuba").asBytes();
// Deterministic multisig account id:
// blake2b-256(PREFIX ++ compact(len) ++ sorted signatories ++ u16 threshold).
const getMultisigAccountId = ({
  threshold,
  signatories
}) => {
  const sortedSignatories = sortMultisigSignatories(signatories);
  const payload = utils.mergeUint8([
    PREFIX,
    scaleTs.compact.enc(sortedSignatories.length),
    ...sortedSignatories,
    scaleTs.u16.enc(threshold)
  ]);
  return Blake2256(payload);
};
/**
 * Returns a new array with the signatories in byte-wise lexicographic
 * order (a shorter key sorts before any key it is a prefix of), as
 * required for deterministic multisig account derivation.
 */
const sortMultisigSignatories = (signatories) =>
  [...signatories].sort((x, y) => {
    const shared = Math.min(x.length, y.length);
    for (let i = 0; i < shared; i++) {
      if (x[i] !== y[i]) return x[i] > y[i] ? 1 : -1;
    }
    if (x.length === y.length) return 0;
    return x.length < y.length ? -1 : 1;
  });
// Node kinds of the base-16 Merkle-Patricia trie proof format.
const TrieNodeHeaders = {
  Leaf: "Leaf",
  Branch: "Branch",
  BranchWithVal: "BranchWithVal",
  LeafWithHash: "LeafWithHash",
  BranchWithHash: "BranchWithHash",
  Empty: "Empty",
  Reserved: "Reserved"
};
const varHex = Hex().dec;
const allHex = Hex(Infinity).dec;
const hex32 = Hex(32).dec;
const byte = scaleTs.u8.dec;
// Parses a node header: a variable-width type discriminant in the top bits
// of the first byte, then the partial-key nibble count (extended by
// 255-saturated continuation bytes when the count field is full).
const getHeader = (bytes) => {
  const firstByte = byte(bytes);
  let bitsLeft = 6;
  const typeId = firstByte >> bitsLeft;
  const type = typeId ? typeId === 1 ? TrieNodeHeaders.Leaf : typeId === 2 ? TrieNodeHeaders.Branch : TrieNodeHeaders.BranchWithVal : firstByte >> --bitsLeft ? TrieNodeHeaders.LeafWithHash : firstByte >> --bitsLeft ? TrieNodeHeaders.BranchWithHash : firstByte ? TrieNodeHeaders.Reserved : TrieNodeHeaders.Empty;
  // The remaining low bits hold the nibble count.
  let nNibles = firstByte & 255 >> 8 - bitsLeft;
  if (nNibles === 2 ** bitsLeft - 1) {
    // Saturated: keep adding continuation bytes while they read 255.
    let current;
    do
      nNibles += current = byte(bytes);
    while (current === 255);
  }
  return {
    type,
    // Odd nibble counts drop the leading padding nibble of the hex string.
    partialKey: Hex(Math.ceil(nNibles / 2)).dec(bytes).slice(nNibles % 2 ? 3 : 2)
  };
};
// Decodes one trie node: header, then (depending on kind) a value and/or a
// 16-bit bitmap of present children followed by one ref per set bit.
const trieNodeDec = scaleTs.createDecoder((bytes) => {
  const header = getHeader(bytes);
  const { type } = header;
  if (type === "Empty" || type === "Reserved") return header;
  if (type === "Leaf" || type === "LeafWithHash")
    return {
      ...header,
      value: allHex(bytes)
    };
  const bitmap = scaleTs.u16.dec(bytes);
  const keys = [];
  for (let i = 0; i < 16; i++) if (bitmap >> i & 1) keys.push(i.toString(16));
  let value = null;
  if (type === "BranchWithVal") value = varHex(bytes);
  if (type === "BranchWithHash") value = hex32(bytes);
  const result = {
    ...header,
    children: Object.fromEntries(keys.map((key) => [key, varHex(bytes)]))
  };
  if (value !== null) result.value = value;
  return result;
});
// Decodes a node and tags it with its own hash; undecodable nodes are
// returned as type "Raw".
// NOTE(review): hashes `input.buffer` in full — this assumes `input` is
// not a subarray view over a larger buffer; confirm at call sites.
const TrieNodeWithHash = (hasher) => scaleTs.createDecoder((input) => {
  const hash = utils.toHex(hasher(new Uint8Array(input.buffer)));
  try {
    return {
      hash,
      ...trieNodeDec(input)
    };
  } catch {
    return { type: "Raw", hash, value: "" };
  }
});
// Links proof nodes into a tree by hash: sets `parent` pointers, resolves
// hash-referenced raw values, and returns the unique root plus the node
// record — or null when the proofs do not form exactly one tree.
const validateProofs = (proofs, hasher = Blake2256) => {
  const proofsList = proofs.map(TrieNodeWithHash(hasher));
  const proofsRecord = Object.fromEntries(proofsList.map((p) => [p.hash, p]));
  const hashes = proofsList.map((p) => p.hash);
  // Start with every node as a root candidate; referenced nodes drop out.
  const roots = new Set(hashes);
  const setRawValue = (input) => {
    if (input.value) return;
    const val = proofs[hashes.indexOf(input.hash)];
    input.value = typeof val === "string" ? val : utils.toHex(val);
  };
  proofsList.forEach((p) => {
    if ("children" in p) {
      Object.values(p.children).forEach((hash) => {
        const child = proofsRecord[hash];
        if (child) {
          child.parent = p.hash;
          roots.delete(hash);
        }
      });
    }
    if (p.type === "BranchWithHash" || p.type === "LeafWithHash") {
      const childHash = p.value;
      const child = proofsRecord[childHash];
      if (!child) return;
      roots.delete(childHash);
      if (child.type !== "Raw") {
        // Hash-referenced entries are value blobs, not structural nodes:
        // rewrite the entry in place as a Raw value holder.
        Object.keys(child).forEach((k) => delete child[k]);
        child.type = "Raw";
        child.hash = childHash;
      }
      child.parent = p.hash;
      setRawValue(child);
    }
    if (p.type === "Raw") setRawValue(p);
  });
  return roots.size === 1 ? { rootHash: roots.values().next().value, proofs: proofsRecord } : null;
};
// Re-export a fixed set of scale-ts codecs as live, enumerable getters so
// consumers always observe the upstream bindings.
for (const codecName of [
  "Bytes",
  "_void",
  "bool",
  "compact",
  "createCodec",
  "createDecoder",
  "enhanceCodec",
  "enhanceDecoder",
  "enhanceEncoder",
  "i128",
  "i16",
  "i256",
  "i32",
  "i64",
  "i8",
  "str",
  "u128",
  "u16",
  "u256",
  "u32",
  "u64",
  "u8"
]) {
  Object.defineProperty(exports, codecName, {
    enumerable: true,
    get: () => scaleTs[codecName]
  });
}
// Exports defined in this module (as opposed to the scale-ts re-exports
// above). Two entries are aliases: `bitSequence` maps to the local
// bitSequence$1 binding and `v14Lookup` maps to `lookup`.
Object.assign(exports, {
  AccountId,
  Bin,
  Binary,
  BitSeq,
  Blake2128,
  Blake2128Concat,
  Blake2256,
  Blake3256,
  Blake3256Concat,
  Enum,
  FixedSizeBinary,
  Hex,
  Identity,
  Keccak256,
  Option,
  Result,
  ScaleEnum,
  Self,
  Storage,
  Struct,
  TrieNodeHeaders,
  TrieNodeWithHash,
  Tuple,
  Twox128,
  Twox256,
  Twox64Concat,
  Variant,
  Vector,
  _Enum,
  bitSequence: bitSequence$1,
  blockHeader,
  char,
  compactBn,
  compactNumber,
  decAnyMetadata,
  ethAccount,
  extrinsicFormat,
  fixedStr,
  fromBufferToBase58,
  getMultisigAccountId,
  getSs58AddressInfo,
  h64,
  metadata,
  selfDecoder,
  selfEncoder,
  sortMultisigSignatories,
  trieNodeDec,
  unifyMetadata,
  v14,
  v14Lookup: lookup,
  v15,
  v16,
  validateProofs
});
//# sourceMappingURL=index.js.map