feat: add PAPI rebrand packages

- @pezkuwi/papi-utils (rebrand of @polkadot-api/utils)
- @pezkuwi/bizinikiwi-bindings (rebrand of @polkadot-api/substrate-bindings)
- @pezkuwi/metadata-builders (rebrand of @polkadot-api/metadata-builders)
- @pezkuwi/merkleize-metadata (rebrand of @polkadot-api/merkleize-metadata)

All @polkadot-api references replaced with @pezkuwi equivalents.
This commit is contained in:
2026-01-22 15:40:12 +03:00
parent 6e91756e5c
commit 31467f90d4
150 changed files with 22742 additions and 0 deletions
+21
View File
@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2023 Josep M Sobrepere
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+1
View File
@@ -0,0 +1 @@
# @pezkuwi/bizinikiwi-bindings
@@ -0,0 +1,45 @@
import { enhanceCodec, Bytes, _void } from 'scale-ts';
import '../utils/ss58-util.mjs';
import './scale/Binary.mjs';
import './scale/bitSequence.mjs';
import './scale/char.mjs';
import { compactNumber } from './scale/compact.mjs';
import { Hex } from './scale/Hex.mjs';
import './scale/fixed-str.mjs';
import { Variant } from './scale/Variant.mjs';
import './scale/ethAccount.mjs';
import { Struct, Vector } from './scale/shaped.mjs';
import './scale/BitSeq.mjs';
// Text codecs used to (de)serialize the 4-byte engine identifier as a string.
const textEncoder$1 = new TextEncoder();
const textDecoder = new TextDecoder();

// Codec for a fixed 4-character engine identifier string.
const fourChars = enhanceCodec(
  Bytes(4),
  textEncoder$1.encode.bind(textEncoder$1),
  textDecoder.decode.bind(textDecoder)
);

// Payload of a digest item: the engine id plus its opaque hex payload.
// NOTE(review): "diggest" is a typo for "digest" carried over from the
// upstream sources; kept as-is because this is generated output.
const diggestVal = Struct({
  engine: fourChars,
  payload: Hex()
});

// Digest-item variant codec; the explicit list [0, 4, 5, 6, 8] assigns each
// variant its (non-contiguous) on-wire discriminant.
const diggest = Variant(
  {
    other: Bytes(),
    consensus: diggestVal,
    seal: diggestVal,
    preRuntime: diggestVal,
    runtimeUpdated: _void
  },
  [0, 4, 5, 6, 8]
);

// 32-byte hashes are encoded as fixed-length hex strings.
const hex32$1 = Hex(32);

// SCALE codec for a block header.
const blockHeader = Struct({
  parentHash: hex32$1,
  number: compactNumber,
  stateRoot: hex32$1,
  extrinsicRoot: hex32$1,
  digests: Vector(diggest)
});
export { blockHeader };
//# sourceMappingURL=blockHeader.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"blockHeader.mjs","sources":["../../../src/codecs/blockHeader.ts"],"sourcesContent":["import {\n Bytes,\n CodecType,\n Struct,\n Vector,\n _void,\n enhanceCodec,\n Hex,\n Variant,\n compactNumber,\n} from \"./scale\"\n\nconst textEncoder = new TextEncoder()\nconst textDecoder = new TextDecoder()\n\nconst fourChars = enhanceCodec(\n Bytes(4),\n textEncoder.encode.bind(textEncoder),\n textDecoder.decode.bind(textDecoder),\n)\n\nconst diggestVal = Struct({\n engine: fourChars,\n payload: Hex(),\n})\n\nconst diggest = Variant(\n {\n other: Bytes(),\n consensus: diggestVal,\n seal: diggestVal,\n preRuntime: diggestVal,\n runtimeUpdated: _void,\n },\n [0, 4, 5, 6, 8],\n)\n\nconst hex32 = Hex(32)\nexport const blockHeader = Struct({\n parentHash: hex32,\n number: compactNumber,\n stateRoot: hex32,\n extrinsicRoot: hex32,\n digests: Vector(diggest),\n})\n\nexport type BlockHeader = CodecType<typeof blockHeader>\n"],"names":["textEncoder","hex32"],"mappings":";;;;;;;;;;;;;AAYA,MAAMA,aAAA,GAAc,IAAI,WAAA,EAAY;AACpC,MAAM,WAAA,GAAc,IAAI,WAAA,EAAY;AAEpC,MAAM,SAAA,GAAY,YAAA;AAAA,EAChB,MAAM,CAAC,CAAA;AAAA,EACPA,aAAA,CAAY,MAAA,CAAO,IAAA,CAAKA,aAAW,CAAA;AAAA,EACnC,WAAA,CAAY,MAAA,CAAO,IAAA,CAAK,WAAW;AACrC,CAAA;AAEA,MAAM,aAAa,MAAA,CAAO;AAAA,EACxB,MAAA,EAAQ,SAAA;AAAA,EACR,SAAS,GAAA;AACX,CAAC,CAAA;AAED,MAAM,OAAA,GAAU,OAAA;AAAA,EACd;AAAA,IACE,OAAO,KAAA,EAAM;AAAA,IACb,SAAA,EAAW,UAAA;AAAA,IACX,IAAA,EAAM,UAAA;AAAA,IACN,UAAA,EAAY,UAAA;AAAA,IACZ,cAAA,EAAgB;AAAA,GAClB;AAAA,EACA,CAAC,CAAA,EAAG,CAAA,EAAG,CAAA,EAAG,GAAG,CAAC;AAChB,CAAA;AAEA,MAAMC,OAAA,GAAQ,IAAI,EAAE,CAAA;AACb,MAAM,cAAc,MAAA,CAAO;AAAA,EAChC,UAAA,EAAYA,OAAA;AAAA,EACZ,MAAA,EAAQ,aAAA;AAAA,EACR,SAAA,EAAWA,OAAA;AAAA,EACX,aAAA,EAAeA,OAAA;AAAA,EACf,OAAA,EAAS,OAAO,OAAO;AACzB,CAAC;;;;"}
@@ -0,0 +1,28 @@
import { Enum, Struct, _void, Option, str, Vector, u8 } from 'scale-ts';
// Deprecation status of a single metadata item (pallet, call, constant, ...).
const itemDeprecation = Enum({
  NotDeprecated: _void,
  DeprecatedWithoutNote: _void,
  Deprecated: Struct({
    note: str,
    since: Option(str)
  })
});

// Per-variant deprecation info: pairs each variant index with its status.
// The explicit index list [1, 2] matches the discriminants that
// DeprecatedWithoutNote / Deprecated have in `itemDeprecation` above.
const variantDeprecation = Vector(
  Struct({
    index: u8,
    deprecation: Enum(
      {
        DeprecatedWithoutNote: _void,
        Deprecated: Struct({
          note: str,
          since: Option(str)
        })
      },
      [1, 2]
    )
  })
);
export { itemDeprecation, variantDeprecation };
//# sourceMappingURL=deprecation.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"deprecation.mjs","sources":["../../../../src/codecs/metadata/deprecation.ts"],"sourcesContent":["import { _void, Enum, Option, str, Struct, u8, Vector } from \"scale-ts\"\n\nexport const itemDeprecation = Enum({\n NotDeprecated: _void,\n DeprecatedWithoutNote: _void,\n Deprecated: Struct({\n note: str,\n since: Option(str),\n }),\n})\n\nexport const variantDeprecation = Vector(\n Struct({\n index: u8,\n deprecation: Enum(\n {\n DeprecatedWithoutNote: _void,\n Deprecated: Struct({\n note: str,\n since: Option(str),\n }),\n },\n [1, 2],\n ),\n }),\n)\n"],"names":[],"mappings":";;AAEO,MAAM,kBAAkB,IAAA,CAAK;AAAA,EAClC,aAAA,EAAe,KAAA;AAAA,EACf,qBAAA,EAAuB,KAAA;AAAA,EACvB,YAAY,MAAA,CAAO;AAAA,IACjB,IAAA,EAAM,GAAA;AAAA,IACN,KAAA,EAAO,OAAO,GAAG;AAAA,GAClB;AACH,CAAC;AAEM,MAAM,kBAAA,GAAqB,MAAA;AAAA,EAChC,MAAA,CAAO;AAAA,IACL,KAAA,EAAO,EAAA;AAAA,IACP,WAAA,EAAa,IAAA;AAAA,MACX;AAAA,QACE,qBAAA,EAAuB,KAAA;AAAA,QACvB,YAAY,MAAA,CAAO;AAAA,UACjB,IAAA,EAAM,GAAA;AAAA,UACN,KAAA,EAAO,OAAO,GAAG;AAAA,SAClB;AAAA,OACH;AAAA,MACA,CAAC,GAAG,CAAC;AAAA;AACP,GACD;AACH;;;;"}
@@ -0,0 +1,6 @@
import { Vector, str } from 'scale-ts';
// Documentation attached to metadata items: a plain vector of strings.
const docs = Vector(str);
export { docs };
//# sourceMappingURL=docs.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"docs.mjs","sources":["../../../../src/codecs/metadata/docs.ts"],"sourcesContent":["import { Vector, str } from \"scale-ts\"\n\nexport const docs = Vector(str)\n"],"names":[],"mappings":";;AAEO,MAAM,IAAA,GAAO,OAAO,GAAG;;;;"}
@@ -0,0 +1,72 @@
import { compactNumber } from '../scale/compact.mjs';
import { Option, str, Enum, _void, Vector, Struct, u32, u8 } from 'scale-ts';
import { docs } from './docs.mjs';
// Optional string, used for optional field/type names.
const oStr = Option(str);

// All primitive types representable in the metadata type registry.
const primitive = Enum({
  bool: _void,
  char: _void,
  str: _void,
  u8: _void,
  u16: _void,
  u32: _void,
  u64: _void,
  u128: _void,
  u256: _void,
  i8: _void,
  i16: _void,
  i32: _void,
  i64: _void,
  i128: _void,
  i256: _void
});

// Fields of a composite/variant type; `type` is a compact-encoded number
// (presumably an id into the lookup table below — matches `entry.id`).
const fields = Vector(
  Struct({
    name: oStr,
    type: compactNumber,
    typeName: oStr,
    docs
  })
);

// Fixed-length array type: element count plus element type id.
const arr = Struct({
  len: u32,
  type: compactNumber
});

// Bit-sequence type: type ids of the bit store and bit order types.
const bitSequence = Struct({
  bitStoreType: compactNumber,
  bitOrderType: compactNumber
});

// Variants of an enum type, each with its fields and discriminant index.
const variant = Vector(
  Struct({
    name: str,
    fields,
    index: u8,
    docs
  })
);

// Type definition: exactly one of the supported type shapes.
const def = Enum({
  composite: fields,
  variant,
  sequence: compactNumber,
  array: arr,
  tuple: Vector(compactNumber),
  primitive,
  compact: compactNumber,
  bitSequence
});

// Generic type parameter: name plus an optional type id.
const param = Struct({
  name: str,
  type: Option(compactNumber)
});
const params = Vector(param);

// One entry of the type lookup table; `path` reuses the string-vector codec.
const entry = Struct({
  id: compactNumber,
  path: docs,
  params,
  def,
  docs
});

// The full metadata type lookup table.
const lookup = Vector(entry);
export { lookup };
//# sourceMappingURL=lookup.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"lookup.mjs","sources":["../../../../src/codecs/metadata/lookup.ts"],"sourcesContent":["import { compactNumber } from \"../scale/compact\"\nimport {\n CodecType,\n Enum,\n Option,\n Struct,\n Vector,\n _void,\n str,\n u32,\n u8,\n} from \"scale-ts\"\nimport { docs } from \"./docs\"\n\nconst oStr = Option(str)\n\nconst primitive = Enum({\n bool: _void,\n char: _void,\n str: _void,\n u8: _void,\n u16: _void,\n u32: _void,\n u64: _void,\n u128: _void,\n u256: _void,\n i8: _void,\n i16: _void,\n i32: _void,\n i64: _void,\n i128: _void,\n i256: _void,\n})\n\nconst fields = Vector(\n Struct({\n name: oStr,\n type: compactNumber,\n typeName: oStr,\n docs,\n }),\n)\n\nconst arr = Struct({\n len: u32,\n type: compactNumber,\n})\n\nconst bitSequence = Struct({\n bitStoreType: compactNumber,\n bitOrderType: compactNumber,\n})\n\nconst variant = Vector(\n Struct({\n name: str,\n fields,\n index: u8,\n docs,\n }),\n)\n\nconst def = Enum({\n composite: fields,\n variant,\n sequence: compactNumber,\n array: arr,\n tuple: Vector(compactNumber),\n primitive,\n compact: compactNumber,\n bitSequence,\n})\n\nconst param = Struct({\n name: str,\n type: Option(compactNumber),\n})\nconst params = Vector(param)\n\nconst entry = Struct({\n id: compactNumber,\n path: docs,\n params,\n def,\n docs,\n})\n\nexport const lookup = Vector(entry)\nexport type V14Lookup = CodecType<typeof 
lookup>\n"],"names":[],"mappings":";;;;AAcA,MAAM,IAAA,GAAO,OAAO,GAAG,CAAA;AAEvB,MAAM,YAAY,IAAA,CAAK;AAAA,EACrB,IAAA,EAAM,KAAA;AAAA,EACN,IAAA,EAAM,KAAA;AAAA,EACN,GAAA,EAAK,KAAA;AAAA,EACL,EAAA,EAAI,KAAA;AAAA,EACJ,GAAA,EAAK,KAAA;AAAA,EACL,GAAA,EAAK,KAAA;AAAA,EACL,GAAA,EAAK,KAAA;AAAA,EACL,IAAA,EAAM,KAAA;AAAA,EACN,IAAA,EAAM,KAAA;AAAA,EACN,EAAA,EAAI,KAAA;AAAA,EACJ,GAAA,EAAK,KAAA;AAAA,EACL,GAAA,EAAK,KAAA;AAAA,EACL,GAAA,EAAK,KAAA;AAAA,EACL,IAAA,EAAM,KAAA;AAAA,EACN,IAAA,EAAM;AACR,CAAC,CAAA;AAED,MAAM,MAAA,GAAS,MAAA;AAAA,EACb,MAAA,CAAO;AAAA,IACL,IAAA,EAAM,IAAA;AAAA,IACN,IAAA,EAAM,aAAA;AAAA,IACN,QAAA,EAAU,IAAA;AAAA,IACV;AAAA,GACD;AACH,CAAA;AAEA,MAAM,MAAM,MAAA,CAAO;AAAA,EACjB,GAAA,EAAK,GAAA;AAAA,EACL,IAAA,EAAM;AACR,CAAC,CAAA;AAED,MAAM,cAAc,MAAA,CAAO;AAAA,EACzB,YAAA,EAAc,aAAA;AAAA,EACd,YAAA,EAAc;AAChB,CAAC,CAAA;AAED,MAAM,OAAA,GAAU,MAAA;AAAA,EACd,MAAA,CAAO;AAAA,IACL,IAAA,EAAM,GAAA;AAAA,IACN,MAAA;AAAA,IACA,KAAA,EAAO,EAAA;AAAA,IACP;AAAA,GACD;AACH,CAAA;AAEA,MAAM,MAAM,IAAA,CAAK;AAAA,EACf,SAAA,EAAW,MAAA;AAAA,EACX,OAAA;AAAA,EACA,QAAA,EAAU,aAAA;AAAA,EACV,KAAA,EAAO,GAAA;AAAA,EACP,KAAA,EAAO,OAAO,aAAa,CAAA;AAAA,EAC3B,SAAA;AAAA,EACA,OAAA,EAAS,aAAA;AAAA,EACT;AACF,CAAC,CAAA;AAED,MAAM,QAAQ,MAAA,CAAO;AAAA,EACnB,IAAA,EAAM,GAAA;AAAA,EACN,IAAA,EAAM,OAAO,aAAa;AAC5B,CAAC,CAAA;AACD,MAAM,MAAA,GAAS,OAAO,KAAK,CAAA;AAE3B,MAAM,QAAQ,MAAA,CAAO;AAAA,EACnB,EAAA,EAAI,aAAA;AAAA,EACJ,IAAA,EAAM,IAAA;AAAA,EACN,MAAA;AAAA,EACA,GAAA;AAAA,EACA;AACF,CAAC,CAAA;AAEM,MAAM,MAAA,GAAS,OAAO,KAAK;;;;"}
@@ -0,0 +1,59 @@
import { createCodec, Struct, Enum, u32, Bytes, Option, Tuple, compact } from 'scale-ts';
import { v14 } from './v14.mjs';
import { v15 } from './v15.mjs';
import { v16 } from './v16.mjs';
// Placeholder codec for metadata versions this library cannot decode:
// both the encoder and the decoder immediately throw.
const unsupportedFn = () => {
  throw new Error("Unsupported metadata version!");
};
const unsupported = createCodec(
  unsupportedFn,
  unsupportedFn
);

// Top-level metadata codec: magic number followed by a versioned enum.
// Only v14, v15 and v16 have real codecs; every earlier version throws.
const metadata = Struct({
  magicNumber: u32,
  metadata: Enum({
    v0: unsupported,
    v1: unsupported,
    v2: unsupported,
    v3: unsupported,
    v4: unsupported,
    v5: unsupported,
    v6: unsupported,
    v7: unsupported,
    v8: unsupported,
    v9: unsupported,
    v10: unsupported,
    v11: unsupported,
    v12: unsupported,
    v13: unsupported,
    v14,
    v15,
    v16
  })
});

// Helper codecs for the differently-wrapped forms in which metadata is
// returned by the various runtime/RPC entry points (see comments below).
const opaqueBytes = Bytes();
const optionOpaque = Option(opaqueBytes);
const opaqueOpaqueBytes = Tuple(compact, opaqueBytes);

// Decodes metadata from any known wrapping, trying each in turn (the empty
// catch blocks are a deliberate best-effort fallback chain).
// NOTE(review): throws a bare `null` when every attempt fails, so callers
// must not assume the thrown value is an Error instance.
const decAnyMetadata = (input) => {
  // raw, unwrapped metadata
  try {
    return metadata.dec(input);
  } catch (_) {
  }
  // comes from metadata.metadata_at_version
  try {
    return metadata.dec(optionOpaque.dec(input));
  } catch (_) {
  }
  // comes from state.getMetadata
  try {
    return metadata.dec(opaqueBytes.dec(input));
  } catch (_) {
  }
  // comes from metadata.metadata
  try {
    return metadata.dec(opaqueOpaqueBytes.dec(input)[1]);
  } catch (_) {
  }
  throw null;
};
export { decAnyMetadata, metadata };
//# sourceMappingURL=metadata.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"metadata.mjs","sources":["../../../../src/codecs/metadata/metadata.ts"],"sourcesContent":["import {\n Bytes,\n type Codec,\n type CodecType,\n Enum,\n Option,\n Struct,\n Tuple,\n compact,\n createCodec,\n u32,\n} from \"scale-ts\"\nimport type { HexString } from \"../scale\"\nimport { v14 } from \"./v14\"\nimport { v15 } from \"./v15\"\nimport { v16 } from \"./v16\"\n\nconst unsupportedFn = () => {\n throw new Error(\"Unsupported metadata version!\")\n}\n\nconst unsupported = createCodec(\n unsupportedFn,\n unsupportedFn,\n) as unknown as Codec<unknown>\n\nexport const metadata = Struct({\n magicNumber: u32,\n metadata: Enum({\n v0: unsupported,\n v1: unsupported,\n v2: unsupported,\n v3: unsupported,\n v4: unsupported,\n v5: unsupported,\n v6: unsupported,\n v7: unsupported,\n v8: unsupported,\n v9: unsupported,\n v10: unsupported,\n v11: unsupported,\n v12: unsupported,\n v13: unsupported,\n v14,\n v15,\n v16,\n }),\n})\nexport type Metadata = CodecType<typeof metadata>\n\nconst opaqueBytes = Bytes()\nconst optionOpaque = Option(opaqueBytes)\nconst opaqueOpaqueBytes = Tuple(compact, opaqueBytes)\n\nexport const decAnyMetadata = (\n input: Uint8Array | HexString,\n): CodecType<typeof metadata> => {\n try {\n return metadata.dec(input)\n } catch (_) {}\n\n // comes from metadata.metadata_at_version\n try {\n return metadata.dec(optionOpaque.dec(input)!)\n } catch (_) {}\n\n // comes from state.getMetadata\n try {\n return metadata.dec(opaqueBytes.dec(input))\n } catch (_) {}\n\n // comes from metadata.metadata\n try {\n return metadata.dec(opaqueOpaqueBytes.dec(input)[1])\n } catch (_) {}\n\n throw 
null\n}\n"],"names":[],"mappings":";;;;;AAiBA,MAAM,gBAAgB,MAAM;AAC1B,EAAA,MAAM,IAAI,MAAM,+BAA+B,CAAA;AACjD,CAAA;AAEA,MAAM,WAAA,GAAc,WAAA;AAAA,EAClB,aAAA;AAAA,EACA;AACF,CAAA;AAEO,MAAM,WAAW,MAAA,CAAO;AAAA,EAC7B,WAAA,EAAa,GAAA;AAAA,EACb,UAAU,IAAA,CAAK;AAAA,IACb,EAAA,EAAI,WAAA;AAAA,IACJ,EAAA,EAAI,WAAA;AAAA,IACJ,EAAA,EAAI,WAAA;AAAA,IACJ,EAAA,EAAI,WAAA;AAAA,IACJ,EAAA,EAAI,WAAA;AAAA,IACJ,EAAA,EAAI,WAAA;AAAA,IACJ,EAAA,EAAI,WAAA;AAAA,IACJ,EAAA,EAAI,WAAA;AAAA,IACJ,EAAA,EAAI,WAAA;AAAA,IACJ,EAAA,EAAI,WAAA;AAAA,IACJ,GAAA,EAAK,WAAA;AAAA,IACL,GAAA,EAAK,WAAA;AAAA,IACL,GAAA,EAAK,WAAA;AAAA,IACL,GAAA,EAAK,WAAA;AAAA,IACL,GAAA;AAAA,IACA,GAAA;AAAA,IACA;AAAA,GACD;AACH,CAAC;AAGD,MAAM,cAAc,KAAA,EAAM;AAC1B,MAAM,YAAA,GAAe,OAAO,WAAW,CAAA;AACvC,MAAM,iBAAA,GAAoB,KAAA,CAAM,OAAA,EAAS,WAAW,CAAA;AAE7C,MAAM,cAAA,GAAiB,CAC5B,KAAA,KAC+B;AAC/B,EAAA,IAAI;AACF,IAAA,OAAO,QAAA,CAAS,IAAI,KAAK,CAAA;AAAA,EAC3B,SAAS,CAAA,EAAG;AAAA,EAAC;AAGb,EAAA,IAAI;AACF,IAAA,OAAO,QAAA,CAAS,GAAA,CAAI,YAAA,CAAa,GAAA,CAAI,KAAK,CAAE,CAAA;AAAA,EAC9C,SAAS,CAAA,EAAG;AAAA,EAAC;AAGb,EAAA,IAAI;AACF,IAAA,OAAO,QAAA,CAAS,GAAA,CAAI,WAAA,CAAY,GAAA,CAAI,KAAK,CAAC,CAAA;AAAA,EAC5C,SAAS,CAAA,EAAG;AAAA,EAAC;AAGb,EAAA,IAAI;AACF,IAAA,OAAO,SAAS,GAAA,CAAI,iBAAA,CAAkB,IAAI,KAAK,CAAA,CAAE,CAAC,CAAC,CAAA;AAAA,EACrD,SAAS,CAAA,EAAG;AAAA,EAAC;AAEb,EAAA,MAAM,IAAA;AACR;;;;"}
@@ -0,0 +1,110 @@
import { Enum, _void, Vector, Struct, Option, str, u8 } from 'scale-ts';
import '../../utils/ss58-util.mjs';
import '../scale/Binary.mjs';
import '../scale/bitSequence.mjs';
import '../scale/char.mjs';
import { compactNumber } from '../scale/compact.mjs';
import { Hex } from '../scale/Hex.mjs';
import '../scale/fixed-str.mjs';
import '../scale/Variant.mjs';
import '../scale/ethAccount.mjs';
import '../scale/shaped.mjs';
import '../scale/BitSeq.mjs';
import { docs } from './docs.mjs';
import { variantDeprecation, itemDeprecation } from './deprecation.mjs';
import { viewFunction } from './runtime-api.mjs';
// Storage hashing algorithms supported by the runtime.
const hashType = Enum({
  Blake2128: _void,
  Blake2256: _void,
  Blake2128Concat: _void,
  Twox128: _void,
  Twox256: _void,
  Twox64Concat: _void,
  Identity: _void
});
const hashers$1 = Vector(hashType);

// Map storage entry: a list of hashers plus key and value type ids.
const storageMap = Struct({
  hashers: hashers$1,
  key: compactNumber,
  value: compactNumber
});

// Field layout of a single storage item. Kept as a plain object (not a
// Struct) so the v16 codec below can spread it and append `deprecationInfo`.
const storageItem = {
  name: str,
  modifier: u8,
  type: Enum({
    plain: compactNumber,
    map: storageMap
  }),
  fallback: Hex(),
  docs
};

// v14/v15 reference their calls/events/errors enums by optional type id only.
const enumRefV14 = Option(compactNumber);

// Field layout of a v14 pallet (plain object; spread into v15/v16 below).
const v14Pallet = {
  name: str,
  storage: Option(
    Struct({
      prefix: str,
      items: Vector(Struct(storageItem))
    })
  ),
  calls: enumRefV14,
  events: enumRefV14,
  constants: Vector(
    Struct({
      name: str,
      type: compactNumber,
      value: Hex(),
      docs
    })
  ),
  errors: enumRefV14,
  index: u8
};

// v15 pallets are v14 pallets plus pallet-level docs.
const v15Pallet = {
  ...v14Pallet,
  docs
};

// v16 enum references additionally carry per-variant deprecation info.
const enumRef = Option(
  Struct({ type: compactNumber, deprecationInfo: variantDeprecation })
);

// Field layout of a v16 pallet: adds deprecation info throughout, plus
// associated types and view functions.
const v16Pallet = {
  name: str,
  storage: Option(
    Struct({
      prefix: str,
      items: Vector(
        Struct({
          ...storageItem,
          deprecationInfo: itemDeprecation
        })
      )
    })
  ),
  calls: enumRef,
  events: enumRef,
  constants: Vector(
    Struct({
      name: str,
      type: compactNumber,
      value: Hex(),
      docs,
      deprecationInfo: itemDeprecation
    })
  ),
  errors: enumRef,
  associatedTypes: Vector(
    Struct({
      name: str,
      type: compactNumber,
      docs
    })
  ),
  viewFns: Vector(viewFunction),
  index: u8,
  docs,
  deprecationInfo: itemDeprecation
};
export { storageMap, v14Pallet, v15Pallet, v16Pallet };
//# sourceMappingURL=pallets.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"pallets.mjs","sources":["../../../../src/codecs/metadata/pallets.ts"],"sourcesContent":["import { Hex, compactNumber } from \"@/codecs/scale\"\nimport { Struct, Option, Vector, u8, str, Enum, _void } from \"scale-ts\"\nimport { docs } from \"./docs\"\nimport { itemDeprecation, variantDeprecation } from \"./deprecation\"\nimport { viewFunction } from \"./runtime-api\"\n\nconst hashType = Enum({\n Blake2128: _void,\n Blake2256: _void,\n Blake2128Concat: _void,\n Twox128: _void,\n Twox256: _void,\n Twox64Concat: _void,\n Identity: _void,\n})\n\nconst hashers = Vector(hashType)\n\nexport const storageMap = Struct({\n hashers,\n key: compactNumber,\n value: compactNumber,\n})\n\nconst storageItem = {\n name: str,\n modifier: u8,\n type: Enum({\n plain: compactNumber,\n map: storageMap,\n }),\n fallback: Hex(),\n docs,\n}\n\nconst enumRefV14 = Option(compactNumber)\n\nexport const v14Pallet = {\n name: str,\n storage: Option(\n Struct({\n prefix: str,\n items: Vector(Struct(storageItem)),\n }),\n ),\n calls: enumRefV14,\n events: enumRefV14,\n constants: Vector(\n Struct({\n name: str,\n type: compactNumber,\n value: Hex(),\n docs,\n }),\n ),\n errors: enumRefV14,\n index: u8,\n}\n\nexport const v15Pallet = {\n ...v14Pallet,\n docs,\n}\n\nconst enumRef = Option(\n Struct({ type: compactNumber, deprecationInfo: variantDeprecation }),\n)\n\nexport const v16Pallet = {\n name: str,\n storage: Option(\n Struct({\n prefix: str,\n items: Vector(\n Struct({\n ...storageItem,\n deprecationInfo: itemDeprecation,\n }),\n ),\n }),\n ),\n calls: enumRef,\n events: enumRef,\n constants: Vector(\n Struct({\n name: str,\n type: compactNumber,\n value: Hex(),\n docs,\n deprecationInfo: itemDeprecation,\n }),\n ),\n errors: enumRef,\n associatedTypes: Vector(\n Struct({\n name: str,\n type: compactNumber,\n docs,\n }),\n ),\n viewFns: Vector(viewFunction),\n index: u8,\n docs,\n deprecationInfo: 
itemDeprecation,\n}\n"],"names":["hashers"],"mappings":";;;;;;;;;;;;;;;;AAMA,MAAM,WAAW,IAAA,CAAK;AAAA,EACpB,SAAA,EAAW,KAAA;AAAA,EACX,SAAA,EAAW,KAAA;AAAA,EACX,eAAA,EAAiB,KAAA;AAAA,EACjB,OAAA,EAAS,KAAA;AAAA,EACT,OAAA,EAAS,KAAA;AAAA,EACT,YAAA,EAAc,KAAA;AAAA,EACd,QAAA,EAAU;AACZ,CAAC,CAAA;AAED,MAAMA,SAAA,GAAU,OAAO,QAAQ,CAAA;AAExB,MAAM,aAAa,MAAA,CAAO;AAAA,WAC/BA,SAAA;AAAA,EACA,GAAA,EAAK,aAAA;AAAA,EACL,KAAA,EAAO;AACT,CAAC;AAED,MAAM,WAAA,GAAc;AAAA,EAClB,IAAA,EAAM,GAAA;AAAA,EACN,QAAA,EAAU,EAAA;AAAA,EACV,MAAM,IAAA,CAAK;AAAA,IACT,KAAA,EAAO,aAAA;AAAA,IACP,GAAA,EAAK;AAAA,GACN,CAAA;AAAA,EACD,UAAU,GAAA,EAAI;AAAA,EACd;AACF,CAAA;AAEA,MAAM,UAAA,GAAa,OAAO,aAAa,CAAA;AAEhC,MAAM,SAAA,GAAY;AAAA,EACvB,IAAA,EAAM,GAAA;AAAA,EACN,OAAA,EAAS,MAAA;AAAA,IACP,MAAA,CAAO;AAAA,MACL,MAAA,EAAQ,GAAA;AAAA,MACR,KAAA,EAAO,MAAA,CAAO,MAAA,CAAO,WAAW,CAAC;AAAA,KAClC;AAAA,GACH;AAAA,EACA,KAAA,EAAO,UAAA;AAAA,EACP,MAAA,EAAQ,UAAA;AAAA,EACR,SAAA,EAAW,MAAA;AAAA,IACT,MAAA,CAAO;AAAA,MACL,IAAA,EAAM,GAAA;AAAA,MACN,IAAA,EAAM,aAAA;AAAA,MACN,OAAO,GAAA,EAAI;AAAA,MACX;AAAA,KACD;AAAA,GACH;AAAA,EACA,MAAA,EAAQ,UAAA;AAAA,EACR,KAAA,EAAO;AACT;AAEO,MAAM,SAAA,GAAY;AAAA,EACvB,GAAG,SAAA;AAAA,EACH;AACF;AAEA,MAAM,OAAA,GAAU,MAAA;AAAA,EACd,OAAO,EAAE,IAAA,EAAM,aAAA,EAAe,eAAA,EAAiB,oBAAoB;AACrE,CAAA;AAEO,MAAM,SAAA,GAAY;AAAA,EACvB,IAAA,EAAM,GAAA;AAAA,EACN,OAAA,EAAS,MAAA;AAAA,IACP,MAAA,CAAO;AAAA,MACL,MAAA,EAAQ,GAAA;AAAA,MACR,KAAA,EAAO,MAAA;AAAA,QACL,MAAA,CAAO;AAAA,UACL,GAAG,WAAA;AAAA,UACH,eAAA,EAAiB;AAAA,SAClB;AAAA;AACH,KACD;AAAA,GACH;AAAA,EACA,KAAA,EAAO,OAAA;AAAA,EACP,MAAA,EAAQ,OAAA;AAAA,EACR,SAAA,EAAW,MAAA;AAAA,IACT,MAAA,CAAO;AAAA,MACL,IAAA,EAAM,GAAA;AAAA,MACN,IAAA,EAAM,aAAA;AAAA,MACN,OAAO,GAAA,EAAI;AAAA,MACX,IAAA;AAAA,MACA,eAAA,EAAiB;AAAA,KAClB;AAAA,GACH;AAAA,EACA,MAAA,EAAQ,OAAA;AAAA,EACR,eAAA,EAAiB,MAAA;AAAA,IACf,MAAA,CAAO;AAAA,MACL,IAAA,EAAM,GAAA;AAAA,MACN,IAAA,EAAM,aAAA;AAAA,MACN;AAAA,KACD;AAAA,GACH;AAAA,EACA,OAAA,EAAS,OAAO,YAAY,CAAA;AAAA,EAC5B,KAAA,EAAO,EAAA;AAAA,EACP,IAAA;AAAA,EACA,eAAA,EAAiB;AACnB;;;;"}
@@ -0,0 +1,48 @@
import { Vector, Struct, str } from 'scale-ts';
import { docs } from './docs.mjs';
import '../../utils/ss58-util.mjs';
import '../scale/Binary.mjs';
import '../scale/bitSequence.mjs';
import '../scale/char.mjs';
import { compactNumber } from '../scale/compact.mjs';
import { Hex } from '../scale/Hex.mjs';
import '../scale/fixed-str.mjs';
import '../scale/Variant.mjs';
import '../scale/ethAccount.mjs';
import '../scale/shaped.mjs';
import '../scale/BitSeq.mjs';
import { itemDeprecation } from './deprecation.mjs';
// Common shape of a runtime-API method: name, typed inputs, output type id
// and docs. Plain object so the v16 codecs below can spread it with extra
// fields.
const runtimeApiMethod = {
  name: str,
  inputs: Vector(
    Struct({
      name: str,
      type: compactNumber
    })
  ),
  output: compactNumber,
  docs
};

// v15 runtime-API descriptor.
const runtimeApiV15 = Struct({
  name: str,
  methods: Vector(Struct(runtimeApiMethod)),
  docs
});

// v16 runtime-API descriptor: adds an API version plus deprecation info at
// both the API and the per-method level.
const runtimeApi = Struct({
  name: str,
  methods: Vector(
    Struct({ ...runtimeApiMethod, deprecationInfo: itemDeprecation })
  ),
  docs,
  version: compactNumber,
  deprecationInfo: itemDeprecation
});

// v16 view function: a 32-byte id followed by the common method fields and
// deprecation info.
const viewFunction = Struct({
  id: Hex(32),
  ...runtimeApiMethod,
  deprecationInfo: itemDeprecation
});
export { runtimeApi, runtimeApiMethod, runtimeApiV15, viewFunction };
//# sourceMappingURL=runtime-api.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"runtime-api.mjs","sources":["../../../../src/codecs/metadata/runtime-api.ts"],"sourcesContent":["import { Struct, Vector, str } from \"scale-ts\"\nimport { docs } from \"./docs\"\nimport { compactNumber, Hex, compactNumber as ty } from \"../scale\"\nimport { itemDeprecation } from \"./deprecation\"\n\nexport const runtimeApiMethod = {\n name: str,\n inputs: Vector(\n Struct({\n name: str,\n type: ty,\n }),\n ),\n output: ty,\n docs,\n}\n\nexport const runtimeApiV15 = Struct({\n name: str,\n methods: Vector(Struct(runtimeApiMethod)),\n docs,\n})\n\nexport const runtimeApi = Struct({\n name: str,\n methods: Vector(\n Struct({ ...runtimeApiMethod, deprecationInfo: itemDeprecation }),\n ),\n docs,\n version: compactNumber,\n deprecationInfo: itemDeprecation,\n})\n\nexport const viewFunction = Struct({\n id: Hex(32),\n ...runtimeApiMethod,\n deprecationInfo: itemDeprecation,\n})\n"],"names":["ty"],"mappings":";;;;;;;;;;;;;;;AAKO,MAAM,gBAAA,GAAmB;AAAA,EAC9B,IAAA,EAAM,GAAA;AAAA,EACN,MAAA,EAAQ,MAAA;AAAA,IACN,MAAA,CAAO;AAAA,MACL,IAAA,EAAM,GAAA;AAAA,MACN,IAAA,EAAMA;AAAA,KACP;AAAA,GACH;AAAA,EACA,MAAA,EAAQA,aAAA;AAAA,EACR;AACF;AAEO,MAAM,gBAAgB,MAAA,CAAO;AAAA,EAClC,IAAA,EAAM,GAAA;AAAA,EACN,OAAA,EAAS,MAAA,CAAO,MAAA,CAAO,gBAAgB,CAAC,CAAA;AAAA,EACxC;AACF,CAAC;AAEM,MAAM,aAAa,MAAA,CAAO;AAAA,EAC/B,IAAA,EAAM,GAAA;AAAA,EACN,OAAA,EAAS,MAAA;AAAA,IACP,OAAO,EAAE,GAAG,gBAAA,EAAkB,eAAA,EAAiB,iBAAiB;AAAA,GAClE;AAAA,EACA,IAAA;AAAA,EACA,OAAA,EAAS,aAAA;AAAA,EACT,eAAA,EAAiB;AACnB,CAAC;AAEM,MAAM,eAAe,MAAA,CAAO;AAAA,EACjC,EAAA,EAAI,IAAI,EAAE,CAAA;AAAA,EACV,GAAG,gBAAA;AAAA,EACH,eAAA,EAAiB;AACnB,CAAC;;;;"}
@@ -0,0 +1,48 @@
/**
 * Normalizes a decoded metadata value (v14, v15 or v16, optionally wrapped
 * in the magic-number struct and/or the version-tag enum) into a single
 * unified shape carrying an explicit `version` field.
 *
 * @param {object} metadata - Decoded metadata; may be the full
 *   `{ magicNumber, metadata }` struct, a `{ tag, value }` enum, or a bare
 *   version payload.
 * @returns {object} The unified metadata with `version: 14 | 15 | 16`.
 * @throws {Error} When the tagged version is not v14, v15 or v16.
 */
const unifyMetadata = (metadata) => {
  // Unwrap the outer `{ magicNumber, metadata }` struct, if present.
  if ("magicNumber" in metadata) metadata = metadata.metadata;
  // Unwrap the version-tag enum, rejecting unsupported versions.
  if ("tag" in metadata) {
    if (metadata.tag !== "v14" && metadata.tag !== "v15" && metadata.tag !== "v16")
      throw new Error("Only metadata 14, 15, and 16 are supported");
    metadata = metadata.value;
  }
  // v16 is already in the unified shape; only the version tag is added.
  // `signedExtensionsByVersion` exists only on the v16 extrinsic.
  if ("signedExtensionsByVersion" in metadata.extrinsic) {
    return { version: 16, ...metadata };
  }
  // Shared v14/v15 pallet normalization: wrap the optional enum type ids in
  // `{ type }` records and add the (empty) v16-only collections.
  // NOTE(review): this de-duplicates the two identical map callbacks of the
  // generated output; the adjacent sourcemap is stale for this file.
  const unifyPallets = (pallets) =>
    pallets.map((p) => ({
      ...p,
      calls: p.calls != null ? { type: p.calls } : void 0,
      events: p.events != null ? { type: p.events } : void 0,
      errors: p.errors != null ? { type: p.errors } : void 0,
      viewFns: [],
      associatedTypes: []
    }));
  // `custom` exists only on v15.
  if ("custom" in metadata) {
    const { lookup, extrinsic, custom, apis, pallets, outerEnums } = metadata;
    return {
      version: 15,
      lookup,
      pallets: unifyPallets(pallets),
      // v16 allows multiple extrinsic versions, so earlier single versions
      // are lifted into a one-element array.
      extrinsic: { ...extrinsic, version: [extrinsic.version] },
      apis,
      outerEnums,
      custom
    };
  }
  // Anything left is v14; it has no `apis`, so an empty list is synthesized.
  const { lookup, extrinsic, pallets } = metadata;
  return {
    version: 14,
    lookup,
    pallets: unifyPallets(pallets),
    extrinsic: { ...extrinsic, version: [extrinsic.version] },
    apis: []
  };
};
export { unifyMetadata };
//# sourceMappingURL=unified.mjs.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,41 @@
import { Struct, Vector, u8, str, createCodec } from 'scale-ts';
import { lookup } from './lookup.mjs';
import '../../utils/ss58-util.mjs';
import '../scale/Binary.mjs';
import '../scale/bitSequence.mjs';
import '../scale/char.mjs';
import { compactNumber } from '../scale/compact.mjs';
import '../scale/Hex.mjs';
import '../scale/fixed-str.mjs';
import '../scale/Variant.mjs';
import '../scale/ethAccount.mjs';
import '../scale/shaped.mjs';
import '../scale/BitSeq.mjs';
import { v14Pallet } from './pallets.mjs';
// `Always(value)` is a codec that encodes to zero bytes and always decodes
// to `value`. It is used below to synthesize constant values for fields
// (pallet `docs`, top-level `apis`) that v14 metadata does not physically
// contain, so v14 decodes into the same shape as later versions.
const empty = new Uint8Array();
const Always = (value) => createCodec(
  () => empty,
  () => value
);

// v14 extrinsic metadata: extrinsic type id, format version and the list
// of signed extensions.
const extrinsic$2 = Struct({
  type: compactNumber,
  version: u8,
  signedExtensions: Vector(
    Struct({
      identifier: str,
      type: compactNumber,
      additionalSigned: compactNumber
    })
  )
});

// Full v14 metadata codec; `docs` and `apis` are synthesized as empty.
const v14 = Struct({
  lookup,
  pallets: Vector(Struct({ ...v14Pallet, docs: Always([]) })),
  extrinsic: extrinsic$2,
  type: compactNumber,
  apis: Always([])
});
export { v14 };
//# sourceMappingURL=v14.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"v14.mjs","sources":["../../../../src/codecs/metadata/v14.ts"],"sourcesContent":["import { CodecType, Struct, Vector, createCodec, str, u8 } from \"scale-ts\"\nimport { lookup } from \"./lookup\"\nimport { compactNumber } from \"../scale\"\nimport { v14Pallet } from \"./pallets\"\nimport { runtimeApi } from \"./runtime-api\"\n\nconst empty = new Uint8Array()\nconst Always = <T>(value: T) =>\n createCodec<T>(\n () => empty,\n () => value,\n )\n\nconst extrinsic = Struct({\n type: compactNumber,\n version: u8,\n signedExtensions: Vector(\n Struct({\n identifier: str,\n type: compactNumber,\n additionalSigned: compactNumber,\n }),\n ),\n})\n\nexport const v14 = Struct({\n lookup,\n pallets: Vector(Struct({ ...v14Pallet, docs: Always([] as string[]) })),\n extrinsic,\n type: compactNumber,\n apis: Always([] as Array<CodecType<typeof runtimeApi>>),\n})\nexport type V14 = CodecType<typeof v14>\n"],"names":["extrinsic"],"mappings":";;;;;;;;;;;;;;;AAMA,MAAM,KAAA,GAAQ,IAAI,UAAA,EAAW;AAC7B,MAAM,MAAA,GAAS,CAAI,KAAA,KACjB,WAAA;AAAA,EACE,MAAM,KAAA;AAAA,EACN,MAAM;AACR,CAAA;AAEF,MAAMA,cAAY,MAAA,CAAO;AAAA,EACvB,IAAA,EAAM,aAAA;AAAA,EACN,OAAA,EAAS,EAAA;AAAA,EACT,gBAAA,EAAkB,MAAA;AAAA,IAChB,MAAA,CAAO;AAAA,MACL,UAAA,EAAY,GAAA;AAAA,MACZ,IAAA,EAAM,aAAA;AAAA,MACN,gBAAA,EAAkB;AAAA,KACnB;AAAA;AAEL,CAAC,CAAA;AAEM,MAAM,MAAM,MAAA,CAAO;AAAA,EACxB,MAAA;AAAA,EACA,OAAA,EAAS,MAAA,CAAO,MAAA,CAAO,EAAE,GAAG,SAAA,EAAW,IAAA,EAAM,MAAA,CAAO,EAAc,CAAA,EAAG,CAAC,CAAA;AAAA,aACtEA,WAAA;AAAA,EACA,IAAA,EAAM,aAAA;AAAA,EACN,IAAA,EAAM,MAAA,CAAO,EAAyC;AACxD,CAAC;;;;"}
@@ -0,0 +1,46 @@
import { Struct, Vector, u8, str, Tuple } from 'scale-ts';
import { lookup } from './lookup.mjs';
import { v15Pallet } from './pallets.mjs';
import '../../utils/ss58-util.mjs';
import '../scale/Binary.mjs';
import '../scale/bitSequence.mjs';
import '../scale/char.mjs';
import { compactNumber } from '../scale/compact.mjs';
import { Hex } from '../scale/Hex.mjs';
import '../scale/fixed-str.mjs';
import '../scale/Variant.mjs';
import '../scale/ethAccount.mjs';
import '../scale/shaped.mjs';
import '../scale/BitSeq.mjs';
import { runtimeApiV15 } from './runtime-api.mjs';
// v15 extrinsic metadata: format version plus type ids for the address,
// call, signature and extra data, and the signed-extension list.
const extrinsic$1 = Struct({
  version: u8,
  address: compactNumber,
  call: compactNumber,
  signature: compactNumber,
  extra: compactNumber,
  signedExtensions: Vector(
    Struct({
      identifier: str,
      type: compactNumber,
      additionalSigned: compactNumber
    })
  )
});

// Full v15 metadata codec: adds runtime APIs, outer enums and custom
// key/value entries over v14.
const v15 = Struct({
  lookup,
  pallets: Vector(Struct(v15Pallet)),
  extrinsic: extrinsic$1,
  type: compactNumber,
  apis: Vector(runtimeApiV15),
  outerEnums: Struct({
    call: compactNumber,
    event: compactNumber,
    error: compactNumber
  }),
  custom: Vector(Tuple(str, Struct({ type: compactNumber, value: Hex() })))
});
export { v15 };
//# sourceMappingURL=v15.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"v15.mjs","sources":["../../../../src/codecs/metadata/v15.ts"],"sourcesContent":["import { CodecType, Struct, Tuple, Vector, str, u8 } from \"scale-ts\"\nimport { lookup } from \"./lookup\"\nimport { v15Pallet } from \"./pallets\"\nimport { Hex, compactNumber as ty } from \"../scale\"\nimport { runtimeApiV15 } from \"./runtime-api\"\n\nconst extrinsic = Struct({\n version: u8,\n address: ty,\n call: ty,\n signature: ty,\n extra: ty,\n signedExtensions: Vector(\n Struct({\n identifier: str,\n type: ty,\n additionalSigned: ty,\n }),\n ),\n})\n\nexport const v15 = Struct({\n lookup,\n pallets: Vector(Struct(v15Pallet)),\n extrinsic,\n type: ty,\n apis: Vector(runtimeApiV15),\n outerEnums: Struct({\n call: ty,\n event: ty,\n error: ty,\n }),\n custom: Vector(Tuple(str, Struct({ type: ty, value: Hex() }))),\n})\nexport type V15 = CodecType<typeof v15>\n"],"names":["extrinsic","ty"],"mappings":";;;;;;;;;;;;;;;;AAMA,MAAMA,cAAY,MAAA,CAAO;AAAA,EACvB,OAAA,EAAS,EAAA;AAAA,EACT,OAAA,EAASC,aAAA;AAAA,EACT,IAAA,EAAMA,aAAA;AAAA,EACN,SAAA,EAAWA,aAAA;AAAA,EACX,KAAA,EAAOA,aAAA;AAAA,EACP,gBAAA,EAAkB,MAAA;AAAA,IAChB,MAAA,CAAO;AAAA,MACL,UAAA,EAAY,GAAA;AAAA,MACZ,IAAA,EAAMA,aAAA;AAAA,MACN,gBAAA,EAAkBA;AAAA,KACnB;AAAA;AAEL,CAAC,CAAA;AAEM,MAAM,MAAM,MAAA,CAAO;AAAA,EACxB,MAAA;AAAA,EACA,OAAA,EAAS,MAAA,CAAO,MAAA,CAAO,SAAS,CAAC,CAAA;AAAA,aACjCD,WAAA;AAAA,EACA,IAAA,EAAMC,aAAA;AAAA,EACN,IAAA,EAAM,OAAO,aAAa,CAAA;AAAA,EAC1B,YAAY,MAAA,CAAO;AAAA,IACjB,IAAA,EAAMA,aAAA;AAAA,IACN,KAAA,EAAOA,aAAA;AAAA,IACP,KAAA,EAAOA;AAAA,GACR,CAAA;AAAA,EACD,MAAA,EAAQ,MAAA,CAAO,KAAA,CAAM,GAAA,EAAK,MAAA,CAAO,EAAE,IAAA,EAAMA,aAAA,EAAI,KAAA,EAAO,GAAA,EAAI,EAAG,CAAC,CAAC;AAC/D,CAAC;;;;"}
@@ -0,0 +1,45 @@
import { Struct, Vector, str, Tuple, u8 } from 'scale-ts';
import { lookup } from './lookup.mjs';
import { v16Pallet } from './pallets.mjs';
import '../../utils/ss58-util.mjs';
import '../scale/Binary.mjs';
import '../scale/bitSequence.mjs';
import '../scale/char.mjs';
import { compactNumber } from '../scale/compact.mjs';
import { Hex } from '../scale/Hex.mjs';
import '../scale/fixed-str.mjs';
import '../scale/Variant.mjs';
import '../scale/ethAccount.mjs';
import '../scale/shaped.mjs';
import '../scale/BitSeq.mjs';
import { runtimeApi } from './runtime-api.mjs';
// v16 extrinsic metadata: supports multiple format versions
// (`version: Vector(u8)`) and pairs each version byte with a list of
// signed-extension type ids in `signedExtensionsByVersion`.
const extrinsic = Struct({
  version: Vector(u8),
  address: compactNumber,
  call: compactNumber,
  signature: compactNumber,
  signedExtensionsByVersion: Vector(Tuple(u8, Vector(compactNumber))),
  signedExtensions: Vector(
    Struct({
      identifier: str,
      type: compactNumber,
      additionalSigned: compactNumber
    })
  )
});

// Full v16 metadata codec. Unlike v14/v15 it has no top-level `type` field.
const v16 = Struct({
  lookup,
  pallets: Vector(Struct(v16Pallet)),
  extrinsic,
  apis: Vector(runtimeApi),
  outerEnums: Struct({
    call: compactNumber,
    event: compactNumber,
    error: compactNumber
  }),
  custom: Vector(Tuple(str, Struct({ type: compactNumber, value: Hex() })))
});
export { v16 };
//# sourceMappingURL=v16.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"v16.mjs","sources":["../../../../src/codecs/metadata/v16.ts"],"sourcesContent":["import { CodecType, Struct, Tuple, Vector, str, u8 } from \"scale-ts\"\nimport { lookup } from \"./lookup\"\nimport { v16Pallet } from \"./pallets\"\nimport { compactNumber, Hex, compactNumber as ty } from \"../scale\"\nimport { runtimeApi } from \"./runtime-api\"\n\nconst extrinsic = Struct({\n version: Vector(u8),\n address: ty,\n call: ty,\n signature: ty,\n signedExtensionsByVersion: Vector(Tuple(u8, Vector(compactNumber))),\n signedExtensions: Vector(\n Struct({\n identifier: str,\n type: ty,\n additionalSigned: ty,\n }),\n ),\n})\n\nexport const v16 = Struct({\n lookup,\n pallets: Vector(Struct(v16Pallet)),\n extrinsic,\n apis: Vector(runtimeApi),\n outerEnums: Struct({\n call: ty,\n event: ty,\n error: ty,\n }),\n custom: Vector(Tuple(str, Struct({ type: ty, value: Hex() }))),\n})\nexport type V16 = CodecType<typeof v16>\n"],"names":["ty"],"mappings":";;;;;;;;;;;;;;;;AAMA,MAAM,YAAY,MAAA,CAAO;AAAA,EACvB,OAAA,EAAS,OAAO,EAAE,CAAA;AAAA,EAClB,OAAA,EAASA,aAAA;AAAA,EACT,IAAA,EAAMA,aAAA;AAAA,EACN,SAAA,EAAWA,aAAA;AAAA,EACX,2BAA2B,MAAA,CAAO,KAAA,CAAM,IAAI,MAAA,CAAO,aAAa,CAAC,CAAC,CAAA;AAAA,EAClE,gBAAA,EAAkB,MAAA;AAAA,IAChB,MAAA,CAAO;AAAA,MACL,UAAA,EAAY,GAAA;AAAA,MACZ,IAAA,EAAMA,aAAA;AAAA,MACN,gBAAA,EAAkBA;AAAA,KACnB;AAAA;AAEL,CAAC,CAAA;AAEM,MAAM,MAAM,MAAA,CAAO;AAAA,EACxB,MAAA;AAAA,EACA,OAAA,EAAS,MAAA,CAAO,MAAA,CAAO,SAAS,CAAC,CAAA;AAAA,EACjC,SAAA;AAAA,EACA,IAAA,EAAM,OAAO,UAAU,CAAA;AAAA,EACvB,YAAY,MAAA,CAAO;AAAA,IACjB,IAAA,EAAMA,aAAA;AAAA,IACN,KAAA,EAAOA,aAAA;AAAA,IACP,KAAA,EAAOA;AAAA,GACR,CAAA;AAAA,EACD,MAAA,EAAQ,MAAA,CAAO,KAAA,CAAM,GAAA,EAAK,MAAA,CAAO,EAAE,IAAA,EAAMA,aAAA,EAAI,KAAA,EAAO,GAAA,EAAI,EAAG,CAAC,CAAC;AAC/D,CAAC;;;;"}
@@ -0,0 +1,21 @@
import { enhanceCodec, Bytes } from 'scale-ts';
import { fromBufferToBase58, getSs58AddressInfo } from '../../utils/ss58-util.mjs';
// Builds the encoder half of the AccountId codec: SS58 address string -> raw
// public-key bytes. Rejects addresses with a bad checksum or wrong key length.
// (`_ss58Format` is accepted for signature parity but unused here.)
function fromBase58ToBuffer(nBytes, _ss58Format) {
  return (address) => {
    const parsed = getSs58AddressInfo(address);
    if (!parsed.isValid) throw new Error("Invalid checksum");
    if (parsed.publicKey.length !== nBytes) {
      throw new Error("Invalid public key length");
    }
    return parsed.publicKey;
  };
}
// Codec mapping SS58 address strings <-> fixed-size public-key bytes.
// Defaults: ss58Format 42, 32-byte keys (per the original TypeScript source).
const AccountId = (ss58Format = 42, nBytes = 32) => {
  return enhanceCodec(
    Bytes(nBytes),
    fromBase58ToBuffer(nBytes),
    fromBufferToBase58(ss58Format)
  );
};
export { AccountId };
//# sourceMappingURL=AccountId.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"AccountId.mjs","sources":["../../../../src/codecs/scale/AccountId.ts"],"sourcesContent":["import { Bytes, enhanceCodec } from \"scale-ts\"\nimport {\n getSs58AddressInfo,\n SS58String,\n fromBufferToBase58,\n} from \"@/utils/ss58-util\"\n\nfunction fromBase58ToBuffer(nBytes: number, _ss58Format: number) {\n return (address: SS58String) => {\n const info = getSs58AddressInfo(address)\n if (!info.isValid) throw new Error(\"Invalid checksum\")\n const { publicKey } = info\n if (publicKey.length !== nBytes)\n throw new Error(\"Invalid public key length\")\n\n return publicKey\n }\n}\n\nexport const AccountId = (ss58Format: number = 42, nBytes: 32 | 33 = 32) =>\n enhanceCodec(\n Bytes(nBytes),\n fromBase58ToBuffer(nBytes, ss58Format),\n fromBufferToBase58(ss58Format),\n )\n"],"names":[],"mappings":";;;AAOA,SAAS,kBAAA,CAAmB,QAAgB,WAAA,EAAqB;AAC/D,EAAA,OAAO,CAAC,OAAA,KAAwB;AAC9B,IAAA,MAAM,IAAA,GAAO,mBAAmB,OAAO,CAAA;AACvC,IAAA,IAAI,CAAC,IAAA,CAAK,OAAA,EAAS,MAAM,IAAI,MAAM,kBAAkB,CAAA;AACrD,IAAA,MAAM,EAAE,WAAU,GAAI,IAAA;AACtB,IAAA,IAAI,UAAU,MAAA,KAAW,MAAA;AACvB,MAAA,MAAM,IAAI,MAAM,2BAA2B,CAAA;AAE7C,IAAA,OAAO,SAAA;AAAA,EACT,CAAA;AACF;AAEO,MAAM,SAAA,GAAY,CAAC,UAAA,GAAqB,EAAA,EAAI,SAAkB,EAAA,KACnE,YAAA;AAAA,EACE,MAAM,MAAM,CAAA;AAAA,EACZ,kBAAA,CAAmB,MAAkB,CAAA;AAAA,EACrC,mBAAmB,UAAU;AAC/B;;;;"}
@@ -0,0 +1,94 @@
import { Tuple, compact, Bytes, createCodec } from 'scale-ts';
import { fromHex, toHex, mergeUint8 } from '@pezkuwi/papi-utils';
import { AccountId } from './AccountId.mjs';
// --- Bundler-emitted boilerplate emulating class private fields / public
// --- field initializers (used by the Binary class below). Do not edit.
var __defProp = Object.defineProperty;
// Throws a TypeError with the given message (shared by the guards below).
var __typeError = (msg) => {
  throw TypeError(msg);
};
// Defines an own enumerable/writable property (field-initializer semantics).
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
// Public instance field: stringify non-symbol keys, then define the property.
var __publicField = (obj, key, value) => __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
// Guards that `obj` actually carries the private member before access.
var __accessCheck = (obj, member, msg) => member.has(obj) || __typeError("Cannot " + msg);
// Reads a private field (WeakMap-backed), optionally through a getter.
var __privateGet = (obj, member, getter) => (__accessCheck(obj, member, "read from private field"), getter ? getter.call(obj) : member.get(obj));
// Registers a private member on an instance; double-add is an error.
var __privateAdd = (obj, member, value) => member.has(obj) ? __typeError("Cannot add the same private member more than once") : member instanceof WeakSet ? member.add(obj) : member.set(obj, value);
// Writes a private field and returns the value. NOTE: the `setter` parameter
// is accepted but unused in this build.
var __privateSet = (obj, member, value, setter) => (__accessCheck(obj, member, "write to private field"), member.set(obj, value), value);
// WeakMap holders for Binary's private fields (#bytes, #opaqueBytes, #hex,
// #opaqueHex, #str); the maps themselves are created after the class body.
var _bytes, _opaqueBytes, _hex, _opaqueHex, _str;
const textEncoder$3 = new TextEncoder();
const textDecoder$2 = new TextDecoder();
// Decoder for compact-length-prefixed bytes: [1] picks the decoder half of
// the scale-ts Tuple codec.
const opaqueBytesDec = Tuple(compact, Bytes(Infinity))[1];
// Wrapper around a Uint8Array with lazily-computed, cached views: UTF-8 text,
// hex, and "opaque" (compact-length-prefixed) bytes/hex.
class Binary {
  // `data`: raw bytes, or — when `opaque` is true — compact-length-prefixed
  // bytes, which are validated and unwrapped (the prefix must equal the
  // remaining byte count).
  constructor(data, opaque = false) {
    __privateAdd(this, _bytes);
    __privateAdd(this, _opaqueBytes, null);
    __privateAdd(this, _hex, null);
    __privateAdd(this, _opaqueHex, null);
    __privateAdd(this, _str, null);
    // Each accessor computes its view on first call and caches it via
    // __privateSet (which returns the stored value).
    __publicField(this, "asText", () => __privateGet(this, _str) ?? __privateSet(this, _str, textDecoder$2.decode(__privateGet(this, _bytes))));
    __publicField(this, "asHex", () => __privateGet(this, _hex) ?? __privateSet(this, _hex, toHex(__privateGet(this, _bytes))));
    __publicField(this, "asOpaqueHex", () => __privateGet(this, _opaqueHex) ?? __privateSet(this, _opaqueHex, toHex(this.asOpaqueBytes())));
    __publicField(this, "asBytes", () => __privateGet(this, _bytes));
    // Opaque form = compact-encoded length followed by the raw bytes.
    // compact[0] is the encoder half of the scale-ts compact codec.
    __publicField(this, "asOpaqueBytes", () => __privateGet(this, _opaqueBytes) ?? __privateSet(this, _opaqueBytes, mergeUint8([
      compact[0](__privateGet(this, _bytes).length),
      __privateGet(this, _bytes)
    ])));
    if (opaque) {
      try {
        const [len, bytes] = opaqueBytesDec(data);
        if (len === bytes.length) {
          // Valid prefix: cache both the raw and the opaque representation.
          __privateSet(this, _bytes, bytes);
          __privateSet(this, _opaqueBytes, data);
          return;
        }
      } catch (_) {
        // Malformed compact prefix: fall through to the error below.
      }
      throw new Error("Invalid opaque bytes");
    } else __privateSet(this, _bytes, data);
  }
  // Constructs from a UTF-8 string.
  static fromText(input) {
    return new this(textEncoder$3.encode(input));
  }
  // Constructs from a plain hex string.
  static fromHex(input) {
    return new this(fromHex(input));
  }
  // Constructs from compact-length-prefixed hex (validated in constructor).
  static fromOpaqueHex(input) {
    return new this(fromHex(input), true);
  }
  static fromBytes(input) {
    return new this(input);
  }
  static fromOpaqueBytes(input) {
    return new this(input, true);
  }
}
// Backing stores for the private fields declared above the class.
_bytes = new WeakMap();
_opaqueBytes = new WeakMap();
_hex = new WeakMap();
_opaqueHex = new WeakMap();
_str = new WeakMap();
// Encoder half of the default AccountId codec (ss58Format 42, 32 bytes),
// used by FixedSizeBinary.fromAccountId32 below.
const [accountIdEncoder] = AccountId();
// Fixed-length specialization of Binary. The explicit single-argument
// constructor prevents callers from reaching Binary's opaque mode.
class FixedSizeBinary extends Binary {
  constructor(data) {
    super(data);
  }
  // Wraps a plain array of byte values.
  static fromArray(input) {
    const bytes = new Uint8Array(input);
    return new this(bytes);
  }
  // Encodes an SS58 AccountId32 string into its 32 raw key bytes.
  static fromAccountId32(input) {
    const raw = accountIdEncoder(input);
    return new this(raw);
  }
}
// Encoder half: serialize the wrapped bytes via scale-ts Bytes(nBytes).
const enc$2 = (nBytes) => {
  const encodeBytes = Bytes.enc(nBytes);
  return (binary) => encodeBytes(binary.asBytes());
};
// Decoder half: read the bytes and wrap them in the matching flavor —
// variable length yields Binary, a fixed length yields FixedSizeBinary.
const dec$2 = (nBytes) => {
  const decodeBytes = Bytes.dec(nBytes);
  const Wrapper = nBytes == null ? Binary : FixedSizeBinary;
  return (data) => Wrapper.fromBytes(decodeBytes(data));
};
// Codec for Binary / FixedSizeBinary values.
const Bin = (nBytes) => createCodec(enc$2(nBytes), dec$2(nBytes));
Bin.enc = enc$2;
Bin.dec = dec$2;
export { Bin, Binary, FixedSizeBinary };
//# sourceMappingURL=Binary.mjs.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,37 @@
import { createCodec, createDecoder, Bytes } from 'scale-ts';
import { compactNumber } from './compact.mjs';
import { mergeUint8 } from '@pezkuwi/papi-utils';
// Decoder: reads a compact bit count, then ceil(count/8) packed bytes, and
// expands them into an array of 0/1 values. `isLsb` picks the bit order
// within each byte (least-significant-first by default).
const dec = (isLsb = true) => createDecoder((data) => {
  const bitsLen = compactNumber.dec(data);
  const packed = Bytes(Math.ceil(bitsLen / 8)).dec(data);
  const bits = new Array(bitsLen);
  let written = 0;
  for (const byte of packed) {
    for (let bit = 0; bit < 8; bit++) {
      if (written >= bitsLen) break;
      const shift = isLsb ? bit : 7 - bit;
      bits[written++] = byte >> shift & 1;
    }
  }
  return bits;
});
// Encoder: compact-encodes the bit count, then packs the 0/1 values into
// bytes with the selected bit order.
const enc = (isLsb = true) => (input) => {
  const lenEncoded = compactNumber.enc(input.length);
  const packed = new Uint8Array(Math.ceil(input.length / 8));
  input.forEach((bit, idx) => {
    const bitPos = idx & 7;
    packed[idx >> 3] |= bit << (isLsb ? bitPos : 7 - bitPos);
  });
  return mergeUint8([lenEncoded, packed]);
};
// Codec over Array<0 | 1>.
const BitSeq = (isLsb) => createCodec(enc(isLsb), dec(isLsb));
BitSeq.enc = enc;
BitSeq.dec = dec;
export { BitSeq };
//# sourceMappingURL=BitSeq.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"BitSeq.mjs","sources":["../../../../src/codecs/scale/BitSeq.ts"],"sourcesContent":["import {\n Bytes,\n Codec,\n Decoder,\n Encoder,\n createCodec,\n createDecoder,\n} from \"scale-ts\"\nimport { compactNumber } from \"./compact\"\nimport { mergeUint8 } from \"@pezkuwi/papi-utils\"\n\nexport type BitSeq = Array<0 | 1>\n\nconst dec: (isLsb?: boolean) => Decoder<BitSeq> = (isLsb = true) =>\n createDecoder((data) => {\n const bitsLen = compactNumber.dec(data)\n const bytesLen = Math.ceil(bitsLen / 8)\n const bytes = Bytes(bytesLen).dec(data)\n\n const result = new Array<0 | 1>(bitsLen)\n let resultIdx = 0\n bytes.forEach((val) => {\n for (let i = 0; i < 8 && resultIdx < bitsLen; i++) {\n const actualIdx = isLsb ? i : 7 - i\n result[resultIdx++] = ((val >> actualIdx) & 1) as 1 | 0\n }\n })\n return result\n })\n\nconst enc: (isLsb?: boolean) => Encoder<BitSeq> =\n (isLsb = true) =>\n (input) => {\n const lenEncoded = compactNumber.enc(input.length)\n const nBytes = Math.ceil(input.length / 8)\n\n const bytes = new Uint8Array(nBytes)\n for (let byteIdx = 0; byteIdx < nBytes; byteIdx++) {\n let inputIdx = byteIdx * 8\n let byte = 0\n for (let i = 0; i < 8 && inputIdx < input.length; i++, inputIdx++)\n byte |= input[inputIdx] << (isLsb ? 
i : 7 - i)\n bytes[byteIdx] = byte\n }\n\n return mergeUint8([lenEncoded, bytes])\n }\n\nexport const BitSeq = (isLsb?: boolean): Codec<BitSeq> =>\n createCodec(enc(isLsb), dec(isLsb))\n\nBitSeq.enc = enc\nBitSeq.dec = dec\n"],"names":[],"mappings":";;;;AAaA,MAAM,MAA4C,CAAC,KAAA,GAAQ,IAAA,KACzD,aAAA,CAAc,CAAC,IAAA,KAAS;AACtB,EAAA,MAAM,OAAA,GAAU,aAAA,CAAc,GAAA,CAAI,IAAI,CAAA;AACtC,EAAA,MAAM,QAAA,GAAW,IAAA,CAAK,IAAA,CAAK,OAAA,GAAU,CAAC,CAAA;AACtC,EAAA,MAAM,KAAA,GAAQ,KAAA,CAAM,QAAQ,CAAA,CAAE,IAAI,IAAI,CAAA;AAEtC,EAAA,MAAM,MAAA,GAAS,IAAI,KAAA,CAAa,OAAO,CAAA;AACvC,EAAA,IAAI,SAAA,GAAY,CAAA;AAChB,EAAA,KAAA,CAAM,OAAA,CAAQ,CAAC,GAAA,KAAQ;AACrB,IAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,CAAA,IAAK,SAAA,GAAY,SAAS,CAAA,EAAA,EAAK;AACjD,MAAA,MAAM,SAAA,GAAY,KAAA,GAAQ,CAAA,GAAI,CAAA,GAAI,CAAA;AAClC,MAAA,MAAA,CAAO,SAAA,EAAW,CAAA,GAAM,GAAA,IAAO,SAAA,GAAa,CAAA;AAAA,IAC9C;AAAA,EACF,CAAC,CAAA;AACD,EAAA,OAAO,MAAA;AACT,CAAC,CAAA;AAEH,MAAM,GAAA,GACJ,CAAC,KAAA,GAAQ,IAAA,KACT,CAAC,KAAA,KAAU;AACT,EAAA,MAAM,UAAA,GAAa,aAAA,CAAc,GAAA,CAAI,KAAA,CAAM,MAAM,CAAA;AACjD,EAAA,MAAM,MAAA,GAAS,IAAA,CAAK,IAAA,CAAK,KAAA,CAAM,SAAS,CAAC,CAAA;AAEzC,EAAA,MAAM,KAAA,GAAQ,IAAI,UAAA,CAAW,MAAM,CAAA;AACnC,EAAA,KAAA,IAAS,OAAA,GAAU,CAAA,EAAG,OAAA,GAAU,MAAA,EAAQ,OAAA,EAAA,EAAW;AACjD,IAAA,IAAI,WAAW,OAAA,GAAU,CAAA;AACzB,IAAA,IAAI,IAAA,GAAO,CAAA;AACX,IAAA,KAAA,IAAS,IAAI,CAAA,EAAG,CAAA,GAAI,KAAK,QAAA,GAAW,KAAA,CAAM,QAAQ,CAAA,EAAA,EAAK,QAAA,EAAA;AACrD,MAAA,IAAA,IAAQ,KAAA,CAAM,QAAQ,CAAA,KAAM,KAAA,GAAQ,IAAI,CAAA,GAAI,CAAA,CAAA;AAC9C,IAAA,KAAA,CAAM,OAAO,CAAA,GAAI,IAAA;AAAA,EACnB;AAEA,EAAA,OAAO,UAAA,CAAW,CAAC,UAAA,EAAY,KAAK,CAAC,CAAA;AACvC,CAAA;AAEK,MAAM,MAAA,GAAS,CAAC,KAAA,KACrB,WAAA,CAAY,IAAI,KAAK,CAAA,EAAG,GAAA,CAAI,KAAK,CAAC;AAEpC,MAAA,CAAO,GAAA,GAAM,GAAA;AACb,MAAA,CAAO,GAAA,GAAM,GAAA;;;;"}
@@ -0,0 +1,17 @@
import { fromHex, toHex } from '@pezkuwi/papi-utils';
import { createCodec, Bytes } from 'scale-ts';
// Encoder half: parse the hex string to bytes, then apply Bytes(nBytes).
const enc$1 = (nBytes) => {
  const encodeBytes = Bytes.enc(nBytes);
  return (hex) => encodeBytes(fromHex(hex));
};
// Decoder half: decode the bytes, then render them back as a hex string.
const dec$1 = (nBytes) => {
  const decodeBytes = Bytes.dec(nBytes);
  return (data) => toHex(decodeBytes(data));
};
// Codec for hex-encoded byte strings; `nBytes` is optional and forwarded
// to the underlying scale-ts Bytes codec.
const Hex = (nBytes) => createCodec(enc$1(nBytes), dec$1(nBytes));
Hex.enc = enc$1;
Hex.dec = dec$1;
export { Hex };
//# sourceMappingURL=Hex.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"Hex.mjs","sources":["../../../../src/codecs/scale/Hex.ts"],"sourcesContent":["import { fromHex, toHex } from \"@pezkuwi/papi-utils\"\nimport { Bytes, Codec, Decoder, Encoder, createCodec } from \"scale-ts\"\n\nexport type HexString = string & { __hexString?: unknown }\n\nconst enc = (nBytes?: number): Encoder<HexString> => {\n const _enc = Bytes.enc(nBytes)\n return (value: string) => _enc(fromHex(value))\n}\n\nconst dec = (nBytes?: number): Decoder<HexString> => {\n const _dec = Bytes.dec(nBytes)\n return (value) => toHex(_dec(value)) as HexString\n}\n\nexport const Hex = (nBytes?: number): Codec<HexString> =>\n createCodec(enc(nBytes), dec(nBytes))\n\nHex.enc = enc\nHex.dec = dec\n"],"names":["enc","dec"],"mappings":";;;AAKA,MAAMA,KAAA,GAAM,CAAC,MAAA,KAAwC;AACnD,EAAA,MAAM,IAAA,GAAO,KAAA,CAAM,GAAA,CAAI,MAAM,CAAA;AAC7B,EAAA,OAAO,CAAC,KAAA,KAAkB,IAAA,CAAK,OAAA,CAAQ,KAAK,CAAC,CAAA;AAC/C,CAAA;AAEA,MAAMC,KAAA,GAAM,CAAC,MAAA,KAAwC;AACnD,EAAA,MAAM,IAAA,GAAO,KAAA,CAAM,GAAA,CAAI,MAAM,CAAA;AAC7B,EAAA,OAAO,CAAC,KAAA,KAAU,KAAA,CAAM,IAAA,CAAK,KAAK,CAAC,CAAA;AACrC,CAAA;AAEO,MAAM,GAAA,GAAM,CAAC,MAAA,KAClB,WAAA,CAAYD,MAAI,MAAM,CAAA,EAAGC,KAAA,CAAI,MAAM,CAAC;AAEtC,GAAA,CAAI,GAAA,GAAMD,KAAA;AACV,GAAA,CAAI,GAAA,GAAMC,KAAA;;;;"}
@@ -0,0 +1,26 @@
import { createCodec } from 'scale-ts';
// Lazily resolves the real encoder on first use, then swaps itself out for
// it, so `value()` runs exactly once and later calls go straight through.
const selfEncoder = (value) => {
  let cache = (x) => {
    const resolved = value();
    cache = resolved;
    return resolved(x);
  };
  return (x) => cache(x);
};
// Same one-shot lazy-resolution trick for the decoder half.
const selfDecoder = (value) => {
  let cache = (x) => {
    const resolved = value();
    cache = resolved;
    return resolved(x);
  };
  return (x) => cache(x);
};
// Codec whose underlying codec is produced lazily — allows self-referential
// (recursive) codec definitions.
const Self = (value) => {
  return createCodec(
    selfEncoder(() => value().enc),
    selfDecoder(() => value().dec)
  );
};
export { Self, selfDecoder, selfEncoder };
//# sourceMappingURL=Self.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"Self.mjs","sources":["../../../../src/codecs/scale/Self.ts"],"sourcesContent":["import { Codec, Decoder, Encoder, createCodec, _void } from \"scale-ts\"\n\nexport const selfEncoder = <T>(value: () => Encoder<T>): Encoder<T> => {\n let cache: Encoder<T> = (x) => {\n const encoder = value()\n cache = encoder\n return encoder(x)\n }\n\n return (x) => cache(x)\n}\n\nexport const selfDecoder = <T>(value: () => Decoder<T>): Decoder<T> => {\n let cache: Decoder<T> = (x) => {\n const decoder = value()\n const result = decoder\n cache = decoder\n return result(x)\n }\n\n return (x) => cache(x)\n}\n\nexport const Self = <T>(value: () => Codec<T>): Codec<T> =>\n createCodec(\n selfEncoder(() => value().enc),\n selfDecoder(() => value().dec),\n )\n"],"names":[],"mappings":";;AAEO,MAAM,WAAA,GAAc,CAAI,KAAA,KAAwC;AACrE,EAAA,IAAI,KAAA,GAAoB,CAAC,CAAA,KAAM;AAC7B,IAAA,MAAM,UAAU,KAAA,EAAM;AACtB,IAAA,KAAA,GAAQ,OAAA;AACR,IAAA,OAAO,QAAQ,CAAC,CAAA;AAAA,EAClB,CAAA;AAEA,EAAA,OAAO,CAAC,CAAA,KAAM,KAAA,CAAM,CAAC,CAAA;AACvB;AAEO,MAAM,WAAA,GAAc,CAAI,KAAA,KAAwC;AACrE,EAAA,IAAI,KAAA,GAAoB,CAAC,CAAA,KAAM;AAC7B,IAAA,MAAM,UAAU,KAAA,EAAM;AACtB,IAAA,MAAM,MAAA,GAAS,OAAA;AACf,IAAA,KAAA,GAAQ,OAAA;AACR,IAAA,OAAO,OAAO,CAAC,CAAA;AAAA,EACjB,CAAA;AAEA,EAAA,OAAO,CAAC,CAAA,KAAM,KAAA,CAAM,CAAC,CAAA;AACvB;AAEO,MAAM,IAAA,GAAO,CAAI,KAAA,KACtB,WAAA;AAAA,EACE,WAAA,CAAY,MAAM,KAAA,EAAM,CAAE,GAAG,CAAA;AAAA,EAC7B,WAAA,CAAY,MAAM,KAAA,EAAM,CAAE,GAAG;AAC/B;;;;"}
@@ -0,0 +1,37 @@
import { createCodec, Enum } from 'scale-ts';
import { mapObject } from '@pezkuwi/papi-utils';
import { Enum as Enum$1 } from '../../types/enum.mjs';
import { withInner } from './with-inner.mjs';
// Encoder wrapper: adapts the local Enum value shape ({ type, value }) to
// the scale-ts Enum shape ({ tag, value }) before encoding. The first
// argument (the variants record) is attached via withInner.
const VariantEnc = (...args) => {
  const encodeEnum = Enum.enc(...args);
  const encode = (v) => encodeEnum({ tag: v.type, value: v.value });
  return withInner(encode, args[0]);
};
// Decoder wrapper: decodes the scale-ts Enum shape and re-wraps the result
// with the local Enum constructor.
const VariantDec = (...args) => {
  const decodeEnum = Enum.dec(...args);
  const decode = (input) => {
    const decoded = decodeEnum(input);
    return Enum$1(decoded.tag, decoded.value);
  };
  return withInner(decode, args[0]);
};
// Full codec over a record of [encoder, decoder] pairs keyed by variant name.
const Variant = (inner, ...args) => {
  const encoders = mapObject(inner, ([encoder]) => encoder);
  const decoders = mapObject(inner, ([, decoder]) => decoder);
  return withInner(
    createCodec(VariantEnc(encoders, ...args), VariantDec(decoders, ...args)),
    inner
  );
};
Variant.enc = VariantEnc;
Variant.dec = VariantDec;
// Plain scale-ts Enum, tagged with its variants record via withInner.
const ScaleEnum = (inner, ...args) => withInner(Enum(inner, ...args), inner);
ScaleEnum.enc = (variants, ...rest) => withInner(Enum.enc(variants, ...rest), variants);
ScaleEnum.dec = (variants, ...rest) => withInner(Enum.dec(variants, ...rest), variants);
export { ScaleEnum, Variant };
//# sourceMappingURL=Variant.mjs.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,24 @@
import { createDecoder, Bytes, createCodec } from 'scale-ts';
import { compactNumber } from './compact.mjs';
// Decodes a compact bit-length followed by the packed bytes; the bits are
// left packed ({ bytes, bitsLen }) rather than expanded.
const bitSequenceDecoder = createDecoder((data) => {
  const bitsLen = compactNumber.dec(data);
  const bytes = Bytes(Math.ceil(bitsLen / 8)).dec(data);
  return { bytes, bitsLen };
});
// Encodes { bitsLen, bytes }: compact length prefix, then the raw bytes.
// Rejects inputs claiming more bits than the byte payload can hold.
const bitSequenceEncoder = (input) => {
  const { bitsLen, bytes } = input;
  if (bitsLen > bytes.length * 8)
    throw new Error(
      `Not enough bytes. (bitsLen:${bitsLen}, bytesLen:${bytes.length})`
    );
  const lenEncoded = compactNumber.enc(bitsLen);
  const out = new Uint8Array(bytes.length + lenEncoded.length);
  out.set(lenEncoded, 0);
  out.set(bytes, lenEncoded.length);
  return out;
};
const bitSequence$1 = createCodec(bitSequenceEncoder, bitSequenceDecoder);
export { bitSequence$1 as bitSequence };
//# sourceMappingURL=bitSequence.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"bitSequence.mjs","sources":["../../../../src/codecs/scale/bitSequence.ts"],"sourcesContent":["import { Bytes, Decoder, Encoder, createCodec, createDecoder } from \"scale-ts\"\nimport { compactNumber } from \"./compact\"\n\nexport interface BitSequence {\n bitsLen: number\n bytes: Uint8Array\n}\n\nconst bitSequenceDecoder: Decoder<BitSequence> = createDecoder((data) => {\n const bitsLen = compactNumber.dec(data)\n\n const bytesLen = Math.ceil(bitsLen / 8)\n const bytes = Bytes(bytesLen).dec(data)\n return { bytes, bitsLen }\n})\n\nconst bitSequenceEncoder: Encoder<BitSequence> = (input) => {\n if (input.bitsLen > input.bytes.length * 8)\n throw new Error(\n `Not enough bytes. (bitsLen:${input.bitsLen}, bytesLen:${input.bytes.length})`,\n )\n\n const lenEncoded = compactNumber.enc(input.bitsLen)\n const result = new Uint8Array(input.bytes.length + lenEncoded.length)\n result.set(lenEncoded, 0)\n result.set(input.bytes, lenEncoded.length)\n return result\n}\n\nexport const bitSequence = createCodec(bitSequenceEncoder, bitSequenceDecoder)\n"],"names":["bitSequence"],"mappings":";;;AAQA,MAAM,kBAAA,GAA2C,aAAA,CAAc,CAAC,IAAA,KAAS;AACvE,EAAA,MAAM,OAAA,GAAU,aAAA,CAAc,GAAA,CAAI,IAAI,CAAA;AAEtC,EAAA,MAAM,QAAA,GAAW,IAAA,CAAK,IAAA,CAAK,OAAA,GAAU,CAAC,CAAA;AACtC,EAAA,MAAM,KAAA,GAAQ,KAAA,CAAM,QAAQ,CAAA,CAAE,IAAI,IAAI,CAAA;AACtC,EAAA,OAAO,EAAE,OAAO,OAAA,EAAQ;AAC1B,CAAC,CAAA;AAED,MAAM,kBAAA,GAA2C,CAAC,KAAA,KAAU;AAC1D,EAAA,IAAI,KAAA,CAAM,OAAA,GAAU,KAAA,CAAM,KAAA,CAAM,MAAA,GAAS,CAAA;AACvC,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,8BAA8B,KAAA,CAAM,OAAO,CAAA,WAAA,EAAc,KAAA,CAAM,MAAM,MAAM,CAAA,CAAA;AAAA,KAC7E;AAEF,EAAA,MAAM,UAAA,GAAa,aAAA,CAAc,GAAA,CAAI,KAAA,CAAM,OAAO,CAAA;AAClD,EAAA,MAAM,SAAS,IAAI,UAAA,CAAW,MAAM,KAAA,CAAM,MAAA,GAAS,WAAW,MAAM,CAAA;AACpE,EAAA,MAAA,CAAO,GAAA,CAAI,YAAY,CAAC,CAAA;AACxB,EAAA,MAAA,CAAO,GAAA,CAAI,KAAA,CAAM,KAAA,EAAO,UAAA,CAAW,MAAM,CAAA;AACzC,EAAA,OAAO,MAAA;AACT,CAAA;AAEO,MAAMA,aAAA,GAAc,WAAA,CAAY,kBAAA,EAAoB,kBAAkB;;;;"}
@@ -0,0 +1,10 @@
import { enhanceCodec, u8 } from 'scale-ts';
// Single-character codec over u8: encodes the first UTF-16 code unit of the
// string; decodes back through String.fromCharCode.
const charToCode = (value) => value.charCodeAt(0);
const char = enhanceCodec(u8, charToCode, String.fromCharCode);
export { char };
//# sourceMappingURL=char.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"char.mjs","sources":["../../../../src/codecs/scale/char.ts"],"sourcesContent":["import { enhanceCodec, u8 } from \"scale-ts\"\n\nexport const char = enhanceCodec(\n u8,\n (str: string) => str.charCodeAt(0),\n String.fromCharCode,\n)\n"],"names":[],"mappings":";;AAEO,MAAM,IAAA,GAAO,YAAA;AAAA,EAClB,EAAA;AAAA,EACA,CAAC,GAAA,KAAgB,GAAA,CAAI,UAAA,CAAW,CAAC,CAAA;AAAA,EACjC,MAAA,CAAO;AACT;;;;"}
@@ -0,0 +1,7 @@
import { enhanceCodec, compact } from 'scale-ts';
// Compact-encoded integers surfaced as a plain number / bigint respectively.
const identity = (v) => v;
const compactNumber = enhanceCodec(compact, identity, Number);
const compactBn = enhanceCodec(compact, identity, BigInt);
export { compactBn, compactNumber };
//# sourceMappingURL=compact.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"compact.mjs","sources":["../../../../src/codecs/scale/compact.ts"],"sourcesContent":["import { compact, enhanceCodec } from \"scale-ts\"\n\nexport const compactNumber = enhanceCodec(compact, (v) => v, Number)\nexport const compactBn = enhanceCodec(compact, (v) => v, BigInt)\n"],"names":[],"mappings":";;AAEO,MAAM,gBAAgB,YAAA,CAAa,OAAA,EAAS,CAAC,CAAA,KAAM,GAAG,MAAM;AAC5D,MAAM,YAAY,YAAA,CAAa,OAAA,EAAS,CAAC,CAAA,KAAM,GAAG,MAAM;;;;"}
@@ -0,0 +1,37 @@
import { fromHex, toHex } from '@pezkuwi/papi-utils';
import { Bytes, createCodec, createDecoder } from 'scale-ts';
import '@noble/hashes/blake2.js';
import '@noble/hashes/blake3.js';
import { Keccak256 } from '../../hashes/keccak.mjs';
import { textEncoder } from './fixed-str.mjs';
// Mixed-case checksums a lowercase 0x-prefixed 40-char hex address: a hex
// character is upper-cased when the matching nibble of the keccak-256 hash
// of the (prefix-stripped) address text is greater than 7.
const getFormattedAddress = (hexAddress) => {
  const body = hexAddress.slice(2);
  const hash = toHex(Keccak256(textEncoder.encode(body))).slice(2);
  const out = [];
  for (let i = 0; i < 40; i++) {
    const ch = body[i];
    out.push(parseInt(hash[i], 16) > 7 ? ch.toUpperCase() : ch);
  }
  return `0x${out.join("")}`;
};
// Decoder half of Bytes(20) (fixed 20-byte reads).
const bytes20Dec = Bytes(20)[1];
// Codec for 20-byte Ethereum-style addresses. Encoding accepts uniformly
// lower/upper-cased input as-is, otherwise requires a valid mixed-case
// checksum; decoding always emits the checksummed form.
const ethAccount = createCodec(
  (input) => {
    const bytes = fromHex(input);
    if (bytes.length !== 20)
      throw new Error(`Invalid length found on EthAddress(${input})`);
    const lower = toHex(bytes);
    const isUniformCase = input === lower || input === lower.toUpperCase();
    if (!isUniformCase && getFormattedAddress(lower) !== input)
      throw new Error(`Invalid checksum found on EthAddress(${input})`);
    return bytes;
  },
  createDecoder((bytes) => getFormattedAddress(toHex(bytes20Dec(bytes))))
);
);
export { ethAccount };
//# sourceMappingURL=ethAccount.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"ethAccount.mjs","sources":["../../../../src/codecs/scale/ethAccount.ts"],"sourcesContent":["import { fromHex, toHex } from \"@pezkuwi/papi-utils\"\nimport { Bytes, createCodec, createDecoder } from \"scale-ts\"\nimport { Keccak256 } from \"../../hashes\"\nimport { textEncoder } from \"./fixed-str\"\n\nconst getFormattedAddress = (hexAddress: string) => {\n const nonChecksum = hexAddress.slice(2)\n const hashedAddress = toHex(Keccak256(textEncoder.encode(nonChecksum))).slice(\n 2,\n )\n const result = new Array(40)\n\n for (let i = 0; i < 40; i++) {\n const checksumVal = parseInt(hashedAddress[i], 16)\n const char = nonChecksum[i]\n result[i] = checksumVal > 7 ? char.toUpperCase() : char\n }\n\n return `0x${result.join(\"\")}`\n}\n\nconst bytes20Dec = Bytes(20)[1]\n\nexport const ethAccount = createCodec<string>(\n (input: string) => {\n const bytes = fromHex(input)\n if (bytes.length !== 20)\n throw new Error(`Invalid length found on EthAddress(${input})`)\n\n const hexAddress = toHex(bytes)\n if (input === hexAddress || input === hexAddress.toUpperCase()) return bytes\n\n if (getFormattedAddress(hexAddress) !== input)\n throw new Error(`Invalid checksum found on EthAddress(${input})`)\n\n return bytes\n },\n createDecoder((bytes) => 
getFormattedAddress(toHex(bytes20Dec(bytes)))),\n)\n"],"names":[],"mappings":";;;;;;;AAKA,MAAM,mBAAA,GAAsB,CAAC,UAAA,KAAuB;AAClD,EAAA,MAAM,WAAA,GAAc,UAAA,CAAW,KAAA,CAAM,CAAC,CAAA;AACtC,EAAA,MAAM,aAAA,GAAgB,MAAM,SAAA,CAAU,WAAA,CAAY,OAAO,WAAW,CAAC,CAAC,CAAA,CAAE,KAAA;AAAA,IACtE;AAAA,GACF;AACA,EAAA,MAAM,MAAA,GAAS,IAAI,KAAA,CAAM,EAAE,CAAA;AAE3B,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,EAAA,EAAI,CAAA,EAAA,EAAK;AAC3B,IAAA,MAAM,WAAA,GAAc,QAAA,CAAS,aAAA,CAAc,CAAC,GAAG,EAAE,CAAA;AACjD,IAAA,MAAM,IAAA,GAAO,YAAY,CAAC,CAAA;AAC1B,IAAA,MAAA,CAAO,CAAC,CAAA,GAAI,WAAA,GAAc,CAAA,GAAI,IAAA,CAAK,aAAY,GAAI,IAAA;AAAA,EACrD;AAEA,EAAA,OAAO,CAAA,EAAA,EAAK,MAAA,CAAO,IAAA,CAAK,EAAE,CAAC,CAAA,CAAA;AAC7B,CAAA;AAEA,MAAM,UAAA,GAAa,KAAA,CAAM,EAAE,CAAA,CAAE,CAAC,CAAA;AAEvB,MAAM,UAAA,GAAa,WAAA;AAAA,EACxB,CAAC,KAAA,KAAkB;AACjB,IAAA,MAAM,KAAA,GAAQ,QAAQ,KAAK,CAAA;AAC3B,IAAA,IAAI,MAAM,MAAA,KAAW,EAAA;AACnB,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,mCAAA,EAAsC,KAAK,CAAA,CAAA,CAAG,CAAA;AAEhE,IAAA,MAAM,UAAA,GAAa,MAAM,KAAK,CAAA;AAC9B,IAAA,IAAI,UAAU,UAAA,IAAc,KAAA,KAAU,UAAA,CAAW,WAAA,IAAe,OAAO,KAAA;AAEvE,IAAA,IAAI,mBAAA,CAAoB,UAAU,CAAA,KAAM,KAAA;AACtC,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,qCAAA,EAAwC,KAAK,CAAA,CAAA,CAAG,CAAA;AAElE,IAAA,OAAO,KAAA;AAAA,EACT,CAAA;AAAA,EACA,aAAA,CAAc,CAAC,KAAA,KAAU,mBAAA,CAAoB,MAAM,UAAA,CAAW,KAAK,CAAC,CAAC,CAAC;AACxE;;;;"}
@@ -0,0 +1,12 @@
import { enhanceCodec, Bytes } from 'scale-ts';
const textEncoder$2 = new TextEncoder();
const textDecoder$1 = new TextDecoder();
// Codec for a string stored as exactly nBytes of UTF-8 via Bytes(nBytes).
const fixedStr = (nBytes) => {
  const encodeText = (value) => textEncoder$2.encode(value);
  const decodeText = (raw) => textDecoder$1.decode(raw);
  return enhanceCodec(Bytes(nBytes), encodeText, decodeText);
};
export { fixedStr, textEncoder$2 as textEncoder };
//# sourceMappingURL=fixed-str.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"fixed-str.mjs","sources":["../../../../src/codecs/scale/fixed-str.ts"],"sourcesContent":["import { Bytes, enhanceCodec } from \"scale-ts\"\n\nexport const textEncoder = new TextEncoder()\nconst textDecoder = new TextDecoder()\n\nexport const fixedStr = (nBytes: number) =>\n enhanceCodec(\n Bytes(nBytes),\n (str: string) => textEncoder.encode(str),\n (bytes) => textDecoder.decode(bytes),\n )\n"],"names":["textEncoder","textDecoder"],"mappings":";;AAEO,MAAMA,aAAA,GAAc,IAAI,WAAA;AAC/B,MAAMC,aAAA,GAAc,IAAI,WAAA,EAAY;AAE7B,MAAM,QAAA,GAAW,CAAC,MAAA,KACvB,YAAA;AAAA,EACE,MAAM,MAAM,CAAA;AAAA,EACZ,CAAC,GAAA,KAAgBD,aAAA,CAAY,MAAA,CAAO,GAAG,CAAA;AAAA,EACvC,CAAC,KAAA,KAAUC,aAAA,CAAY,MAAA,CAAO,KAAK;AACrC;;;;"}
@@ -0,0 +1,21 @@
import { Struct as Struct$1, Vector as Vector$1, Tuple as Tuple$1, Result as Result$1, Option as Option$1 } from 'scale-ts';
import { withInner } from './with-inner.mjs';
// scale-ts shape combinators, re-exported with their inner definition
// attached (withInner) so downstream code can introspect the structure.
const Struct = (codecs) => withInner(Struct$1(codecs), codecs);
Struct.enc = (codecs) => withInner(Struct$1.enc(codecs), codecs);
Struct.dec = (codecs) => withInner(Struct$1.dec(codecs), codecs);
const Tuple = (...members) => withInner(Tuple$1(...members), members);
Tuple.enc = (...members) => withInner(Tuple$1.enc(...members), members);
Tuple.dec = (...members) => withInner(Tuple$1.dec(...members), members);
const Vector = (item, ...rest) => withInner(Vector$1(item, ...rest), item);
Vector.enc = (item, ...rest) => withInner(Vector$1.enc(item, ...rest), item);
Vector.dec = (item, ...rest) => withInner(Vector$1.dec(item, ...rest), item);
const Result = (ok, ko) => withInner(Result$1(ok, ko), { ok, ko });
Result.enc = (ok, ko) => withInner(Result$1.enc(ok, ko), { ok, ko });
Result.dec = (ok, ko) => withInner(Result$1.dec(ok, ko), { ok, ko });
const Option = (item) => withInner(Option$1(item), item);
Option.enc = (item) => withInner(Option$1.enc(item), item);
Option.dec = (item) => withInner(Option$1.dec(item), item);
export { Option, Result, Struct, Tuple, Vector };
//# sourceMappingURL=shaped.mjs.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,8 @@
// Attaches the codec's inner definition onto the codec object itself and
// returns that same (mutated) object.
const withInner = (codec, inner) => {
  codec.inner = inner;
  return codec;
};
export { withInner };
//# sourceMappingURL=with-inner.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"with-inner.mjs","sources":["../../../../src/codecs/scale/with-inner.ts"],"sourcesContent":["export const withInner = <T, I>(codec: T, inner: I): T & { inner: I } => {\n const result: T & { inner: I } = codec as any\n result.inner = inner\n return result\n}\n"],"names":[],"mappings":"AAAO,MAAM,SAAA,GAAY,CAAO,KAAA,EAAU,KAAA,KAA+B;AACvE,EAAA,MAAM,MAAA,GAA2B,KAAA;AACjC,EAAA,MAAA,CAAO,KAAA,GAAQ,KAAA;AACf,EAAA,OAAO,MAAA;AACT;;;;"}
@@ -0,0 +1,26 @@
import { enhanceCodec, u8 } from 'scale-ts';
// Bidirectional map between extrinsic-type names and their 2-bit tag.
const TYPES = {
  bare: 0,
  0: "bare",
  general: 1,
  1: "general",
  signed: 2,
  2: "signed"
};
// u8 layout: low 6 bits = version, high 2 bits = type tag. Only v4
// bare/signed and v5 bare/general decode successfully.
const extrinsicFormat = enhanceCodec(
  u8,
  ({ version, type }) => version + (TYPES[type] << 6),
  (encoded) => {
    const version = encoded & 63;
    const tag = encoded >> 6;
    const okV4 = version === 4 && (tag === TYPES.bare || tag === TYPES.signed);
    const okV5 = version === 5 && (tag === TYPES.bare || tag === TYPES.general);
    if (okV4 || okV5) return { version, type: TYPES[tag] };
    throw new Error(`ExtrinsicFormat ${encoded} not valid`);
  }
);
export { extrinsicFormat };
//# sourceMappingURL=extrinsic-format.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"extrinsic-format.mjs","sources":["../../../src/extrinsics/extrinsic-format.ts"],"sourcesContent":["import { enhanceCodec, u8 } from \"scale-ts\"\n\nconst TYPES = {\n bare: 0b00,\n 0b00: \"bare\",\n general: 0b01,\n 0b01: \"general\",\n signed: 0b10,\n 0b10: \"signed\",\n} as const\n\nexport type ExtrinsicFormat =\n | { version: 4; type: \"bare\" | \"signed\" }\n | { version: 5; type: \"bare\" | \"general\" }\n\nexport const extrinsicFormat = enhanceCodec<number, ExtrinsicFormat>(\n u8,\n ({ version, type }) => version + (TYPES[type] << 6),\n (v) => {\n const version = v & 0x3f // 0b0011_1111\n const type = v >> 6\n if (version === 4 && (type === TYPES.bare || type === TYPES.signed))\n return { version, type: TYPES[type] }\n if (version === 5 && (type === TYPES.bare || type === TYPES.general))\n return { version, type: TYPES[type] }\n throw new Error(`ExtrinsicFormat ${v} not valid`)\n },\n)\n"],"names":[],"mappings":";;AAEA,MAAM,KAAA,GAAQ;AAAA,EACZ,IAAA,EAAM,CAAA;AAAA,EACN,CAAA,EAAM,MAAA;AAAA,EACN,OAAA,EAAS,CAAA;AAAA,EACT,CAAA,EAAM,SAAA;AAAA,EACN,MAAA,EAAQ,CAAA;AAAA,EACR,CAAA,EAAM;AACR,CAAA;AAMO,MAAM,eAAA,GAAkB,YAAA;AAAA,EAC7B,EAAA;AAAA,EACA,CAAC,EAAE,OAAA,EAAS,IAAA,OAAW,OAAA,IAAW,KAAA,CAAM,IAAI,CAAA,IAAK,CAAA,CAAA;AAAA,EACjD,CAAC,CAAA,KAAM;AACL,IAAA,MAAM,UAAU,CAAA,GAAI,EAAA;AACpB,IAAA,MAAM,OAAO,CAAA,IAAK,CAAA;AAClB,IAAA,IAAI,YAAY,CAAA,KAAM,IAAA,KAAS,KAAA,CAAM,IAAA,IAAQ,SAAS,KAAA,CAAM,MAAA,CAAA;AAC1D,MAAA,OAAO,EAAE,OAAA,EAAS,IAAA,EAAM,KAAA,CAAM,IAAI,CAAA,EAAE;AACtC,IAAA,IAAI,YAAY,CAAA,KAAM,IAAA,KAAS,KAAA,CAAM,IAAA,IAAQ,SAAS,KAAA,CAAM,OAAA,CAAA;AAC1D,MAAA,OAAO,EAAE,OAAA,EAAS,IAAA,EAAM,KAAA,CAAM,IAAI,CAAA,EAAE;AACtC,IAAA,MAAM,IAAI,KAAA,CAAM,CAAA,gBAAA,EAAmB,CAAC,CAAA,UAAA,CAAY,CAAA;AAAA,EAClD;AACF;;;;"}
+11
View File
@@ -0,0 +1,11 @@
import { mergeUint8 } from '@pezkuwi/papi-utils';
import { blake2b } from '@noble/hashes/blake2.js';
const len32$1 = { dkLen: 32 };
const len16 = { dkLen: 16 };
// 256-bit and 128-bit blake2b digests of the input bytes.
const Blake2256 = (input) => blake2b(input, len32$1);
const Blake2128 = (input) => blake2b(input, len16);
// 128-bit digest followed by the original input bytes.
const Blake2128Concat = (input) => mergeUint8([Blake2128(input), input]);
export { Blake2128, Blake2128Concat, Blake2256 };
//# sourceMappingURL=blake2.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"blake2.mjs","sources":["../../../src/hashes/blake2.ts"],"sourcesContent":["import { mergeUint8 } from \"@pezkuwi/papi-utils\"\nimport { blake2b } from \"@noble/hashes/blake2.js\"\n\nconst len32 = { dkLen: 32 }\nexport const Blake2256 = (encoded: Uint8Array) => blake2b(encoded, len32)\n\nconst len16 = { dkLen: 16 }\nexport const Blake2128 = (encoded: Uint8Array) => blake2b(encoded, len16)\n\nexport const Blake2128Concat = (encoded: Uint8Array) =>\n mergeUint8([Blake2128(encoded), encoded])\n"],"names":["len32"],"mappings":";;;AAGA,MAAMA,OAAA,GAAQ,EAAE,KAAA,EAAO,EAAA,EAAG;AACnB,MAAM,SAAA,GAAY,CAAC,OAAA,KAAwB,OAAA,CAAQ,SAASA,OAAK;AAExE,MAAM,KAAA,GAAQ,EAAE,KAAA,EAAO,EAAA,EAAG;AACnB,MAAM,SAAA,GAAY,CAAC,OAAA,KAAwB,OAAA,CAAQ,SAAS,KAAK;AAEjE,MAAM,eAAA,GAAkB,CAAC,OAAA,KAC9B,UAAA,CAAW,CAAC,SAAA,CAAU,OAAO,CAAA,EAAG,OAAO,CAAC;;;;"}
@@ -0,0 +1,9 @@
import { mergeUint8 } from '@pezkuwi/papi-utils';
import { blake3 } from '@noble/hashes/blake3.js';
const len32 = { dkLen: 32 };
// 256-bit blake3 digest; the Concat variant appends the original input.
const Blake3256 = (input) => blake3(input, len32);
const Blake3256Concat = (input) => mergeUint8([Blake3256(input), input]);
export { Blake3256, Blake3256Concat };
//# sourceMappingURL=blake3.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"blake3.mjs","sources":["../../../src/hashes/blake3.ts"],"sourcesContent":["import { mergeUint8 } from \"@pezkuwi/papi-utils\"\nimport { blake3 } from \"@noble/hashes/blake3.js\"\n\nconst len32 = { dkLen: 32 }\nexport const Blake3256 = (encoded: Uint8Array) => blake3(encoded, len32)\n\nexport const Blake3256Concat = (encoded: Uint8Array) =>\n mergeUint8([Blake3256(encoded), encoded])\n"],"names":[],"mappings":";;;AAGA,MAAM,KAAA,GAAQ,EAAE,KAAA,EAAO,EAAA,EAAG;AACnB,MAAM,SAAA,GAAY,CAAC,OAAA,KAAwB,MAAA,CAAO,SAAS,KAAK;AAEhE,MAAM,eAAA,GAAkB,CAAC,OAAA,KAC9B,UAAA,CAAW,CAAC,SAAA,CAAU,OAAO,CAAA,EAAG,OAAO,CAAC;;;;"}
+134
View File
@@ -0,0 +1,134 @@
// Packs four 16-bit words into one u64 BigInt, word 0 least significant.
// NOTE(review): relies on the host being little-endian so the Uint16Array
// buffer layout matches getBigUint64(..., true) — TODO confirm targets.
const bigintFromU16 = (v0, v1, v2, v3) => {
  const view = new DataView(new Uint16Array([v0, v1, v2, v3]).buffer);
  return view.getBigUint64(0, true);
};
// Wrap-around 64-bit arithmetic on BigInt.
const MASK_64 = 2n ** 64n - 1n;
const rotl = (input, nBits) => (input << nBits & MASK_64) | input >> (64n - nBits);
const multiply = (a, b) => (a * b) & MASK_64;
const add = (a, b) => (a + b) & MASK_64;
// Round constants (presumably the xxHash64 primes used by h64 below).
const PRIME64_1 = 11400714785074694791n;
const PRIME64_2 = 14029467366897019727n;
const PRIME64_3 = 1609587929392839161n;
const PRIME64_4 = 9650029242287828579n;
const PRIME64_5 = 2870177450012600261n;
/**
 * 64-bit xxHash (XXH64) of `input`, returned as an unsigned 64-bit BigInt.
 * The prime constants and round structure match the published XXH64 scheme.
 *
 * @param {Uint8Array} input - bytes to hash
 * @param {bigint} [seed=0n] - 64-bit seed
 * @returns {bigint} digest in [0, 2^64)
 */
function h64(input, seed = 0n) {
  // Four lane accumulators, seeded per XXH64.
  let v1 = add(add(seed, PRIME64_1), PRIME64_2);
  let v2 = add(seed, PRIME64_2);
  let v3 = seed;
  // May go negative for small seeds; later add()/multiply() calls mask the
  // value back into the unsigned 64-bit range via two's-complement AND.
  let v4 = seed - PRIME64_1;
  let totalLen = input.length;
  // `memory` buffers the tail (< 32 bytes) left after whole 32-byte stripes.
  let memsize = 0;
  let memory = null;
  (function update() {
    let p2 = 0;
    let bEnd = p2 + totalLen;
    if (!totalLen) return;
    memory = new Uint8Array(32);
    // Inputs shorter than one stripe are buffered whole and handled by the
    // tail loops below; the lane accumulators stay untouched.
    if (totalLen < 32) {
      memory.set(input.subarray(0, totalLen), memsize);
      memsize += totalLen;
      return;
    }
    // Main loop: consume 32-byte stripes, 8 little-endian bytes per lane,
    // each lane doing the XXH64 round: ((acc + lane*P2) rotl 31) * P1.
    if (p2 <= bEnd - 32) {
      const limit = bEnd - 32;
      do {
        let other;
        other = bigintFromU16(
          input[p2 + 1] << 8 | input[p2],
          input[p2 + 3] << 8 | input[p2 + 2],
          input[p2 + 5] << 8 | input[p2 + 4],
          input[p2 + 7] << 8 | input[p2 + 6]
        );
        v1 = multiply(rotl(add(v1, multiply(other, PRIME64_2)), 31n), PRIME64_1);
        p2 += 8;
        other = bigintFromU16(
          input[p2 + 1] << 8 | input[p2],
          input[p2 + 3] << 8 | input[p2 + 2],
          input[p2 + 5] << 8 | input[p2 + 4],
          input[p2 + 7] << 8 | input[p2 + 6]
        );
        v2 = multiply(rotl(add(v2, multiply(other, PRIME64_2)), 31n), PRIME64_1);
        p2 += 8;
        other = bigintFromU16(
          input[p2 + 1] << 8 | input[p2],
          input[p2 + 3] << 8 | input[p2 + 2],
          input[p2 + 5] << 8 | input[p2 + 4],
          input[p2 + 7] << 8 | input[p2 + 6]
        );
        v3 = multiply(rotl(add(v3, multiply(other, PRIME64_2)), 31n), PRIME64_1);
        p2 += 8;
        other = bigintFromU16(
          input[p2 + 1] << 8 | input[p2],
          input[p2 + 3] << 8 | input[p2 + 2],
          input[p2 + 5] << 8 | input[p2 + 4],
          input[p2 + 7] << 8 | input[p2 + 6]
        );
        v4 = multiply(rotl(add(v4, multiply(other, PRIME64_2)), 31n), PRIME64_1);
        p2 += 8;
      } while (p2 <= limit);
    }
    // Stash the remaining < 32 tail bytes for the finalization loops.
    if (p2 < bEnd) {
      memory.set(input.subarray(p2, bEnd), memsize);
      memsize = bEnd - p2;
    }
  })();
  // From here on, only the buffered tail (if any) is read from `input`.
  input = memory || input;
  let result;
  let p = 0;
  if (totalLen >= 32) {
    // Converge the four lanes, then fold each lane in (XXH64 mergeRound).
    result = rotl(v1, 1n);
    result = add(result, rotl(v2, 7n));
    result = add(result, rotl(v3, 12n));
    result = add(result, rotl(v4, 18n));
    v1 = multiply(rotl(multiply(v1, PRIME64_2), 31n), PRIME64_1);
    result = result ^ v1;
    result = add(multiply(result, PRIME64_1), PRIME64_4);
    v2 = multiply(rotl(multiply(v2, PRIME64_2), 31n), PRIME64_1);
    result = result ^ v2;
    result = add(multiply(result, PRIME64_1), PRIME64_4);
    v3 = multiply(rotl(multiply(v3, PRIME64_2), 31n), PRIME64_1);
    result = result ^ v3;
    result = add(multiply(result, PRIME64_1), PRIME64_4);
    v4 = multiply(rotl(multiply(v4, PRIME64_2), 31n), PRIME64_1);
    result = result ^ v4;
    result = add(multiply(result, PRIME64_1), PRIME64_4);
  } else {
    // Short-input path: no stripes were consumed.
    result = add(seed, PRIME64_5);
  }
  result = add(result, BigInt(totalLen));
  // Fold the remaining tail bytes: 8 at a time, then 4, then one by one.
  while (p <= memsize - 8) {
    let temp2 = bigintFromU16(
      input[p + 1] << 8 | input[p],
      input[p + 3] << 8 | input[p + 2],
      input[p + 5] << 8 | input[p + 4],
      input[p + 7] << 8 | input[p + 6]
    );
    temp2 = multiply(rotl(multiply(temp2, PRIME64_2), 31n), PRIME64_1);
    result = add(multiply(rotl(result ^ temp2, 27n), PRIME64_1), PRIME64_4);
    p += 8;
  }
  if (p + 4 <= memsize) {
    let temp2 = multiply(
      bigintFromU16(
        input[p + 1] << 8 | input[p],
        input[p + 3] << 8 | input[p + 2],
        0,
        0
      ),
      PRIME64_1
    );
    result = add(multiply(rotl(result ^ temp2, 23n), PRIME64_2), PRIME64_3);
    p += 4;
  }
  while (p < memsize) {
    const temp2 = multiply(bigintFromU16(input[p++], 0, 0, 0), PRIME64_5);
    result = multiply(rotl(result ^ temp2, 11n), PRIME64_1);
  }
  // Final avalanche: xor-shift / multiply mixing.
  let temp = result >> 33n;
  result = multiply(result ^ temp, PRIME64_2);
  temp = result >> 29n;
  result = multiply(result ^ temp, PRIME64_3);
  temp = result >> 32n;
  result ^= temp;
  return result;
}
export { h64 };
//# sourceMappingURL=h64.mjs.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,4 @@
// Identity "hasher": returns its input untouched (no hashing applied).
function Identity(encoded) {
  return encoded;
}
export { Identity };
//# sourceMappingURL=identity.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"identity.mjs","sources":["../../../src/hashes/identity.ts"],"sourcesContent":["export const Identity = (encoded: Uint8Array): Uint8Array => encoded\n"],"names":[],"mappings":"AAAO,MAAM,QAAA,GAAW,CAAC,OAAA,KAAoC;;;;"}
@@ -0,0 +1,6 @@
import { keccak_256 } from '@noble/hashes/sha3.js';
// Direct alias of @noble/hashes' keccak_256, re-exported under this
// package's naming convention. Kept as a reference assignment (not a
// wrapper) so the exported function is the exact noble implementation.
const Keccak256 = keccak_256;
export { Keccak256 };
//# sourceMappingURL=keccak.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"keccak.mjs","sources":["../../../src/hashes/keccak.ts"],"sourcesContent":["import { keccak_256 } from \"@noble/hashes/sha3.js\"\n\nexport const Keccak256: (input: Uint8Array) => Uint8Array = keccak_256\n"],"names":[],"mappings":";;AAEO,MAAM,SAAA,GAA+C;;;;"}
+24
View File
@@ -0,0 +1,24 @@
import { mergeUint8 } from '@pezkuwi/papi-utils';
import { u64 } from 'scale-ts';
import { h64 } from './h64.mjs';
// 128-bit twox hash: two h64 digests of the same input (seeds 0 and 1),
// written little-endian into a 16-byte buffer.
const Twox128 = (input) => {
  const out = new Uint8Array(16);
  const view = new DataView(out.buffer);
  view.setBigUint64(0, h64(input, 0n), true);
  view.setBigUint64(8, h64(input, 1n), true);
  return out;
};
// 256-bit twox hash: four h64 digests of the same input (seeds 0..3),
// written little-endian into consecutive 8-byte slots of a 32-byte buffer.
const Twox256 = (input) => {
  const out = new Uint8Array(32);
  const view = new DataView(out.buffer);
  for (let i = 0; i < 4; i++)
    view.setBigUint64(i * 8, h64(input, BigInt(i)), true);
  return out;
};
const Twox64Concat = (encoded) => mergeUint8([u64.enc(h64(encoded)), encoded]);
export { Twox128, Twox256, Twox64Concat };
//# sourceMappingURL=twoX.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"twoX.mjs","sources":["../../../src/hashes/twoX.ts"],"sourcesContent":["import { mergeUint8 } from \"@pezkuwi/papi-utils\"\nimport { u64 } from \"scale-ts\"\nimport { h64 } from \"./h64\"\n\nexport const Twox128 = (input: Uint8Array): Uint8Array => {\n const result = new Uint8Array(16)\n const dv = new DataView(result.buffer)\n\n dv.setBigUint64(0, h64(input), true)\n dv.setBigUint64(8, h64(input, 1n), true)\n\n return result\n}\n\nexport const Twox256 = (input: Uint8Array): Uint8Array => {\n const result = new Uint8Array(32)\n const dv = new DataView(result.buffer)\n\n dv.setBigUint64(0, h64(input), true)\n dv.setBigUint64(8, h64(input, 1n), true)\n dv.setBigUint64(16, h64(input, 2n), true)\n dv.setBigUint64(24, h64(input, 3n), true)\n\n return result\n}\n\nexport const Twox64Concat = (encoded: Uint8Array): Uint8Array =>\n mergeUint8([u64.enc(h64(encoded)), encoded])\n"],"names":[],"mappings":";;;;AAIO,MAAM,OAAA,GAAU,CAAC,KAAA,KAAkC;AACxD,EAAA,MAAM,MAAA,GAAS,IAAI,UAAA,CAAW,EAAE,CAAA;AAChC,EAAA,MAAM,EAAA,GAAK,IAAI,QAAA,CAAS,MAAA,CAAO,MAAM,CAAA;AAErC,EAAA,EAAA,CAAG,YAAA,CAAa,CAAA,EAAG,GAAA,CAAI,KAAK,GAAG,IAAI,CAAA;AACnC,EAAA,EAAA,CAAG,aAAa,CAAA,EAAG,GAAA,CAAI,KAAA,EAAO,EAAE,GAAG,IAAI,CAAA;AAEvC,EAAA,OAAO,MAAA;AACT;AAEO,MAAM,OAAA,GAAU,CAAC,KAAA,KAAkC;AACxD,EAAA,MAAM,MAAA,GAAS,IAAI,UAAA,CAAW,EAAE,CAAA;AAChC,EAAA,MAAM,EAAA,GAAK,IAAI,QAAA,CAAS,MAAA,CAAO,MAAM,CAAA;AAErC,EAAA,EAAA,CAAG,YAAA,CAAa,CAAA,EAAG,GAAA,CAAI,KAAK,GAAG,IAAI,CAAA;AACnC,EAAA,EAAA,CAAG,aAAa,CAAA,EAAG,GAAA,CAAI,KAAA,EAAO,EAAE,GAAG,IAAI,CAAA;AACvC,EAAA,EAAA,CAAG,aAAa,EAAA,EAAI,GAAA,CAAI,KAAA,EAAO,EAAE,GAAG,IAAI,CAAA;AACxC,EAAA,EAAA,CAAG,aAAa,EAAA,EAAI,GAAA,CAAI,KAAA,EAAO,EAAE,GAAG,IAAI,CAAA;AAExC,EAAA,OAAO,MAAA;AACT;AAEO,MAAM,YAAA,GAAe,CAAC,OAAA,KAC3B,UAAA,CAAW,CAAC,GAAA,CAAI,GAAA,CAAI,GAAA,CAAI,OAAO,CAAC,CAAA,EAAG,OAAO,CAAC;;;;"}
+35
View File
@@ -0,0 +1,35 @@
// Barrel module: re-exports the package's public surface.

// SCALE codec building blocks.
export { AccountId } from './codecs/scale/AccountId.mjs';
export { Bin, Binary, FixedSizeBinary } from './codecs/scale/Binary.mjs';
export { bitSequence } from './codecs/scale/bitSequence.mjs';
export { char } from './codecs/scale/char.mjs';
export { compactBn, compactNumber } from './codecs/scale/compact.mjs';
export { Hex } from './codecs/scale/Hex.mjs';
export { fixedStr } from './codecs/scale/fixed-str.mjs';
// Primitive codecs re-exported straight from scale-ts.
export { Bytes, _void, bool, compact, createCodec, createDecoder, enhanceCodec, enhanceDecoder, enhanceEncoder, i128, i16, i256, i32, i64, i8, str, u128, u16, u256, u32, u64, u8 } from 'scale-ts';
export { Self, selfDecoder, selfEncoder } from './codecs/scale/Self.mjs';
export { ScaleEnum, Variant } from './codecs/scale/Variant.mjs';
export { ethAccount } from './codecs/scale/ethAccount.mjs';
export { Option, Result, Struct, Tuple, Vector } from './codecs/scale/shaped.mjs';
export { BitSeq } from './codecs/scale/BitSeq.mjs';
// Chain-level codecs: block headers and runtime metadata (v14-v16).
export { blockHeader } from './codecs/blockHeader.mjs';
export { decAnyMetadata, metadata } from './codecs/metadata/metadata.mjs';
export { v14 } from './codecs/metadata/v14.mjs';
export { v15 } from './codecs/metadata/v15.mjs';
export { v16 } from './codecs/metadata/v16.mjs';
export { unifyMetadata } from './codecs/metadata/unified.mjs';
export { lookup as v14Lookup } from './codecs/metadata/lookup.mjs';
export { extrinsicFormat } from './extrinsics/extrinsic-format.mjs';
// Hashers used for storage keys and account derivation.
export { Blake2128, Blake2128Concat, Blake2256 } from './hashes/blake2.mjs';
export { Blake3256, Blake3256Concat } from './hashes/blake3.mjs';
export { Identity } from './hashes/identity.mjs';
export { Twox128, Twox256, Twox64Concat } from './hashes/twoX.mjs';
export { h64 } from './hashes/h64.mjs';
export { Keccak256 } from './hashes/keccak.mjs';
// Storage-key construction, enum helpers, SS58 / multisig utilities.
export { Storage } from './storage.mjs';
export { Enum, _Enum } from './types/enum.mjs';
export { fromBufferToBase58, getSs58AddressInfo } from './utils/ss58-util.mjs';
export { getMultisigAccountId, sortMultisigSignatories } from './utils/multisig.mjs';
// Trie node decoding and storage-proof validation.
export { TrieNodeHeaders } from './trie/types.mjs';
export { trieNodeDec } from './trie/node-decoder.mjs';
export { TrieNodeWithHash, validateProofs } from './trie/proofs.mjs';
//# sourceMappingURL=index.mjs.map
+1
View File
@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
+65
View File
@@ -0,0 +1,65 @@
import { mergeUint8, toHex, fromHex } from '@pezkuwi/papi-utils';
import { Blake2128Concat, Blake2128, Blake2256 } from './hashes/blake2.mjs';
import '@noble/hashes/blake3.js';
import { Identity } from './hashes/identity.mjs';
import { Twox128, Twox64Concat, Twox256 } from './hashes/twoX.mjs';
import '@noble/hashes/sha3.js';
// Shared encoder for turning pallet / entry names into UTF-8 bytes.
const textEncoder = new TextEncoder();
// Maps each supported storage hasher to the byte width of its key prefix,
// as consumed by Storage's `dec` below:
//  - positive N ("concat" hashers): N hash bytes are followed by the
//    SCALE-encoded key itself, so the key can be decoded back.
//  - negative N (opaque hashers): |N| hash bytes only; the original key is
//    unrecoverable and is surfaced as hex.
//  - 0 (Identity): the key is stored as-is, with no hash prefix.
const hashers = /* @__PURE__ */ new Map([
  [Identity, 0],
  [Twox64Concat, 8],
  [Blake2128Concat, 16],
  [Blake2128, -16],
  [Blake2256, -32],
  [Twox128, -16],
  [Twox256, -32]
]);
/**
 * Builds storage-key codecs for a pallet.
 *
 * `Storage(pallet)` returns a factory; `factory(name, ...encoders)` returns
 * `{ enc, dec }` for one storage entry, where each encoder is a
 * `[codec, hasher]` pair for one key argument:
 *  - `enc(...args)`  -> hex key: twox128(pallet) ++ twox128(name) ++
 *                       hasher(codec.enc(arg)) for each argument.
 *  - `dec(key)`      -> the decoded arguments; opaque (non-concat) hashers
 *                       yield the hash bytes as hex instead of a value.
 *    Throws if `key` does not start with this entry's prefix, or if a
 *    hasher is not registered in `hashers`.
 */
const Storage = (pallet) => {
  const palletHash = Twox128(textEncoder.encode(pallet));
  return (name, ...encoders) => {
    // Fixed 32-byte prefix shared by every key of this entry.
    const prefix = mergeUint8([
      palletHash,
      Twox128(textEncoder.encode(name))
    ]);
    const prefixHex = toHex(prefix);
    const dec = (key) => {
      if (!key.startsWith(prefixHex))
        throw new Error(`key does not match this storage (${pallet}.${name})`);
      if (encoders.length === 0) return [];
      const argBytes = fromHex(key.slice(prefixHex.length));
      const decoded = new Array(encoders.length);
      let offset = 0;
      for (let i = 0; i < encoders.length; i++) {
        const [codec, hasher] = encoders[i];
        const width = hashers.get(hasher);
        if (width == null) throw new Error("Unknown hasher");
        if (width < 0) {
          // Opaque hasher: the key is not recoverable; expose the hash hex.
          const nOpaque = -width;
          decoded[i] = toHex(argBytes.slice(offset, offset + nOpaque));
          offset += nOpaque;
        } else {
          // Concat hasher: skip the hash, decode the SCALE-encoded key, then
          // re-encode it to learn how many bytes it occupied.
          offset += width;
          decoded[i] = codec.dec(argBytes.slice(offset));
          offset += codec.enc(decoded[i]).length;
        }
      }
      return decoded;
    };
    // One pre-bound encode+hash function per key argument.
    const argHashers = encoders.map(
      ([{ enc: encodeArg }, hasher]) => (value) => hasher(encodeArg(value))
    );
    const enc = (...args) =>
      toHex(
        mergeUint8([prefix, ...args.map((value, i) => argHashers[i](value))])
      );
    return {
      enc,
      dec
    };
  };
};
export { Storage };
//# sourceMappingURL=storage.mjs.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,66 @@
import { u8, createDecoder, u16 } from 'scale-ts';
import '../utils/ss58-util.mjs';
import '../codecs/scale/Binary.mjs';
import '../codecs/scale/bitSequence.mjs';
import '../codecs/scale/char.mjs';
import '../codecs/scale/compact.mjs';
import { Hex } from '../codecs/scale/Hex.mjs';
import '../codecs/scale/fixed-str.mjs';
import '../codecs/scale/Variant.mjs';
import '../codecs/scale/ethAccount.mjs';
import '../codecs/scale/shaped.mjs';
import '../codecs/scale/BitSeq.mjs';
import '../codecs/blockHeader.mjs';
import '../codecs/metadata/metadata.mjs';
import '../codecs/metadata/v14.mjs';
import '../codecs/metadata/v15.mjs';
import '../codecs/metadata/v16.mjs';
import '../codecs/metadata/lookup.mjs';
import { TrieNodeHeaders } from './types.mjs';
// Pre-bound decoders used throughout the trie-node decoder.
// varHex: Hex().dec — presumably a length-prefixed hex decoder; hex32 reads
// exactly 32 bytes, allHex the rest of the input — TODO confirm against Hex.
const varHex = Hex().dec;
const allHex = Hex(Infinity).dec;
const hex32 = Hex(32).dec;
const byte = u8.dec;
// Parses a trie-node header from the stream: the node variant plus the
// partial key (as a nibble string, without the "0x" prefix).
//
// The top two bits of the first byte select the variant:
//   01 -> Leaf, 10 -> Branch, 11 -> BranchWithVal.
// When they are 00, the next lower bits are tested in turn:
//   bit 5 -> LeafWithHash, bit 4 -> BranchWithHash,
//   any other non-zero byte -> Reserved, 0 -> Empty.
// `bitsLeft` tracks how many low bits remain for the nibble count after the
// variant bits are consumed (6, 5 or 4).
const getHeader = (bytes) => {
  const firstByte = byte(bytes);
  let bitsLeft = 6;
  const typeId = firstByte >> bitsLeft;
  const type = typeId ? typeId === 1 ? TrieNodeHeaders.Leaf : typeId === 2 ? TrieNodeHeaders.Branch : TrieNodeHeaders.BranchWithVal : firstByte >> --bitsLeft ? TrieNodeHeaders.LeafWithHash : firstByte >> --bitsLeft ? TrieNodeHeaders.BranchWithHash : firstByte ? TrieNodeHeaders.Reserved : TrieNodeHeaders.Empty;
  // Nibble count lives in the remaining low bits; a saturated field means
  // the count continues in following bytes (add each, stop after a non-255).
  let nNibles = firstByte & 255 >> 8 - bitsLeft;
  if (nNibles === 2 ** bitsLeft - 1) {
    let current;
    do
      nNibles += current = byte(bytes);
    while (current === 255);
  }
  return {
    type,
    // Read ceil(n/2) bytes as hex, then drop the "0x" prefix — plus the
    // extra pad nibble when the count is odd (3 chars instead of 2).
    partialKey: Hex(Math.ceil(nNibles / 2)).dec(bytes).slice(nNibles % 2 ? 3 : 2)
  };
};
// Decodes one encoded trie node into { type, partialKey, value?, children? }.
const trieNodeDec = createDecoder((bytes) => {
  const header = getHeader(bytes);
  const { type } = header;
  // Empty/Reserved nodes carry no payload.
  if (type === "Empty" || type === "Reserved") return header;
  // Leaf variants: the rest of the input is the (hex) value or value hash.
  if (type === "Leaf" || type === "LeafWithHash")
    return {
      ...header,
      value: allHex(bytes)
    };
  // Branch variants: a 16-bit bitmap says which of the 16 nibble-indexed
  // children are present.
  const bitmap = u16.dec(bytes);
  const keys = [];
  for (let i = 0; i < 16; i++) if (bitmap >> i & 1) keys.push(i.toString(16));
  // An optional node value precedes the child references: inline for
  // BranchWithVal, a 32-byte hash for BranchWithHash.
  let value = null;
  if (type === "BranchWithVal") value = varHex(bytes);
  if (type === "BranchWithHash") value = hex32(bytes);
  const result = {
    ...header,
    children: Object.fromEntries(keys.map((key) => [key, varHex(bytes)]))
  };
  if (value !== null) result.value = value;
  return result;
});
export { trieNodeDec };
//# sourceMappingURL=node-decoder.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"node-decoder.mjs","sources":["../../../src/trie/node-decoder.ts"],"sourcesContent":["import { createDecoder, Hex, u16, u8, type HexString } from \"../codecs\"\nimport { type TrieNodeHeaderKey, TrieNodeHeaders, type TrieNode } from \"./types\"\n\nconst varHex = Hex().dec\nconst allHex = Hex(Infinity).dec\nconst hex32 = Hex(32).dec\nconst byte = u8.dec\n\nconst getHeader = (\n bytes: Uint8Array,\n): { type: TrieNodeHeaderKey; partialKey: string } => {\n const firstByte = byte(bytes)\n\n let bitsLeft = 6\n const typeId = firstByte >> bitsLeft\n const type: TrieNodeHeaderKey = typeId\n ? typeId === 1\n ? TrieNodeHeaders.Leaf\n : typeId === 2\n ? TrieNodeHeaders.Branch\n : TrieNodeHeaders.BranchWithVal\n : firstByte >> --bitsLeft\n ? TrieNodeHeaders.LeafWithHash\n : firstByte >> --bitsLeft\n ? TrieNodeHeaders.BranchWithHash\n : firstByte\n ? TrieNodeHeaders.Reserved\n : TrieNodeHeaders.Empty\n\n let nNibles = firstByte & (0xff >> (8 - bitsLeft))\n if (nNibles === 2 ** bitsLeft - 1) {\n let current: number\n do nNibles += current = byte(bytes)\n while (current === 255)\n }\n\n return {\n type,\n partialKey: Hex(Math.ceil(nNibles / 2))\n .dec(bytes)\n .slice(nNibles % 2 ? 
3 : 2),\n }\n}\n\nexport const trieNodeDec = createDecoder((bytes): TrieNode => {\n const header = getHeader(bytes)\n const { type } = header\n\n if (type === \"Empty\" || type === \"Reserved\") return header as TrieNode\n if (type === \"Leaf\" || type === \"LeafWithHash\")\n return {\n ...header,\n value: allHex(bytes),\n } as TrieNode\n\n const bitmap = u16.dec(bytes)\n const keys: string[] = []\n for (let i = 0; i < 16; i++) if ((bitmap >> i) & 1) keys.push(i.toString(16))\n\n let value: null | HexString = null\n if (type === \"BranchWithVal\") value = varHex(bytes)\n if (type === \"BranchWithHash\") value = hex32(bytes)\n\n const result: any = {\n ...header,\n children: Object.fromEntries(keys.map((key) => [key, varHex(bytes)])),\n }\n if (value !== null) result.value = value\n return result\n})\n"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;AAGA,MAAM,MAAA,GAAS,KAAI,CAAE,GAAA;AACrB,MAAM,MAAA,GAAS,GAAA,CAAI,QAAQ,CAAA,CAAE,GAAA;AAC7B,MAAM,KAAA,GAAQ,GAAA,CAAI,EAAE,CAAA,CAAE,GAAA;AACtB,MAAM,OAAO,EAAA,CAAG,GAAA;AAEhB,MAAM,SAAA,GAAY,CAChB,KAAA,KACoD;AACpD,EAAA,MAAM,SAAA,GAAY,KAAK,KAAK,CAAA;AAE5B,EAAA,IAAI,QAAA,GAAW,CAAA;AACf,EAAA,MAAM,SAAS,SAAA,IAAa,QAAA;AAC5B,EAAA,MAAM,IAAA,GAA0B,MAAA,GAC5B,MAAA,KAAW,CAAA,GACT,eAAA,CAAgB,OAChB,MAAA,KAAW,CAAA,GACT,eAAA,CAAgB,MAAA,GAChB,eAAA,CAAgB,aAAA,GACpB,aAAa,EAAE,QAAA,GACb,eAAA,CAAgB,YAAA,GAChB,SAAA,IAAa,EAAE,QAAA,GACb,eAAA,CAAgB,cAAA,GAChB,SAAA,GACE,eAAA,CAAgB,QAAA,GAChB,eAAA,CAAgB,KAAA;AAE1B,EAAA,IAAI,OAAA,GAAU,SAAA,GAAa,GAAA,IAAS,CAAA,GAAI,QAAA;AACxC,EAAA,IAAI,OAAA,KAAY,CAAA,IAAK,QAAA,GAAW,CAAA,EAAG;AACjC,IAAA,IAAI,OAAA;AACJ,IAAA;AAAG,MAAA,OAAA,IAAW,OAAA,GAAU,KAAK,KAAK,CAAA;AAAA,WAC3B,OAAA,KAAY,GAAA;AAAA,EACrB;AAEA,EAAA,OAAO;AAAA,IACL,IAAA;AAAA,IACA,UAAA,EAAY,GAAA,CAAI,IAAA,CAAK,IAAA,CAAK,UAAU,CAAC,CAAC,CAAA,CACnC,GAAA,CAAI,KAAK,CAAA,CACT,KAAA,CAAM,OAAA,GAAU,CAAA,GAAI,IAAI,CAAC;AAAA,GAC9B;AACF,CAAA;AAEO,MAAM,WAAA,GAAc,aAAA,CAAc,CAAC,KAAA,KAAoB;AAC5D,EAAA,MAAM,MAAA,GAAS,UAAU,KAAK,CAAA;AAC9B,EAAA,MAAM,EAAE,MAAK,GAAI,MAAA;AAEjB,EA
AA,IAAI,IAAA,KAAS,OAAA,IAAW,IAAA,KAAS,UAAA,EAAY,OAAO,MAAA;AACpD,EAAA,IAAI,IAAA,KAAS,UAAU,IAAA,KAAS,cAAA;AAC9B,IAAA,OAAO;AAAA,MACL,GAAG,MAAA;AAAA,MACH,KAAA,EAAO,OAAO,KAAK;AAAA,KACrB;AAEF,EAAA,MAAM,MAAA,GAAS,GAAA,CAAI,GAAA,CAAI,KAAK,CAAA;AAC5B,EAAA,MAAM,OAAiB,EAAC;AACxB,EAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,EAAA,EAAI,KAAK,IAAK,MAAA,IAAU,CAAA,GAAK,CAAA,EAAG,IAAA,CAAK,IAAA,CAAK,CAAA,CAAE,QAAA,CAAS,EAAE,CAAC,CAAA;AAE5E,EAAA,IAAI,KAAA,GAA0B,IAAA;AAC9B,EAAA,IAAI,IAAA,KAAS,eAAA,EAAiB,KAAA,GAAQ,MAAA,CAAO,KAAK,CAAA;AAClD,EAAA,IAAI,IAAA,KAAS,gBAAA,EAAkB,KAAA,GAAQ,KAAA,CAAM,KAAK,CAAA;AAElD,EAAA,MAAM,MAAA,GAAc;AAAA,IAClB,GAAG,MAAA;AAAA,IACH,QAAA,EAAU,MAAA,CAAO,WAAA,CAAY,IAAA,CAAK,GAAA,CAAI,CAAC,GAAA,KAAQ,CAAC,GAAA,EAAK,MAAA,CAAO,KAAK,CAAC,CAAC,CAAC;AAAA,GACtE;AACA,EAAA,IAAI,KAAA,KAAU,IAAA,EAAM,MAAA,CAAO,KAAA,GAAQ,KAAA;AACnC,EAAA,OAAO,MAAA;AACT,CAAC;;;;"}
+75
View File
@@ -0,0 +1,75 @@
import { toHex } from '@pezkuwi/papi-utils';
import { createDecoder } from 'scale-ts';
import '../utils/ss58-util.mjs';
import '../codecs/scale/Binary.mjs';
import '../codecs/scale/bitSequence.mjs';
import '../codecs/scale/char.mjs';
import '../codecs/scale/compact.mjs';
import '../codecs/scale/Hex.mjs';
import '../codecs/scale/fixed-str.mjs';
import '../codecs/scale/Variant.mjs';
import '../codecs/scale/ethAccount.mjs';
import '../codecs/scale/shaped.mjs';
import '../codecs/scale/BitSeq.mjs';
import '../codecs/blockHeader.mjs';
import '../codecs/metadata/metadata.mjs';
import '../codecs/metadata/v14.mjs';
import '../codecs/metadata/v15.mjs';
import '../codecs/metadata/v16.mjs';
import '../codecs/metadata/lookup.mjs';
import { trieNodeDec } from './node-decoder.mjs';
import { Blake2256 } from '../hashes/blake2.mjs';
import '@noble/hashes/blake3.js';
import '@noble/hashes/sha3.js';
// Wraps the trie-node decoder so every decoded node also carries the hex
// hash of its encoded bytes.
// NOTE(review): `input.buffer` is the WHOLE backing ArrayBuffer — this
// assumes each decoder input is a view covering exactly one encoded node;
// confirm at call sites.
// Proofs sometimes contain raw values that are not valid trie nodes; those
// fail to decode and are surfaced as { type: "Raw" } with an empty value.
const TrieNodeWithHash = (hasher) => createDecoder((input) => {
  const hash = toHex(hasher(new Uint8Array(input.buffer)));
  try {
    return {
      hash,
      ...trieNodeDec(input)
    };
  } catch {
    return { type: "Raw", hash, value: "" };
  }
});
// Decodes a list of proof nodes (hex strings or byte arrays), links children
// to parents via their hashes, and checks the nodes form a single tree.
// Returns { rootHash, proofs } (a record keyed by node hash) when exactly
// one root remains, or null otherwise. Decoded nodes are mutated in place
// (adding `parent`, rewriting hash-referenced entries as "Raw" values).
const validateProofs = (proofs, hasher = Blake2256) => {
  const proofsList = proofs.map(TrieNodeWithHash(hasher));
  const proofsRecord = Object.fromEntries(proofsList.map((p) => [p.hash, p]));
  const hashes = proofsList.map((p) => p.hash);
  // Candidate roots: every node not referenced by another node survives.
  const roots = new Set(hashes);
  // Fills in a Raw node's value with the original (hex) proof entry.
  const setRawValue = (input) => {
    if (input.value) return;
    const val = proofs[hashes.indexOf(input.hash)];
    input.value = typeof val === "string" ? val : toHex(val);
  };
  proofsList.forEach((p) => {
    // Branch children are referenced by hash: link them and drop them from
    // the root candidates.
    if ("children" in p) {
      Object.values(p.children).forEach((hash) => {
        const child = proofsRecord[hash];
        if (child) {
          child.parent = p.hash;
          roots.delete(hash);
        }
      });
    }
    // *WithHash nodes reference their value by hash: the referenced entry is
    // a raw value blob, not a trie node — normalize it to a "Raw" node.
    if (p.type === "BranchWithHash" || p.type === "LeafWithHash") {
      const childHash = p.value;
      const child = proofsRecord[childHash];
      if (!child) return;
      roots.delete(childHash);
      if (child.type !== "Raw") {
        Object.keys(child).forEach((k) => delete child[k]);
        child.type = "Raw";
        child.hash = childHash;
      }
      child.parent = p.hash;
      setRawValue(child);
    }
    if (p.type === "Raw") setRawValue(p);
  });
  return roots.size === 1 ? { rootHash: roots.values().next().value, proofs: proofsRecord } : null;
};
export { TrieNodeWithHash, validateProofs };
//# sourceMappingURL=proofs.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"proofs.mjs","sources":["../../../src/trie/proofs.ts"],"sourcesContent":["import { toHex } from \"@pezkuwi/papi-utils\"\nimport { createDecoder, type HexString } from \"../codecs\"\nimport { trieNodeDec } from \"./node-decoder\"\nimport { Blake2256 } from \"@/hashes\"\nimport { type ProofTrieNode } from \"./types\"\n\nexport const TrieNodeWithHash = (hasher: (input: Uint8Array) => Uint8Array) =>\n createDecoder((input): ProofTrieNode => {\n const hash = toHex(hasher(new Uint8Array(input.buffer)))\n try {\n return {\n hash,\n ...trieNodeDec(input),\n }\n } catch {\n // Sometimes the proofs include random raw-values which are not trie-nodes\n return { type: \"Raw\", hash, value: \"\" }\n }\n })\n\nexport const validateProofs = <T extends HexString | Uint8Array>(\n proofs: Array<T>,\n hasher: (input: Uint8Array) => Uint8Array = Blake2256,\n): { rootHash: HexString; proofs: Record<HexString, ProofTrieNode> } | null => {\n const proofsList = proofs.map(TrieNodeWithHash(hasher))\n const proofsRecord = Object.fromEntries(proofsList.map((p) => [p.hash, p]))\n const hashes = proofsList.map((p) => p.hash)\n const roots = new Set(hashes)\n\n const setRawValue = (input: {\n type: \"Raw\"\n hash: HexString\n value: HexString\n }) => {\n if (input.value) return\n const val = proofs[hashes.indexOf(input.hash)!]\n input.value = typeof val === \"string\" ? 
val : toHex(val)\n }\n\n proofsList.forEach((p) => {\n if (\"children\" in p) {\n Object.values(p.children).forEach((hash) => {\n const child = proofsRecord[hash]\n if (child) {\n child.parent = p.hash\n roots.delete(hash)\n }\n })\n }\n\n if (p.type === \"BranchWithHash\" || p.type === \"LeafWithHash\") {\n const childHash = p.value\n const child = proofsRecord[childHash]\n if (!child) return\n\n roots.delete(childHash)\n if (child.type !== \"Raw\") {\n Object.keys(child).forEach((k) => delete (child as any)[k])\n ;(child as any).type = \"Raw\"\n child.hash = childHash\n }\n child.parent = p.hash\n setRawValue(child as any)\n }\n\n if (p.type === \"Raw\") setRawValue(p)\n })\n\n return roots.size === 1\n ? { rootHash: roots.values().next().value!, proofs: proofsRecord }\n : null\n}\n"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAMO,MAAM,gBAAA,GAAmB,CAAC,MAAA,KAC/B,aAAA,CAAc,CAAC,KAAA,KAAyB;AACtC,EAAA,MAAM,IAAA,GAAO,MAAM,MAAA,CAAO,IAAI,WAAW,KAAA,CAAM,MAAM,CAAC,CAAC,CAAA;AACvD,EAAA,IAAI;AACF,IAAA,OAAO;AAAA,MACL,IAAA;AAAA,MACA,GAAG,YAAY,KAAK;AAAA,KACtB;AAAA,EACF,CAAA,CAAA,MAAQ;AAEN,IAAA,OAAO,EAAE,IAAA,EAAM,KAAA,EAAO,IAAA,EAAM,OAAO,EAAA,EAAG;AAAA,EACxC;AACF,CAAC;AAEI,MAAM,cAAA,GAAiB,CAC5B,MAAA,EACA,MAAA,GAA4C,SAAA,KACiC;AAC7E,EAAA,MAAM,UAAA,GAAa,MAAA,CAAO,GAAA,CAAI,gBAAA,CAAiB,MAAM,CAAC,CAAA;AACtD,EAAA,MAAM,YAAA,GAAe,MAAA,CAAO,WAAA,CAAY,UAAA,CAAW,GAAA,CAAI,CAAC,CAAA,KAAM,CAAC,CAAA,CAAE,IAAA,EAAM,CAAC,CAAC,CAAC,CAAA;AAC1E,EAAA,MAAM,SAAS,UAAA,CAAW,GAAA,CAAI,CAAC,CAAA,KAAM,EAAE,IAAI,CAAA;AAC3C,EAAA,MAAM,KAAA,GAAQ,IAAI,GAAA,CAAI,MAAM,CAAA;AAE5B,EAAA,MAAM,WAAA,GAAc,CAAC,KAAA,KAIf;AACJ,IAAA,IAAI,MAAM,KAAA,EAAO;AACjB,IAAA,MAAM,MAAM,MAAA,CAAO,MAAA,CAAO,OAAA,CAAQ,KAAA,CAAM,IAAI,CAAE,CAAA;AAC9C,IAAA,KAAA,CAAM,QAAQ,OAAO,GAAA,KAAQ,QAAA,GAAW,GAAA,GAAM,MAAM,GAAG,CAAA;AAAA,EACzD,CAAA;AAEA,EAAA,UAAA,CAAW,OAAA,CAAQ,CAAC,CAAA,KAAM;AACxB,IAAA,IAAI,cAAc,CAAA,EAAG;AACnB,MAAA,MAAA,CAAO,OAAO,CAAA,CAAE,QAAQ,CAAA,CAAE,OAAA,CAAQ,CAAC,IAAA,KAAS;AAC1C,QAAA,MAAM,KAAA,GAAQ,aAAa,IAAI,CAAA;AAC/B
,QAAA,IAAI,KAAA,EAAO;AACT,UAAA,KAAA,CAAM,SAAS,CAAA,CAAE,IAAA;AACjB,UAAA,KAAA,CAAM,OAAO,IAAI,CAAA;AAAA,QACnB;AAAA,MACF,CAAC,CAAA;AAAA,IACH;AAEA,IAAA,IAAI,CAAA,CAAE,IAAA,KAAS,gBAAA,IAAoB,CAAA,CAAE,SAAS,cAAA,EAAgB;AAC5D,MAAA,MAAM,YAAY,CAAA,CAAE,KAAA;AACpB,MAAA,MAAM,KAAA,GAAQ,aAAa,SAAS,CAAA;AACpC,MAAA,IAAI,CAAC,KAAA,EAAO;AAEZ,MAAA,KAAA,CAAM,OAAO,SAAS,CAAA;AACtB,MAAA,IAAI,KAAA,CAAM,SAAS,KAAA,EAAO;AACxB,QAAA,MAAA,CAAO,IAAA,CAAK,KAAK,CAAA,CAAE,OAAA,CAAQ,CAAC,CAAA,KAAM,OAAQ,KAAA,CAAc,CAAC,CAAC,CAAA;AACzD,QAAC,MAAc,IAAA,GAAO,KAAA;AACvB,QAAA,KAAA,CAAM,IAAA,GAAO,SAAA;AAAA,MACf;AACA,MAAA,KAAA,CAAM,SAAS,CAAA,CAAE,IAAA;AACjB,MAAA,WAAA,CAAY,KAAY,CAAA;AAAA,IAC1B;AAEA,IAAA,IAAI,CAAA,CAAE,IAAA,KAAS,KAAA,EAAO,WAAA,CAAY,CAAC,CAAA;AAAA,EACrC,CAAC,CAAA;AAED,EAAA,OAAO,KAAA,CAAM,IAAA,KAAS,CAAA,GAClB,EAAE,QAAA,EAAU,KAAA,CAAM,MAAA,EAAO,CAAE,IAAA,EAAK,CAAE,KAAA,EAAQ,MAAA,EAAQ,cAAa,GAC/D,IAAA;AACN;;;;"}
+12
View File
@@ -0,0 +1,12 @@
// Trie-node header variants; each key maps to itself so the values double
// as discriminant strings on decoded nodes.
const TrieNodeHeaders = Object.fromEntries(
  [
    "Leaf",
    "Branch",
    "BranchWithVal",
    "LeafWithHash",
    "BranchWithHash",
    "Empty",
    "Reserved"
  ].map((name) => [name, name])
);
export { TrieNodeHeaders };
//# sourceMappingURL=types.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"types.mjs","sources":["../../../src/trie/types.ts"],"sourcesContent":["import { type HexString } from \"../codecs\"\n\nexport const TrieNodeHeaders = {\n Leaf: \"Leaf\",\n Branch: \"Branch\",\n BranchWithVal: \"BranchWithVal\",\n LeafWithHash: \"LeafWithHash\",\n BranchWithHash: \"BranchWithHash\",\n Empty: \"Empty\",\n Reserved: \"Reserved\",\n} as const\ntype TrieNodeHeaders = typeof TrieNodeHeaders\nexport type TrieNodeHeaderKey =\n (typeof TrieNodeHeaders)[keyof typeof TrieNodeHeaders]\n\nexport type Nibble =\n | \"0\"\n | \"1\"\n | \"2\"\n | \"3\"\n | \"4\"\n | \"5\"\n | \"6\"\n | \"7\"\n | \"8\"\n | \"9\"\n | \"a\"\n | \"b\"\n | \"c\"\n | \"d\"\n | \"e\"\n | \"f\"\n\nexport type TrieNode = { partialKey: string } & (\n | {\n type: TrieNodeHeaders[\"Empty\"] | TrieNodeHeaders[\"Reserved\"]\n }\n | {\n type: TrieNodeHeaders[\"Leaf\"] | TrieNodeHeaders[\"LeafWithHash\"]\n value: HexString\n }\n | ({ children: Record<Nibble, HexString> } & (\n | { type: TrieNodeHeaders[\"Branch\"] }\n | {\n type:\n | TrieNodeHeaders[\"BranchWithHash\"]\n | TrieNodeHeaders[\"BranchWithVal\"]\n value: HexString\n }\n ))\n)\nexport type ProofTrieNode = {\n hash: HexString\n parent?: HexString\n} & (TrieNode | { type: \"Raw\"; value: HexString })\n"],"names":[],"mappings":"AAEO,MAAM,eAAA,GAAkB;AAAA,EAC7B,IAAA,EAAM,MAAA;AAAA,EACN,MAAA,EAAQ,QAAA;AAAA,EACR,aAAA,EAAe,eAAA;AAAA,EACf,YAAA,EAAc,cAAA;AAAA,EACd,cAAA,EAAgB,gBAAA;AAAA,EAChB,KAAA,EAAO,OAAA;AAAA,EACP,QAAA,EAAU;AACZ;;;;"}
+29
View File
@@ -0,0 +1,29 @@
// Runtime helpers for tagged-union ("enum") values of shape { type, value }.
const discriminant = {
  // Type guard: does `value` carry the given tag?
  is(value, type) {
    return type === value.type;
  },
  // Asserting accessor: returns the full enum object unchanged, throwing
  // when its tag does not match the expected one.
  as(value, type) {
    if (value.type !== type)
      throw new Error(
        `Enum.as(enum, ${type}) used with actual type ${value.type}`
      );
    return value;
  }
};
// Enum("tag", payload) builds a tagged value; the guard/accessor helpers
// above are attached as Enum.is / Enum.as.
const Enum = Object.assign(
  (type, value) => ({ type, value }),
  discriminant
);
// _Enum.someTag(payload) is sugar for Enum("someTag", payload): any property
// access on the proxy yields a constructor for that tag.
const _Enum = new Proxy(
  {},
  {
    get: (_, prop) => (value) => Enum(prop, value)
  }
);
export { Enum, _Enum };
//# sourceMappingURL=enum.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"enum.mjs","sources":["../../../src/types/enum.ts"],"sourcesContent":["export type Enum<T extends {}> = {\n [K in keyof T & string]: {\n type: K\n value: T[K]\n }\n}[keyof T & string]\n\nexport type EnumVariant<\n T extends { type: string; value?: any },\n K extends T[\"type\"],\n> = T & {\n type: K\n}\n\nexport type ExtractEnumValue<\n T extends { type: string; value?: any },\n K extends string,\n> = EnumVariant<T, K>[\"value\"]\n\ntype ValueArg<V> = undefined extends V ? [value?: V] : [value: V]\n\ninterface Discriminant {\n is<T extends { type: string; value: any }, K extends T[\"type\"]>(\n value: T,\n type: K,\n ): value is T & { type: K }\n as<T extends { type: string; value: any }, K extends T[\"type\"]>(\n value: T,\n type: K,\n ): ExtractEnumValue<T, K>\n}\nconst discriminant: Discriminant = {\n is<T extends { type: string; value: any }, K extends T[\"type\"]>(\n value: T,\n type: K,\n ): value is T & { type: K } {\n return value.type === type\n },\n as(value, type) {\n if (type !== value.type)\n throw new Error(\n `Enum.as(enum, ${type}) used with actual type ${value.type}`,\n )\n return value\n },\n}\ninterface EnumFn extends Discriminant {\n <T extends { type: string; value: any }, K extends T[\"type\"]>(\n type: K,\n ...[value]: ValueArg<ExtractEnumValue<T, K>>\n ): EnumVariant<T, K>\n}\nexport const Enum: EnumFn = Object.assign((type: string, value?: any) => {\n return {\n type,\n value,\n } as any\n}, discriminant)\n\n// well-known enums\nexport type GetEnum<T extends Enum<any>> = {\n [K in T[\"type\"]]: (\n ...args: ExtractEnumValue<T, K> extends undefined\n ? 
[]\n : [value: ExtractEnumValue<T, K>]\n ) => EnumVariant<T, K>\n}\nexport const _Enum = new Proxy(\n {},\n {\n get(_, prop: string) {\n return (value: string) => Enum(prop, value)\n },\n },\n)\n\n// type Bar = Enum<{\n// Kaka: 1\n// Bar: 2\n// }>\n\n// type FooInput = Enum<{\n// foo: \"foo\" | undefined\n// bar: Bar\n// baz: number\n// wtf: boolean\n// }>\n\n// declare function foo(foo: FooInput): void\n// foo(Enum(\"bar\", Enum(\"Bar\", 2)))\n\n// const InputEnum: GetEnum<FooInput> = null as any;\n// InputEnum.bar(Enum('Bar', 2))\n"],"names":[],"mappings":"AA+BA,MAAM,YAAA,GAA6B;AAAA,EACjC,EAAA,CACE,OACA,IAAA,EAC0B;AAC1B,IAAA,OAAO,MAAM,IAAA,KAAS,IAAA;AAAA,EACxB,CAAA;AAAA,EACA,EAAA,CAAG,OAAO,IAAA,EAAM;AACd,IAAA,IAAI,SAAS,KAAA,CAAM,IAAA;AACjB,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,CAAA,cAAA,EAAiB,IAAI,CAAA,wBAAA,EAA2B,KAAA,CAAM,IAAI,CAAA;AAAA,OAC5D;AACF,IAAA,OAAO,KAAA;AAAA,EACT;AACF,CAAA;AAOO,MAAM,IAAA,GAAe,MAAA,CAAO,MAAA,CAAO,CAAC,MAAc,KAAA,KAAgB;AACvE,EAAA,OAAO;AAAA,IACL,IAAA;AAAA,IACA;AAAA,GACF;AACF,CAAA,EAAG,YAAY;AAUR,MAAM,QAAQ,IAAI,KAAA;AAAA,EACvB,EAAC;AAAA,EACD;AAAA,IACE,GAAA,CAAI,GAAG,IAAA,EAAc;AACnB,MAAA,OAAO,CAAC,KAAA,KAAkB,IAAA,CAAK,IAAA,EAAM,KAAK,CAAA;AAAA,IAC5C;AAAA;AAEJ;;;;"}
@@ -0,0 +1,50 @@
import { mergeUint8 } from '@pezkuwi/papi-utils';
import { compact, u16 } from 'scale-ts';
import './ss58-util.mjs';
import { Binary } from '../codecs/scale/Binary.mjs';
import '../codecs/scale/bitSequence.mjs';
import '../codecs/scale/char.mjs';
import '../codecs/scale/compact.mjs';
import '../codecs/scale/Hex.mjs';
import '../codecs/scale/fixed-str.mjs';
import '../codecs/scale/Variant.mjs';
import '../codecs/scale/ethAccount.mjs';
import '../codecs/scale/shaped.mjs';
import '../codecs/scale/BitSeq.mjs';
import '../codecs/blockHeader.mjs';
import '../codecs/metadata/metadata.mjs';
import '../codecs/metadata/v14.mjs';
import '../codecs/metadata/v15.mjs';
import '../codecs/metadata/v16.mjs';
import '../codecs/metadata/lookup.mjs';
import { Blake2256 } from '../hashes/blake2.mjs';
import '@noble/hashes/blake3.js';
import '@noble/hashes/sha3.js';
// Salt bytes prepended to the multisig derivation payload.
const PREFIX = Binary.fromText("modlpy/utilisuba").asBytes();
/**
 * Derives the deterministic multisig account id:
 * Blake2256(PREFIX ++ compact(signatory count) ++ sorted signatories ++
 * u16(threshold)).
 *
 * @param {{ threshold: number, signatories: Uint8Array[] }} config
 * @returns {Uint8Array} 32-byte account id
 */
const getMultisigAccountId = ({
  threshold,
  signatories
}) => {
  const sorted = sortMultisigSignatories(signatories);
  const payload = mergeUint8([
    PREFIX,
    compact.enc(sorted.length),
    ...sorted,
    u16.enc(threshold)
  ]);
  return Blake2256(payload);
};
/**
 * Returns a new array with the signatories (byte arrays) ordered
 * lexicographically by unsigned byte value; an array that is a strict
 * prefix of another sorts first. The input array is not mutated.
 */
const sortMultisigSignatories = (signatories) => [...signatories].sort((a, b) => {
  const shared = Math.min(a.length, b.length);
  for (let i = 0; i < shared; i++) {
    if (a[i] !== b[i]) return a[i] < b[i] ? -1 : 1;
  }
  return a.length - b.length;
});
export { getMultisigAccountId, sortMultisigSignatories };
//# sourceMappingURL=multisig.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"multisig.mjs","sources":["../../../src/utils/multisig.ts"],"sourcesContent":["import { mergeUint8 } from \"@pezkuwi/papi-utils\"\nimport { Binary, compact, u16 } from \"../codecs\"\nimport { Blake2256 } from \"../hashes\"\n\nconst PREFIX = Binary.fromText(\"modlpy/utilisuba\").asBytes()\nexport const getMultisigAccountId = ({\n threshold,\n signatories,\n}: {\n threshold: number\n signatories: Uint8Array[]\n}) => {\n const sortedSignatories = sortMultisigSignatories(signatories)\n const payload = mergeUint8([\n PREFIX,\n compact.enc(sortedSignatories.length),\n ...sortedSignatories,\n u16.enc(threshold),\n ])\n return Blake2256(payload)\n}\n\nexport const sortMultisigSignatories = (signatories: Uint8Array[]) =>\n signatories.slice().sort((a, b) => {\n for (let i = 0; ; i++) {\n const overA = i >= a.length\n const overB = i >= b.length\n\n if (overA && overB) return 0\n else if (overA) return -1\n else if (overB) return 1\n else if (a[i] !== b[i]) return a[i] > b[i] ? 1 : -1\n }\n })\n"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;AAIA,MAAM,MAAA,GAAS,MAAA,CAAO,QAAA,CAAS,kBAAkB,EAAE,OAAA,EAAQ;AACpD,MAAM,uBAAuB,CAAC;AAAA,EACnC,SAAA;AAAA,EACA;AACF,CAAA,KAGM;AACJ,EAAA,MAAM,iBAAA,GAAoB,wBAAwB,WAAW,CAAA;AAC7D,EAAA,MAAM,UAAU,UAAA,CAAW;AAAA,IACzB,MAAA;AAAA,IACA,OAAA,CAAQ,GAAA,CAAI,iBAAA,CAAkB,MAAM,CAAA;AAAA,IACpC,GAAG,iBAAA;AAAA,IACH,GAAA,CAAI,IAAI,SAAS;AAAA,GAClB,CAAA;AACD,EAAA,OAAO,UAAU,OAAO,CAAA;AAC1B;AAEO,MAAM,uBAAA,GAA0B,CAAC,WAAA,KACtC,WAAA,CAAY,OAAM,CAAE,IAAA,CAAK,CAAC,CAAA,EAAG,CAAA,KAAM;AACjC,EAAA,KAAA,IAAS,CAAA,GAAI,KAAK,CAAA,EAAA,EAAK;AACrB,IAAA,MAAM,KAAA,GAAQ,KAAK,CAAA,CAAE,MAAA;AACrB,IAAA,MAAM,KAAA,GAAQ,KAAK,CAAA,CAAE,MAAA;AAErB,IAAA,IAAI,KAAA,IAAS,OAAO,OAAO,CAAA;AAAA,SAAA,IAClB,OAAO,OAAO,EAAA;AAAA,SAAA,IACd,OAAO,OAAO,CAAA;AAAA,SAAA,IACd,CAAA,CAAE,CAAC,CAAA,KAAM,CAAA,CAAE,CAAC,CAAA,EAAG,OAAO,CAAA,CAAE,CAAC,CAAA,GAAI,CAAA,CAAE,CAAC,IAAI,CAAA,GAAI,EAAA;AAAA,EACnD;AACF,CAAC;;;;"}
@@ -0,0 +1,77 @@
import { base58 } from '@scure/base';
import { blake2b } from '@noble/hashes/blake2.js';
// Hashing preamble mandated by the SS58 spec: checksums are computed over
// "SS58PRE" ++ prefixBytes ++ publicKey.
const SS58_PREFIX = new TextEncoder().encode("SS58PRE");
// Number of checksum bytes appended to an address (first bytes of the blake2b hash).
const CHECKSUM_LENGTH = 2;
/**
 * Decodes an SS58 address string into its components.
 *
 * Returns `{ isValid: false }` for malformed base58 input or a checksum
 * mismatch, otherwise `{ isValid: true, ss58Format, publicKey }`.
 */
const getSs58AddressInfo = (address) => {
  try {
    const raw = base58.decode(address);
    // Bit 6 of the first byte flags a two-byte format prefix (formats >= 64).
    const prefixLength = raw[0] & 64 ? 2 : 1;
    const prefixBytes = raw.subarray(0, prefixLength);
    const publicKey = raw.subarray(prefixLength, raw.length - CHECKSUM_LENGTH);
    const checksum = raw.subarray(raw.length - CHECKSUM_LENGTH);
    // Checksum = leading bytes of blake2b-512("SS58PRE" ++ prefix ++ pubkey).
    const expected = blake2b(
      Uint8Array.of(...SS58_PREFIX, ...prefixBytes, ...publicKey),
      {
        dkLen: 64
      }
    );
    if (checksum[0] !== expected[0] || checksum[1] !== expected[1])
      return { isValid: false };
    return {
      isValid: true,
      ss58Format: prefixBytesToNumber(prefixBytes),
      publicKey: publicKey.slice()
    };
  } catch (_err) {
    return { isValid: false };
  }
};
/**
 * Recovers the numeric SS58 format from its prefix byte(s).
 *
 * One-byte prefixes (formats 0-63) are the format value itself. Two-byte
 * prefixes use the SS58 bit shuffle produced by `fromBufferToBase58` below:
 *   byte0 = ((format & 0b1111_1100) >> 2) | 0b0100_0000
 *   byte1 = (format >> 8) | ((format & 0b11) << 6)
 * The previous implementation read the two bytes as a big-endian u16, which
 * does not invert that shuffle and so returned a wrong `ss58Format` for every
 * two-byte prefix (e.g. bytes [105, 1] -> 26881 instead of 420).
 */
const prefixBytesToNumber = (bytes) => {
  if (bytes.length === 1) return bytes[0];
  return ((bytes[0] & 63) << 2) | (bytes[1] >> 6) | ((bytes[1] & 63) << 8);
};
/**
 * Wraps `fn` (publicKey -> string) with a memo that is dropped once the
 * wrapper goes idle: a zero-delay timer keeps re-arming while fresh calls
 * keep arriving, and clears the cache on the first tick with no new activity.
 */
const withSs58Cache = (fn) => {
  let memo = {};
  let totalCalls = 0;
  let callsAtLastSweep = 0;
  const sweepWhenIdle = () => {
    if (totalCalls === callsAtLastSweep) {
      memo = {};
      totalCalls = callsAtLastSweep = 0;
    } else {
      callsAtLastSweep = totalCalls;
      setTimeout(sweepWhenIdle, 0);
    }
  };
  return (publicKey) => {
    totalCalls += 1;
    // Arm the sweeper on the first call of each cache epoch.
    if (totalCalls === 1) sweepWhenIdle();
    const key = publicKey.join(",");
    return memo[key] || (memo[key] = fn(publicKey));
  };
};
/**
 * Returns a cached encoder from public-key bytes to an SS58 address string
 * for the given format.
 */
const fromBufferToBase58 = (ss58Format) => {
  // Formats < 64 are a single raw byte; larger formats use the two-byte
  // SS58 prefix bit shuffle.
  const prefixBytes =
    ss58Format < 64
      ? Uint8Array.of(ss58Format)
      : Uint8Array.of(
          ((ss58Format & 252) >> 2) | 64,
          (ss58Format >> 8) | ((ss58Format & 3) << 6)
        );
  return withSs58Cache((publicKey) => {
    // Checksum = leading bytes of blake2b-512("SS58PRE" ++ prefix ++ pubkey).
    const preimage = new Uint8Array(
      SS58_PREFIX.length + prefixBytes.length + publicKey.length
    );
    preimage.set(SS58_PREFIX);
    preimage.set(prefixBytes, SS58_PREFIX.length);
    preimage.set(publicKey, SS58_PREFIX.length + prefixBytes.length);
    const checksum = blake2b(preimage, {
      dkLen: 64
    }).subarray(0, CHECKSUM_LENGTH);
    const payload = new Uint8Array(
      prefixBytes.length + publicKey.length + CHECKSUM_LENGTH
    );
    payload.set(prefixBytes);
    payload.set(publicKey, prefixBytes.length);
    payload.set(checksum, prefixBytes.length + publicKey.length);
    return base58.encode(payload);
  });
};
export { fromBufferToBase58, getSs58AddressInfo };
//# sourceMappingURL=ss58-util.mjs.map
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large Load Diff
File diff suppressed because it is too large Load Diff
File diff suppressed because one or more lines are too long
+47
View File
@@ -0,0 +1,47 @@
{
"name": "@pezkuwi/bizinikiwi-bindings",
"version": "1.0.0",
"author": "Dijital Kurdistan Tech Institute <info@pezkuwichain.io>",
"bugs": "https://github.com/pezkuwichain/pezkuwi-api/issues",
"homepage": "https://github.com/pezkuwichain/pezkuwi-api/tree/master/packages/bizinikiwi-bindings#readme",
"repository": {
"directory": "packages/bizinikiwi-bindings",
"type": "git",
"url": "https://github.com/pezkuwichain/pezkuwi-api.git"
},
"license": "MIT",
"sideEffects": false,
"type": "module",
"exports": {
".": {
"node": {
"production": {
"import": "./dist/esm/index.mjs",
"require": "./dist/index.js",
"default": "./dist/index.js"
},
"import": "./dist/esm/index.mjs",
"require": "./dist/index.js",
"default": "./dist/index.js"
},
"module": "./dist/esm/index.mjs",
"import": "./dist/esm/index.mjs",
"require": "./dist/index.js",
"default": "./dist/index.js"
},
"./package.json": "./package.json"
},
"main": "./dist/index.js",
"module": "./dist/esm/index.mjs",
"browser": "./dist/esm/index.mjs",
"types": "./dist/index.d.ts",
"files": [
"dist"
],
"dependencies": {
"@noble/hashes": "^2.0.1",
"@scure/base": "^2.0.0",
"scale-ts": "^1.6.1",
"@pezkuwi/papi-utils": "1.0.0"
}
}
+21
View File
@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2023 Josep M Sobrepere
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
+46
View File
@@ -0,0 +1,46 @@
# @pezkuwi/merkleize-metadata
This TS package provides utils for the merkleization of [`frame_metadata`](https://docs.rs/frame-metadata/latest/frame_metadata/) as described in
[RFC78](https://polkadot-fellows.github.io/RFCs/approved/0078-merkleized-metadata.html).
## Usage
```ts
import { merkleizeMetadata } from "@pezkuwi/merkleize-metadata"
const ksmMetadata = new Uint8Array(await readFile("ksm.bin"))
const merkleizedMetadata = merkleizeMetadata(ksmMetadata, {
decimals: 12,
tokenSymbol: "KSM",
})
// it returns the digest value of the metadata (aka its merkleized root-hash)
const rootHash = merkleizedMetadata.digest()
// given an extrinsic, it returns an encoded `Proof`
const proof1: Uint8Array = merkleizedMetadata.getProofForExtrinsic(
// Hex for the transaction bytes
"c10184008eaf04151687736326c9fea17e25fc5287613693c912909cb226aa4794f26a4801127d333c8f60c0d81dd0a6e2e20ea477a06f96aaca1811872c54c244f0935c60b1f8a38aabef3d3a4ef4050d8d078e35b57b3cf4f9545f8145ce98afb8755384550000000000001448656c6c6f",
// Optionally, we can pass the tx additional signed data
"386d0f001a000000143c3561eefac7bc66facd4f0a7ec31d33b64f1827932fb3fda0ce361def535f143c3561eefac7bc66facd4f0a7ec31d33b64f1827932fb3fda0ce361def535f00",
)
// given the extrinsic "parts", it returns an encoded `Proof`
const proof2: Uint8Array = merkleizedMetadata.getProofForExtrinsicParts(
// Call data
"0x040300648ad065ea416ca1725c29979cd41e288180f3e8aefde705cd3e0bab6cd212010bcb04fb711f01",
// Signed Extension data included in the extrinsic
"0x2503000000",
// Signed Extension data included in the signature
"0x164a0f001a000000b0a8d493285c2df73290dfb7e61f870f17b41801197a149ca93654499ea3dafe878a023bcb37967b6ba0685d002bb74e6cf3b4fc4ae37eb85f756bd9b026bede00",
)
// The type `Proof` definition is as follows:
// interface Proof {
// leaves: Array<LookupEntry>,
// leafIdxs: Array<number>,
// proofs: Array<Uint8Array>,
// extrinsic: ExtrinsicMetadata,
// info: ExtraInfo
// }
```
+118
View File
@@ -0,0 +1,118 @@
import { Struct, str, u8, u16, u32, Bytes, ScaleEnum, _void, compactNumber, Option, Vector, bool, Tuple, compact, extrinsicFormat } from '@pezkuwi/bizinikiwi-bindings';
// SCALE codecs for the RFC-0078 merkleized-metadata structures.

// Chain-identity fields baked into the digest ("ExtraInfo").
const extraInfoInner = {
  specVersion: u32,
  specName: str,
  base58Prefix: u16,
  decimals: u8,
  tokenSymbol: str
};
const extraInfo = Struct(extraInfoInner);
// A 32-byte hash value.
const hash = Bytes(32);
// MetadataDigest: V0 is void/reserved; V1 carries the type-tree root, the
// extrinsic-metadata hash, and the extra info.
const metadataDigest = ScaleEnum({
  V0: _void,
  V1: Struct({
    typeInformationTreeRoot: hash,
    extrinsicMetadataHash: hash,
    ...extraInfoInner
  })
});
// NOTE(review): this ScaleEnum result is discarded — it looks like bundler
// residue of a removed named export (the primitive-only subset of `typeRef`);
// confirm ScaleEnum has no needed side effects before deleting it.
ScaleEnum({
  bool: _void,
  char: _void,
  str: _void,
  u8: _void,
  u16: _void,
  u32: _void,
  u64: _void,
  u128: _void,
  u256: _void,
  i8: _void,
  i16: _void,
  i32: _void,
  i64: _void,
  i128: _void,
  i256: _void
});
// TypeRef: either an inline primitive/compact/void tag, or `perId`, a
// pointer into the type lookup.
const typeRef = ScaleEnum({
  bool: _void,
  char: _void,
  str: _void,
  u8: _void,
  u16: _void,
  u32: _void,
  u64: _void,
  u128: _void,
  u256: _void,
  i8: _void,
  i16: _void,
  i32: _void,
  i64: _void,
  i128: _void,
  i256: _void,
  compactU8: _void,
  compactU16: _void,
  compactU32: _void,
  compactU64: _void,
  compactU128: _void,
  compactU256: _void,
  void: _void,
  perId: compactNumber
});
// A (possibly named) typed field of a composite or enum variant.
const field = Struct({
  name: Option(str),
  ty: typeRef,
  typeName: Option(str)
});
// TypeDef: the shape of a single lookup entry. Enumerations are flattened to
// one entry per variant (hence the per-variant `index`).
const typeDef = ScaleEnum({
  composite: Vector(field),
  enumeration: Struct({
    name: str,
    fields: Vector(field),
    index: compactNumber
  }),
  sequence: typeRef,
  array: Struct({
    len: u32,
    typeParam: typeRef
  }),
  tuple: Vector(typeRef),
  bitSequence: Struct({
    numBytes: u8,
    leastSignificantBitFirst: bool
  })
});
// A single merkle-tree leaf: path + definition + type id.
const lookupType = Struct({
  path: Vector(str),
  typeDef,
  typeId: compactNumber
});
const lookup = Vector(lookupType);
// ExtrinsicMetadata: how to decode an extrinsic of `version` via TypeRefs.
const extrinsicMetadata = Struct({
  version: u8,
  addressTy: typeRef,
  callTy: typeRef,
  signatureTy: typeRef,
  signedExtensions: Vector(
    Struct({
      identifier: str,
      includedInExtrinsic: typeRef,
      includedInSignedData: typeRef
    })
  )
});
// Decoder for a length-prefixed extrinsic: [byteLength, format, payload].
// `[1]` selects the decoder half of each scale codec tuple.
const extrinsicDec = Tuple.dec(
  compact[1],
  extrinsicFormat[1],
  Bytes(Infinity)[1]
);
// The encoded Proof, as produced by the getProofFor* helpers in main.mjs.
const proof = Struct({
  leaves: lookup,
  leafIdxs: Vector(u32),
  proofs: Vector(hash),
  extrinsic: extrinsicMetadata,
  info: extraInfo
});
export { extraInfo, extrinsicDec, extrinsicMetadata, hash, lookup, lookupType, metadataDigest, proof, typeDef, typeRef };
//# sourceMappingURL=codecs.mjs.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,94 @@
import { createDecoder, compact, _void, i256, i128, i64, i32, i16, i8, u256, u128, u64, u32, u16, u8, str } from '@pezkuwi/bizinikiwi-bindings';
// Decoders used by `innerDecodeAndCollect` to skip primitive values while
// walking an encoded extrinsic. The values are scale codec tuples; their [1]
// element (the decoder) is what gets invoked.
const typeRefDecoders = {
  // bool and char occupy a single byte on the wire, so u8 advances the
  // stream cursor correctly even though it ignores their semantics.
  bool: u8,
  char: u8,
  str,
  u8,
  u16,
  u32,
  u64,
  u128,
  u256,
  i8,
  i16,
  i32,
  i64,
  i128,
  i256,
  void: _void,
  // All compact widths share the one compact codec.
  compactU8: compact,
  compactU16: compact,
  compactU32: compact,
  compactU64: compact,
  compactU128: compact,
  compactU256: compact
};
/**
 * Walks `input` (a decoder-wrapped byte stream with an internal cursor —
 * see `createDecoder` usage in `decodeAndCollectKnownLeafs`) according to
 * `typeRef`, adding to `collected` the lookup index of every type the
 * encoded data actually touches. Byte-consumption order mirrors SCALE
 * encoding exactly, so statement order here is load-bearing.
 */
const innerDecodeAndCollect = (input, typeRef, idToLookups, lookup, collected) => {
  if (typeRef.tag !== "perId") {
    // Primitive/compact/void tag: run the matching decoder purely to advance
    // the cursor ([1] is the decoder half of the codec tuple).
    typeRefDecoders[typeRef.tag][1](input);
    return;
  }
  const handleTypeRef = (typeRef2) => {
    innerDecodeAndCollect(input, typeRef2, idToLookups, lookup, collected);
  };
  // All lookup indexes sharing this type id — enumerations are flattened to
  // one entry per variant, so an id can map to several indexes.
  const lookupIdxs = idToLookups.get(typeRef.value);
  const [currentIdx] = lookupIdxs;
  const current = lookup[currentIdx];
  if (lookupIdxs.length === 1) collected.add(currentIdx);
  switch (current.typeDef.tag) {
    case "enumeration": {
      // The first byte selects the variant; collect only that variant's entry.
      const selectedIdx = u8.dec(input);
      const [selected, collectedIdx] = lookupIdxs.map(
        (lookupIdx) => [lookup[lookupIdx].typeDef, lookupIdx]
      ).find(([x]) => x.value.index === selectedIdx);
      collected.add(collectedIdx);
      selected.value.fields.forEach(({ ty }) => {
        handleTypeRef(ty);
      });
      break;
    }
    case "sequence": {
      // Compact length prefix, then `len` repetitions of the inner type.
      const len = compact.dec(input);
      for (let i = 0; i < len; i++) handleTypeRef(current.typeDef.value);
      break;
    }
    case "array": {
      // Fixed length: no prefix on the wire.
      for (let i = 0; i < current.typeDef.value.len; i++)
        handleTypeRef(current.typeDef.value.typeParam);
      break;
    }
    case "composite": {
      current.typeDef.value.forEach((x) => {
        handleTypeRef(x.ty);
      });
      break;
    }
    case "tuple": {
      current.typeDef.value.forEach(handleTypeRef);
      break;
    }
    case "bitSequence":
      // Cannot be skipped without knowing the bit-store width at this level.
      throw new Error("bitSequence is not supported");
  }
};
/**
 * Decodes `data` against the given sequence of TypeRefs and returns the
 * sorted lookup indexes of every type the encoded bytes touched.
 */
const decodeAndCollectKnownLeafs = (data, typeRefs, lookup) => {
  // Capture the decoder-wrapped input stream without consuming any bytes;
  // the wrapped stream carries the cursor shared by all nested decoders.
  let input = new Uint8Array();
  createDecoder((wrapped) => {
    input = wrapped;
  })(data);
  // Group lookup indexes by type id (enums contribute one entry per variant).
  const idToLookups = new Map();
  for (let idx = 0; idx < lookup.length; idx++) {
    const { typeId } = lookup[idx];
    const existing = idToLookups.get(typeId);
    if (existing) existing.push(idx);
    else idToLookups.set(typeId, [idx]);
  }
  const collected = new Set();
  for (const typeRef of typeRefs)
    innerDecodeAndCollect(input, typeRef, idToLookups, lookup, collected);
  return [...collected].sort((a, b) => a - b);
};
export { decodeAndCollectKnownLeafs };
//# sourceMappingURL=decode-and-collect.mjs.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,52 @@
/**
 * Computes the set of lookup type ids reachable from the extrinsic's
 * address/call/signature types and its signed extensions, then assigns each
 * a dense sequential index (ascending by frame id).
 *
 * Empty composites/variants/tuples, primitives and compacts are not stored;
 * bitSequence inner types are not followed.
 */
const getAccessibleTypes = (metadata, definitions) => {
  const reachable = new Set();
  const visit = (id) => {
    if (reachable.has(id)) return;
    const { tag, value } = definitions.get(id).def;
    if (tag === "composite") {
      if (value.length === 0) return;
      reachable.add(id);
      for (const { type } of value) visit(type);
    } else if (tag === "variant") {
      if (value.length === 0) return;
      reachable.add(id);
      for (const { fields } of value)
        for (const { type } of fields) visit(type);
    } else if (tag === "tuple") {
      if (value.length === 0) return;
      reachable.add(id);
      for (const inner of value) visit(inner);
    } else if (tag === "sequence") {
      reachable.add(id);
      visit(value);
    } else if (tag === "array") {
      reachable.add(id);
      visit(value.type);
    } else if (tag === "bitSequence") {
      // Inner types of a bitSequence are not stored.
      reachable.add(id);
    }
    // primitive and compact definitions are intentionally not stored
  };
  const { extrinsic } = metadata;
  visit(extrinsic.call);
  visit(extrinsic.address);
  visit(extrinsic.signature);
  for (const { type, additionalSigned } of extrinsic.signedExtensions) {
    visit(type);
    visit(additionalSigned);
  }
  return new Map(
    [...reachable].sort((a, b) => a - b).map((frameId, idx) => [frameId, idx])
  );
};
export { getAccessibleTypes };
//# sourceMappingURL=get-accessible-types.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"get-accessible-types.mjs","sources":["../../src/get-accessible-types.ts"],"sourcesContent":["import { UnifiedMetadata } from \"@pezkuwi/bizinikiwi-bindings\"\nimport { LookupValue } from \"./codecs\"\n\nexport const getAccessibleTypes = (\n metadata: UnifiedMetadata<15 | 16>,\n definitions: Map<number, LookupValue>,\n): Map<number, number> => {\n const types = new Set<number>()\n\n const collectTypesFromId = (id: number) => {\n if (types.has(id)) return\n\n const { tag, value } = definitions.get(id)!.def\n switch (tag) {\n case \"composite\":\n if (!value.length) break\n types.add(id)\n value.forEach(({ type }) => {\n collectTypesFromId(type)\n })\n break\n case \"variant\":\n if (!value.length) break\n types.add(id)\n value.forEach(({ fields }) => {\n fields.forEach(({ type }) => {\n collectTypesFromId(type)\n })\n })\n break\n case \"tuple\":\n if (!value.length) break\n types.add(id)\n value.forEach(collectTypesFromId)\n break\n case \"sequence\":\n types.add(id)\n collectTypesFromId(value)\n break\n case \"array\":\n types.add(id)\n collectTypesFromId(value.type)\n break\n case \"bitSequence\": // bitSequence inner types are not stored\n types.add(id)\n // primitive and compact are not stored\n }\n }\n\n collectTypesFromId(metadata.extrinsic.call)\n collectTypesFromId(metadata.extrinsic.address)\n collectTypesFromId(metadata.extrinsic.signature)\n metadata.extrinsic.signedExtensions.forEach(({ type, additionalSigned }) => {\n collectTypesFromId(type)\n collectTypesFromId(additionalSigned)\n })\n\n const sortedTypes = [...types].sort((a, b) => a - b)\n return new Map(sortedTypes.map((value, idx) => [value, 
idx]))\n}\n"],"names":[],"mappings":"AAGO,MAAM,kBAAA,GAAqB,CAChC,QAAA,EACA,WAAA,KACwB;AACxB,EAAA,MAAM,KAAA,uBAAY,GAAA,EAAY;AAE9B,EAAA,MAAM,kBAAA,GAAqB,CAAC,EAAA,KAAe;AACzC,IAAA,IAAI,KAAA,CAAM,GAAA,CAAI,EAAE,CAAA,EAAG;AAEnB,IAAA,MAAM,EAAE,GAAA,EAAK,KAAA,KAAU,WAAA,CAAY,GAAA,CAAI,EAAE,CAAA,CAAG,GAAA;AAC5C,IAAA,QAAQ,GAAA;AAAK,MACX,KAAK,WAAA;AACH,QAAA,IAAI,CAAC,MAAM,MAAA,EAAQ;AACnB,QAAA,KAAA,CAAM,IAAI,EAAE,CAAA;AACZ,QAAA,KAAA,CAAM,OAAA,CAAQ,CAAC,EAAE,IAAA,EAAK,KAAM;AAC1B,UAAA,kBAAA,CAAmB,IAAI,CAAA;AAAA,QACzB,CAAC,CAAA;AACD,QAAA;AAAA,MACF,KAAK,SAAA;AACH,QAAA,IAAI,CAAC,MAAM,MAAA,EAAQ;AACnB,QAAA,KAAA,CAAM,IAAI,EAAE,CAAA;AACZ,QAAA,KAAA,CAAM,OAAA,CAAQ,CAAC,EAAE,MAAA,EAAO,KAAM;AAC5B,UAAA,MAAA,CAAO,OAAA,CAAQ,CAAC,EAAE,IAAA,EAAK,KAAM;AAC3B,YAAA,kBAAA,CAAmB,IAAI,CAAA;AAAA,UACzB,CAAC,CAAA;AAAA,QACH,CAAC,CAAA;AACD,QAAA;AAAA,MACF,KAAK,OAAA;AACH,QAAA,IAAI,CAAC,MAAM,MAAA,EAAQ;AACnB,QAAA,KAAA,CAAM,IAAI,EAAE,CAAA;AACZ,QAAA,KAAA,CAAM,QAAQ,kBAAkB,CAAA;AAChC,QAAA;AAAA,MACF,KAAK,UAAA;AACH,QAAA,KAAA,CAAM,IAAI,EAAE,CAAA;AACZ,QAAA,kBAAA,CAAmB,KAAK,CAAA;AACxB,QAAA;AAAA,MACF,KAAK,OAAA;AACH,QAAA,KAAA,CAAM,IAAI,EAAE,CAAA;AACZ,QAAA,kBAAA,CAAmB,MAAM,IAAI,CAAA;AAC7B,QAAA;AAAA,MACF,KAAK,aAAA;AACH,QAAA,KAAA,CAAM,IAAI,EAAE,CAAA;AAAA;AAEhB,EACF,CAAA;AAEA,EAAA,kBAAA,CAAmB,QAAA,CAAS,UAAU,IAAI,CAAA;AAC1C,EAAA,kBAAA,CAAmB,QAAA,CAAS,UAAU,OAAO,CAAA;AAC7C,EAAA,kBAAA,CAAmB,QAAA,CAAS,UAAU,SAAS,CAAA;AAC/C,EAAA,QAAA,CAAS,UAAU,gBAAA,CAAiB,OAAA,CAAQ,CAAC,EAAE,IAAA,EAAM,kBAAiB,KAAM;AAC1E,IAAA,kBAAA,CAAmB,IAAI,CAAA;AACvB,IAAA,kBAAA,CAAmB,gBAAgB,CAAA;AAAA,EACrC,CAAC,CAAA;AAED,EAAA,MAAM,WAAA,GAAc,CAAC,GAAG,KAAK,CAAA,CAAE,KAAK,CAAC,CAAA,EAAG,CAAA,KAAM,CAAA,GAAI,CAAC,CAAA;AACnD,EAAA,OAAO,IAAI,GAAA,CAAI,WAAA,CAAY,GAAA,CAAI,CAAC,KAAA,EAAO,GAAA,KAAQ,CAAC,KAAA,EAAO,GAAG,CAAC,CAAC,CAAA;AAC9D;;;;"}
+103
View File
@@ -0,0 +1,103 @@
// Width in bytes of each primitive allowed as a BitSequence store type.
const bitSequenceBytes = {
  u8: 1,
  u16: 2,
  u32: 4,
  u64: 8
};
/**
 * Converts one frame lookup definition into RFC-0078 TypeDef entries.
 * Variants expand to one `enumeration` entry per variant; every other shape
 * yields exactly one entry. Throws for tags not handled here (primitive,
 * compact), which should have been filtered out of the accessible set.
 */
const constructTypeDef = (definitions, getTypeRef, getPrimitive, frameId) => {
  const {
    def: { tag, value }
  } = definitions.get(frameId);
  // Shared mapping from a frame field to an RFC field (name/typeName/ty).
  const mapField = (f) => ({
    name: f.name,
    typeName: f.typeName,
    ty: getTypeRef(f.type)
  });
  if (tag === "composite") return [{ tag, value: value.map(mapField) }];
  if (tag === "variant")
    return value.map((v) => ({
      tag: "enumeration",
      value: {
        name: v.name,
        index: v.index,
        fields: v.fields.map(mapField)
      }
    }));
  if (tag === "sequence") return [{ tag, value: getTypeRef(value) }];
  if (tag === "array")
    return [
      {
        tag,
        value: {
          len: value.len,
          typeParam: getTypeRef(value.type)
        }
      }
    ];
  if (tag === "tuple") return [{ tag, value: value.map(getTypeRef) }];
  if (tag === "bitSequence") {
    const primitive = getPrimitive(value.bitStoreType);
    const numBytes = bitSequenceBytes[primitive];
    if (!numBytes) throw new Error("Invalid primitive for BitSequence");
    // Bit order is recognized from the order type's path (Lsb0 / Msb0).
    const orderPath = definitions.get(value.bitOrderType).path;
    const leastSignificantBitFirst = orderPath.includes("Lsb0");
    if (!leastSignificantBitFirst && !orderPath.includes("Msb0"))
      throw new Error("BitOrderType not recognized");
    return [
      {
        tag: "bitSequence",
        value: { numBytes, leastSignificantBitFirst }
      }
    ];
  }
  throw new Error(`FrameId(${frameId}) should have been filtered out`);
};
/**
 * Builds the flat type lookup (the merkle leaves): one entry per accessible
 * type, expanded per variant for enums, sorted by typeId then variant index.
 * Two non-enumeration entries sharing a typeId is a hard error.
 */
const getLookup = (definitions, accessibleTypes, getTypeRef, getPrimitive) => {
  const entries = [];
  for (const [frameId, typeId] of accessibleTypes) {
    const { path } = definitions.get(frameId);
    for (const typeDef of constructTypeDef(
      definitions,
      getTypeRef,
      getPrimitive,
      frameId
    )) {
      entries.push({ path, typeId, typeDef });
    }
  }
  entries.sort((a, b) => {
    if (a.typeId !== b.typeId) return a.typeId - b.typeId;
    // Only enum variants may legitimately share a typeId.
    if (a.typeDef.tag !== "enumeration" || b.typeDef.tag !== "enumeration")
      throw new Error("Found two types with same id");
    return a.typeDef.value.index - b.typeDef.value.index;
  });
  return entries;
};
export { getLookup };
//# sourceMappingURL=get-lookup.mjs.map
File diff suppressed because one or more lines are too long
+14
View File
@@ -0,0 +1,14 @@
import { unifyMetadata, decAnyMetadata } from '@pezkuwi/bizinikiwi-bindings';
/**
 * Decodes raw metadata bytes (or a hex string) into unified form, accepting
 * only metadata v15 and above.
 */
const getMetadata = (input) => {
  let metadata;
  try {
    metadata = unifyMetadata(decAnyMetadata(input));
  } catch (e) {
    // Guard against falsy thrown values so callers always get an Error.
    throw e || new Error("Unable to decode metadata");
  }
  if (metadata.version <= 14) throw new Error("Wrong metadata version");
  return metadata;
};
export { getMetadata };
//# sourceMappingURL=get-metadata.mjs.map
@@ -0,0 +1 @@
{"version":3,"file":"get-metadata.mjs","sources":["../../src/get-metadata.ts"],"sourcesContent":["import {\n decAnyMetadata,\n UnifiedMetadata,\n type HexString,\n unifyMetadata,\n} from \"@pezkuwi/bizinikiwi-bindings\"\n\nexport const getMetadata = (\n input: Uint8Array | HexString,\n): UnifiedMetadata<15 | 16> => {\n try {\n const metadata = unifyMetadata(decAnyMetadata(input))\n if (metadata.version <= 14) throw new Error(\"Wrong metadata version\")\n return metadata as UnifiedMetadata<15 | 16>\n } catch (e) {\n throw e || new Error(\"Unable to decode metadata\")\n }\n}\n"],"names":[],"mappings":";;AAOO,MAAM,WAAA,GAAc,CACzB,KAAA,KAC6B;AAC7B,EAAA,IAAI;AACF,IAAA,MAAM,QAAA,GAAW,aAAA,CAAc,cAAA,CAAe,KAAK,CAAC,CAAA;AACpD,IAAA,IAAI,SAAS,OAAA,IAAW,EAAA,EAAI,MAAM,IAAI,MAAM,wBAAwB,CAAA;AACpE,IAAA,OAAO,QAAA;AAAA,EACT,SAAS,CAAA,EAAG;AACV,IAAA,MAAM,CAAA,IAAK,IAAI,KAAA,CAAM,2BAA2B,CAAA;AAAA,EAClD;AACF;;;;"}
+3
View File
@@ -0,0 +1,3 @@
export { merkleizeMetadata } from './main.mjs';
export { extraInfo, extrinsicMetadata, hash, lookup, lookupType, proof } from './codecs.mjs';
//# sourceMappingURL=index.mjs.map
+1
View File
@@ -0,0 +1 @@
{"version":3,"file":"index.mjs","sources":[],"sourcesContent":[],"names":[],"mappings":";"}
+175
View File
@@ -0,0 +1,175 @@
import { Blake3256, compact, u32 } from '@pezkuwi/bizinikiwi-bindings';
import { lookupType, extrinsicDec, extrinsicMetadata, metadataDigest, extraInfo } from './codecs.mjs';
import { getAccessibleTypes } from './get-accessible-types.mjs';
import { getLookup } from './get-lookup.mjs';
import { getMetadata } from './get-metadata.mjs';
import { compactTypeRefs, toBytes } from './utils.mjs';
import { decodeAndCollectKnownLeafs } from './decode-and-collect.mjs';
import { getProofData } from './proof.mjs';
import { getDynamicBuilder, getLookupFn } from '@pezkuwi/metadata-builders';
import { mergeUint8 } from '@pezkuwi/papi-utils';
/**
 * Asserts that an optional user-supplied hint matches the value decoded from
 * the metadata itself. A null/undefined `received` means "no hint given" and
 * is always accepted.
 */
const assertExpected = (name, expected, received) => {
  if (received == null) return;
  if (received === expected) return;
  throw new Error(
    `${name} not expected. Received ${received} expected ${expected}`
  );
};
/**
 * Builds the RFC-0078 merkleized representation of a chain's metadata.
 *
 * `metadataBytes` is the raw (any-version) metadata blob; `decimals` and
 * `tokenSymbol` must be supplied by the caller, while the remaining hints
 * (`base58Prefix`, `specName`, `specVersion`) are optional cross-checks
 * against the values decoded from the metadata itself.
 *
 * Returns `{ digest, getProofForExtrinsic, getProofForExtrinsicParts,
 * getProofForExtrinsicPayload }`. Throws when the metadata is older than
 * v15, does not support extrinsic v4, or a hint disagrees.
 */
const merkleizeMetadata = (metadataBytes, {
  decimals,
  tokenSymbol,
  ...hinted
}) => {
  const metadata = getMetadata(metadataBytes);
  // `.includes` implies `extrinsic.version` is a list of supported extrinsic
  // versions on unified metadata; only format v4 is handled here.
  const checkedVersion = metadata.extrinsic.version.includes(4) ? 4 : null;
  if (checkedVersion == null) throw new Error("Only extrinsic v4 is supported");
  const { ss58Prefix, buildDefinition } = getDynamicBuilder(
    getLookupFn(metadata)
  );
  if (ss58Prefix == null) throw new Error("SS58 prefix not found in metadata");
  assertExpected("SS58 prefix", ss58Prefix, hinted.base58Prefix);
  // Spec name/version come from the System.Version runtime constant.
  const version = metadata.pallets.find((x) => x.name === "System")?.constants.find((x) => x.name === "Version");
  if (version == null) throw new Error("System.Version constant not found");
  const { spec_name: specName, spec_version: specVersion } = buildDefinition(
    version.type
  ).dec(version.value);
  if (typeof specName !== "string" || typeof specVersion !== "number")
    throw new Error("Spec name or spec version not found");
  assertExpected("Spec name", specName, hinted.specName);
  assertExpected("Spec version", specVersion, hinted.specVersion);
  // "ExtraInfo" per RFC-0078: chain identity mixed into the digest.
  const info = {
    decimals,
    tokenSymbol,
    specVersion,
    specName,
    base58Prefix: ss58Prefix
  };
  const definitions = new Map(
    metadata.lookup.map((value) => [value.id, value])
  );
  const accessibleTypes = getAccessibleTypes(metadata, definitions);
  // Resolves a frame type id to the primitive tag it wraps, unwrapping
  // single-field composites/tuples; null for the empty composite/tuple.
  const getPrimitive = (frameId) => {
    const {
      def: { tag, value }
    } = definitions.get(frameId);
    if (tag === "primitive") return value.tag;
    if (tag !== "composite" && tag !== "tuple" || value.length > 1)
      throw new Error("The provided definition doesn't map to a primitive");
    return value.length === 0 ? null : getPrimitive(tag === "tuple" ? value[0] : value[0].type);
  };
  // Maps a frame type id to an RFC-0078 TypeRef: primitives and compacts get
  // dedicated tags, accessible types point into the lookup, the rest is void.
  const getTypeRef = (frameId) => {
    const { def } = definitions.get(frameId);
    if (def.tag === "primitive") return { tag: def.value.tag, value: void 0 };
    if (def.tag === "compact") {
      const primitive = getPrimitive(def.value);
      const tag = compactTypeRefs[primitive];
      if (!tag) throw new Error("Invalid primitive for Compact");
      return { tag, value: void 0 };
    }
    return accessibleTypes.has(frameId) ? { tag: "perId", value: accessibleTypes.get(frameId) } : { tag: "void", value: void 0 };
  };
  // RFC ExtrinsicMetadata for the v4 format.
  const extrinsic = {
    version: checkedVersion,
    addressTy: getTypeRef(metadata.extrinsic.address),
    callTy: getTypeRef(metadata.extrinsic.call),
    signatureTy: getTypeRef(metadata.extrinsic.signature),
    signedExtensions: metadata.extrinsic.signedExtensions.map((se) => ({
      identifier: se.identifier,
      includedInExtrinsic: getTypeRef(se.type),
      includedInSignedData: getTypeRef(se.additionalSigned)
    }))
  };
  const lookup = getLookup(
    definitions,
    accessibleTypes,
    getTypeRef,
    getPrimitive
  );
  const lookupEncoded = lookup.map(lookupType.enc);
  // Lazily-built binary merkle tree over the encoded lookup entries, stored
  // as a flat array: leaves occupy the tail, parents are filled bottom-up,
  // the root ends up at index 0. An empty lookup hashes to 32 zero bytes.
  let hashTree;
  const getHashTree = () => {
    if (hashTree) return hashTree;
    if (!lookupEncoded.length) return hashTree = [new Uint8Array(32).fill(0)];
    hashTree = new Array(lookupEncoded.length * 2 - 1);
    let leavesStartIdx = lookupEncoded.length - 1;
    for (let i = 0; i < lookupEncoded.length; i++)
      hashTree[leavesStartIdx + i] = Blake3256(lookupEncoded[i]);
    for (let i = hashTree.length - 2; i > 0; i -= 2)
      hashTree[(i - 1) / 2] = Blake3256(
        mergeUint8([hashTree[i], hashTree[i + 1]])
      );
    return hashTree;
  };
  let digested;
  // Root hash of the MetadataDigest (V1) — memoized after first computation.
  const digest = () => {
    if (digested) return digested;
    const rootLookupHash = getHashTree()[0];
    const digest2 = {
      tag: "V1",
      value: {
        typeInformationTreeRoot: rootLookupHash,
        extrinsicMetadataHash: Blake3256(extrinsicMetadata.enc(extrinsic)),
        ...info
      }
    };
    return digested = Blake3256(metadataDigest.enc(digest2));
  };
  // Encodes a Proof (leaves, leaf indexes, sibling hashes, extrinsic
  // metadata, extra info) covering the given sorted leaf indexes.
  const generateProof = (knownIndexes) => {
    const proofData = getProofData(lookupEncoded, knownIndexes);
    const hashTree2 = getHashTree();
    const proofs = proofData.proofIdxs.map((idx) => hashTree2[idx]);
    return mergeUint8([
      compact.enc(proofData.leaves.length),
      ...proofData.leaves,
      compact.enc(proofData.leafIdxs.length),
      ...proofData.leafIdxs.map((x) => u32.enc(x)),
      compact.enc(proofs.length),
      ...proofs,
      extrinsicMetadata.enc(extrinsic),
      extraInfo.enc(info)
    ]);
  };
  // Proof for a signer payload: call data followed by both signed-extension
  // data sections, in declaration order.
  const getProofForExtrinsicPayload = (extrinsicPayload) => {
    const typeRefs = [
      extrinsic.callTy,
      ...extrinsic.signedExtensions.map((x) => x.includedInExtrinsic),
      ...extrinsic.signedExtensions.map((x) => x.includedInSignedData)
    ];
    return generateProof(
      decodeAndCollectKnownLeafs(extrinsicPayload, typeRefs, lookup)
    );
  };
  // Convenience wrapper: concatenates the three hex/byte parts into one
  // payload and delegates to getProofForExtrinsicPayload.
  const getProofForExtrinsicParts = (callData, includedInExtrinsic, includedInSignedData) => {
    const bytes = mergeUint8(
      [callData, includedInExtrinsic, includedInSignedData].map(toBytes)
    );
    return getProofForExtrinsicPayload(bytes);
  };
  // Proof for a full encoded extrinsic, optionally extended with the
  // additional-signed data that is not part of the transaction bytes.
  const getProofForExtrinsic = (transaction, txAdditionalSigned) => {
    let [, { version: version2, type }, bytes] = extrinsicDec(transaction);
    if (version2 !== extrinsic.version)
      throw new Error("Incorrect extrinsic version");
    // Signed extrinsics carry address + signature + extension data before
    // the call; bare ones are just the call.
    const typeRefs = type === "signed" ? [
      extrinsic.addressTy,
      extrinsic.signatureTy,
      ...extrinsic.signedExtensions.map((x) => x.includedInExtrinsic),
      extrinsic.callTy
    ] : [extrinsic.callTy];
    if (txAdditionalSigned) {
      bytes = mergeUint8([bytes, toBytes(txAdditionalSigned)]);
      typeRefs.push(
        ...extrinsic.signedExtensions.map((x) => x.includedInSignedData)
      );
    }
    return generateProof(decodeAndCollectKnownLeafs(bytes, typeRefs, lookup));
  };
  return {
    digest,
    getProofForExtrinsic,
    getProofForExtrinsicParts,
    getProofForExtrinsicPayload
  };
};
export { merkleizeMetadata };
//# sourceMappingURL=main.mjs.map
File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff Show More