mirror of
https://github.com/pezkuwichain/pezkuwi-api.git
synced 2026-04-22 09:07:56 +00:00
31467f90d4
- @pezkuwi/papi-utils (rebrand of @polkadot-api/utils) - @pezkuwi/bizinikiwi-bindings (rebrand of @polkadot-api/substrate-bindings) - @pezkuwi/metadata-builders (rebrand of @polkadot-api/metadata-builders) - @pezkuwi/merkleize-metadata (rebrand of @polkadot-api/merkleize-metadata) All @polkadot-api references replaced with @pezkuwi equivalents.
598 lines
19 KiB
JavaScript
'use strict';
|
|
|
|
var substrateBindings = require('@pezkuwi/bizinikiwi-bindings');
|
|
var utils = require('@pezkuwi/papi-utils');
|
|
var metadataBuilders = require('@pezkuwi/metadata-builders');
|
|
|
|
// Codec fields for the chain-specific "extra info" that is folded into the
// metadata digest: spec name/version, SS58 prefix and the token's
// decimals/symbol. NOTE: SCALE struct encoding is positional — the field
// order here is part of the wire format and must not be changed.
const extraInfoInner = {
  specVersion: substrateBindings.u32,
  specName: substrateBindings.str,
  base58Prefix: substrateBindings.u16,
  decimals: substrateBindings.u8,
  tokenSymbol: substrateBindings.str
};
const extraInfo = substrateBindings.Struct(extraInfoInner);
// 32-byte hash (matches the output size of the Blake3256 calls below).
const hash = substrateBindings.Bytes(32);
// Versioned digest envelope. V1 commits to the root of the type-information
// merkle tree, the hash of the extrinsic metadata, and the extra-info fields.
const metadataDigest = substrateBindings.ScaleEnum({
  V0: substrateBindings._void,
  V1: substrateBindings.Struct({
    typeInformationTreeRoot: hash,
    extrinsicMetadataHash: hash,
    ...extraInfoInner
  })
});
|
|
// NOTE(review): the result of this ScaleEnum(...) call is never assigned,
// used or exported — it looks like dead code left behind by the bundler
// (a primitive-only enum superseded by `typeRef` below). Kept as-is because
// removing it would alter module evaluation; confirm before deleting.
substrateBindings.ScaleEnum({
  bool: substrateBindings._void,
  char: substrateBindings._void,
  str: substrateBindings._void,
  u8: substrateBindings._void,
  u16: substrateBindings._void,
  u32: substrateBindings._void,
  u64: substrateBindings._void,
  u128: substrateBindings._void,
  u256: substrateBindings._void,
  i8: substrateBindings._void,
  i16: substrateBindings._void,
  i32: substrateBindings._void,
  i64: substrateBindings._void,
  i128: substrateBindings._void,
  i256: substrateBindings._void
});
|
|
// Reference to a type as used inside the merkleized type tree: either an
// inline primitive/compact/void marker, or `perId` — a compact index into
// the accessible-types table (see getAccessibleTypes / getTypeRef below).
// Enum entry order is part of the wire format; do not reorder.
const typeRef = substrateBindings.ScaleEnum({
  bool: substrateBindings._void,
  char: substrateBindings._void,
  str: substrateBindings._void,
  u8: substrateBindings._void,
  u16: substrateBindings._void,
  u32: substrateBindings._void,
  u64: substrateBindings._void,
  u128: substrateBindings._void,
  u256: substrateBindings._void,
  i8: substrateBindings._void,
  i16: substrateBindings._void,
  i32: substrateBindings._void,
  i64: substrateBindings._void,
  i128: substrateBindings._void,
  i256: substrateBindings._void,
  compactU8: substrateBindings._void,
  compactU16: substrateBindings._void,
  compactU32: substrateBindings._void,
  compactU64: substrateBindings._void,
  compactU128: substrateBindings._void,
  compactU256: substrateBindings._void,
  void: substrateBindings._void,
  perId: substrateBindings.compactNumber
});
// A single (optionally named) field of a composite or enum variant.
const field = substrateBindings.Struct({
  name: substrateBindings.Option(substrateBindings.str),
  ty: typeRef,
  typeName: substrateBindings.Option(substrateBindings.str)
});
|
|
// Body of one entry in the merkleized type tree. Enumerations are flattened:
// each variant becomes its own `enumeration` entry sharing the same typeId
// (see constructTypeDef / getLookup below).
const typeDef = substrateBindings.ScaleEnum({
  composite: substrateBindings.Vector(field),
  enumeration: substrateBindings.Struct({
    name: substrateBindings.str,
    fields: substrateBindings.Vector(field),
    index: substrateBindings.compactNumber
  }),
  sequence: typeRef,
  array: substrateBindings.Struct({
    len: substrateBindings.u32,
    typeParam: typeRef
  }),
  tuple: substrateBindings.Vector(typeRef),
  bitSequence: substrateBindings.Struct({
    numBytes: substrateBindings.u8,
    leastSignificantBitFirst: substrateBindings.bool
  })
});
// One merkle-tree leaf: the type's path, its definition and its (dense) id.
const lookupType = substrateBindings.Struct({
  path: substrateBindings.Vector(substrateBindings.str),
  typeDef,
  typeId: substrateBindings.compactNumber
});
const lookup = substrateBindings.Vector(lookupType);
|
|
// Metadata about the extrinsic format that is hashed into the digest:
// extrinsic version plus type refs for address/call/signature and for each
// signed extension (split into in-extrinsic and in-signed-data parts).
const extrinsicMetadata = substrateBindings.Struct({
  version: substrateBindings.u8,
  addressTy: typeRef,
  callTy: typeRef,
  signatureTy: typeRef,
  signedExtensions: substrateBindings.Vector(
    substrateBindings.Struct({
      identifier: substrateBindings.str,
      includedInExtrinsic: typeRef,
      includedInSignedData: typeRef
    })
  )
});
// Decoder for a submitted extrinsic: compact length prefix, then the format
// marker (decodes to { version, type } — see getProofForExtrinsic), then the
// remaining bytes. Indexing with [1] selects the decoder half of each codec.
const extrinsicDec = substrateBindings.Tuple.dec(
  substrateBindings.compact[1],
  substrateBindings.extrinsicFormat[1],
  substrateBindings.Bytes(Infinity)[1]
);
// Wire shape of a generated proof; generateProof (inside merkleizeMetadata)
// encodes exactly this layout field by field.
const proof = substrateBindings.Struct({
  leaves: lookup,
  leafIdxs: substrateBindings.Vector(substrateBindings.u32),
  proofs: substrateBindings.Vector(hash),
  extrinsic: extrinsicMetadata,
  info: extraInfo
});
|
|
|
|
/**
 * Collect the frame-type ids reachable from the extrinsic's entry points
 * (call, address, signature, signed extensions) and assign each a dense id.
 *
 * Empty composites/variants/tuples are skipped (nothing to encode); other
 * tags (primitive, compact, …) are neither recorded nor traversed.
 *
 * @param {object} metadata - unified metadata; only `extrinsic` is read.
 * @param {Map} definitions - frameId -> lookup entry with a `.def` field.
 * @returns {Map} frameId -> dense index (positions in ascending-id order).
 */
const getAccessibleTypes = (metadata, definitions) => {
  const reachable = new Set();
  const visit = (id) => {
    if (reachable.has(id)) return;
    const { tag, value } = definitions.get(id).def;
    switch (tag) {
      case "composite": {
        if (value.length === 0) break;
        reachable.add(id);
        for (const { type } of value) visit(type);
        break;
      }
      case "variant": {
        if (value.length === 0) break;
        reachable.add(id);
        for (const { fields } of value)
          for (const { type } of fields) visit(type);
        break;
      }
      case "tuple": {
        if (value.length === 0) break;
        reachable.add(id);
        for (const inner of value) visit(inner);
        break;
      }
      case "sequence":
        // Added before recursing, which also breaks cycles.
        reachable.add(id);
        visit(value);
        break;
      case "array":
        reachable.add(id);
        visit(value.type);
        break;
      case "bitSequence":
        reachable.add(id);
    }
  };
  visit(metadata.extrinsic.call);
  visit(metadata.extrinsic.address);
  visit(metadata.extrinsic.signature);
  for (const { type, additionalSigned } of metadata.extrinsic.signedExtensions) {
    visit(type);
    visit(additionalSigned);
  }
  // Dense re-indexing: frame id -> position within the ascending id order.
  const ordered = [...reachable].sort((a, b) => a - b);
  return new Map(ordered.map((id, idx) => [id, idx]));
};
|
|
|
|
// Byte width of each bit-store primitive accepted inside a BitSequence
// (used by constructTypeDef to compute `numBytes`).
const bitSequenceBytes = {
  u8: 1,
  u16: 2,
  u32: 4,
  u64: 8
};
|
|
/**
 * Translate one frame type definition into its merkleized `typeDef` entries.
 *
 * Most tags produce a single entry; a `variant` is flattened into one
 * `enumeration` entry per variant (all sharing the caller's typeId).
 * Throws for tags that getAccessibleTypes should have excluded.
 *
 * @param {Map} definitions - frameId -> lookup entry with a `.def` field.
 * @param {Function} getTypeRef - frameId -> typeRef value.
 * @param {Function} getPrimitive - frameId -> primitive tag (bitSequence only).
 * @param {number} frameId - id of the definition to translate.
 * @returns {Array<{tag: string, value: *}>} the resulting typeDef entries.
 */
const constructTypeDef = (definitions, getTypeRef, getPrimitive, frameId) => {
  const { def } = definitions.get(frameId);
  const { tag, value } = def;
  // Shared field translation for composites and enum variants.
  const mapField = (f) => ({
    name: f.name,
    typeName: f.typeName,
    ty: getTypeRef(f.type)
  });
  switch (tag) {
    case "composite":
      return [{ tag, value: value.map(mapField) }];
    case "variant":
      return value.map((variant) => ({
        tag: "enumeration",
        value: {
          name: variant.name,
          index: variant.index,
          fields: variant.fields.map(mapField)
        }
      }));
    case "sequence":
      return [{ tag, value: getTypeRef(value) }];
    case "array":
      return [
        { tag, value: { len: value.len, typeParam: getTypeRef(value.type) } }
      ];
    case "tuple":
      return [{ tag, value: value.map((id) => getTypeRef(id)) }];
    case "bitSequence": {
      const primitive = getPrimitive(value.bitStoreType);
      const numBytes = bitSequenceBytes[primitive];
      if (!numBytes) throw new Error("Invalid primitive for BitSequence");
      // Bit order is recognized purely from the order type's path.
      const orderPath = definitions.get(value.bitOrderType).path;
      const leastSignificantBitFirst = orderPath.includes("Lsb0");
      if (!leastSignificantBitFirst && !orderPath.includes("Msb0"))
        throw new Error("BitOrderType not recognized");
      return [
        { tag: "bitSequence", value: { numBytes, leastSignificantBitFirst } }
      ];
    }
  }
  throw new Error(`FrameId(${frameId}) should have been filtered out`);
};
|
|
/**
 * Build the flat, sorted list of merkle-tree leaves for every accessible type.
 *
 * Each (frameId -> typeId) pair expands via constructTypeDef: variants yield
 * several `enumeration` rows sharing one typeId. Rows are ordered by typeId,
 * with same-id enumeration rows ordered by variant index; any other id clash
 * is a hard error.
 */
const getLookup = (definitions, accessibleTypes, getTypeRef, getPrimitive) => {
  const entries = [];
  for (const [frameId, typeId] of accessibleTypes) {
    const { path } = definitions.get(frameId);
    const defs = constructTypeDef(definitions, getTypeRef, getPrimitive, frameId);
    for (const typeDef of defs) {
      entries.push({ path, typeId, typeDef });
    }
  }
  entries.sort((left, right) => {
    if (left.typeId !== right.typeId) return left.typeId - right.typeId;
    if (left.typeDef.tag !== "enumeration" || right.typeDef.tag !== "enumeration")
      throw new Error("Found two types with same id");
    return left.typeDef.value.index - right.typeDef.value.index;
  });
  return entries;
};
|
|
|
|
/**
 * Decode and unify the SCALE-encoded metadata blob.
 *
 * Only metadata v15+ is accepted — earlier versions lack the information
 * this module needs. Decoding failures are rethrown as-is when they carry
 * an error object, otherwise wrapped in a generic message.
 */
const getMetadata = (input) => {
  try {
    const decoded = substrateBindings.decAnyMetadata(input);
    const unified = substrateBindings.unifyMetadata(decoded);
    if (unified.version <= 14) throw new Error("Wrong metadata version");
    return unified;
  } catch (e) {
    throw e || new Error("Unable to decode metadata");
  }
};
|
|
|
|
// Accept either a hex string (decoded via utils.fromHex) or an
// already-decoded byte array, which is returned untouched.
const toBytes = (input) => {
  if (typeof input === "string") return utils.fromHex(input);
  return input;
};
// typeRef tag to use when a `compact` wraps the given primitive. The "null"
// key handles getPrimitive returning null (compact over the unit type):
// `compactTypeRefs[null]` coerces to the string key "null".
const compactTypeRefs = {
  null: "void",
  u8: "compactU8",
  u16: "compactU16",
  u32: "compactU32",
  u64: "compactU64",
  u128: "compactU128",
  u256: "compactU256"
};
|
|
|
|
// Codec used to skip over a value of each non-`perId` typeRef tag while
// walking an encoded payload (see innerDecodeAndCollect, which uses only the
// decoder half via index [1]).
const typeRefDecoders = {
  // bool and char are both decoded as a single byte on the wire.
  bool: substrateBindings.u8,
  char: substrateBindings.u8,
  str: substrateBindings.str,
  u8: substrateBindings.u8,
  u16: substrateBindings.u16,
  u32: substrateBindings.u32,
  u64: substrateBindings.u64,
  u128: substrateBindings.u128,
  u256: substrateBindings.u256,
  i8: substrateBindings.i8,
  i16: substrateBindings.i16,
  i32: substrateBindings.i32,
  i64: substrateBindings.i64,
  i128: substrateBindings.i128,
  i256: substrateBindings.i256,
  void: substrateBindings._void,
  // Every compact width shares the same variable-length compact decoder.
  compactU8: substrateBindings.compact,
  compactU16: substrateBindings.compact,
  compactU32: substrateBindings.compact,
  compactU64: substrateBindings.compact,
  compactU128: substrateBindings.compact,
  compactU256: substrateBindings.compact
};
|
|
// Decode one value of type `typeRef` from the shared `input` stream,
// recording in `collected` the lookup-row indexes that the payload actually
// exercises. Non-`perId` refs (primitives/compacts) are decoded and their
// values discarded — decoding only advances the stream.
const innerDecodeAndCollect = (input, typeRef, idToLookups, lookup, collected) => {
  if (typeRef.tag !== "perId") {
    // [1] selects the decoder half of the codec; result intentionally ignored.
    typeRefDecoders[typeRef.tag][1](input);
    return;
  }
  // Recurse with the same stream and accumulators.
  const handleTypeRef = (typeRef2) => {
    innerDecodeAndCollect(input, typeRef2, idToLookups, lookup, collected);
  };
  const lookupIdxs = idToLookups.get(typeRef.value);
  const [currentIdx] = lookupIdxs;
  const current = lookup[currentIdx];
  // A single row means a non-enumeration type: it is used unconditionally.
  if (lookupIdxs.length === 1) collected.add(currentIdx);
  switch (current.typeDef.tag) {
    case "enumeration": {
      // Multiple rows share this typeId (one per variant); only the variant
      // selected by the discriminant byte is collected and descended into.
      const selectedIdx = substrateBindings.u8.dec(input);
      const [selected, collectedIdx] = lookupIdxs.map(
        (lookupIdx) => [lookup[lookupIdx].typeDef, lookupIdx]
      ).find(([x]) => x.value.index === selectedIdx);
      collected.add(collectedIdx);
      selected.value.fields.forEach(({ ty }) => {
        handleTypeRef(ty);
      });
      break;
    }
    case "sequence": {
      // Length-prefixed: decode the compact length, then each element.
      const len = substrateBindings.compact.dec(input);
      for (let i = 0; i < len; i++) handleTypeRef(current.typeDef.value);
      break;
    }
    case "array": {
      // Fixed length comes from the type definition, not the payload.
      for (let i = 0; i < current.typeDef.value.len; i++)
        handleTypeRef(current.typeDef.value.typeParam);
      break;
    }
    case "composite": {
      current.typeDef.value.forEach((x) => {
        handleTypeRef(x.ty);
      });
      break;
    }
    case "tuple": {
      current.typeDef.value.forEach(handleTypeRef);
      break;
    }
    case "bitSequence":
      throw new Error("bitSequence is not supported");
  }
};
|
|
/**
 * Walk `data` once per entry in `typeRefs` and return the sorted list of
 * lookup-row indexes the payload actually touches (the "known leaves").
 *
 * The createDecoder call runs a callback that only captures the decoder's
 * internal input-stream object so the per-type decoders below can share it.
 */
const decodeAndCollectKnownLeafs = (data, typeRefs, lookup) => {
  let input = new Uint8Array();
  substrateBindings.createDecoder((raw) => {
    input = raw;
  })(data);
  // Group lookup-row indexes by typeId (enumerations span multiple rows).
  const idToLookups = new Map();
  lookup.forEach((entry, idx) => {
    const existing = idToLookups.get(entry.typeId);
    if (existing) existing.push(idx);
    else idToLookups.set(entry.typeId, [idx]);
  });
  const collected = new Set();
  for (const ref of typeRefs) {
    innerDecodeAndCollect(input, ref, idToLookups, lookup, collected);
  }
  return [...collected].sort((a, b) => a - b);
};
|
|
|
|
// Level (root = 0) of the node at breadth-first index `idx` in a binary tree.
function getLevelFromIdx(idx) {
  return Math.log2(idx + 1) | 0;
}
// Breadth-first index of the ancestor `nLevels` levels above node `from`.
function getAncestorIdx(from, nLevels) {
  return ((from + 1) >> nLevels) - 1;
}
|
|
// Compute which merkle-tree nodes must accompany the known leaves so a
// verifier can recompute the root. The complete binary tree is laid out
// breadth-first in an array: node i has children 2i+1 / 2i+2, and the
// `leaves.length` leaves occupy the tail starting at index
// `leaves.length - 1` (see getHashTree in merkleizeMetadata).
// `knownLeavesIdxs` are sorted indexes into `leaves`.
function getProofData(leaves, knownLeavesIdxs) {
  const knownLeaves = knownLeavesIdxs.map((idx) => leaves[idx]);
  const startingIdx = leaves.length - 1;
  // Translate leaf positions into tree-node indexes.
  const leafIdxs = knownLeavesIdxs.map((idx) => startingIdx + idx);
  const proofIdxs = [];
  if (leafIdxs.length) {
    // When the leaf row straddles the last two tree levels (non-power-of-two
    // leaf count), depth-first order differs from left-to-right leaf order:
    // rotate so the deepest-level leaves — visited first by `traverse` —
    // come first, keeping `targetIdx` monotonic below.
    const nLevels = getLevelFromIdx(leafIdxs.at(-1));
    const splitPosition = Math.pow(2, nLevels) - 1;
    const splitIdx = leafIdxs.findIndex((x) => x >= splitPosition);
    if (splitIdx > 0) {
      leafIdxs.unshift(...leafIdxs.splice(splitIdx));
      knownLeaves.unshift(...knownLeaves.splice(splitIdx));
    }
  }
  let targetIdx = 0;
  // Depth-first walk: a subtree containing no known leaf contributes a
  // single proof hash; known leaves themselves are skipped (the verifier
  // already has them).
  const traverse = (nodeIdx) => {
    if (targetIdx === leafIdxs.length) {
      // No known leaves remain — this whole subtree is proof material.
      proofIdxs.push(nodeIdx);
      return;
    }
    const target = leafIdxs[targetIdx];
    if (target === nodeIdx) {
      // Reached a known leaf: advance to the next one, emit nothing.
      ++targetIdx;
      return;
    }
    const currentLevel = getLevelFromIdx(nodeIdx);
    const targetLevel = getLevelFromIdx(target);
    // If the next known leaf is not inside this subtree, emit this node
    // as a proof hash and stop descending.
    if (nodeIdx !== getAncestorIdx(target, targetLevel - currentLevel)) {
      proofIdxs.push(nodeIdx);
      return;
    }
    const leftSon = 2 * nodeIdx + 1;
    traverse(leftSon);
    traverse(leftSon + 1);
  };
  traverse(0);
  return {
    leaves: knownLeaves,
    leafIdxs,
    proofIdxs
  };
}
|
|
|
|
// Validate an optional caller-provided hint against the decoded on-chain
// value. A nullish `received` means "no expectation" and always passes;
// otherwise the two must match exactly.
const assertExpected = (name, expected, received) => {
  if (received == null) return;
  if (received === expected) return;
  throw new Error(
    `${name} not expected. Received ${received} expected ${expected}`
  );
};
|
|
// Build the merkleized-metadata toolkit for a chain: computing the metadata
// digest and generating inclusion proofs for extrinsics (presumably the
// "merkleized metadata" scheme for offline signers — confirm against spec).
// `metadataBytes` is the SCALE-encoded metadata (v15+ only, see getMetadata);
// the options carry chain properties not present in the metadata itself
// (decimals, tokenSymbol) plus optional hints (specName, specVersion,
// base58Prefix) that are cross-checked against the decoded metadata.
const merkleizeMetadata = (metadataBytes, {
  decimals,
  tokenSymbol,
  ...hinted
}) => {
  const metadata = getMetadata(metadataBytes);
  // Only extrinsic format v4 is supported by this implementation.
  const checkedVersion = metadata.extrinsic.version.includes(4) ? 4 : null;
  if (checkedVersion == null) throw new Error("Only extrinsic v4 is supported");
  const { ss58Prefix, buildDefinition } = metadataBuilders.getDynamicBuilder(
    metadataBuilders.getLookupFn(metadata)
  );
  if (ss58Prefix == null) throw new Error("SS58 prefix not found in metadata");
  assertExpected("SS58 prefix", ss58Prefix, hinted.base58Prefix);
  // specName/specVersion come from the System.Version runtime constant.
  const version = metadata.pallets.find((x) => x.name === "System")?.constants.find((x) => x.name === "Version");
  if (version == null) throw new Error("System.Version constant not found");
  const { spec_name: specName, spec_version: specVersion } = buildDefinition(
    version.type
  ).dec(version.value);
  if (typeof specName !== "string" || typeof specVersion !== "number")
    throw new Error("Spec name or spec version not found");
  assertExpected("Spec name", specName, hinted.specName);
  assertExpected("Spec version", specVersion, hinted.specVersion);
  // Extra info folded into the digest (shape of extraInfoInner above).
  const info = {
    decimals,
    tokenSymbol,
    specVersion,
    specName,
    base58Prefix: ss58Prefix
  };
  // frameId -> lookup entry, for O(1) access during the traversals below.
  const definitions = new Map(
    metadata.lookup.map((value) => [value.id, value])
  );
  const accessibleTypes = getAccessibleTypes(metadata, definitions);
  // Resolve a frame type to the primitive tag it wraps, unwrapping
  // single-field composites/tuples; returns null for the empty (unit) case.
  const getPrimitive = (frameId) => {
    const {
      def: { tag, value }
    } = definitions.get(frameId);
    if (tag === "primitive") return value.tag;
    if (tag !== "composite" && tag !== "tuple" || value.length > 1)
      throw new Error("The provided definition doesn't map to a primitive");
    return value.length === 0 ? null : getPrimitive(tag === "tuple" ? value[0] : value[0].type);
  };
  // Map a frame type id to its typeRef: primitives/compacts get inline tags,
  // accessible types get `perId` with their dense index, everything else void.
  const getTypeRef = (frameId) => {
    const { def } = definitions.get(frameId);
    if (def.tag === "primitive") return { tag: def.value.tag, value: void 0 };
    if (def.tag === "compact") {
      const primitive = getPrimitive(def.value);
      const tag = compactTypeRefs[primitive];
      if (!tag) throw new Error("Invalid primitive for Compact");
      return { tag, value: void 0 };
    }
    return accessibleTypes.has(frameId) ? { tag: "perId", value: accessibleTypes.get(frameId) } : { tag: "void", value: void 0 };
  };
  // Extrinsic metadata in the shape expected by the extrinsicMetadata codec.
  const extrinsic = {
    version: checkedVersion,
    addressTy: getTypeRef(metadata.extrinsic.address),
    callTy: getTypeRef(metadata.extrinsic.call),
    signatureTy: getTypeRef(metadata.extrinsic.signature),
    signedExtensions: metadata.extrinsic.signedExtensions.map((se) => ({
      identifier: se.identifier,
      includedInExtrinsic: getTypeRef(se.type),
      includedInSignedData: getTypeRef(se.additionalSigned)
    }))
  };
  const lookup = getLookup(
    definitions,
    accessibleTypes,
    getTypeRef,
    getPrimitive
  );
  // SCALE-encoded leaves of the merkle tree.
  const lookupEncoded = lookup.map(lookupType.enc);
  // Lazily-built complete binary tree, stored breadth-first in one array:
  // node i has children 2i+1 / 2i+2; leaves occupy the tail.
  let hashTree;
  const getHashTree = () => {
    if (hashTree) return hashTree;
    // With no leaves the root is defined as 32 zero bytes.
    if (!lookupEncoded.length) return hashTree = [new Uint8Array(32).fill(0)];
    hashTree = new Array(lookupEncoded.length * 2 - 1);
    let leavesStartIdx = lookupEncoded.length - 1;
    for (let i = 0; i < lookupEncoded.length; i++)
      hashTree[leavesStartIdx + i] = substrateBindings.Blake3256(lookupEncoded[i]);
    // Fill inner nodes right-to-left: parent = Blake3(left || right).
    for (let i = hashTree.length - 2; i > 0; i -= 2)
      hashTree[(i - 1) / 2] = substrateBindings.Blake3256(
        utils.mergeUint8([hashTree[i], hashTree[i + 1]])
      );
    return hashTree;
  };
  // Memoized digest: Blake3 of the SCALE-encoded V1 metadataDigest value.
  let digested;
  const digest = () => {
    if (digested) return digested;
    const rootLookupHash = getHashTree()[0];
    const digest2 = {
      tag: "V1",
      value: {
        typeInformationTreeRoot: rootLookupHash,
        extrinsicMetadataHash: substrateBindings.Blake3256(extrinsicMetadata.enc(extrinsic)),
        ...info
      }
    };
    return digested = substrateBindings.Blake3256(metadataDigest.enc(digest2));
  };
  // Encode a proof for the given (sorted) known-leaf indexes; the byte
  // layout matches the `proof` codec declared above, field by field.
  const generateProof = (knownIndexes) => {
    const proofData = getProofData(lookupEncoded, knownIndexes);
    const hashTree2 = getHashTree();
    const proofs = proofData.proofIdxs.map((idx) => hashTree2[idx]);
    return utils.mergeUint8([
      substrateBindings.compact.enc(proofData.leaves.length),
      ...proofData.leaves,
      substrateBindings.compact.enc(proofData.leafIdxs.length),
      ...proofData.leafIdxs.map((x) => substrateBindings.u32.enc(x)),
      substrateBindings.compact.enc(proofs.length),
      ...proofs,
      extrinsicMetadata.enc(extrinsic),
      extraInfo.enc(info)
    ]);
  };
  // Proof for a signing payload: call data followed by the signed-extension
  // data (in-extrinsic parts first, then in-signed-data parts).
  const getProofForExtrinsicPayload = (extrinsicPayload) => {
    const typeRefs = [
      extrinsic.callTy,
      ...extrinsic.signedExtensions.map((x) => x.includedInExtrinsic),
      ...extrinsic.signedExtensions.map((x) => x.includedInSignedData)
    ];
    return generateProof(
      decodeAndCollectKnownLeafs(extrinsicPayload, typeRefs, lookup)
    );
  };
  // Convenience wrapper: assemble the payload from its three parts (each
  // part may be a hex string or raw bytes) and delegate to the above.
  const getProofForExtrinsicParts = (callData, includedInExtrinsic, includedInSignedData) => {
    const bytes = utils.mergeUint8(
      [callData, includedInExtrinsic, includedInSignedData].map(toBytes)
    );
    return getProofForExtrinsicPayload(bytes);
  };
  // Proof for a full submitted extrinsic; `txAdditionalSigned`, when given,
  // is appended so the in-signed-data type refs can also be walked.
  const getProofForExtrinsic = (transaction, txAdditionalSigned) => {
    let [, { version: version2, type }, bytes] = extrinsicDec(transaction);
    if (version2 !== extrinsic.version)
      throw new Error("Incorrect extrinsic version");
    // Signed extrinsics carry address + signature + extensions before the
    // call; unsigned ones carry only the call.
    const typeRefs = type === "signed" ? [
      extrinsic.addressTy,
      extrinsic.signatureTy,
      ...extrinsic.signedExtensions.map((x) => x.includedInExtrinsic),
      extrinsic.callTy
    ] : [extrinsic.callTy];
    if (txAdditionalSigned) {
      bytes = utils.mergeUint8([bytes, toBytes(txAdditionalSigned)]);
      typeRefs.push(
        ...extrinsic.signedExtensions.map((x) => x.includedInSignedData)
      );
    }
    return generateProof(decodeAndCollectKnownLeafs(bytes, typeRefs, lookup));
  };
  return {
    digest,
    getProofForExtrinsic,
    getProofForExtrinsicParts,
    getProofForExtrinsicPayload
  };
};
|
|
|
|
// Public API: the main entry point plus the codecs needed to encode/decode
// the structures it produces.
exports.extraInfo = extraInfo;
exports.extrinsicMetadata = extrinsicMetadata;
exports.hash = hash;
exports.lookup = lookup;
exports.lookupType = lookupType;
exports.merkleizeMetadata = merkleizeMetadata;
exports.proof = proof;
//# sourceMappingURL=index.js.map
|