Initial commit: Pezkuwi SubQuery indexer

- pezkuwi.yaml: Relay chain staking indexer (rewards, slashes, pools, transfers, era info)
- pezkuwi-assethub.yaml: Asset Hub indexer (NominationPools, asset transfers)
- GraphQL schema for staking data entities
- Handler mappings from Nova SubQuery base
This commit is contained in:
2026-02-12 23:55:13 +03:00
commit 0812cf9e7a
38 changed files with 16433 additions and 0 deletions
+31
View File
@@ -0,0 +1,31 @@
# Dependencies
node_modules/
# Build output
dist/
src/types/
# IDE
.idea/
.vscode/
*.swp
*.swo
# OS
.DS_Store
Thumbs.db
# Logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Environment
.env
.env.local
.env.*.local
# SubQuery
.data/
project.yaml
+201
View File
@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
+10
View File
@@ -0,0 +1,10 @@
SubQuery Pte Ltd
Copyright 2022-2023 SubQuery PTE. LTD.
This product includes software developed at SubQuery PTE. LTD.
SubQuery Nova
Copyright 2022-2023 Novasama Technologies PTE. LTD.
This product includes software developed at Novasama Technologies PTE. LTD.
License Rights transferred from Novasama Technologies PTE. LTD to Novasama Technologies GmbH starting from 1st of April 2023
+54
View File
@@ -0,0 +1,54 @@
# Pezkuwi SubQuery
SubQuery indexer for Pezkuwi blockchain - provides staking rewards, NominationPools, transfers and era validator data for PezWallet.
## Projects
- **pezkuwi.yaml** - Pezkuwi Relay Chain staking indexer
- **pezkuwi-assethub.yaml** - Pezkuwi Asset Hub NominationPools and transfers indexer
## Features
- Staking rewards (Reward/Rewarded events)
- Nomination Pool rewards (PaidOut events)
- Slashing events (Slash/Slashed, PoolSlashed, UnbondingPoolSlashed)
- Native transfers (balances.Transfer)
- Asset transfers (assets.Transferred) - Asset Hub only
- Era validator information (StakersElected/StakingElection)
- Full transaction history (signed extrinsics)
## Get Started
### Install dependencies
```shell
yarn install
```
### Build
```shell
yarn build
```
### Local Development
```shell
sh local-runner.sh pezkuwi.yaml
```
### Deploy to SubQuery Network
```shell
./node_modules/.bin/subql publish -f pezkuwi.yaml
./node_modules/.bin/subql publish -f pezkuwi-assethub.yaml
```
## Endpoints
- **Pezkuwi Relay**: wss://rpc.pezkuwichain.io
- **Pezkuwi Asset Hub**: wss://asset-hub-rpc.pezkuwichain.io
## License
Apache 2.0 - Based on Nova SubQuery implementation
+55
View File
@@ -0,0 +1,55 @@
version: "3"
services:
postgres:
container_name: "postgres-${PROJECT_PATH}"
image: postgres:16-alpine
ports:
- 5432:5432
volumes:
- .data/postgres:/var/lib/postgresql/data
- ./docker/init:/docker-entrypoint-initdb.d/
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres"]
interval: 5s
timeout: 5s
retries: 5
environment:
POSTGRES_PASSWORD: postgres
subquery-node:
container_name: "node-${PROJECT_PATH}"
image: onfinality/subql-node:v5.6.0
depends_on:
postgres:
condition: service_healthy
restart: always
environment:
DB_USER: postgres
DB_PASS: postgres
DB_DATABASE: postgres
DB_HOST: postgres
DB_PORT: 5432
volumes:
- ./:/app
command:
- -f=/app/${PROJECT_PATH}
- --disable-historical=true
- --batch-size=1
graphql-engine:
container_name: "query-${PROJECT_PATH}"
image: onfinality/subql-query:v1.5.0
ports:
- 3000:3000
depends_on:
- subquery-node
restart: always
environment:
DB_USER: postgres
DB_PASS: postgres
DB_DATABASE: postgres
DB_HOST: postgres
DB_PORT: 5432
command:
- --name=app
- --playground
+1
View File
@@ -0,0 +1 @@
CREATE EXTENSION IF NOT EXISTS btree_gist;
+12
View File
@@ -0,0 +1,12 @@
# Build-time arguments: Node image tag for the build stage and the
# subql-node tag for the runtime image. Override with --build-arg if needed.
ARG NODE_JS_IMAGE_VERSION=20-alpine
ARG SUBQL_NODE_IMAGE_VERSION=v5.6.0
# Stage 1: install dependencies, generate types (codegen) and compile mappings.
FROM node:${NODE_JS_IMAGE_VERSION} AS build
ADD . /project
WORKDIR /project
RUN yarn install && yarn codegen && yarn build
# Stage 2: official subql-node runtime image with the built project copied in.
FROM onfinality/subql-node:${SUBQL_NODE_IMAGE_VERSION}
COPY --from=build /project /project
+4
View File
@@ -0,0 +1,4 @@
/**
 * Jest configuration: transpile TypeScript with ts-jest and pick up any
 * file under a `tests/` directory whose name ends in `.test.ts`.
 */
const jestConfig = {
  preset: "ts-jest",
  testMatch: ["**/tests/**/*.test.ts"],
};

export default jestConfig;
+24
View File
@@ -0,0 +1,24 @@
#!/bin/bash
# Local development runner: wipes previous state, rebuilds the project and
# starts the SubQuery stack via docker-compose for the given manifest.
#
# Usage: ./local-runner.sh <project>.yaml

SCRIPT_PATH=$(dirname "$0")
# Quote the path so directories with spaces work; bail out if cd fails.
cd "${SCRIPT_PATH}" || exit 1

# Require the manifest argument. "$1" must be quoted — an unquoted empty
# argument makes `[ -z $1 ]` degenerate to `[ -z ]`, which is always true
# only by accident and breaks when $1 contains spaces.
if [ -z "$1" ]; then
echo "Provide a path to project-{name}.yaml file"
exit 1
fi

# docker-compose.yml uses PROJECT_PATH to name containers and locate the manifest.
export PROJECT_PATH=$1

# Best-effort cleanup of a previous run; failures here are tolerated on purpose
# (e.g. no containers exist yet), which is why `set -e` comes after.
docker rm -f $(docker-compose ps -a -q)
sudo rm -rf .data/
sudo rm -rf dist/

# If any command below fails - script will stop
set -e
yarn
yarn codegen
yarn build
yarn start:docker
+42
View File
@@ -0,0 +1,42 @@
{
"name": "subquery-pezkuwi",
"version": "1.0.0",
"description": "Pezkuwi SubQuery - Staking rewards, NominationPools, transfers indexer for PezWallet",
"main": "dist/index.js",
"scripts": {
"build": "./node_modules/.bin/subql build",
"start:docker": "docker-compose pull && docker-compose up --remove-orphans",
"codegen": "./node_modules/.bin/subql codegen",
"validate": "./node_modules/.bin/subql validate",
"local-publish": "cd ./scripts && ./local-publish.sh"
},
"files": [
"dist",
"schema.graphql",
"pezkuwi.yaml",
"pezkuwi-assethub.yaml"
],
"author": "Pezkuwi Team",
"license": "Apache-2.0",
"dependencies": {
"lodash": "^4.17.21"
},
"devDependencies": {
"@polkadot/api": "^16",
"@polkadot/api-derive": "^16",
"@polkadot/types": "^16",
"@polkadot/types-augment": "^16",
"@polkadot/types-codec": "^16",
"@subql/cli": "latest",
"@subql/types": "latest",
"@types/jest": "^29.5.1",
"jest": "^29.5.0",
"prettier": "3.1.0",
"ts-jest": "^29.1.0",
"ts-node": "^10.9.1",
"typescript": "^5.2.2"
},
"resolutions": {
"ipfs-unixfs": "6.0.6"
}
}
+60
View File
@@ -0,0 +1,60 @@
specVersion: 1.0.0
name: subquery-pezkuwi-assethub
version: 1.0.0
runner:
node:
name: "@subql/node"
version: ">=4.6.6"
query:
name: "@subql/query"
version: "*"
description: Pezkuwi Asset Hub SubQuery - Indexes NominationPools, transfers, swaps
repository: https://github.com/pezkuwichain/pezkuwi-subquery
schema:
file: ./schema.graphql
network:
chainId: "0x00d0e1d0581c3cd5c5768652d52f4520184018b44f56a2ae1e0dc9d65c00c948"
endpoint:
- wss://asset-hub-rpc.pezkuwichain.io
dataSources:
- name: main
kind: substrate/Runtime
startBlock: 1
mapping:
file: ./dist/index.js
handlers:
# Signed extrinsics for history
- handler: handleHistoryElement
kind: substrate/CallHandler
filter:
isSigned: true
# Nomination Pools rewards
- handler: handlePoolReward
kind: substrate/EventHandler
filter:
module: nominationPools
method: PaidOut
# Pool bonded slash
- handler: handlePoolBondedSlash
kind: substrate/EventHandler
filter:
module: nominationPools
method: PoolSlashed
# Pool unbonding slash
- handler: handlePoolUnbondingSlash
kind: substrate/EventHandler
filter:
module: nominationPools
method: UnbondingPoolSlashed
# Native transfers
- handler: handleTransfer
kind: substrate/EventHandler
filter:
module: balances
method: Transfer
# Asset transfers
- handler: handleAssetTransfer
kind: substrate/EventHandler
filter:
module: assets
method: Transferred
+91
View File
@@ -0,0 +1,91 @@
specVersion: 1.0.0
name: subquery-pezkuwi-staking
version: 1.0.0
runner:
node:
name: "@subql/node"
version: ">=4.6.6"
query:
name: "@subql/query"
version: "*"
description: Pezkuwi Staking SubQuery - Indexes staking rewards, slashes, era data for PezWallet
repository: https://github.com/pezkuwichain/pezkuwi-subquery
schema:
file: ./schema.graphql
network:
chainId: "0xbb4a61ab0c4b8c12f5eab71d0c86c482e03a275ecdafee678dea712474d33d75"
endpoint:
- wss://rpc.pezkuwichain.io
- wss://mainnet.pezkuwichain.io
dataSources:
- name: main
kind: substrate/Runtime
startBlock: 1
mapping:
file: ./dist/index.js
handlers:
# Signed extrinsics for history
- handler: handleHistoryElement
kind: substrate/CallHandler
filter:
isSigned: true
# Staking rewards (old format)
- handler: handleReward
kind: substrate/EventHandler
filter:
module: staking
method: Reward
# Staking rewards (new format - Polkadot 2.0)
- handler: handleRewarded
kind: substrate/EventHandler
filter:
module: staking
method: Rewarded
# Nomination Pools rewards
- handler: handlePoolReward
kind: substrate/EventHandler
filter:
module: nominationPools
method: PaidOut
# Slashing (old format)
- handler: handleSlash
kind: substrate/EventHandler
filter:
module: staking
method: Slash
# Slashing (new format)
- handler: handleSlashed
kind: substrate/EventHandler
filter:
module: staking
method: Slashed
# Pool bonded slash
- handler: handlePoolBondedSlash
kind: substrate/EventHandler
filter:
module: nominationPools
method: PoolSlashed
# Pool unbonding slash
- handler: handlePoolUnbondingSlash
kind: substrate/EventHandler
filter:
module: nominationPools
method: UnbondingPoolSlashed
# Transfers
- handler: handleTransfer
kind: substrate/EventHandler
filter:
module: balances
method: Transfer
# Era changes (old format)
- handler: handleNewEra
kind: substrate/EventHandler
filter:
module: staking
method: StakingElection
# Era changes (new format - Polkadot 2.0)
- handler: handleStakersElected
kind: substrate/EventHandler
filter:
module: staking
method: StakersElected
+125
View File
@@ -0,0 +1,125 @@
# ---- Embedded JSON payloads (stored inline on HistoryElement) ----
# Native balances.Transfer details.
type Transfer @jsonField {
  amount: String!
  to: String!
  from: String!
  fee: String!
  eventIdx: Int!
  success: Boolean!
}
# assets.Transferred details (Asset Hub manifest only).
type AssetTransfer @jsonField {
  assetId: String!
  amount: String!
  to: String!
  from: String!
  fee: String!
  eventIdx: Int!
  success: Boolean!
}
# Direct staking reward or slash attached to a history element.
type Reward @jsonField {
  eventIdx: Int!
  amount: String!
  # presumably true for a reward, false for a slash — confirm in handlers
  isReward: Boolean!
  era: Int
  stash: String
  validator: String
}
# Nomination-pool reward (PaidOut) or pool slash attached to a history element.
type PoolReward @jsonField {
  eventIdx: Int!
  amount: String!
  isReward: Boolean!
  poolId: Int!
}
# Asset-conversion swap details.
type Swap @jsonField {
  assetIdIn: String!
  amountIn: String!
  assetIdOut: String!
  amountOut: String!
  sender: String!
  receiver: String!
  assetIdFee: String!
  fee: String!
  eventIdx: Int!
  success: Boolean!
}
# Kind of balance change tracked by the reward entities.
enum RewardType {
  reward
  slash
}
# Per-event staking reward/slash for an account.
type AccountReward @entity {
  id: ID!
  address: String! @index
  blockNumber: Int! @index
  timestamp: BigInt!
  amount: BigInt!
  # NOTE(review): looks like a running total per address — confirm in handlers.
  accumulatedAmount: BigInt!
  type: RewardType!
}
# Per-event nomination-pool reward/slash for an account.
type AccountPoolReward @entity {
  id: ID!
  address: String! @index
  blockNumber: Int! @index
  timestamp: BigInt!
  amount: BigInt!
  accumulatedAmount: BigInt!
  type: RewardType!
  poolId: Int!
}
# Latest accumulated direct-staking reward amount, keyed by address.
type AccumulatedReward @entity {
  id: ID! #address
  amount: BigInt!
}
# Latest accumulated pool reward amount, keyed by address.
type AccumulatedPoolReward @entity {
  id: ID! #address
  amount: BigInt!
}
# Extrinsic summary embedded in a history element.
type Extrinsic @jsonField {
  hash: String!
  module: String!
  call: String!
  fee: String!
  success: Boolean!
}
# One row of account history; the optional payloads distinguish the kind of
# event (reward, pool reward, extrinsic, transfer, asset transfer, swap).
type HistoryElement @entity {
  id: ID!
  blockNumber: Int!
  extrinsicIdx: Int
  extrinsicHash: String
  timestamp: BigInt! @index
  address: String! @index
  reward: Reward
  poolReward: PoolReward
  extrinsic: Extrinsic
  transfer: Transfer
  assetTransfer: AssetTransfer
  swap: Swap
}
# Validator stake snapshot for one era.
type EraValidatorInfo @entity {
  id: ID!
  address: String! @index
  era: Int! @index
  total: BigInt!
  own: BigInt!
  others: [IndividualExposure]!
}
# Single nominator backing a validator within an era.
type IndividualExposure @jsonField {
  who: String!
  value: String!
}
# Handler errors recorded for post-mortem debugging.
type ErrorEvent @entity {
  id: ID!
  description: String!
}
+35
View File
@@ -0,0 +1,35 @@
#!/bin/bash
# Removes everything the sync script copied into each network folder, plus
# build artifacts (node_modules, dist, .data) and yarn.lock.

SCRIPT_PATH=$(dirname "$0")
# Repository root (parent of the scripts directory).
MAIN_DIRECTORY=${SCRIPT_PATH%/*}

# NOTE: parsing `ls` output breaks on exotic names; network folder names are
# expected to be plain identifiers.
folders=($(ls "${MAIN_DIRECTORY}/networks"))

for item in "${folders[@]}"
do
    printf " %s\n" "$item"
    # Quote all expansions so paths containing spaces do not word-split.
    network_dir="${MAIN_DIRECTORY}/networks/${item}"
    # Files copied in by the sync script.
    if [ -d "${network_dir}/src" ]; then
        rm -r "${network_dir}/src"
        rm "${network_dir}/tsconfig.json"
        rm "${network_dir}/schema.graphql"
        rm "${network_dir}/local-runner.sh"
        rm "${network_dir}/docker-compose.yml"
    fi
    # Build artifacts and local indexer state.
    if [ -d "${network_dir}/node_modules" ]; then
        rm -r "${network_dir}/node_modules"
    fi
    if [ -d "${network_dir}/dist" ]; then
        rm -r "${network_dir}/dist"
    fi
    if [ -d "${network_dir}/.data" ]; then
        rm -r "${network_dir}/.data"
    fi
    if [ -f "${network_dir}/yarn.lock" ]; then
        rm "${network_dir}/yarn.lock"
    fi
done
printf "Done !"
+18
View File
@@ -0,0 +1,18 @@
#!/bin/bash
# Publishes every project manifest in the repository root to IPFS via the
# SubQuery CLI, then archives the produced CID file under ../ipfs-cids/.
# NOTE: the loop deliberately continues past a failed publish so one broken
# manifest does not block the rest.

# Get a list of YAML files in alphabetical order
yamlFiles=($(ls ../*.yaml | sort))

for file in "${yamlFiles[@]}"; do
    # subql publish writes the CID to a dot-file named after the manifest.
    outputFileName=".$(basename "$file" .yaml)-cid"
    # Execute subql publish command; `&&` skips publishing when codegen fails.
    subql codegen -f "$file" && subql publish -f "$file"
    # Move or create the output file in the ipfs-cids folder
    mv "../$outputFileName" "../ipfs-cids/$outputFileName"
    echo "Command executed for $file. Output file: $outputFileName"
done

echo "All projects published successfully."
+17
View File
@@ -0,0 +1,17 @@
#!/bin/bash
# Copies the shared sources and tooling (src/, tsconfig, schema, runner,
# docker-compose) into every folder under <repo>/networks so each network
# can be built independently.
# NOTE(review): `scp` is used for purely local copies — it works like `cp -r`
# here, but plain `cp` would drop the ssh-tooling dependency; confirm before
# changing.
SCRIPT_PATH=$(dirname "$0")
# Repository root (parent of the scripts directory).
MAIN_DIRECTORY=${SCRIPT_PATH%/*}
folders=($(ls ${MAIN_DIRECTORY}/networks))
for item in ${folders[*]}
do
printf " %s\n" $item
scp -r ${MAIN_DIRECTORY}/src ${MAIN_DIRECTORY}/networks/$item
scp ${MAIN_DIRECTORY}/tsconfig.json ${MAIN_DIRECTORY}/networks/$item
scp ${MAIN_DIRECTORY}/schema.graphql ${MAIN_DIRECTORY}/networks/$item
scp ${MAIN_DIRECTORY}/local-runner.sh ${MAIN_DIRECTORY}/networks/$item
scp ${MAIN_DIRECTORY}/docker-compose.yml ${MAIN_DIRECTORY}/networks/$item
done
printf "Done !"
@@ -0,0 +1,58 @@
#!/usr/bin/env python3
import os
from jinja2 import Template
from table_representation import ProjectTableGenerator
from subquery_api import SubQueryDeploymentAPI
from telegram_notifications import TelegramNotifications
# Managed-service credentials and data sources for the dashboard generator.
token = os.getenv("SUBQUERY_TOKEN")  # SubQuery managed-service API token
organisation = "nova-wallet"  # organisation account queried on the managed service
# Chain registry used to map SubQuery projects onto Nova-supported networks.
nova_network_list = "https://raw.githubusercontent.com/nova-wallet/nova-utils/master/chains/v11/chains_dev.json"
# README template rendered onto gh-pages; {{dapps_table}} is replaced with the
# generated Markdown table of deployments.
readme = Template("""
Projects' status is updated every 4 hours
SubQuery API data sources are grouped based on the following features:
📚 Operation History - Transfers and Extrinsics for Utility (main) token of the network <br />
✨ Multi-asset transfers - Support for transfer history for tokens from ORML and Assets pallets <br />
🥞 Staking rewards - Rewards history and accumulated total rewards, supports both Staking and ParachainStaking pallets <br />
📈 Staking analytics - Queries for current stake, validators statistics, and stake change history
# List of deployed projects
{{dapps_table}}
""")
def generate_project_table():
    """Collect every project/deployment for the organisation and render the table.

    Returns the MarkdownTableWriter produced by ProjectTableGenerator.
    """
    deployment_api = SubQueryDeploymentAPI(auth_token=token, org=organisation)
    deployment_api.collect_all_project_data()
    table_generator = ProjectTableGenerator(deployment_api, nova_network_list)
    return table_generator.generate_table()
if __name__ == '__main__':
    dir_name = 'gh-pages-temp'
    telegram = TelegramNotifications()
    try:
        os.makedirs(dir_name)
        print("Directory ", dir_name, " Created ")
    except FileExistsError:
        print("Directory ", dir_name, " already exists")
    # Derive the README path from dir_name instead of hard-coding
    # "./gh-pages-temp" so the two cannot drift apart.
    with open(os.path.join(dir_name, "README.md"), "w") as f:
        f.write(readme.render(
            dapps_table=generate_project_table()
        ))
    # TODO: Temp remove, waiting for https://app.clickup.com/t/862kc4b47
    # telegram.send_notification()
+34
View File
@@ -0,0 +1,34 @@
anyio==3.6.2
attrs==22.2.0
certifi==2022.12.7
chardet==5.1.0
charset-normalizer==3.0.1
DataProperty==0.55.0
exceptiongroup==1.1.0
h11==0.14.0
httpcore==0.16.3
httpx==0.23.3
idna==3.4
iniconfig==2.0.0
Jinja2==3.1.2
MarkupSafe==2.1.2
mbstrdecoder==1.1.1
packaging==23.0
pathvalidate==2.5.2
pluggy==1.0.0
pytablewriter==0.64.2
pytest==7.2.1
python-dateutil==2.8.2
python-telegram-bot==20.0
pytz==2022.7.1
PyYAML==6.0
requests==2.28.2
rfc3986==1.5.0
six==1.16.0
sniffio==1.3.0
tabledata==1.3.0
tcolorpy==0.1.2
tomli==2.0.1
typepy==1.3.0
urllib3==1.26.14
wget==3.2
+6
View File
@@ -0,0 +1,6 @@
class Singleton(type):
    """Metaclass that turns every class using it into a process-wide singleton.

    The first instantiation creates and caches the instance; every later call
    returns the cached object (constructor arguments after the first call are
    ignored).
    """

    _instances = {}

    def __call__(cls, *args, **kwargs):
        try:
            return cls._instances[cls]
        except KeyError:
            instance = super().__call__(*args, **kwargs)
            cls._instances[cls] = instance
            return instance
+158
View File
@@ -0,0 +1,158 @@
from typing import List
from datetime import datetime, timedelta
import re
import requests
class DeploymentInstance():
    """Single deployment of a SubQuery project, as returned by the API.

    Maps the camelCase API payload onto snake_case attributes; a missing key
    is a programming error, so KeyError propagates.
    """

    def __init__(self, **kwargs) -> None:
        # camelCase API key -> snake_case attribute name
        for api_key, attr in (
            ('id', 'id'),
            ('projectKey', 'project_key'),
            ('version', 'version'),
            ('status', 'status'),
            ('type', 'type'),
            ('configuration', 'configuration'),
        ):
            setattr(self, attr, kwargs[api_key])
class SubQueryProject():
    """A SubQuery managed-service project together with its deployments.

    Deployments are optional in the payload; when absent the list stays empty.
    """

    def __init__(self, **kwargs) -> None:
        self.id = kwargs['id']
        self.key = kwargs['key']
        self.name = kwargs['name']
        self.network = kwargs['network']
        self.metadata = kwargs['metadata']
        self.query_url = kwargs['queryUrl']
        # Wrap each raw deployment dict in a DeploymentInstance.
        self.deployments: List[DeploymentInstance] = [
            DeploymentInstance(**raw) for raw in (kwargs.get('deployments') or [])
        ]
class SubQueryDeploymentAPI():
    """Client for the SubQuery managed-service HTTP API.

    Lists an organisation's projects, their deployments, per-deployment sync
    status and indexer logs. Call get_all_projects_for_organisation() (or the
    collect_all_project_data() convenience wrapper) before the per-project
    helpers that read self.org_projects.
    """

    base_url = "https://api.subquery.network"

    def __init__(self, auth_token, org) -> None:
        """auth_token: bearer token for the managed service; org: account name."""
        self.org = org
        # Populated by get_all_projects_for_organisation(); initialised empty
        # so the guard checks below never raise AttributeError.
        self.org_projects = []
        # Browser-like headers are kept as-is — the managed service expects
        # them; Authorization carries the bearer token.
        self.headers = {
            'authority': 'api.subquery.network',
            'accept': 'application/json, text/plain, */*',
            'accept-language': 'en-GB,en-US;q=0.9,en;q=0.8,ru;q=0.7',
            'origin': 'https://managedservice.subquery.network',
            'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'empty',
            'sec-fetch-mode': 'cors',
            'sec-fetch-site': 'same-site',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
            'Authorization': f'Bearer {auth_token}',
        }

    def _send_request(self, method, path, payload=None):
        """Send an HTTP request to base_url + path and return the raw response.

        Raises a wrapped Exception on transport errors or on a 401 response.
        """
        try:
            response = requests.request(
                method, self.base_url + path, headers=self.headers, data=payload)
            if response.status_code == 401:
                # Token missing or expired; surface it explicitly.
                raise Exception(f"Unauthorised:\n{response}")
            return response
        except Exception as e:
            raise Exception(
                f"Can't request to: {path} by method: {method} and payload: {payload} \nException: {e}")

    def collect_all_project_data(self) -> List["SubQueryProject"]:
        """Fetch projects, then deployments and sync status for each project.

        Returns the fully-populated project list (also cached on self).
        """
        self.get_all_projects_for_organisation()
        print(
            f"Organisation: {self.org}\nHas {len(self.org_projects)} projects")
        print("Process of getting deployments have been started.")
        for project in self.org_projects:
            self.get_deployments_for_project(project)
            print(
                f"Project: {project.network} received: {len(project.deployments)} deployments.")
            for deployment in project.deployments:
                self.get_sync_status_for_deployment(deployment)
                print(
                    f"Deployment for {project.network} status: {deployment.sync_status}, env: {deployment.type}")
        return self.org_projects

    def get_all_projects_for_organisation(self) -> List["SubQueryProject"]:
        """Fetch and cache every project owned by the organisation."""
        projects = self._send_request(
            method="GET", path=f"/user/projects?account={self.org}").json()
        self.org_projects = [SubQueryProject(**project) for project in projects]
        return self.org_projects

    def get_sync_status_for_deployment(self, deployment) -> "DeploymentInstance":
        """Attach the first network's sync status to `deployment.sync_status`.

        Sets None when the API reports no networks; returns the deployment.
        """
        if len(self.org_projects) == 0:
            print("org_projects is empty, use get_all_projects_for_organisation first")
        sync_status = self._send_request(
            method="GET",
            path=f"/v3/subqueries/{deployment.project_key}/deployments/{deployment.id}/sync-status"
        ).json()
        # The API returns one entry per indexed network; only the first is used.
        networks = sync_status['networks']
        deployment.sync_status = networks[0] if networks else None
        return deployment

    def get_deployments_for_project(self, project) -> List["DeploymentInstance"]:
        """Fetch the project's deployments and cache them on the project."""
        if len(self.org_projects) == 0:
            print("org_projects is empty, use get_all_projects_for_organisation first")
        deployments = self._send_request(
            method="GET",
            path=f"/subqueries/{project.key}/deployments"
        ).json()
        project.deployments = [DeploymentInstance(**deployment) for deployment in deployments]
        return project.deployments

    def find_project_by_parameter(self, parameter_name, parameter_value):
        """Return the first cached project whose attribute equals the value.

        Returns None (after printing a notice) when nothing matches.
        """
        found_project = [project for project in self.org_projects
                         if getattr(project, parameter_name) == parameter_value]
        if found_project:
            print("Project found")
            return found_project[0]
        print("Project not found.")

    def get_logs(self, project_name: str, sid: str, level: str = 'info', stage: bool = False):
        """Fetch indexer logs for a deployment.

        Bug fix: the query parameters were previously built but never sent;
        they are now URL-encoded into the request path.
        """
        from urllib.parse import urlencode
        params = {
            'level': level,
            'stage': str(stage).lower(),
            'sid': sid
        }
        response = self._send_request(
            method="GET",
            path=f'/v3/subqueries/{project_name}/logs?{urlencode(params)}'
        )
        if response.status_code == 200:
            return response.json()
        response.raise_for_status()

    def parse_logs(self, logs):
        """Extract (target_height, current_height) from the newest sync log line.

        Only lines younger than one hour are considered; returns (None, None)
        when no suitable line exists.
        """
        for log in logs['result']:
            message = log.get('message')
            timestamp = log.get('timestamp')
            if message and 'Target height' in message and 'Current height' in message:
                log_time = datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ')
                if datetime.utcnow() - log_time <= timedelta(hours=1):
                    # Raw strings so \d is a regex class, not a string escape.
                    target_height = re.search(r'Target height: ([\d,]+)', message).group(1)
                    current_height = re.search(r'Current height: ([\d,]+)', message).group(1)
                    # Heights are printed with thousands separators; strip them.
                    target_height = int(target_height.replace(',', ''))
                    current_height = int(current_height.replace(',', ''))
                    return target_height, current_height
        return None, None
+53
View File
@@ -0,0 +1,53 @@
import subprocess
import wget
import os
import zipfile
import os
import platform
def get_subquery_cli(subquery_cli_version):
    """Download and unpack the subquery-cli binary for the current OS.

    Parameters:
        subquery_cli_version: release version to fetch, e.g. '0.2.4'.
    Returns:
        Path of the temporary directory containing the extracted binary.
    Raises:
        ValueError: when the operating system is not Linux, macOS or Windows.
    """
    download_url = "https://github.com/fewensa/subquery-cli/releases/download/v" + subquery_cli_version
    temporary_path = "./temporary"
    current_platform = platform.system()
    if current_platform == "Linux":
        download_url += "/subquery-linux-x86_64.zip"
    elif current_platform == "Darwin":
        download_url += "/subquery-macos-x86_64.zip"
    elif current_platform == "Windows":
        download_url += "/subquery-windows-x86_64.zip"
    else:
        raise ValueError("Can't recognize the operating system")
    try:
        # exist_ok=False: FileExistsError means a previous run already
        # downloaded the binary, which the except below treats as success.
        os.makedirs(temporary_path, exist_ok=False)
        wget.download(download_url, out=temporary_path)
        for file in os.listdir(temporary_path):
            with zipfile.ZipFile(temporary_path + '/' + file) as item:
                item.extractall(temporary_path)
    except Exception:
        # Best-effort by design; narrowed from a bare `except` so that
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        pass
    # Make the extracted binary executable for everyone.
    subprocess.call(['chmod', '-R', '777', temporary_path])
    return temporary_path
def use_subquery_cli(subquery_cli_version, *args):
    """Run the subquery-cli binary with the given arguments.

    Downloads/extracts the CLI if needed and returns its decoded stdout.
    """
    cli_dir = get_subquery_cli(subquery_cli_version)
    raw_output = subprocess.check_output([cli_dir + '/subquery', *args])
    return raw_output.decode()
if __name__ == "__main__":
    # Read credentials from the environment. The previous commented-out code
    # used the invalid form os.environ['NAME', ''] (dict indexing takes one
    # key); os.environ.get() with a default is the intended behaviour and
    # still yields '' when the variables are unset.
    token = os.environ.get('SUBQUERY_TOKEN', '')
    project_key = os.environ.get('PROJECT_KEY', '')
    subquery_cli_version = '0.2.4'
    use_subquery_cli(subquery_cli_version, '--token', token, 'deployment', 'list', '-o', 'json', '--org', 'nova-wallet', '--key', project_key)
@@ -0,0 +1,144 @@
import json
import requests
from pytablewriter import MarkdownTableWriter
from telegram_notifications import TelegramNotifications
from subquery_api import SubQueryDeploymentAPI, SubQueryProject, DeploymentInstance
class ProjectTableGenerator:
    """Builds a Markdown status table for the org's SubQuery projects.

    Each row cross-references a deployed project with the Nova chains list and
    shows: supported features, stage/prod deployment status as progress-bar
    badges, and the deployed commits.
    """

    def __init__(self, sub_query: "SubQueryDeploymentAPI", nova_network_list_url: str):
        self.sub_query = sub_query
        self.nova_network_list_url = nova_network_list_url

    def generate_table(self):
        """Render the full Markdown table to stdout and return the writer."""
        writer = MarkdownTableWriter(
            headers=["--", "Network", "Features", "Stage status", "Prod status", "Stage commit", "Prod commit"],
            value_matrix=self.generate_value_matrix_for_table(),
            margin=1
        )
        writer.write_table()
        return writer

    def generate_value_matrix_for_table(self):
        """Build one row per network, sorted, with a leading 1-based index column."""
        rows = []
        for network in self.generate_network_list():
            rows.append(self.generate_network_data_array(network))
            print(f'{network.get("name").title()} generated!')
        rows.sort()
        # Number the rows only after sorting so the index stays sequential.
        # (Was a side-effecting list comprehension over an iterator.)
        for position, row in enumerate(rows, start=1):
            row.insert(0, position)
        return rows

    def generate_network_data_array(self, network: dict):
        """Assemble a single table row for `network` (without the index column)."""
        subquery_project_data = self.sub_query.find_project_by_parameter('name', network.get('name'))
        row = [
            f"[{network.get('name').title()}](https://explorer.subquery.network/subquery/{subquery_project_data.key})"
        ]
        # Typo fixed: `stage_comit` -> `stage_commit`.
        prod_status, prod_commit, stage_status, stage_commit = self.generate_progress_status(
            next(filter(lambda project: project.name == network['name'], self.sub_query.org_projects))
        )
        row.extend([network.get('features'), stage_status, prod_status, stage_commit, prod_commit])
        return row

    def generate_network_list(self):
        """Match org projects against the Nova chains list; skip undeployed ones."""
        feature_list = []
        chains_list = self._send_http_request(self.nova_network_list_url)
        available_projects = self.sub_query.org_projects
        for project in available_projects:
            prod_genesis = self.get_prod_genesis(project)
            if not prod_genesis:  # Skip undeployed projects
                continue
            project_genesis = self._remove_hex_prefix(prod_genesis)
            chain = next((chain for chain in chains_list if chain.get('chainId') == project_genesis), None)
            feature_list.append({
                "name": project.name,
                "genesis": project_genesis,
                "features": self.check_features(chain)
            })
        return feature_list

    def get_prod_genesis(self, project):
        """Return the chainId of the project's primary deployment, or None.

        BUGFIX: this used to return a *list* of chainIds; the caller then
        sliced it via _remove_hex_prefix and compared the result to a string
        chainId, which could never match. Now returns the first primary
        deployment's chainId as a single value.
        """
        try:
            return next(
                (deploy.configuration['chainId']
                 for deploy in project.deployments
                 if deploy.type == 'primary'),
                None,
            )
        except (AttributeError, KeyError, TypeError):
            # Old deployment objects may lack the expected structure.
            print(f"Network: {project.network} has old deployment, need to redeploy")
            return None

    def generate_progress_status(self, project: "SubQueryProject"):
        """Return (prod_status, prod_commit, stage_status, stage_commit) badges."""
        prod, stage = None, None
        for deployment in project.deployments:
            if deployment.type == 'primary':
                prod = deployment
            elif deployment.type == 'stage':
                stage = deployment
            else:
                raise Exception(f"Unknown deployment type: {deployment.type} in project: {project}")
        prod_status, prod_commit = self.fill_status_bar(prod, project)
        stage_status, stage_commit = self.fill_status_bar(stage, project)
        return prod_status, prod_commit, stage_status, stage_commit

    def fill_status_bar(self, instance: "DeploymentInstance", project: "SubQueryProject"):
        """Return a (progress badge markdown, short commit) pair for a deployment slot."""
        if not instance:
            return '![0](https://progress-bar.dev/0?title=N/A)', '-'
        commit = instance.version[0:8]
        if instance.status == 'processing':
            return '![0](https://progress-bar.dev/0?title=Processing...)', commit
        if instance.status == 'error' and self.get_sync_percentage(instance, project) == '0':
            return '![0](https://progress-bar.dev/0?title=Error)', commit
        percent = self.get_sync_percentage(instance, project)
        return f'![{percent}](https://progress-bar.dev/{percent}?title={instance.type.capitalize()})', commit

    def is_sync_status_valid(self, sync_status):
        """True when sync_status has non-None 'processingBlock' and 'targetBlock'."""
        if sync_status is None:
            return False
        return all(key in sync_status and sync_status[key] is not None for key in ['processingBlock', 'targetBlock'])

    def get_sync_percentage(self, instance: "DeploymentInstance", project: "SubQueryProject") -> str:
        """Return sync progress 0-100 as a string; queue a Telegram alert when unhealthy."""
        if not self.is_sync_status_valid(instance.sync_status):
            # Fall back to parsing the indexer logs when the API has no status.
            logs = self.sub_query.get_logs(project.key, instance.id)
            target_block, processing_block = self.sub_query.parse_logs(logs)
        else:
            processing_block = instance.sync_status.get('processingBlock')
            target_block = instance.sync_status.get('targetBlock')
        telegram = TelegramNotifications()
        if processing_block and target_block:
            if processing_block != -1:
                return str(int((processing_block / target_block) * 100))
            else:
                telegram.add_row_in_telegram_notification(project=project, instance=instance)
                return '0'
        else:
            telegram.add_row_in_telegram_notification(project=project, instance=instance)
            return '0'

    def check_features(self, chain: dict):
        """Return a '<br />'-joined list of features supported by `chain`.

        An unknown chain (None) gets only the baseline operation history.
        """
        def has_transfer_history(chain):
            # Every indexed network supports operation history.
            return True

        def has_orml_or_asset(chain):
            return any(asset.get('type') in ['orml', 'statemine'] for asset in chain.get('assets'))

        def has_staking_analytics(chain):
            return chain.get('assets')[0].get('staking') == 'relaychain'

        def has_rewards_history(chain):
            return bool(chain.get('assets')[0].get('staking'))

        # Renamed from `dict`, which shadowed the builtin.
        criteria_by_feature = {
            "📚 Operation History": has_transfer_history,
            "✨ Multi assets": has_orml_or_asset,
            "📈 Staking analytics": has_staking_analytics,
            "🥞 Staking rewards": has_rewards_history
        }
        if chain is None:
            return list(criteria_by_feature.keys())[0]
        features = [feature for feature, criteria in criteria_by_feature.items() if criteria(chain)]
        return '<br />'.join(features)

    def _send_http_request(self, url: str):
        """GET `url` and return the parsed JSON body; abort the script on network errors."""
        try:
            response = requests.get(url)
        except requests.exceptions.RequestException as e:
            raise SystemExit(e)
        return json.loads(response.text)

    def _remove_hex_prefix(self, hex_string):
        """Strip the leading '0x' from a hex-encoded chain id."""
        return hex_string[2:]
@@ -0,0 +1,38 @@
import os
import asyncio
import telegram
from subquery_api import SubQueryProject, DeploymentInstance
from singleton import Singleton
class TelegramNotifications(metaclass=Singleton):
    """Collects per-project failure rows and sends them as one Telegram message.

    A singleton: any code path may append rows to the shared
    `notify_projects_message` buffer; `send_notification` flushes it.
    """

    notify_message_title = "⚠️ SubQuery projects error ⚠️"
    # Class-level buffer shared through the singleton instance.
    notify_projects_message = []

    def __init__(self) -> None:
        self.token = os.getenv("TELEGRAM_BOT_TOKEN")
        self.chat_id = os.getenv("TELEGRAM_CHAT_ID")

    async def send_telegram_message(self, message):
        """Send `message` to the configured chat using MarkdownV2 parse mode."""
        bot = telegram.Bot(token=self.token)
        await bot.send_message(chat_id=self.chat_id, text=message, parse_mode="MarkdownV2")

    def send_notification(self):
        """Flush all accumulated rows as a single Telegram message (no-op if empty)."""
        if len(self.notify_projects_message) == 0:
            return
        notification_message = self.notify_message_title
        for project_message in self.notify_projects_message:
            notification_message += project_message
        # MarkdownV2 treats '-' as a special character and requires escaping.
        # ('\\-' was previously written as the invalid escape sequence '\-'.)
        shielded_message = notification_message.replace('-', '\\-')
        asyncio.run(self.send_telegram_message(shielded_message))
        # Reset the buffer so a later flush does not resend the same rows.
        self.notify_projects_message.clear()

    def add_row_in_telegram_notification(self, project: "SubQueryProject", instance: "DeploymentInstance"):
        """Queue an 'indexer unhealthy' row for the given project/deployment."""
        notify_project_name = project.name.title()
        self.notify_projects_message.append(
            f"\n\n*{notify_project_name}* Indexer is unhealthy\\!\nProject URL: [Link to project](https://managedservice.subquery.network/orgs/nova-wallet/projects/{instance.project_key.split('/')[1]}/deployments?slot={instance.type})\nExplorer URL: [Link to explorer](https://explorer.subquery.network/subquery/{instance.project_key})\nEnvironment: {instance.type.capitalize()}"
        )
+31
View File
@@ -0,0 +1,31 @@
import json
import pytest
import os
from subquery_cli import use_subquery_cli
# Release tag of the subquery-cli binary used by every invocation below.
subquery_cli_version = '0.2.4'
# Required credentials; a KeyError here means the CI environment is misconfigured.
token = os.environ['SUBQUERY_TOKEN']
project_key = os.environ['PROJECT_KEY']
@pytest.fixture
def get_project_data():
    """Fetch the project's deployment list and return its 'stage' deployment."""
    raw_output = use_subquery_cli(
        subquery_cli_version, '--token', token, 'deployment', 'list', '-o', 'json', '--org', 'nova-wallet', '--key', project_key
    )
    deployments = json.loads(raw_output)
    return next(item for item in deployments if item["type"] == "stage")
def test_project_status(get_project_data):
    # The stage deployment must be up and running on the managed service.
    assert get_project_data['status'] == 'running'
def test_sync_status_test(get_project_data):
    """The stage deployment must be fully synced (100.00%)."""
    sync_status = use_subquery_cli(
        subquery_cli_version, '--token', token, 'deployment', 'sync-status', '--id', str(get_project_data['id']), '--key', project_key, '--org', 'nova-wallet')
    # Grab the number between the first "percent: " and the following '%'.
    percent = sync_status.split("percent: ")[1].split('%')[0]
    assert percent == '100.00'
+69
View File
@@ -0,0 +1,69 @@
#!/bin/bash
# Requires bash v4+ (uses ${var^} capitalisation).
#
# Download the subquery-cli binary for your operating system and put it in the
# root directory: https://github.com/fewensa/subquery-cli/releases/
#
# Updates the description/subtitle of every project under networks/ on the
# SubQuery managed service, appending feature sections per project.
SCRIPT_PATH=$(dirname "$0")
MAIN_DIRECTORY=${SCRIPT_PATH%/*}
SUBQUERY_TOKEN="${SUBQUERY_TOKEN}"
ORGANISATION="nova-wallet"
BASE_DESCRIPTION="Nova SubQuery project is indexing the blockchain and provides a convenient API for fetching operation history & analytics data. It is used by the <a href=\"https://novawallet.io\">Nova Wallet</a>
Feel free to use this API for your app! 💖</br>
<mark>Make sure that you add filters and sorting rules to your queries!</mark></br>
Following API & datasource is supported:
📚 Transfers and extrinsics (transactions). Both or either can be fetched, for example:
<code>query {historyElements{nodes{transfer extrinsic}}}</code>
</br>"
MULTIASSET_DESCRIPTION="✨ Transfer history for additional assets in the network (based on \"assets\"/\"ORML\" Substrate pallet):
<code>query {historyElements{nodes{assetTransfer}}}</code>
</br>"
# Typo fixed in the published text: "acocunt" -> "account".
STAKING_DESCRIPTION="🥞 Staking rewards history:
<code>query {historyElements{nodes{reward}}}</code>
🎁 Total staking rewards for the desired account:
<code>query {accumulatedRewards{nodes{id amount}}}</code>
</br>"
STAKING_ANALITIC="📊 Current stake — returns bonded amount:
<code>query {accumulatedStakes{nodes{id amount}}}</code>
👨‍🔧 Validators statistics:
<code>query {eraValidatorInfos{nodes{address era total own others}}}</code>
📈 Stake change history:
<code>query {stakeChanges{nodes{blockNumber extrinsicHash address amount accumulatedAmount type}}}</code>
</br>"
# Space-separated name lists; matched with substring checks below.
MULTIASSET_PROJECTS=('statemine parallel parallel-heiko westmint moonbeam moonriver astar shiden karura acala bifrost interlay kintsugi')
HAS_STAKING=('polkadot kusama westend moonbeam moonriver')
HAS_STAKING_ANALYTIC=('polkadot kusama westend')
folders=($(ls "${MAIN_DIRECTORY}/networks"))
for item in "${folders[@]}"; do
  DESCRIPTION=${BASE_DESCRIPTION}
  if [[ " ${MULTIASSET_PROJECTS[*]} " =~ " ${item} " ]]; then
    DESCRIPTION+=${MULTIASSET_DESCRIPTION}
  fi
  if [[ " ${HAS_STAKING[*]} " =~ " ${item} " ]]; then
    DESCRIPTION+=${STAKING_DESCRIPTION}
  fi
  if [[ " ${HAS_STAKING_ANALYTIC[*]} " =~ " ${item} " ]]; then
    DESCRIPTION+=${STAKING_ANALITIC}
  fi
  "$MAIN_DIRECTORY/subquery" --token "${SUBQUERY_TOKEN}" project update --org "${ORGANISATION}" --key "$item" --description "${DESCRIPTION}" --subtitle "Nova Wallet SubQuery project for ${item^} network"
done
echo "Done !"
+8
View File
@@ -0,0 +1,8 @@
// Register @polkadot runtime type augmentation. Note: ES module imports are
// hoisted, so moving this to the top does not change behavior — it only makes
// the side-effect import visually precede the re-exports.
import "@polkadot/api-augment";

// Re-export every handler function referenced by the project manifests.
export * from "./mappings/HistoryElements";
export * from "./mappings/Rewards";
export * from "./mappings/PoolRewards";
export * from "./mappings/Transfers";
export * from "./mappings/NewEra";
export * from "./mappings/swaps";
+240
View File
@@ -0,0 +1,240 @@
import "@polkadot/types-augment/lookup";
import { SubstrateEvent } from "@subql/types";
import { blockNumber } from "./common";
import { AccountId } from "@polkadot/types/interfaces";
import {
PalletStakingRewardDestination,
PalletNominationPoolsPoolMember,
} from "@polkadot/types/lookup";
import { Option } from "@polkadot/types";
// Due to memory consumption optimization `rewardDestinationByAddress` contains only one key
// (the current block). The same single-block policy applies to all caches below:
// on a cache miss the whole map is reset and repopulated for the new block.
let rewardDestinationByAddress: {
  [blockId: string]: { [address: string]: PalletStakingRewardDestination };
} = {};
// Stash address -> controller address, per block.
let controllersByStash: { [blockId: string]: { [address: string]: string } } =
  {};
// Era index that parachain-staking rewards in a block pay out for.
let parachainStakingRewardEra: { [blockId: string]: number } = {};
// All nomination-pool members as [address, member] pairs, per block.
let poolMembers: {
  [blockId: number]: [string, PalletNominationPoolsPoolMember][];
} = {};
/**
 * Resolves the staking reward destination (payee) for `accountAddress` at the
 * block of `event`.
 *
 * All sibling events of the same section/method in the block are collected so
 * every affected account's payee can be fetched with one `queryMulti`, then
 * cached per block. The cache only ever holds the current block (see the
 * memory-optimization note on the module state above).
 */
export async function cachedRewardDestination(
  accountAddress: string,
  event: SubstrateEvent,
): Promise<PalletStakingRewardDestination> {
  const blockId = blockNumber(event);
  let cachedBlock = rewardDestinationByAddress[blockId];
  if (cachedBlock !== undefined) {
    return cachedBlock[accountAddress];
  } else {
    // Cache miss: drop any previous block's entries before repopulating.
    rewardDestinationByAddress = {};
    let method = event.event.method;
    let section = event.event.section;
    const allEventsInBlock = event.block.events.filter((blockEvent) => {
      return (
        blockEvent.event.method == method && blockEvent.event.section == section
      );
    });
    let destinationByAddress: {
      [address: string]: PalletStakingRewardDestination;
    } = {};
    let {
      event: { data: innerData },
    } = event;
    if (innerData.length == 3) {
      // Three-field event layout: the second field carries the destination
      // directly, so no storage queries are needed.
      allEventsInBlock.forEach((event) => {
        let {
          event: {
            data: [accountId, destination, _],
          },
        } = event;
        let accountAddress = accountId.toString();
        destinationByAddress[accountAddress] =
          destination as PalletStakingRewardDestination;
      });
    } else {
      // Legacy layout: only the account is in the event; fetch payees from storage.
      const allAccountsInBlock = allEventsInBlock.map((event) => {
        let {
          event: {
            data: [accountId],
          },
        } = event;
        return accountId;
      });
      // looks like accountAddress not related to events so just try to query payee directly
      if (allAccountsInBlock.length === 0) {
        rewardDestinationByAddress[blockId] = {};
        return (await api.query.staking.payee(
          accountAddress,
        )) as unknown as PalletStakingRewardDestination;
      }
      // TODO: Commented code doesn't work now, may be fixed later
      // const payees = await api.query.staking.payee.multi(allAccountsInBlock);
      const payees = await api.queryMulti(
        allAccountsInBlock.map((account) => [api.query.staking.payee, account]),
      );
      const rewardDestinations = payees.map((payee) => {
        return payee as PalletStakingRewardDestination;
      });
      // something went wrong, so just query for single accountAddress
      if (rewardDestinations.length !== allAccountsInBlock.length) {
        const payee = (await api.query.staking.payee(
          accountAddress,
        )) as unknown as PalletStakingRewardDestination;
        destinationByAddress[accountAddress] = payee;
        rewardDestinationByAddress[blockId] = destinationByAddress;
        return payee;
      }
      allAccountsInBlock.forEach((account, index) => {
        let accountAddress = account.toString();
        let rewardDestination = rewardDestinations[index];
        destinationByAddress[accountAddress] = rewardDestination;
      });
    }
    rewardDestinationByAddress[blockId] = destinationByAddress;
    return destinationByAddress[accountAddress];
  }
}
/**
 * Resolves the controller address bonded to the stash `accountAddress` at the
 * block of `event`.
 *
 * For every account in the block's sibling events whose reward destination is
 * "Controller", the bonded controller is fetched in one `queryMulti` and
 * cached per block (single-block cache, like the other caches in this file).
 */
export async function cachedController(
  accountAddress: string,
  event: SubstrateEvent,
): Promise<string> {
  const blockId = blockNumber(event);
  let cachedBlock = controllersByStash[blockId];
  if (cachedBlock !== undefined) {
    return cachedBlock[accountAddress];
  } else {
    // Cache miss: reset the single-block cache before repopulating.
    controllersByStash = {};
    let method = event.event.method;
    let section = event.event.section;
    const allAccountsInBlock = event.block.events
      .filter((blockEvent) => {
        return (
          blockEvent.event.method == method &&
          blockEvent.event.section == section
        );
      })
      .map((event) => {
        let {
          event: {
            data: [accountId],
          },
        } = event;
        return accountId;
      });
    // Only accounts paying rewards to their controller need a lookup.
    var controllerNeedAccounts: AccountId[] = [];
    for (let accountId of allAccountsInBlock) {
      const rewardDestination = await cachedRewardDestination(
        accountId.toString(),
        event,
      );
      if (rewardDestination.isController) {
        controllerNeedAccounts.push(accountId as AccountId);
      }
    }
    // looks like accountAddress not related to events so just try to query controller directly
    if (controllerNeedAccounts.length === 0) {
      controllersByStash[blockId] = {};
      let accountId = await api.query.staking.bonded(accountAddress);
      return accountId.toString();
    }
    // TODO: Commented code doesn't work now, may be fixed later
    // const bonded = await api.query.staking.bonded.multi(controllerNeedAccounts);
    const bonded = await api.queryMulti(
      controllerNeedAccounts.map((account) => [
        api.query.staking.bonded,
        account,
      ]),
    );
    const controllers = bonded.map((bonded) => {
      return bonded.toString();
    });
    let bondedByAddress: { [address: string]: string } = {};
    // something went wrong, so just query for single accountAddress
    if (controllers.length !== controllerNeedAccounts.length) {
      const controller = await api.query.staking.bonded(accountAddress);
      let controllerAddress = controller.toString();
      bondedByAddress[accountAddress] = controllerAddress;
      controllersByStash[blockId] = bondedByAddress;
      return controllerAddress;
    }
    controllerNeedAccounts.forEach((account, index) => {
      let accountAddress = account.toString();
      bondedByAddress[accountAddress] = controllers[index];
    });
    controllersByStash[blockId] = bondedByAddress;
    return bondedByAddress[accountAddress];
  }
}
/**
 * Returns the era index that a parachain-staking reward in this block pays
 * out for: the current round minus the configured reward payment delay.
 * Cached per block; the cache holds a single block at a time.
 */
export async function cachedStakingRewardEraIndex(
  event: SubstrateEvent,
): Promise<number> {
  const blockId = blockNumber(event);
  const cachedEra = parachainStakingRewardEra[blockId];
  if (cachedEra !== undefined) {
    return cachedEra;
  }
  const round = await api.query.parachainStaking.round();
  const paymentDelay =
    api.consts.parachainStaking.rewardPaymentDelay.toHuman();
  // HACK: the round struct is opaque here; go through JSON to read `current`.
  const rewardEra =
    (round.toJSON() as { current: any }).current - Number(paymentDelay);
  // Reset the single-block cache and store the freshly computed era.
  parachainStakingRewardEra = { [blockId]: rewardEra };
  return rewardEra;
}
/**
 * Returns every nomination-pool member as an [address, member] pair for the
 * given block, skipping empty (None) storage entries. Results are cached for
 * one block at a time.
 */
export async function getPoolMembers(
  blockId: number,
): Promise<[string, PalletNominationPoolsPoolMember][]> {
  const cachedMembers = poolMembers[blockId];
  if (cachedMembers != undefined) {
    return cachedMembers;
  }
  const entries = await api.query.nominationPools.poolMembers.entries();
  const members: [string, PalletNominationPoolsPoolMember][] = [];
  for (const [storageKey, rawMember] of entries) {
    const member = rawMember as Option<PalletNominationPoolsPoolMember>;
    if (member.isSome) {
      members.push([storageKey.args[0].toString(), member.unwrap()]);
    }
  }
  // Reset the single-block cache before storing this block's members.
  poolMembers = { [blockId]: members };
  return members;
}
+505
View File
@@ -0,0 +1,505 @@
import { SubstrateExtrinsic } from "@subql/types";
import { AssetTransfer, HistoryElement, Transfer, Swap } from "../types";
import {
getAssetIdFromMultilocation,
getEventData,
callFromProxy,
callsFromBatch,
calculateFeeAsString,
extrinsicIdFromBlockAndIdx,
eventRecordToSubstrateEvent,
isBatch,
isProxy,
timestamp,
isNativeTransfer,
isAssetTransfer,
isOrmlTransfer,
isSwapExactTokensForTokens,
isSwapTokensForExactTokens,
isNativeTransferAll,
isOrmlTransferAll,
isEvmTransaction,
isEvmExecutedEvent,
isAssetTxFeePaidEvent,
isEquilibriumTransfer,
isHydraOmnipoolBuy,
isHydraOmnipoolSell,
isHydraRouterSell,
isHydraRouterBuy,
convertOrmlCurrencyIdToString,
} from "./common";
import { CallBase } from "@polkadot/types/types/calls";
import { AnyTuple } from "@polkadot/types/types/codec";
import { u64 } from "@polkadot/types";
import { ethereumEncode } from "@polkadot/util-crypto";
import { u128, u32 } from "@polkadot/types-codec";
import { convertHydraDxTokenIdToString, findHydraDxFeeTyped } from "./swaps";
import { Codec } from "@polkadot/types/types";
// One result row per (failed) transfer-like call discovered in an extrinsic.
type TransferData = {
  isTransferAll: boolean;
  transfer: Transfer | AssetTransfer | Swap;
};

// Builds history rows for plain balance / asset / ORML transfers.
type TransferCallback = (
  isTransferAll: boolean,
  address: string,
  amount: any,
  assetId?: string
) => Array<{ isTransferAll: boolean; transfer: Transfer }>;

// Builds history rows for AssetHub swaps.
// (Second parameter was misleadingly named `amountId`; it is the input amount.)
type AssetHubSwapCallback = (
  path: any,
  amountIn: Codec,
  amountOut: Codec,
  receiver: Codec
) => Array<{ isTransferAll: boolean; transfer: Swap }>;

// Builds a single history row for HydraDX omnipool/router swaps.
type HydraDxSwapCallback = (
  assetIn: Codec,
  assetOut: Codec,
  amountIn: Codec,
  amountOut: Codec
) => { isTransferAll: boolean; transfer: Swap };
/**
 * Extrinsic handler: records history entries for signed extrinsics (including
 * failed transfers/swaps) and for successful unsigned EVM transactions.
 */
export async function handleHistoryElement(
  extrinsic: SubstrateExtrinsic
): Promise<void> {
  if (extrinsic.extrinsic.isSigned) {
    const failedTransfers = findFailedTransferCalls(extrinsic);
    if (failedTransfers != null) {
      await saveFailedTransfers(failedTransfers, extrinsic);
    } else {
      await saveExtrinsic(extrinsic);
    }
    return;
  }
  if (isEvmTransaction(extrinsic.extrinsic.method) && extrinsic.success) {
    await saveEvmExtrinsic(extrinsic);
  }
}
/**
 * Builds (but does not save) a HistoryElement for `address`, with an id of
 * `extrinsicIdFromBlockAndIdx(...)` plus `suffix`. `hash` overrides the
 * substrate extrinsic hash (used for EVM transactions whose user-visible
 * hash differs from the wrapping extrinsic's).
 */
function createHistoryElement(
  extrinsic: SubstrateExtrinsic,
  address: string,
  suffix: string = "",
  hash?: string
) {
  const extrinsicHash = hash || extrinsic.extrinsic.hash.toString();
  const blockNumber = extrinsic.block.block.header.number.toNumber();
  const extrinsicIdx = extrinsic.idx;
  const extrinsicId = extrinsicIdFromBlockAndIdx(blockNumber, extrinsicIdx);
  const blockTimestamp = timestamp(extrinsic.block);
  const historyElement = HistoryElement.create({
    id: `${extrinsicId}${suffix}`,
    blockNumber,
    timestamp: blockTimestamp,
    address,
  });
  historyElement.extrinsicHash = extrinsicHash;
  historyElement.extrinsicIdx = extrinsicIdx;
  // Removed a redundant second `historyElement.timestamp = blockTimestamp;`
  // assignment — the timestamp is already set via HistoryElement.create above.
  return historyElement;
}
/**
 * Attaches a transfer-like payload to the matching field of a history
 * element, discriminating by shape: swaps carry `assetIdIn`, asset transfers
 * carry `assetId`, anything else is a native transfer.
 */
function addTransferToHistoryElement(
  element: HistoryElement,
  transfer: Transfer | AssetTransfer | Swap
) {
  if ("assetIdIn" in transfer) {
    element.swap = transfer;
    return;
  }
  if ("assetId" in transfer) {
    element.assetTransfer = transfer;
    return;
  }
  element.transfer = transfer;
}
/**
 * Persists history elements for each failed transfer/swap: one "-from" entry
 * for the sender and, unless sender == receiver on a transferAll/swap, a
 * "-to" entry for the receiver.
 */
async function saveFailedTransfers(
  transfers: Array<TransferData>,
  extrinsic: SubstrateExtrinsic
): Promise<void> {
  for (const { isTransferAll, transfer } of transfers) {
    // Swaps are detected by shape (they carry `assetIdIn`).
    const isSwap = "assetIdIn" in transfer;
    const from = isSwap ? transfer.sender : transfer.from;
    const to = isSwap ? transfer.receiver : transfer.to;
    const elementFrom = createHistoryElement(extrinsic, from, `-from`);
    addTransferToHistoryElement(elementFrom, transfer);
    // FIXME: Try to find more appropriate way to handle failed transferAll events
    if ((!isTransferAll && !isSwap) || from.toString() != to.toString()) {
      const elementTo = createHistoryElement(extrinsic, to, `-to`);
      addTransferToHistoryElement(elementTo, transfer);
      await elementTo.save();
    }
    await elementFrom.save();
  }
}
/** Persists a plain (non-transfer) signed extrinsic as a history element. */
async function saveExtrinsic(extrinsic: SubstrateExtrinsic): Promise<void> {
  const signerAddress = extrinsic.extrinsic.signer.toString();
  const element = createHistoryElement(extrinsic, signerAddress, "-extrinsic");
  const { method } = extrinsic.extrinsic;
  element.extrinsic = {
    hash: extrinsic.extrinsic.hash.toString(),
    module: method.section,
    call: method.method,
    success: extrinsic.success,
    fee: calculateFeeAsString(extrinsic),
  };
  await element.save();
}
/**
 * Persists a history element for an (unsigned) EVM transaction, using the
 * Ethereum-encoded sender and the EVM tx hash from the `Executed` event
 * rather than the wrapping substrate extrinsic's signer/hash.
 */
async function saveEvmExtrinsic(extrinsic: SubstrateExtrinsic): Promise<void> {
  const executedEvent = extrinsic.events.find(isEvmExecutedEvent);
  if (!executedEvent) {
    return;
  }
  // Event data layout assumed: [from, ?, txHash, exitReason] — TODO confirm.
  const addressFrom = ethereumEncode(executedEvent.event.data?.[0]?.toString());
  const hash = executedEvent.event.data?.[2]?.toString();
  const success = !!(executedEvent.event.data?.[3].toJSON() as any).succeed;
  const element = createHistoryElement(
    extrinsic,
    addressFrom,
    "-extrinsic",
    hash
  );
  element.extrinsic = {
    hash,
    module: extrinsic.extrinsic.method.section,
    call: extrinsic.extrinsic.method.method,
    success,
    fee: calculateFeeAsString(extrinsic, addressFrom),
  };
  await element.save();
}
/// Success Transfer emits Transfer event that is handled at Transfers.ts handleTransfer()
/**
 * For a FAILED extrinsic, extracts every transfer/swap-like call (recursing
 * through batch/proxy wrappers) and converts each to TransferData with
 * success=false and eventIdx=-1 (no event was emitted for the failure).
 * Returns null for successful extrinsics or when nothing transfer-like found.
 */
function findFailedTransferCalls(
  extrinsic: SubstrateExtrinsic
): Array<TransferData> | null {
  if (extrinsic.success) {
    return null;
  }
  let sender = extrinsic.extrinsic.signer;
  // Plain transfer (native/assets/ORML): record amount from call args.
  const transferCallback: TransferCallback = (
    isTransferAll,
    address,
    amount,
    assetId?
  ) => {
    const transfer: Transfer = {
      amount: amount.toString(),
      from: sender.toString(),
      to: address,
      fee: calculateFeeAsString(extrinsic),
      eventIdx: -1,
      success: false,
    };
    if (assetId) {
      (transfer as AssetTransfer).assetId = assetId;
    }
    return [
      {
        isTransferAll,
        transfer,
      },
    ];
  };
  // AssetHub swap: derive in/out asset ids from the multilocation path and
  // pick up a non-native fee asset from AssetTxFeePaid when present.
  const assetHubSwapCallback: AssetHubSwapCallback = (
    path,
    amountIn,
    amountOut,
    receiver
  ) => {
    let assetIdFee = "native";
    let fee = calculateFeeAsString(extrinsic);
    let foundAssetTxFeePaid = extrinsic.block.events.find((e) =>
      isAssetTxFeePaidEvent(eventRecordToSubstrateEvent(e))
    );
    if (foundAssetTxFeePaid !== undefined) {
      const [who, actual_fee, tip, rawAssetIdFee] = getEventData(
        eventRecordToSubstrateEvent(foundAssetTxFeePaid)
      );
      if ("interior" in rawAssetIdFee) {
        assetIdFee = getAssetIdFromMultilocation(rawAssetIdFee);
        fee = actual_fee.toString();
      }
    }
    // First and last multilocation of the path are the swapped pair.
    const assetIdIn = getAssetIdFromMultilocation(path[0], true);
    const assetIdOut = getAssetIdFromMultilocation(
      path[path["length"] - 1],
      true
    );
    if (assetIdIn === undefined || assetIdOut === undefined) {
      return [];
    }
    const swap: Swap = {
      assetIdIn: assetIdIn,
      amountIn: amountIn.toString(),
      assetIdOut: assetIdOut,
      amountOut: amountOut.toString(),
      sender: sender.toString(),
      receiver: receiver.toString(),
      assetIdFee: assetIdFee,
      fee: fee,
      eventIdx: -1,
      success: false,
    };
    return [
      {
        isTransferAll: false,
        transfer: swap,
      },
    ];
  };
  // HydraDX swap: sender receives the output; fee comes from a helper that
  // inspects the extrinsic's events.
  const hydraDxSwapCallback: HydraDxSwapCallback = (
    assetIn: Codec,
    assetOut: Codec,
    amountIn: Codec,
    amountOut: Codec
  ) => {
    let fee = findHydraDxFeeTyped(extrinsic.events);
    const assetIdIn = convertHydraDxTokenIdToString(assetIn);
    const assetIdOut = convertHydraDxTokenIdToString(assetOut);
    const swap: Swap = {
      assetIdIn: assetIdIn,
      amountIn: amountIn.toString(),
      assetIdOut: assetIdOut,
      amountOut: amountOut.toString(),
      sender: sender.toString(),
      receiver: sender.toString(),
      assetIdFee: fee.tokenId,
      fee: fee.amount,
      eventIdx: -1,
      success: false,
    };
    return {
      isTransferAll: false,
      transfer: swap,
    };
  };
  let transferCalls = determineTransferCallsArgs(
    extrinsic.extrinsic.method,
    transferCallback,
    assetHubSwapCallback,
    hydraDxSwapCallback
  );
  if (transferCalls.length == 0) {
    return null;
  }
  return transferCalls;
}
/**
 * Recursively walks `causeCall` (unwrapping batch and proxy calls) and maps
 * every transfer- or swap-like call to TransferData rows via the matching
 * callback. Unknown call types yield an empty array.
 */
function determineTransferCallsArgs(
  causeCall: CallBase<AnyTuple>,
  transferCallback: TransferCallback,
  assetHubSwapCallback: AssetHubSwapCallback,
  hydraDxSwapCallback: HydraDxSwapCallback
): Array<TransferData> {
  if (isNativeTransfer(causeCall)) {
    return transferCallback(false, ...extractArgsFromTransfer(causeCall));
  } else if (isAssetTransfer(causeCall)) {
    return transferCallback(false, ...extractArgsFromAssetTransfer(causeCall));
  } else if (isOrmlTransfer(causeCall)) {
    return transferCallback(false, ...extractArgsFromOrmlTransfer(causeCall));
  } else if (isEquilibriumTransfer(causeCall)) {
    return transferCallback(
      false,
      ...extractArgsFromEquilibriumTransfer(causeCall)
    );
  } else if (isNativeTransferAll(causeCall)) {
    return transferCallback(true, ...extractArgsFromTransferAll(causeCall));
  } else if (isOrmlTransferAll(causeCall)) {
    return transferCallback(true, ...extractArgsFromOrmlTransferAll(causeCall));
  } else if (isSwapExactTokensForTokens(causeCall)) {
    return assetHubSwapCallback(
      ...extractArgsFromSwapExactTokensForTokens(causeCall)
    );
  } else if (isSwapTokensForExactTokens(causeCall)) {
    return assetHubSwapCallback(
      ...extractArgsFromSwapTokensForExactTokens(causeCall)
    );
  } else if (isHydraOmnipoolBuy(causeCall)) {
    return [hydraDxSwapCallback(...extractArgsFromHydraOmnipoolBuy(causeCall))];
  } else if (isHydraOmnipoolSell(causeCall)) {
    return [
      hydraDxSwapCallback(...extractArgsFromHydraOmnipoolSell(causeCall)),
    ];
  } else if (isHydraRouterBuy(causeCall)) {
    return [hydraDxSwapCallback(...extractArgsFromHydraRouterBuy(causeCall))];
  } else if (isHydraRouterSell(causeCall)) {
    return [hydraDxSwapCallback(...extractArgsFromHydraRouterSell(causeCall))];
  } else if (isBatch(causeCall)) {
    // Recurse into each inner call and flatten the per-call results.
    // (Replaced a .map(...) chained to a pointless identity .map with flatMap.)
    return callsFromBatch(causeCall).flatMap((call) =>
      determineTransferCallsArgs(
        call,
        transferCallback,
        assetHubSwapCallback,
        hydraDxSwapCallback
      )
    );
  } else if (isProxy(causeCall)) {
    let proxyCall = callFromProxy(causeCall);
    return determineTransferCallsArgs(
      proxyCall,
      transferCallback,
      assetHubSwapCallback,
      hydraDxSwapCallback
    );
  } else {
    return [];
  }
}
/** Native transfer(dest, value) -> [destination address, amount]. */
function extractArgsFromTransfer(call: CallBase<AnyTuple>): [string, bigint] {
  const destination = call.args[0];
  const amount = call.args[1] as u64;
  return [destination.toString(), amount.toBigInt()];
}
/** Asset transfer(id, target, amount) -> [destination, amount, assetId]. */
function extractArgsFromAssetTransfer(
  call: CallBase<AnyTuple>
): [string, bigint, string] {
  const assetId = call.args[0];
  const destination = call.args[1];
  const amount = call.args[2] as u64;
  return [destination.toString(), amount.toBigInt(), assetId.toString()];
}
/**
 * ORML transfer(dest, currency_id, amount) -> [destination, amount, assetId].
 * NOTE(review): the asset id here is encoded via `currencyId.toHex()`, while
 * extractArgsFromOrmlTransferAll uses convertOrmlCurrencyIdToString — confirm
 * the two encodings are intentionally different.
 */
function extractArgsFromOrmlTransfer(
  call: CallBase<AnyTuple>
): [string, bigint, string] {
  const [destinationAddress, currencyId, amount] = call.args;
  return [
    destinationAddress.toString(),
    (amount as u64).toBigInt(),
    currencyId.toHex().toString(),
  ];
}
/** Equilibrium transfer(asset, to, amount) -> [destination, amount, assetId]. */
function extractArgsFromEquilibriumTransfer(
  call: CallBase<AnyTuple>
): [string, bigint, string] {
  const assetId = call.args[0];
  const destination = call.args[1];
  const amount = call.args[2] as u64;
  return [destination.toString(), amount.toBigInt(), assetId.toString()];
}
/**
 * transferAll(dest, ...) -> [destination, 0]; the actual amount is not in the
 * call args, so zero is recorded as a placeholder.
 */
function extractArgsFromTransferAll(
  call: CallBase<AnyTuple>
): [string, bigint] {
  const destination = call.args[0];
  return [destination.toString(), BigInt(0)];
}
/**
 * ORML transferAll(dest, currency_id, ...) -> [destination, 0, assetId];
 * amount is a zero placeholder since it is not present in the call args.
 */
function extractArgsFromOrmlTransferAll(
  call: CallBase<AnyTuple>
): [string, bigint, string] {
  const destination = call.args[0];
  const currencyId = call.args[1];
  return [
    destination.toString(),
    BigInt(0),
    convertOrmlCurrencyIdToString(currencyId),
  ];
}
/**
 * swap_exact_tokens_for_tokens args are already in the common order:
 * (path, amountIn, amountOutMin, receiver, ...).
 */
function extractArgsFromSwapExactTokensForTokens(
  call: CallBase<AnyTuple>
): [any, Codec, Codec, Codec] {
  const [path, amountIn, amountOut, receiver] = call.args;
  return [path, amountIn, amountOut, receiver];
}
/**
 * swap_tokens_for_exact_tokens encodes amountOut before amountIn; the
 * returned tuple reorders them into the common
 * [path, amountIn, amountOut, receiver] shape.
 */
function extractArgsFromSwapTokensForExactTokens(
  call: CallBase<AnyTuple>
): [any, Codec, Codec, Codec] {
  const [path, amountOut, amountIn, receiver, _] = call.args;
  return [path, amountIn, amountOut, receiver];
}
/**
 * HydraDX router sell(asset_in, asset_out, amount_in, min_amount_out, ...).
 * The actual output is unknown from the call args, so the caller's
 * min_amount_out is reported as amountOut.
 */
function extractArgsFromHydraRouterSell(
  call: CallBase<AnyTuple>
): [Codec, Codec, Codec, Codec] {
  const [assetIn, assetOut, amountIn, minAmountOut, _] = call.args;
  return [assetIn, assetOut, amountIn, minAmountOut];
}
/**
 * HydraDX router buy(asset_in, asset_out, amount_out, max_amount_in, ...).
 * The actual input is unknown from the call args, so the caller's
 * max_amount_in is reported as amountIn (note the reordered return tuple).
 */
function extractArgsFromHydraRouterBuy(
  call: CallBase<AnyTuple>
): [Codec, Codec, Codec, Codec] {
  const [assetIn, assetOut, amountOut, maxAmountIn, _] = call.args;
  return [assetIn, assetOut, maxAmountIn, amountOut];
}
/**
 * HydraDX omnipool sell(asset_in, asset_out, amount, min_buy_amount).
 * min_buy_amount stands in for the (unknown) actual output amount.
 */
function extractArgsFromHydraOmnipoolSell(
  call: CallBase<AnyTuple>
): [Codec, Codec, Codec, Codec] {
  const [assetIn, assetOut, amount, minBuyAmount, _] = call.args;
  return [
    assetIn,
    assetOut,
    amount, // amountIn
    minBuyAmount, // amountOut
  ];
}
/**
 * HydraDX omnipool buy(asset_out, asset_in, amount, max_sell_amount) — note
 * the first two call args are reversed relative to sell. max_sell_amount
 * stands in for the (unknown) actual input amount.
 */
function extractArgsFromHydraOmnipoolBuy(
  call: CallBase<AnyTuple>
): [Codec, Codec, Codec, Codec] {
  const [assetOut, assetIn, amount, maxSellAmount, _] = call.args;
  return [
    assetIn,
    assetOut,
    maxSellAmount, // amountIn
    amount, // amountOut
  ];
}
+117
View File
@@ -0,0 +1,117 @@
import { SubstrateEvent } from "@subql/types";
import { eventId } from "./common";
import { EraValidatorInfo } from "../types/models/EraValidatorInfo";
import { IndividualExposure } from "../types";
import {
SpStakingPagedExposureMetadata,
SpStakingExposurePage,
} from "@polkadot/types/lookup";
import { Option } from "@polkadot/types";
import { INumber } from "@polkadot/types-codec/types/interfaces";
import { Exposure } from "@polkadot/types/interfaces";
/**
 * `staking.StakersElected` event handler — delegates to the common new-era
 * exposure snapshotting below.
 */
export async function handleStakersElected(
  event: SubstrateEvent,
): Promise<void> {
  await handleNewEra(event);
}
/**
 * Snapshots per-validator exposure for the current era, using the paged
 * storage layout (`erasStakersOverview`/`erasStakersPaged`) when the runtime
 * provides it and falling back to the legacy clipped exposures otherwise.
 */
export async function handleNewEra(event: SubstrateEvent): Promise<void> {
  const eraOption = (await api.query.staking.currentEra()) as Option<INumber>;
  const currentEra = eraOption.unwrap().toNumber();
  if (api.query.staking.erasStakersOverview) {
    await processEraStakersPaged(event, currentEra);
  } else {
    await processEraStakersClipped(event, currentEra);
  }
}
/**
 * Saves one EraValidatorInfo per validator from the legacy
 * `erasStakersClipped` storage (total/own stake plus the clipped nominator
 * exposure list).
 */
async function processEraStakersClipped(
  event: SubstrateEvent,
  currentEra: number,
): Promise<void> {
  const exposures =
    await api.query.staking.erasStakersClipped.entries(currentEra);
  for (const [key, exposure] of exposures) {
    // Storage key args are (era, validator); only the validator is needed.
    const [, validatorId] = key.args;
    let validatorIdString = validatorId.toString();
    const exp = exposure as unknown as Exposure;
    const eraValidatorInfo = new EraValidatorInfo(
      eventId(event) + validatorIdString,
      validatorIdString,
      currentEra,
      exp.total.toBigInt(),
      exp.own.toBigInt(),
      exp.others.map((other) => {
        return {
          who: other.who.toString(),
          value: other.value.toString(),
        } as IndividualExposure;
      }),
    );
    await eraValidatorInfo.save();
  }
}
/**
 * Saves one EraValidatorInfo per validator from the paged exposure storage:
 * nominator pages from `erasStakersPaged` are grouped per validator, then
 * merged with the per-validator totals in `erasStakersOverview`.
 */
async function processEraStakersPaged(
  event: SubstrateEvent,
  currentEra: number,
): Promise<void> {
  const overview =
    await api.query.staking.erasStakersOverview.entries(currentEra);
  const pages = await api.query.staking.erasStakersPaged.entries(currentEra);
  interface AccumulatorType {
    [key: string]: any;
  }
  // validatorId -> pageNumber -> IndividualExposure[]
  const othersCounted = pages.reduce(
    (accumulator: AccumulatorType, [key, exp]) => {
      const exposure = (
        exp as unknown as Option<SpStakingExposurePage>
      ).unwrap();
      // Storage key args are (era, validator, page).
      const [, validatorId, pageId] = key.args;
      const pageNumber = (pageId as INumber).toNumber();
      const validatorIdString = validatorId.toString();
      const others = exposure.others.map(({ who, value }) => {
        return {
          who: who.toString(),
          value: value.toString(),
        } as IndividualExposure;
      });
      (accumulator[validatorIdString] = accumulator[validatorIdString] || {})[
        pageNumber
      ] = others;
      return accumulator;
    },
    {},
  );
  for (const [key, exp] of overview) {
    const exposure = (
      exp as unknown as Option<SpStakingPagedExposureMetadata>
    ).unwrap();
    const [, validatorId] = key.args;
    let validatorIdString = validatorId.toString();
    let others = [];
    // NOTE(review): assumes every page 0..pageCount-1 exists in othersCounted;
    // a missing page would make the spread below throw — confirm the runtime
    // guarantees dense page numbering.
    for (let i = 0; i < exposure.pageCount.toNumber(); ++i) {
      others.push(...othersCounted[validatorIdString][i]);
    }
    const eraValidatorInfo = new EraValidatorInfo(
      eventId(event) + validatorIdString,
      validatorIdString,
      currentEra,
      exposure.total.toBigInt(),
      exposure.own.toBigInt(),
      others,
    );
    await eraValidatorInfo.save();
  }
}
+257
View File
@@ -0,0 +1,257 @@
import {
AccountPoolReward,
AccumulatedReward,
AccumulatedPoolReward,
HistoryElement,
RewardType,
} from "../types";
import { SubstrateEvent } from "@subql/types";
import {
eventIdFromBlockAndIdxAndAddress,
timestamp,
eventIdWithAddress,
blockNumber,
} from "./common";
import { Codec } from "@polkadot/types/types";
import { u32 } from "@polkadot/types-codec";
import { INumber } from "@polkadot/types-codec/types/interfaces";
import {
PalletNominationPoolsBondedPoolInner,
PalletNominationPoolsPoolMember,
PalletNominationPoolsSubPools,
} from "@polkadot/types/lookup";
import {
handleGenericForTxHistory,
updateAccumulatedGenericReward,
} from "./Rewards";
import { getPoolMembers } from "./Cache";
import { Option } from "@polkadot/types";
/**
 * Processes a nomination-pool reward paid to a member: records the tx-history
 * element, bumps the member's accumulated pool total, and writes the
 * per-event AccountPoolReward row.
 */
export async function handlePoolReward(
  rewardEvent: SubstrateEvent<
    [accountId: Codec, poolId: INumber, reward: INumber]
  >,
): Promise<void> {
  await handlePoolRewardForTxHistory(rewardEvent);
  const accumulated = await updateAccumulatedPoolReward(rewardEvent, true);
  const [accountId, poolId, amount] = rewardEvent.event.data;
  await updateAccountPoolRewards(
    rewardEvent,
    accountId.toString(),
    amount.toBigInt(),
    poolId.toNumber(),
    RewardType.reward,
    accumulated.amount,
  );
}
/**
 * Persists a nomination-pool reward as a HistoryElement in the member's
 * transaction history.
 */
async function handlePoolRewardForTxHistory(
  rewardEvent: SubstrateEvent<
    [accountId: Codec, poolId: INumber, reward: INumber]
  >,
): Promise<void> {
  const {
    event: {
      data: [account, poolId, amount],
    },
  } = rewardEvent;
  // Await the generic handler: previously its promise was dropped, so save
  // failures were silent and completion ordering was not guaranteed.
  await handleGenericForTxHistory(
    rewardEvent,
    account.toString(),
    async (element: HistoryElement) => {
      element.poolReward = {
        eventIdx: rewardEvent.idx,
        amount: amount.toString(),
        isReward: true,
        poolId: poolId.toNumber(),
      };
      return element;
    },
  );
}
/** Folds a pool reward/slash into the account's AccumulatedPoolReward total. */
async function updateAccumulatedPoolReward(
  event: SubstrateEvent<[accountId: Codec, poolId: INumber, reward: INumber]>,
  isReward: boolean,
): Promise<AccumulatedReward> {
  const [accountId, , amount] = event.event.data;
  return updateAccumulatedGenericReward(
    AccumulatedPoolReward,
    accountId.toString(),
    amount.toBigInt(),
    isReward,
  );
}
async function updateAccountPoolRewards(
event: SubstrateEvent,
accountAddress: string,
amount: bigint,
poolId: number,
rewardType: RewardType,
accumulatedAmount: bigint,
): Promise<void> {
let id = eventIdWithAddress(event, accountAddress);
let accountPoolReward = new AccountPoolReward(
id,
accountAddress,
blockNumber(event),
timestamp(event.block),
amount,
accumulatedAmount,
rewardType,
poolId,
);
await accountPoolReward.save();
}
/**
 * A slash applied to a pool's bonded funds: distribute it to members
 * pro rata by their bonded points.
 */
export async function handlePoolBondedSlash(
  bondedSlashEvent: SubstrateEvent<[poolId: INumber, slash: INumber]>,
): Promise<void> {
  const [poolIdEncoded, slash] = bondedSlashEvent.event.data;
  const poolId = poolIdEncoded.toNumber();
  const pool = (
    (await api.query.nominationPools.bondedPools(
      poolId,
    )) as Option<PalletNominationPoolsBondedPoolInner>
  ).unwrap();
  await handleRelaychainPooledStakingSlash(
    bondedSlashEvent,
    poolId,
    pool.points.toBigInt(),
    slash.toBigInt(),
    (member: PalletNominationPoolsPoolMember): bigint =>
      member.points.toBigInt(),
  );
}
/**
 * Handles a slash applied to one of a pool's unbonding sub-pools. The slash
 * is apportioned to members by their unbonding points for the slashed era.
 */
export async function handlePoolUnbondingSlash(
  unbondingSlashEvent: SubstrateEvent<
    [poolId: INumber, era: INumber, slash: INumber]
  >,
): Promise<void> {
  const {
    event: {
      data: [poolId, era, slash],
    },
  } = unbondingSlashEvent;
  const poolIdNumber = poolId.toNumber();
  const eraIdNumber = era.toNumber();
  const unbondingPools = (
    (await api.query.nominationPools.subPoolsStorage(
      poolIdNumber,
    )) as Option<PalletNominationPoolsSubPools>
  ).unwrap();
  // Prefer the era-specific sub-pool; fall back to `noEra` when the era has
  // no dedicated sub-pool.
  // NOTE(review): `withEra` is a codec map keyed by u32; the double cast
  // feeds it a plain JS number — confirm the map's get() accepts that.
  const pool =
    unbondingPools.withEra.get(eraIdNumber as unknown as u32) ??
    unbondingPools.noEra;
  await handleRelaychainPooledStakingSlash(
    unbondingSlashEvent,
    poolIdNumber,
    pool.points.toBigInt(),
    slash.toBigInt(),
    // A member's stake in this sub-pool is their unbonding points for the
    // slashed era (zero when they have nothing unbonding in it).
    (member: PalletNominationPoolsPoolMember): bigint => {
      return (
        member.unbondingEras.get(eraIdNumber as unknown as u32)?.toBigInt() ??
        BigInt(0)
      );
    },
  );
}
async function handleRelaychainPooledStakingSlash(
event: SubstrateEvent,
poolId: number,
poolPoints: bigint,
slash: bigint,
memberPointsCounter: (member: PalletNominationPoolsPoolMember) => bigint,
): Promise<void> {
if (poolPoints == BigInt(0)) {
return;
}
const members = await getPoolMembers(blockNumber(event));
for (const [accountId, member] of members) {
let memberPoints: bigint;
if (member.poolId.toNumber() === poolId) {
memberPoints = memberPointsCounter(member);
if (memberPoints != BigInt(0)) {
const personalSlash = (slash * memberPoints) / poolPoints;
await handlePoolSlashForTxHistory(
event,
poolId,
accountId,
personalSlash,
);
let accumulatedReward = await updateAccumulatedGenericReward(
AccumulatedPoolReward,
accountId,
personalSlash,
false,
);
await updateAccountPoolRewards(
event,
accountId,
personalSlash,
poolId,
RewardType.slash,
accumulatedReward.amount,
);
}
}
}
}
/** Writes one HistoryElement describing a member's share of a pool slash. */
async function handlePoolSlashForTxHistory(
  slashEvent: SubstrateEvent,
  poolId: number,
  accountId: string,
  personalSlash: bigint,
): Promise<void> {
  const { extrinsic, block } = slashEvent;
  const height = block.block.header.number;
  const element = HistoryElement.create({
    id: eventIdFromBlockAndIdxAndAddress(
      height.toString(),
      slashEvent.idx.toString(),
      accountId,
    ),
    timestamp: timestamp(block),
    blockNumber: height.toNumber(),
    extrinsicHash:
      extrinsic !== undefined ? extrinsic.extrinsic.hash.toString() : null,
    extrinsicIdx: extrinsic !== undefined ? extrinsic.idx : null,
    address: accountId,
    poolReward: {
      eventIdx: slashEvent.idx,
      amount: personalSlash.toString(),
      isReward: false,
      poolId: poolId,
    },
  });
  await element.save();
}
+616
View File
@@ -0,0 +1,616 @@
import {
AccountReward,
AccumulatedReward,
HistoryElement,
Reward,
RewardType,
} from "../types";
import {
SubstrateBlock,
SubstrateEvent,
SubstrateExtrinsic,
} from "@subql/types";
import {
callsFromBatch,
eventIdFromBlockAndIdx,
isBatch,
timestamp,
eventId,
eventIdWithAddress,
isProxy,
callFromProxy,
blockNumber,
} from "./common";
import { CallBase } from "@polkadot/types/types/calls";
import { AnyTuple } from "@polkadot/types/types/codec";
import { EraIndex } from "@polkadot/types/interfaces/staking";
import { Balance, EventRecord } from "@polkadot/types/interfaces";
import {
cachedRewardDestination,
cachedController,
cachedStakingRewardEraIndex,
} from "./Cache";
import { Codec } from "@polkadot/types/types";
import { INumber } from "@polkadot/types-codec/types/interfaces";
/** staking.payoutStakers? */
function isPayoutStakers(call: CallBase<AnyTuple>): boolean {
  return call.method === "payoutStakers";
}
/** staking.payoutStakersByPage? */
function isPayoutStakersByPage(call: CallBase<AnyTuple>): boolean {
  return call.method === "payoutStakersByPage";
}
/** Legacy staking.payoutValidator? */
function isPayoutValidator(call: CallBase<AnyTuple>): boolean {
  return call.method === "payoutValidator";
}
/** payoutStakers(validatorStash, era) -> [validator, era]. */
function extractArgsFromPayoutStakers(
  call: CallBase<AnyTuple>,
): [string, number] {
  const era = call.args[1] as EraIndex;
  return [call.args[0].toString(), era.toNumber()];
}
/** payoutStakersByPage(validatorStash, era, page) -> [validator, era]. */
function extractArgsFromPayoutStakersByPage(
  call: CallBase<AnyTuple>,
): [string, number] {
  // The page argument is irrelevant for attribution purposes.
  const era = call.args[1] as EraIndex;
  return [call.args[0].toString(), era.toNumber()];
}
/** payoutValidator(era): the validator is the extrinsic sender itself. */
function extractArgsFromPayoutValidator(
  call: CallBase<AnyTuple>,
  sender: string,
): [string, number] {
  const era = call.args[0] as EraIndex;
  return [sender, era.toNumber()];
}
/** `staking.Rewarded` — the modern name for the reward event. */
export async function handleRewarded(
  rewardEvent: SubstrateEvent<[accountId: Codec, reward: INumber]>,
): Promise<void> {
  return handleReward(rewardEvent);
}
export async function handleReward(
rewardEvent: SubstrateEvent<[accountId: Codec, reward: INumber]>,
): Promise<void> {
await handleRewardForTxHistory(rewardEvent);
let accumulatedReward = await updateAccumulatedReward(rewardEvent, true);
await updateAccountRewards(
rewardEvent,
RewardType.reward,
accumulatedReward.amount,
);
// let rewardEventId = eventId(rewardEvent)
// try {
// let errorOccursOnEvent = await ErrorEvent.get(rewardEventId)
// if (errorOccursOnEvent !== undefined) {
// logger.info(`Skip rewardEvent: ${rewardEventId}`)
// return;
// }
// await handleRewardForTxHistory(rewardEvent)
// await updateAccumulatedReward(rewardEvent, true)
// } catch (error) {
// logger.error(`Got error on reward event: ${rewardEventId}: ${error.toString()}`)
// let saveError = new ErrorEvent(rewardEventId)
// saveError.description = error.toString()
// await saveError.save()
// }
}
/**
 * Builds tx-history entries for a staking reward event by correlating the
 * block's payout extrinsics (payoutStakers / payoutStakersByPage /
 * payoutValidator, possibly nested in batch/proxy) with the reward events in
 * the same block, so each HistoryElement can carry validator and era.
 */
async function handleRewardForTxHistory(
  rewardEvent: SubstrateEvent,
): Promise<void> {
  let element = await HistoryElement.get(eventId(rewardEvent));
  if (element !== undefined) {
    // already processed reward previously
    return;
  }
  // (validator, era) pairs from every payout call in this block's extrinsics.
  let payoutCallsArgs = rewardEvent.block.block.extrinsics
    .map((extrinsic) =>
      determinePayoutCallsArgs(extrinsic.method, extrinsic.signer.toString()),
    )
    .filter((args) => args.length != 0)
    .flat();
  if (payoutCallsArgs.length == 0) {
    return;
  }
  const payoutValidators = payoutCallsArgs.map(([validator]) => validator);
  const initialCallIndex = -1;
  // stash address -> address that actually receives the reward, derived from
  // the stash's RewardDestination setting.
  var accountsMapping: { [address: string]: string } = {};
  for (const eventRecord of rewardEvent.block.events) {
    if (
      eventRecord.event.section == rewardEvent.event.section &&
      eventRecord.event.method == rewardEvent.event.method
    ) {
      let {
        event: {
          data: [account, _],
        },
      } = eventRecord;
      // If the first field decodes as a Balance there is no account in the
      // event data, so rewards cannot be attributed — skip the whole block.
      if (account.toRawType() === "Balance") {
        return;
      }
      let accountAddress = account.toString();
      let rewardDestination = await cachedRewardDestination(
        accountAddress,
        eventRecord as unknown as SubstrateEvent,
      );
      if (rewardDestination.isStaked || rewardDestination.isStash) {
        accountsMapping[accountAddress] = accountAddress;
      } else if (rewardDestination.isController) {
        accountsMapping[accountAddress] = await cachedController(
          accountAddress,
          eventRecord as unknown as SubstrateEvent,
        );
      } else if (rewardDestination.isAccount) {
        accountsMapping[accountAddress] =
          rewardDestination.asAccount.toString();
      }
    }
  }
  await buildRewardEvents(
    rewardEvent.block,
    rewardEvent.extrinsic,
    rewardEvent.event.method,
    rewardEvent.event.section,
    accountsMapping,
    initialCallIndex,
    // Accumulator = index of the payout call currently being attributed.
    // NOTE(review): assumes reward events appear in the same order as their
    // payout calls, so seeing a later validator advances the index — confirm.
    (currentCallIndex, eventAccount) => {
      if (payoutValidators.length > currentCallIndex + 1) {
        const index = payoutValidators.indexOf(eventAccount);
        return index !== -1 && index > currentCallIndex
          ? index
          : currentCallIndex;
      } else {
        return currentCallIndex;
      }
    },
    (currentCallIndex, eventIdx, stash, amount) => {
      if (currentCallIndex == -1) {
        // No payout call matched yet — store the reward unattributed.
        return {
          eventIdx: eventIdx,
          amount: amount,
          isReward: true,
          stash: stash,
          validator: "",
          era: -1,
        };
      } else {
        const [validator, era] = payoutCallsArgs[currentCallIndex];
        return {
          eventIdx: eventIdx,
          amount: amount,
          isReward: true,
          stash: stash,
          validator: validator,
          era: era,
        };
      }
    },
  );
}
/**
 * Recursively extracts (validator, era) pairs from a call that may pay out
 * staking rewards, unwrapping utility batches and proxy wrappers.
 * Returns [] for calls that cannot trigger a payout.
 */
function determinePayoutCallsArgs(
  causeCall: CallBase<AnyTuple>,
  sender: string,
): [string, number][] {
  if (isPayoutStakers(causeCall)) {
    return [extractArgsFromPayoutStakers(causeCall)];
  } else if (isPayoutStakersByPage(causeCall)) {
    return [extractArgsFromPayoutStakersByPage(causeCall)];
  } else if (isPayoutValidator(causeCall)) {
    return [extractArgsFromPayoutValidator(causeCall, sender)];
  } else if (isBatch(causeCall)) {
    // Flatten results of every inner call. (The previous version ran each
    // result through an identity .map((value, index, array) => value) — a
    // no-op that has been removed.)
    return callsFromBatch(causeCall).flatMap((call) =>
      determinePayoutCallsArgs(call, sender),
    );
  } else if (isProxy(causeCall)) {
    return determinePayoutCallsArgs(callFromProxy(causeCall), sender);
  } else {
    return [];
  }
}
/** `staking.Slashed` — the modern name for the slash event. */
export async function handleSlashed(
  slashEvent: SubstrateEvent<[accountId: Codec, slash: INumber]>,
): Promise<void> {
  return handleSlash(slashEvent);
}
export async function handleSlash(
slashEvent: SubstrateEvent<[accountId: Codec, slash: INumber]>,
): Promise<void> {
await handleSlashForTxHistory(slashEvent);
let accumulatedReward = await updateAccumulatedReward(slashEvent, false);
await updateAccountRewards(
slashEvent,
RewardType.slash,
accumulatedReward.amount,
);
// let slashEventId = eventId(slashEvent)
// try {
// let errorOccursOnEvent = await ErrorEvent.get(slashEventId)
// if (errorOccursOnEvent !== undefined) {
// logger.info(`Skip slashEvent: ${slashEventId}`)
// return;
// }
// await handleSlashForTxHistory(slashEvent)
// await updateAccumulatedReward(slashEvent, false)
// } catch (error) {
// logger.error(`Got error on slash event: ${slashEventId}: ${error.toString()}`)
// let saveError = new ErrorEvent(slashEventId)
// saveError.description = error.toString()
// await saveError.save()
// }
}
/** Validator stash ids that have exposure entries for the given era. */
async function getValidators(era: number): Promise<Set<string>> {
  const exposureStorage = api.query.staking.erasStakersClipped
    ? api.query.staking.erasStakersClipped
    : api.query.staking.erasStakersOverview;
  const keys = await exposureStorage.keys(era);
  return new Set(keys.map((key) => key.args[1].toString()));
}
/**
 * Builds tx-history entries for a staking slash event. Slashes are applied
 * `slashDeferDuration` eras after the offence, so the era the slash belongs
 * to is computed by stepping back from the current era.
 */
async function handleSlashForTxHistory(
  slashEvent: SubstrateEvent,
): Promise<void> {
  let element = await HistoryElement.get(eventId(slashEvent));
  if (element !== undefined) {
    // already processed reward previously
    return;
  }
  const eraWrapped = await api.query.staking.currentEra();
  const currentEra = Number(eraWrapped.toString());
  const slashDeferDuration = api.consts.staking.slashDeferDuration;
  let validatorsSet = new Set();
  // Chains without deferred slashing apply the slash in the current era.
  const slashEra = !slashDeferDuration
    ? currentEra
    : currentEra - Number(slashDeferDuration);
  if (
    api.query.staking.erasStakersOverview ||
    api.query.staking.erasStakersClipped
  ) {
    validatorsSet = await getValidators(slashEra);
  }
  const initialValidator: any = null;
  await buildRewardEvents(
    slashEvent.block,
    slashEvent.extrinsic,
    slashEvent.event.method,
    slashEvent.event.section,
    {},
    initialValidator,
    // Accumulator = most recently seen validator account.
    // NOTE(review): assumes nominator slash events follow their validator's
    // own slash event within the block — confirm.
    (currentValidator, eventAccount) => {
      return validatorsSet.has(eventAccount) ? eventAccount : currentValidator;
    },
    (validator, eventIdx, stash, amount) => {
      return {
        eventIdx: eventIdx,
        amount: amount,
        isReward: false,
        stash: stash,
        validator: validator,
        era: slashEra,
      };
    },
  );
}
/**
 * Scans the block's events for (eventSection, eventMethod) matches and writes
 * one HistoryElement per match, threading an accumulator through the events
 * so the produced Reward payload can be attributed (to a payout call index,
 * a validator, etc.).
 *
 * Fix: the method/section parameters were typed with the boxed `String`
 * wrapper; they now use the `string` primitive (callers already pass
 * primitives, so this is backward-compatible).
 *
 * @param accountsMapping stash address -> reward destination address
 * @param produceNewAccumulator folds each event's account into the accumulator
 * @param produceReward builds the Reward payload stored on the element
 */
async function buildRewardEvents<A>(
  block: SubstrateBlock,
  extrinsic: SubstrateExtrinsic | undefined,
  eventMethod: string,
  eventSection: string,
  accountsMapping: { [address: string]: string },
  initialInnerAccumulator: A,
  produceNewAccumulator: (currentAccumulator: A, eventAccount: string) => A,
  produceReward: (
    currentAccumulator: A,
    eventIdx: number,
    stash: string,
    amount: string,
  ) => Reward,
) {
  const blockNumber = block.block.header.number.toString();
  const blockTimestamp = timestamp(block);
  let innerAccumulator = initialInnerAccumulator;
  for (let eventIndex = 0; eventIndex < block.events.length; eventIndex++) {
    const eventRecord = block.events[eventIndex];
    if (
      !(
        eventRecord.event.method === eventMethod &&
        eventRecord.event.section === eventSection
      )
    )
      continue;
    const [account, amount] = decodeDataFromReward(
      eventRecordToSubstrateEvent(eventRecord),
    );
    innerAccumulator = produceNewAccumulator(
      innerAccumulator,
      account.toString(),
    );
    const eventId = eventIdFromBlockAndIdx(blockNumber, eventIndex.toString());
    const accountAddress = account.toString();
    // Store the row under the address that actually received the funds when
    // a reward-destination mapping is known.
    const destinationAddress = accountsMapping[accountAddress];
    const element = new HistoryElement(
      eventId,
      block.block.header.number.toNumber(),
      blockTimestamp,
      destinationAddress !== undefined ? destinationAddress : accountAddress,
    );
    if (extrinsic !== undefined) {
      element.extrinsicHash = extrinsic.extrinsic.hash.toString();
      element.extrinsicIdx = extrinsic.idx;
    }
    element.reward = produceReward(
      innerAccumulator,
      eventIndex,
      accountAddress,
      amount.toString(),
    );
    await element.save();
  }
}
/** Folds a direct-staking reward/slash into the account's AccumulatedReward. */
async function updateAccumulatedReward(
  event: SubstrateEvent,
  isReward: boolean,
): Promise<AccumulatedReward> {
  const [accountId, amount] = decodeDataFromReward(event);
  return updateAccumulatedGenericReward(
    AccumulatedReward,
    accountId.toString(),
    (amount as unknown as Balance).toBigInt(),
    isReward,
  );
}
async function updateAccountRewards(
event: SubstrateEvent,
rewardType: RewardType,
accumulatedAmount: bigint,
): Promise<void> {
let [accountId, amount] = decodeDataFromReward(event);
const accountAddress = accountId.toString();
let id = eventIdWithAddress(event, accountAddress);
let accountReward = new AccountReward(
id,
accountAddress,
blockNumber(event),
timestamp(event.block),
(amount as unknown as Balance).toBigInt(),
accumulatedAmount,
rewardType,
);
await accountReward.save();
}
/** Records a parachain (collator) staking reward in the account's history. */
async function handleParachainRewardForTxHistory(
  rewardEvent: SubstrateEvent,
): Promise<void> {
  const [account, amount] = decodeDataFromReward(rewardEvent);
  // Await so persistence failures surface to the caller — the promise was
  // previously dropped.
  await handleGenericForTxHistory(
    rewardEvent,
    account.toString(),
    async (element: HistoryElement) => {
      const eraIndex = await cachedStakingRewardEraIndex(rewardEvent);
      // NOTE(review): this picks the FIRST event in the block with the same
      // section/method and reads its first data field as the validator id —
      // confirm this holds when a block contains several reward events.
      const validatorEvent = rewardEvent.block.events.find(
        (event) =>
          event.event.section == rewardEvent.event.section &&
          event.event.method == rewardEvent.event.method,
      );
      const validatorId = validatorEvent?.event.data[0].toString();
      element.reward = {
        eventIdx: rewardEvent.idx,
        amount: amount.toString(),
        isReward: true,
        stash: account.toString(),
        validator: validatorId,
        era: eraIndex,
      };
      return element;
    },
  );
}
export async function handleParachainRewarded(
rewardEvent: SubstrateEvent<[accountId: Codec, reward: INumber]>,
): Promise<void> {
await handleParachainRewardForTxHistory(rewardEvent);
let accumulatedReward = await updateAccumulatedReward(rewardEvent, true);
await updateAccountRewards(
rewardEvent,
RewardType.reward,
accumulatedReward.amount,
);
}
// ============= Mythos ================
export async function handleMythosRewarded(
rewardEvent: SubstrateEvent<[accountId: Codec, reward: INumber]>,
): Promise<void> {
await handleMythosRewardForTxHistory(rewardEvent);
let accumulatedReward = await updateAccumulatedReward(rewardEvent, true);
await updateAccountRewards(
rewardEvent,
RewardType.reward,
accumulatedReward.amount,
);
}
/** Records a Mythos staking reward in the account's tx history. */
async function handleMythosRewardForTxHistory(
  rewardEvent: SubstrateEvent,
): Promise<void> {
  const [account, amount] = decodeDataFromReward(rewardEvent);
  const stash = account.toString();
  await handleGenericForTxHistory(
    rewardEvent,
    stash,
    async (element: HistoryElement) => {
      element.reward = {
        eventIdx: rewardEvent.idx,
        amount: amount.toString(),
        isReward: true,
        stash: stash,
        // Mythos staking rewards are paid manually by the user so each reward
        // aggregates multiple payouts, and it is hard to split it into
        // individual per-session per-validator pieces
        validator: null,
        era: null,
      };
      return element;
    },
  );
}
// ============= GENERICS ================
// Instance side of an accumulated-reward entity: a running bigint total that
// can be persisted.
interface AccumulatedInterface {
  amount: bigint;
  save(): Promise<void>;
}
// Static side of such an entity: constructible from (id, amount) and
// loadable by account address.
interface AccumulatedInterfaceStatic<BaseType extends AccumulatedInterface> {
  new (id: string, amount: bigint): BaseType;
  get(accountAddress: string): Promise<BaseType | undefined>;
}
/**
 * Loads (or creates, zeroed) the account's accumulated-reward record of the
 * given entity type, applies the signed delta (+amount for rewards, -amount
 * for slashes), persists it and returns it.
 */
export async function updateAccumulatedGenericReward<
  AccumulatedRewardType extends AccumulatedInterface,
  AccumulatedRewardClassType extends
    AccumulatedInterfaceStatic<AccumulatedRewardType>,
>(
  AccumulatedRewardTypeObject: AccumulatedRewardClassType,
  accountId: string,
  amount: bigint,
  isReward: boolean,
): Promise<AccumulatedRewardType> {
  const existing = await AccumulatedRewardTypeObject.get(accountId);
  const accumulated =
    existing ?? new AccumulatedRewardTypeObject(accountId, BigInt(0));
  const delta = isReward ? amount : -amount;
  accumulated.amount += delta;
  await accumulated.save();
  return accumulated;
}
/**
 * Builds the common HistoryElement shell (id, block, timestamp, address,
 * extrinsic linkage), lets `fieldCallback` fill in the event-specific field,
 * then persists it.
 */
export async function handleGenericForTxHistory(
  event: SubstrateEvent,
  address: string,
  fieldCallback: (element: HistoryElement) => Promise<HistoryElement>,
): Promise<void> {
  const extrinsic = event.extrinsic;
  const block = event.block;
  const blockNumber = block.block.header.number.toString();
  const blockTimestamp = timestamp(block);
  const eventId = eventIdFromBlockAndIdx(blockNumber, event.idx.toString());
  const element = new HistoryElement(
    eventId,
    block.block.header.number.toNumber(),
    blockTimestamp,
    address,
  );
  if (extrinsic !== undefined) {
    element.extrinsicHash = extrinsic.extrinsic.hash.toString();
    element.extrinsicIdx = extrinsic.idx;
  }
  // Await the save: previously the returned promise was dropped, so write
  // failures were silent and callers could resume before the row existed.
  await (await fieldCallback(element)).save();
}
// Shape shared by per-event reward rows: the event's amount plus the
// account's running total at that point.
// NOTE(review): not referenced in this file's visible code — presumably kept
// as documentation of the entity shape; confirm before removing.
interface AccountRewardsInterface {
  id: string;
  address: string;
  blockNumber: number;
  timestamp: bigint;
  amount: bigint;
  accumulatedAmount: bigint;
  type: RewardType;
  save(): Promise<void>;
}
/**
 * Re-brands a raw EventRecord as a SubstrateEvent so block-scan code can
 * reuse the event-handler helpers. Purely a type-level cast — nothing is
 * converted at runtime.
 */
export function eventRecordToSubstrateEvent(
  eventRecord: EventRecord,
): SubstrateEvent {
  const widened = eventRecord as unknown;
  return widened as SubstrateEvent;
}
/**
 * Extracts [account, amount] from a staking reward/slash event.
 * Early runtimes emitted staking.Reward with 2 fields [accountId, amount];
 * newer Rewarded events carry [accountId, rewardDestination, amount]
 * (https://polkadot.js.org/docs/substrate/events/#rewardedaccountid32-palletstakingrewarddestination-u128),
 * so the amount sits at index 1 or 2 depending on arity.
 */
function decodeDataFromReward(event: SubstrateEvent): [Codec, Codec] {
  const data = event.event.data;
  const amountIndex = data.length == 2 ? 1 : 2;
  return [data[0], data[amountIndex]];
}
+293
View File
@@ -0,0 +1,293 @@
import { Codec } from "@polkadot/types/types";
import { HistoryElement } from "../types";
import { HistoryElementProps } from "../types/models/HistoryElement";
import { SubstrateEvent } from "@subql/types";
import {
blockNumber,
eventId,
calculateFeeAsString,
timestamp,
getEventData,
isEvmTransaction,
isEvmExecutedEvent,
isAssetTxFeePaidEvent,
isSwapExecutedEvent,
eventRecordToSubstrateEvent,
getAssetIdFromMultilocation,
BigIntFromCodec,
convertOrmlCurrencyIdToString,
} from "./common";
// Normalized arguments for createTransfer: the raw event plus the codec
// values already pulled out of its data tuple.
type TransferPayload = {
  event: SubstrateEvent; // event the history row is derived from
  address: Codec; // account this particular row belongs to
  from: Codec; // sender
  to: Codec; // recipient
  amount: Codec; // transferred amount
  suffix: string; // id suffix; callers pass "-from" or "-to"
  assetId?: string; // non-native asset id; omitted for the native token
};
/**
 * Indexes the block's assetConversion.SwapExecuted events, attributing the
 * extrinsic fee (which may itself have been paid via a swap when an
 * AssetTxFeePaid event is present) and excluding the fee/refund swaps so
 * only user-initiated swaps are stored.
 */
export async function handleSwap(event: SubstrateEvent): Promise<void> {
  const [from, to, path, amountIn, amountOut] = getEventData(event);
  let element = await HistoryElement.get(`${eventId(event)}-from`);
  if (element !== undefined) {
    // already processed swap previously
    return;
  }
  let assetIdFee: string;
  let fee: string;
  let foundAssetTxFeePaid = event.block.events.find((e) =>
    isAssetTxFeePaidEvent(eventRecordToSubstrateEvent(e)),
  );
  let swaps = event.block.events.filter((e) =>
    isSwapExecutedEvent(eventRecordToSubstrateEvent(e)),
  );
  if (foundAssetTxFeePaid === undefined) {
    // Fee paid in the native token.
    assetIdFee = "native";
    fee = calculateFeeAsString(event.extrinsic, from.toString());
  } else {
    // Fee paid in an asset: the first SwapExecuted is the fee swap itself,
    // so drop it from the list of user swaps.
    const [who, actualFee, tip, rawAssetIdFee] = getEventData(
      eventRecordToSubstrateEvent(foundAssetTxFeePaid),
    );
    assetIdFee = getAssetIdFromMultilocation(rawAssetIdFee);
    fee = actualFee.toString();
    let {
      event: {
        data: [feeFrom, feeTo, feePath, feeAmountIn, feeAmountOut],
      },
    } = swaps[0];
    swaps = swaps.slice(1);
    if (BigIntFromCodec(actualFee) != BigIntFromCodec(feeAmountIn)) {
      // The fee swap overshot: the last swap in the block refunds the excess.
      // Detect it by amount arithmetic and path symmetry.
      let {
        event: {
          data: [
            refundFrom,
            refundTo,
            refundPath,
            refundAmountIn,
            refundAmountOut,
          ],
        },
      } = swaps[swaps.length - 1];
      if (
        BigIntFromCodec(feeAmountIn) ==
          BigIntFromCodec(actualFee) + BigIntFromCodec(refundAmountOut) &&
        getAssetIdFromMultilocation((feePath as any)[0]) ==
          getAssetIdFromMultilocation(
            (refundPath as any)[(refundPath as any)["length"] - 1],
          )
      ) {
        // NOTE(review): slice(swaps.length - 1) KEEPS only the last (refund)
        // swap rather than removing it; dropping it would be
        // slice(0, swaps.length - 1) — confirm the intended behavior.
        swaps = swaps.slice(swaps.length - 1);
        // TODO: if fee splitted, than we will process the same block two times
      }
    }
  }
  for (const swap of swaps) {
    await processSwap(eventRecordToSubstrateEvent(swap), assetIdFee, fee);
  }
}
/**
 * Stores one executed swap from the sender's perspective and, when the
 * receiver differs, from the receiver's perspective as well.
 */
async function processSwap(
  event: SubstrateEvent,
  assetIdFee: string,
  fee: string,
): Promise<void> {
  const [from, to, path, amountIn, amountOut] = getEventData(event);
  const route = path as any;
  const sender = from.toString();
  const receiver = to.toString();
  const swap = {
    assetIdIn: getAssetIdFromMultilocation(route[0]),
    amountIn: amountIn.toString(),
    assetIdOut: getAssetIdFromMultilocation(route[route["length"] - 1]),
    amountOut: amountOut.toString(),
    sender: sender,
    receiver: receiver,
    assetIdFee: assetIdFee,
    fee: fee,
    eventIdx: event.idx,
    success: true,
  };
  await createAssetTransmission(event, sender, "-from", { swap: swap });
  if (sender != receiver) {
    await createAssetTransmission(event, receiver, "-to", { swap: swap });
  }
}
/** Native balances transfer: one history row each for sender and recipient. */
export async function handleTransfer(event: SubstrateEvent): Promise<void> {
  const [from, to, amount] = getEventData(event);
  const base = { event, from, to, amount };
  await createTransfer({ ...base, address: from, suffix: "-from" });
  await createTransfer({ ...base, address: to, suffix: "-to" });
}
/** assets-pallet transfer: rows for both sides, tagged with the asset id. */
export async function handleAssetTransfer(
  event: SubstrateEvent,
): Promise<void> {
  const [assetId, from, to, amount] = getEventData(event);
  const base = { event, from, to, amount, assetId: assetId.toString() };
  await createTransfer({ ...base, address: from, suffix: "-from" });
  await createTransfer({ ...base, address: to, suffix: "-to" });
}
/** ORML currencies/tokens transfer; currency id is normalized to a string. */
export async function handleOrmlTransfer(event: SubstrateEvent): Promise<void> {
  const [currencyId, from, to, amount] = getEventData(event);
  const base = {
    event,
    from,
    to,
    amount,
    assetId: convertOrmlCurrencyIdToString(currencyId),
  };
  await createTransfer({ ...base, address: from, suffix: "-from" });
  await createTransfer({ ...base, address: to, suffix: "-to" });
}
/** Equilibrium eqBalances transfer; note the asset id comes third here. */
export async function handleEquilibriumTransfer(
  event: SubstrateEvent,
): Promise<void> {
  const [from, to, assetId, amount] = getEventData(event);
  const base = { event, from, to, amount, assetId: assetId.toString() };
  await createTransfer({ ...base, address: from, suffix: "-from" });
  await createTransfer({ ...base, address: to, suffix: "-to" });
}
/** tokens.Transfer uses the ORML event layout. */
export async function handleTokenTransfer(
  event: SubstrateEvent,
): Promise<void> {
  return handleOrmlTransfer(event);
}
/** currencies.Transferred also uses the ORML event layout. */
export async function handleCurrencyTransfer(
  event: SubstrateEvent,
): Promise<void> {
  return handleOrmlTransfer(event);
}
/**
 * Builds the transfer payload shared by native and asset transfers and
 * stores it on a HistoryElement under `transfer` (native) or
 * `assetTransfer` (when an asset id is present).
 */
async function createTransfer({
  event,
  address,
  suffix,
  from,
  to,
  amount,
  assetId = null,
}: TransferPayload) {
  const transfer = {
    amount: amount.toString(),
    from: from.toString(),
    to: to.toString(),
    fee: calculateFeeAsString(event.extrinsic, from.toString()),
    eventIdx: event.idx,
    success: true,
  };
  const data = assetId
    ? { assetTransfer: { ...transfer, assetId: assetId } }
    : { transfer: transfer };
  await createAssetTransmission(event, address, suffix, data);
}
export async function createAssetTransmission(
event: SubstrateEvent,
address: any,
suffix: string,
data: Partial<HistoryElementProps>,
) {
const element = new HistoryElement(
`${eventId(event)}${suffix}`,
blockNumber(event),
timestamp(event.block),
address.toString(),
);
if (event.extrinsic !== undefined) {
if (isEvmTransaction(event.extrinsic.extrinsic.method)) {
const executedEvent = event.extrinsic.events.find(isEvmExecutedEvent);
element.extrinsicHash =
executedEvent?.event.data?.[2]?.toString() ||
event.extrinsic.extrinsic.hash.toString();
} else {
element.extrinsicHash = event.extrinsic.extrinsic.hash.toString();
}
element.extrinsicIdx = event.extrinsic.idx;
}
for (var key in data) {
(element[key as keyof HistoryElementProps] as any) =
data[key as keyof HistoryElementProps];
}
await element.save();
}
+404
View File
@@ -0,0 +1,404 @@
import { SubstrateBlock, SubstrateEvent, TypedEventRecord } from "@subql/types";
import { SubstrateExtrinsic } from "@subql/types";
import { Balance, EventRecord } from "@polkadot/types/interfaces";
import { CallBase } from "@polkadot/types/types/calls";
import { AnyTuple, Codec } from "@polkadot/types/types/codec";
import { Vec, GenericEventData } from "@polkadot/types";
import { INumber } from "@polkadot/types-codec/types/interfaces";
import { u8aToHex } from "@polkadot/util";
// utility-pallet call names that wrap a vector of inner calls
const batchCalls = ["batch", "batchAll", "forceBatch"];
// fund-moving call names shared by balances/assets/ORML-style pallets
const transferCalls = ["transfer", "transferKeepAlive"];
// pallet sections exposing the ORML currency/token transfer interface
const ormlSections = ["currencies", "tokens"];
/** Returns a new array with duplicates removed, preserving first-seen order. */
export function distinct<T>(array: Array<T>): Array<T> {
  return Array.from(new Set(array));
}
/** utility.batch / batchAll / forceBatch? */
export function isBatch(call: CallBase<AnyTuple>): boolean {
  if (call.section != "utility") return false;
  return batchCalls.includes(call.method);
}
/** proxy.proxy wrapper call? */
export function isProxy(call: CallBase<AnyTuple>): boolean {
  const { section, method } = call;
  return section === "proxy" && method === "proxy";
}
/** Native-token transfer: balances.transfer* or currencies.transferNativeCurrency. */
export function isNativeTransfer(call: CallBase<AnyTuple>): boolean {
  if (call.section == "balances") {
    return transferCalls.includes(call.method);
  }
  return (
    call.section == "currencies" && call.method == "transferNativeCurrency"
  );
}
/** assets.transfer / transferKeepAlive? */
export function isAssetTransfer(call: CallBase<AnyTuple>): boolean {
  if (call.section != "assets") return false;
  return transferCalls.includes(call.method);
}
/** Equilibrium's eqBalances.transfer / transferKeepAlive? */
export function isEquilibriumTransfer(call: CallBase<AnyTuple>): boolean {
  if (call.section != "eqBalances") return false;
  return transferCalls.includes(call.method);
}
/** ethereum.transact — an EVM transaction wrapped in a Substrate extrinsic. */
export function isEvmTransaction(call: CallBase<AnyTuple>): boolean {
  const { section, method } = call;
  return section === "ethereum" && method === "transact";
}
/** ethereum.Executed event? */
export function isEvmExecutedEvent(event: TypedEventRecord<Codec[]>): boolean {
  const { section, method } = event.event;
  return section === "ethereum" && method === "Executed";
}
/** assetTxPayment.AssetTxFeePaid event? */
export function isAssetTxFeePaidEvent(event: SubstrateEvent): boolean {
  const { section, method } = event.event;
  return section === "assetTxPayment" && method === "AssetTxFeePaid";
}
/** currencies.Deposited event? */
export function isCurrencyDepositedEvent(event: SubstrateEvent): boolean {
  const { section, method } = event.event;
  return section === "currencies" && method === "Deposited";
}
/** assetConversion.SwapExecuted event? */
export function isSwapExecutedEvent(event: SubstrateEvent): boolean {
  const { section, method } = event.event;
  return section === "assetConversion" && method === "SwapExecuted";
}
/** assetConversion.swapExactTokensForTokens call? */
export function isSwapExactTokensForTokens(call: CallBase<AnyTuple>): boolean {
  const { section, method } = call;
  return section === "assetConversion" && method === "swapExactTokensForTokens";
}
/** assetConversion.swapTokensForExactTokens call? */
export function isSwapTokensForExactTokens(call: CallBase<AnyTuple>): boolean {
  const { section, method } = call;
  return section === "assetConversion" && method === "swapTokensForExactTokens";
}
/** Hydration omnipool.buy call? */
export function isHydraOmnipoolBuy(call: CallBase<AnyTuple>): boolean {
  const { section, method } = call;
  return section === "omnipool" && method === "buy";
}
/** Hydration omnipool.sell call? */
export function isHydraOmnipoolSell(call: CallBase<AnyTuple>): boolean {
  const { section, method } = call;
  return section === "omnipool" && method === "sell";
}
/** Hydration router.buy call? */
export function isHydraRouterBuy(call: CallBase<AnyTuple>): boolean {
  const { section, method } = call;
  return section === "router" && method === "buy";
}
/** Hydration router.sell call? */
export function isHydraRouterSell(call: CallBase<AnyTuple>): boolean {
  const { section, method } = call;
  return section === "router" && method === "sell";
}
/** currencies/tokens transfer or transferKeepAlive? */
export function isOrmlTransfer(call: CallBase<AnyTuple>): boolean {
  if (!ormlSections.includes(call.section)) return false;
  return transferCalls.includes(call.method);
}
/** balances.transferAll? */
export function isNativeTransferAll(call: CallBase<AnyTuple>): boolean {
  const { section, method } = call;
  return section == "balances" && method === "transferAll";
}
/** currencies/tokens transferAll? */
export function isOrmlTransferAll(call: CallBase<AnyTuple>): boolean {
  if (call.method !== "transferAll") return false;
  return ormlSections.includes(call.section);
}
/** Inner calls of a utility batch; the first argument is the Vec<Call>. */
export function callsFromBatch(
  batchCall: CallBase<AnyTuple>,
): CallBase<AnyTuple>[] {
  const [innerCalls] = batchCall.args;
  return innerCalls as Vec<CallBase<AnyTuple>>;
}
/** Wrapped call of proxy.proxy(real, forceProxyType, call) — the third arg. */
export function callFromProxy(
  proxyCall: CallBase<AnyTuple>,
): CallBase<AnyTuple> {
  const [, , wrappedCall] = proxyCall.args;
  return wrappedCall as CallBase<AnyTuple>;
}
/**
 * Unique id for an (event, account) pair: "<block>-<eventIdx>-<address>".
 * Fix: the address parameter was typed with the boxed `String` wrapper;
 * it now uses the `string` primitive (callers pass primitives, so this is
 * backward-compatible).
 */
export function eventIdWithAddress(
  event: SubstrateEvent,
  address: string,
): string {
  return `${eventId(event)}-${address}`;
}
/** Canonical event id: `<blockNumber>-<eventIdx>`. */
export function eventId(event: SubstrateEvent): string {
  return [blockNumber(event), event.idx].join("-");
}
/**
 * Builds an event id `<blockNumber>-<eventIdx>` from pre-stringified parts.
 * Explicit return type added for consistency with the sibling id helpers.
 */
export function eventIdFromBlockAndIdx(
  blockNumber: string,
  eventIdx: string,
): string {
  return `${blockNumber}-${eventIdx}`;
}
/**
 * Builds a per-address event id `<blockNumber>-<eventIdx>-<address>` from
 * pre-stringified parts. Explicit return type added for consistency with
 * the sibling id helpers.
 */
export function eventIdFromBlockAndIdxAndAddress(
  blockNumber: string,
  eventIdx: string,
  address: string,
): string {
  return `${blockNumber}-${eventIdx}-${address}`;
}
/**
 * Index of the extrinsic that emitted the event, as a string.
 * Falls back to the event's own index when the event has no extrinsic.
 */
export function extrinsicIdx(event: SubstrateEvent): string {
  const source = event.extrinsic ? event.extrinsic.idx : event.idx;
  return source.toString();
}
/** Block height of the block the event belongs to, as a JS number. */
export function blockNumber(event: SubstrateEvent): number {
  const header = event.block.block.header;
  return header.number.toNumber();
}
/** Builds an extrinsic id `<blockNumber>-<extrinsicIdx>` from numeric parts. */
export function extrinsicIdFromBlockAndIdx(
  blockNumber: number,
  extrinsicIdx: number,
): string {
  return [blockNumber, extrinsicIdx].map(String).join("-");
}
/**
 * Block timestamp as Unix seconds (bigint); -1 when the block carries no
 * timestamp.
 */
export function timestamp(block: SubstrateBlock): bigint {
  const when = block.timestamp;
  const seconds = when ? when.getTime() / 1000 : -1;
  return BigInt(Math.round(seconds));
}
/**
 * Derives the total fee paid for `extrinsic` as a decimal string.
 *
 * Resolution order (first match wins):
 *  1. `transactionPayment.TransactionFeePaid` (fee + tip) for the signer.
 *  2. Non-zero `balances.Withdraw` for the signer; for EVM transactions the
 *     matching `balances.Deposit` refund is subtracted.
 *  3. Fallback: sum of `balances.Deposit` and `treasury.Deposit` amounts.
 *
 * Returns "0" when no extrinsic is given.
 *
 * @param extrinsic - extrinsic to inspect; undefined for system-only events
 * @param from - overrides the signer address used to match fee events
 */
export function calculateFeeAsString(
  extrinsic?: SubstrateExtrinsic,
  from: string = "",
): string {
  if (extrinsic) {
    const transactionPaymentFee =
      exportFeeFromTransactionFeePaidEvent(extrinsic);
    if (transactionPaymentFee != undefined) {
      return transactionPaymentFee.toString();
    }
    const withdrawFee = exportFeeFromBalancesWithdrawEvent(extrinsic, from);
    if (withdrawFee !== BigInt(0)) {
      // NOTE(review): isEvmTransaction is defined elsewhere in this module —
      // presumably detects ethereum-style calls; confirm its semantics.
      if (isEvmTransaction(extrinsic.extrinsic.method)) {
        const feeRefund = exportFeeRefund(extrinsic, from);
        return feeRefund
          ? (withdrawFee - feeRefund).toString()
          : withdrawFee.toString();
      }
      return withdrawFee.toString();
    }
    let balancesFee = exportFeeFromBalancesDepositEvent(extrinsic);
    let treasureFee = exportFeeFromTreasureDepositEvent(extrinsic);
    let totalFee = balancesFee + treasureFee;
    return totalFee.toString();
  } else {
    return BigInt(0).toString();
  }
}
/** Returns the event's data tuple cast to GenericEventData (no runtime check). */
export function getEventData(event: SubstrateEvent): GenericEventData {
  return event.event.data as GenericEventData;
}
/**
 * Reinterprets a raw EventRecord as a SubstrateEvent via an unchecked double
 * cast. Only the `event` field is actually backed by the record — callers
 * must not rely on block/extrinsic/idx fields being populated here.
 */
export function eventRecordToSubstrateEvent(
  eventRecord: EventRecord,
): SubstrateEvent {
  return eventRecord as unknown as SubstrateEvent;
}
/** Reads a numeric Codec as a bigint (assumes it implements INumber). */
export function BigIntFromCodec(eventRecord: Codec): bigint {
  const numeric = eventRecord as unknown as INumber;
  return numeric.toBigInt();
}
/**
 * SCALE-encodes an ORML currency id and returns it as a 0x-prefixed hex
 * string, usable as a stable token key.
 */
export function convertOrmlCurrencyIdToString(currencyId: Codec): string {
  // make sure first we have scale encoded bytes
  const bytes = currencyId.toU8a();
  // u8aToHex already returns a string; the previous .toString() was redundant.
  return u8aToHex(bytes);
}
/**
 * Amount refunded to the signer via a `balances.Deposit` event, or 0n when
 * no such event targets the signer.
 */
function exportFeeRefund(
  extrinsic: SubstrateExtrinsic,
  from: string = "",
): bigint {
  const signer = from || extrinsic.extrinsic.signer.toString();
  const deposit = extrinsic.events.find(
    (record) =>
      record.event.section == "balances" &&
      record.event.method == "Deposit" &&
      record.event.data[0].toString() === signer,
  );
  if (deposit == undefined) {
    return BigInt(0);
  }
  const fee = deposit.event.data[1];
  return (fee as unknown as Balance).toBigInt();
}
/**
 * Fee taken from the signer via the first `balances.Withdraw` event, or 0n
 * when the event is absent or targets a different account.
 */
function exportFeeFromBalancesWithdrawEvent(
  extrinsic: SubstrateExtrinsic,
  from: string = "",
): bigint {
  const withdraw = extrinsic.events.find(
    (record) =>
      record.event.section == "balances" && record.event.method == "Withdraw",
  );
  if (withdraw === undefined) {
    return BigInt(0);
  }
  const [account, fee] = withdraw.event.data;
  const signer = from || extrinsic.extrinsic.signer.toString();
  if (account.toString() !== signer) {
    return BigInt(0);
  }
  return (fee as unknown as Balance).toBigInt();
}
/**
 * Fee + tip from `transactionPayment.TransactionFeePaid` when the payer is
 * the signer; undefined when the event is absent or paid by someone else.
 */
function exportFeeFromTransactionFeePaidEvent(
  extrinsic: SubstrateExtrinsic,
  from: string = "",
): bigint | undefined {
  const paid = extrinsic.events.find(
    (record) =>
      record.event.section == "transactionPayment" &&
      record.event.method == "TransactionFeePaid",
  );
  if (paid === undefined) {
    return undefined;
  }
  const [account, fee, tip] = paid.event.data;
  const total = (fee as Balance).toBigInt() + (tip as Balance).toBigInt();
  const signer = from || extrinsic.extrinsic.signer.toString();
  return account.toString() === signer ? total : undefined;
}
/** Amount of the first `balances.Deposit` event, or 0n when absent. */
function exportFeeFromBalancesDepositEvent(
  extrinsic: SubstrateExtrinsic,
): bigint {
  const deposit = extrinsic.events.find(
    ({ event }) => event.section == "balances" && event.method == "Deposit",
  );
  if (deposit == undefined) {
    return BigInt(0);
  }
  const fee = deposit.event.data[1];
  return (fee as unknown as Balance).toBigInt();
}
/** Amount of the first `treasury.Deposit` event, or 0n when absent. */
function exportFeeFromTreasureDepositEvent(
  extrinsic: SubstrateExtrinsic,
): bigint {
  const deposit = extrinsic.events.find(
    ({ event }) => event.section == "treasury" && event.method == "Deposit",
  );
  if (deposit == undefined) {
    return BigInt(0);
  }
  const fee = deposit.event.data[0];
  return (fee as unknown as Balance).toBigInt();
}
/**
 * Maps an XCM multilocation to the indexer's asset-id string:
 * `Here` interior -> "native"; non-zero parents -> the full hex encoding;
 * otherwise the X2 junction's GeneralIndex.
 * With `safe` set, malformed input yields undefined instead of throwing.
 */
export function getAssetIdFromMultilocation(
  multilocation: any,
  safe = false,
): string | undefined {
  try {
    const interior = multilocation.interior;
    if (interior.isHere) {
      return "native";
    }
    if (multilocation.parents != "0") {
      return multilocation.toHex();
    }
    return interior.asX2[1].asGeneralIndex.toString();
  } catch (e) {
    if (!safe) {
      throw e;
    }
    return undefined;
  }
}
/**
 * Extracts (account, amount) from a staking reward event. Two-element data
 * is [account, amount]; longer data carries the amount at index 2.
 */
export function getRewardData(event: SubstrateEvent): [Codec, Codec] {
  const data = event.event.data;
  const account = data[0];
  const amount = data.length == 2 ? data[1] : data[2];
  return [account, amount];
}
/**
 * Total fee + tip from a `transactionPayment.TransactionFeePaid` event in
 * `events`, as a decimal string; undefined when no such event exists.
 */
export function extractTransactionPaidFee(
  events: EventRecord[],
): string | undefined {
  const paid = events.find(
    (record) =>
      record.event.section == "transactionPayment" &&
      record.event.method == "TransactionFeePaid",
  );
  if (paid == undefined) {
    return undefined;
  }
  const [, fee, tip] = paid.event.data;
  const total = (fee as Balance).toBigInt() + (tip as Balance).toBigInt();
  return total.toString();
}
+119
View File
@@ -0,0 +1,119 @@
import { SubstrateEvent } from "@subql/types";
import {
BigIntFromCodec,
calculateFeeAsString,
eventId,
eventRecordToSubstrateEvent,
getAssetIdFromMultilocation,
getEventData,
isAssetTxFeePaidEvent,
isSwapExecutedEvent,
} from "../common";
import { HistoryElement } from "../../types";
import { createAssetTransmission } from "../Transfers";
/**
 * Handles `assetConversion.SwapExecuted`: records every SwapExecuted event
 * found in the event's block, after removing swaps that only paid the fee
 * (AssetTxFeePaid) and, when the fee was overpaid, the refund swap.
 * Deduplicates via the HistoryElement id, so re-processing the block is a
 * no-op once the first "-from" element exists.
 */
export async function handleAssetConversionSwap(
  event: SubstrateEvent
): Promise<void> {
  const [from, to, path, amountIn, amountOut] = getEventData(event);
  let element = await HistoryElement.get(`${eventId(event)}-from`);
  if (element !== undefined) {
    // already processed swap previously
    return;
  }
  let assetIdFee: string;
  let fee: string;
  let foundAssetTxFeePaid = event.block.events.find((e) =>
    isAssetTxFeePaidEvent(eventRecordToSubstrateEvent(e))
  );
  let swaps = event.block.events.filter((e) =>
    isSwapExecutedEvent(eventRecordToSubstrateEvent(e))
  );
  if (foundAssetTxFeePaid === undefined) {
    // Fee paid in the native asset via the regular fee pipeline.
    assetIdFee = "native";
    fee = calculateFeeAsString(event.extrinsic, from.toString());
  } else {
    const [who, actualFee, tip, rawAssetIdFee] = getEventData(
      eventRecordToSubstrateEvent(foundAssetTxFeePaid)
    );
    // NOTE(review): getAssetIdFromMultilocation may return undefined; this
    // assignment only type-checks because strictNullChecks is off — confirm
    // a fee multilocation can never be malformed here.
    assetIdFee = getAssetIdFromMultilocation(rawAssetIdFee);
    fee = actualFee.toString();
    // The first SwapExecuted in the block is the fee-payment swap itself.
    let {
      event: {
        data: [feeFrom, feeTo, feePath, feeAmountIn, feeAmountOut],
      },
    } = swaps[0];
    swaps = swaps.slice(1);
    if (BigIntFromCodec(actualFee) != BigIntFromCodec(feeAmountIn)) {
      // Fee swap moved more than the actual fee — check whether the last
      // SwapExecuted is the refund of the surplus back into the fee asset.
      let {
        event: {
          data: [
            refundFrom,
            refundTo,
            refundPath,
            refundAmountIn,
            refundAmountOut,
          ],
        },
      } = swaps[swaps.length - 1];
      const feePathArray = feePath as unknown as any[];
      const refundPathArray = refundPath as unknown as any[];
      if (
        BigIntFromCodec(feeAmountIn) ==
          BigIntFromCodec(actualFee) + BigIntFromCodec(refundAmountOut) &&
        getAssetIdFromMultilocation(feePathArray[0]) ==
          getAssetIdFromMultilocation(
            refundPathArray[refundPathArray["length"] - 1]
          )
      ) {
        // NOTE(review): slice(swaps.length - 1) KEEPS only the last (refund)
        // swap and drops the user swaps; if the intent was to exclude the
        // refund swap, this should be slice(0, -1) — confirm.
        swaps = swaps.slice(swaps.length - 1);
        // TODO: if the fee was split, we will process the same block twice
      }
    }
  }
  for (const e of swaps) {
    await processAssetConversionSwap(
      eventRecordToSubstrateEvent(e),
      assetIdFee,
      fee
    );
  }
}
/**
 * Persists one SwapExecuted event as history entries: a "-from" record for
 * the sender and, when different, a "-to" record for the receiver.
 */
async function processAssetConversionSwap(
  event: SubstrateEvent,
  assetIdFee: string,
  fee: string
): Promise<void> {
  const [from, to, path, amountIn, amountOut] = getEventData(event);
  const pathArray = path as unknown as any[];
  const sender = from.toString();
  const receiver = to.toString();
  const swap = {
    assetIdIn: getAssetIdFromMultilocation(pathArray[0]),
    amountIn: amountIn.toString(),
    assetIdOut: getAssetIdFromMultilocation(pathArray[pathArray.length - 1]),
    amountOut: amountOut.toString(),
    sender,
    receiver,
    assetIdFee,
    fee,
    eventIdx: event.idx,
    success: true,
  };
  await createAssetTransmission(event, sender, "-from", { swap });
  if (sender != receiver) {
    await createAssetTransmission(event, receiver, "-to", { swap });
  }
}
+191
View File
@@ -0,0 +1,191 @@
import { SubstrateEvent, TypedEventRecord } from "@subql/types";
import {
eventId,
eventRecordToSubstrateEvent,
extractTransactionPaidFee,
isCurrencyDepositedEvent,
convertOrmlCurrencyIdToString,
} from "../common";
import { HistoryElement } from "../../types";
import { createAssetTransmission } from "../Transfers";
import { AccountId32 } from "@polkadot/types/interfaces/runtime";
import { u128, u32 } from "@polkadot/types-codec";
import { EventRecord } from "@polkadot/types/interfaces";
import { Codec } from "@polkadot/types/types";
import { INumber } from "@polkadot/types-codec/types/interfaces";
// Data tuple of HydraDX omnipool swap events — presumably BuyExecuted /
// SellExecuted; confirm field order against the runtime metadata.
type OmnipoolSwapArgs = [
  who: AccountId32,
  assetIn: u32,
  assetOut: u32,
  amountIn: u128,
  amountOut: u128,
  assetFeeAmount: u128,
  protocolFeeAmount: u128,
];
// Data tuple of HydraDX router swap events (no account field — the sender
// is taken from the enclosing extrinsic's signer).
type RouterSwapArgs = [
  assetIn: u32,
  assetOut: u32,
  amountIn: u128,
  amountOut: u128,
];
/**
 * Handles a HydraDX omnipool swap event: skips duplicates, system-initiated
 * events, and swaps that are part of a router execution, then records a
 * "-from" history entry for the swapping account.
 */
export async function handleOmnipoolSwap(
  event: SubstrateEvent<OmnipoolSwapArgs>,
): Promise<void> {
  const existing = await HistoryElement.get(`${eventId(event)}-from`);
  if (existing !== undefined) {
    // already processed swap previously
    return;
  }
  const extrinsic = event.extrinsic;
  if (extrinsic == undefined) {
    // TODO we dont yet process swap events that were initiated by the system and not by the user
    // Example: https://hydradx.subscan.io/block/4361343?tab=event&event=4361343-27
    return;
  }
  if (isPartOfRouterSwap(extrinsic.events)) {
    // TODO: we currently don't support swaps in batch
    return;
  }
  const fee = findHydraDxFeeTyped(extrinsic.events);
  const [who, assetIn, assetOut, amountIn, amountOut] = event.event.data;
  const sender = who.toString();
  const swap = {
    assetIdIn: convertHydraDxTokenIdToString(assetIn),
    amountIn: amountIn.toString(),
    assetIdOut: convertHydraDxTokenIdToString(assetOut),
    amountOut: amountOut.toString(),
    sender,
    receiver: sender,
    assetIdFee: fee.tokenId,
    fee: fee.amount,
    eventIdx: event.idx,
    success: true,
  };
  logger.info(
    `Constructed omnipool swap ${JSON.stringify(swap)} for block ${event.block.block.header.number.toString()}`,
  );
  await createAssetTransmission(event, sender, "-from", { swap });
}
/**
 * Handles a HydraDX router swap event: skips duplicates and extrinsic-less
 * events, attributes the swap to the extrinsic's signer, and records a
 * "-from" history entry.
 */
export async function handleHydraRouterSwap(
  event: SubstrateEvent<RouterSwapArgs>,
): Promise<void> {
  const existing = await HistoryElement.get(`${eventId(event)}-from`);
  if (existing !== undefined) {
    return;
  }
  const extrinsic = event.extrinsic;
  if (extrinsic == undefined) {
    return;
  }
  const signer = extrinsic.extrinsic.signer.toString();
  const fee = findHydraDxFeeTyped(extrinsic.events);
  const [assetIn, assetOut, amountIn, amountOut] = event.event.data;
  const swap = {
    assetIdIn: convertHydraDxTokenIdToString(assetIn),
    amountIn: amountIn.toString(),
    assetIdOut: convertHydraDxTokenIdToString(assetOut),
    amountOut: amountOut.toString(),
    sender: signer,
    receiver: signer,
    assetIdFee: fee.tokenId,
    fee: fee.amount,
    eventIdx: event.idx,
    success: true,
  };
  logger.info(
    `Constructed router swap ${JSON.stringify(swap)} for block ${event.block.block.header.number.toString()}`,
  );
  await createAssetTransmission(event, signer, "-from", { swap });
}
/** Fee paid for a HydraDX swap: token identifier plus amount, both as strings. */
export type Fee = {
  tokenId: string;
  amount: string;
};
/** Adapter: accepts SubQuery's typed event records and delegates to findHydraDxFee. */
export function findHydraDxFeeTyped(events: TypedEventRecord<Codec[]>[]): Fee {
  return findHydraDxFee(events as EventRecord[]);
}
/**
 * Determines the fee of a HydraDX extrinsic: the last `currencies.Deposited`
 * event when the fee was paid in a non-native token, otherwise the native
 * transactionPayment fee.
 */
export function findHydraDxFee(events: EventRecord[]): Fee {
  const lastDeposit = findLastEvent(events, (record) =>
    isCurrencyDepositedEvent(eventRecordToSubstrateEvent(record)),
  );
  if (lastDeposit == undefined) {
    return findNativeFee(events);
  }
  const [currencyId, , amount] = lastDeposit.event.data;
  return {
    tokenId: convertHydraDxTokenIdToString(currencyId),
    amount: (amount as INumber).toString(),
  };
}
/** True when any event in the list is a router Executed/RouteExecuted event. */
function isPartOfRouterSwap(events: TypedEventRecord<Codec[]>[]): boolean {
  return (events as EventRecord[]).some(
    ({ event }) =>
      event.section == "router" &&
      (event.method == "Executed" || event.method == "RouteExecuted"),
  );
}
/** Native-token fee from transactionPayment events; "0" when none was paid. */
function findNativeFee(events: EventRecord[]): Fee {
  return {
    tokenId: "native",
    amount: extractTransactionPaidFee(events) ?? "0",
  };
}
/**
 * Maps a HydraDX token id to the indexer's asset key: id "0" is the native
 * token; any other id is SCALE-hex encoded via the ORML helper.
 */
export function convertHydraDxTokenIdToString(hydraDxTokenId: Codec): string {
  return hydraDxTokenId.toString() == "0"
    ? "native"
    : convertOrmlCurrencyIdToString(hydraDxTokenId);
}
/** Last event record matching `expression`, or undefined when none matches. */
function findLastEvent(
  events: EventRecord[],
  expression: (event: EventRecord) => boolean,
): EventRecord | undefined {
  for (let i = events.length - 1; i >= 0; i--) {
    if (expression(events[i])) {
      return events[i];
    }
  }
  return undefined;
}
+4
View File
@@ -0,0 +1,4 @@
//Exports all handler functions
export * from "./AssetConversion";
export * from "./HydraDx";
import "@polkadot/api-augment";
+23
View File
@@ -0,0 +1,23 @@
{
"compilerOptions": {
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"esModuleInterop": true,
"declaration": true,
"importHelpers": true,
"resolveJsonModule": true,
"module": "commonjs",
"outDir": "dist",
"rootDir": ".",
"target": "es2020",
"skipLibCheck": true,
"strictNullChecks": false,
"strict": true
},
"include": [
"src/**/*",
"node_modules/@subql/types-core/dist/global.d.ts",
"node_modules/@subql/types/dist/global.d.ts",
"chainTypes/**/*"
]
}
+12285
View File
File diff suppressed because it is too large Load Diff