Add mobymask-v2-watcher with integrated relay and peer functionality (#327)

* Add an option to run relay node with watcher server

* Add a v2 mobymask-watcher

* Add an option to run peer node with watcher server

* Ignore ts error when importing types from peer package

* Rename mobymask v2 watcher

* Parse mobymask libp2p messages

* Refactor and add comments

* Add a note in peer package about nodejs exports

* Update copyright
prathamesh0 2023-02-20 18:09:06 +05:30 committed by GitHub
parent 054600ccc4
commit 6fa3ee28b5
63 changed files with 5377 additions and 19 deletions

View File

@ -2,6 +2,7 @@
// Copyright 2022 Vulcanize, Inc.
//
import debug from 'debug';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import 'reflect-metadata';
@ -23,11 +24,17 @@ import {
startGQLMetricsServer,
EventWatcher,
GraphWatcherInterface,
Config
Config,
P2PConfig
} from '@cerc-io/util';
import { TypeSource } from '@graphql-tools/utils';
// @ts-expect-error https://github.com/microsoft/TypeScript/issues/49721#issuecomment-1319854183
import { RelayNodeInit, PeerIdObj } from '@cerc-io/peer';
import { BaseCmd } from './base';
import { readPeerId } from './utils/index';
const log = debug('vulcanize:server');
interface Arguments {
configFile: string;
@ -93,7 +100,8 @@ export class ServerCmd {
async exec (
createResolvers: (indexer: IndexerInterface, eventWatcher: EventWatcher) => Promise<any>,
typeDefs: TypeSource
typeDefs: TypeSource,
parseLibp2pMessage?: (peerId: string, data: any) => void
): Promise<{
app: Application,
server: ApolloServer
@ -122,9 +130,59 @@ export class ServerCmd {
await startGQLMetricsServer(config);
const p2pConfig = config.server.p2p;
// Start P2P nodes if config provided
if (p2pConfig) {
await this._startP2PNodes(p2pConfig, parseLibp2pMessage);
}
return { app, server };
}
async _startP2PNodes (
p2pConfig: P2PConfig,
parseLibp2pMessage?: (peerId: string, data: any) => void
): Promise<void> {
const { createRelayNode, Peer } = await import('@cerc-io/peer');
const { RELAY_DEFAULT_HOST, RELAY_DEFAULT_PORT, RELAY_DEFAULT_MAX_DIAL_RETRY } = await import('@cerc-io/peer');
// Run the relay node if enabled
if (p2pConfig.enableRelay) {
const relayConfig = p2pConfig.relay;
assert(relayConfig, 'Relay config not set');
let peerIdObj: PeerIdObj | undefined;
if (relayConfig.peerIdFile) {
peerIdObj = readPeerId(relayConfig.peerIdFile);
}
const relayNodeInit: RelayNodeInit = {
host: relayConfig.host ?? RELAY_DEFAULT_HOST,
port: relayConfig.port ?? RELAY_DEFAULT_PORT,
announceDomain: relayConfig.announce,
relayPeers: relayConfig.relayPeers ?? [],
maxDialRetry: relayConfig.maxDialRetry ?? RELAY_DEFAULT_MAX_DIAL_RETRY,
peerIdObj
};
await createRelayNode(relayNodeInit);
}
// Run a peer node if enabled
if (p2pConfig.enablePeer) {
const peer = new Peer(p2pConfig.relayMultiaddr, true);
await peer.init();
peer.subscribeTopic(p2pConfig.pubSubTopic, (peerId, data) => {
if (parseLibp2pMessage) {
parseLibp2pMessage(peerId.toString(), data);
}
});
log(`Peer ID: ${peer.peerId?.toString()}`);
}
}
_getArgv (): any {
return yargs(hideBin(process.argv))
.option('f', {
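
For context, here is a minimal sketch of a `parseLibp2pMessage` handler that a downstream watcher could pass to `ServerCmd.exec`; the `{ kind, message }` payload shape is an assumption for illustration only, not the actual MobyMask message format.

```ts
import debug from 'debug';

const log = debug('vulcanize:libp2p-utils');

// Sketch matching the (peerId: string, data: any) => void signature expected by ServerCmd.exec.
// The { kind, message } shape below is assumed; adjust it to the real payload.
export const parseLibp2pMessage = (peerId: string, data: any): void => {
  log(`[${peerId}] received message`);

  const { kind, message } = data ?? {};
  log(`kind: ${kind}`);
  log(`payload: ${JSON.stringify(message)}`);
};
```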

View File

@ -0,0 +1,17 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import fs from 'fs';
import path from 'path';
// @ts-expect-error https://github.com/microsoft/TypeScript/issues/49721#issuecomment-1319854183
import { PeerIdObj } from '@cerc-io/peer';
export function readPeerId (filePath: string): PeerIdObj {
const peerIdFilePath = path.resolve(filePath);
console.log(`Reading peer id from file ${peerIdFilePath}`);
const peerIdJson = fs.readFileSync(peerIdFilePath, 'utf-8');
return JSON.parse(peerIdJson);
}
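
For reference, a sketch of the JSON shape this helper is expected to read; the authoritative definition is `PeerIdObj` in `@cerc-io/peer`, and the field names below (the standard js-libp2p peer-id JSON format) are an assumption here.

```ts
// Assumed shape of the peer id JSON file; see PeerIdObj in @cerc-io/peer for the actual type.
interface ExamplePeerIdJson {
  id: string;      // peer id string
  privKey: string; // base64-encoded private key
  pubKey: string;  // base64-encoded public key
}

// Usage with the helper above (file path is an assumption):
// const peerIdObj = readPeerId('./relay-peer-id.json');
```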

View File

@ -0,0 +1,2 @@
# Don't lint build output.
dist

View File

@ -0,0 +1,27 @@
{
"env": {
"browser": true,
"es2021": true
},
"extends": [
"semistandard",
"plugin:@typescript-eslint/recommended"
],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 12,
"sourceType": "module"
},
"plugins": [
"@typescript-eslint"
],
"rules": {
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/explicit-module-boundary-types": [
"warn",
{
"allowArgumentsExplicitlyTypedAsAny": true
}
]
}
}

View File

@ -0,0 +1,281 @@
# MobyMask v2 Watcher
First, try the [mobymask demo in stack orchestrator](https://github.com/cerc-io/stack-orchestrator/tree/main/app/data/stacks/mobymask) to get started quickly. Advanced users can refer to [these instructions](/docs/README.md) for setting up a local environment by hand.
## Setup
Run the following command to install required packages:
```bash
yarn && yarn build
```
If the watcher is "lazy", run the server:
```bash
yarn server
```
GQL console: http://localhost:3010/graphql
If the watcher is "active", run the job-runner:
```bash
yarn job-runner
```
then the server:
```bash
yarn server
```
Next, clone the MobyMask repo and checkout this branch:
```bash
git clone https://github.com/cerc-io/MobyMask && cd MobyMask
git checkout use-laconic-watcher-as-hosted-index
```
Install the packages:
```bash
yarn
```
Deploy the contract:
```bash
cd packages/hardhat
yarn deploy
# deploying "PhisherRegistry" (tx: 0xaebeb2e883ece1f679304ec46f5dc61ca74f9e168427268a7dfa8802195b8de0)...: deployed at 0xMobyAddress with 2306221 gas
# $ hardhat run scripts/publish.js
# ✅ Published contracts to the subgraph package.
# Done in 14.28s.
```
Export the address of the deployed contract to a shell variable for later use:
```bash
export MOBY_ADDRESS="0xMobyAddress"
```
Run the following GQL mutation at the watcher GraphQL endpoint http://127.0.0.1:3010/graphql:
```graphql
mutation {
watchContract(
address: "MOBY_ADDRESS"
kind: "PhisherRegistry"
checkpoint: true
)
}
```
Get the latest block:
```graphql
query {
latestBlock {
hash
number
}
}
```
Run the following GQL query at the GraphQL endpoint:
```graphql
query {
isPhisher(
blockHash: "LATEST_BLOCK_HASH"
contractAddress: "MOBY_ADDRESS"
key0: "TWT:phishername"
) {
value
proof {
data
}
}
isMember(
blockHash: "LATEST_BLOCK_HASH"
contractAddress: "MOBY_ADDRESS"
key0: "TWT:membername"
) {
value
proof {
data
}
}
}
```
Run the following GQL subscription at the generated watcher GraphQL endpoint:
```graphql
subscription {
onEvent {
event {
__typename
... on PhisherStatusUpdatedEvent {
entity
isPhisher
},
... on MemberStatusUpdatedEvent {
entity
isMember
}
},
block {
number
hash
}
}
}
```
Update the isPhisher and isMember lists with names:
```bash
yarn claimPhisher --contract $MOBY_ADDRESS --name phisherName
```
```bash
yarn claimMember --contract $MOBY_ADDRESS --name memberName
```
- The events should be visible in the subscription at the GQL endpoint; note down the event blockHash from the result. (A programmatic way to watch these events is sketched after this list.)
- The isMember and isPhisher lists should be indexed. Check the `is_phisher` and `is_member` tables in the database (mobymask-v2-watcher); there should be entries at the event blockHash with value `true`. The data is indexed in the `handleEvent` method in the [hooks file](./src/hooks.ts).
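A minimal sketch of watching these events programmatically with the generated client added in this commit (`src/client.ts`); the endpoint values follow the default local config and the `GraphQLConfig` field names are assumptions:

```ts
import { Client } from './src/client';

const main = async (): Promise<void> => {
  // Watcher GraphQL endpoints (assumed from the default local config).
  const client = new Client({
    gqlEndpoint: 'http://127.0.0.1:3010/graphql',
    gqlSubscriptionEndpoint: 'ws://127.0.0.1:3010/graphql'
  });

  // Log each event pushed over the onEvent subscription; note the block hash for the queries below.
  const subscription = await client.watchEvents((value) => {
    console.log(`Got ${value.event.__typename} at block ${value.block.number} (${value.block.hash})`);
  });

  // Call subscription.unsubscribe() when done.
};

main().catch(console.error);
```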
Update the previous query with the event blockHash and check isPhisher and isMember in the GraphQL playground:
```graphql
query {
isPhisher(
blockHash: "EVENT_BLOCK_HASH"
contractAddress: "MOBY_ADDRESS",
key0: "TWT:phishername"
) {
value
proof {
data
}
}
isMember(
blockHash: "EVENT_BLOCK_HASH"
contractAddress: "MOBY_ADDRESS",
key0: "TWT:membername"
) {
value
proof {
data
}
}
}
```
The data is fetched from the watcher database as it has already been indexed.
## Additional Commands
To watch a contract, run:
```bash
yarn watch:contract --address <contract-address> --kind <contract-kind> --checkpoint <true | false> --starting-block [block-number]
```
where:
- `address`: Address or identifier of the contract to be watched.
- `kind`: Kind of the contract.
- `checkpoint`: Turn checkpointing on (`true` | `false`).
- `starting-block`: Starting block for the contract (default: `1`).
Examples:
Watch a contract with its address and checkpointing on:
```bash
yarn watch:contract --address 0x1F78641644feB8b64642e833cE4AFE93DD6e7833 --kind ERC20 --checkpoint true
```
Watch a contract with its identifier and checkpointing on:
```bash
yarn watch:contract --address MyProtocol --kind protocol --checkpoint true
```
To fill a block range:
```bash
yarn fill --start-block <from-block> --end-block <to-block>
```
* `start-block`: Block number to start filling from.
* `end-block`: Block number till which to fill.
To create a checkpoint for a contract:
```bash
yarn checkpoint create --address <contract-address> --block-hash [block-hash]
```
* `address`: Address or identifier of the contract for which to create a checkpoint.
* `block-hash`: Hash of a block (in the pruned region) at which to create the checkpoint (default: latest canonical block hash).
To reset the watcher to a previous block number:
```bash
yarn reset watcher --block-number <previous-block-number>
```
Reset job-queue:
```bash
yarn reset job-queue
```
Reset state:
```bash
yarn reset state --block-number <previous-block-number>
```
* `block-number`: Block number to which to reset the watcher.
To export and import the watcher state:
In the source watcher, export watcher state:
```bash
yarn export-state --export-file [export-file-path] --block-number [snapshot-block-height]
```
* `export-file`: Path of file to which to export the watcher data.
* `block-number`: Block height at which to take snapshot for export.
In the target watcher, run job-runner:
```bash
yarn job-runner
```
Import watcher state:
```bash
yarn import-state --import-file <import-file-path>
```
* `import-file`: Path of file from which to import the watcher data.
Run server:
```bash
yarn server
```
To inspect a CID:
```bash
yarn inspect-cid --cid <cid>
```
* `cid`: CID to be inspected.

View File

@ -0,0 +1,68 @@
[server]
host = "127.0.0.1"
port = 3010
kind = "active"
# Checkpointing state.
checkpointing = true
# Checkpoint interval in number of blocks.
checkpointInterval = 2000
# Enable state creation
enableState = true
# Boolean to filter logs by contract.
filterLogs = true
# Max block range for which to return events in eventsInRange GQL query.
# Use -1 for skipping check on block range.
maxEventsBlockRange = -1
[server.p2p]
enablePeer = true
relayMultiaddr = ''
pubSubTopic = 'mobymask'
enableRelay = true
[server.p2p.relay]
host = "127.0.0.1"
port = 9090
relayPeers = []
peerIdFile = ''
[metrics]
host = "127.0.0.1"
port = 9000
[metrics.gql]
port = 9001
[database]
type = "postgres"
host = "localhost"
port = 5432
database = "mobymask-v2-watcher"
username = "postgres"
password = "postgres"
synchronize = true
logging = false
[upstream]
[upstream.ethServer]
gqlApiEndpoint = "http://127.0.0.1:8082/graphql"
rpcProviderEndpoint = "http://127.0.0.1:8081"
[upstream.cache]
name = "requests"
enabled = false
deleteOnStart = false
[jobQueue]
dbConnectionString = "postgres://postgres:postgres@localhost/mobymask-v2-watcher-job-queue"
maxCompletionLagInSecs = 300
jobDelayInMilliSecs = 100
eventsInBatch = 50
blockDelayInMilliSecs = 2000
prefetchBlocksInMem = true
prefetchBlockCount = 10
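
A minimal sketch of how the `[server.p2p]` settings above are consumed, mirroring the `ServerCmd` changes in this commit; the config file path is an assumption:

```ts
import { getConfig, Config } from '@cerc-io/util';

const main = async (): Promise<void> => {
  // Path to the config file above (assumption).
  const config: Config = await getConfig('environments/local.toml');
  const p2pConfig = config.server.p2p;

  if (p2pConfig?.enableRelay) {
    console.log(`Relay node listens on ${p2pConfig.relay?.host}:${p2pConfig.relay?.port}`);
  }

  if (p2pConfig?.enablePeer) {
    console.log(`Peer node dials relay ${p2pConfig.relayMultiaddr} and subscribes to '${p2pConfig.pubSubTopic}'`);
  }
};

main().catch(console.error);
```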

View File

@ -0,0 +1,218 @@
# Index missing blocks with eth-statediff-service
Follow this readme to index required blocks out of order for a contract. The watcher can then use the indexed data.
* For indexing the required blocks, the following core services will be used:
* [ipld-eth-db](https://github.com/vulcanize/ipld-eth-db)
* Run ipld-eth-db database using docker:
```bash
docker-compose -f docker-compose.yml up
```
* [leveldb-ethdb-rpc](https://github.com/vulcanize/leveldb-ethdb-rpc)
It is an RPC wrapper around LevelDB. The endpoint can be used by eth-statediff-service to access LevelDB.
* [eth-statediff-service](https://github.com/vulcanize/eth-statediff-service)
* The [config file](https://github.com/vulcanize/eth-statediff-service/blob/sharding/environments/config.toml) can be updated with the following for running eth-statediff-service:
```toml
[leveldb]
mode = "remote"
# leveldb-ethdb-rpc endpoint
url = "http://127.0.0.1:8082/"
[server]
httpPath = "0.0.0.0:8545"
[statediff]
prerun = false
serviceWorkers = 2
workerQueueSize = 1024
trieWorkers = 16
[log]
level = "info"
[database]
# Credentials for ipld-eth-db database
name = "vulcanize_testing"
hostname = "localhost"
port = 8077
user = "vdbm"
password = "password"
type = "postgres"
driver = "sqlx"
[cache]
database = 1024
trie = 4096
[ethereum]
# Config for mainnet
nodeID = "1"
clientName = "eth-statediff-service"
networkID = 1
chainID = 1
```
* Run eth-statediff-service:
```bash
make build && ./eth-statediff-service serve --config environments/config.toml
```
* Indexing required blocks can be done in the following way:
* Call the `writeStateDiffAt` API with watched addresses for the required blocks:
```bash
# Replace $BLOCK_NUMBER with required block number to index and $CONTRACT_ADDRESS with the contract of interest.
curl -X POST -H 'Content-Type: application/json' --data '{"jsonrpc":"2.0","method":"statediff_writeStateDiffAt","params":[$BLOCK_NUMBER, {"intermediateStateNodes":true,"intermediateStorageNodes":true,"includeBlock":true,"includeReceipts":true,"includeTD":true,"includeCode":true,"watchedAddresses":["$CONTRACT_ADDRESS"]}],"id":1}' "127.0.0.1":"8545"
```
Example for indexing [mainnet MobyMask blocks](https://etherscan.io/address/0xb06e6db9288324738f04fcaac910f5a60102c1f8):
- 14869713
- 14875233
- 14876405
- 14884873
- 14885755
```bash
curl -X POST -H 'Content-Type: application/json' --data '{"jsonrpc":"2.0","method":"statediff_writeStateDiffAt","params":[14869713, {"intermediateStateNodes":true,"intermediateStorageNodes":true,"includeBlock":true,"includeReceipts":true,"includeTD":true,"includeCode":true,"watchedAddresses":["0xB06E6DB9288324738f04fCAAc910f5A60102C1F8"]}],"id":1}' "127.0.0.1":"8545"
```
After `writeStateDiffAt` completes successfully for a block, the returned response is:
```bash
curl: (52) Empty reply from server
```
**NOTE**: Using a remote leveldb-ethdb-rpc takes a long time (6-20 minutes).
* Stop the eth-statediff-service after all required blocks are indexed.
* Start the [ipld-eth-server](https://github.com/vulcanize/ipld-eth-server) to query the indexed data from the watcher.
* Create the following config.toml file for ipld-eth-server in [environments directory](https://github.com/vulcanize/ipld-eth-server/tree/sharding/environments):
```toml
[database]
# Credentials for ipld-eth-db database
name = "vulcanize_testing" # $DATABASE_NAME
hostname = "localhost" # $DATABASE_HOSTNAME
port = 8077 # $DATABASE_PORT
user = "vdbm" # $DATABASE_USER
password = "password" # $DATABASE_PASSWORD
[log]
level = "info" # $LOGRUS_LEVEL
[ethereum]
# Config for mainnet
chainID = "1" # $ETH_CHAIN_ID
nodeID = "arch1" # $ETH_NODE_ID
clientName = "Geth" # $ETH_CLIENT_NAME
networkID = "1" # $ETH_NETWORK_ID
```
* Run the server with the config above:
```bash
make build && ./ipld-eth-server serve --config=./environments/config.toml --eth-server-graphql --log-level info
```
* The following steps are for indexing blocks out of order in the watcher:
* Follow [steps in the readme](./README.md#setup) to setup the watcher.
* Watch the contract:
```bash
# Replace $CONTRACT_ADDRESS and $CONTRACT_NAME with actual values
yarn watch:contract --address $CONTRACT_ADDRESS --kind $CONTRACT_NAME --checkpoint true
# Example for mobymask-v2-watcher
yarn watch:contract --address 0xB06E6DB9288324738f04fCAAc910f5A60102C1F8 --kind PhisherRegistry --checkpoint true
```
* Index the required blocks. They should be the same blocks indexed by eth-statediff-service above.
```bash
# Replace $BLOCK_NUMBER with required block number to index
yarn index-block --block $BLOCK_NUMBER
```
Example for [mainnet MobyMask blocks](https://etherscan.io/address/0xb06e6db9288324738f04fcaac910f5a60102c1f8) indexed above:
```bash
yarn index-block --block 14869713
```
* Check the `event` and `block_progress` tables to confirm that the required blocks have been indexed properly.
* The watcher can be started to perform queries on the indexed data:
* The watcher can be started in lazy mode:
* Update `server.kind` in [config](./environments/local.toml):
```toml
[server]
kind = "lazy"
```
* Run server:
```bash
yarn server
```
* Run a query at the [GraphQL endpoint](http://127.0.0.1:3010/graphql) to get events in a range. The following query gets events in the range of mainnet blocks indexed for mobymask-v2-watcher:
```graphql
query {
eventsInRange(
# Range for mainnet data blocks
fromBlockNumber: 14869713
toBlockNumber: 14885755
) {
block {
hash
number
}
tx {
hash
}
contract
eventIndex
event {
__typename
}
proof {
data
}
}
}
```
* Run a query to get contract storage variable values. The following query gets the value of the `isMember` variable in the MobyMask contract:
```graphql
query {
isMember(
# BlockHash of an indexed mainnet block that can be taken from the events returned above
blockHash: "0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038"
contractAddress: "0xB06E6DB9288324738f04fCAAc910f5A60102C1F8"
key0: "TWT:danfinlay"
) {
value
proof {
data
}
}
}
```
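The same checks can be scripted with the generated client (a sketch; the import path and the `GraphQLConfig` endpoint fields are assumptions):

```ts
import { Client } from './src/client';

const main = async (): Promise<void> => {
  const client = new Client({
    gqlEndpoint: 'http://127.0.0.1:3010/graphql',
    gqlSubscriptionEndpoint: 'ws://127.0.0.1:3010/graphql'
  });

  // Range of mainnet blocks indexed above.
  const events = await client.getEventsInRange(14869713, 14885755);
  console.log(`Events in range: ${events.length}`);

  // Block hash of an indexed mainnet block (taken from the events returned above).
  const isMember = await client.getIsMember(
    '0x28cb16e740cd5d7de869bee2957e7442790e9d774e6e71804a67933c7e628038',
    '0xB06E6DB9288324738f04fCAAc910f5A60102C1F8',
    'TWT:danfinlay'
  );
  console.log(isMember);
};

main().catch(console.error);
```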

View File

@ -0,0 +1,71 @@
{
"name": "@cerc-io/mobymask-v2-watcher",
"version": "0.2.28",
"description": "mobymask-v2-watcher",
"private": true,
"main": "dist/index.js",
"scripts": {
"lint": "eslint .",
"build": "yarn clean && tsc && yarn copy-assets",
"clean": "rm -rf ./dist",
"copy-assets": "copyfiles -u 1 src/**/*.gql dist/",
"server": "DEBUG=vulcanize:* YARN_CHILD_PROCESS=true node --enable-source-maps dist/server.js",
"server:dev": "DEBUG=vulcanize:* YARN_CHILD_PROCESS=true ts-node src/server.ts",
"job-runner": "DEBUG=vulcanize:* YARN_CHILD_PROCESS=true node --enable-source-maps dist/job-runner.js",
"job-runner:dev": "DEBUG=vulcanize:* YARN_CHILD_PROCESS=true ts-node src/job-runner.ts",
"watch:contract": "DEBUG=vulcanize:* ts-node src/cli/watch-contract.ts",
"fill": "DEBUG=vulcanize:* ts-node src/fill.ts",
"reset": "DEBUG=vulcanize:* ts-node src/cli/reset.ts",
"checkpoint": "DEBUG=vulcanize:* node --enable-source-maps dist/cli/checkpoint.js",
"checkpoint:dev": "DEBUG=vulcanize:* ts-node src/cli/checkpoint.ts",
"export-state": "DEBUG=vulcanize:* node --enable-source-maps dist/cli/export-state.js",
"export-state:dev": "DEBUG=vulcanize:* ts-node src/cli/export-state.ts",
"import-state": "DEBUG=vulcanize:* node --enable-source-maps dist/cli/import-state.js",
"import-state:dev": "DEBUG=vulcanize:* ts-node src/cli/import-state.ts",
"inspect-cid": "DEBUG=vulcanize:* ts-node src/cli/inspect-cid.ts",
"index-block": "DEBUG=vulcanize:* ts-node src/cli/index-block.ts"
},
"repository": {
"type": "git",
"url": "git+https://github.com/cerc-io/watcher-ts.git"
},
"author": "",
"license": "AGPL-3.0",
"bugs": {
"url": "https://github.com/cerc-io/watcher-ts/issues"
},
"homepage": "https://github.com/cerc-io/watcher-ts#readme",
"dependencies": {
"@apollo/client": "^3.3.19",
"@cerc-io/cli": "^0.2.28",
"@cerc-io/ipld-eth-client": "^0.2.28",
"@cerc-io/solidity-mapper": "^0.2.28",
"@cerc-io/util": "^0.2.28",
"@ethersproject/providers": "^5.4.4",
"apollo-type-bigint": "^0.1.3",
"debug": "^4.3.1",
"decimal.js": "^10.3.1",
"ethers": "^5.4.4",
"graphql": "^15.5.0",
"json-bigint": "^1.0.0",
"reflect-metadata": "^0.1.13",
"typeorm": "^0.2.32",
"yargs": "^17.0.1"
},
"devDependencies": {
"@ethersproject/abi": "^5.3.0",
"@types/yargs": "^17.0.0",
"@typescript-eslint/eslint-plugin": "^5.47.1",
"@typescript-eslint/parser": "^5.47.1",
"copyfiles": "^2.4.1",
"eslint": "^7.27.0",
"eslint-config-semistandard": "^15.0.1",
"eslint-config-standard": "^16.0.3",
"eslint-plugin-import": "^2.23.3",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^5.1.0",
"eslint-plugin-standard": "^5.0.0",
"ts-node": "^10.2.1",
"typescript": "^4.3.2"
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,33 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { CreateCheckpointCmd } from '@cerc-io/cli';
import { Database } from '../../database';
import { Indexer } from '../../indexer';
export const command = 'create';
export const desc = 'Create checkpoint';
export const builder = {
address: {
type: 'string',
require: true,
demandOption: true,
describe: 'Contract address to create the checkpoint for.'
},
blockHash: {
type: 'string',
describe: 'Blockhash at which to create the checkpoint.'
}
};
export const handler = async (argv: any): Promise<void> => {
const createCheckpointCmd = new CreateCheckpointCmd();
await createCheckpointCmd.init(argv, Database);
await createCheckpointCmd.initIndexer(Indexer);
await createCheckpointCmd.exec();
};

View File

@ -0,0 +1,39 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import yargs from 'yargs';
import 'reflect-metadata';
import debug from 'debug';
import { DEFAULT_CONFIG_PATH } from '@cerc-io/util';
import { hideBin } from 'yargs/helpers';
const log = debug('vulcanize:checkpoint');
const main = async () => {
return yargs(hideBin(process.argv))
.parserConfiguration({
'parse-numbers': false
}).options({
configFile: {
alias: 'f',
type: 'string',
require: true,
demandOption: true,
describe: 'configuration file path (toml)',
default: DEFAULT_CONFIG_PATH
}
})
.commandDir('checkpoint-cmds', { extensions: ['ts', 'js'], exclude: /([a-zA-Z0-9\s_\\.\-:])+(.d.ts)$/ })
.demandCommand(1)
.help()
.argv;
};
main().then(() => {
process.exit();
}).catch(err => {
log(err);
});

View File

@ -0,0 +1,27 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import 'reflect-metadata';
import debug from 'debug';
import { ExportStateCmd } from '@cerc-io/cli';
import { Database } from '../database';
import { Indexer } from '../indexer';
const log = debug('vulcanize:export-state');
const main = async (): Promise<void> => {
const exportStateCmd = new ExportStateCmd();
await exportStateCmd.init(Database);
await exportStateCmd.initIndexer(Indexer);
await exportStateCmd.exec();
};
main().catch(err => {
log(err);
}).finally(() => {
process.exit(0);
});

View File

@ -0,0 +1,28 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import 'reflect-metadata';
import debug from 'debug';
import { ImportStateCmd } from '@cerc-io/cli';
import { Database } from '../database';
import { Indexer } from '../indexer';
import { State } from '../entity/State';
const log = debug('vulcanize:import-state');
export const main = async (): Promise<any> => {
const importStateCmd = new ImportStateCmd();
await importStateCmd.init(Database);
await importStateCmd.initIndexer(Indexer);
await importStateCmd.exec(State);
};
main().catch(err => {
log(err);
}).finally(() => {
process.exit(0);
});

View File

@ -0,0 +1,27 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import 'reflect-metadata';
import debug from 'debug';
import { IndexBlockCmd } from '@cerc-io/cli';
import { Database } from '../database';
import { Indexer } from '../indexer';
const log = debug('vulcanize:index-block');
const main = async (): Promise<void> => {
const indexBlockCmd = new IndexBlockCmd();
await indexBlockCmd.init(Database);
await indexBlockCmd.initIndexer(Indexer);
await indexBlockCmd.exec();
};
main().catch(err => {
log(err);
}).finally(() => {
process.exit(0);
});

View File

@ -0,0 +1,27 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import 'reflect-metadata';
import debug from 'debug';
import { InspectCIDCmd } from '@cerc-io/cli';
import { Database } from '../database';
import { Indexer } from '../indexer';
const log = debug('vulcanize:inspect-cid');
const main = async (): Promise<void> => {
const inspectCIDCmd = new InspectCIDCmd();
await inspectCIDCmd.init(Database);
await inspectCIDCmd.initIndexer(Indexer);
await inspectCIDCmd.exec();
};
main().catch(err => {
log(err);
}).finally(() => {
process.exit(0);
});

View File

@ -0,0 +1,22 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import debug from 'debug';
import { getConfig, resetJobs, Config } from '@cerc-io/util';
const log = debug('vulcanize:reset-job-queue');
export const command = 'job-queue';
export const desc = 'Reset job queue';
export const builder = {};
export const handler = async (argv: any): Promise<void> => {
const config: Config = await getConfig(argv.configFile);
await resetJobs(config);
log('Job queue reset successfully');
};

View File

@ -0,0 +1,24 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { ResetStateCmd } from '@cerc-io/cli';
import { Database } from '../../database';
export const command = 'state';
export const desc = 'Reset State to a given block number';
export const builder = {
blockNumber: {
type: 'number'
}
};
export const handler = async (argv: any): Promise<void> => {
const resetStateCmd = new ResetStateCmd();
await resetStateCmd.init(argv, Database);
await resetStateCmd.exec();
};

View File

@ -0,0 +1,26 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { ResetWatcherCmd } from '@cerc-io/cli';
import { Database } from '../../database';
import { Indexer } from '../../indexer';
export const command = 'watcher';
export const desc = 'Reset watcher to a block number';
export const builder = {
blockNumber: {
type: 'number'
}
};
export const handler = async (argv: any): Promise<void> => {
const resetWatcherCmd = new ResetWatcherCmd();
await resetWatcherCmd.init(argv, Database);
await resetWatcherCmd.initIndexer(Indexer);
await resetWatcherCmd.exec();
};

View File

@ -0,0 +1,24 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import 'reflect-metadata';
import debug from 'debug';
import { getResetYargs } from '@cerc-io/util';
const log = debug('vulcanize:reset');
const main = async () => {
return getResetYargs()
.commandDir('reset-cmds', { extensions: ['ts', 'js'], exclude: /([a-zA-Z0-9\s_\\.\-:])+(.d.ts)$/ })
.demandCommand(1)
.help()
.argv;
};
main().then(() => {
process.exit();
}).catch(err => {
log(err);
});

View File

@ -0,0 +1,27 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import 'reflect-metadata';
import debug from 'debug';
import { WatchContractCmd } from '@cerc-io/cli';
import { Database } from '../database';
import { Indexer } from '../indexer';
const log = debug('vulcanize:watch-contract');
const main = async (): Promise<void> => {
const watchContractCmd = new WatchContractCmd();
await watchContractCmd.init(Database);
await watchContractCmd.initIndexer(Indexer);
await watchContractCmd.exec();
};
main().catch(err => {
log(err);
}).finally(() => {
process.exit(0);
});

View File

@ -0,0 +1,100 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { gql } from '@apollo/client/core';
import { GraphQLClient, GraphQLConfig } from '@cerc-io/ipld-eth-client';
import { queries, mutations, subscriptions } from './gql';
export class Client {
_config: GraphQLConfig;
_client: GraphQLClient;
constructor (config: GraphQLConfig) {
this._config = config;
this._client = new GraphQLClient(config);
}
async getMultiNonce (blockHash: string, contractAddress: string, key0: string, key1: bigint): Promise<any> {
const { multiNonce } = await this._client.query(
gql(queries.multiNonce),
{ blockHash, contractAddress, key0, key1 }
);
return multiNonce;
}
async _getOwner (blockHash: string, contractAddress: string): Promise<any> {
const { _owner } = await this._client.query(
gql(queries._owner),
{ blockHash, contractAddress }
);
return _owner;
}
async getIsRevoked (blockHash: string, contractAddress: string, key0: string): Promise<any> {
const { isRevoked } = await this._client.query(
gql(queries.isRevoked),
{ blockHash, contractAddress, key0 }
);
return isRevoked;
}
async getIsPhisher (blockHash: string, contractAddress: string, key0: string): Promise<any> {
const { isPhisher } = await this._client.query(
gql(queries.isPhisher),
{ blockHash, contractAddress, key0 }
);
return isPhisher;
}
async getIsMember (blockHash: string, contractAddress: string, key0: string): Promise<any> {
const { isMember } = await this._client.query(
gql(queries.isMember),
{ blockHash, contractAddress, key0 }
);
return isMember;
}
async getEvents (blockHash: string, contractAddress: string, name: string): Promise<any> {
const { events } = await this._client.query(
gql(queries.events),
{ blockHash, contractAddress, name }
);
return events;
}
async getEventsInRange (fromBlockNumber: number, toBlockNumber: number): Promise<any> {
const { eventsInRange } = await this._client.query(
gql(queries.eventsInRange),
{ fromBlockNumber, toBlockNumber }
);
return eventsInRange;
}
async watchContract (contractAddress: string, startingBlock?: number): Promise<any> {
const { watchContract } = await this._client.mutate(
gql(mutations.watchContract),
{ contractAddress, startingBlock }
);
return watchContract;
}
async watchEvents (onNext: (value: any) => void): Promise<ZenObservable.Subscription> {
return this._client.subscribe(
gql(subscriptions.onEvent),
({ data }) => {
onNext(data.onEvent);
}
);
}
}

View File

@ -0,0 +1,352 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import assert from 'assert';
import { Connection, ConnectionOptions, DeepPartial, FindConditions, QueryRunner, FindManyOptions, LessThanOrEqual, EntityTarget } from 'typeorm';
import path from 'path';
import { Database as BaseDatabase, DatabaseInterface, QueryOptions, StateKind, Where } from '@cerc-io/util';
import { Contract } from './entity/Contract';
import { Event } from './entity/Event';
import { SyncStatus } from './entity/SyncStatus';
import { StateSyncStatus } from './entity/StateSyncStatus';
import { BlockProgress } from './entity/BlockProgress';
import { State } from './entity/State';
import { MultiNonce } from './entity/MultiNonce';
import { _Owner } from './entity/_Owner';
import { IsRevoked } from './entity/IsRevoked';
import { IsPhisher } from './entity/IsPhisher';
import { IsMember } from './entity/IsMember';
export const ENTITIES = [_Owner, IsMember, IsPhisher, IsRevoked, MultiNonce];
export class Database implements DatabaseInterface {
_config: ConnectionOptions;
_conn!: Connection;
_baseDatabase: BaseDatabase;
_propColMaps: { [key: string]: Map<string, string>; }
constructor (config: ConnectionOptions) {
assert(config);
this._config = {
...config,
entities: [path.join(__dirname, 'entity/*')]
};
this._baseDatabase = new BaseDatabase(this._config);
this._propColMaps = {};
}
get baseDatabase (): BaseDatabase {
return this._baseDatabase;
}
async init (): Promise<void> {
this._conn = await this._baseDatabase.init();
this._setPropColMaps();
}
async close (): Promise<void> {
return this._baseDatabase.close();
}
async getMultiNonce ({ blockHash, contractAddress, key0, key1 }: { blockHash: string, contractAddress: string, key0: string, key1: bigint }): Promise<MultiNonce | undefined> {
return this._conn.getRepository(MultiNonce)
.findOne({
blockHash,
contractAddress,
key0,
key1
});
}
async _getOwner ({ blockHash, contractAddress }: { blockHash: string, contractAddress: string }): Promise<_Owner | undefined> {
return this._conn.getRepository(_Owner)
.findOne({
blockHash,
contractAddress
});
}
async getIsRevoked ({ blockHash, contractAddress, key0 }: { blockHash: string, contractAddress: string, key0: string }): Promise<IsRevoked | undefined> {
return this._conn.getRepository(IsRevoked)
.findOne({
blockHash,
contractAddress,
key0
});
}
async getIsPhisher ({ blockHash, contractAddress, key0 }: { blockHash: string, contractAddress: string, key0: string }): Promise<IsPhisher | undefined> {
return this._conn.getRepository(IsPhisher)
.findOne({
blockHash,
contractAddress,
key0
});
}
async getIsMember ({ blockHash, contractAddress, key0 }: { blockHash: string, contractAddress: string, key0: string }): Promise<IsMember | undefined> {
return this._conn.getRepository(IsMember)
.findOne({
blockHash,
contractAddress,
key0
});
}
async getPrevEntity<Entity> (entity: new () => Entity, fields: { blockNumber: number } & DeepPartial<Entity>): Promise<Entity | undefined> {
return this._conn.getRepository(entity)
.findOne({
where: {
...fields,
blockNumber: LessThanOrEqual(fields.blockNumber)
}
});
}
async saveMultiNonce ({ blockHash, blockNumber, contractAddress, key0, key1, value, proof }: DeepPartial<MultiNonce>): Promise<MultiNonce> {
const repo = this._conn.getRepository(MultiNonce);
const entity = repo.create({ blockHash, blockNumber, contractAddress, key0, key1, value, proof });
return repo.save(entity);
}
async _saveOwner ({ blockHash, blockNumber, contractAddress, value, proof }: DeepPartial<_Owner>): Promise<_Owner> {
const repo = this._conn.getRepository(_Owner);
const entity = repo.create({ blockHash, blockNumber, contractAddress, value, proof });
return repo.save(entity);
}
async saveIsRevoked ({ blockHash, blockNumber, contractAddress, key0, value, proof }: DeepPartial<IsRevoked>): Promise<IsRevoked> {
const repo = this._conn.getRepository(IsRevoked);
const entity = repo.create({ blockHash, blockNumber, contractAddress, key0, value, proof });
return repo.save(entity);
}
async saveIsPhisher ({ blockHash, blockNumber, contractAddress, key0, value, proof }: DeepPartial<IsPhisher>): Promise<IsPhisher> {
const repo = this._conn.getRepository(IsPhisher);
const entity = repo.create({ blockHash, blockNumber, contractAddress, key0, value, proof });
return repo.save(entity);
}
async saveIsMember ({ blockHash, blockNumber, contractAddress, key0, value, proof }: DeepPartial<IsMember>): Promise<IsMember> {
const repo = this._conn.getRepository(IsMember);
const entity = repo.create({ blockHash, blockNumber, contractAddress, key0, value, proof });
return repo.save(entity);
}
getNewState (): State {
return new State();
}
async getStates (where: FindConditions<State>): Promise<State[]> {
const repo = this._conn.getRepository(State);
return this._baseDatabase.getStates(repo, where);
}
async getLatestState (contractAddress: string, kind: StateKind | null, blockNumber?: number): Promise<State | undefined> {
const repo = this._conn.getRepository(State);
return this._baseDatabase.getLatestState(repo, contractAddress, kind, blockNumber);
}
async getPrevState (blockHash: string, contractAddress: string, kind?: string): Promise<State | undefined> {
const repo = this._conn.getRepository(State);
return this._baseDatabase.getPrevState(repo, blockHash, contractAddress, kind);
}
// Fetch all diff States after the specified block number.
async getDiffStatesInRange (contractAddress: string, startblock: number, endBlock: number): Promise<State[]> {
const repo = this._conn.getRepository(State);
return this._baseDatabase.getDiffStatesInRange(repo, contractAddress, startblock, endBlock);
}
async saveOrUpdateState (dbTx: QueryRunner, state: State): Promise<State> {
const repo = dbTx.manager.getRepository(State);
return this._baseDatabase.saveOrUpdateState(repo, state);
}
async removeStates (dbTx: QueryRunner, blockNumber: number, kind: string): Promise<void> {
const repo = dbTx.manager.getRepository(State);
await this._baseDatabase.removeStates(repo, blockNumber, kind);
}
async removeStatesAfterBlock (dbTx: QueryRunner, blockNumber: number): Promise<void> {
const repo = dbTx.manager.getRepository(State);
await this._baseDatabase.removeStatesAfterBlock(repo, blockNumber);
}
async getStateSyncStatus (): Promise<StateSyncStatus | undefined> {
const repo = this._conn.getRepository(StateSyncStatus);
return this._baseDatabase.getStateSyncStatus(repo);
}
async updateStateSyncStatusIndexedBlock (queryRunner: QueryRunner, blockNumber: number, force?: boolean): Promise<StateSyncStatus> {
const repo = queryRunner.manager.getRepository(StateSyncStatus);
return this._baseDatabase.updateStateSyncStatusIndexedBlock(repo, blockNumber, force);
}
async updateStateSyncStatusCheckpointBlock (queryRunner: QueryRunner, blockNumber: number, force?: boolean): Promise<StateSyncStatus> {
const repo = queryRunner.manager.getRepository(StateSyncStatus);
return this._baseDatabase.updateStateSyncStatusCheckpointBlock(repo, blockNumber, force);
}
async getContracts (): Promise<Contract[]> {
const repo = this._conn.getRepository(Contract);
return this._baseDatabase.getContracts(repo);
}
async createTransactionRunner (): Promise<QueryRunner> {
return this._baseDatabase.createTransactionRunner();
}
async getProcessedBlockCountForRange (fromBlockNumber: number, toBlockNumber: number): Promise<{ expected: number, actual: number }> {
const repo = this._conn.getRepository(BlockProgress);
return this._baseDatabase.getProcessedBlockCountForRange(repo, fromBlockNumber, toBlockNumber);
}
async getEventsInRange (fromBlockNumber: number, toBlockNumber: number): Promise<Array<Event>> {
const repo = this._conn.getRepository(Event);
return this._baseDatabase.getEventsInRange(repo, fromBlockNumber, toBlockNumber);
}
async saveEventEntity (queryRunner: QueryRunner, entity: Event): Promise<Event> {
const repo = queryRunner.manager.getRepository(Event);
return this._baseDatabase.saveEventEntity(repo, entity);
}
async getBlockEvents (blockHash: string, where: Where, queryOptions: QueryOptions): Promise<Event[]> {
const repo = this._conn.getRepository(Event);
return this._baseDatabase.getBlockEvents(repo, blockHash, where, queryOptions);
}
async saveBlockWithEvents (queryRunner: QueryRunner, block: DeepPartial<BlockProgress>, events: DeepPartial<Event>[]): Promise<BlockProgress> {
const blockRepo = queryRunner.manager.getRepository(BlockProgress);
const eventRepo = queryRunner.manager.getRepository(Event);
return this._baseDatabase.saveBlockWithEvents(blockRepo, eventRepo, block, events);
}
async saveEvents (queryRunner: QueryRunner, events: Event[]): Promise<void> {
const eventRepo = queryRunner.manager.getRepository(Event);
return this._baseDatabase.saveEvents(eventRepo, events);
}
async saveBlockProgress (queryRunner: QueryRunner, block: DeepPartial<BlockProgress>): Promise<BlockProgress> {
const repo = queryRunner.manager.getRepository(BlockProgress);
return this._baseDatabase.saveBlockProgress(repo, block);
}
async saveContract (queryRunner: QueryRunner, address: string, kind: string, checkpoint: boolean, startingBlock: number): Promise<Contract> {
const repo = queryRunner.manager.getRepository(Contract);
return this._baseDatabase.saveContract(repo, address, kind, checkpoint, startingBlock);
}
async updateSyncStatusIndexedBlock (queryRunner: QueryRunner, blockHash: string, blockNumber: number, force = false): Promise<SyncStatus> {
const repo = queryRunner.manager.getRepository(SyncStatus);
return this._baseDatabase.updateSyncStatusIndexedBlock(repo, blockHash, blockNumber, force);
}
async updateSyncStatusCanonicalBlock (queryRunner: QueryRunner, blockHash: string, blockNumber: number, force = false): Promise<SyncStatus> {
const repo = queryRunner.manager.getRepository(SyncStatus);
return this._baseDatabase.updateSyncStatusCanonicalBlock(repo, blockHash, blockNumber, force);
}
async updateSyncStatusChainHead (queryRunner: QueryRunner, blockHash: string, blockNumber: number, force = false): Promise<SyncStatus> {
const repo = queryRunner.manager.getRepository(SyncStatus);
return this._baseDatabase.updateSyncStatusChainHead(repo, blockHash, blockNumber, force);
}
async getSyncStatus (queryRunner: QueryRunner): Promise<SyncStatus | undefined> {
const repo = queryRunner.manager.getRepository(SyncStatus);
return this._baseDatabase.getSyncStatus(repo);
}
async getEvent (id: string): Promise<Event | undefined> {
const repo = this._conn.getRepository(Event);
return this._baseDatabase.getEvent(repo, id);
}
async getBlocksAtHeight (height: number, isPruned: boolean): Promise<BlockProgress[]> {
const repo = this._conn.getRepository(BlockProgress);
return this._baseDatabase.getBlocksAtHeight(repo, height, isPruned);
}
async markBlocksAsPruned (queryRunner: QueryRunner, blocks: BlockProgress[]): Promise<void> {
const repo = queryRunner.manager.getRepository(BlockProgress);
return this._baseDatabase.markBlocksAsPruned(repo, blocks);
}
async getBlockProgress (blockHash: string): Promise<BlockProgress | undefined> {
const repo = this._conn.getRepository(BlockProgress);
return this._baseDatabase.getBlockProgress(repo, blockHash);
}
async getBlockProgressEntities (where: FindConditions<BlockProgress>, options: FindManyOptions<BlockProgress>): Promise<BlockProgress[]> {
const repo = this._conn.getRepository(BlockProgress);
return this._baseDatabase.getBlockProgressEntities(repo, where, options);
}
async getEntitiesForBlock (blockHash: string, tableName: string): Promise<any[]> {
return this._baseDatabase.getEntitiesForBlock(blockHash, tableName);
}
async updateBlockProgress (queryRunner: QueryRunner, block: BlockProgress, lastProcessedEventIndex: number): Promise<BlockProgress> {
const repo = queryRunner.manager.getRepository(BlockProgress);
return this._baseDatabase.updateBlockProgress(repo, block, lastProcessedEventIndex);
}
async removeEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindManyOptions<Entity> | FindConditions<Entity>): Promise<void> {
return this._baseDatabase.removeEntities(queryRunner, entity, findConditions);
}
async deleteEntitiesByConditions<Entity> (queryRunner: QueryRunner, entity: EntityTarget<Entity>, findConditions: FindConditions<Entity>): Promise<void> {
await this._baseDatabase.deleteEntitiesByConditions(queryRunner, entity, findConditions);
}
async getAncestorAtDepth (blockHash: string, depth: number): Promise<string> {
return this._baseDatabase.getAncestorAtDepth(blockHash, depth);
}
_getPropertyColumnMapForEntity (entityName: string): Map<string, string> {
return this._conn.getMetadata(entityName).ownColumns.reduce((acc, curr) => {
return acc.set(curr.propertyName, curr.databaseName);
}, new Map<string, string>());
}
_setPropColMaps (): void {
this._propColMaps.MultiNonce = this._getPropertyColumnMapForEntity('MultiNonce');
this._propColMaps._Owner = this._getPropertyColumnMapForEntity('_Owner');
this._propColMaps.IsRevoked = this._getPropertyColumnMapForEntity('IsRevoked');
this._propColMaps.IsPhisher = this._getPropertyColumnMapForEntity('IsPhisher');
this._propColMaps.IsMember = this._getPropertyColumnMapForEntity('IsMember');
}
}

View File

@ -0,0 +1,48 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { Entity, PrimaryGeneratedColumn, Column, Index, CreateDateColumn } from 'typeorm';
import { BlockProgressInterface } from '@cerc-io/util';
@Entity()
@Index(['blockHash'], { unique: true })
@Index(['blockNumber'])
@Index(['parentHash'])
export class BlockProgress implements BlockProgressInterface {
@PrimaryGeneratedColumn()
id!: number;
@Column('varchar')
cid!: string;
@Column('varchar', { length: 66 })
blockHash!: string;
@Column('varchar', { length: 66 })
parentHash!: string;
@Column('integer')
blockNumber!: number;
@Column('integer')
blockTimestamp!: number;
@Column('integer')
numEvents!: number;
@Column('integer')
numProcessedEvents!: number;
@Column('integer')
lastProcessedEventIndex!: number;
@Column('boolean')
isComplete!: boolean;
@Column('boolean', { default: false })
isPruned!: boolean;
@CreateDateColumn()
createdAt!: Date;
}

View File

@ -0,0 +1,24 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';
@Entity()
@Index(['address'], { unique: true })
export class Contract {
@PrimaryGeneratedColumn()
id!: number;
@Column('varchar', { length: 42 })
address!: string;
@Column('varchar')
kind!: string;
@Column('boolean')
checkpoint!: boolean;
@Column('integer')
startingBlock!: number;
}

View File

@ -0,0 +1,38 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { Entity, PrimaryGeneratedColumn, Column, Index, ManyToOne } from 'typeorm';
import { BlockProgress } from './BlockProgress';
@Entity()
@Index(['block', 'contract'])
@Index(['block', 'contract', 'eventName'])
export class Event {
@PrimaryGeneratedColumn()
id!: number;
@ManyToOne(() => BlockProgress, { onDelete: 'CASCADE' })
block!: BlockProgress;
@Column('varchar', { length: 66 })
txHash!: string;
@Column('integer')
index!: number;
@Column('varchar', { length: 42 })
contract!: string;
@Column('varchar', { length: 256 })
eventName!: string;
@Column('text')
eventInfo!: string;
@Column('text')
extraInfo!: string;
@Column('text')
proof!: string;
}

View File

@ -0,0 +1,30 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';
@Entity()
@Index(['blockHash', 'contractAddress', 'key0'], { unique: true })
export class IsMember {
@PrimaryGeneratedColumn()
id!: number;
@Column('varchar', { length: 66 })
blockHash!: string;
@Column('integer')
blockNumber!: number;
@Column('varchar', { length: 42 })
contractAddress!: string;
@Column('varchar')
key0!: string;
@Column('boolean')
value!: boolean;
@Column('text', { nullable: true })
proof!: string;
}

View File

@ -0,0 +1,30 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';
@Entity()
@Index(['blockHash', 'contractAddress', 'key0'], { unique: true })
export class IsPhisher {
@PrimaryGeneratedColumn()
id!: number;
@Column('varchar', { length: 66 })
blockHash!: string;
@Column('integer')
blockNumber!: number;
@Column('varchar', { length: 42 })
contractAddress!: string;
@Column('varchar')
key0!: string;
@Column('boolean')
value!: boolean;
@Column('text', { nullable: true })
proof!: string;
}

View File

@ -0,0 +1,30 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';
@Entity()
@Index(['blockHash', 'contractAddress', 'key0'], { unique: true })
export class IsRevoked {
@PrimaryGeneratedColumn()
id!: number;
@Column('varchar', { length: 66 })
blockHash!: string;
@Column('integer')
blockNumber!: number;
@Column('varchar', { length: 42 })
contractAddress!: string;
@Column('varchar')
key0!: string;
@Column('boolean')
value!: boolean;
@Column('text', { nullable: true })
proof!: string;
}

View File

@ -0,0 +1,34 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';
import { bigintTransformer } from '@cerc-io/util';
@Entity()
@Index(['blockHash', 'contractAddress', 'key0', 'key1'], { unique: true })
export class MultiNonce {
@PrimaryGeneratedColumn()
id!: number;
@Column('varchar', { length: 66 })
blockHash!: string;
@Column('integer')
blockNumber!: number;
@Column('varchar', { length: 42 })
contractAddress!: string;
@Column('varchar', { length: 42 })
key0!: string;
@Column('numeric', { transformer: bigintTransformer })
key1!: bigint;
@Column('numeric', { transformer: bigintTransformer })
value!: bigint;
@Column('text', { nullable: true })
proof!: string;
}

View File

@ -0,0 +1,36 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { Entity, PrimaryGeneratedColumn, Column, Index, ManyToOne } from 'typeorm';
import { StateKind } from '@cerc-io/util';
import { BlockProgress } from './BlockProgress';
@Entity()
@Index(['cid'], { unique: true })
@Index(['block', 'contractAddress'])
@Index(['block', 'contractAddress', 'kind'], { unique: true })
export class State {
@PrimaryGeneratedColumn()
id!: number;
@ManyToOne(() => BlockProgress, { onDelete: 'CASCADE' })
block!: BlockProgress;
@Column('varchar', { length: 42 })
contractAddress!: string;
@Column('varchar')
cid!: string;
@Column({
type: 'enum',
enum: StateKind
})
kind!: StateKind;
@Column('bytea')
data!: Buffer;
}

View File

@ -0,0 +1,17 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { Entity, PrimaryGeneratedColumn, Column } from 'typeorm';
@Entity()
export class StateSyncStatus {
@PrimaryGeneratedColumn()
id!: number;
@Column('integer')
latestIndexedBlockNumber!: number;
@Column('integer', { nullable: true })
latestCheckpointBlockNumber!: number;
}

View File

@ -0,0 +1,36 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { Entity, PrimaryGeneratedColumn, Column } from 'typeorm';
import { SyncStatusInterface } from '@cerc-io/util';
@Entity()
export class SyncStatus implements SyncStatusInterface {
@PrimaryGeneratedColumn()
id!: number;
@Column('varchar', { length: 66 })
chainHeadBlockHash!: string;
@Column('integer')
chainHeadBlockNumber!: number;
@Column('varchar', { length: 66 })
latestIndexedBlockHash!: string;
@Column('integer')
latestIndexedBlockNumber!: number;
@Column('varchar', { length: 66 })
latestCanonicalBlockHash!: string;
@Column('integer')
latestCanonicalBlockNumber!: number;
@Column('varchar', { length: 66 })
initialIndexedBlockHash!: string;
@Column('integer')
initialIndexedBlockNumber!: number;
}

View File

@ -0,0 +1,27 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';
@Entity()
@Index(['blockHash', 'contractAddress'], { unique: true })
export class _Owner {
@PrimaryGeneratedColumn()
id!: number;
@Column('varchar', { length: 66 })
blockHash!: string;
@Column('integer')
blockNumber!: number;
@Column('varchar', { length: 42 })
contractAddress!: string;
@Column('varchar')
value!: string;
@Column('text', { nullable: true })
proof!: string;
}

View File

@ -0,0 +1,32 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import 'reflect-metadata';
import debug from 'debug';
import { FillCmd } from '@cerc-io/cli';
import { Database } from './database';
import { Indexer } from './indexer';
const log = debug('vulcanize:fill');
export const main = async (): Promise<any> => {
const fillCmd = new FillCmd();
await fillCmd.init(Database);
await fillCmd.initIndexer(Indexer);
await fillCmd.exec();
};
main().catch(err => {
log(err);
}).finally(() => {
process.exit();
});
process.on('SIGINT', () => {
log(`Exiting process ${process.pid} with code 0`);
process.exit(0);
});

View File

@ -0,0 +1,3 @@
export * as mutations from './mutations';
export * as queries from './queries';
export * as subscriptions from './subscriptions';

View File

@ -0,0 +1,4 @@
import fs from 'fs';
import path from 'path';
export const watchContract = fs.readFileSync(path.join(__dirname, 'watchContract.gql'), 'utf8');

View File

@ -0,0 +1,3 @@
mutation watchContract($address: String!, $kind: String!, $checkpoint: Boolean!, $startingBlock: Int){
watchContract(address: $address, kind: $kind, checkpoint: $checkpoint, startingBlock: $startingBlock)
}

View File

@ -0,0 +1,8 @@
query _owner($blockHash: String!, $contractAddress: String!){
_owner(blockHash: $blockHash, contractAddress: $contractAddress){
value
proof{
data
}
}
}

View File

@ -0,0 +1,40 @@
query events($blockHash: String!, $contractAddress: String!, $name: String){
events(blockHash: $blockHash, contractAddress: $contractAddress, name: $name){
block{
cid
hash
number
timestamp
parentHash
}
tx{
hash
index
from
to
}
contract
eventIndex
event{
... on DelegationTriggeredEvent {
principal
agent
}
... on MemberStatusUpdatedEvent {
entity
isMember
}
... on OwnershipTransferredEvent {
previousOwner
newOwner
}
... on PhisherStatusUpdatedEvent {
entity
isPhisher
}
}
proof{
data
}
}
}

View File

@ -0,0 +1,40 @@
query eventsInRange($fromBlockNumber: Int!, $toBlockNumber: Int!){
eventsInRange(fromBlockNumber: $fromBlockNumber, toBlockNumber: $toBlockNumber){
block{
cid
hash
number
timestamp
parentHash
}
tx{
hash
index
from
to
}
contract
eventIndex
event{
... on DelegationTriggeredEvent {
principal
agent
}
... on MemberStatusUpdatedEvent {
entity
isMember
}
... on OwnershipTransferredEvent {
previousOwner
newOwner
}
... on PhisherStatusUpdatedEvent {
entity
isPhisher
}
}
proof{
data
}
}
}

View File

@ -0,0 +1,15 @@
query getState($blockHash: String!, $contractAddress: String!, $kind: String){
getState(blockHash: $blockHash, contractAddress: $contractAddress, kind: $kind){
block{
cid
hash
number
timestamp
parentHash
}
contractAddress
cid
kind
data
}
}

View File

@ -0,0 +1,15 @@
query getStateByCID($cid: String!){
getStateByCID(cid: $cid){
block{
cid
hash
number
timestamp
parentHash
}
contractAddress
cid
kind
data
}
}

View File

@ -0,0 +1,12 @@
import fs from 'fs';
import path from 'path';
export const events = fs.readFileSync(path.join(__dirname, 'events.gql'), 'utf8');
export const eventsInRange = fs.readFileSync(path.join(__dirname, 'eventsInRange.gql'), 'utf8');
export const multiNonce = fs.readFileSync(path.join(__dirname, 'multiNonce.gql'), 'utf8');
export const _owner = fs.readFileSync(path.join(__dirname, '_owner.gql'), 'utf8');
export const isRevoked = fs.readFileSync(path.join(__dirname, 'isRevoked.gql'), 'utf8');
export const isPhisher = fs.readFileSync(path.join(__dirname, 'isPhisher.gql'), 'utf8');
export const isMember = fs.readFileSync(path.join(__dirname, 'isMember.gql'), 'utf8');
export const getStateByCID = fs.readFileSync(path.join(__dirname, 'getStateByCID.gql'), 'utf8');
export const getState = fs.readFileSync(path.join(__dirname, 'getState.gql'), 'utf8');

View File

@ -0,0 +1,8 @@
query isMember($blockHash: String!, $contractAddress: String!, $key0: String!){
isMember(blockHash: $blockHash, contractAddress: $contractAddress, key0: $key0){
value
proof{
data
}
}
}

View File

@ -0,0 +1,8 @@
query isPhisher($blockHash: String!, $contractAddress: String!, $key0: String!){
isPhisher(blockHash: $blockHash, contractAddress: $contractAddress, key0: $key0){
value
proof{
data
}
}
}

View File

@ -0,0 +1,8 @@
query isRevoked($blockHash: String!, $contractAddress: String!, $key0: String!){
isRevoked(blockHash: $blockHash, contractAddress: $contractAddress, key0: $key0){
value
proof{
data
}
}
}

View File

@ -0,0 +1,8 @@
query multiNonce($blockHash: String!, $contractAddress: String!, $key0: String!, $key1: BigInt!){
multiNonce(blockHash: $blockHash, contractAddress: $contractAddress, key0: $key0, key1: $key1){
value
proof{
data
}
}
}

View File

@ -0,0 +1,4 @@
import fs from 'fs';
import path from 'path';
export const onEvent = fs.readFileSync(path.join(__dirname, 'onEvent.gql'), 'utf8');

View File

@ -0,0 +1,40 @@
subscription onEvent{
onEvent{
block{
cid
hash
number
timestamp
parentHash
}
tx{
hash
index
from
to
}
contract
eventIndex
event{
... on DelegationTriggeredEvent {
principal
agent
}
... on MemberStatusUpdatedEvent {
entity
isMember
}
... on OwnershipTransferredEvent {
previousOwner
newOwner
}
... on PhisherStatusUpdatedEvent {
entity
isPhisher
}
}
proof{
data
}
}
}

View File

@ -0,0 +1,129 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import assert from 'assert';
import { utils } from 'ethers';
import { ResultEvent } from '@cerc-io/util';
import { Indexer, KIND_PHISHERREGISTRY } from './indexer';
const INVOKE_SIGNATURE = 'invoke(((((address,uint256,bytes),((address,bytes32,(address,bytes)[]),bytes)[])[],(uint256,uint256)),bytes)[])';
const CLAIM_IF_MEMBER_SIGNATURE = 'claimIfMember(string,bool)';
const CLAIM_IF_PHISHER_SIGNATURE = 'claimIfPhisher(string,bool)';
/**
* Hook function to store an initial state.
* @param indexer Indexer instance.
* @param blockHash Hash of the concerned block.
* @param contractAddress Address of the concerned contract.
* @returns Data block to be stored.
*/
export async function createInitialState (indexer: Indexer, contractAddress: string, blockHash: string): Promise<any> {
assert(indexer);
assert(blockHash);
assert(contractAddress);
// Store an empty State.
const stateData: any = {
state: {}
};
// Use updateStateForElementaryType to update initial state with an elementary property.
// Eg. const stateData = updateStateForElementaryType(stateData, '_totalBalance', result.value.toString());
// Use updateStateForMappingType to update initial state with a nested property.
// Eg. const stateData = updateStateForMappingType(stateData, '_allowances', [owner, spender], allowance.value.toString());
// Return initial state data to be saved.
return stateData;
}
/**
* Hook function to create state diff.
* @param indexer Indexer instance that contains methods to fetch the contract variable values.
* @param blockHash Block hash of the concerned block.
*/
export async function createStateDiff (indexer: Indexer, blockHash: string): Promise<void> {
assert(indexer);
assert(blockHash);
// Use indexer.createDiff() method to save custom state diff(s).
}
/**
* Hook function to create state checkpoint
* @param indexer Indexer instance.
* @param contractAddress Address of the concerned contract.
* @param blockHash Block hash of the concerned block.
* @returns Whether to disable default checkpoint. If false, the state from this hook is updated with that from default checkpoint.
*/
export async function createStateCheckpoint (indexer: Indexer, contractAddress: string, blockHash: string): Promise<boolean> {
assert(indexer);
assert(blockHash);
assert(contractAddress);
// Use indexer.createStateCheckpoint() method to create a custom checkpoint.
// Return false to have the state created by this hook updated with the auto-generated checkpoint state.
// Return true to disable updating the state created by this hook with the auto-generated checkpoint state.
return false;
}
/**
* Event hook function.
* @param indexer Indexer instance that contains methods to fetch and update the contract values in the database.
* @param eventData ResultEvent object containing event information.
*/
export async function handleEvent (indexer: Indexer, eventData: ResultEvent): Promise<void> {
assert(indexer);
assert(eventData);
// Perform indexing for PhisherStatusUpdated and MemberStatusUpdated.
if (['PhisherStatusUpdatedEvent', 'MemberStatusUpdatedEvent'].includes(eventData.event.__typename)) {
const txData = await indexer.getFullTransaction(eventData.tx.hash, eventData.block.number);
const txDescription = getTxDescription(indexer, KIND_PHISHERREGISTRY, txData.input);
let txDescriptions = [txDescription];
if (txDescription.signature === INVOKE_SIGNATURE) {
// Parse transactions from the signed invocation batches if it is an invoke method call on the Delegatable contract.
txDescriptions = txDescription.args.signedInvocations
.reduce((txs: utils.TransactionDescription[], signedInvocation: any) => {
// Get transactions from signed invocations batch.
const batchTxs = signedInvocation.invocations.batch.map((invocation: any) => {
return getTxDescription(indexer, KIND_PHISHERREGISTRY, invocation.transaction.data);
});
txs.push(...batchTxs);
return txs;
}, []);
}
// Filter transactions for the claimIfMember and claimIfPhisher methods.
txDescriptions = txDescriptions.filter((tx: utils.TransactionDescription) => {
return [CLAIM_IF_MEMBER_SIGNATURE, CLAIM_IF_PHISHER_SIGNATURE].includes(tx.signature);
});
for (const txDescription of txDescriptions) {
switch (txDescription.signature) {
case CLAIM_IF_MEMBER_SIGNATURE:
// Update isMember entry for the identifier in database.
await indexer.isMember(eventData.block.hash, eventData.contract, txDescription.args.identifier, true);
break;
case CLAIM_IF_PHISHER_SIGNATURE:
// Update isPhisher entry for the identifier in database.
await indexer.isPhisher(eventData.block.hash, eventData.contract, txDescription.args.identifier, true);
break;
}
}
}
}
// Get transaction details from input data.
const getTxDescription = (indexer: Indexer, contractKind: string, data: string): utils.TransactionDescription => {
const contractInterface = indexer.getContractInterface(contractKind);
assert(contractInterface);
return contractInterface.parseTransaction({ data });
};

View File

@ -0,0 +1,668 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import assert from 'assert';
import debug from 'debug';
import { DeepPartial, FindConditions, FindManyOptions } from 'typeorm';
import JSONbig from 'json-bigint';
import { ethers } from 'ethers';
import { JsonFragment } from '@ethersproject/abi';
import { JsonRpcProvider } from '@ethersproject/providers';
import { EthClient } from '@cerc-io/ipld-eth-client';
import { MappingKey, StorageLayout } from '@cerc-io/solidity-mapper';
import {
Indexer as BaseIndexer,
IndexerInterface,
ValueResult,
ServerConfig,
JobQueue,
Where,
QueryOptions,
updateStateForElementaryType,
updateStateForMappingType,
BlockHeight,
StateKind,
StateStatus,
getFullTransaction,
ResultEvent,
getResultEvent,
DatabaseInterface,
Clients
} from '@cerc-io/util';
import PhisherRegistryArtifacts from './artifacts/PhisherRegistry.json';
import { Database, ENTITIES } from './database';
import { createInitialState, handleEvent, createStateDiff, createStateCheckpoint } from './hooks';
import { Contract } from './entity/Contract';
import { Event } from './entity/Event';
import { SyncStatus } from './entity/SyncStatus';
import { StateSyncStatus } from './entity/StateSyncStatus';
import { BlockProgress } from './entity/BlockProgress';
import { State } from './entity/State';
import { IsMember } from './entity/IsMember';
import { IsPhisher } from './entity/IsPhisher';
import { IsRevoked } from './entity/IsRevoked';
import { _Owner } from './entity/_Owner';
import { MultiNonce } from './entity/MultiNonce';
const log = debug('vulcanize:indexer');
const JSONbigNative = JSONbig({ useNativeBigInt: true });
export const KIND_PHISHERREGISTRY = 'PhisherRegistry';
export class Indexer implements IndexerInterface {
_db: Database
_ethClient: EthClient
_ethProvider: JsonRpcProvider
_baseIndexer: BaseIndexer
_serverConfig: ServerConfig
_abiMap: Map<string, JsonFragment[]>
_storageLayoutMap: Map<string, StorageLayout>
_contractMap: Map<string, ethers.utils.Interface>
constructor (serverConfig: ServerConfig, db: DatabaseInterface, clients: Clients, ethProvider: JsonRpcProvider, jobQueue: JobQueue) {
assert(db);
assert(clients.ethClient);
this._db = db as Database;
this._ethClient = clients.ethClient;
this._ethProvider = ethProvider;
this._serverConfig = serverConfig;
this._baseIndexer = new BaseIndexer(this._serverConfig, this._db, this._ethClient, this._ethProvider, jobQueue);
this._abiMap = new Map();
this._storageLayoutMap = new Map();
this._contractMap = new Map();
const {
abi: PhisherRegistryABI,
storageLayout: PhisherRegistryStorageLayout
} = PhisherRegistryArtifacts;
assert(PhisherRegistryABI);
this._abiMap.set(KIND_PHISHERREGISTRY, PhisherRegistryABI);
assert(PhisherRegistryStorageLayout);
this._storageLayoutMap.set(KIND_PHISHERREGISTRY, PhisherRegistryStorageLayout);
this._contractMap.set(KIND_PHISHERREGISTRY, new ethers.utils.Interface(PhisherRegistryABI));
}
get serverConfig (): ServerConfig {
return this._serverConfig;
}
get storageLayoutMap (): Map<string, StorageLayout> {
return this._storageLayoutMap;
}
async init (): Promise<void> {
await this._baseIndexer.fetchContracts();
await this._baseIndexer.fetchStateStatus();
}
getResultEvent (event: Event): ResultEvent {
return getResultEvent(event);
}
async multiNonce (blockHash: string, contractAddress: string, key0: string, key1: bigint, diff = false): Promise<ValueResult> {
let entity = await this._db.getMultiNonce({ blockHash, contractAddress, key0, key1 });
if (entity) {
log('multiNonce: db hit.');
} else {
log('multiNonce: db miss, fetching from upstream server');
entity = await this._getStorageEntity(
blockHash,
contractAddress,
MultiNonce,
'multiNonce',
{ key0, key1 },
BigInt(0)
);
await this._db.saveMultiNonce(entity);
if (diff) {
const stateUpdate = updateStateForMappingType({}, 'multiNonce', [key0.toString(), key1.toString()], entity.value.toString());
await this.createDiffStaged(contractAddress, blockHash, stateUpdate);
}
}
return {
value: entity.value,
proof: JSON.parse(entity.proof)
};
}
async _owner (blockHash: string, contractAddress: string, diff = false): Promise<ValueResult> {
let entity = await this._db._getOwner({ blockHash, contractAddress });
if (entity) {
log('_owner: db hit.');
} else {
log('_owner: db miss, fetching from upstream server');
entity = await this._getStorageEntity(
blockHash,
contractAddress,
_Owner,
'_owner',
{},
''
);
await this._db._saveOwner(entity);
if (diff) {
const stateUpdate = updateStateForElementaryType({}, '_owner', entity.value.toString());
await this.createDiffStaged(contractAddress, blockHash, stateUpdate);
}
}
return {
value: entity.value,
proof: JSON.parse(entity.proof)
};
}
async isRevoked (blockHash: string, contractAddress: string, key0: string, diff = false): Promise<ValueResult> {
let entity = await this._db.getIsRevoked({ blockHash, contractAddress, key0 });
if (entity) {
log('isRevoked: db hit.');
} else {
log('isRevoked: db miss, fetching from upstream server');
entity = await this._getStorageEntity(
blockHash,
contractAddress,
IsRevoked,
'isRevoked',
{ key0 },
false
);
await this._db.saveIsRevoked(entity);
if (diff) {
const stateUpdate = updateStateForMappingType({}, 'isRevoked', [key0.toString()], entity.value.toString());
await this.createDiffStaged(contractAddress, blockHash, stateUpdate);
}
}
return {
value: entity.value,
proof: JSON.parse(entity.proof)
};
}
async isPhisher (blockHash: string, contractAddress: string, key0: string, diff = false): Promise<ValueResult> {
let entity = await this._db.getIsPhisher({ blockHash, contractAddress, key0 });
if (entity) {
log('isPhisher: db hit.');
} else {
log('isPhisher: db miss, fetching from upstream server');
entity = await this._getStorageEntity(
blockHash,
contractAddress,
IsPhisher,
'isPhisher',
{ key0 },
false
);
await this._db.saveIsPhisher(entity);
if (diff) {
const stateUpdate = updateStateForMappingType({}, 'isPhisher', [key0.toString()], entity.value.toString());
await this.createDiffStaged(contractAddress, blockHash, stateUpdate);
}
}
return {
value: entity.value,
proof: JSON.parse(entity.proof)
};
}
async isMember (blockHash: string, contractAddress: string, key0: string, diff = false): Promise<ValueResult> {
let entity = await this._db.getIsMember({ blockHash, contractAddress, key0 });
if (entity) {
log('isMember: db hit.');
} else {
log('isMember: db miss, fetching from upstream server');
entity = await this._getStorageEntity(
blockHash,
contractAddress,
IsMember,
'isMember',
{ key0 },
false
);
await this._db.saveIsMember(entity);
if (diff) {
const stateUpdate = updateStateForMappingType({}, 'isMember', [key0.toString()], entity.value.toString());
await this.createDiffStaged(contractAddress, blockHash, stateUpdate);
}
}
return {
value: entity.value,
proof: JSON.parse(entity.proof)
};
}
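// Helper used by the storage variable getters above: fetches the value either from a
// previously indexed entity (for blocks below the initial indexed block number) or from
// contract storage via the base indexer, and returns it in the shape expected by the database.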
async _getStorageEntity<Entity> (
blockHash: string,
contractAddress: string,
entity: new () => Entity,
storageVariableName: string,
mappingKeys: {[key: string]: any},
defaultValue: any
): Promise<Entity> {
const [{ number }, syncStatus] = await Promise.all([
this._ethProvider.send('eth_getHeaderByHash', [blockHash]),
this.getSyncStatus()
]);
const blockNumber = ethers.BigNumber.from(number).toNumber();
let result: ValueResult = {
value: defaultValue
};
if (syncStatus && blockNumber < syncStatus.initialIndexedBlockNumber) {
const entityFields: any = { blockNumber, contractAddress, ...mappingKeys };
const entityData: any = await this._db.getPrevEntity(entity, entityFields);
if (entityData) {
result = {
value: entityData.value,
proof: JSON.parse(entityData.proof)
};
}
} else {
const storageLayout = this._storageLayoutMap.get(KIND_PHISHERREGISTRY);
assert(storageLayout);
result = await this._baseIndexer.getStorageValue(
storageLayout,
blockHash,
contractAddress,
storageVariableName,
...Object.values(mappingKeys)
);
}
return {
blockHash,
blockNumber,
contractAddress,
...mappingKeys,
value: result.value,
proof: result.proof ? JSONbigNative.stringify(result.proof) : null
} as any;
}
async getStorageValue (storageLayout: StorageLayout, blockHash: string, contractAddress: string, variable: string, ...mappingKeys: MappingKey[]): Promise<ValueResult> {
return this._baseIndexer.getStorageValue(
storageLayout,
blockHash,
contractAddress,
variable,
...mappingKeys
);
}
async getEntitiesForBlock (blockHash: string, tableName: string): Promise<any[]> {
return this._db.getEntitiesForBlock(blockHash, tableName);
}
async processInitialState (contractAddress: string, blockHash: string): Promise<any> {
// Call initial state hook.
return createInitialState(this, contractAddress, blockHash);
}
async processStateCheckpoint (contractAddress: string, blockHash: string): Promise<boolean> {
// Call checkpoint hook.
return createStateCheckpoint(this, contractAddress, blockHash);
}
async processCanonicalBlock (blockHash: string): Promise<void> {
console.time('time:indexer#processCanonicalBlock-finalize_auto_diffs');
// Finalize staged diff blocks if any.
await this._baseIndexer.finalizeDiffStaged(blockHash);
console.timeEnd('time:indexer#processCanonicalBlock-finalize_auto_diffs');
// Call custom stateDiff hook.
await createStateDiff(this, blockHash);
}
async processCheckpoint (blockHash: string): Promise<void> {
// Return if checkpointInterval is <= 0.
const checkpointInterval = this._serverConfig.checkpointInterval;
if (checkpointInterval <= 0) return;
console.time('time:indexer#processCheckpoint-checkpoint');
await this._baseIndexer.processCheckpoint(this, blockHash, checkpointInterval);
console.timeEnd('time:indexer#processCheckpoint-checkpoint');
}
async processCLICheckpoint (contractAddress: string, blockHash?: string): Promise<string | undefined> {
return this._baseIndexer.processCLICheckpoint(this, contractAddress, blockHash);
}
async getPrevState (blockHash: string, contractAddress: string, kind?: string): Promise<State | undefined> {
return this._db.getPrevState(blockHash, contractAddress, kind);
}
async getLatestState (contractAddress: string, kind: StateKind | null, blockNumber?: number): Promise<State | undefined> {
return this._db.getLatestState(contractAddress, kind, blockNumber);
}
async getStatesByHash (blockHash: string): Promise<State[]> {
return this._baseIndexer.getStatesByHash(blockHash);
}
async getStateByCID (cid: string): Promise<State | undefined> {
return this._baseIndexer.getStateByCID(cid);
}
async getStates (where: FindConditions<State>): Promise<State[]> {
return this._db.getStates(where);
}
getStateData (state: State): any {
return this._baseIndexer.getStateData(state);
}
// Method used to create auto diffs (diff_staged).
async createDiffStaged (contractAddress: string, blockHash: string, data: any): Promise<void> {
console.time('time:indexer#createDiffStaged-auto_diff');
await this._baseIndexer.createDiffStaged(contractAddress, blockHash, data);
console.timeEnd('time:indexer#createDiffStaged-auto_diff');
}
// Method to be used by createStateDiff hook.
async createDiff (contractAddress: string, blockHash: string, data: any): Promise<void> {
const block = await this.getBlockProgress(blockHash);
assert(block);
await this._baseIndexer.createDiff(contractAddress, block, data);
}
// Method to be used by createStateCheckpoint hook.
async createStateCheckpoint (contractAddress: string, blockHash: string, data: any): Promise<void> {
const block = await this.getBlockProgress(blockHash);
assert(block);
return this._baseIndexer.createStateCheckpoint(contractAddress, block, data);
}
// Method to be used by export-state CLI.
async createCheckpoint (contractAddress: string, blockHash: string): Promise<string | undefined> {
const block = await this.getBlockProgress(blockHash);
assert(block);
return this._baseIndexer.createCheckpoint(this, contractAddress, block);
}
async saveOrUpdateState (state: State): Promise<State> {
return this._baseIndexer.saveOrUpdateState(state);
}
async removeStates (blockNumber: number, kind: StateKind): Promise<void> {
await this._baseIndexer.removeStates(blockNumber, kind);
}
async triggerIndexingOnEvent (event: Event): Promise<void> {
const resultEvent = this.getResultEvent(event);
// Call custom hook function for indexing on event.
await handleEvent(this, resultEvent);
}
async processEvent (event: Event): Promise<void> {
// Trigger indexing of data based on the event.
await this.triggerIndexingOnEvent(event);
}
async processBlock (blockProgress: BlockProgress): Promise<void> {
console.time('time:indexer#processBlock-init_state');
// Call a function to create initial state for contracts.
await this._baseIndexer.createInit(this, blockProgress.blockHash, blockProgress.blockNumber);
console.timeEnd('time:indexer#processBlock-init_state');
}
parseEventNameAndArgs (kind: string, logObj: any): any {
const { topics, data } = logObj;
const contract = this._contractMap.get(kind);
assert(contract);
const logDescription = contract.parseLog({ data, topics });
const { eventName, eventInfo, eventSignature } = this._baseIndexer.parseEvent(logDescription);
return {
eventName,
eventInfo,
eventSignature
};
}
async getStateSyncStatus (): Promise<StateSyncStatus | undefined> {
return this._db.getStateSyncStatus();
}
async updateStateSyncStatusIndexedBlock (blockNumber: number, force?: boolean): Promise<StateSyncStatus> {
const dbTx = await this._db.createTransactionRunner();
let res;
try {
res = await this._db.updateStateSyncStatusIndexedBlock(dbTx, blockNumber, force);
await dbTx.commitTransaction();
} catch (error) {
await dbTx.rollbackTransaction();
throw error;
} finally {
await dbTx.release();
}
return res;
}
async updateStateSyncStatusCheckpointBlock (blockNumber: number, force?: boolean): Promise<StateSyncStatus> {
const dbTx = await this._db.createTransactionRunner();
let res;
try {
res = await this._db.updateStateSyncStatusCheckpointBlock(dbTx, blockNumber, force);
await dbTx.commitTransaction();
} catch (error) {
await dbTx.rollbackTransaction();
throw error;
} finally {
await dbTx.release();
}
return res;
}
async getLatestCanonicalBlock (): Promise<BlockProgress> {
const syncStatus = await this.getSyncStatus();
assert(syncStatus);
const latestCanonicalBlock = await this.getBlockProgress(syncStatus.latestCanonicalBlockHash);
assert(latestCanonicalBlock);
return latestCanonicalBlock;
}
async getLatestStateIndexedBlock (): Promise<BlockProgress> {
return this._baseIndexer.getLatestStateIndexedBlock();
}
async watchContract (address: string, kind: string, checkpoint: boolean, startingBlock: number): Promise<void> {
return this._baseIndexer.watchContract(address, kind, checkpoint, startingBlock);
}
updateStateStatusMap (address: string, stateStatus: StateStatus): void {
this._baseIndexer.updateStateStatusMap(address, stateStatus);
}
cacheContract (contract: Contract): void {
return this._baseIndexer.cacheContract(contract);
}
async saveEventEntity (dbEvent: Event): Promise<Event> {
return this._baseIndexer.saveEventEntity(dbEvent);
}
async getEventsByFilter (blockHash: string, contract?: string, name?: string): Promise<Array<Event>> {
return this._baseIndexer.getEventsByFilter(blockHash, contract, name);
}
isWatchedContract (address : string): Contract | undefined {
return this._baseIndexer.isWatchedContract(address);
}
getContractsByKind (kind: string): Contract[] {
return this._baseIndexer.getContractsByKind(kind);
}
async getProcessedBlockCountForRange (fromBlockNumber: number, toBlockNumber: number): Promise<{ expected: number, actual: number }> {
return this._baseIndexer.getProcessedBlockCountForRange(fromBlockNumber, toBlockNumber);
}
async getEventsInRange (fromBlockNumber: number, toBlockNumber: number): Promise<Array<Event>> {
return this._baseIndexer.getEventsInRange(fromBlockNumber, toBlockNumber, this._serverConfig.maxEventsBlockRange);
}
async getSyncStatus (): Promise<SyncStatus | undefined> {
return this._baseIndexer.getSyncStatus();
}
async getBlocks (blockFilter: { blockHash?: string, blockNumber?: number }): Promise<any> {
return this._baseIndexer.getBlocks(blockFilter);
}
async updateSyncStatusIndexedBlock (blockHash: string, blockNumber: number, force = false): Promise<SyncStatus> {
return this._baseIndexer.updateSyncStatusIndexedBlock(blockHash, blockNumber, force);
}
async updateSyncStatusChainHead (blockHash: string, blockNumber: number, force = false): Promise<SyncStatus> {
return this._baseIndexer.updateSyncStatusChainHead(blockHash, blockNumber, force);
}
async updateSyncStatusCanonicalBlock (blockHash: string, blockNumber: number, force = false): Promise<SyncStatus> {
return this._baseIndexer.updateSyncStatusCanonicalBlock(blockHash, blockNumber, force);
}
async getEvent (id: string): Promise<Event | undefined> {
return this._baseIndexer.getEvent(id);
}
async getBlockProgress (blockHash: string): Promise<BlockProgress | undefined> {
return this._baseIndexer.getBlockProgress(blockHash);
}
async getBlockProgressEntities (where: FindConditions<BlockProgress>, options: FindManyOptions<BlockProgress>): Promise<BlockProgress[]> {
return this._baseIndexer.getBlockProgressEntities(where, options);
}
async getBlocksAtHeight (height: number, isPruned: boolean): Promise<BlockProgress[]> {
return this._baseIndexer.getBlocksAtHeight(height, isPruned);
}
async saveBlockAndFetchEvents (block: DeepPartial<BlockProgress>): Promise<[BlockProgress, DeepPartial<Event>[]]> {
return this._saveBlockAndFetchEvents(block);
}
async getBlockEvents (blockHash: string, where: Where, queryOptions: QueryOptions): Promise<Array<Event>> {
return this._baseIndexer.getBlockEvents(blockHash, where, queryOptions);
}
async removeUnknownEvents (block: BlockProgress): Promise<void> {
return this._baseIndexer.removeUnknownEvents(Event, block);
}
async markBlocksAsPruned (blocks: BlockProgress[]): Promise<void> {
return this._baseIndexer.markBlocksAsPruned(blocks);
}
async updateBlockProgress (block: BlockProgress, lastProcessedEventIndex: number): Promise<BlockProgress> {
return this._baseIndexer.updateBlockProgress(block, lastProcessedEventIndex);
}
async getAncestorAtDepth (blockHash: string, depth: number): Promise<string> {
return this._baseIndexer.getAncestorAtDepth(blockHash, depth);
}
// Get latest block using eth client.
async getLatestBlock (): Promise<BlockHeight> {
const { block } = await this._ethClient.getBlockByHash();
return block;
}
// Get full transaction data.
async getFullTransaction (txHash: string, blockNumber: number): Promise<any> {
return getFullTransaction(this._ethClient, txHash, blockNumber);
}
// Get contract interface for specified contract kind.
getContractInterface (kind: string): ethers.utils.Interface | undefined {
return this._contractMap.get(kind);
}
async resetWatcherToBlock (blockNumber: number): Promise<void> {
const entities = [...ENTITIES];
await this._baseIndexer.resetWatcherToBlock(blockNumber, entities);
}
async _saveBlockAndFetchEvents ({
cid: blockCid,
blockHash,
blockNumber,
blockTimestamp,
parentHash
}: DeepPartial<BlockProgress>): Promise<[BlockProgress, DeepPartial<Event>[]]> {
assert(blockHash);
assert(blockNumber);
const dbEvents = await this._baseIndexer.fetchEvents(blockHash, blockNumber, this.parseEventNameAndArgs.bind(this));
const dbTx = await this._db.createTransactionRunner();
try {
const block = {
cid: blockCid,
blockHash,
blockNumber,
blockTimestamp,
parentHash
};
console.time(`time:indexer#_saveBlockAndFetchEvents-db-save-${blockNumber}`);
const blockProgress = await this._db.saveBlockWithEvents(dbTx, block, dbEvents);
await dbTx.commitTransaction();
console.timeEnd(`time:indexer#_saveBlockAndFetchEvents-db-save-${blockNumber}`);
return [blockProgress, []];
} catch (error) {
await dbTx.rollbackTransaction();
throw error;
} finally {
await dbTx.release();
}
}
}

View File

@ -0,0 +1,36 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import debug from 'debug';
import { JobRunnerCmd } from '@cerc-io/cli';
import { JobRunner } from '@cerc-io/util';
import { Indexer } from './indexer';
import { Database } from './database';
const log = debug('vulcanize:job-runner');
export const main = async (): Promise<any> => {
const jobRunnerCmd = new JobRunnerCmd();
await jobRunnerCmd.init(Database);
await jobRunnerCmd.initIndexer(Indexer);
await jobRunnerCmd.exec(async (jobRunner: JobRunner): Promise<void> => {
await jobRunner.subscribeBlockProcessingQueue();
await jobRunner.subscribeEventProcessingQueue();
await jobRunner.subscribeBlockCheckpointQueue();
await jobRunner.subscribeHooksQueue();
});
};
main().then(() => {
log('Starting job runner...');
}).catch(err => {
log(err);
});
process.on('uncaughtException', err => {
log('uncaughtException', err);
});

View File

@ -0,0 +1,160 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import assert from 'assert';
import BigInt from 'apollo-type-bigint';
import debug from 'debug';
import Decimal from 'decimal.js';
import { GraphQLScalarType } from 'graphql';
import { ValueResult, gqlTotalQueryCount, gqlQueryCount, getResultState, IndexerInterface, EventWatcher } from '@cerc-io/util';
import { Indexer } from './indexer';
const log = debug('vulcanize:resolver');
export const createResolvers = async (indexerArg: IndexerInterface, eventWatcher: EventWatcher): Promise<any> => {
const indexer = indexerArg as Indexer;
return {
BigInt: new BigInt('bigInt'),
BigDecimal: new GraphQLScalarType({
name: 'BigDecimal',
description: 'BigDecimal custom scalar type',
parseValue (value) {
// value from the client
return new Decimal(value);
},
serialize (value: Decimal) {
// value sent to the client
return value.toFixed();
}
}),
Event: {
__resolveType: (obj: any) => {
assert(obj.__typename);
return obj.__typename;
}
},
Subscription: {
onEvent: {
subscribe: () => eventWatcher.getEventIterator()
}
},
Mutation: {
watchContract: async (_: any, { address, kind, checkpoint, startingBlock = 1 }: { address: string, kind: string, checkpoint: boolean, startingBlock: number }): Promise<boolean> => {
log('watchContract', address, kind, checkpoint, startingBlock);
await indexer.watchContract(address, kind, checkpoint, startingBlock);
return true;
}
},
Query: {
multiNonce: (_: any, { blockHash, contractAddress, key0, key1 }: { blockHash: string, contractAddress: string, key0: string, key1: bigint }): Promise<ValueResult> => {
log('multiNonce', blockHash, contractAddress, key0, key1);
gqlTotalQueryCount.inc(1);
gqlQueryCount.labels('multiNonce').inc(1);
return indexer.multiNonce(blockHash, contractAddress, key0, key1);
},
_owner: (_: any, { blockHash, contractAddress }: { blockHash: string, contractAddress: string }): Promise<ValueResult> => {
log('_owner', blockHash, contractAddress);
gqlTotalQueryCount.inc(1);
gqlQueryCount.labels('_owner').inc(1);
return indexer._owner(blockHash, contractAddress);
},
isRevoked: (_: any, { blockHash, contractAddress, key0 }: { blockHash: string, contractAddress: string, key0: string }): Promise<ValueResult> => {
log('isRevoked', blockHash, contractAddress, key0);
gqlTotalQueryCount.inc(1);
gqlQueryCount.labels('isRevoked').inc(1);
return indexer.isRevoked(blockHash, contractAddress, key0);
},
isPhisher: (_: any, { blockHash, contractAddress, key0 }: { blockHash: string, contractAddress: string, key0: string }): Promise<ValueResult> => {
log('isPhisher', blockHash, contractAddress, key0);
gqlTotalQueryCount.inc(1);
gqlQueryCount.labels('isPhisher').inc(1);
return indexer.isPhisher(blockHash, contractAddress, key0);
},
isMember: (_: any, { blockHash, contractAddress, key0 }: { blockHash: string, contractAddress: string, key0: string }): Promise<ValueResult> => {
log('isMember', blockHash, contractAddress, key0);
gqlTotalQueryCount.inc(1);
gqlQueryCount.labels('isMember').inc(1);
return indexer.isMember(blockHash, contractAddress, key0);
},
events: async (_: any, { blockHash, contractAddress, name }: { blockHash: string, contractAddress: string, name?: string }) => {
log('events', blockHash, contractAddress, name);
gqlTotalQueryCount.inc(1);
gqlQueryCount.labels('events').inc(1);
const block = await indexer.getBlockProgress(blockHash);
if (!block || !block.isComplete) {
throw new Error(`Block hash ${blockHash} number ${block?.blockNumber} not processed yet`);
}
const events = await indexer.getEventsByFilter(blockHash, contractAddress, name);
return events.map(event => indexer.getResultEvent(event));
},
eventsInRange: async (_: any, { fromBlockNumber, toBlockNumber }: { fromBlockNumber: number, toBlockNumber: number }) => {
log('eventsInRange', fromBlockNumber, toBlockNumber);
gqlTotalQueryCount.inc(1);
gqlQueryCount.labels('eventsInRange').inc(1);
const events = await indexer.getEventsInRange(fromBlockNumber, toBlockNumber);
return events.map(event => indexer.getResultEvent(event));
},
getStateByCID: async (_: any, { cid }: { cid: string }) => {
log('getStateByCID', cid);
gqlTotalQueryCount.inc(1);
gqlQueryCount.labels('getStateByCID').inc(1);
const state = await indexer.getStateByCID(cid);
return state && state.block.isComplete ? getResultState(state) : undefined;
},
getState: async (_: any, { blockHash, contractAddress, kind }: { blockHash: string, contractAddress: string, kind: string }) => {
log('getState', blockHash, contractAddress, kind);
gqlTotalQueryCount.inc(1);
gqlQueryCount.labels('getState').inc(1);
const state = await indexer.getPrevState(blockHash, contractAddress, kind);
return state && state.block.isComplete ? getResultState(state) : undefined;
},
getSyncStatus: async () => {
log('getSyncStatus');
gqlTotalQueryCount.inc(1);
gqlQueryCount.labels('getSyncStatus').inc(1);
return indexer.getSyncStatus();
},
latestBlock: async () => {
log('latestBlock');
gqlTotalQueryCount.inc(1);
gqlQueryCount.labels('latestBlock').inc(1);
return indexer.getLatestBlock();
}
}
};
};

View File

@ -0,0 +1,117 @@
scalar BigInt
scalar BigDecimal
scalar Bytes
type Block_height {
hash: Bytes
number: Int
}
type Proof {
data: String!
}
type ResultBoolean {
value: Boolean!
proof: Proof
}
type ResultString {
value: String!
proof: Proof
}
type ResultInt {
value: Int!
proof: Proof
}
type ResultBigInt {
value: BigInt!
proof: Proof
}
type _Block_ {
cid: String!
hash: String!
number: Int!
timestamp: Int!
parentHash: String!
}
type _Transaction_ {
hash: String!
index: Int!
from: String!
to: String!
}
type ResultEvent {
block: _Block_!
tx: _Transaction_!
contract: String!
eventIndex: Int!
event: Event!
proof: Proof
}
union Event = DelegationTriggeredEvent | MemberStatusUpdatedEvent | OwnershipTransferredEvent | PhisherStatusUpdatedEvent
type DelegationTriggeredEvent {
principal: String!
agent: String!
}
type MemberStatusUpdatedEvent {
entity: String!
isMember: Boolean!
}
type OwnershipTransferredEvent {
previousOwner: String!
newOwner: String!
}
type PhisherStatusUpdatedEvent {
entity: String!
isPhisher: Boolean!
}
type ResultState {
block: _Block_!
contractAddress: String!
cid: String!
kind: String!
data: String!
}
type SyncStatus {
latestIndexedBlockHash: String!
latestIndexedBlockNumber: Int!
latestCanonicalBlockHash: String!
latestCanonicalBlockNumber: Int!
}
type Query {
events(blockHash: String!, contractAddress: String!, name: String): [ResultEvent!]
eventsInRange(fromBlockNumber: Int!, toBlockNumber: Int!): [ResultEvent!]
multiNonce(blockHash: String!, contractAddress: String!, key0: String!, key1: BigInt!): ResultBigInt!
_owner(blockHash: String!, contractAddress: String!): ResultString!
isRevoked(blockHash: String!, contractAddress: String!, key0: String!): ResultBoolean!
isPhisher(blockHash: String!, contractAddress: String!, key0: String!): ResultBoolean!
isMember(blockHash: String!, contractAddress: String!, key0: String!): ResultBoolean!
getStateByCID(cid: String!): ResultState
getState(blockHash: String!, contractAddress: String!, kind: String): ResultState
getSyncStatus: SyncStatus
latestBlock: Block_height
}
type Mutation {
watchContract(address: String!, kind: String!, checkpoint: Boolean!, startingBlock: Int): Boolean!
}
type Subscription {
onEvent: ResultEvent!
}

View File

@ -0,0 +1,89 @@
//
// Copyright 2023 Vulcanize, Inc.
//
import fs from 'fs';
import path from 'path';
import 'reflect-metadata';
import debug from 'debug';
import { ethers } from 'ethers';
import { ServerCmd } from '@cerc-io/cli';
import { createResolvers } from './resolvers';
import { Indexer } from './indexer';
import { Database } from './database';
import { abi as PhisherRegistryABI } from './artifacts/PhisherRegistry.json';
const log = debug('vulcanize:server');
const contractInterface = new ethers.utils.Interface(PhisherRegistryABI);
export const main = async (): Promise<any> => {
const serverCmd = new ServerCmd();
await serverCmd.init(Database);
await serverCmd.initIndexer(Indexer);
const typeDefs = fs.readFileSync(path.join(__dirname, 'schema.gql')).toString();
return serverCmd.exec(createResolvers, typeDefs, parseLibp2pMessage);
};
const MESSAGE_KINDS = {
INVOKE: 'invoke',
REVOKE: 'revoke'
};
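// A rough sketch of the message payload shapes this parser expects (field names inferred
// from the parsing logic below; actual MobyMask messages may carry additional fields):
// invoke: { kind: 'invoke', message: [{ invocations: { batch: [{ transaction: { data: '0x...' } }] } }, ...] }
// revoke: { kind: 'revoke', message: { signedDelegation, signedIntendedRevocation: { delegationHash } } }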
function parseLibp2pMessage (peerId: string, data: any) {
log('Received a message on mobymask P2P network from peer:', peerId);
const { kind, message } = data;
switch (kind) {
case MESSAGE_KINDS.INVOKE: {
log('Signed invocations:');
log(JSON.stringify(message, null, 2));
const [{ invocations: { batch: invocationsList } }] = message;
Array.from(invocationsList).forEach((invocation: any) => {
const txData = invocation.transaction.data;
const decoded = contractInterface.parseTransaction({ data: txData });
log(`method: ${decoded.name}, value: ${decoded.args[0]}`);
});
break;
}
case MESSAGE_KINDS.REVOKE: {
const { signedDelegation, signedIntendedRevocation } = message;
log('Signed delegation:');
log(JSON.stringify(signedDelegation, null, 2));
log('Signed intention to revoke:');
const stringifiedSignedIntendedRevocation = JSON.stringify(
signedIntendedRevocation,
(key, value) => {
if (key === 'delegationHash' && value.type === 'Buffer') {
// Show hex value for delegationHash instead of Buffer
return ethers.utils.hexlify(Buffer.from(value));
}
return value;
},
2
);
log(stringifiedSignedIntendedRevocation);
break;
}
default:
break;
}
log('------------------------------------------');
}
main().then(() => {
log('Starting server...');
}).catch(err => {
log(err);
});

View File

@ -0,0 +1,3 @@
//
// Copyright 2023 Vulcanize, Inc.
//

View File

@ -0,0 +1,74 @@
{
"compilerOptions": {
/* Visit https://aka.ms/tsconfig.json to read more about this file */
/* Basic Options */
// "incremental": true, /* Enable incremental compilation */
"target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
"lib": ["es2019"], /* Specify library files to be included in the compilation. */
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */
// "declaration": true, /* Generates corresponding '.d.ts' file. */
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
"sourceMap": true, /* Generates corresponding '.map' file. */
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "dist", /* Redirect output structure to the directory. */
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "composite": true, /* Enable project compilation */
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
// "removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
"downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
"strict": true, /* Enable all strict type-checking options. */
// "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* Enable strict null checks. */
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
// "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
/* Additional Checks */
// "noUnusedLocals": true, /* Report errors on unused locals. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
// "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an 'override' modifier. */
// "noPropertyAccessFromIndexSignature": true, /* Require undeclared properties from index signatures to use element accesses. */
/* Module Resolution Options */
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [], /* List of folders to include type definitions from. */
// "types": [], /* Type declaration files to be included in compilation. */
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
"esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
/* Source Map Options */
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
/* Experimental Options */
"experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
/* Advanced Options */
"skipLibCheck": true, /* Skip type checking of declaration files. */
"forceConsistentCasingInFileNames": true, /* Disallow inconsistently-cased references to the same file. */
"resolveJsonModule": true /* Enabling the option allows importing JSON, and validating the types in that JSON file. */
},
"include": ["src/**/*"]
}

View File

@ -10,6 +10,10 @@ Package used for connecting between peers and send messages
- [x] Use package in server
- [x] Send messages between systems in different LANs using relay node
## Note
- Avoid any nodejs-specific exports from this package as it is intended to be used in browser applications as well (e.g. avoid re-exporting helpers that depend on node builtins like `fs` or `path`)
## Known Issues
- `peer:disconnect` event is not fired when remote peer browser is closed

View File

@ -5,10 +5,7 @@ import path from 'path';
import { RelayNodeInit, createRelayNode } from '../relay.js';
import { PeerIdObj } from '../peer.js';
const DEFAULT_HOST = '127.0.0.1';
const DEFAULT_PORT = 9090;
const DEFAULT_MAX_DIAL_RETRY = 5;
import { RELAY_DEFAULT_HOST, RELAY_DEFAULT_PORT, RELAY_DEFAULT_MAX_DIAL_RETRY } from '../constants.js';
interface Arguments {
host: string;
@ -58,20 +55,20 @@ async function main (): Promise<void> {
await createRelayNode(relayNodeInit);
}
function _getArgv (): any {
function _getArgv (): Arguments {
return yargs(hideBin(process.argv)).parserConfiguration({
'parse-numbers': false
}).options({
host: {
type: 'string',
alias: 'h',
default: DEFAULT_HOST,
default: RELAY_DEFAULT_HOST,
describe: 'Host to bind to'
},
port: {
type: 'number',
alias: 'p',
default: DEFAULT_PORT,
default: RELAY_DEFAULT_PORT,
describe: 'Port to start listening on'
},
announce: {
@ -91,8 +88,8 @@ function _getArgv (): any {
},
maxDialRetry: {
type: 'number',
describe: 'Maximum number of retries for dialling a relay peer',
default: DEFAULT_MAX_DIAL_RETRY
describe: 'Maximum number of dial retries to be attempted to a relay peer',
default: RELAY_DEFAULT_MAX_DIAL_RETRY
}
// https://github.com/yargs/yargs/blob/main/docs/typescript.md?plain=1#L83
}).parseSync();

View File

@ -55,3 +55,14 @@ export const MIN_CONNECTIONS = 0;
// How long a dial is allowed to take before it's aborted
export const DIAL_TIMEOUT = 10000; // 10 seconds
// Relay node defaults
// Default host to bind relay server to
export const RELAY_DEFAULT_HOST = '127.0.0.1';
// Default port to start listening on
export const RELAY_DEFAULT_PORT = 9090;
// Default max number of dial retries to a relay peer
export const RELAY_DEFAULT_MAX_DIAL_RETRY = 5;

View File

@ -5,3 +5,4 @@
export { Peer, PeerIdObj, createPeerId } from './peer.js';
export { RelayNodeInit, createRelayNode } from './relay.js';
export { getPseudonymForPeerId } from './utils/index.js';
export { RELAY_DEFAULT_HOST, RELAY_DEFAULT_PORT, RELAY_DEFAULT_MAX_DIAL_RETRY } from './constants.js';

View File

@ -152,7 +152,7 @@ export class Peer {
console.log('libp2p node created', this._node);
this._peerHeartbeatChecker = new PeerHearbeatChecker(this._node);
// Dial to the HOP enabled relay node
// Dial to the HOP enabled primary relay node
await this._dialRelay();
// Listen for change in stored multiaddrs
@ -323,7 +323,7 @@ export class Peer {
async _dialRelay (): Promise<void> {
assert(this._node);
const relayMultiaddr = this._relayNodeMultiaddr;
console.log('Dialling relay node');
console.log('Dialling primary relay node');
const connection = await dialWithRetry(
this._node,
@ -473,7 +473,7 @@ export class Peer {
this._peerHeartbeatChecker?.stop(disconnectedPeerId);
if (disconnectedPeerId.toString() === this._relayNodeMultiaddr?.getPeerId()) {
// Reconnect to relay node if disconnected
// Reconnect to primary relay node if disconnected
await this._dialRelay();
}
}

View File

@ -36,6 +36,43 @@ export interface GQLCacheConfig {
timeTravelMaxAge: number;
}
// Relay node config
export interface RelayConfig {
// Host to bind the relay server to
host?: string;
// Port to start listening on
port?: number;
// Domain name to be used in the announce address
announce?: string;
// Relay peer id file path (json)
peerIdFile?: string;
// Relay peer multiaddr(s) list
relayPeers?: string[];
// Max number of dial retries to be attempted to a relay peer
maxDialRetry?: number;
}
// P2P config
export interface P2PConfig {
// Enable relay node
enableRelay: boolean;
relay: RelayConfig;
// Enable peer node
enablePeer: boolean;
// Multiaddr of the primary relay node for this peer
relayMultiaddr: string;
// Pubsub topic to subscribe this peer to
pubSubTopic: string;
}
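// For illustration, a peer-only watcher might be configured with values along these lines
// (a sketch; the relay multiaddr and pubsub topic below are placeholders):
// {
//   enableRelay: false,
//   relay: {},
//   enablePeer: true,
//   relayMultiaddr: '/ip4/127.0.0.1/tcp/9090/ws/p2p/<relay-peer-id>',
//   pubSubTopic: 'mobymask'
// }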
export interface ServerConfig {
host: string;
port: number;
@ -63,11 +100,13 @@ export interface ServerConfig {
loadRelationsSequential: boolean;
// GQL cache-control max-age settings (in seconds)
gqlCache: GQLCacheConfig
gqlCache: GQLCacheConfig;
p2p: P2PConfig;
}
export interface UpstreamConfig {
cache: CacheConfig,
cache: CacheConfig;
ethServer: {
gqlApiEndpoint: string;
rpcProviderEndpoint: string;
@ -88,9 +127,9 @@ export interface MetricsConfig {
export interface Config {
server: ServerConfig;
database: ConnectionOptions;
upstream: UpstreamConfig,
jobQueue: JobQueueConfig,
metrics: MetricsConfig,
upstream: UpstreamConfig;
jobQueue: JobQueueConfig;
metrics: MetricsConfig;
}
export const getConfig = async<ConfigType> (configFile: string): Promise<ConfigType> => {