mirror of https://github.com/cerc-io/watcher-ts (synced 2024-11-19 20:36:19 +00:00)

Refactor watch-contract CLI to cli package (#245)

* Refactor watch-contract CLI to cli package
* Use watch-contract from cli in eden and erc721 watchers
* Use watch-contract from cli in all watchers
* Pass an object containing clients to refactored CLI

Co-authored-by: nabarun <nabarun@deepstacksoft.com>

This commit is contained in:
parent a0ba657d17
commit 7520e9012c
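The interface-level change running through the hunks below is that watcher Indexer constructors now receive a single clients object instead of a bare EthClient, so the shared CLI can forward extra watcher-specific clients alongside ethClient. A minimal before/after sketch of one call site, taken from the diffs that follow (not additional API surface):

    // Before: the eth client was a dedicated positional argument.
    const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);

    // After: clients are bundled into an object; the CLI can spread in more clients as needed.
    const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);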
packages/cli/.eslintignore (new file)
@@ -0,0 +1,5 @@
# Don't lint node_modules.
node_modules

# Don't lint build output.
dist
packages/cli/.eslintrc.json (new file)
@@ -0,0 +1,27 @@
{
  "env": {
    "browser": true,
    "es2021": true
  },
  "extends": [
    "semistandard",
    "plugin:@typescript-eslint/recommended"
  ],
  "parser": "@typescript-eslint/parser",
  "parserOptions": {
    "ecmaVersion": 12,
    "sourceType": "module"
  },
  "plugins": [
    "@typescript-eslint"
  ],
  "rules": {
    "@typescript-eslint/no-explicit-any": "off",
    "@typescript-eslint/explicit-module-boundary-types": [
      "warn",
      {
        "allowArgumentsExplicitlyTypedAsAny": true
      }
    ]
  }
}
packages/cli/.npmignore (new file)
@@ -0,0 +1,4 @@
/src/
tsconfig.json
.eslintrc.json
.eslintignore
packages/cli/package.json (new file)
@@ -0,0 +1,30 @@
{
  "name": "@cerc-io/cli",
  "version": "0.2.13",
  "main": "dist/index.js",
  "license": "AGPL-3.0",
  "scripts": {
    "lint": "eslint .",
    "build": "yarn clean && tsc && yarn copy-assets",
    "clean": "rm -rf ./dist",
    "copy-assets": "copyfiles -u 1 src/**/*.gql dist/"
  },
  "dependencies": {
    "@cerc-io/graph-node": "^0.2.13",
    "@cerc-io/ipld-eth-client": "^0.2.13",
    "@cerc-io/util": "^0.2.13",
    "@ethersproject/providers": "^5.4.4",
    "reflect-metadata": "^0.1.13",
    "typeorm": "^0.2.32",
    "yargs": "^17.0.1"
  },
  "devDependencies": {
    "@typescript-eslint/eslint-plugin": "^4.25.0",
    "@typescript-eslint/parser": "^4.25.0",
    "eslint-config-semistandard": "^15.0.1",
    "eslint-config-standard": "^5.0.0",
    "eslint-plugin-import": "^2.23.3",
    "eslint-plugin-node": "^11.1.0",
    "eslint-plugin-promise": "^5.1.0"
  }
}
packages/cli/src/index.ts (new file)
@@ -0,0 +1,5 @@
//
// Copyright 2022 Vulcanize, Inc.
//

export * from './watch-contract';
packages/cli/src/watch-contract.ts (new file)
@@ -0,0 +1,163 @@
//
// Copyright 2022 Vulcanize, Inc.
//

import yargs from 'yargs';
import 'reflect-metadata';
import assert from 'assert';
import { ConnectionOptions } from 'typeorm';

import { JsonRpcProvider } from '@ethersproject/providers';
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
import { EthClient } from '@cerc-io/ipld-eth-client';
import {
  DEFAULT_CONFIG_PATH,
  Config,
  getConfig,
  initClients,
  JobQueue,
  DatabaseInterface,
  IndexerInterface,
  ServerConfig,
  Database as BaseDatabase,
  Clients
} from '@cerc-io/util';

interface Arguments {
  [x: string]: unknown;
  configFile: string;
  address: string;
  kind: string;
  checkpoint: boolean;
  startingBlock: number;
}

export class WatchContractCmd {
  _argv?: Arguments
  _config?: Config;
  _clients?: Clients;
  _ethClient?: EthClient;
  _ethProvider?: JsonRpcProvider
  _database?: DatabaseInterface
  _indexer?: IndexerInterface

  async initConfig<ConfigType> (): Promise<ConfigType> {
    this._argv = this._getArgv();
    assert(this._argv);

    this._config = await getConfig(this._argv.configFile);
    assert(this._config);

    return this._config as any;
  }

  async init (
    Database: new (
      config: ConnectionOptions,
      serverConfig?: ServerConfig
    ) => DatabaseInterface,
    Indexer: new (
      serverConfig: ServerConfig,
      db: DatabaseInterface,
      clients: Clients,
      ethProvider: JsonRpcProvider,
      jobQueue: JobQueue,
      graphWatcher?: GraphWatcher
    ) => IndexerInterface,
    clients: { [key: string]: any } = {}
  ): Promise<void> {
    if (!this._config) {
      await this.initConfig();
    }
    assert(this._config);

    this._database = new Database(this._config.database, this._config.server);
    await this._database.init();

    const jobQueueConfig = this._config.jobQueue;
    assert(jobQueueConfig, 'Missing job queue config');

    const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
    assert(dbConnectionString, 'Missing job queue db connection string');

    const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
    await jobQueue.start();

    const { ethClient, ethProvider } = await initClients(this._config);
    this._ethClient = ethClient;
    this._ethProvider = ethProvider;
    this._clients = { ethClient, ...clients };

    // Check if subgraph watcher.
    if (this._config.server.subgraphPath) {
      const graphWatcher = await this._getGraphWatcher(this._database.baseDatabase);
      this._indexer = new Indexer(this._config.server, this._database, this._clients, ethProvider, jobQueue, graphWatcher);
      await this._indexer.init();

      graphWatcher.setIndexer(this._indexer);
      await graphWatcher.init();
    } else {
      this._indexer = new Indexer(this._config.server, this._database, this._clients, ethProvider, jobQueue);
      await this._indexer.init();
    }
  }

  async exec (): Promise<void> {
    assert(this._argv);
    assert(this._database);
    assert(this._indexer);
    assert(this._indexer.watchContract);

    await this._indexer.watchContract(this._argv.address, this._argv.kind, this._argv.checkpoint, this._argv.startingBlock);
    await this._database.close();
  }

  async _getGraphWatcher (baseDatabase: BaseDatabase): Promise<GraphWatcher> {
    assert(this._config);
    assert(this._ethClient);
    assert(this._ethProvider);

    const graphDb = new GraphDatabase(this._config.server, baseDatabase);
    await graphDb.init();

    return new GraphWatcher(graphDb, this._ethClient, this._ethProvider, this._config.server);
  }

  _getArgv (): any {
    return yargs.parserConfiguration({
      'parse-numbers': false
    }).options({
      configFile: {
        alias: 'f',
        type: 'string',
        require: true,
        demandOption: true,
        describe: 'Configuration file path (toml)',
        default: DEFAULT_CONFIG_PATH
      },
      address: {
        type: 'string',
        require: true,
        demandOption: true,
        describe: 'Address of the deployed contract'
      },
      kind: {
        type: 'string',
        require: true,
        demandOption: true,
        describe: 'Kind of contract'
      },
      checkpoint: {
        type: 'boolean',
        require: true,
        demandOption: true,
        describe: 'Turn checkpointing on'
      },
      startingBlock: {
        type: 'number',
        default: 1,
        describe: 'Starting block'
      }
    }).argv;
  }
}
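With WatchContractCmd in place, each watcher's own watch-contract command reduces to a thin wrapper that hands its Database and Indexer classes to the shared CLI. This is the pattern repeated in the watcher hunks further below (shown here only for orientation):

    import 'reflect-metadata';
    import debug from 'debug';

    import { WatchContractCmd } from '@cerc-io/cli';

    import { Database } from '../database';
    import { Indexer } from '../indexer';

    const log = debug('vulcanize:watch-contract');

    const main = async (): Promise<void> => {
      // The CLI builds the config, database, clients, job queue and (if configured) graph watcher.
      const watchContractCmd = new WatchContractCmd();
      await watchContractCmd.init(Database, Indexer);

      await watchContractCmd.exec();
    };

    main().catch(err => {
      log(err);
    }).finally(() => {
      process.exit(0);
    });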
packages/cli/tsconfig.json (new file)
@@ -0,0 +1,76 @@
{
  "compilerOptions": {
    /* Visit https://aka.ms/tsconfig.json to read more about this file */

    /* Basic Options */
    // "incremental": true,                         /* Enable incremental compilation */
    "target": "es5",                                /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */
    "module": "commonjs",                           /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
    "lib": [ "ES5", "ES6", "ES2020" ],              /* Specify library files to be included in the compilation. */
    // "allowJs": true,                             /* Allow javascript files to be compiled. */
    // "checkJs": true,                             /* Report errors in .js files. */
    // "jsx": "preserve",                           /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */
    "declaration": true,                            /* Generates corresponding '.d.ts' file. */
    // "declarationMap": true,                      /* Generates a sourcemap for each corresponding '.d.ts' file. */
    "sourceMap": true,                              /* Generates corresponding '.map' file. */
    // "outFile": "./",                             /* Concatenate and emit output to single file. */
    "outDir": "dist",                               /* Redirect output structure to the directory. */
    // "rootDir": "./",                             /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
    // "composite": true,                           /* Enable project compilation */
    // "tsBuildInfoFile": "./",                     /* Specify file to store incremental compilation information */
    // "removeComments": true,                      /* Do not emit comments to output. */
    // "noEmit": true,                              /* Do not emit outputs. */
    // "importHelpers": true,                       /* Import emit helpers from 'tslib'. */
    "downlevelIteration": true,                     /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
    // "isolatedModules": true,                     /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */

    /* Strict Type-Checking Options */
    "strict": true,                                 /* Enable all strict type-checking options. */
    // "noImplicitAny": true,                       /* Raise error on expressions and declarations with an implied 'any' type. */
    // "strictNullChecks": true,                    /* Enable strict null checks. */
    // "strictFunctionTypes": true,                 /* Enable strict checking of function types. */
    // "strictBindCallApply": true,                 /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
    // "strictPropertyInitialization": true,        /* Enable strict checking of property initialization in classes. */
    // "noImplicitThis": true,                      /* Raise error on 'this' expressions with an implied 'any' type. */
    // "alwaysStrict": true,                        /* Parse in strict mode and emit "use strict" for each source file. */

    /* Additional Checks */
    // "noUnusedLocals": true,                      /* Report errors on unused locals. */
    // "noUnusedParameters": true,                  /* Report errors on unused parameters. */
    // "noImplicitReturns": true,                   /* Report error when not all code paths in function return a value. */
    // "noFallthroughCasesInSwitch": true,          /* Report errors for fallthrough cases in switch statement. */
    // "noUncheckedIndexedAccess": true,            /* Include 'undefined' in index signature results */
    // "noImplicitOverride": true,                  /* Ensure overriding members in derived classes are marked with an 'override' modifier. */
    // "noPropertyAccessFromIndexSignature": true,  /* Require undeclared properties from index signatures to use element accesses. */

    /* Module Resolution Options */
    "moduleResolution": "node",                     /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
    // "baseUrl": "./",                             /* Base directory to resolve non-absolute module names. */
    // "paths": {},                                 /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
    // "rootDirs": [],                              /* List of root folders whose combined content represents the structure of the project at runtime. */
    "typeRoots": [
      "./src/types"
    ],                                              /* List of folders to include type definitions from. */
    // "types": [],                                 /* Type declaration files to be included in compilation. */
    // "allowSyntheticDefaultImports": true,        /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
    "esModuleInterop": true,                        /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
    // "preserveSymlinks": true,                    /* Do not resolve the real path of symlinks. */
    // "allowUmdGlobalAccess": true,                /* Allow accessing UMD globals from modules. */

    /* Source Map Options */
    // "sourceRoot": "",                            /* Specify the location where debugger should locate TypeScript files instead of source locations. */
    // "mapRoot": "",                               /* Specify the location where debugger should locate map files instead of generated locations. */
    // "inlineSourceMap": true,                     /* Emit a single file with source maps instead of having a separate file. */
    // "inlineSources": true,                       /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */

    /* Experimental Options */
    "experimentalDecorators": true,                 /* Enables experimental support for ES7 decorators. */
    // "emitDecoratorMetadata": true,               /* Enables experimental support for emitting type metadata for decorators. */

    /* Advanced Options */
    "skipLibCheck": true,                           /* Skip type checking of declaration files. */
    "forceConsistentCasingInFileNames": true,       /* Disallow inconsistently-cased references to the same file. */
    "resolveJsonModule": true                       /* Enabling the option allows importing JSON, and validating the types in that JSON file. */
  },
  "include": ["src/**/*"]
}
Changes to existing packages:

@@ -38,6 +38,7 @@
   "homepage": "https://github.com/cerc-io/watcher-ts#readme",
   "dependencies": {
     "@apollo/client": "^3.3.19",
+    "@cerc-io/cli": "^0.2.13",
     "@cerc-io/graph-node": "^0.2.13",
     "@cerc-io/ipld-eth-client": "^0.2.13",
     "@cerc-io/solidity-mapper": "^0.2.13",

@@ -51,7 +51,7 @@ export const handler = async (argv: any): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -47,7 +47,7 @@ export const handler = async (argv: any): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -61,7 +61,7 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -65,7 +65,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -54,7 +54,7 @@ const main = async (): Promise<void> => {

   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -56,7 +56,7 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -46,7 +46,7 @@ export const handler = async (argv: any): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);
@@ -2,85 +2,21 @@
 // Copyright 2021 Vulcanize, Inc.
 //

-import yargs from 'yargs';
 import 'reflect-metadata';
 import debug from 'debug';
-import assert from 'assert';

-import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@cerc-io/util';
-import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
+import { WatchContractCmd } from '@cerc-io/cli';

-import { Database, ENTITY_TO_LATEST_ENTITY_MAP } from '../database';
+import { Database } from '../database';
 import { Indexer } from '../indexer';

 const log = debug('vulcanize:watch-contract');

 const main = async (): Promise<void> => {
-  const argv = await yargs.parserConfiguration({
-    'parse-numbers': false
-  }).options({
-    configFile: {
-      alias: 'f',
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'Configuration file path (toml)',
-      default: DEFAULT_CONFIG_PATH
-    },
-    address: {
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'Address of the deployed contract'
-    },
-    kind: {
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'Kind of contract'
-    },
-    checkpoint: {
-      type: 'boolean',
-      require: true,
-      demandOption: true,
-      describe: 'Turn checkpointing on'
-    },
-    startingBlock: {
-      type: 'number',
-      default: 1,
-      describe: 'Starting block'
-    }
-  }).argv;
-
-  const config: Config = await getConfig(argv.configFile);
-  const { ethClient, ethProvider } = await initClients(config);
-
-  const db = new Database(config.database);
-  await db.init();
-
-  const graphDb = new GraphDatabase(config.server, db.baseDatabase, ENTITY_TO_LATEST_ENTITY_MAP);
-  await graphDb.init();
-
-  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
-
-  const jobQueueConfig = config.jobQueue;
-  assert(jobQueueConfig, 'Missing job queue config');
-
-  const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
-  assert(dbConnectionString, 'Missing job queue db connection string');
-
-  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
-  await jobQueue.start();
-
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
-  await indexer.init();
-
-  graphWatcher.setIndexer(indexer);
-  await graphWatcher.init();
-
-  await indexer.watchContract(argv.address, argv.kind, argv.checkpoint, argv.startingBlock);
-
-  await db.close();
+  const watchContractCmd = new WatchContractCmd();
+  await watchContractCmd.init(Database, Indexer);
+
+  await watchContractCmd.exec();
 };

 main().catch(err => {
@@ -79,7 +79,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -24,7 +24,9 @@ import {
   StateStatus,
   ValueResult,
   ResultEvent,
-  getResultEvent
+  getResultEvent,
+  DatabaseInterface,
+  Clients
 } from '@cerc-io/util';
 import { GraphWatcher } from '@cerc-io/graph-node';

@@ -78,15 +80,17 @@ export class Indexer implements IndexerInterface {

   _subgraphStateMap: Map<string, any>

-  constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, ethProvider: BaseProvider, jobQueue: JobQueue, graphWatcher: GraphWatcher) {
+  constructor (serverConfig: ServerConfig, db: DatabaseInterface, clients: Clients, ethProvider: BaseProvider, jobQueue: JobQueue, graphWatcher?: GraphWatcher) {
     assert(db);
-    assert(ethClient);
+    assert(clients.ethClient);

-    this._db = db;
-    this._ethClient = ethClient;
+    this._db = db as Database;
+    this._ethClient = clients.ethClient;
     this._ethProvider = ethProvider;
     this._serverConfig = serverConfig;
     this._baseIndexer = new BaseIndexer(this._serverConfig, this._db, this._ethClient, this._ethProvider, jobQueue);

+    assert(graphWatcher);
     this._graphWatcher = graphWatcher;

     this._abiMap = new Map();

@@ -108,7 +108,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -59,7 +59,7 @@ export const main = async (): Promise<any> => {

   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -46,6 +46,7 @@
     "@cerc-io/ipld-eth-client": "^0.2.13",
     "@cerc-io/solidity-mapper": "^0.2.13",
     "@cerc-io/util": "^0.2.13",
+    "@cerc-io/cli": "^0.2.13",
     "@ethersproject/providers": "^5.4.4",
     "@types/lodash": "^4.14.168",
     "apollo-type-bigint": "^0.1.3",

@@ -39,7 +39,7 @@ export const handler = async (argv: any): Promise<void> => {

   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);

   const syncStatus = await indexer.getSyncStatus();
   assert(syncStatus, 'Missing syncStatus');
@@ -2,68 +2,25 @@
 // Copyright 2021 Vulcanize, Inc.
 //

-import assert from 'assert';
-import yargs from 'yargs';
 import 'reflect-metadata';
+import debug from 'debug';

-import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@cerc-io/util';
+import { WatchContractCmd } from '@cerc-io/cli';

 import { Database } from '../database';
 import { Indexer } from '../indexer';
-import { CONTRACT_KIND } from '../utils/index';

-(async () => {
-  const argv = await yargs.parserConfiguration({
-    'parse-numbers': false
-  }).options({
-    configFile: {
-      alias: 'f',
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'configuration file path (toml)',
-      default: DEFAULT_CONFIG_PATH
-    },
-    address: {
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'Address of the deployed contract'
-    },
-    checkpoint: {
-      type: 'boolean',
-      default: false,
-      describe: 'Turn checkpointing on'
-    },
-    startingBlock: {
-      type: 'number',
-      default: 1,
-      describe: 'Starting block'
-    }
-  }).argv;
-
-  const config: Config = await getConfig(argv.configFile);
-  const { database: dbConfig, server, jobQueue: jobQueueConfig } = config;
-  const { ethClient, ethProvider } = await initClients(config);
-
-  assert(dbConfig);
-
-  const db = new Database(dbConfig);
-  await db.init();
-
-  assert(jobQueueConfig, 'Missing job queue config');
-
-  const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
-  assert(dbConnectionString, 'Missing job queue db connection string');
-
-  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
-  await jobQueue.start();
-
-  const indexer = new Indexer(server, db, ethClient, ethProvider, jobQueue);
-
-  await indexer.watchContract(argv.address, CONTRACT_KIND, argv.checkpoint, argv.startingBlock);
-
-  await db.close();
-  await jobQueue.stop();
-  process.exit();
-})();
+const log = debug('vulcanize:watch-contract');
+
+const main = async (): Promise<void> => {
+  const watchContractCmd = new WatchContractCmd();
+  await watchContractCmd.init(Database, Indexer);
+
+  await watchContractCmd.exec();
+};
+
+main().catch(err => {
+  log(err);
+}).finally(() => {
+  process.exit(0);
+});
@@ -35,6 +35,10 @@ export class Database implements DatabaseInterface {
     this._baseDatabase = new BaseDatabase(this._config);
   }

+  get baseDatabase (): BaseDatabase {
+    return this._baseDatabase;
+  }
+
   async init (): Promise<void> {
     this._conn = await this._baseDatabase.init();
   }

@@ -74,7 +74,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);

   const eventWatcher = new EventWatcher(config.upstream, ethClient, indexer, pubsub, jobQueue);

@@ -12,7 +12,7 @@ import { BaseProvider } from '@ethersproject/providers';

 import { EthClient } from '@cerc-io/ipld-eth-client';
 import { MappingKey, StorageLayout } from '@cerc-io/solidity-mapper';
-import { IndexerInterface, Indexer as BaseIndexer, ValueResult, JobQueue, Where, QueryOptions, ServerConfig, StateStatus } from '@cerc-io/util';
+import { IndexerInterface, Indexer as BaseIndexer, ValueResult, JobQueue, Where, QueryOptions, ServerConfig, StateStatus, DatabaseInterface, Clients } from '@cerc-io/util';

 import { Database, ENTITIES } from './database';
 import { Event } from './entity/Event';

@@ -56,12 +56,12 @@ export class Indexer implements IndexerInterface {
   _contract: ethers.utils.Interface
   _serverMode: string

-  constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, ethProvider: BaseProvider, jobQueue: JobQueue) {
+  constructor (serverConfig: ServerConfig, db: DatabaseInterface, clients: Clients, ethProvider: BaseProvider, jobQueue: JobQueue) {
     assert(db);
-    assert(ethClient);
+    assert(clients.ethClient);

-    this._db = db;
-    this._ethClient = ethClient;
+    this._db = db as Database;
+    this._ethClient = clients.ethClient;
     this._ethProvider = ethProvider;
     this._serverConfig = serverConfig;
     this._serverMode = serverConfig.mode;

@@ -86,7 +86,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();

   const jobRunner = new JobRunner(jobQueueConfig, indexer, jobQueue);

@@ -54,7 +54,7 @@ export const main = async (): Promise<any> => {

   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();

   const eventWatcher = new EventWatcher(config.upstream, ethClient, indexer, pubsub, jobQueue);

@@ -49,6 +49,7 @@
     "@cerc-io/ipld-eth-client": "^0.2.13",
     "@cerc-io/solidity-mapper": "^0.2.13",
     "@cerc-io/util": "^0.2.13",
+    "@cerc-io/cli": "^0.2.13",
     "@ethersproject/providers": "^5.4.4",
     "@ipld/dag-cbor": "^6.0.12",
     "apollo-type-bigint": "^0.1.3",

@@ -45,7 +45,7 @@ export const handler = async (argv: any): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();

   const blockHash = await indexer.processCLICheckpoint(argv.address, argv.blockHash);

@@ -55,7 +55,7 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();

   const exportData: any = {

@@ -59,7 +59,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();

   const eventWatcher = new EventWatcher(config.upstream, ethClient, indexer, pubsub, jobQueue);

@@ -48,7 +48,7 @@ const main = async (): Promise<void> => {

   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();

   await indexBlock(indexer, jobQueueConfig.eventsInBatch, argv);

@@ -50,7 +50,7 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();

   const state = await indexer.getStateByCID(argv.cid);

@@ -40,7 +40,7 @@ export const handler = async (argv: any): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();

   await indexer.resetWatcherToBlock(argv.blockNumber);
@@ -2,12 +2,10 @@
 // Copyright 2021 Vulcanize, Inc.
 //

-import yargs from 'yargs';
 import 'reflect-metadata';
 import debug from 'debug';
-import assert from 'assert';

-import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@cerc-io/util';
+import { WatchContractCmd } from '@cerc-io/cli';

 import { Database } from '../database';
 import { Indexer } from '../indexer';

@@ -15,64 +13,10 @@ import { Indexer } from '../indexer';
 const log = debug('vulcanize:watch-contract');

 const main = async (): Promise<void> => {
-  const argv = await yargs.parserConfiguration({
-    'parse-numbers': false
-  }).options({
-    configFile: {
-      alias: 'f',
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'Configuration file path (toml)',
-      default: DEFAULT_CONFIG_PATH
-    },
-    address: {
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'Address of the deployed contract'
-    },
-    kind: {
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'Kind of contract'
-    },
-    checkpoint: {
-      type: 'boolean',
-      require: true,
-      demandOption: true,
-      describe: 'Turn checkpointing on'
-    },
-    startingBlock: {
-      type: 'number',
-      default: 1,
-      describe: 'Starting block'
-    }
-  }).argv;
-
-  const config: Config = await getConfig(argv.configFile);
-  const { ethClient, ethProvider } = await initClients(config);
-
-  const db = new Database(config.database);
-  await db.init();
-
-  const jobQueueConfig = config.jobQueue;
-  assert(jobQueueConfig, 'Missing job queue config');
-
-  const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
-  assert(dbConnectionString, 'Missing job queue db connection string');
-
-  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
-  await jobQueue.start();
-
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
-  await indexer.init();
-
-  await indexer.watchContract(argv.address, argv.kind, argv.checkpoint, argv.startingBlock);
-
-  await db.close();
-  await jobQueue.stop();
+  const watchContractCmd = new WatchContractCmd();
+  await watchContractCmd.init(Database, Indexer);
+
+  await watchContractCmd.exec();
 };

 main().catch(err => {
@@ -50,6 +50,10 @@ export class Database implements DatabaseInterface {
     this._propColMaps = {};
   }

+  get baseDatabase (): BaseDatabase {
+    return this._baseDatabase;
+  }
+
   async init (): Promise<void> {
     this._conn = await this._baseDatabase.init();
     this._setPropColMaps();

@@ -67,7 +67,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();

   // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.

@@ -26,7 +26,9 @@ import {
   StateKind,
   StateStatus,
   ResultEvent,
-  getResultEvent
+  getResultEvent,
+  DatabaseInterface,
+  Clients
 } from '@cerc-io/util';

 import ERC721Artifacts from './artifacts/ERC721.json';

@@ -56,12 +58,12 @@ export class Indexer implements IndexerInterface {
   _storageLayoutMap: Map<string, StorageLayout>
   _contractMap: Map<string, ethers.utils.Interface>

-  constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, ethProvider: BaseProvider, jobQueue: JobQueue) {
+  constructor (serverConfig: ServerConfig, db: DatabaseInterface, clients: Clients, ethProvider: BaseProvider, jobQueue: JobQueue) {
     assert(db);
-    assert(ethClient);
+    assert(clients.ethClient);

-    this._db = db;
-    this._ethClient = ethClient;
+    this._db = db as Database;
+    this._ethClient = clients.ethClient;
     this._ethProvider = ethProvider;
     this._serverConfig = serverConfig;
     this._baseIndexer = new BaseIndexer(this._serverConfig, this._db, this._ethClient, this._ethProvider, jobQueue);

@@ -102,7 +102,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();

   const jobRunner = new JobRunner(jobQueueConfig, indexer, jobQueue);

@@ -53,7 +53,7 @@ export const main = async (): Promise<any> => {

   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();

   const eventWatcher = new EventWatcher(config.upstream, ethClient, indexer, pubsub, jobQueue);

@@ -38,6 +38,10 @@ export class Indexer implements IndexerInterface {
     return this._storageLayoutMap;
   }

+  async init (): Promise<void> {
+    return undefined;
+  }
+
   getResultEvent (event: EventInterface): ResultEvent {
     return getResultEvent(event);
   }

@@ -43,6 +43,7 @@
     "@cerc-io/ipld-eth-client": "^0.2.13",
     "@cerc-io/solidity-mapper": "^0.2.13",
     "@cerc-io/util": "^0.2.13",
+    "@cerc-io/cli": "^0.2.13",
     "@ethersproject/providers": "^5.4.4",
     "@ipld/dag-cbor": "^6.0.12",
     "apollo-type-bigint": "^0.1.3",

@@ -51,7 +51,7 @@ export const handler = async (argv: any): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -47,7 +47,7 @@ export const handler = async (argv: any): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -61,7 +61,7 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -65,7 +65,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -54,7 +54,7 @@ const main = async (): Promise<void> => {

   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -56,7 +56,7 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);

@@ -46,7 +46,7 @@ export const handler = async (argv: any): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();

-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();

   graphWatcher.setIndexer(indexer);
@ -2,13 +2,10 @@
|
|||||||
// Copyright 2021 Vulcanize, Inc.
|
// Copyright 2021 Vulcanize, Inc.
|
||||||
//
|
//
|
||||||
|
|
||||||
import yargs from 'yargs';
|
|
||||||
import 'reflect-metadata';
|
import 'reflect-metadata';
|
||||||
import debug from 'debug';
|
import debug from 'debug';
|
||||||
import assert from 'assert';
|
|
||||||
|
|
||||||
import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@cerc-io/util';
|
import { WatchContractCmd } from '@cerc-io/cli';
|
||||||
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
|
|
||||||
|
|
||||||
import { Database } from '../database';
|
import { Database } from '../database';
|
||||||
import { Indexer } from '../indexer';
|
import { Indexer } from '../indexer';
|
||||||
@@ -16,71 +13,10 @@ import { Indexer } from '../indexer';
 const log = debug('vulcanize:watch-contract');
 
 const main = async (): Promise<void> => {
-  const argv = await yargs.parserConfiguration({
-    'parse-numbers': false
-  }).options({
-    configFile: {
-      alias: 'f',
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'Configuration file path (toml)',
-      default: DEFAULT_CONFIG_PATH
-    },
-    address: {
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'Address of the deployed contract'
-    },
-    kind: {
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'Kind of contract'
-    },
-    checkpoint: {
-      type: 'boolean',
-      require: true,
-      demandOption: true,
-      describe: 'Turn checkpointing on'
-    },
-    startingBlock: {
-      type: 'number',
-      default: 1,
-      describe: 'Starting block'
-    }
-  }).argv;
-
-  const config: Config = await getConfig(argv.configFile);
-  const { ethClient, ethProvider } = await initClients(config);
-
-  const db = new Database(config.database);
-  await db.init();
-
-  const graphDb = new GraphDatabase(config.server, db.baseDatabase);
-  await graphDb.init();
-
-  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
-
-  const jobQueueConfig = config.jobQueue;
-  assert(jobQueueConfig, 'Missing job queue config');
-
-  const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
-  assert(dbConnectionString, 'Missing job queue db connection string');
-
-  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
-  await jobQueue.start();
-
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
-  await indexer.init();
-
-  graphWatcher.setIndexer(indexer);
-  await graphWatcher.init();
-
-  await indexer.watchContract(argv.address, argv.kind, argv.checkpoint, argv.startingBlock);
-
-  await db.close();
+  const watchContractCmd = new WatchContractCmd();
+  await watchContractCmd.init(Database, Indexer);
+
+  await watchContractCmd.exec();
 };
 
 main().catch(err => {
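For context, everything the deleted `main` body did is what a command wrapper such as `WatchContractCmd` now has to take care of. The sketch below only illustrates that wiring, reconstructed from the boilerplate removed above; the real `@cerc-io/cli` implementation is not part of this diff, and the class name, method split and `any`-typed parameters here are assumptions.

```ts
// Illustrative sketch only: mirrors the boilerplate deleted above so the intent of
// WatchContractCmd is clear. Not the actual @cerc-io/cli code.
import yargs from 'yargs';
import 'reflect-metadata';
import assert from 'assert';

import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@cerc-io/util';

export class WatchContractCmdSketch {
  // DatabaseClass / IndexerClass stand for the concrete watcher classes a caller passes in.
  async run (DatabaseClass: any, IndexerClass: any): Promise<void> {
    const argv = await yargs.parserConfiguration({ 'parse-numbers': false }).options({
      configFile: { alias: 'f', type: 'string', demandOption: true, describe: 'Configuration file path (toml)', default: DEFAULT_CONFIG_PATH },
      address: { type: 'string', demandOption: true, describe: 'Address of the deployed contract' },
      kind: { type: 'string', demandOption: true, describe: 'Kind of contract' },
      checkpoint: { type: 'boolean', demandOption: true, describe: 'Turn checkpointing on' },
      startingBlock: { type: 'number', default: 1, describe: 'Starting block' }
    }).argv;

    // Load config and upstream clients, exactly as the deleted main() did.
    const config: Config = await getConfig(argv.configFile);
    const { ethClient, ethProvider } = await initClients(config);

    const db = new DatabaseClass(config.database);
    await db.init();

    const jobQueueConfig = config.jobQueue;
    assert(jobQueueConfig, 'Missing job queue config');
    const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
    assert(dbConnectionString, 'Missing job queue db connection string');

    const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
    await jobQueue.start();

    // New constructor shape from this diff: clients are passed as an object.
    const indexer = new IndexerClass(config.server, db, { ethClient }, ethProvider, jobQueue);
    await indexer.init();

    await indexer.watchContract(argv.address, argv.kind, argv.checkpoint, argv.startingBlock);

    await db.close();
    await jobQueue.stop();
  }
}
```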
@@ -73,7 +73,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();
 
   graphWatcher.setIndexer(indexer);
@@ -27,7 +27,9 @@ import {
   IndexerInterface,
   StateStatus,
   ResultEvent,
-  getResultEvent
+  getResultEvent,
+  DatabaseInterface,
+  Clients
 } from '@cerc-io/util';
 import { GraphWatcher } from '@cerc-io/graph-node';
 
@@ -67,15 +69,17 @@ export class Indexer implements IndexerInterface {
 
   _subgraphStateMap: Map<string, any>
 
-  constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, ethProvider: BaseProvider, jobQueue: JobQueue, graphWatcher: GraphWatcher) {
+  constructor (serverConfig: ServerConfig, db: DatabaseInterface, clients: Clients, ethProvider: BaseProvider, jobQueue: JobQueue, graphWatcher?: GraphWatcher) {
     assert(db);
-    assert(ethClient);
+    assert(clients.ethClient);
 
-    this._db = db;
-    this._ethClient = ethClient;
+    this._db = db as Database;
+    this._ethClient = clients.ethClient;
     this._ethProvider = ethProvider;
     this._serverConfig = serverConfig;
     this._baseIndexer = new BaseIndexer(this._serverConfig, this._db, this._ethClient, this._ethProvider, jobQueue);
 
+    assert(graphWatcher);
     this._graphWatcher = graphWatcher;
 
     this._abiMap = new Map();
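A minimal sketch of what call sites look like against this new signature. The helper name and file layout are illustrative and not part of the diff; the constructor arguments and the `init`/`setIndexer` sequence are taken from the hunks above.

```ts
// Sketch (not part of this diff): wiring a graph-node based watcher's Indexer
// from a Clients bag. The graph watcher is optional in the type but asserted
// at runtime inside the constructor.
import { BaseProvider } from '@ethersproject/providers';
import { Clients, JobQueue, ServerConfig } from '@cerc-io/util';
import { GraphWatcher } from '@cerc-io/graph-node';

// The watcher's own classes, as imported elsewhere in this diff.
import { Database } from '../database';
import { Indexer } from '../indexer';

export const createIndexer = async (
  serverConfig: ServerConfig,
  db: Database,
  clients: Clients,
  ethProvider: BaseProvider,
  jobQueue: JobQueue,
  graphWatcher: GraphWatcher
): Promise<Indexer> => {
  const indexer = new Indexer(serverConfig, db, clients, ethProvider, jobQueue, graphWatcher);
  await indexer.init();

  graphWatcher.setIndexer(indexer);
  await graphWatcher.init();

  return indexer;
};
```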
@@ -108,7 +108,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();
 
   graphWatcher.setIndexer(indexer);
@@ -59,7 +59,7 @@ export const main = async (): Promise<any> => {
 
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
   await indexer.init();
 
   graphWatcher.setIndexer(indexer);
@@ -40,6 +40,7 @@
     "@cerc-io/ipld-eth-client": "^0.2.13",
     "@cerc-io/solidity-mapper": "^0.2.13",
     "@cerc-io/util": "^0.2.13",
+    "@cerc-io/cli": "^0.2.13",
    "@ethersproject/providers": "^5.4.4",
     "@ipld/dag-cbor": "^6.0.12",
     "apollo-type-bigint": "^0.1.3",
|
@ -45,7 +45,7 @@ export const handler = async (argv: any): Promise<void> => {
|
|||||||
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
|
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
|
||||||
await jobQueue.start();
|
await jobQueue.start();
|
||||||
|
|
||||||
const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
|
const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
|
||||||
await indexer.init();
|
await indexer.init();
|
||||||
|
|
||||||
const blockHash = await indexer.processCLICheckpoint(argv.address, argv.blockHash);
|
const blockHash = await indexer.processCLICheckpoint(argv.address, argv.blockHash);
|
||||||
|
@@ -55,7 +55,7 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();
 
   const exportData: any = {
@@ -59,7 +59,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();
 
   const eventWatcher = new EventWatcher(config.upstream, ethClient, indexer, pubsub, jobQueue);
@@ -48,7 +48,7 @@ const main = async (): Promise<void> => {
 
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();
 
   await indexBlock(indexer, jobQueueConfig.eventsInBatch, argv);
@@ -50,7 +50,7 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();
 
   const state = await indexer.getStateByCID(argv.cid);
@@ -40,7 +40,7 @@ export const handler = async (argv: any): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();
 
   await indexer.resetWatcherToBlock(argv.blockNumber);
@@ -2,12 +2,10 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
-import yargs from 'yargs';
 import 'reflect-metadata';
 import debug from 'debug';
-import assert from 'assert';
 
-import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@cerc-io/util';
+import { WatchContractCmd } from '@cerc-io/cli';
 
 import { Database } from '../database';
 import { Indexer } from '../indexer';
@@ -15,64 +13,10 @@ import { Indexer } from '../indexer';
 const log = debug('vulcanize:watch-contract');
 
 const main = async (): Promise<void> => {
-  const argv = await yargs.parserConfiguration({
-    'parse-numbers': false
-  }).options({
-    configFile: {
-      alias: 'f',
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'Configuration file path (toml)',
-      default: DEFAULT_CONFIG_PATH
-    },
-    address: {
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'Address of the deployed contract'
-    },
-    kind: {
-      type: 'string',
-      require: true,
-      demandOption: true,
-      describe: 'Kind of contract'
-    },
-    checkpoint: {
-      type: 'boolean',
-      require: true,
-      demandOption: true,
-      describe: 'Turn checkpointing on'
-    },
-    startingBlock: {
-      type: 'number',
-      default: 1,
-      describe: 'Starting block'
-    }
-  }).argv;
-
-  const config: Config = await getConfig(argv.configFile);
-  const { ethClient, ethProvider } = await initClients(config);
-
-  const db = new Database(config.database);
-  await db.init();
-
-  const jobQueueConfig = config.jobQueue;
-  assert(jobQueueConfig, 'Missing job queue config');
-
-  const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
-  assert(dbConnectionString, 'Missing job queue db connection string');
-
-  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
-  await jobQueue.start();
-
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
-  await indexer.init();
-
-  await indexer.watchContract(argv.address, argv.kind, argv.checkpoint, argv.startingBlock);
-
-  await db.close();
-  await jobQueue.stop();
+  const watchContractCmd = new WatchContractCmd();
+  await watchContractCmd.init(Database, Indexer);
+
+  await watchContractCmd.exec();
 };
 
 main().catch(err => {
@@ -40,6 +40,10 @@ export class Database implements DatabaseInterface {
     this._propColMaps = {};
   }
 
+  get baseDatabase (): BaseDatabase {
+    return this._baseDatabase;
+  }
+
   async init (): Promise<void> {
     this._conn = await this._baseDatabase.init();
     this._setPropColMaps();
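A short sketch of what this getter enables: shared CLI code can now build the graph-node database on top of a watcher's own `Database` without reaching into its private connection. The helper name below is illustrative; the `GraphDatabase` call shape matches the boilerplate removed from watch-contract earlier in this diff.

```ts
// Illustrative helper, not part of this diff.
import { Database as GraphDatabase } from '@cerc-io/graph-node';
import { ServerConfig } from '@cerc-io/util';

// The watcher's own Database class, as shown in the hunk above.
import { Database } from '../database';

export const createGraphDb = async (serverConfig: ServerConfig, db: Database): Promise<GraphDatabase> => {
  // The public getter replaces direct access to the watcher's internal base database.
  const graphDb = new GraphDatabase(serverConfig, db.baseDatabase);
  await graphDb.init();

  return graphDb;
};
```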
@@ -67,7 +67,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();
 
   // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
@@ -27,7 +27,9 @@ import {
   StateStatus,
   getFullTransaction,
   ResultEvent,
-  getResultEvent
+  getResultEvent,
+  DatabaseInterface,
+  Clients
 } from '@cerc-io/util';
 
 import PhisherRegistryArtifacts from './artifacts/PhisherRegistry.json';
@@ -61,12 +63,12 @@ export class Indexer implements IndexerInterface {
   _storageLayoutMap: Map<string, StorageLayout>
   _contractMap: Map<string, ethers.utils.Interface>
 
-  constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, ethProvider: JsonRpcProvider, jobQueue: JobQueue) {
+  constructor (serverConfig: ServerConfig, db: DatabaseInterface, clients: Clients, ethProvider: JsonRpcProvider, jobQueue: JobQueue) {
     assert(db);
-    assert(ethClient);
+    assert(clients.ethClient);
 
-    this._db = db;
-    this._ethClient = ethClient;
+    this._db = db as Database;
+    this._ethClient = clients.ethClient;
     this._ethProvider = ethProvider;
     this._serverConfig = serverConfig;
     this._baseIndexer = new BaseIndexer(this._serverConfig, this._db, this._ethClient, this._ethProvider, jobQueue);
@@ -102,7 +102,7 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();
 
   const jobRunner = new JobRunner(jobQueueConfig, indexer, jobQueue);
@@ -53,7 +53,7 @@ export const main = async (): Promise<any> => {
 
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
   await indexer.init();
 
   const eventWatcher = new EventWatcher(config.upstream, ethClient, indexer, pubsub, jobQueue);
@@ -1,21 +0,0 @@
-//
-// Copyright 2021 Vulcanize, Inc.
-//
-
-export * from './src/config';
-export * from './src/database';
-export * from './src/job-queue';
-export * from './src/constants';
-export * from './src/misc';
-export * from './src/fill';
-export * from './src/events';
-export * from './src/types';
-export * from './src/indexer';
-export * from './src/job-runner';
-export * from './src/state-helper';
-export * from './src/graph-decimal';
-export * from './src/index-block';
-export * from './src/metrics';
-export * from './src/gql-metrics';
-export * from './src/common';
-export * from './src/server';
21
packages/util/src/index.ts
Normal file
@@ -0,0 +1,21 @@
+//
+// Copyright 2022 Vulcanize, Inc.
+//
+
+export * from './config';
+export * from './database';
+export * from './job-queue';
+export * from './constants';
+export * from './misc';
+export * from './fill';
+export * from './events';
+export * from './types';
+export * from './indexer';
+export * from './job-runner';
+export * from './state-helper';
+export * from './graph-decimal';
+export * from './index-block';
+export * from './metrics';
+export * from './gql-metrics';
+export * from './common';
+export * from './server';
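Nothing changes for consumers of the package: the barrel has simply moved under `src/` so it is compiled along with the rest of the sources, and imports keep resolving through the package root. For example (symbols are ones re-exported by the barrel above):

```ts
// Consumer-side import stays the same after the barrel move.
import { JobQueue, getConfig, DEFAULT_CONFIG_PATH } from '@cerc-io/util';
```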
@@ -3,7 +3,7 @@ import { ApolloServer } from 'apollo-server-express';
 import { createServer } from 'http';
 import { WebSocketServer } from 'ws';
 import { useServer } from 'graphql-ws/lib/use/ws';
-import { ApolloServerPluginDrainHttpServer } from 'apollo-server-core';
+import { ApolloServerPluginDrainHttpServer, ApolloServerPluginLandingPageLocalDefault } from 'apollo-server-core';
 import debug from 'debug';
 import responseCachePlugin from 'apollo-server-plugin-response-cache';
 import { InMemoryLRUCache } from '@apollo/utils.keyvaluecache';
@@ -65,7 +65,8 @@ export const createAndStartServer = async (
         }
       },
       // GQL response cache plugin
-      responseCachePlugin()
+      responseCachePlugin(),
+      ApolloServerPluginLandingPageLocalDefault({ embed: true })
     ]
   });
   await server.start();
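For reference, a stripped-down standalone sketch of the plugin wiring being changed here (the schema and resolvers are placeholders, not the watcher's real GraphQL schema). Registering `ApolloServerPluginLandingPageLocalDefault({ embed: true })` selects the local-default landing page with Apollo Sandbox embedded, so opening the GraphQL endpoint in a browser shows an interactive UI rather than a plain response.

```ts
import { ApolloServer, gql } from 'apollo-server-express';
import { ApolloServerPluginLandingPageLocalDefault } from 'apollo-server-core';
import responseCachePlugin from 'apollo-server-plugin-response-cache';

// Placeholder schema; the real server builds its schema from the watcher's resolvers.
const typeDefs = gql`
  type Query {
    hello: String
  }
`;

const resolvers = { Query: { hello: (): string => 'world' } };

export const server = new ApolloServer({
  typeDefs,
  resolvers,
  plugins: [
    // GQL response cache plugin (unchanged by this diff).
    responseCachePlugin(),
    // Added in this diff: embedded landing page / sandbox.
    ApolloServerPluginLandingPageLocalDefault({ embed: true })
  ]
});
```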
@@ -3,10 +3,12 @@
 //
 
 import { Connection, DeepPartial, EntityTarget, FindConditions, FindManyOptions, QueryRunner } from 'typeorm';
 
 import { MappingKey, StorageLayout } from '@cerc-io/solidity-mapper';
+import { EthClient } from '@cerc-io/ipld-eth-client';
 
 import { ServerConfig } from './config';
-import { Where, QueryOptions } from './database';
+import { Where, QueryOptions, Database } from './database';
 import { ValueResult, StateStatus } from './indexer';
 
 export enum StateKind {
@@ -81,6 +83,7 @@ export interface StateInterface {
 export interface IndexerInterface {
   readonly serverConfig: ServerConfig
   readonly storageLayoutMap: Map<string, StorageLayout>
+  init (): Promise<void>
   getBlockProgress (blockHash: string): Promise<BlockProgressInterface | undefined>
   getBlockProgressEntities (where: FindConditions<BlockProgressInterface>, options: FindManyOptions<BlockProgressInterface>): Promise<BlockProgressInterface[]>
   getEvent (id: string): Promise<EventInterface | undefined>
@@ -132,6 +135,8 @@ export interface EventWatcherInterface {
 
 export interface DatabaseInterface {
   _conn: Connection;
+  readonly baseDatabase: Database
+  init (): Promise<void>;
   close (): Promise<void>;
   createTransactionRunner (): Promise<QueryRunner>;
   getBlocksAtHeight (height: number, isPruned: boolean): Promise<BlockProgressInterface[]>;
@@ -170,3 +175,8 @@ export interface DatabaseInterface {
 export interface GraphDatabaseInterface {
   getEntity<Entity> (entity: (new () => Entity) | string, id: string, blockHash?: string): Promise<Entity | undefined>;
 }
+
+export type Clients = {
+  ethClient: EthClient;
+  [key: string]: any;
+}
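A minimal sketch of what the new `Clients` type admits: `ethClient` is required, and the index signature leaves room for watcher-specific clients. The extra `customClient` key below is purely illustrative and not something this diff introduces.

```ts
import { EthClient } from '@cerc-io/ipld-eth-client';
import { Clients } from '@cerc-io/util';

// Illustrative factory, not part of this diff: the required ethClient plus any
// optional, watcher-specific client allowed by the index signature.
export const buildClients = (ethClient: EthClient, customClient?: unknown): Clients => ({
  ethClient,
  ...(customClient ? { customClient } : {})
});
```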
@@ -72,5 +72,6 @@
     "forceConsistentCasingInFileNames": true, /* Disallow inconsistently-cased references to the same file. */
     "resolveJsonModule": true /* Enabling the option allows importing JSON, and validating the types in that JSON file. */
   },
+  "include": ["src"],
   "exclude": ["test", "dist", "artifacts", "cache"]
 }
@@ -6360,6 +6360,11 @@ eslint-config-standard@^16.0.3:
   resolved "https://registry.yarnpkg.com/eslint-config-standard/-/eslint-config-standard-16.0.3.tgz#6c8761e544e96c531ff92642eeb87842b8488516"
   integrity sha512-x4fmJL5hGqNJKGHSjnLdgA6U6h1YW/G2dW9fA+cyVur4SK6lyue8+UgNKWlZtUDTXvgKDD/Oa3GQjmB5kjtVvg==
 
+eslint-config-standard@^5.0.0:
+  version "5.3.5"
+  resolved "https://registry.yarnpkg.com/eslint-config-standard/-/eslint-config-standard-5.3.5.tgz#2b42bb5c9f0049b8527868e109c34ee22b13dcf6"
+  integrity sha512-QSNuJ6iCuPa7pKCVEml1lRrtQn2swtzF8y/LzWFkopI8K2cPvgq3USJwEyNOTgejUZ/CK4W+Buya9vPiKYB7gg==
+
 eslint-import-resolver-node@^0.3.4:
   version "0.3.4"
   resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.4.tgz#85ffa81942c25012d8231096ddf679c03042c717"