Mirror of https://github.com/cerc-io/watcher-ts (synced 2025-08-05 13:38:40 +00:00)
Update codegen with changes implemented in mobymask watcher (#148)

* Update codegen with index-block CLI and remove graph-node
* Add filter logs by contract flag
* Skip generating GQL API for immutable variables
* Add config for maxEventsBlockRange
* Add new flags in existing watchers
Parent: b577db287f
Commit: 1bcabd64f2
@@ -36,6 +36,7 @@ import { exportState } from './export-state';
 import { importState } from './import-state';
 import { exportInspectCID } from './inspect-cid';
 import { getSubgraphConfig } from './utils/subgraph';
+import { exportIndexBlock } from './index-block';
 
 const main = async (): Promise<void> => {
   const argv = await yargs(hideBin(process.argv))
@@ -166,7 +167,7 @@ function generateWatcher (visitor: Visitor, contracts: any[], config: any) {
   });
 
   // Register the handlebar helpers to be used in the templates.
-  registerHandlebarHelpers();
+  registerHandlebarHelpers(config);
 
   visitor.visitSubgraph(config.subgraphPath);
 
@@ -300,6 +301,11 @@ function generateWatcher (visitor: Visitor, contracts: any[], config: any) {
     ? fs.createWriteStream(path.join(outputDir, 'src/cli/inspect-cid.ts'))
     : process.stdout;
   exportInspectCID(outStream, config.subgraphPath);
+
+  outStream = outputDir
+    ? fs.createWriteStream(path.join(outputDir, 'src/cli/index-block.ts'))
+    : process.stdout;
+  exportIndexBlock(outStream);
 }
 
 function getConfig (configFile: string): any {
packages/codegen/src/index-block.ts (new file, +21 lines)
@@ -0,0 +1,21 @@
+//
+// Copyright 2022 Vulcanize, Inc.
+//
+
+import fs from 'fs';
+import path from 'path';
+import Handlebars from 'handlebars';
+import { Writable } from 'stream';
+
+const TEMPLATE_FILE = './templates/index-block-template.handlebars';
+
+/**
+ * Writes the index-block file generated from a template to a stream.
+ * @param outStream A writable output stream to write the index-block file to.
+ */
+export function exportIndexBlock (outStream: Writable): void {
+  const templateString = fs.readFileSync(path.resolve(__dirname, TEMPLATE_FILE)).toString();
+  const template = Handlebars.compile(templateString);
+  const indexBlock = template({});
+  outStream.write(indexBlock);
+}
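Like the other export* generators in codegen, exportIndexBlock simply renders a Handlebars template into any writable stream. A minimal sketch of calling it outside generate-code.ts (the output path here is illustrative):

    import fs from 'fs';
    import { exportIndexBlock } from './index-block';

    // Writes the rendered index-block CLI to a file; process.stdout also works,
    // which is what generate-code.ts falls back to when no output directory is set.
    exportIndexBlock(fs.createWriteStream('./out/src/cli/index-block.ts'));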
@@ -2,14 +2,18 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
+{{#if (subgraphPath)}}
 import path from 'path';
+{{/if}}
 import yargs from 'yargs';
 import 'reflect-metadata';
 import debug from 'debug';
 import assert from 'assert';
 
 import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@vulcanize/util';
+{{#if (subgraphPath)}}
 import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
+{{/if}}
 
 import { Database } from '../database';
 import { Indexer } from '../indexer';
@@ -45,11 +49,13 @@ const main = async (): Promise<void> => {
 
   const db = new Database(config.database);
   await db.init();
+  {{#if (subgraphPath)}}
 
   const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
   await graphDb.init();
 
   const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
+  {{/if}}
 
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
@@ -60,11 +66,11 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
   await indexer.init();
+  {{#if (subgraphPath)}}
 
   graphWatcher.setIndexer(indexer);
-  {{#if subgraphPath}}
   await graphWatcher.init();
   {{/if}}
 
@@ -12,10 +12,17 @@
   # IPFS API address (can be taken from the output on running the IPFS daemon).
   ipfsApiAddr = "/ip4/127.0.0.1/tcp/5001"
 
-  {{#if subgraphPath}}
+  {{#if (subgraphPath)}}
   subgraphPath = "{{subgraphPath}}"
   wasmRestartBlocksInterval = 20
 
   {{/if}}
+  # Boolean to filter logs by contract.
+  filterLogs = false
+
+  # Max block range for which to return events in eventsInRange GQL query.
+  # Use -1 for skipping check on block range.
+  maxEventsBlockRange = 1000
+
 [database]
   type = "postgres"
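maxEventsBlockRange is consumed further down in this diff, where getEventsInRange forwards it to this._baseIndexer.getEventsInRange. The range check itself lives in @vulcanize/util and is not part of this commit; a hedged sketch of what such a guard looks like (function name and message are assumptions, not code from this repo):

    // Hypothetical sketch of the range guard; the real check is inside @vulcanize/util.
    function checkEventsBlockRange (fromBlockNumber: number, toBlockNumber: number, maxEventsBlockRange: number): void {
      // Per the config comment above, -1 disables the check.
      if (maxEventsBlockRange > -1 && toBlockNumber - fromBlockNumber > maxEventsBlockRange) {
        throw new Error(`Events block range exceeds max value of ${maxEventsBlockRange}`);
      }
    }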
@@ -10,7 +10,9 @@ import fs from 'fs';
 import path from 'path';
 
 import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue, StateKind } from '@vulcanize/util';
+{{#if (subgraphPath)}}
 import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
+{{/if}}
 import * as codec from '@ipld/dag-cbor';
 
 import { Database } from '../database';
@@ -42,11 +44,13 @@ const main = async (): Promise<void> => {
 
   const db = new Database(config.database);
   await db.init();
+  {{#if (subgraphPath)}}
 
   const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
   await graphDb.init();
 
   const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
+  {{/if}}
 
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
@@ -57,11 +61,11 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
   await indexer.init();
+  {{#if (subgraphPath)}}
 
   graphWatcher.setIndexer(indexer);
-  {{#if subgraphPath}}
   await graphWatcher.init();
   {{/if}}
 
@@ -2,7 +2,9 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
+{{#if (subgraphPath)}}
 import path from 'path';
+{{/if}}
 import assert from 'assert';
 import 'reflect-metadata';
 import yargs from 'yargs';
@@ -11,7 +13,9 @@ import debug from 'debug';
 import { PubSub } from 'apollo-server-express';
 
 import { Config, getConfig, fillBlocks, JobQueue, DEFAULT_CONFIG_PATH, initClients } from '@vulcanize/util';
+{{#if (subgraphPath)}}
 import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
+{{/if}}
 
 import { Database } from './database';
 import { Indexer } from './indexer';
@@ -57,11 +61,13 @@ export const main = async (): Promise<any> => {
 
   const db = new Database(config.database);
   await db.init();
+  {{#if (subgraphPath)}}
 
   const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
   await graphDb.init();
 
   const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
+  {{/if}}
 
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
@@ -72,11 +78,11 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
   await indexer.init();
+  {{#if (subgraphPath)}}
 
   graphWatcher.setIndexer(indexer);
-  {{#if subgraphPath}}
   await graphWatcher.init();
   {{/if}}
 
@@ -12,7 +12,9 @@ import fs from 'fs';
 import path from 'path';
 
 import { getConfig, fillBlocks, JobQueue, DEFAULT_CONFIG_PATH, Config, initClients, StateKind } from '@vulcanize/util';
+{{#if (subgraphPath)}}
 import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
+{{/if}}
 import * as codec from '@ipld/dag-cbor';
 
 import { Database } from '../database';
@@ -46,11 +48,13 @@ export const main = async (): Promise<any> => {
 
   const db = new Database(config.database);
   await db.init();
+  {{#if (subgraphPath)}}
 
   const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
   await graphDb.init();
 
   const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
+  {{/if}}
 
   // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
   // Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
@@ -65,11 +69,11 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
   await indexer.init();
+  {{#if (subgraphPath)}}
 
   graphWatcher.setIndexer(indexer);
-  {{#if subgraphPath}}
   await graphWatcher.init();
   {{/if}}
 
packages/codegen/src/templates/index-block-template.handlebars (new file, +81 lines; path per TEMPLATE_FILE above)
@@ -0,0 +1,81 @@
+//
+// Copyright 2022 Vulcanize, Inc.
+//
+
+{{#if (subgraphPath)}}
+import path from 'path';
+{{/if}}
+import yargs from 'yargs';
+import 'reflect-metadata';
+import debug from 'debug';
+import assert from 'assert';
+
+import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue, indexBlock } from '@vulcanize/util';
+{{#if (subgraphPath)}}
+import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
+{{/if}}
+
+import { Database } from '../database';
+import { Indexer } from '../indexer';
+
+const log = debug('vulcanize:index-block');
+
+const main = async (): Promise<void> => {
+  const argv = await yargs.parserConfiguration({
+    'parse-numbers': false
+  }).options({
+    configFile: {
+      alias: 'f',
+      type: 'string',
+      require: true,
+      demandOption: true,
+      describe: 'Configuration file path (toml)',
+      default: DEFAULT_CONFIG_PATH
+    },
+    block: {
+      type: 'number',
+      require: true,
+      demandOption: true,
+      describe: 'Block number to index'
+    }
+  }).argv;
+
+  const config: Config = await getConfig(argv.configFile);
+  const { ethClient, ethProvider } = await initClients(config);
+
+  const db = new Database(config.database);
+  await db.init();
+  {{#if (subgraphPath)}}
+
+  const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
+  await graphDb.init();
+
+  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
+  {{/if}}
+
+  const jobQueueConfig = config.jobQueue;
+  assert(jobQueueConfig, 'Missing job queue config');
+
+  const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
+  assert(dbConnectionString, 'Missing job queue db connection string');
+
+  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
+
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
+  await indexer.init();
+  {{#if (subgraphPath)}}
+
+  graphWatcher.setIndexer(indexer);
+  await graphWatcher.init();
+  {{/if}}
+
+  await indexBlock(indexer, jobQueueConfig.eventsInBatch, argv);
+
+  await db.close();
+};
+
+main().catch(err => {
+  log(err);
+}).finally(() => {
+  process.exit(0);
+});
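This template is the source that codegen's exportIndexBlock compiles; the new packages/eden-watcher/src/cli/index-block.ts later in this commit is essentially its output with a subgraph path configured, i.e. with every {{#if (subgraphPath)}} block kept.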
@@ -24,12 +24,16 @@ import {
   QueryOptions,
   updateStateForElementaryType,
   updateStateForMappingType,
+  {{#if (subgraphPath)}}
   BlockHeight,
+  {{/if}}
   IPFSClient,
   StateKind,
   IpldStatus as IpldStatusInterface
 } from '@vulcanize/util';
+{{#if (subgraphPath)}}
 import { GraphWatcher } from '@vulcanize/graph-node';
+{{/if}}
 
 {{#each contracts as | contract |}}
 import {{contract.contractName}}Artifacts from './artifacts/{{contract.contractName}}.json';
@@ -101,7 +105,9 @@ export class Indexer implements IPLDIndexerInterface {
   _ethProvider: BaseProvider
   _baseIndexer: BaseIndexer
   _serverConfig: ServerConfig
+  {{#if (subgraphPath)}}
   _graphWatcher: GraphWatcher;
+  {{/if}}
 
   _abiMap: Map<string, JsonFragment[]>
   _storageLayoutMap: Map<string, StorageLayout>
@@ -109,10 +115,12 @@ export class Indexer implements IPLDIndexerInterface {
 
   _ipfsClient: IPFSClient
 
+  {{#if (subgraphPath)}}
   _entityTypesMap: Map<string, { [key: string]: string }>
   _relationsMap: Map<any, { [key: string]: any }>
 
-  constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, ethProvider: BaseProvider, jobQueue: JobQueue, graphWatcher: GraphWatcher) {
+  {{/if}}
+  constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, ethProvider: BaseProvider, jobQueue: JobQueue{{#if (subgraphPath)}}, graphWatcher: GraphWatcher{{/if}}) {
     assert(db);
     assert(ethClient);
@@ -122,7 +130,9 @@ export class Indexer implements IPLDIndexerInterface {
     this._serverConfig = serverConfig;
     this._ipfsClient = new IPFSClient(this._serverConfig.ipfsApiAddr);
     this._baseIndexer = new BaseIndexer(this._serverConfig, this._db, this._ethClient, this._ethProvider, jobQueue, this._ipfsClient);
+    {{#if (subgraphPath)}}
     this._graphWatcher = graphWatcher;
+    {{/if}}
 
     this._abiMap = new Map();
     this._storageLayoutMap = new Map();
@@ -147,14 +157,16 @@ export class Indexer implements IPLDIndexerInterface {
     this._contractMap.set(KIND_{{capitalize contract.contractName}}, new ethers.utils.Interface({{contract.contractName}}ABI));
 
     {{/each}}
+    {{#if (subgraphPath)}}
     this._entityTypesMap = new Map();
    this._populateEntityTypesMap();
 
     this._relationsMap = new Map();
     this._populateRelationsMap();
+    {{/if}}
   }
 
-  get serverConfig () {
+  get serverConfig (): ServerConfig {
     return this._serverConfig;
   }
 
@@ -399,6 +411,7 @@ export class Indexer implements IPLDIndexerInterface {
     await this._baseIndexer.removeIPLDBlocks(blockNumber, kind);
   }
 
+  {{#if (subgraphPath)}}
   async getSubgraphEntity<Entity> (entity: new () => Entity, id: string, block?: BlockHeight): Promise<any> {
     const relations = this._relationsMap.get(entity) || {};
 
@@ -407,12 +420,16 @@ export class Indexer implements IPLDIndexerInterface {
     return data;
   }
+  {{/if}}
 
   async triggerIndexingOnEvent (event: Event): Promise<void> {
     const resultEvent = this.getResultEvent(event);
 
+    {{#if (subgraphPath)}}
     // Call subgraph handler for event.
     await this._graphWatcher.handleEvent(resultEvent);
+
+    {{/if}}
     // Call custom hook function for indexing on event.
     await handleEvent(this, resultEvent);
   }
@@ -425,9 +442,11 @@ export class Indexer implements IPLDIndexerInterface {
   async processBlock (blockHash: string, blockNumber: number): Promise<void> {
     // Call a function to create initial state for contracts.
     await this._baseIndexer.createInit(this, blockHash, blockNumber);
+    {{#if (subgraphPath)}}
 
     // Call subgraph handler for block.
     await this._graphWatcher.handleBlock(blockHash);
+    {{/if}}
   }
 
   parseEventNameAndArgs (kind: string, logObj: any): any {
@@ -601,7 +620,7 @@ export class Indexer implements IPLDIndexerInterface {
   }
 
   async getEventsInRange (fromBlockNumber: number, toBlockNumber: number): Promise<Array<Event>> {
-    return this._baseIndexer.getEventsInRange(fromBlockNumber, toBlockNumber);
+    return this._baseIndexer.getEventsInRange(fromBlockNumber, toBlockNumber, this._serverConfig.maxEventsBlockRange);
   }
 
   async getSyncStatus (): Promise<SyncStatus | undefined> {
@@ -664,10 +683,14 @@ export class Indexer implements IPLDIndexerInterface {
     return this._baseIndexer.getAncestorAtDepth(blockHash, depth);
   }
 
+  {{#if (subgraphPath)}}
   getEntityTypesMap (): Map<string, { [key: string]: string }> {
     return this._entityTypesMap;
   }
 
+  {{/if}}
+
+  {{#if (subgraphPath)}}
   _populateEntityTypesMap (): void {
     {{#each subgraphEntities as | subgraphEntity |}}
     this._entityTypesMap.set('{{subgraphEntity.className}}', {
@@ -682,7 +705,9 @@ export class Indexer implements IPLDIndexerInterface {
     });
     {{/each}}
   }
+  {{/if}}
 
+  {{#if (subgraphPath)}}
   _populateRelationsMap (): void {
     {{#each subgraphEntities as | subgraphEntity |}}
     {{#if subgraphEntity.relations}}
@@ -705,16 +730,39 @@ export class Indexer implements IPLDIndexerInterface {
     {{/if}}
     {{/each}}
   }
+  {{/if}}
 
   async _fetchAndSaveEvents ({ cid: blockCid, blockHash }: DeepPartial<BlockProgress>): Promise<BlockProgress> {
     assert(blockHash);
 
-    const logsPromise = this._ethClient.getLogs({ blockHash });
     const transactionsPromise = this._ethClient.getBlockWithTransactions({ blockHash });
+    const blockPromise = this._ethClient.getBlockByHash(blockHash);
+    let logs: any[];
+
+    if (this._serverConfig.filterLogs) {
+      const watchedContracts = this._baseIndexer.getWatchedContracts();
+
+      // TODO: Query logs by multiple contracts.
+      const contractlogsPromises = watchedContracts.map((watchedContract): Promise<any> => this._ethClient.getLogs({
+        blockHash,
+        contract: watchedContract.address
+      }));
+
+      const contractlogs = await Promise.all(contractlogsPromises);
+
+      // Flatten logs by contract and sort by index.
+      logs = contractlogs.map(data => {
+        return data.logs;
+      }).flat()
+        .sort((a, b) => {
+          return a.index - b.index;
+        });
+    } else {
+      ({ logs } = await this._ethClient.getLogs({ blockHash }));
+    }
 
     let [
-      { block, logs },
+      { block },
       {
         allEthHeaderCids: {
           nodes: [
             {
@@ -725,7 +773,7 @@ export class Indexer implements IPLDIndexerInterface {
           ]
         }
       }
-    ] = await Promise.all([logsPromise, transactionsPromise]);
+    ] = await Promise.all([blockPromise, transactionsPromise]);
 
     const transactionMap = transactions.reduce((acc: {[key: string]: any}, transaction: {[key: string]: any}) => {
       acc[transaction.txHash] = transaction;
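Because filtered fetching queries logs one watched contract at a time, the per-contract result sets must be merged back into block order before events are saved. A standalone illustration of the flatten-and-sort step above, with invented log objects (note that the "lib": ["es2019"] tsconfig change elsewhere in this commit is what makes Array.prototype.flat available to the compiler):

    // Invented data shaped like per-contract getLogs results, each log carrying its block-wide index.
    const contractlogs: { logs: { index: number }[] }[] = [
      { logs: [{ index: 2 }] },              // contract A
      { logs: [{ index: 0 }, { index: 1 }] } // contract B
    ];

    const logs = contractlogs.map(data => data.logs).flat().sort((a, b) => a.index - b.index);
    console.log(logs.map(log => log.index)); // [ 0, 1, 2 ] — block-wide ordering restored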
@@ -2,7 +2,9 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
+{{#if (subgraphPath)}}
 import path from 'path';
+{{/if}}
 import assert from 'assert';
 import yargs from 'yargs';
 import 'reflect-metadata';
@@ -10,7 +12,9 @@ import debug from 'debug';
 import util from 'util';
 
 import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@vulcanize/util';
+{{#if (subgraphPath)}}
 import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
+{{/if}}
 
 import { Database } from '../database';
 import { Indexer } from '../indexer';
@@ -42,11 +46,13 @@ const main = async (): Promise<void> => {
 
   const db = new Database(config.database);
   await db.init();
+  {{#if (subgraphPath)}}
 
   const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
   await graphDb.init();
 
   const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
+  {{/if}}
 
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
@@ -57,11 +63,11 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
   await indexer.init();
+  {{#if (subgraphPath)}}
 
   graphWatcher.setIndexer(indexer);
-  {{#if subgraphPath}}
   await graphWatcher.init();
   {{/if}}
 
@@ -2,7 +2,9 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
+{{#if (subgraphPath)}}
 import path from 'path';
+{{/if}}
 import assert from 'assert';
 import 'reflect-metadata';
 import yargs from 'yargs';
@@ -24,7 +26,9 @@ import {
   DEFAULT_CONFIG_PATH,
   initClients
 } from '@vulcanize/util';
+{{#if (subgraphPath)}}
 import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
+{{/if}}
 
 import { Indexer } from './indexer';
 import { Database } from './database';
@@ -252,11 +256,13 @@ export const main = async (): Promise<any> => {
 
   const db = new Database(config.database);
   await db.init();
+  {{#if (subgraphPath)}}
 
   const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
   await graphDb.init();
 
   const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
+  {{/if}}
 
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
@@ -267,11 +273,11 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
   await indexer.init();
+  {{#if (subgraphPath)}}
 
   graphWatcher.setIndexer(indexer);
-  {{#if subgraphPath}}
   await graphWatcher.init();
 
   // Watching all the contracts in the subgraph.
@@ -15,7 +15,8 @@
     "checkpoint": "DEBUG=vulcanize:* ts-node src/cli/checkpoint.ts",
     "export-state": "DEBUG=vulcanize:* ts-node src/cli/export-state.ts",
     "import-state": "DEBUG=vulcanize:* ts-node src/cli/import-state.ts",
-    "inspect-cid": "DEBUG=vulcanize:* ts-node src/cli/inspect-cid.ts"
+    "inspect-cid": "DEBUG=vulcanize:* ts-node src/cli/inspect-cid.ts",
+    "index-block": "DEBUG=vulcanize:* ts-node src/cli/index-block.ts"
   },
   "repository": {
     "type": "git",
@@ -34,7 +35,9 @@
     "@vulcanize/ipld-eth-client": "^0.1.0",
     "@vulcanize/solidity-mapper": "^0.1.0",
     "@vulcanize/util": "^0.1.0",
+    {{#if (subgraphPath)}}
     "@vulcanize/graph-node": "^0.1.0",
+    {{/if}}
     "apollo-server-express": "^2.25.0",
     "apollo-type-bigint": "^0.1.3",
     "debug": "^4.3.1",
@@ -2,13 +2,17 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
+{{#if (subgraphPath)}}
 import path from 'path';
+{{/if}}
 import debug from 'debug';
 import { MoreThan } from 'typeorm';
 import assert from 'assert';
 
 import { getConfig, initClients, resetJobs, JobQueue } from '@vulcanize/util';
+{{#if (subgraphPath)}}
 import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
+{{/if}}
 
 import { Database } from '../../database';
 import { Indexer } from '../../indexer';
@@ -38,11 +42,13 @@ export const handler = async (argv: any): Promise<void> => {
   // Initialize database.
   const db = new Database(config.database);
   await db.init();
+  {{#if (subgraphPath)}}
 
   const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
   await graphDb.init();
 
   const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
+  {{/if}}
 
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
@@ -53,11 +59,11 @@ export const handler = async (argv: any): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
   await indexer.init();
+  {{#if (subgraphPath)}}
 
   graphWatcher.setIndexer(indexer);
-  {{#if subgraphPath}}
   await graphWatcher.init();
   {{/if}}
 
@@ -15,7 +15,9 @@ import 'graphql-import-node';
 import { createServer } from 'http';
 
 import { DEFAULT_CONFIG_PATH, getConfig, Config, JobQueue, KIND_ACTIVE, initClients } from '@vulcanize/util';
+{{#if (subgraphPath)}}
 import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
+{{/if}}
 
 import { createResolvers } from './resolvers';
 import { Indexer } from './indexer';
@@ -42,11 +44,13 @@ export const main = async (): Promise<any> => {
 
   const db = new Database(config.database);
   await db.init();
+  {{#if (subgraphPath)}}
 
   const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
   await graphDb.init();
 
   const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
+  {{/if}}
 
   // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
   // Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
@@ -60,11 +64,11 @@ export const main = async (): Promise<any> => {
 
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
   await indexer.init();
+  {{#if (subgraphPath)}}
 
   graphWatcher.setIndexer(indexer);
-  {{#if subgraphPath}}
   await graphWatcher.init();
   {{/if}}
 
@@ -6,7 +6,7 @@
     // "incremental": true, /* Enable incremental compilation */
     "target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */
     "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
-    // "lib": [], /* Specify library files to be included in the compilation. */
+    "lib": ["es2019"], /* Specify library files to be included in the compilation. */
     // "allowJs": true, /* Allow javascript files to be compiled. */
     // "checkJs": true, /* Report errors in .js files. */
     // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */
@@ -2,14 +2,18 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
+{{#if (subgraphPath)}}
 import path from 'path';
+{{/if}}
 import yargs from 'yargs';
 import 'reflect-metadata';
 import debug from 'debug';
 import assert from 'assert';
 
 import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@vulcanize/util';
+{{#if (subgraphPath)}}
 import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
+{{/if}}
 
 import { Database } from '../database';
 import { Indexer } from '../indexer';
@@ -58,11 +62,13 @@ const main = async (): Promise<void> => {
 
   const db = new Database(config.database);
   await db.init();
+  {{#if (subgraphPath)}}
 
   const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
   await graphDb.init();
 
   const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
+  {{/if}}
 
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
@@ -73,11 +79,11 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
   await indexer.init();
+  {{#if (subgraphPath)}}
 
   graphWatcher.setIndexer(indexer);
-  {{#if subgraphPath}}
   await graphWatcher.init();
   {{/if}}
 
@@ -7,10 +7,11 @@ import Handlebars from 'handlebars';
 
 import { reservedNames } from './types';
 
-export function registerHandlebarHelpers (): void {
+export function registerHandlebarHelpers (config: any): void {
   Handlebars.registerHelper('compare', compareHelper);
   Handlebars.registerHelper('capitalize', capitalizeHelper);
   Handlebars.registerHelper('reservedNameCheck', reservedNameCheckHelper);
+  Handlebars.registerHelper('subgraphPath', () => config.subgraphPath);
 }
 
 /**
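Because subgraphPath is registered as a helper rather than passed in as template data, templates can test it with the subexpression form {{#if (subgraphPath)}} even when rendered with an empty context — which is exactly how exportIndexBlock invokes template({}). A minimal demonstration (the inline template string is illustrative):

    import Handlebars from 'handlebars';

    // The helper closes over the codegen config, as in registerHandlebarHelpers above.
    const config = { subgraphPath: '' }; // assume no subgraph is configured
    Handlebars.registerHelper('subgraphPath', () => config.subgraphPath);

    const template = Handlebars.compile("{{#if (subgraphPath)}}import path from 'path';{{/if}}");
    console.log(JSON.stringify(template({}))); // "" — the guarded block is dropped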
@@ -93,9 +93,13 @@ export class Visitor {
     const variable = node.variables[0];
     const name: string = variable.name;
     const stateVariableType: string = variable.typeName.type;
 
     const params: Param[] = [];
 
+    if (variable.isImmutable) {
+      // Skip in case variable is immutable.
+      return;
+    }
+
     let typeName = variable.typeName;
     let numParams = 0;
 
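Skipping immutable variables here is what implements the "skip generating GQL API for immutable variables" item from the commit message: a Solidity immutable is embedded in the deployed bytecode rather than held in contract storage, so the storage-layout lookups behind the generated query methods have nothing to read for it.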
@@ -15,6 +15,13 @@
 subgraphPath = "../graph-node/test/subgraph/eden"
 wasmRestartBlocksInterval = 20
 
+# Boolean to filter logs by contract.
+filterLogs = false
+
+# Max block range for which to return events in eventsInRange GQL query.
+# Use -1 for skipping check on block range.
+maxEventsBlockRange = 1000
+
 [database]
 type = "postgres"
 host = "localhost"
@@ -15,7 +15,8 @@
     "checkpoint": "DEBUG=vulcanize:* ts-node src/cli/checkpoint.ts",
     "export-state": "DEBUG=vulcanize:* ts-node src/cli/export-state.ts",
     "import-state": "DEBUG=vulcanize:* ts-node src/cli/import-state.ts",
-    "inspect-cid": "DEBUG=vulcanize:* ts-node src/cli/inspect-cid.ts"
+    "inspect-cid": "DEBUG=vulcanize:* ts-node src/cli/inspect-cid.ts",
+    "index-block": "DEBUG=vulcanize:* ts-node src/cli/index-block.ts"
   },
   "repository": {
     "type": "git",
packages/eden-watcher/src/cli/index-block.ts (new file, +73 lines)
@@ -0,0 +1,73 @@
+//
+// Copyright 2022 Vulcanize, Inc.
+//
+
+import path from 'path';
+import yargs from 'yargs';
+import 'reflect-metadata';
+import debug from 'debug';
+import assert from 'assert';
+
+import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue, indexBlock } from '@vulcanize/util';
+import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
+
+import { Database } from '../database';
+import { Indexer } from '../indexer';
+
+const log = debug('vulcanize:index-block');
+
+const main = async (): Promise<void> => {
+  const argv = await yargs.parserConfiguration({
+    'parse-numbers': false
+  }).options({
+    configFile: {
+      alias: 'f',
+      type: 'string',
+      require: true,
+      demandOption: true,
+      describe: 'Configuration file path (toml)',
+      default: DEFAULT_CONFIG_PATH
+    },
+    block: {
+      type: 'number',
+      require: true,
+      demandOption: true,
+      describe: 'Block number to index'
+    }
+  }).argv;
+
+  const config: Config = await getConfig(argv.configFile);
+  const { ethClient, ethProvider } = await initClients(config);
+
+  const db = new Database(config.database);
+  await db.init();
+
+  const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
+  await graphDb.init();
+
+  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
+
+  const jobQueueConfig = config.jobQueue;
+  assert(jobQueueConfig, 'Missing job queue config');
+
+  const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
+  assert(dbConnectionString, 'Missing job queue db connection string');
+
+  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
+
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  await indexer.init();
+
+  graphWatcher.setIndexer(indexer);
+  await graphWatcher.init();
+
+  await indexBlock(indexer, jobQueueConfig.eventsInBatch, argv);
+
+  await db.close();
+};
+
+main().catch(err => {
+  log(err);
+}).finally(() => {
+  process.exit(0);
+});
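With the index-block script added to eden-watcher's package.json above, the new CLI can be pointed at any indexed block; for example (the config path and block number are illustrative):

    yarn index-block -f environments/local.toml --block 1000000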
@@ -191,7 +191,7 @@ export class Indexer implements IPLDIndexerInterface {
     this._populateRelationsMap();
   }
 
-  get serverConfig () {
+  get serverConfig (): ServerConfig {
     return this._serverConfig;
   }
 
@@ -1361,12 +1361,34 @@ export class Indexer implements IPLDIndexerInterface {
 
   async _fetchAndSaveEvents ({ cid: blockCid, blockHash }: DeepPartial<BlockProgress>): Promise<BlockProgress> {
     assert(blockHash);
 
-    const logsPromise = this._ethClient.getLogs({ blockHash });
     const transactionsPromise = this._ethClient.getBlockWithTransactions({ blockHash });
+    const blockPromise = this._ethClient.getBlockByHash(blockHash);
+    let logs: any[];
+
+    if (this._serverConfig.filterLogs) {
+      const watchedContracts = this._baseIndexer.getWatchedContracts();
+
+      // TODO: Query logs by multiple contracts.
+      const contractlogsPromises = watchedContracts.map((watchedContract): Promise<any> => this._ethClient.getLogs({
+        blockHash,
+        contract: watchedContract.address
+      }));
+
+      const contractlogs = await Promise.all(contractlogsPromises);
+
+      // Flatten logs by contract and sort by index.
+      logs = contractlogs.map(data => {
+        return data.logs;
+      }).flat()
+        .sort((a, b) => {
+          return a.index - b.index;
+        });
+    } else {
+      ({ logs } = await this._ethClient.getLogs({ blockHash }));
+    }
 
     let [
-      { block, logs },
+      { block },
       {
         allEthHeaderCids: {
           nodes: [
@@ -1378,7 +1400,7 @@ export class Indexer implements IPLDIndexerInterface {
           ]
         }
       }
-    ] = await Promise.all([logsPromise, transactionsPromise]);
+    ] = await Promise.all([blockPromise, transactionsPromise]);
 
     const transactionMap = transactions.reduce((acc: {[key: string]: any}, transaction: {[key: string]: any}) => {
       acc[transaction.txHash] = transaction;
@@ -6,7 +6,7 @@
     // "incremental": true, /* Enable incremental compilation */
     "target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */
     "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
-    // "lib": [], /* Specify library files to be included in the compilation. */
+    "lib": ["es2019"], /* Specify library files to be included in the compilation. */
     // "allowJs": true, /* Allow javascript files to be compiled. */
     // "checkJs": true, /* Report errors in .js files. */
     // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */
|
@@ -75,7 +75,7 @@ export class Indexer implements IndexerInterface {
     this._contract = new ethers.utils.Interface(this._abi);
   }
 
-  get serverConfig () {
+  get serverConfig (): ServerConfig {
     return this._serverConfig;
   }
 
@@ -388,7 +388,10 @@ export class Indexer implements IndexerInterface {
 
   async _fetchAndSaveEvents ({ cid: blockCid, blockHash }: DeepPartial<BlockProgress>): Promise<BlockProgress> {
     assert(blockHash);
-    let { block, logs } = await this._ethClient.getLogs({ blockHash });
+    let [{ block }, { logs }] = await Promise.all([
+      this._ethClient.getBlockByHash(blockHash),
+      this._ethClient.getLogs({ blockHash })
+    ]);
 
     const dbEvents: Array<DeepPartial<Event>> = [];
 
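
Note: getLogs no longer returns the block header (see the ipld-eth-client change near the end of this commit), so the header now comes from a separate getBlockByHash call; Promise.all keeps the two round trips concurrent. A sketch of the pattern against a pared-down client interface (interface and names assumed for illustration):

interface EthClientLike {
  getBlockByHash (blockHash: string): Promise<{ block: any }>;
  getLogs (vars: { blockHash: string }): Promise<{ logs: any[] }>;
}

// Fetch the block header and its logs concurrently instead of via one combined query.
const fetchBlockAndLogs = async (client: EthClientLike, blockHash: string): Promise<{ block: any, logs: any[] }> => {
  const [{ block }, { logs }] = await Promise.all([
    client.getBlockByHash(blockHash),
    client.getLogs({ blockHash })
  ]);

  return { block, logs };
};
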
@@ -12,6 +12,12 @@
   # IPFS API address (can be taken from the output on running the IPFS daemon).
   ipfsApiAddr = "/ip4/127.0.0.1/tcp/5001"
 
+  # Boolean to filter logs by contract.
+  filterLogs = false
+
+  # Max block range for which to return events in eventsInRange GQL query.
+  # Use -1 for skipping check on block range.
+  maxEventsBlockRange = 1000
+
 [database]
   type = "postgres"
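
Note: the two new [server] keys are independent. filterLogs switches log fetching from one whole-block query to one query per watched contract, and maxEventsBlockRange caps the block span an eventsInRange GQL query may cover. The guard itself is not part of this diff; a hypothetical sketch of what such a check looks like (names assumed):

// Reject eventsInRange queries spanning too many blocks; -1 disables the check.
const validateEventsBlockRange = (fromBlock: number, toBlock: number, maxEventsBlockRange: number): void => {
  if (maxEventsBlockRange > -1 && toBlock - fromBlock > maxEventsBlockRange) {
    throw new Error(`Block range exceeds max range of ${maxEventsBlockRange}`);
  }
};
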
@@ -16,6 +16,7 @@
     "export-state": "DEBUG=vulcanize:* ts-node src/cli/export-state.ts",
     "import-state": "DEBUG=vulcanize:* ts-node src/cli/import-state.ts",
     "inspect-cid": "DEBUG=vulcanize:* ts-node src/cli/inspect-cid.ts",
+    "index-block": "DEBUG=vulcanize:* ts-node src/cli/index-block.ts",
     "nft:deploy": "hardhat --network localhost nft-deploy",
     "nft:mint": "hardhat --network localhost nft-mint",
     "nft:transfer": "hardhat --network localhost nft-transfer",
@@ -39,7 +40,6 @@
     "@vulcanize/ipld-eth-client": "^0.1.0",
     "@vulcanize/solidity-mapper": "^0.1.0",
     "@vulcanize/util": "^0.1.0",
-    "@vulcanize/graph-node": "^0.1.0",
     "apollo-server-express": "^2.25.0",
     "apollo-type-bigint": "^0.1.3",
     "debug": "^4.3.1",
@@ -2,14 +2,12 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
-import path from 'path';
 import yargs from 'yargs';
 import 'reflect-metadata';
 import debug from 'debug';
 import assert from 'assert';
 
 import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@vulcanize/util';
-import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
 
 import { Database } from '../database';
 import { Indexer } from '../indexer';
@@ -46,11 +44,6 @@ const main = async (): Promise<void> => {
   const db = new Database(config.database);
   await db.init();
 
-  const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
-  await graphDb.init();
-
-  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
-
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
 
@@ -60,11 +53,9 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
   await indexer.init();
 
-  graphWatcher.setIndexer(indexer);
-
   const blockHash = await indexer.processCLICheckpoint(argv.address, argv.blockHash);
 
   log(`Created a checkpoint for contract ${argv.address} at block-hash ${blockHash}`);
@@ -10,7 +10,6 @@ import fs from 'fs';
 import path from 'path';
 
 import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue, StateKind } from '@vulcanize/util';
-import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
 import * as codec from '@ipld/dag-cbor';
 
 import { Database } from '../database';
@@ -43,11 +42,6 @@ const main = async (): Promise<void> => {
   const db = new Database(config.database);
   await db.init();
 
-  const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
-  await graphDb.init();
-
-  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
-
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
 
@@ -57,11 +51,9 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
   await indexer.init();
 
-  graphWatcher.setIndexer(indexer);
-
   const exportData: any = {
     snapshotBlock: {},
     contracts: [],
@@ -12,7 +12,6 @@ import fs from 'fs';
 import path from 'path';
 
 import { getConfig, fillBlocks, JobQueue, DEFAULT_CONFIG_PATH, Config, initClients, StateKind } from '@vulcanize/util';
-import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
 import * as codec from '@ipld/dag-cbor';
 
 import { Database } from '../database';
@@ -47,11 +46,6 @@ export const main = async (): Promise<any> => {
   const db = new Database(config.database);
   await db.init();
 
-  const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
-  await graphDb.init();
-
-  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
-
   // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
   // Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
   const pubsub = new PubSub();
@@ -65,11 +59,9 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
   await indexer.init();
 
-  graphWatcher.setIndexer(indexer);
-
   const eventWatcher = new EventWatcher(config.upstream, ethClient, indexer, pubsub, jobQueue);
 
   // Import data.
packages/erc721-watcher/src/cli/index-block.ts (new file, 63 lines)
@@ -0,0 +1,63 @@
+//
+// Copyright 2022 Vulcanize, Inc.
+//
+
+import yargs from 'yargs';
+import 'reflect-metadata';
+import debug from 'debug';
+import assert from 'assert';
+
+import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue, indexBlock } from '@vulcanize/util';
+
+import { Database } from '../database';
+import { Indexer } from '../indexer';
+
+const log = debug('vulcanize:index-block');
+
+const main = async (): Promise<void> => {
+  const argv = await yargs.parserConfiguration({
+    'parse-numbers': false
+  }).options({
+    configFile: {
+      alias: 'f',
+      type: 'string',
+      require: true,
+      demandOption: true,
+      describe: 'Configuration file path (toml)',
+      default: DEFAULT_CONFIG_PATH
+    },
+    block: {
+      type: 'number',
+      require: true,
+      demandOption: true,
+      describe: 'Block number to index'
+    }
+  }).argv;
+
+  const config: Config = await getConfig(argv.configFile);
+  const { ethClient, ethProvider } = await initClients(config);
+
+  const db = new Database(config.database);
+  await db.init();
+
+  const jobQueueConfig = config.jobQueue;
+  assert(jobQueueConfig, 'Missing job queue config');
+
+  const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
+  assert(dbConnectionString, 'Missing job queue db connection string');
+
+  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
+
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
+  await indexer.init();
+
+  await indexBlock(indexer, jobQueueConfig.eventsInBatch, argv);
+
+  await db.close();
+};
+
+main().catch(err => {
+  log(err);
+}).finally(() => {
+  process.exit(0);
+});
@@ -2,7 +2,6 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
-import path from 'path';
 import assert from 'assert';
 import yargs from 'yargs';
 import 'reflect-metadata';
@@ -10,7 +9,6 @@ import debug from 'debug';
 import util from 'util';
 
 import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@vulcanize/util';
-import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
 
 import { Database } from '../database';
 import { Indexer } from '../indexer';
@@ -43,11 +41,6 @@ const main = async (): Promise<void> => {
   const db = new Database(config.database);
   await db.init();
 
-  const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
-  await graphDb.init();
-
-  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
-
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
 
@@ -57,11 +50,9 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
   await indexer.init();
 
-  graphWatcher.setIndexer(indexer);
-
   const ipldBlock = await indexer.getIPLDBlockByCid(argv.cid);
   assert(ipldBlock, 'IPLDBlock for the provided CID doesn\'t exist.');
 
@@ -2,13 +2,11 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
-import path from 'path';
 import debug from 'debug';
 import { MoreThan } from 'typeorm';
 import assert from 'assert';
 
 import { getConfig, initClients, resetJobs, JobQueue } from '@vulcanize/util';
-import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
 
 import { Database } from '../../database';
 import { Indexer } from '../../indexer';
@@ -50,11 +48,6 @@ export const handler = async (argv: any): Promise<void> => {
   const db = new Database(config.database);
   await db.init();
 
-  const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
-  await graphDb.init();
-
-  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
-
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
 
@@ -64,11 +57,9 @@ export const handler = async (argv: any): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
   await indexer.init();
 
-  graphWatcher.setIndexer(indexer);
-
   const blockProgresses = await indexer.getBlocksAtHeight(argv.blockNumber, false);
   assert(blockProgresses.length, `No blocks at specified block number ${argv.blockNumber}`);
   assert(!blockProgresses.some(block => !block.isComplete), `Incomplete block at block number ${argv.blockNumber} with unprocessed events`);
@@ -2,14 +2,12 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
-import path from 'path';
 import yargs from 'yargs';
 import 'reflect-metadata';
 import debug from 'debug';
 import assert from 'assert';
 
 import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@vulcanize/util';
-import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
 
 import { Database } from '../database';
 import { Indexer } from '../indexer';
@@ -59,11 +57,6 @@ const main = async (): Promise<void> => {
   const db = new Database(config.database);
   await db.init();
 
-  const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
-  await graphDb.init();
-
-  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
-
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
 
@@ -73,11 +66,9 @@ const main = async (): Promise<void> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
   await indexer.init();
 
-  graphWatcher.setIndexer(indexer);
-
   await indexer.watchContract(argv.address, argv.kind, argv.checkpoint, argv.startingBlock);
 
   await db.close();
@@ -2,7 +2,7 @@
 // Copyright 2022 Vulcanize, Inc.
 //
 
-import { Entity, PrimaryColumn, Column, Index } from 'typeorm';
+import { Entity, PrimaryColumn, Column } from 'typeorm';
 
 @Entity()
 export class TransferCount {
@@ -2,7 +2,6 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
-import path from 'path';
 import assert from 'assert';
 import 'reflect-metadata';
 import yargs from 'yargs';
@@ -11,7 +10,6 @@ import debug from 'debug';
 import { PubSub } from 'apollo-server-express';
 
 import { Config, getConfig, fillBlocks, JobQueue, DEFAULT_CONFIG_PATH, initClients } from '@vulcanize/util';
-import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
 
 import { Database } from './database';
 import { Indexer } from './indexer';
@@ -58,11 +56,6 @@ export const main = async (): Promise<any> => {
   const db = new Database(config.database);
   await db.init();
 
-  const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
-  await graphDb.init();
-
-  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
-
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
 
@@ -72,11 +65,9 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
   await indexer.init();
 
-  graphWatcher.setIndexer(indexer);
-
   // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
   // Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
   const pubsub = new PubSub();
@@ -4,7 +4,7 @@
 
 import assert from 'assert';
 
-import { updateStateForMappingType, updateStateForElementaryType } from '@vulcanize/util';
+import { updateStateForElementaryType } from '@vulcanize/util';
 
 import { Indexer, ResultEvent } from './indexer';
 import { TransferCount } from './entity/TransferCount';
@@ -29,7 +29,6 @@ import {
   StateKind,
   IpldStatus as IpldStatusInterface
 } from '@vulcanize/util';
-import { GraphWatcher } from '@vulcanize/graph-node';
 
 import ERC721Artifacts from './artifacts/ERC721.json';
 import { Database } from './database';
@@ -94,7 +93,6 @@ export class Indexer implements IPLDIndexerInterface {
   _ethProvider: BaseProvider
   _baseIndexer: BaseIndexer
   _serverConfig: ServerConfig
-  _graphWatcher: GraphWatcher;
 
   _abiMap: Map<string, JsonFragment[]>
   _storageLayoutMap: Map<string, StorageLayout>
@@ -102,10 +100,7 @@ export class Indexer implements IPLDIndexerInterface {
 
   _ipfsClient: IPFSClient
 
-  _entityTypesMap: Map<string, { [key: string]: string }>
-  _relationsMap: Map<any, { [key: string]: any }>
-
-  constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, ethProvider: BaseProvider, jobQueue: JobQueue, graphWatcher: GraphWatcher) {
+  constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, ethProvider: BaseProvider, jobQueue: JobQueue) {
     assert(db);
     assert(ethClient);
 
@@ -115,7 +110,6 @@ export class Indexer implements IPLDIndexerInterface {
     this._serverConfig = serverConfig;
     this._ipfsClient = new IPFSClient(this._serverConfig.ipfsApiAddr);
     this._baseIndexer = new BaseIndexer(this._serverConfig, this._db, this._ethClient, this._ethProvider, jobQueue, this._ipfsClient);
-    this._graphWatcher = graphWatcher;
 
     this._abiMap = new Map();
     this._storageLayoutMap = new Map();
@@ -131,13 +125,9 @@ export class Indexer implements IPLDIndexerInterface {
     assert(ERC721StorageLayout);
     this._storageLayoutMap.set(KIND_ERC721, ERC721StorageLayout);
     this._contractMap.set(KIND_ERC721, new ethers.utils.Interface(ERC721ABI));
-
-    this._entityTypesMap = new Map();
-
-    this._relationsMap = new Map();
   }
 
-  get serverConfig () {
+  get serverConfig (): ServerConfig {
     return this._serverConfig;
   }
 
@@ -452,9 +442,8 @@ export class Indexer implements IPLDIndexerInterface {
     return res;
   }
 
-  async saveOrUpdateTransferCount (transferCount: TransferCount) {
+  async saveOrUpdateTransferCount (transferCount: TransferCount): Promise<void> {
     const dbTx = await this._db.createTransactionRunner();
-    let res;
 
     try {
       await this._db.saveTransferCount(dbTx, transferCount);
@@ -464,8 +453,6 @@ export class Indexer implements IPLDIndexerInterface {
     } finally {
       await dbTx.release();
     }
-
-    return res;
   }
 
   async _name (blockHash: string, contractAddress: string, diff = false): Promise<ValueResult> {
@@ -784,20 +771,9 @@ export class Indexer implements IPLDIndexerInterface {
     await this._baseIndexer.removeIPLDBlocks(blockNumber, kind);
   }
 
-  async getSubgraphEntity<Entity> (entity: new () => Entity, id: string, block?: BlockHeight): Promise<any> {
-    const relations = this._relationsMap.get(entity) || {};
-
-    const data = await this._graphWatcher.getEntity(entity, id, relations, block);
-
-    return data;
-  }
-
   async triggerIndexingOnEvent (event: Event): Promise<void> {
     const resultEvent = this.getResultEvent(event);
 
-    // Call subgraph handler for event.
-    await this._graphWatcher.handleEvent(resultEvent);
-
     // Call custom hook function for indexing on event.
     await handleEvent(this, resultEvent);
   }
@@ -810,9 +786,6 @@ export class Indexer implements IPLDIndexerInterface {
   async processBlock (blockHash: string, blockNumber: number): Promise<void> {
     // Call a function to create initial state for contracts.
     await this._baseIndexer.createInit(this, blockHash, blockNumber);
-
-    // Call subgraph handler for block.
-    await this._graphWatcher.handleBlock(blockHash);
   }
 
   parseEventNameAndArgs (kind: string, logObj: any): any {
@@ -848,7 +821,7 @@ export class Indexer implements IPLDIndexerInterface {
     switch (logDescription.name) {
       case APPROVAL_EVENT: {
         eventName = logDescription.name;
-        const { owner, approved, tokenId } = logDescription.args;
+        const [owner, approved, tokenId] = logDescription.args;
         eventInfo = {
           owner,
           approved,
@@ -859,7 +832,7 @@ export class Indexer implements IPLDIndexerInterface {
       }
       case APPROVALFORALL_EVENT: {
         eventName = logDescription.name;
-        const { owner, operator, approved } = logDescription.args;
+        const [owner, operator, approved] = logDescription.args;
         eventInfo = {
           owner,
           operator,
@@ -870,7 +843,7 @@ export class Indexer implements IPLDIndexerInterface {
       }
       case TRANSFER_EVENT: {
         eventName = logDescription.name;
-        const { from, to, tokenId } = logDescription.args;
+        const [from, to, tokenId] = logDescription.args;
         eventInfo = {
           from,
           to,
@@ -1054,18 +1027,36 @@ export class Indexer implements IPLDIndexerInterface {
     return this._baseIndexer.getAncestorAtDepth(blockHash, depth);
   }
 
-  getEntityTypesMap (): Map<string, { [key: string]: string }> {
-    return this._entityTypesMap;
-  }
-
   async _fetchAndSaveEvents ({ cid: blockCid, blockHash }: DeepPartial<BlockProgress>): Promise<BlockProgress> {
     assert(blockHash);
-
-    const logsPromise = this._ethClient.getLogs({ blockHash });
     const transactionsPromise = this._ethClient.getBlockWithTransactions({ blockHash });
+    const blockPromise = this._ethClient.getBlockByHash(blockHash);
+    let logs: any[];
+
+    if (this._serverConfig.filterLogs) {
+      const watchedContracts = this._baseIndexer.getWatchedContracts();
+
+      // TODO: Query logs by multiple contracts.
+      const contractlogsPromises = watchedContracts.map((watchedContract): Promise<any> => this._ethClient.getLogs({
+        blockHash,
+        contract: watchedContract.address
+      }));
+
+      const contractlogs = await Promise.all(contractlogsPromises);
+
+      // Flatten logs by contract and sort by index.
+      logs = contractlogs.map(data => {
+        return data.logs;
+      }).flat()
+        .sort((a, b) => {
+          return a.index - b.index;
+        });
+    } else {
+      ({ logs } = await this._ethClient.getLogs({ blockHash }));
+    }
 
     let [
-      { block, logs },
+      { block },
       {
         allEthHeaderCids: {
           nodes: [
@@ -1077,7 +1068,7 @@ export class Indexer implements IPLDIndexerInterface {
           ]
         }
       }
-    ] = await Promise.all([logsPromise, transactionsPromise]);
+    ] = await Promise.all([blockPromise, transactionsPromise]);
 
     const transactionMap = transactions.reduce((acc: {[key: string]: any}, transaction: {[key: string]: any}) => {
       acc[transaction.txHash] = transaction;
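
Note on the destructuring change above: ethers' LogDescription.args is an array-like Result, so positional destructuring (const [owner, approved, tokenId] = logDescription.args) reads the same values as the named form, and keeps working even when the ABI fragment leaves event parameters unnamed.
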
@@ -2,7 +2,6 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
-import path from 'path';
 import assert from 'assert';
 import 'reflect-metadata';
 import yargs from 'yargs';
@@ -24,7 +23,6 @@ import {
   DEFAULT_CONFIG_PATH,
   initClients
 } from '@vulcanize/util';
-import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
 
 import { Indexer } from './indexer';
 import { Database } from './database';
@@ -253,11 +251,6 @@ export const main = async (): Promise<any> => {
   const db = new Database(config.database);
   await db.init();
 
-  const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
-  await graphDb.init();
-
-  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
-
   const jobQueueConfig = config.jobQueue;
   assert(jobQueueConfig, 'Missing job queue config');
 
@@ -267,11 +260,9 @@ export const main = async (): Promise<any> => {
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
   await jobQueue.start();
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
   await indexer.init();
 
-  graphWatcher.setIndexer(indexer);
-
   const jobRunner = new JobRunner(jobQueueConfig, indexer, jobQueue);
   await jobRunner.start();
 };
@@ -12,7 +12,6 @@ import { ValueResult, BlockHeight, StateKind } from '@vulcanize/util';
 
 import { Indexer } from './indexer';
 import { EventWatcher } from './events';
-import { TransferCount } from './entity/TransferCount';
 
 const log = debug('vulcanize:resolver');
 
@@ -15,7 +15,6 @@ import 'graphql-import-node';
 import { createServer } from 'http';
 
 import { DEFAULT_CONFIG_PATH, getConfig, Config, JobQueue, KIND_ACTIVE, initClients } from '@vulcanize/util';
-import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
 
 import { createResolvers } from './resolvers';
 import { Indexer } from './indexer';
@@ -43,11 +42,6 @@ export const main = async (): Promise<any> => {
   const db = new Database(config.database);
   await db.init();
 
-  const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
-  await graphDb.init();
-
-  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
-
   // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
   // Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
   const pubsub = new PubSub();
@@ -60,11 +54,9 @@ export const main = async (): Promise<any> => {
 
   const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
 
-  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
   await indexer.init();
 
-  graphWatcher.setIndexer(indexer);
-
   const eventWatcher = new EventWatcher(config.upstream, ethClient, indexer, pubsub, jobQueue);
 
   if (watcherKind === KIND_ACTIVE) {
@@ -6,7 +6,7 @@
     // "incremental": true,                         /* Enable incremental compilation */
     "target": "es5",                                /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */
     "module": "commonjs",                           /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
-    // "lib": [],                                   /* Specify library files to be included in the compilation. */
+    "lib": ["es2019"],                              /* Specify library files to be included in the compilation. */
     // "allowJs": true,                             /* Allow javascript files to be compiled. */
     // "checkJs": true,                             /* Report errors in .js files. */
     // "jsx": "preserve",                           /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */
@@ -161,6 +161,7 @@ class ServerConfig implements ServerConfigInterface {
   subgraphPath: string;
   wasmRestartBlocksInterval: number;
   filterLogs: boolean;
+  maxEventsBlockRange: number;
 
   constructor () {
     this.host = '';
@@ -173,5 +174,6 @@ class ServerConfig implements ServerConfigInterface {
     this.subgraphPath = '';
     this.wasmRestartBlocksInterval = 0;
     this.filterLogs = false;
+    this.maxEventsBlockRange = 0;
   }
 }
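
Note: this config class mirrors the [server] TOML tables touched in this commit. A minimal sketch of how the two new options might be defaulted when the TOML omits them (object shape assumed for illustration):

interface ServerOptions {
  filterLogs: boolean;         // fetch logs per watched contract instead of per block
  maxEventsBlockRange: number; // -1 disables the eventsInRange block-range check
}

// Fall back to the constructor defaults above when the keys are absent.
const withServerDefaults = (server: Partial<ServerOptions>): ServerOptions => ({
  filterLogs: server.filterLogs ?? false,
  maxEventsBlockRange: server.maxEventsBlockRange ?? 0
});
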
@@ -15,6 +15,13 @@
   subgraphPath = "../graph-node/test/subgraph/example1/build"
   wasmRestartBlocksInterval = 20
 
+  # Boolean to filter logs by contract.
+  filterLogs = false
+
+  # Max block range for which to return events in eventsInRange GQL query.
+  # Use -1 for skipping check on block range.
+  maxEventsBlockRange = 1000
+
 [database]
   type = "postgres"
   host = "localhost"
@@ -15,7 +15,8 @@
     "checkpoint": "DEBUG=vulcanize:* ts-node src/cli/checkpoint.ts",
     "export-state": "DEBUG=vulcanize:* ts-node src/cli/export-state.ts",
     "import-state": "DEBUG=vulcanize:* ts-node src/cli/import-state.ts",
-    "inspect-cid": "DEBUG=vulcanize:* ts-node src/cli/inspect-cid.ts"
+    "inspect-cid": "DEBUG=vulcanize:* ts-node src/cli/inspect-cid.ts",
+    "index-block": "DEBUG=vulcanize:* ts-node src/cli/index-block.ts"
   },
   "repository": {
     "type": "git",
packages/graph-test-watcher/src/cli/index-block.ts (new file, 73 lines)
@@ -0,0 +1,73 @@
+//
+// Copyright 2022 Vulcanize, Inc.
+//
+
+import path from 'path';
+import yargs from 'yargs';
+import 'reflect-metadata';
+import debug from 'debug';
+import assert from 'assert';
+
+import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue, indexBlock } from '@vulcanize/util';
+import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
+
+import { Database } from '../database';
+import { Indexer } from '../indexer';
+
+const log = debug('vulcanize:index-block');
+
+const main = async (): Promise<void> => {
+  const argv = await yargs.parserConfiguration({
+    'parse-numbers': false
+  }).options({
+    configFile: {
+      alias: 'f',
+      type: 'string',
+      require: true,
+      demandOption: true,
+      describe: 'Configuration file path (toml)',
+      default: DEFAULT_CONFIG_PATH
+    },
+    block: {
+      type: 'number',
+      require: true,
+      demandOption: true,
+      describe: 'Block number to index'
+    }
+  }).argv;
+
+  const config: Config = await getConfig(argv.configFile);
+  const { ethClient, ethProvider } = await initClients(config);
+
+  const db = new Database(config.database);
+  await db.init();
+
+  const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
+  await graphDb.init();
+
+  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
+
+  const jobQueueConfig = config.jobQueue;
+  assert(jobQueueConfig, 'Missing job queue config');
+
+  const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
+  assert(dbConnectionString, 'Missing job queue db connection string');
+
+  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
+
+  const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
+  await indexer.init();
+
+  graphWatcher.setIndexer(indexer);
+  await graphWatcher.init();
+
+  await indexBlock(indexer, jobQueueConfig.eventsInBatch, argv);
+
+  await db.close();
+};
+
+main().catch(err => {
+  log(err);
+}).finally(() => {
+  process.exit(0);
+});
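
Note: unlike the erc721-watcher version of this CLI above, this variant keeps the graph-node wiring: it builds a GraphDatabase and GraphWatcher, passes the watcher into the Indexer constructor, and calls graphWatcher.setIndexer/init before indexing, since graph-test-watcher still runs subgraph handlers.
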
@@ -160,7 +160,7 @@ export class Indexer implements IPLDIndexerInterface {
     this._populateRelationsMap();
   }
 
-  get serverConfig () {
+  get serverConfig (): ServerConfig {
     return this._serverConfig;
   }
 
@@ -758,12 +758,34 @@ export class Indexer implements IPLDIndexerInterface {
 
   async _fetchAndSaveEvents ({ cid: blockCid, blockHash }: DeepPartial<BlockProgress>): Promise<BlockProgress> {
     assert(blockHash);
-
-    const logsPromise = this._ethClient.getLogs({ blockHash });
     const transactionsPromise = this._ethClient.getBlockWithTransactions({ blockHash });
+    const blockPromise = this._ethClient.getBlockByHash(blockHash);
+    let logs: any[];
+
+    if (this._serverConfig.filterLogs) {
+      const watchedContracts = this._baseIndexer.getWatchedContracts();
+
+      // TODO: Query logs by multiple contracts.
+      const contractlogsPromises = watchedContracts.map((watchedContract): Promise<any> => this._ethClient.getLogs({
+        blockHash,
+        contract: watchedContract.address
+      }));
+
+      const contractlogs = await Promise.all(contractlogsPromises);
+
+      // Flatten logs by contract and sort by index.
+      logs = contractlogs.map(data => {
+        return data.logs;
+      }).flat()
+        .sort((a, b) => {
+          return a.index - b.index;
+        });
+    } else {
+      ({ logs } = await this._ethClient.getLogs({ blockHash }));
+    }
 
     let [
-      { block, logs },
+      { block },
       {
         allEthHeaderCids: {
           nodes: [
@@ -775,7 +797,7 @@ export class Indexer implements IPLDIndexerInterface {
           ]
         }
       }
-    ] = await Promise.all([logsPromise, transactionsPromise]);
+    ] = await Promise.all([blockPromise, transactionsPromise]);
 
     const transactionMap = transactions.reduce((acc: {[key: string]: any}, transaction: {[key: string]: any}) => {
       acc[transaction.txHash] = transaction;
@@ -6,7 +6,7 @@
     // "incremental": true,                         /* Enable incremental compilation */
     "target": "es5",                                /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */
     "module": "commonjs",                           /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
-    // "lib": [],                                   /* Specify library files to be included in the compilation. */
+    "lib": ["es2019"],                              /* Specify library files to be included in the compilation. */
     // "allowJs": true,                             /* Allow javascript files to be compiled. */
     // "checkJs": true,                             /* Report errors in .js files. */
     // "jsx": "preserve",                           /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */
@@ -3,7 +3,6 @@
 //
 
 import assert from 'assert';
-import _ from 'lodash';
 
 import { Cache } from '@vulcanize/cache';
 
@@ -116,24 +115,10 @@ export class EthClient {
   async getLogs (vars: Vars): Promise<any> {
     const result = await this._getCachedOrFetch('getLogs', vars);
     const {
-      getLogs: resultLogs,
-      block: {
-        number: blockNumHex,
-        timestamp: timestampHex,
-        parent
-      }
+      getLogs
     } = result;
 
-    const block = {
-      hash: vars.blockHash,
-      number: parseInt(blockNumHex, 16),
-      timestamp: parseInt(timestampHex, 16),
-      parent
-    };
-
-    const logs = resultLogs.map((logEntry: any) => _.merge({}, logEntry, { transaction: { block } }));
-
-    return { logs, block };
+    return { logs: getLogs };
   }
 
   async _getCachedOrFetch (queryName: keyof typeof ethQueries, vars: Vars): Promise<any> {
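
Note: this is the breaking half of the change. getLogs now returns only { logs }; the synthesized block object and the per-log transaction.block merge are gone (along with the lodash import). Callers that also need the header, such as the watcher _fetchAndSaveEvents methods above and the lighthouse-watcher below, pair getLogs with a separate getBlockByHash call.
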
@@ -31,13 +31,6 @@ query getLogs($blockHash: Bytes32!, $contract: Address) {
       receiptCID
       status
     }
-  block(hash: $blockHash) {
-    number
-    timestamp
-    parent {
-      hash
-    }
-  }
 }
 `;
 
@@ -82,7 +82,11 @@ export class Indexer {
   async fetchEvents (blockHash: string): Promise<Array<ResultEvent>> {
     assert(this._config.watch);
     const contract = this._config.watch.lighthouse;
-    const { logs, block } = await this._ethClient.getLogs({ blockHash, contract });
+
+    const [{ logs }, { block }] = await Promise.all([
+      this._ethClient.getLogs({ blockHash, contract }),
+      this._ethClient.getBlockByHash(blockHash)
+    ]);
 
     const {
       allEthHeaderCids: {
@@ -15,6 +15,10 @@
   # Boolean to filter logs by contract.
   filterLogs = true
 
+  # Max block range for which to return events in eventsInRange GQL query.
+  # Use -1 for skipping check on block range.
+  maxEventsBlockRange = -1
+
 [database]
   type = "postgres"
   host = "localhost"
@ -7,16 +7,12 @@ import 'reflect-metadata';
|
|||||||
import debug from 'debug';
|
import debug from 'debug';
|
||||||
import assert from 'assert';
|
import assert from 'assert';
|
||||||
|
|
||||||
import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue, OrderDirection, UNKNOWN_EVENT_NAME } from '@vulcanize/util';
|
import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue, indexBlock } from '@vulcanize/util';
|
||||||
|
|
||||||
import { Database } from '../database';
|
import { Database } from '../database';
|
||||||
import { Indexer } from '../indexer';
|
import { Indexer } from '../indexer';
|
||||||
import { BlockProgress } from '../entity/BlockProgress';
|
|
||||||
import { Event } from '../entity/Event';
|
|
||||||
|
|
||||||
const DEFAULT_EVENTS_IN_BATCH = 50;
|
const log = debug('vulcanize:index-block');
|
||||||
|
|
||||||
const log = debug('vulcanize:watch-contract');
|
|
||||||
|
|
||||||
const main = async (): Promise<void> => {
|
const main = async (): Promise<void> => {
|
||||||
const argv = await yargs.parserConfiguration({
|
const argv = await yargs.parserConfiguration({
|
||||||
@@ -55,70 +51,7 @@ const main = async (): Promise<void> => {
   const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue);
   await indexer.init();

-  let blockProgressEntities: Partial<BlockProgress>[] = await indexer.getBlocksAtHeight(argv.block, false);
-
-  if (!blockProgressEntities.length) {
-    console.time('time:index-block#getBlocks-ipld-eth-server');
-    const blocks = await indexer.getBlocks({ blockNumber: argv.block });
-
-    blockProgressEntities = blocks.map((block: any): Partial<BlockProgress> => {
-      block.blockTimestamp = block.timestamp;
-
-      return block;
-    });
-
-    console.timeEnd('time:index-block#getBlocks-ipld-eth-server');
-  }
-
-  assert(blockProgressEntities.length, `No blocks fetched for block number ${argv.block}.`);
-
-  for (let blockProgress of blockProgressEntities) {
-    // Check if blockProgress fetched from database.
-    if (!blockProgress.id) {
-      blockProgress = await indexer.fetchBlockEvents(blockProgress);
-    }
-
-    assert(blockProgress instanceof BlockProgress);
-    assert(indexer.processBlock);
-    await indexer.processBlock(blockProgress.blockHash, blockProgress.blockNumber);
-
-    // Check if block has unprocessed events.
-    if (blockProgress.numProcessedEvents < blockProgress.numEvents) {
-      while (!blockProgress.isComplete) {
-        console.time('time:index-block#fetching_events_batch');
-
-        // Fetch events in batches
-        const events = await indexer.getBlockEvents(
-          blockProgress.blockHash,
-          {
-            index: [
-              { value: blockProgress.lastProcessedEventIndex + 1, operator: 'gte', not: false }
-            ]
-          },
-          {
-            limit: jobQueueConfig.eventsInBatch || DEFAULT_EVENTS_IN_BATCH,
-            orderBy: 'index',
-            orderDirection: OrderDirection.asc
-          }
-        );
-
-        console.timeEnd('time:index-block#fetching_events_batch');
-
-        if (events.length) {
-          log(`Processing events batch from index ${events[0].index} to ${events[0].index + events.length - 1}`);
-        }
-
-        console.time('time:index-block#processEvents-processing_events_batch');
-
-        for (const event of events) {
-          // Process events in loop
-          await processEvent(indexer, blockProgress, event);
-        }
-
-        console.timeEnd('time:index-block#processEvents-processing_events_batch');
-      }
-    }
-  }
+  await indexBlock(indexer, jobQueueConfig.eventsInBatch, argv);

   await db.close();
 };
@@ -128,57 +61,3 @@ main().catch(err => {
 }).finally(() => {
   process.exit(0);
 });
-
-/**
- * Process individual event from database.
- * @param indexer
- * @param block
- * @param event
- */
-const processEvent = async (indexer: Indexer, block: BlockProgress, event: Event) => {
-  const eventIndex = event.index;
-
-  // Check that events are processed in order.
-  if (eventIndex <= block.lastProcessedEventIndex) {
-    throw new Error(`Events received out of order for block number ${block.blockNumber} hash ${block.blockHash}, got event index ${eventIndex} and lastProcessedEventIndex ${block.lastProcessedEventIndex}, aborting`);
-  }
-
-  // Check if previous event in block has been processed exactly before this and abort if not.
-  // Skip check if logs fetched are filtered by contract address.
-  if (!indexer.serverConfig.filterLogs) {
-    const prevIndex = eventIndex - 1;
-
-    if (prevIndex !== block.lastProcessedEventIndex) {
-      throw new Error(`Events received out of order for block number ${block.blockNumber} hash ${block.blockHash},` +
-        ` prev event index ${prevIndex}, got event index ${event.index} and lastProcessedEventIndex ${block.lastProcessedEventIndex}, aborting`);
-    }
-  }
-
-  let watchedContract;
-
-  if (!indexer.isWatchedContract) {
-    watchedContract = true;
-  } else {
-    watchedContract = await indexer.isWatchedContract(event.contract);
-  }
-
-  if (watchedContract) {
-    // We might not have parsed this event yet. This can happen if the contract was added
-    // as a result of a previous event in the same block.
-    if (event.eventName === UNKNOWN_EVENT_NAME) {
-      const logObj = JSON.parse(event.extraInfo);
-
-      assert(indexer.parseEventNameAndArgs);
-      assert(typeof watchedContract !== 'boolean');
-      const { eventName, eventInfo } = indexer.parseEventNameAndArgs(watchedContract.kind, logObj);
-
-      event.eventName = eventName;
-      event.eventInfo = JSON.stringify(eventInfo);
-      event = await indexer.saveEventEntity(event);
-    }
-
-    await indexer.processEvent(event);
-  }
-
-  block = await indexer.updateBlockProgress(block, event.index);
-};
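Note: the block-fetch loop and the `processEvent` helper deleted above are not lost; they move into the shared `indexBlock` and `processBatchEvents` utilities in `@vulcanize/util` (added later in this diff), so the generated CLI body reduces to the single call shown in the hunk:

    await indexBlock(indexer, jobQueueConfig.eventsInBatch, argv);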
@@ -12,7 +12,6 @@ import { Database } from '../../database';
 import { Indexer } from '../../indexer';
 import { BlockProgress } from '../../entity/BlockProgress';

-import { DomainHash } from '../../entity/DomainHash';
 import { MultiNonce } from '../../entity/MultiNonce';
 import { _Owner } from '../../entity/_Owner';
 import { IsRevoked } from '../../entity/IsRevoked';

@@ -60,7 +59,7 @@ export const handler = async (argv: any): Promise<void> => {
   const dbTx = await db.createTransactionRunner();

   try {
-    const entities = [BlockProgress, DomainHash, MultiNonce, _Owner, IsRevoked, IsPhisher, IsMember];
+    const entities = [BlockProgress, MultiNonce, _Owner, IsRevoked, IsPhisher, IsMember];

     const removeEntitiesPromise = entities.map(async entityClass => {
       return db.removeEntities<any>(dbTx, entityClass, { blockNumber: MoreThan(argv.blockNumber) });
@@ -17,15 +17,6 @@ export class Client {
     this._client = new GraphQLClient(config);
   }

-  async getDomainHash (blockHash: string, contractAddress: string): Promise<any> {
-    const { domainHash } = await this._client.query(
-      gql(queries.domainHash),
-      { blockHash, contractAddress }
-    );
-
-    return domainHash;
-  }
-
   async getMultiNonce (blockHash: string, contractAddress: string, key0: string, key1: bigint): Promise<any> {
     const { multiNonce } = await this._client.query(
       gql(queries.multiNonce),
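Note: `domainHash` is an immutable variable on the contract, and per the commit message this change skips generating watcher GQL APIs for immutables. A hedged sketch of reading such a value directly with ethers instead; this assumes the Solidity variable is declared `public` (which auto-generates a getter) and is not part of this diff:

    import { ethers } from 'ethers';

    // Hypothetical direct read, bypassing the watcher.
    const registry = new ethers.Contract(contractAddress, PhisherRegistryABI, provider);
    const domainHash: string = await registry.domainHash();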
@@ -14,7 +14,6 @@ import { SyncStatus } from './entity/SyncStatus';
 import { IpldStatus } from './entity/IpldStatus';
 import { BlockProgress } from './entity/BlockProgress';
 import { IPLDBlock } from './entity/IPLDBlock';
-import { DomainHash } from './entity/DomainHash';
 import { MultiNonce } from './entity/MultiNonce';
 import { _Owner } from './entity/_Owner';
 import { IsRevoked } from './entity/IsRevoked';

@@ -48,14 +47,6 @@ export class Database implements IPLDDatabaseInterface {
     return this._baseDatabase.close();
   }

-  async getDomainHash ({ blockHash, contractAddress }: { blockHash: string, contractAddress: string }): Promise<DomainHash | undefined> {
-    return this._conn.getRepository(DomainHash)
-      .findOne({
-        blockHash,
-        contractAddress
-      });
-  }
-
   async getMultiNonce ({ blockHash, contractAddress, key0, key1 }: { blockHash: string, contractAddress: string, key0: string, key1: bigint }): Promise<MultiNonce | undefined> {
     return this._conn.getRepository(MultiNonce)
       .findOne({

@@ -111,12 +102,6 @@ export class Database implements IPLDDatabaseInterface {
       });
   }

-  async saveDomainHash ({ blockHash, blockNumber, contractAddress, value, proof }: DeepPartial<DomainHash>): Promise<DomainHash> {
-    const repo = this._conn.getRepository(DomainHash);
-    const entity = repo.create({ blockHash, blockNumber, contractAddress, value, proof });
-    return repo.save(entity);
-  }
-
   async saveMultiNonce ({ blockHash, blockNumber, contractAddress, key0, key1, value, proof }: DeepPartial<MultiNonce>): Promise<MultiNonce> {
     const repo = this._conn.getRepository(MultiNonce);
     const entity = repo.create({ blockHash, blockNumber, contractAddress, key0, key1, value, proof });

@@ -332,7 +317,6 @@ export class Database implements IPLDDatabaseInterface {
   }

   _setPropColMaps (): void {
-    this._propColMaps.DomainHash = this._getPropertyColumnMapForEntity('DomainHash');
     this._propColMaps.MultiNonce = this._getPropertyColumnMapForEntity('MultiNonce');
     this._propColMaps._Owner = this._getPropertyColumnMapForEntity('_Owner');
     this._propColMaps.IsRevoked = this._getPropertyColumnMapForEntity('IsRevoked');
@@ -1,27 +0,0 @@
-//
-// Copyright 2021 Vulcanize, Inc.
-//
-
-import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';
-
-@Entity()
-@Index(['blockHash', 'contractAddress'], { unique: true })
-export class DomainHash {
-  @PrimaryGeneratedColumn()
-  id!: number;
-
-  @Column('varchar', { length: 66 })
-  blockHash!: string;
-
-  @Column('integer')
-  blockNumber!: number;
-
-  @Column('varchar', { length: 42 })
-  contractAddress!: string;
-
-  @Column('varchar')
-  value!: string;
-
-  @Column('text', { nullable: true })
-  proof!: string;
-}
@@ -1,8 +0,0 @@
-query domainHash($blockHash: String!, $contractAddress: String!){
-  domainHash(blockHash: $blockHash, contractAddress: $contractAddress){
-    value
-    proof{
-      data
-    }
-  }
-}
@@ -3,7 +3,6 @@ import path from 'path';

 export const events = fs.readFileSync(path.join(__dirname, 'events.gql'), 'utf8');
 export const eventsInRange = fs.readFileSync(path.join(__dirname, 'eventsInRange.gql'), 'utf8');
-export const domainHash = fs.readFileSync(path.join(__dirname, 'domainHash.gql'), 'utf8');
 export const multiNonce = fs.readFileSync(path.join(__dirname, 'multiNonce.gql'), 'utf8');
 export const _owner = fs.readFileSync(path.join(__dirname, '_owner.gql'), 'utf8');
 export const isRevoked = fs.readFileSync(path.join(__dirname, 'isRevoked.gql'), 'utf8');
@@ -45,7 +45,6 @@ import { IsPhisher } from './entity/IsPhisher';
 import { IsRevoked } from './entity/IsRevoked';
 import { _Owner } from './entity/_Owner';
 import { MultiNonce } from './entity/MultiNonce';
-import { DomainHash } from './entity/DomainHash';

 const log = debug('vulcanize:indexer');

@@ -56,8 +55,6 @@ const MEMBERSTATUSUPDATED_EVENT = 'MemberStatusUpdated';
 const OWNERSHIPTRANSFERRED_EVENT = 'OwnershipTransferred';
 const PHISHERSTATUSUPDATED_EVENT = 'PhisherStatusUpdated';

-const MAX_EVENTS_BLOCK_RANGE = -1;
-
 export type ResultEvent = {
   block: {
     cid: string;

@@ -109,9 +106,6 @@ export class Indexer implements IPLDIndexerInterface {

   _ipfsClient: IPFSClient

-  _entityTypesMap: Map<string, { [key: string]: string }>
-  _relationsMap: Map<any, { [key: string]: any }>
-
   constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, ethProvider: JsonRpcProvider, jobQueue: JobQueue) {
     assert(db);
     assert(ethClient);

@@ -137,13 +131,9 @@ export class Indexer implements IPLDIndexerInterface {
     assert(PhisherRegistryStorageLayout);
     this._storageLayoutMap.set(KIND_PHISHERREGISTRY, PhisherRegistryStorageLayout);
     this._contractMap.set(KIND_PHISHERREGISTRY, new ethers.utils.Interface(PhisherRegistryABI));
-
-    this._entityTypesMap = new Map();
-
-    this._relationsMap = new Map();
   }

-  get serverConfig () {
+  get serverConfig (): ServerConfig {
     return this._serverConfig;
   }

@@ -207,37 +197,6 @@ export class Indexer implements IPLDIndexerInterface {
     };
   }

-  async domainHash (blockHash: string, contractAddress: string, diff = false): Promise<ValueResult> {
-    let entity = await this._db.getDomainHash({ blockHash, contractAddress });
-
-    if (entity) {
-      log('domainHash: db hit.');
-    } else {
-      log('domainHash: db miss, fetching from upstream server');
-
-      entity = await this._getStorageEntity(
-        blockHash,
-        contractAddress,
-        DomainHash,
-        'domainHash',
-        {},
-        ''
-      );
-
-      await this._db.saveDomainHash(entity);
-
-      if (diff) {
-        const stateUpdate = updateStateForElementaryType({}, 'domainHash', entity.value.toString());
-        await this.createDiffStaged(contractAddress, blockHash, stateUpdate);
-      }
-    }
-
-    return {
-      value: entity.value,
-      proof: JSON.parse(entity.proof)
-    };
-  }
-
   async multiNonce (blockHash: string, contractAddress: string, key0: string, key1: bigint, diff = false): Promise<ValueResult> {
     let entity = await this._db.getMultiNonce({ blockHash, contractAddress, key0, key1 });

@@ -740,7 +699,7 @@ export class Indexer implements IPLDIndexerInterface {
   }

   async getEventsInRange (fromBlockNumber: number, toBlockNumber: number): Promise<Array<Event>> {
-    return this._baseIndexer.getEventsInRange(fromBlockNumber, toBlockNumber, MAX_EVENTS_BLOCK_RANGE);
+    return this._baseIndexer.getEventsInRange(fromBlockNumber, toBlockNumber, this._serverConfig.maxEventsBlockRange);
   }

   async getSyncStatus (): Promise<SyncStatus | undefined> {

@@ -820,49 +779,48 @@ export class Indexer implements IPLDIndexerInterface {
     return this._contractMap.get(kind);
   }

-  getEntityTypesMap (): Map<string, { [key: string]: string }> {
-    return this._entityTypesMap;
-  }
-
   async _fetchAndSaveEvents ({ cid: blockCid, blockHash }: DeepPartial<BlockProgress>): Promise<BlockProgress> {
     assert(blockHash);
-    let block: any, logs: any[];
+    const transactionsPromise = this._ethClient.getBlockWithTransactions({ blockHash });
+    const blockPromise = this._ethClient.getBlockByHash(blockHash);
+    let logs: any[];

     if (this._serverConfig.filterLogs) {
       const watchedContracts = this._baseIndexer.getWatchedContracts();

       // TODO: Query logs by multiple contracts.
-      const contractlogsWithBlockPromises = watchedContracts.map((watchedContract): Promise<any> => this._ethClient.getLogs({
+      const contractlogsPromises = watchedContracts.map((watchedContract): Promise<any> => this._ethClient.getLogs({
         blockHash,
         contract: watchedContract.address
       }));

-      const contractlogsWithBlock = await Promise.all(contractlogsWithBlockPromises);
+      const contractlogs = await Promise.all(contractlogsPromises);

       // Flatten logs by contract and sort by index.
-      logs = contractlogsWithBlock.map(data => {
+      logs = contractlogs.map(data => {
         return data.logs;
       }).flat()
         .sort((a, b) => {
           return a.index - b.index;
         });
-
-      ({ block } = await this._ethClient.getBlockByHash(blockHash));
     } else {
-      ({ block, logs } = await this._ethClient.getLogs({ blockHash }));
+      ({ logs } = await this._ethClient.getLogs({ blockHash }));
     }

-    const {
-      allEthHeaderCids: {
-        nodes: [
-          {
-            ethTransactionCidsByHeaderId: {
-              nodes: transactions
-            }
-          }
-        ]
-      }
-    } = await this._ethClient.getBlockWithTransactions({ blockHash });
+    let [
+      { block },
+      {
+        allEthHeaderCids: {
+          nodes: [
+            {
+              ethTransactionCidsByHeaderId: {
+                nodes: transactions
+              }
+            }
+          ]
+        }
+      }
+    ] = await Promise.all([blockPromise, transactionsPromise]);

     const transactionMap = transactions.reduce((acc: {[key: string]: any}, transaction: {[key: string]: any}) => {
       acc[transaction.txHash] = transaction;
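Note: `_fetchAndSaveEvents` now starts the transactions and block-header queries up front and resolves them together with `Promise.all`. When `filterLogs` is enabled, one `getLogs` call is issued per watched contract and the results are merged; the flatten-and-sort step above condenses to this equivalent sketch:

    // Merge per-contract log arrays into a single, index-ordered list.
    const logs = contractlogs
      .map(data => data.logs)
      .flat()
      .sort((a, b) => a.index - b.index);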
@@ -8,7 +8,7 @@ import debug from 'debug';
 import Decimal from 'decimal.js';
 import { GraphQLScalarType } from 'graphql';

-import { ValueResult, BlockHeight, StateKind } from '@vulcanize/util';
+import { ValueResult, StateKind } from '@vulcanize/util';

 import { Indexer } from './indexer';
 import { EventWatcher } from './events';

@@ -58,11 +58,6 @@ export const createResolvers = async (indexer: Indexer, eventWatcher: EventWatch
     },

     Query: {
-      domainHash: (_: any, { blockHash, contractAddress }: { blockHash: string, contractAddress: string }): Promise<ValueResult> => {
-        log('domainHash', blockHash, contractAddress);
-        return indexer.domainHash(blockHash, contractAddress);
-      },
-
       multiNonce: (_: any, { blockHash, contractAddress, key0, key1 }: { blockHash: string, contractAddress: string, key0: string, key1: bigint }): Promise<ValueResult> => {
         log('multiNonce', blockHash, contractAddress, key0, key1);
         return indexer.multiNonce(blockHash, contractAddress, key0, key1);
@@ -90,7 +90,6 @@ type ResultIPLDBlock {
 type Query {
   events(blockHash: String!, contractAddress: String!, name: String): [ResultEvent!]
   eventsInRange(fromBlockNumber: Int!, toBlockNumber: Int!): [ResultEvent!]
-  domainHash(blockHash: String!, contractAddress: String!): ResultString!
   multiNonce(blockHash: String!, contractAddress: String!, key0: String!, key1: BigInt!): ResultBigInt!
   _owner(blockHash: String!, contractAddress: String!): ResultString!
   isRevoked(blockHash: String!, contractAddress: String!, key0: String!): ResultBoolean!
@@ -63,7 +63,7 @@ export class Indexer implements IndexerInterface {
     this._isDemo = serverConfig.mode === 'demo';
   }

-  get serverConfig () {
+  get serverConfig (): ServerConfig {
     return this._serverConfig;
   }

@@ -40,7 +40,7 @@ export const main = async (): Promise<any> => {
   const config: Config = await getConfig(argv.f);
   const { ethClient } = await initClients(config);

-  const { host, port, mode } = config.server;
+  const { host, port } = config.server;

   const db = new Database(config.database);
   await db.init();
@@ -3,7 +3,7 @@
 //

 import debug from 'debug';
-import { DeepPartial, FindConditions, FindManyOptions, QueryRunner, Server } from 'typeorm';
+import { DeepPartial, FindConditions, FindManyOptions, QueryRunner } from 'typeorm';
 import JSONbig from 'json-bigint';
 import { ethers } from 'ethers';
 import assert from 'assert';

@@ -58,7 +58,7 @@ export class Indexer implements IndexerInterface {
     this._nfpmContract = new ethers.utils.Interface(nfpmABI);
   }

-  get serverConfig () {
+  get serverConfig (): ServerConfig {
     return this._serverConfig;
   }

@@ -435,12 +435,34 @@ export class Indexer implements IndexerInterface {

   async _fetchAndSaveEvents ({ cid: blockCid, blockHash }: DeepPartial<BlockProgress>): Promise<BlockProgress> {
     assert(blockHash);
-
-    const logsPromise = this._ethClient.getLogs({ blockHash });
     const transactionsPromise = this._ethClient.getBlockWithTransactions({ blockHash });
+    const blockPromise = this._ethClient.getBlockByHash(blockHash);
+    let logs: any[];
+
+    if (this._serverConfig.filterLogs) {
+      const watchedContracts = this._baseIndexer.getWatchedContracts();
+
+      // TODO: Query logs by multiple contracts.
+      const contractlogsPromises = watchedContracts.map((watchedContract): Promise<any> => this._ethClient.getLogs({
+        blockHash,
+        contract: watchedContract.address
+      }));
+
+      const contractlogs = await Promise.all(contractlogsPromises);
+
+      // Flatten logs by contract and sort by index.
+      logs = contractlogs.map(data => {
+        return data.logs;
+      }).flat()
+        .sort((a, b) => {
+          return a.index - b.index;
+        });
+    } else {
+      ({ logs } = await this._ethClient.getLogs({ blockHash }));
+    }

     let [
-      { block, logs },
+      { block },
       {
         allEthHeaderCids: {
           nodes: [

@@ -452,7 +474,7 @@ export class Indexer implements IndexerInterface {
         ]
       }
     }
-  ] = await Promise.all([logsPromise, transactionsPromise]);
+  ] = await Promise.all([blockPromise, transactionsPromise]);

     const transactionMap = transactions.reduce((acc: {[key: string]: any}, transaction: {[key: string]: any}) => {
       acc[transaction.txHash] = transaction;
@@ -17,3 +17,4 @@ export * from './src/graph-decimal';
 export * from './src/ipld-indexer';
 export * from './src/ipld-database';
 export * from './src/ipfs';
+export * from './src/index-block';
@@ -1,9 +1,13 @@
 import debug from 'debug';
+import assert from 'assert';

-import { JOB_KIND_PRUNE, QUEUE_BLOCK_PROCESSING, JOB_KIND_INDEX } from './constants';
+import { JOB_KIND_PRUNE, QUEUE_BLOCK_PROCESSING, JOB_KIND_INDEX, UNKNOWN_EVENT_NAME } from './constants';
 import { JobQueue } from './job-queue';
-import { IndexerInterface } from './types';
+import { BlockProgressInterface, IndexerInterface } from './types';
 import { wait } from './misc';
+import { OrderDirection } from './database';
+
+const DEFAULT_EVENTS_IN_BATCH = 50;

 const log = debug('vulcanize:common');

@@ -98,3 +102,93 @@ export const processBlockByNumber = async (
     await wait(blockDelayInMilliSecs);
   }
 };
+
+/**
+ * Process events in batches for a block.
+ * @param indexer
+ * @param block
+ * @param eventsInBatch
+ */
+export const processBatchEvents = async (indexer: IndexerInterface, block: BlockProgressInterface, eventsInBatch: number): Promise<void> => {
+  // Loop until all events for the block have been processed.
+  while (!block.isComplete) {
+    console.time('time:common#processBatchEvents-fetching_events_batch');
+
+    // Fetch events in batches
+    const events = await indexer.getBlockEvents(
+      block.blockHash,
+      {
+        index: [
+          { value: block.lastProcessedEventIndex + 1, operator: 'gte', not: false }
+        ]
+      },
+      {
+        limit: eventsInBatch || DEFAULT_EVENTS_IN_BATCH,
+        orderBy: 'index',
+        orderDirection: OrderDirection.asc
+      }
+    );
+
+    console.timeEnd('time:common#processBatchEvents-fetching_events_batch');
+
+    if (events.length) {
+      log(`Processing events batch from index ${events[0].index} to ${events[0].index + events.length - 1}`);
+    }
+
+    console.time('time:common#processBatchEvents-processing_events_batch');
+
+    for (let event of events) {
+      // Process events in loop
+
+      const eventIndex = event.index;
+      // log(`Processing event ${event.id} index ${eventIndex}`);
+
+      // Check that events are processed in order.
+      if (eventIndex <= block.lastProcessedEventIndex) {
+        throw new Error(`Events received out of order for block number ${block.blockNumber} hash ${block.blockHash}, got event index ${eventIndex} and lastProcessedEventIndex ${block.lastProcessedEventIndex}, aborting`);
+      }
+
+      // Check if previous event in block has been processed exactly before this and abort if not.
+      // Skip check if logs fetched are filtered by contract address.
+      if (!indexer.serverConfig.filterLogs) {
+        const prevIndex = eventIndex - 1;
+
+        if (prevIndex !== block.lastProcessedEventIndex) {
+          throw new Error(`Events received out of order for block number ${block.blockNumber} hash ${block.blockHash},` +
+            ` prev event index ${prevIndex}, got event index ${event.index} and lastProcessedEventIndex ${block.lastProcessedEventIndex}, aborting`);
+        }
+      }
+
+      let watchedContract;
+
+      if (!indexer.isWatchedContract) {
+        // uni-info-watcher indexer doesn't have watched contracts implementation.
+        watchedContract = true;
+      } else {
+        watchedContract = await indexer.isWatchedContract(event.contract);
+      }
+
+      if (watchedContract) {
+        // We might not have parsed this event yet. This can happen if the contract was added
+        // as a result of a previous event in the same block.
+        if (event.eventName === UNKNOWN_EVENT_NAME) {
+          const logObj = JSON.parse(event.extraInfo);
+
+          assert(indexer.parseEventNameAndArgs);
+          assert(typeof watchedContract !== 'boolean');
+          const { eventName, eventInfo } = indexer.parseEventNameAndArgs(watchedContract.kind, logObj);
+
+          event.eventName = eventName;
+          event.eventInfo = JSON.stringify(eventInfo);
+          event = await indexer.saveEventEntity(event);
+        }
+
+        await indexer.processEvent(event);
+      }
+
+      block = await indexer.updateBlockProgress(block, event.index);
+    }
+
+    console.timeEnd('time:common#processBatchEvents-processing_events_batch');
+  }
+};
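Note: `processBatchEvents` is now the single shared implementation of the batching loop that previously lived in each watcher's job runner and index-block CLI. A typical call site, matching how the job-runner hunk later in this diff uses it:

    const block = await indexer.getBlockProgress(blockHash);
    assert(block);

    await processBatchEvents(indexer, block, jobQueueConfig.eventsInBatch);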
@@ -11,7 +11,7 @@ import { ConnectionOptions } from 'typeorm';

 import { Config as CacheConfig, getCache } from '@vulcanize/cache';
 import { EthClient } from '@vulcanize/ipld-eth-client';
-import { BaseProvider, JsonRpcProvider } from '@ethersproject/providers';
+import { JsonRpcProvider } from '@ethersproject/providers';

 import { getCustomProvider } from './misc';

@@ -35,6 +35,7 @@ export interface ServerConfig {
   subgraphPath: string;
   wasmRestartBlocksInterval: number;
   filterLogs: boolean;
+  maxEventsBlockRange: number;
 }

 export interface UpstreamConfig {
packages/util/src/index-block.ts (new file)
@@ -0,0 +1,49 @@
+//
+// Copyright 2022 Vulcanize, Inc.
+//
+
+import assert from 'assert';
+
+import { BlockProgressInterface, IndexerInterface } from './types';
+import { processBatchEvents } from './common';
+
+export const indexBlock = async (
+  indexer: IndexerInterface,
+  eventsInBatch: number,
+  argv: {
+    block: number,
+  }
+): Promise<any> => {
+  let blockProgressEntities: Partial<BlockProgressInterface>[] = await indexer.getBlocksAtHeight(argv.block, false);
+
+  if (!blockProgressEntities.length) {
+    console.time('time:index-block#getBlocks-ipld-eth-server');
+    const blocks = await indexer.getBlocks({ blockNumber: argv.block });
+
+    blockProgressEntities = blocks.map((block: any): Partial<BlockProgressInterface> => {
+      block.blockTimestamp = block.timestamp;
+
+      return block;
+    });
+
+    console.timeEnd('time:index-block#getBlocks-ipld-eth-server');
+  }
+
+  assert(blockProgressEntities.length, `No blocks fetched for block number ${argv.block}.`);
+
+  for (const partialblockProgress of blockProgressEntities) {
+    let blockProgress: BlockProgressInterface;
+
+    // Check if blockProgress fetched from database.
+    if (!partialblockProgress.id) {
+      blockProgress = await indexer.fetchBlockEvents(partialblockProgress);
+    } else {
+      blockProgress = partialblockProgress as BlockProgressInterface;
+    }
+
+    assert(indexer.processBlock);
+    await indexer.processBlock(blockProgress.blockHash, blockProgress.blockNumber);
+
+    await processBatchEvents(indexer, blockProgress, eventsInBatch);
+  }
+};
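Note: with `indexBlock` exported from `@vulcanize/util`, a generated index-block command only has to set up config, database and indexer, then delegate. A hedged sketch of the CLI tail under those assumptions (config loading and yargs parsing omitted; `argv.block` is the block-number argument used above):

    // Index a single block on demand, then release the database connection.
    await indexBlock(indexer, jobQueueConfig.eventsInBatch, { block: argv.block });
    await db.close();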
@@ -13,17 +13,13 @@ import {
   JOB_KIND_EVENTS,
   JOB_KIND_CONTRACT,
   MAX_REORG_DEPTH,
-  UNKNOWN_EVENT_NAME,
   QUEUE_BLOCK_PROCESSING,
   QUEUE_EVENT_PROCESSING
 } from './constants';
 import { JobQueue } from './job-queue';
 import { EventInterface, IndexerInterface, IPLDIndexerInterface, SyncStatusInterface } from './types';
 import { wait } from './misc';
-import { createPruningJob } from './common';
-import { OrderDirection } from './database';
-
-const DEFAULT_EVENTS_IN_BATCH = 50;
+import { createPruningJob, processBatchEvents } from './common';

 const log = debug('vulcanize:job-runner');

@@ -241,92 +237,13 @@ export class JobRunner {
     const { blockHash } = job.data;

     console.time('time:job-runner#_processEvents-get-block-progress');
-    let block = await this._indexer.getBlockProgress(blockHash);
+    const block = await this._indexer.getBlockProgress(blockHash);
     console.timeEnd('time:job-runner#_processEvents-get-block-progress');
     assert(block);

     console.time('time:job-runner#_processEvents-events');

-    while (!block.isComplete) {
-      console.time('time:job-runner#_processEvents-fetching_events_batch');
-
-      // Fetch events in batches
-      const events: EventInterface[] = await this._indexer.getBlockEvents(
-        blockHash,
-        {
-          index: [
-            { value: block.lastProcessedEventIndex + 1, operator: 'gte', not: false }
-          ]
-        },
-        {
-          limit: this._jobQueueConfig.eventsInBatch || DEFAULT_EVENTS_IN_BATCH,
-          orderBy: 'index',
-          orderDirection: OrderDirection.asc
-        }
-      );
-
-      console.timeEnd('time:job-runner#_processEvents-fetching_events_batch');
-
-      if (events.length) {
-        log(`Processing events batch from index ${events[0].index} to ${events[0].index + events.length - 1}`);
-      }
-
-      console.time('time:job-runner#_processEvents-processing_events_batch');
-
-      for (let event of events) {
-        // Process events in loop
-
-        const eventIndex = event.index;
-        // log(`Processing event ${event.id} index ${eventIndex}`);
-
-        // Check that events are processed in order.
-        if (eventIndex <= block.lastProcessedEventIndex) {
-          throw new Error(`Events received out of order for block number ${block.blockNumber} hash ${block.blockHash}, got event index ${eventIndex} and lastProcessedEventIndex ${block.lastProcessedEventIndex}, aborting`);
-        }
-
-        // Check if previous event in block has been processed exactly before this and abort if not.
-        // Skip check if logs fetched are filtered by contract address.
-        if (!this._indexer.serverConfig.filterLogs) {
-          const prevIndex = eventIndex - 1;
-
-          if (prevIndex !== block.lastProcessedEventIndex) {
-            throw new Error(`Events received out of order for block number ${block.blockNumber} hash ${block.blockHash},` +
-              ` prev event index ${prevIndex}, got event index ${event.index} and lastProcessedEventIndex ${block.lastProcessedEventIndex}, aborting`);
-          }
-        }
-
-        let watchedContract;
-
-        if (!this._indexer.isWatchedContract) {
-          // uni-info-watcher indexer doesn't have watched contracts implementation.
-          watchedContract = true;
-        } else {
-          watchedContract = await this._indexer.isWatchedContract(event.contract);
-        }
-
-        if (watchedContract) {
-          // We might not have parsed this event yet. This can happen if the contract was added
-          // as a result of a previous event in the same block.
-          if (event.eventName === UNKNOWN_EVENT_NAME) {
-            const logObj = JSON.parse(event.extraInfo);
-
-            assert(this._indexer.parseEventNameAndArgs);
-            assert(typeof watchedContract !== 'boolean');
-            const { eventName, eventInfo } = this._indexer.parseEventNameAndArgs(watchedContract.kind, logObj);
-
-            event.eventName = eventName;
-            event.eventInfo = JSON.stringify(eventInfo);
-            event = await this._indexer.saveEventEntity(event);
-          }
-
-          await this._indexer.processEvent(event);
-        }
-
-        block = await this._indexer.updateBlockProgress(block, event.index);
-      }
-
-      console.timeEnd('time:job-runner#_processEvents-processing_events_batch');
-    }
+    await processBatchEvents(this._indexer, block, this._jobQueueConfig.eventsInBatch);

     console.timeEnd('time:job-runner#_processEvents-events');
   }