Mirror of https://github.com/cerc-io/watcher-ts

Implement changes in codegen for performance improvements (#84)

commit 1e7d84879b (parent 97b529f3f2)
@@ -61,12 +61,16 @@ columns:
    columnOptions:
      - option: default
        value: false
  - name: createdAt
    tsType: Date
    columnType: CreateDateColumn
imports:
  - toImport:
      - Entity
      - PrimaryGeneratedColumn
      - Column
      - Index
      - CreateDateColumn
    from: typeorm
  - toImport:
      - BlockProgressInterface
@@ -11,7 +11,7 @@

  # IPFS API address (can be taken from the output on running the IPFS daemon).
  ipfsApiAddr = "/ip4/127.0.0.1/tcp/5001"

  {{#if subgraphPath}}
  subgraphPath = "{{subgraphPath}}"
  {{/if}}

@@ -42,3 +42,4 @@
  dbConnectionString = "postgres://postgres:postgres@localhost/{{folderName}}-job-queue"
  maxCompletionLagInSecs = 300
  jobDelayInMilliSecs = 100
  eventsInBatch = 50
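Note: `eventsInBatch` is a new job-queue setting that caps how many events are pulled from the database per query while a block is being processed. A minimal sketch (not part of the diff) of how such a cap can be applied with TypeORM pagination; the `Event` shape here is assumed for illustration:

import { Repository } from 'typeorm';

// Assumed minimal event shape; the generated watchers define their own Event entity.
interface Event {
  id: number;
  blockHash: string;
  index: number;
}

// Fetch a block's events in pages of `eventsInBatch` instead of loading them all at once.
async function * fetchEventsInBatches (repo: Repository<Event>, blockHash: string, eventsInBatch = 50): AsyncGenerator<Event[]> {
  let skip = 0;

  while (true) {
    const page = await repo.find({
      where: { blockHash },
      order: { index: 'ASC' },
      skip,
      take: eventsInBatch
    });

    if (!page.length) {
      break;
    }

    yield page;
    skip += page.length;
  }
}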
@@ -230,9 +230,10 @@ export class Database implements DatabaseInterface {
    return repo.save(entity);
  }

  async getContracts (where: FindConditions<Contract>): Promise<Contract[]> {
  async getContracts (): Promise<Contract[]> {
    const repo = this._conn.getRepository(Contract);
    return repo.find({ where });

    return this._baseDatabase.getContracts(repo);
  }

  async getContract (address: string): Promise<Contract | undefined> {
@@ -265,7 +266,6 @@ export class Database implements DatabaseInterface {
  async getBlockEvents (blockHash: string, where: Where, queryOptions: QueryOptions): Promise<Event[]> {
    const repo = this._conn.getRepository(Event);

    return this._baseDatabase.getBlockEvents(repo, blockHash, options);
    return this._baseDatabase.getBlockEvents(repo, blockHash, where, queryOptions);
  }

@@ -276,12 +276,10 @@ export class Database implements DatabaseInterface {
    return this._baseDatabase.saveEvents(blockRepo, eventRepo, block, events);
  }

  async saveContract (address: string, kind: string, checkpoint: boolean, startingBlock: number): Promise<void> {
    await this._conn.transaction(async (tx) => {
      const repo = tx.getRepository(Contract);
  async saveContract (address: string, kind: string, checkpoint: boolean, startingBlock: number): Promise<Contract> {
    const repo = queryRunner.manager.getRepository(Contract);

      return this._baseDatabase.saveContract(repo, address, kind, checkpoint, startingBlock);
    });
    return this._baseDatabase.saveContract(repo, address, kind, checkpoint, startingBlock);
  }

  async updateSyncStatusIndexedBlock (queryRunner: QueryRunner, blockHash: string, blockNumber: number, force = false): Promise<SyncStatus> {
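Note: `saveContract` no longer opens its own transaction; it now works against a repository taken from a caller-supplied `QueryRunner`, so a group of writes can share one transaction instead of paying a commit per contract. A sketch (not from the repo) of that caller-owned transaction pattern in TypeORM:

import { Connection, QueryRunner } from 'typeorm';

// Run several repository operations inside one transaction owned by the caller.
async function withTransaction<T> (conn: Connection, work: (queryRunner: QueryRunner) => Promise<T>): Promise<T> {
  const queryRunner = conn.createQueryRunner();
  await queryRunner.connect();
  await queryRunner.startTransaction();

  try {
    const result = await work(queryRunner);
    await queryRunner.commitTransaction();
    return result;
  } catch (error) {
    await queryRunner.rollbackTransaction();
    throw error;
  } finally {
    await queryRunner.release();
  }
}

Callers can then hand `queryRunner.manager.getRepository(Contract)` down to `saveContract`, as the hunk above shows.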
@@ -296,10 +294,10 @@ export class Database implements DatabaseInterface {
    return this._baseDatabase.updateSyncStatusCanonicalBlock(repo, blockHash, blockNumber, force);
  }

  async updateSyncStatusChainHead (queryRunner: QueryRunner, blockHash: string, blockNumber: number): Promise<SyncStatus> {
  async updateSyncStatusChainHead (queryRunner: QueryRunner, blockHash: string, blockNumber: number, force = false): Promise<SyncStatus> {
    const repo = queryRunner.manager.getRepository(SyncStatus);

    return this._baseDatabase.updateSyncStatusChainHead(repo, blockHash, blockNumber);
    return this._baseDatabase.updateSyncStatusChainHead(repo, blockHash, blockNumber, force);
  }

  async getSyncStatus (queryRunner: QueryRunner): Promise<SyncStatus | undefined> {
@@ -337,10 +335,10 @@ export class Database implements DatabaseInterface {
    return this._baseDatabase.getBlockProgressEntities(repo, where, options);
  }

  async updateBlockProgress (queryRunner: QueryRunner, blockHash: string, lastProcessedEventIndex: number): Promise<void> {
  async updateBlockProgress (queryRunner: QueryRunner, block: BlockProgress, lastProcessedEventIndex: number): Promise<BlockProgress> {
    const repo = queryRunner.manager.getRepository(BlockProgress);

    return this._baseDatabase.updateBlockProgress(repo, blockHash, lastProcessedEventIndex);
    return this._baseDatabase.updateBlockProgress(repo, block, lastProcessedEventIndex);
  }

  async removeEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindManyOptions<Entity> | FindConditions<Entity>): Promise<void> {
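Note: `updateBlockProgress` now takes the already-loaded `BlockProgress` entity and returns the updated row, rather than re-selecting it by `blockHash` on every call. A sketch of the idea with an assumed entity shape (see `BlockProgressInterface` in `@vulcanize/util` for the real fields):

import { Repository } from 'typeorm';

// Assumed shape, for illustration only.
interface BlockProgress {
  blockHash: string;
  numEvents: number;
  numProcessedEvents: number;
  lastProcessedEventIndex: number;
  isComplete: boolean;
}

// Mutate the in-memory entity and save it, avoiding an extra SELECT per processed event.
async function updateBlockProgress (
  repo: Repository<BlockProgress>,
  block: BlockProgress,
  lastProcessedEventIndex: number
): Promise<BlockProgress> {
  if (!block.isComplete && lastProcessedEventIndex > block.lastProcessedEventIndex) {
    block.lastProcessedEventIndex = lastProcessedEventIndex;
    block.numProcessedEvents++;
    block.isComplete = block.numProcessedEvents >= block.numEvents;
  }

  return repo.save(block);
}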
@@ -39,6 +39,16 @@ export const main = async (): Promise<any> => {
      type: 'number',
      demandOption: true,
      describe: 'Block number to stop processing at'
    },
    prefetch: {
      type: 'boolean',
      default: false,
      describe: 'Block and events prefetch mode'
    },
    batchBlocks: {
      type: 'number',
      default: 10,
      describe: 'Number of blocks prefetched in batch'
    }
  }).argv;

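Note: the fill command gains `prefetch` and `batchBlocks` flags so blocks and their events can be fetched ahead of processing in groups. A sketch of how the flags might drive a batched loop; the `startBlock` option name and the `prefetchBlockRange` helper are assumptions, not code from the repo:

import yargs from 'yargs';

// yargs v16-style parse (the generated CLIs use `.argv`, as in the hunk above);
// yargs v17 would use `.parseSync()` instead.
const argv = yargs.options({
  startBlock: { type: 'number', demandOption: true, describe: 'Block number to start processing at' },
  endBlock: { type: 'number', demandOption: true, describe: 'Block number to stop processing at' },
  prefetch: { type: 'boolean', default: false, describe: 'Block and events prefetch mode' },
  batchBlocks: { type: 'number', default: 10, describe: 'Number of blocks prefetched in batch' }
}).argv;

// Hypothetical helper standing in for whatever fetches blocks and events ahead of time.
async function prefetchBlockRange (from: number, to: number): Promise<void> {
  console.log(`Prefetching blocks ${from} to ${to}`);
}

export const main = async (): Promise<void> => {
  if (!argv.prefetch) {
    return;
  }

  // Walk the requested range in groups of `batchBlocks`.
  for (let from = argv.startBlock; from <= argv.endBlock; from += argv.batchBlocks) {
    const to = Math.min(from + argv.batchBlocks - 1, argv.endBlock);
    await prefetchBlockRange(from, to);
  }
};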
@@ -56,10 +66,6 @@ export const main = async (): Promise<any> => {
  // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
  // Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
  const pubsub = new PubSub();
  const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, graphWatcher);

  graphWatcher.setIndexer(indexer);
  await graphWatcher.init();

  const jobQueueConfig = config.jobQueue;
  assert(jobQueueConfig, 'Missing job queue config');
@@ -70,9 +76,15 @@ export const main = async (): Promise<any> => {
  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
  await jobQueue.start();

  const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, jobQueue, graphWatcher);
  await indexer.init();

  graphWatcher.setIndexer(indexer);
  await graphWatcher.init();

  const eventWatcher = new EventWatcher(config.upstream, ethClient, postgraphileClient, indexer, pubsub, jobQueue);

  await fillBlocks(jobQueue, indexer, postgraphileClient, eventWatcher, config.upstream.ethServer.blockDelayInMilliSecs, argv);
  await fillBlocks(jobQueue, indexer, eventWatcher, config.upstream.ethServer.blockDelayInMilliSecs, argv);
};

main().catch(err => {
@@ -80,3 +92,8 @@ main().catch(err => {
}).finally(() => {
  process.exit();
});

process.on('SIGINT', () => {
  log(`Exiting process ${process.pid} with code 0`);
  process.exit(0);
});
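Note: each generated CLI entry point now installs a SIGINT handler so Ctrl-C terminates with an explicit log and exit code. A small sketch of the same pattern extended to release shared resources first; the `db`/`jobQueue` parameter shapes are assumptions for illustration:

// Minimal shutdown hook; the resource shapes are assumed, not the watcher's actual types.
function registerShutdown (db: { close: () => Promise<void> }, jobQueue: { stop: () => Promise<void> }): void {
  process.on('SIGINT', async () => {
    console.log(`Exiting process ${process.pid} with code 0`);

    await jobQueue.stop();
    await db.close();

    process.exit(0);
  });
}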
@@ -4,7 +4,7 @@

import assert from 'assert';
import debug from 'debug';
import { DeepPartial, FindConditions } from 'typeorm';
import { DeepPartial, FindConditions, FindManyOptions } from 'typeorm';
import JSONbig from 'json-bigint';
import { ethers } from 'ethers';
import { sha256 } from 'multiformats/hashes/sha2';
@@ -16,7 +16,7 @@ import { BaseProvider } from '@ethersproject/providers';
import * as codec from '@ipld/dag-cbor';
import { EthClient } from '@vulcanize/ipld-eth-client';
import { StorageLayout } from '@vulcanize/solidity-mapper';
import { Indexer as BaseIndexer, IndexerInterface, ValueResult, UNKNOWN_EVENT_NAME, ServerConfig, Where, QueryOptions, updateStateForElementaryType, updateStateForMappingType } from '@vulcanize/util';
import { Indexer as BaseIndexer, IndexerInterface, ValueResult, UNKNOWN_EVENT_NAME, ServerConfig, Where, QueryOptions, updateStateForElementaryType, updateStateForMappingType, JobQueue } from '@vulcanize/util';
import { GraphWatcher } from '@vulcanize/graph-node';

import { Database } from './database';
@@ -93,7 +93,7 @@ export class Indexer implements IndexerInterface {

  _ipfsClient: IPFSClient

  constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, postgraphileClient: EthClient, ethProvider: BaseProvider, graphWatcher: GraphWatcher) {
  constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, postgraphileClient: EthClient, ethProvider: BaseProvider, jobQueue: JobQueue, graphWatcher: GraphWatcher) {
    assert(db);
    assert(ethClient);
    assert(postgraphileClient);
@@ -103,7 +103,7 @@ export class Indexer implements IndexerInterface {
    this._postgraphileClient = postgraphileClient;
    this._ethProvider = ethProvider;
    this._serverConfig = serverConfig;
    this._baseIndexer = new BaseIndexer(this._db, this._ethClient, this._postgraphileClient, this._ethProvider);
    this._baseIndexer = new BaseIndexer(this._db, this._ethClient, this._postgraphileClient, this._ethProvider, jobQueue);
    this._graphWatcher = graphWatcher;

    const { abi, storageLayout } = artifacts;
@@ -119,6 +119,10 @@ export class Indexer implements IndexerInterface {
    this._ipfsClient = new IPFSClient(this._serverConfig.ipfsApiAddr);
  }

  async init (): Promise<void> {
    await this._baseIndexer.fetchContracts();
  }

  getResultEvent (event: Event): ResultEvent {
    const block = event.block;
    const eventFields = JSONbig.parse(event.eventInfo);
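Note: the new `init()` loads every watched contract up front via `fetchContracts()`, letting later per-event checks hit an in-memory map instead of issuing a query per event (see the `watchContract`/`cacheContract` hunk below). A minimal sketch of such a cache, with an assumed contract shape:

// Assumed minimal contract shape; the generated watchers use their Contract entity.
interface Contract {
  address: string;
  kind: string;
  startingBlock: number;
}

class ContractCache {
  private _watchedContracts: { [address: string]: Contract } = {};

  // Load every watched contract once at startup.
  async init (fetchAll: () => Promise<Contract[]>): Promise<void> {
    const contracts = await fetchAll();
    contracts.forEach(contract => this.cacheContract(contract));
  }

  cacheContract (contract: Contract): void {
    this._watchedContracts[contract.address] = contract;
  }

  // Synchronous lookup; no database round trip per event.
  isWatchedContract (address: string): Contract | undefined {
    return this._watchedContracts[address];
  }
}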
@@ -615,22 +619,14 @@ export class Indexer implements IndexerInterface {
    };
  }

  async watchContract (address: string, kind: string, checkpoint: boolean, startingBlock?: number): Promise<boolean> {
    // Use the checksum address (https://docs.ethers.io/v5/api/utils/address/#utils-getAddress) if input to address is a contract address.
    // If a contract identifier is passed as address instead, no need to convert to checksum address.
    // Customize: use the kind input to filter out non-contract-address input to address.
    const formattedAddress = (kind === '__protocol__') ? address : ethers.utils.getAddress(address);
  async watchContract (address: string, kind: string, checkpoint: boolean, startingBlock: number): Promise<void> {
    this._baseIndexer.updateIPLDStatusMap(address, {});

    if (!startingBlock) {
      const syncStatus = await this.getSyncStatus();
      assert(syncStatus);
    return this._baseIndexer.watchContract(address, kind, checkpoint, startingBlock);
  }

      startingBlock = syncStatus.latestIndexedBlockNumber;
    }

    await this._db.saveContract(formattedAddress, kind, checkpoint, startingBlock);

    return true;
  cacheContract (contract: Contract): void {
    return this._baseIndexer.cacheContract(contract);
  }

  async getHookStatus (): Promise<HookStatus | undefined> {
@@ -698,8 +694,8 @@ export class Indexer implements IndexerInterface {
    return this._baseIndexer.updateSyncStatusIndexedBlock(blockHash, blockNumber, force);
  }

  async updateSyncStatusChainHead (blockHash: string, blockNumber: number): Promise<SyncStatus> {
    return this._baseIndexer.updateSyncStatusChainHead(blockHash, blockNumber);
  async updateSyncStatusChainHead (blockHash: string, blockNumber: number, force = false): Promise<SyncStatus> {
    return this._baseIndexer.updateSyncStatusChainHead(blockHash, blockNumber, force);
  }

  async updateSyncStatusCanonicalBlock (blockHash: string, blockNumber: number, force = false): Promise<SyncStatus> {
@@ -742,8 +738,8 @@ export class Indexer implements IndexerInterface {
    return this._baseIndexer.markBlocksAsPruned(blocks);
  }

  async updateBlockProgress (blockHash: string, lastProcessedEventIndex: number): Promise<void> {
    return this._baseIndexer.updateBlockProgress(blockHash, lastProcessedEventIndex);
  async updateBlockProgress (block: BlockProgress, lastProcessedEventIndex: number): Promise<BlockProgress> {
    return this._baseIndexer.updateBlockProgress(block, lastProcessedEventIndex);
  }

  async getAncestorAtDepth (blockHash: string, depth: number): Promise<string> {
@@ -752,19 +748,24 @@ export class Indexer implements IndexerInterface {

  async _fetchAndSaveEvents ({ cid: blockCid, blockHash }: DeepPartial<BlockProgress>): Promise<BlockProgress> {
    assert(blockHash);
    let { block, logs } = await this._ethClient.getLogs({ blockHash });

    const {
      allEthHeaderCids: {
        nodes: [
          {
            ethTransactionCidsByHeaderId: {
              nodes: transactions
    const logsPromise = this._ethClient.getLogs({ blockHash });
    const transactionsPromise = this._postgraphileClient.getBlockWithTransactions({ blockHash });

    let [
      { block, logs },
      {
        allEthHeaderCids: {
          nodes: [
            {
              ethTransactionCidsByHeaderId: {
                nodes: transactions
              }
            }
          }
        ]
      ]
      }
    }
    } = await this._postgraphileClient.getBlockWithTransactions({ blockHash });
    ] = await Promise.all([logsPromise, transactionsPromise]);

    const transactionMap = transactions.reduce((acc: {[key: string]: any}, transaction: {[key: string]: any}) => {
      acc[transaction.txHash] = transaction;
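Note: `_fetchAndSaveEvents` previously awaited the logs query and the block-with-transactions query one after the other; it now starts both and awaits them together, so the slower upstream call bounds the latency rather than the sum of the two. A reduced sketch of the pattern with placeholder fetchers:

// Placeholder fetchers standing in for ethClient.getLogs and
// postgraphileClient.getBlockWithTransactions.
async function getLogs (blockHash: string): Promise<{ logs: unknown[] }> {
  return { logs: [] };
}

async function getBlockWithTransactions (blockHash: string): Promise<{ transactions: unknown[] }> {
  return { transactions: [] };
}

async function fetchBlockData (blockHash: string): Promise<void> {
  // Sequential awaits cost roughly t(logs) + t(transactions);
  // awaiting both together costs roughly max(t(logs), t(transactions)).
  const [{ logs }, { transactions }] = await Promise.all([
    getLogs(blockHash),
    getBlockWithTransactions(blockHash)
  ]);

  console.log(`${logs.length} logs, ${transactions.length} transactions for ${blockHash}`);
}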
@@ -56,22 +56,12 @@ export class JobRunner {
      // TODO Call pre-block hook here (Directly or indirectly (Like done through indexer.processEvent for events)).

      await this._baseJobRunner.processBlock(job);

      await this._jobQueue.markComplete(job);
    });
  }

  async subscribeEventProcessingQueue (): Promise<void> {
    await this._jobQueue.subscribe(QUEUE_EVENT_PROCESSING, async (job) => {
      const event = await this._baseJobRunner.processEvent(job);

      const watchedContract = await this._indexer.isWatchedContract(event.contract);
      if (watchedContract) {
        await this._indexer.processEvent(event);
      }

      await this._indexer.updateBlockProgress(event.block.blockHash, event.index);
      await this._jobQueue.markComplete(job);
      await this._baseJobRunner.processEvent(job);
    });
  }

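Note: the per-event work that used to sit in the generated job runner (the awaited `isWatchedContract` lookup, `updateBlockProgress`, `markComplete`) now lives in the shared base runner's `processEvent`. Combined with the contract cache sketched earlier, the hot loop can check contracts synchronously; an illustrative reduction, not the base runner's actual code:

// Illustrative shapes only.
interface Event {
  index: number;
  contract: string;
}

// With a warmed cache the contract check is a synchronous map lookup,
// so the only awaits left in the loop are the actual event handlers.
async function processEvents (
  events: Event[],
  isWatchedContract: (address: string) => boolean,
  handleEvent: (event: Event) => Promise<void>
): Promise<void> {
  for (const event of events) {
    if (!isWatchedContract(event.contract)) {
      continue;
    }

    await handleEvent(event);
  }
}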
@@ -135,11 +125,6 @@ export const main = async (): Promise<any> => {

  const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, ethProvider, config.server.subgraphPath);

  const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, graphWatcher);

  graphWatcher.setIndexer(indexer);
  await graphWatcher.init();

  const jobQueueConfig = config.jobQueue;
  assert(jobQueueConfig, 'Missing job queue config');

@@ -149,6 +134,15 @@ export const main = async (): Promise<any> => {
  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
  await jobQueue.start();

  const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, jobQueue, graphWatcher);
  await indexer.init();

  graphWatcher.setIndexer(indexer);
  await graphWatcher.init();

  // Watching all the contracts in the subgraph.
  await graphWatcher.addContracts();

  const jobRunner = new JobRunner(jobQueueConfig, indexer, jobQueue);
  await jobRunner.start();
};
@@ -162,3 +156,8 @@ main().then(() => {
process.on('uncaughtException', err => {
  log('uncaughtException', err);
});

process.on('SIGINT', () => {
  log(`Exiting process ${process.pid} with code 0`);
  process.exit(0);
});
@@ -51,10 +51,6 @@ export const main = async (): Promise<any> => {
  // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
  // Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
  const pubsub = new PubSub();
  const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, graphWatcher);

  graphWatcher.setIndexer(indexer);
  await graphWatcher.init();

  const jobQueueConfig = config.jobQueue;
  assert(jobQueueConfig, 'Missing job queue config');
@@ -64,6 +60,12 @@ export const main = async (): Promise<any> => {

  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });

  const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, jobQueue, graphWatcher);
  await indexer.init();

  graphWatcher.setIndexer(indexer);
  await graphWatcher.init();

  const eventWatcher = new EventWatcher(config.upstream, ethClient, postgraphileClient, indexer, pubsub, jobQueue);

  if (watcherKind === KIND_ACTIVE) {
@@ -98,3 +100,8 @@ main().then(() => {
}).catch(err => {
  log(err);
});

process.on('SIGINT', () => {
  log(`Exiting process ${process.pid} with code 0`);
  process.exit(0);
});
@@ -82,6 +82,8 @@ const main = async (): Promise<void> => {
  await indexer.watchContract(argv.address, argv.kind, argv.checkpoint, argv.startingBlock);

  await db.close();
  await jobQueue.stop();
  process.exit();
};

main().catch(err => {