Refactoring common code into util (#224)

* Refactor and pull common database code to util.

* Refactor and pull common indexer code to util.

Co-authored-by: nabarun <nabarun@deepstacksoft.com>
Authored by Ashwin Phatak on 2021-08-20 19:02:57 +05:30 (committed by GitHub)
parent 8489b5632a
commit 2ddedfefe7
15 changed files with 245 additions and 247 deletions
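
The changes below all follow one pattern: watcher-specific Database and Indexer classes stop carrying their own copies of the shared query and transaction logic and instead delegate to the base Database and Indexer classes in @vulcanize/util, passing in their own repositories and clients. A minimal sketch of that pattern is shown here (illustrative only: the WatcherDatabase name is hypothetical, and it assumes @vulcanize/util exports its base Database class; the delegated methods mirror the ones added to the util Database in this commit):

// Illustrative sketch, not code from this commit. Assumes @vulcanize/util
// exports its base Database class; 'WatcherDatabase' is a hypothetical name.
import { Connection } from 'typeorm';
import { Database as BaseDatabase } from '@vulcanize/util';

import { Event } from './entity/Event';

export class WatcherDatabase {
  _conn: Connection
  _baseDatabase: BaseDatabase

  constructor (conn: Connection, baseDatabase: BaseDatabase) {
    this._conn = conn;
    this._baseDatabase = baseDatabase;
  }

  // Before this commit each watcher built this query itself; now it only
  // resolves the repository for its own Event entity and delegates.
  async getBlockEvents (blockHash: string): Promise<Event[]> {
    const repo = this._conn.getRepository(Event);
    return this._baseDatabase.getBlockEvents(repo, blockHash);
  }

  async getEvent (id: string): Promise<Event | undefined> {
    const repo = this._conn.getRepository(Event);
    return this._baseDatabase.getEvent(repo, id);
  }
}

The Indexer classes change the same way: each one constructs a BaseIndexer with (db, ethClient) and forwards getSyncStatus, getBlock, getEvent, getBlockProgress, updateBlockProgress, getBlockEvents and getOrFetchBlockEvents to it, keeping only watcher-specific behaviour (such as _fetchAndSaveEvents) local.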

View File

@ -652,12 +652,9 @@ export class Database implements DatabaseInterface {
}
async getBlockEvents (blockHash: string): Promise<Event[]> {
return this._conn.getRepository(Event)
.createQueryBuilder('event')
.innerJoinAndSelect('event.block', 'block')
.where('block_hash = :blockHash', { blockHash })
.addOrderBy('event.id', 'ASC')
.getMany();
const repo = this._conn.getRepository(Event);
return this._baseDatabase.getBlockEvents(repo, blockHash);
}
async getEvents ({ blockHash, token }: { blockHash: string, token: string }): Promise<Event[]> {
@ -686,43 +683,10 @@ export class Database implements DatabaseInterface {
}
async saveEvents (queryRunner: QueryRunner, block: DeepPartial<BlockProgress>, events: DeepPartial<Event>[]): Promise<void> {
const {
blockHash,
blockNumber,
blockTimestamp,
parentHash
} = block;
const blockRepo = queryRunner.manager.getRepository(BlockProgress);
const eventRepo = queryRunner.manager.getRepository(Event);
assert(blockHash);
assert(blockNumber);
assert(blockTimestamp);
assert(parentHash);
// In a transaction:
// (1) Save all the events in the database.
// (2) Add an entry to the block progress table.
const numEvents = events.length;
const blockProgressRepo = queryRunner.manager.getRepository(BlockProgress);
let blockProgress = await blockProgressRepo.findOne({ where: { blockHash } });
if (!blockProgress) {
const entity = blockProgressRepo.create({
blockHash,
parentHash,
blockNumber,
blockTimestamp,
numEvents,
numProcessedEvents: 0,
lastProcessedEventIndex: -1,
isComplete: (numEvents === 0)
});
blockProgress = await blockProgressRepo.save(entity);
// Bulk insert events.
events.forEach(event => { event.block = blockProgress; });
await queryRunner.manager.createQueryBuilder().insert().into(Event).values(events).execute();
}
return this._baseDatabase.saveEvents(blockRepo, eventRepo, block, events);
}
async updateSyncStatusIndexedBlock (queryRunner: QueryRunner, blockHash: string, blockNumber: number): Promise<SyncStatus> {
@ -745,11 +709,14 @@ export class Database implements DatabaseInterface {
async getSyncStatus (queryRunner: QueryRunner): Promise<SyncStatus | undefined> {
const repo = queryRunner.manager.getRepository(SyncStatus);
return repo.findOne();
return this._baseDatabase.getSyncStatus(repo);
}
async getEvent (id: string): Promise<Event | undefined> {
return this._conn.getRepository(Event).findOne(id, { relations: ['block'] });
const repo = this._conn.getRepository(Event);
return this._baseDatabase.getEvent(repo, id);
}
async getBlocksAtHeight (height: number, isPruned: boolean): Promise<BlockProgress[]> {
@ -766,25 +733,13 @@ export class Database implements DatabaseInterface {
async getBlockProgress (blockHash: string): Promise<BlockProgress | undefined> {
const repo = this._conn.getRepository(BlockProgress);
return repo.findOne({ where: { blockHash } });
return this._baseDatabase.getBlockProgress(repo, blockHash);
}
async updateBlockProgress (queryRunner: QueryRunner, blockHash: string, lastProcessedEventIndex: number): Promise<void> {
const repo = queryRunner.manager.getRepository(BlockProgress);
const entity = await repo.findOne({ where: { blockHash } });
if (entity && !entity.isComplete) {
if (lastProcessedEventIndex <= entity.lastProcessedEventIndex) {
throw new Error(`Events processed out of order ${blockHash}, was ${entity.lastProcessedEventIndex}, got ${lastProcessedEventIndex}`);
}
entity.lastProcessedEventIndex = lastProcessedEventIndex;
entity.numProcessedEvents++;
if (entity.numProcessedEvents >= entity.numEvents) {
entity.isComplete = true;
}
await repo.save(entity);
}
return this._baseDatabase.updateBlockProgress(repo, blockHash, lastProcessedEventIndex);
}
async _getPrevEntityVersion<Entity> (queryRunner: QueryRunner, repo: Repository<Entity>, findOptions: { [key: string]: any }): Promise<Entity | undefined> {

View File

@ -74,7 +74,7 @@ export const main = async (): Promise<any> => {
// Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
// Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
const pubsub = new PubSub();
const indexer = new Indexer(db, uniClient, erc20Client, ethClient, config);
const indexer = new Indexer(db, uniClient, erc20Client, ethClient);
assert(jobQueueConfig, 'Missing job queue config');
const { dbConnectionString, maxCompletionLag } = jobQueueConfig;

View File

@ -11,7 +11,7 @@ import { utils } from 'ethers';
import { Client as UniClient } from '@vulcanize/uni-watcher';
import { Client as ERC20Client } from '@vulcanize/erc20-watcher';
import { EthClient } from '@vulcanize/ipld-eth-client';
import { Config, IndexerInterface, wait, Indexer as BaseIndexer } from '@vulcanize/util';
import { IndexerInterface, Indexer as BaseIndexer } from '@vulcanize/util';
import { findEthPerToken, getEthPriceInUSD, getTrackedAmountUSD, sqrtPriceX96ToTokenPrices, WHITELIST_TOKENS } from './utils/pricing';
import { updatePoolDayData, updatePoolHourData, updateTokenDayData, updateTokenHourData, updateUniswapDayData } from './utils/interval-updates';
@ -49,10 +49,9 @@ export class Indexer implements IndexerInterface {
_uniClient: UniClient
_erc20Client: ERC20Client
_ethClient: EthClient
_config: Config
_baseIndexer: BaseIndexer
constructor (db: Database, uniClient: UniClient, erc20Client: ERC20Client, ethClient: EthClient, config: Config) {
constructor (db: Database, uniClient: UniClient, erc20Client: ERC20Client, ethClient: EthClient) {
assert(db);
assert(uniClient);
assert(erc20Client);
@ -62,8 +61,7 @@ export class Indexer implements IndexerInterface {
this._uniClient = uniClient;
this._erc20Client = erc20Client;
this._ethClient = ethClient;
this._config = config;
this._baseIndexer = new BaseIndexer(this._db);
this._baseIndexer = new BaseIndexer(this._db, this._ethClient);
}
getResultEvent (event: Event): ResultEvent {
@ -92,29 +90,12 @@ export class Indexer implements IndexerInterface {
};
}
// Note: Some event names might be unknown at this point, as earlier events might not yet be processed.
async getOrFetchBlockEvents (block: DeepPartial<BlockProgress>): Promise<Array<Event>> {
assert(block.blockHash);
const blockProgress = await this._db.getBlockProgress(block.blockHash);
if (!blockProgress) {
const { jobQueue: { jobDelay } } = this._config;
assert(jobDelay);
// Delay to allow uni-watcher to process block.
await wait(jobDelay);
// Fetch and save events first and make a note in the event sync progress table.
await this._fetchAndSaveEvents(block);
log('getBlockEvents: db miss, fetching from upstream server');
}
const events = await this._db.getBlockEvents(block.blockHash);
log(`getBlockEvents: db hit, num events: ${events.length}`);
return events;
return this._baseIndexer.getOrFetchBlockEvents(block, this._fetchAndSaveEvents.bind(this));
}
async getBlockEvents (blockHash: string): Promise<Array<Event>> {
return this._db.getBlockEvents(blockHash);
return this._baseIndexer.getBlockEvents(blockHash);
}
async processEvent (dbEvent: Event): Promise<void> {
@ -191,25 +172,11 @@ export class Indexer implements IndexerInterface {
}
async getSyncStatus (): Promise<SyncStatus | undefined> {
const dbTx = await this._db.createTransactionRunner();
let res;
try {
res = await this._db.getSyncStatus(dbTx);
await dbTx.commitTransaction();
} catch (error) {
await dbTx.rollbackTransaction();
throw error;
} finally {
await dbTx.release();
}
return res;
return this._baseIndexer.getSyncStatus();
}
async getBlock (blockHash: string): Promise<any> {
const { block } = await this._ethClient.getLogs({ blockHash });
return block;
return this._baseIndexer.getBlock(blockHash);
}
async getBlocks (where: { [key: string]: any } = {}, queryOptions: QueryOptions): Promise<any> {
@ -237,11 +204,11 @@ export class Indexer implements IndexerInterface {
}
async getEvent (id: string): Promise<Event | undefined> {
return this._db.getEvent(id);
return this._baseIndexer.getEvent(id);
}
async getBlockProgress (blockHash: string): Promise<BlockProgress | undefined> {
return this._db.getBlockProgress(blockHash);
return this._baseIndexer.getBlockProgress(blockHash);
}
async getBlocksAtHeight (height: number, isPruned: boolean): Promise<BlockProgress[]> {
@ -257,20 +224,7 @@ export class Indexer implements IndexerInterface {
}
async updateBlockProgress (blockHash: string, lastProcessedEventIndex: number): Promise<void> {
const dbTx = await this._db.createTransactionRunner();
let res;
try {
res = await this._db.updateBlockProgress(dbTx, blockHash, lastProcessedEventIndex);
await dbTx.commitTransaction();
} catch (error) {
await dbTx.rollbackTransaction();
throw error;
} finally {
await dbTx.release();
}
return res;
return this._baseIndexer.updateBlockProgress(blockHash, lastProcessedEventIndex);
}
async getBundle (id: string, block: BlockHeight): Promise<Bundle | undefined> {

View File

@ -10,7 +10,7 @@ import debug from 'debug';
import { Client as ERC20Client } from '@vulcanize/erc20-watcher';
import { Client as UniClient } from '@vulcanize/uni-watcher';
import { getConfig, JobQueue, QUEUE_BLOCK_PROCESSING, QUEUE_EVENT_PROCESSING, QUEUE_CHAIN_PRUNING, JobRunner as BaseJobRunner } from '@vulcanize/util';
import { getConfig, JobQueue, QUEUE_BLOCK_PROCESSING, QUEUE_EVENT_PROCESSING, QUEUE_CHAIN_PRUNING, JobRunner as BaseJobRunner, wait, JobQueueConfig } from '@vulcanize/util';
import { getCache } from '@vulcanize/cache';
import { EthClient } from '@vulcanize/ipld-eth-client';
@ -23,10 +23,12 @@ export class JobRunner {
_indexer: Indexer
_jobQueue: JobQueue
_baseJobRunner: BaseJobRunner
_jobQueueConfig: JobQueueConfig
constructor (indexer: Indexer, jobQueue: JobQueue) {
constructor (jobQueueConfig: JobQueueConfig, indexer: Indexer, jobQueue: JobQueue) {
this._indexer = indexer;
this._jobQueue = jobQueue;
this._jobQueueConfig = jobQueueConfig;
this._baseJobRunner = new BaseJobRunner(this._indexer, this._jobQueue);
}
@ -47,6 +49,10 @@ export class JobRunner {
const blockProgress = await this._indexer.getBlockProgress(blockHash);
if (!blockProgress) {
const { jobDelay } = this._jobQueueConfig;
assert(jobDelay);
// Delay to allow uni-watcher to process block.
await wait(jobDelay);
const events = await this._indexer.getOrFetchBlockEvents({ blockHash, blockNumber, parentHash, blockTimestamp: timestamp });
for (let ei = 0; ei < events.length; ei++) {
@ -120,7 +126,7 @@ export const main = async (): Promise<any> => {
const erc20Client = new ERC20Client(tokenWatcher);
const indexer = new Indexer(db, uniClient, erc20Client, ethClient, config);
const indexer = new Indexer(db, uniClient, erc20Client, ethClient);
assert(jobQueueConfig, 'Missing job queue config');
@ -131,7 +137,7 @@ export const main = async (): Promise<any> => {
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag });
await jobQueue.start();
const jobRunner = new JobRunner(indexer, jobQueue);
const jobRunner = new JobRunner(jobQueueConfig, indexer, jobQueue);
await jobRunner.start();
};

View File

@ -74,7 +74,7 @@ export const main = async (): Promise<any> => {
const uniClient = new UniClient(uniWatcher);
const erc20Client = new ERC20Client(tokenWatcher);
const indexer = new Indexer(db, uniClient, erc20Client, ethClient, config);
const indexer = new Indexer(db, uniClient, erc20Client, ethClient);
assert(jobQueueConfig, 'Missing job queue config');

View File

@ -60,7 +60,7 @@ describe('chain pruning', () => {
cache
});
indexer = new Indexer(config, db, ethClient, postgraphileClient);
indexer = new Indexer(db, ethClient, postgraphileClient);
assert(indexer, 'Could not create indexer object.');
const jobQueue = new JobQueue(jobQueueConfig);

View File

@ -37,12 +37,9 @@ export class Database implements DatabaseInterface {
}
async getBlockEvents (blockHash: string): Promise<Event[]> {
return this._conn.getRepository(Event)
.createQueryBuilder('event')
.innerJoinAndSelect('event.block', 'block')
.where('block_hash = :blockHash', { blockHash })
.addOrderBy('event.id', 'ASC')
.getMany();
const repo = this._conn.getRepository(Event);
return this._baseDatabase.getBlockEvents(repo, blockHash);
}
async getProcessedBlockCountForRange (fromBlockNumber: number, toBlockNumber: number): Promise<{ expected: number, actual: number }> {
@ -75,45 +72,10 @@ export class Database implements DatabaseInterface {
}
async saveEvents (queryRunner: QueryRunner, block: DeepPartial<BlockProgress>, events: DeepPartial<Event>[]): Promise<void> {
const {
blockHash,
blockNumber,
blockTimestamp,
parentHash
} = block;
const blockRepo = queryRunner.manager.getRepository(BlockProgress);
const eventRepo = queryRunner.manager.getRepository(Event);
assert(blockHash);
assert(blockNumber);
assert(blockTimestamp);
assert(parentHash);
// In a transaction:
// (1) Save all the events in the database.
// (2) Add an entry to the block progress table.
const numEvents = events.length;
const blockProgressRepo = queryRunner.manager.getRepository(BlockProgress);
let blockProgress = await blockProgressRepo.findOne({ where: { blockHash } });
if (!blockProgress) {
const entity = blockProgressRepo.create({
blockHash,
parentHash,
blockNumber,
blockTimestamp,
numEvents,
numProcessedEvents: 0,
lastProcessedEventIndex: -1,
isComplete: (numEvents === 0)
});
blockProgress = await blockProgressRepo.save(entity);
// Bulk insert events.
events.forEach(event => {
event.block = blockProgress;
});
await queryRunner.manager.createQueryBuilder().insert().into(Event).values(events).execute();
}
return this._baseDatabase.saveEvents(blockRepo, eventRepo, block, events);
}
async updateSyncStatusIndexedBlock (queryRunner: QueryRunner, blockHash: string, blockNumber: number): Promise<SyncStatus> {
@ -134,13 +96,16 @@ export class Database implements DatabaseInterface {
return this._baseDatabase.updateSyncStatusChainHead(repo, blockHash, blockNumber);
}
async getSyncStatus (): Promise<SyncStatus | undefined> {
const repo = this._conn.getRepository(SyncStatus);
return repo.findOne();
async getSyncStatus (queryRunner: QueryRunner): Promise<SyncStatus | undefined> {
const repo = queryRunner.manager.getRepository(SyncStatus);
return this._baseDatabase.getSyncStatus(repo);
}
async getEvent (id: string): Promise<Event | undefined> {
return this._conn.getRepository(Event).findOne(id, { relations: ['block'] });
const repo = this._conn.getRepository(Event);
return this._baseDatabase.getEvent(repo, id);
}
async saveEventEntity (queryRunner: QueryRunner, entity: Event): Promise<Event> {
@ -191,25 +156,13 @@ export class Database implements DatabaseInterface {
async getBlockProgress (blockHash: string): Promise<BlockProgress | undefined> {
const repo = this._conn.getRepository(BlockProgress);
return repo.findOne({ where: { blockHash } });
return this._baseDatabase.getBlockProgress(repo, blockHash);
}
async updateBlockProgress (queryRunner: QueryRunner, blockHash: string, lastProcessedEventIndex: number): Promise<void> {
const repo = queryRunner.manager.getRepository(BlockProgress);
const entity = await repo.findOne({ where: { blockHash } });
if (entity && !entity.isComplete) {
if (lastProcessedEventIndex <= entity.lastProcessedEventIndex) {
throw new Error(`Events processed out of order ${blockHash}, was ${entity.lastProcessedEventIndex}, got ${lastProcessedEventIndex}`);
}
entity.lastProcessedEventIndex = lastProcessedEventIndex;
entity.numProcessedEvents++;
if (entity.numProcessedEvents >= entity.numEvents) {
entity.isComplete = true;
}
await repo.save(entity);
}
return this._baseDatabase.updateBlockProgress(repo, blockHash, lastProcessedEventIndex);
}
async getEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindConditions<Entity>): Promise<Entity[]> {

View File

@ -74,7 +74,7 @@ export const main = async (): Promise<any> => {
// Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
// Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
const pubsub = new PubSub();
const indexer = new Indexer(config, db, ethClient, postgraphileClient);
const indexer = new Indexer(db, ethClient, postgraphileClient);
const { dbConnectionString, maxCompletionLag } = jobQueueConfig;
assert(dbConnectionString, 'Missing job queue db connection string');

View File

@ -10,7 +10,7 @@ import assert from 'assert';
import { EthClient } from '@vulcanize/ipld-eth-client';
import { GetStorageAt, getStorageValue, StorageLayout } from '@vulcanize/solidity-mapper';
import { Config, IndexerInterface, Indexer as BaseIndexer } from '@vulcanize/util';
import { IndexerInterface, Indexer as BaseIndexer } from '@vulcanize/util';
import { Database } from './database';
import { Event, UNKNOWN_EVENT_NAME } from './entity/Event';
@ -47,7 +47,6 @@ interface ValueResult {
}
export class Indexer implements IndexerInterface {
_config: Config;
_db: Database
_ethClient: EthClient
_postgraphileClient: EthClient
@ -58,13 +57,12 @@ export class Indexer implements IndexerInterface {
_poolContract: ethers.utils.Interface
_nfpmContract: ethers.utils.Interface
constructor (config: Config, db: Database, ethClient: EthClient, postgraphileClient: EthClient) {
this._config = config;
constructor (db: Database, ethClient: EthClient, postgraphileClient: EthClient) {
this._db = db;
this._ethClient = ethClient;
this._postgraphileClient = postgraphileClient;
this._getStorageAt = this._ethClient.getStorageAt.bind(this._ethClient);
this._baseIndexer = new BaseIndexer(this._db);
this._baseIndexer = new BaseIndexer(this._db, this._ethClient);
this._factoryContract = new ethers.utils.Interface(factoryABI);
this._poolContract = new ethers.utils.Interface(poolABI);
@ -106,22 +104,11 @@ export class Indexer implements IndexerInterface {
// Note: Some event names might be unknown at this point, as earlier events might not yet be processed.
async getOrFetchBlockEvents (block: DeepPartial<BlockProgress>): Promise<Array<Event>> {
assert(block.blockHash);
const blockProgress = await this._db.getBlockProgress(block.blockHash);
if (!blockProgress) {
// Fetch and save events first and make a note in the event sync progress table.
log(`getBlockEvents: db miss, fetching from upstream server ${block.blockHash}`);
await this.fetchAndSaveEvents(block);
}
const events = await this._db.getBlockEvents(block.blockHash);
log(`getBlockEvents: db hit, ${block.blockHash} num events: ${events.length}`);
return events;
return this._baseIndexer.getOrFetchBlockEvents(block, this._fetchAndSaveEvents.bind(this));
}
async getBlockEvents (blockHash: string): Promise<Array<Event>> {
return this._db.getBlockEvents(blockHash);
return this._baseIndexer.getBlockEvents(blockHash);
}
async getEventsByFilter (blockHash: string, contract: string, name: string | null): Promise<Array<Event>> {
@ -317,7 +304,7 @@ export class Indexer implements IndexerInterface {
return { eventName, eventInfo };
}
async fetchAndSaveEvents ({ blockHash }: DeepPartial<BlockProgress>): Promise<void> {
async _fetchAndSaveEvents ({ blockHash }: DeepPartial<BlockProgress>): Promise<void> {
assert(blockHash);
let { block, logs } = await this._ethClient.getLogs({ blockHash });
@ -422,16 +409,15 @@ export class Indexer implements IndexerInterface {
}
async getSyncStatus (): Promise<SyncStatus | undefined> {
return this._db.getSyncStatus();
return this._baseIndexer.getSyncStatus();
}
async getBlock (blockHash: string): Promise<any> {
const { block } = await this._ethClient.getLogs({ blockHash });
return block;
return this._baseIndexer.getBlock(blockHash);
}
async getEvent (id: string): Promise<Event | undefined> {
return this._db.getEvent(id);
return this._baseIndexer.getEvent(id);
}
async saveEventEntity (dbEvent: Event): Promise<Event> {
@ -452,7 +438,7 @@ export class Indexer implements IndexerInterface {
}
async getBlockProgress (blockHash: string): Promise<BlockProgress | undefined> {
return this._db.getBlockProgress(blockHash);
return this._baseIndexer.getBlockProgress(blockHash);
}
async getBlocksAtHeight (height: number, isPruned: boolean): Promise<BlockProgress[]> {
@ -468,20 +454,7 @@ export class Indexer implements IndexerInterface {
}
async updateBlockProgress (blockHash: string, lastProcessedEventIndex: number): Promise<void> {
const dbTx = await this._db.createTransactionRunner();
let res;
try {
res = await this._db.updateBlockProgress(dbTx, blockHash, lastProcessedEventIndex);
await dbTx.commitTransaction();
} catch (error) {
await dbTx.rollbackTransaction();
throw error;
} finally {
await dbTx.release();
}
return res;
return this._baseIndexer.updateBlockProgress(blockHash, lastProcessedEventIndex);
}
async getProcessedBlockCountForRange (fromBlockNumber: number, toBlockNumber: number): Promise<{ expected: number, actual: number }> {

View File

@ -126,7 +126,7 @@ export const main = async (): Promise<any> => {
cache
});
const indexer = new Indexer(config, db, ethClient, postgraphileClient);
const indexer = new Indexer(db, ethClient, postgraphileClient);
assert(jobQueueConfig, 'Missing job queue config');

View File

@ -69,7 +69,7 @@ export const main = async (): Promise<any> => {
// Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
// Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
const pubsub = new PubSub();
const indexer = new Indexer(config, db, ethClient, postgraphileClient);
const indexer = new Indexer(db, ethClient, postgraphileClient);
assert(jobQueueConfig, 'Missing job queue config');

View File

@ -123,7 +123,7 @@ describe('uni-watcher', () => {
factory = new Contract(factoryContract.address, FACTORY_ABI, signer);
// Verifying with the db.
const indexer = new Indexer(config, db, ethClient, postgraphileClient);
const indexer = new Indexer(db, ethClient, postgraphileClient);
assert(await indexer.isUniswapContract(factory.address), 'Factory contract not added to the database.');
});
@ -258,7 +258,7 @@ describe('uni-watcher', () => {
nfpm = new Contract(nfpmContract.address, NFPM_ABI, signer);
// Verifying with the db.
const indexer = new Indexer(config, db, ethClient, postgraphileClient);
const indexer = new Indexer(db, ethClient, postgraphileClient);
assert(await indexer.isUniswapContract(nfpm.address), 'NFPM contract not added to the database.');
});

View File

@ -3,10 +3,10 @@
//
import assert from 'assert';
import { Connection, ConnectionOptions, createConnection, FindConditions, QueryRunner, Repository } from 'typeorm';
import { Connection, ConnectionOptions, createConnection, DeepPartial, FindConditions, QueryRunner, Repository } from 'typeorm';
import { SnakeNamingStrategy } from 'typeorm-naming-strategies';
import { BlockProgressInterface, SyncStatusInterface } from './types';
import { BlockProgressInterface, EventInterface, SyncStatusInterface } from './types';
export class Database {
_config: ConnectionOptions
@ -39,6 +39,10 @@ export class Database {
return queryRunner;
}
async getSyncStatus (repo: Repository<SyncStatusInterface>): Promise<SyncStatusInterface | undefined> {
return repo.findOne();
}
async updateSyncStatusIndexedBlock (repo: Repository<SyncStatusInterface>, blockHash: string, blockNumber: number): Promise<SyncStatusInterface> {
const entity = await repo.findOne();
assert(entity);
@ -84,17 +88,92 @@ export class Database {
return await repo.save(entity);
}
async getBlockProgress (repo: Repository<BlockProgressInterface>, blockHash: string): Promise<BlockProgressInterface | undefined> {
return repo.findOne({ where: { blockHash } });
}
async getBlocksAtHeight (repo: Repository<BlockProgressInterface>, height: number, isPruned: boolean): Promise<BlockProgressInterface[]> {
return repo.createQueryBuilder('block_progress')
.where('block_number = :height AND is_pruned = :isPruned', { height, isPruned })
.getMany();
}
async updateBlockProgress (repo: Repository<BlockProgressInterface>, blockHash: string, lastProcessedEventIndex: number): Promise<void> {
const entity = await repo.findOne({ where: { blockHash } });
if (entity && !entity.isComplete) {
if (lastProcessedEventIndex <= entity.lastProcessedEventIndex) {
throw new Error(`Events processed out of order ${blockHash}, was ${entity.lastProcessedEventIndex}, got ${lastProcessedEventIndex}`);
}
entity.lastProcessedEventIndex = lastProcessedEventIndex;
entity.numProcessedEvents++;
if (entity.numProcessedEvents >= entity.numEvents) {
entity.isComplete = true;
}
await repo.save(entity);
}
}
async markBlockAsPruned (repo: Repository<BlockProgressInterface>, block: BlockProgressInterface): Promise<BlockProgressInterface> {
block.isPruned = true;
return repo.save(block);
}
async getEvent (repo: Repository<EventInterface>, id: string): Promise<EventInterface | undefined> {
return repo.findOne(id, { relations: ['block'] });
}
async getBlockEvents (repo: Repository<EventInterface>, blockHash: string): Promise<EventInterface[]> {
return repo.createQueryBuilder('event')
.innerJoinAndSelect('event.block', 'block')
.where('block_hash = :blockHash', { blockHash })
.addOrderBy('event.id', 'ASC')
.getMany();
}
async saveEvents (blockRepo: Repository<BlockProgressInterface>, eventRepo: Repository<EventInterface>, block: DeepPartial<BlockProgressInterface>, events: DeepPartial<EventInterface>[]): Promise<void> {
const {
blockHash,
blockNumber,
blockTimestamp,
parentHash
} = block;
assert(blockHash);
assert(blockNumber);
assert(blockTimestamp);
assert(parentHash);
// In a transaction:
// (1) Save all the events in the database.
// (2) Add an entry to the block progress table.
const numEvents = events.length;
let blockProgress = await blockRepo.findOne({ where: { blockHash } });
if (!blockProgress) {
const entity = blockRepo.create({
blockHash,
parentHash,
blockNumber,
blockTimestamp,
numEvents,
numProcessedEvents: 0,
lastProcessedEventIndex: -1,
isComplete: (numEvents === 0)
});
blockProgress = await blockRepo.save(entity);
// Bulk insert events.
events.forEach(event => {
event.block = blockProgress;
});
await eventRepo.createQueryBuilder().insert().values(events).execute();
}
}
async getEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindConditions<Entity>): Promise<Entity[]> {
const repo = queryRunner.manager.getRepository(entity);
@ -102,27 +181,27 @@ export class Database {
return entities;
}
async isEntityEmpty<Entity> (entity: new () => Entity): Promise<boolean> {
const queryRunner = this._conn.createQueryRunner();
try {
await queryRunner.connect();
const data = await this.getEntities(queryRunner, entity);
if (data.length > 0) {
return false;
}
return true;
} finally {
await queryRunner.release();
}
}
async removeEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindConditions<Entity>): Promise<void> {
const repo = queryRunner.manager.getRepository(entity);
const entities = await repo.find(findConditions);
await repo.remove(entities);
}
async isEntityEmpty<Entity> (entity: new () => Entity): Promise<boolean> {
const dbTx = await this.createTransactionRunner();
try {
const data = await this.getEntities(dbTx, entity);
if (data.length > 0) {
return false;
}
return true;
} catch (error) {
await dbTx.rollbackTransaction();
throw error;
} finally {
await dbTx.release();
}
}
}

View File

@ -3,14 +3,39 @@
//
import assert from 'assert';
import { DeepPartial } from 'typeorm';
import debug from 'debug';
import { BlockProgressInterface, DatabaseInterface, SyncStatusInterface } from './types';
import { EthClient } from '@vulcanize/ipld-eth-client';
import { BlockProgressInterface, DatabaseInterface, EventInterface, SyncStatusInterface } from './types';
const log = debug('vulcanize:indexer');
export class Indexer {
_db: DatabaseInterface;
_ethClient: EthClient;
constructor (db: DatabaseInterface) {
constructor (db: DatabaseInterface, ethClient: EthClient) {
this._db = db;
this._ethClient = ethClient;
}
async getSyncStatus (): Promise<SyncStatusInterface | undefined> {
const dbTx = await this._db.createTransactionRunner();
let res;
try {
res = await this._db.getSyncStatus(dbTx);
await dbTx.commitTransaction();
} catch (error) {
await dbTx.rollbackTransaction();
throw error;
} finally {
await dbTx.release();
}
return res;
}
async updateSyncStatusIndexedBlock (blockHash: string, blockNumber: number): Promise<SyncStatusInterface> {
@ -64,6 +89,15 @@ export class Indexer {
return res;
}
async getBlock (blockHash: string): Promise<any> {
const { block } = await this._ethClient.getLogs({ blockHash });
return block;
}
async getBlockProgress (blockHash: string): Promise<BlockProgressInterface | undefined> {
return this._db.getBlockProgress(blockHash);
}
async getBlocksAtHeight (height: number, isPruned: boolean): Promise<BlockProgressInterface[]> {
return this._db.getBlocksAtHeight(height, isPruned);
}
@ -111,4 +145,44 @@ export class Indexer {
return res;
}
async updateBlockProgress (blockHash: string, lastProcessedEventIndex: number): Promise<void> {
const dbTx = await this._db.createTransactionRunner();
let res;
try {
res = await this._db.updateBlockProgress(dbTx, blockHash, lastProcessedEventIndex);
await dbTx.commitTransaction();
} catch (error) {
await dbTx.rollbackTransaction();
throw error;
} finally {
await dbTx.release();
}
return res;
}
async getEvent (id: string): Promise<EventInterface | undefined> {
return this._db.getEvent(id);
}
async getOrFetchBlockEvents (block: DeepPartial<BlockProgressInterface>, fetchAndSaveEvents: (block: DeepPartial<BlockProgressInterface>) => Promise<void>): Promise<Array<EventInterface>> {
assert(block.blockHash);
const blockProgress = await this._db.getBlockProgress(block.blockHash);
if (!blockProgress) {
// Fetch and save events first and make a note in the event sync progress table.
log(`getBlockEvents: db miss, fetching from upstream server ${block.blockHash}`);
await fetchAndSaveEvents(block);
}
const events = await this._db.getBlockEvents(block.blockHash);
log(`getBlockEvents: db hit, ${block.blockHash} num events: ${events.length}`);
return events;
}
async getBlockEvents (blockHash: string): Promise<Array<EventInterface>> {
return this._db.getBlockEvents(blockHash);
}
}

View File

@ -64,7 +64,11 @@ export interface DatabaseInterface {
createTransactionRunner(): Promise<QueryRunner>;
getBlocksAtHeight (height: number, isPruned: boolean): Promise<BlockProgressInterface[]>;
getBlockProgress (blockHash: string): Promise<BlockProgressInterface | undefined>;
getBlockEvents (blockHash: string): Promise<EventInterface[]>;
getEvent (id: string): Promise<EventInterface | undefined>
getSyncStatus (queryRunner: QueryRunner): Promise<SyncStatusInterface | undefined>
markBlockAsPruned (queryRunner: QueryRunner, block: BlockProgressInterface): Promise<BlockProgressInterface>;
updateBlockProgress (queryRunner: QueryRunner, blockHash: string, lastProcessedEventIndex: number): Promise<void>
updateSyncStatusIndexedBlock (queryRunner: QueryRunner, blockHash: string, blockNumber: number): Promise<SyncStatusInterface>;
updateSyncStatusChainHead (queryRunner: QueryRunner, blockHash: string, blockNumber: number): Promise<SyncStatusInterface>;
updateSyncStatusCanonicalBlock (queryRunner: QueryRunner, blockHash: string, blockNumber: number): Promise<SyncStatusInterface>;