Mirror of https://github.com/cerc-io/watcher-ts, synced 2024-11-19 20:36:19 +00:00
Tests for pruning abandoned branches (#223)

* Create dummy test blocks and refactor test code.
* Initial test for pruning a block in a chain without branches.
* Add test for pruning with multiple branches.
* Add test for pruning at frothy region.
* Add test for pruning a block at depth greater than max reorg depth.

Co-authored-by: prathamesh0 <prathamesh.musale0@gmail.com>
parent 35068b2c3d
commit 8489b5632a
@@ -5,7 +5,7 @@
 import assert from 'assert';
 import { Brackets, Connection, ConnectionOptions, DeepPartial, FindConditions, FindOneOptions, LessThanOrEqual, QueryRunner, Repository } from 'typeorm';
 
-import { MAX_REORG_DEPTH, Database as BaseDatabase } from '@vulcanize/util';
+import { MAX_REORG_DEPTH, Database as BaseDatabase, DatabaseInterface } from '@vulcanize/util';
 
 import { EventSyncProgress } from './entity/EventProgress';
 import { Factory } from './entity/Factory';
@@ -69,7 +69,7 @@ interface Where {
   }]
 }
 
-export class Database {
+export class Database implements DatabaseInterface {
   _config: ConnectionOptions
   _conn!: Connection
   _baseDatabase: BaseDatabase
@@ -443,7 +443,7 @@ export class Database {
     return entity;
   }
 
-  async getEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, block: BlockHeight, where: Where = {}, queryOptions: QueryOptions = {}, relations: string[] = []): Promise<Entity[]> {
+  async getUniswapEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, block: BlockHeight, where: Where = {}, queryOptions: QueryOptions = {}, relations: string[] = []): Promise<Entity[]> {
     const repo = queryRunner.manager.getRepository(entity);
     const { tableName } = repo.metadata;
 
@@ -892,4 +892,16 @@ export class Database {
 
     return { canonicalBlockNumber, blockHashes };
   }
+
+  async getEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindConditions<Entity>): Promise<Entity[]> {
+    return this._baseDatabase.getEntities(queryRunner, entity, findConditions);
+  }
+
+  async removeEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindConditions<Entity>): Promise<void> {
+    return this._baseDatabase.removeEntities(queryRunner, entity, findConditions);
+  }
+
+  async isEntityEmpty<Entity> (entity: new () => Entity): Promise<boolean> {
+    return this._baseDatabase.isEntityEmpty(entity);
+  }
 }
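The pattern in this change recurs across the watchers in this commit: each watcher Database now implements the shared DatabaseInterface and delegates the generic entity helpers to BaseDatabase, which is why the watcher-specific query method above is renamed to getUniswapEntities. A minimal sketch of the delegation shape, with simplified signatures (EntityClass, MinimalDatabaseInterface and the class names here are illustrative stand-ins, not the actual @vulcanize/util types):

import { FindConditions, QueryRunner } from 'typeorm';

type EntityClass<T> = new () => T;

// Simplified stand-in for the shared interface in @vulcanize/util.
interface MinimalDatabaseInterface {
  getEntities<Entity> (queryRunner: QueryRunner, entity: EntityClass<Entity>, findConditions?: FindConditions<Entity>): Promise<Entity[]>;
  removeEntities<Entity> (queryRunner: QueryRunner, entity: EntityClass<Entity>, findConditions?: FindConditions<Entity>): Promise<void>;
}

// Generic helpers implemented once against TypeORM repositories.
class SharedBaseDatabase implements MinimalDatabaseInterface {
  async getEntities<Entity> (queryRunner: QueryRunner, entity: EntityClass<Entity>, findConditions?: FindConditions<Entity>): Promise<Entity[]> {
    return queryRunner.manager.getRepository(entity).find(findConditions);
  }

  async removeEntities<Entity> (queryRunner: QueryRunner, entity: EntityClass<Entity>, findConditions?: FindConditions<Entity>): Promise<void> {
    const repo = queryRunner.manager.getRepository(entity);
    await repo.remove(await repo.find(findConditions));
  }
}

// Each watcher database keeps its own methods and forwards the generic ones.
class WatcherDatabase implements MinimalDatabaseInterface {
  _baseDatabase = new SharedBaseDatabase();

  async getEntities<Entity> (queryRunner: QueryRunner, entity: EntityClass<Entity>, findConditions?: FindConditions<Entity>): Promise<Entity[]> {
    return this._baseDatabase.getEntities(queryRunner, entity, findConditions);
  }

  async removeEntities<Entity> (queryRunner: QueryRunner, entity: EntityClass<Entity>, findConditions?: FindConditions<Entity>): Promise<void> {
    return this._baseDatabase.removeEntities(queryRunner, entity, findConditions);
  }
}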
@@ -4,8 +4,10 @@
 
 import { Entity, PrimaryGeneratedColumn, Column } from 'typeorm';
 
+import { SyncStatusInterface } from '@vulcanize/util';
+
 @Entity()
-export class SyncStatus {
+export class SyncStatus implements SyncStatusInterface {
   @PrimaryGeneratedColumn()
   id!: number;
 
@@ -9,16 +9,17 @@ import _ from 'lodash';
 import {
   getConfig
 } from '@vulcanize/util';
+import { removeEntities } from '@vulcanize/util/test';
 
-import { TestDatabase } from '../test/test-db';
-import { createTestBlockTree, insertDummyToken, removeEntities } from '../test/utils';
+import { Database } from './database';
+import { createTestBlockTree, insertDummyToken } from '../test/utils';
 import { Block } from './events';
 import { BlockProgress } from './entity/BlockProgress';
 import { SyncStatus } from './entity/SyncStatus';
 import { Token } from './entity/Token';
 
 describe('getPrevEntityVersion', () => {
-  let db: TestDatabase;
+  let db: Database;
   let blocks: Block[][];
   let tail: Block;
   let head: Block;
@@ -33,11 +34,15 @@ describe('getPrevEntityVersion', () => {
     assert(dbConfig, 'Missing dbConfig.');
 
     // Initialize database.
-    db = new TestDatabase(dbConfig);
+    db = new Database(dbConfig);
     await db.init();
 
     // Check if database is empty.
-    isDbEmptyBeforeTest = await db.isEmpty();
+    const isBlockProgressEmpty = await db.isEntityEmpty(BlockProgress);
+    const isTokenEmpty = await db.isEntityEmpty(Token);
+    const isSyncStatusEmpty = await db.isEntityEmpty(SyncStatus);
+    isDbEmptyBeforeTest = isBlockProgressEmpty && isTokenEmpty && isSyncStatusEmpty;
 
     assert(isDbEmptyBeforeTest, 'Abort: Database not empty.');
 
+    // Create BlockProgress test data.
@@ -358,7 +358,7 @@ export class Indexer implements IndexerInterface {
         return acc;
       }, {});
 
-      res = await this._db.getEntities(dbTx, entity, block, where, queryOptions, relations);
+      res = await this._db.getUniswapEntities(dbTx, entity, block, where, queryOptions, relations);
       dbTx.commitTransaction();
     } catch (error) {
       await dbTx.rollbackTransaction();
@@ -570,7 +570,7 @@ export class Indexer implements IndexerInterface {
 
     // TODO: In subgraph factory is fetched by hardcoded factory address.
     // Currently fetching first factory in database as only one exists.
-    const [factory] = await this._db.getEntities(dbTx, Factory, { hash: block.hash }, {}, { limit: 1 });
+    const [factory] = await this._db.getUniswapEntities(dbTx, Factory, { hash: block.hash }, {}, { limit: 1 });
 
     const token0 = pool.token0;
     const token1 = pool.token1;
@@ -715,7 +715,7 @@ export class Indexer implements IndexerInterface {
 
     // TODO: In subgraph factory is fetched by hardcoded factory address.
     // Currently fetching first factory in database as only one exists.
-    const [factory] = await this._db.getEntities(dbTx, Factory, { hash: block.hash }, {}, { limit: 1 });
+    const [factory] = await this._db.getUniswapEntities(dbTx, Factory, { hash: block.hash }, {}, { limit: 1 });
 
     const token0 = pool.token0;
     const token1 = pool.token1;
@@ -842,7 +842,7 @@ export class Indexer implements IndexerInterface {
 
     // TODO: In subgraph factory is fetched by hardcoded factory address.
     // Currently fetching first factory in database as only one exists.
-    const [factory] = await this._db.getEntities(dbTx, Factory, { hash: block.hash }, {}, { limit: 1 });
+    const [factory] = await this._db.getUniswapEntities(dbTx, Factory, { hash: block.hash }, {}, { limit: 1 });
 
     const pool = await this._db.getPool(dbTx, { id: contractAddress, blockHash: block.hash });
     assert(pool);
@@ -26,7 +26,7 @@ export const updateUniswapDayData = async (db: Database, dbTx: QueryRunner, even
 
   // TODO: In subgraph factory is fetched by hardcoded factory address.
   // Currently fetching first factory in database as only one exists.
-  const [factory] = await db.getEntities(dbTx, Factory, { hash: block.hash }, {}, { limit: 1 });
+  const [factory] = await db.getUniswapEntities(dbTx, Factory, { hash: block.hash }, {}, { limit: 1 });
 
   const dayID = Math.floor(block.timestamp / 86400); // Rounded.
   const dayStartTimestamp = dayID * 86400;
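The day-bucket arithmetic in the context lines above groups block timestamps into UTC days; a quick worked example (the timestamp value is illustrative):

// 1627776000 is 2021-08-01T00:00:00Z.
const timestamp = 1627776000;
const dayID = Math.floor(timestamp / 86400); // 18840 days since the Unix epoch
const dayStartTimestamp = dayID * 86400;     // 1627776000, midnight UTC of that day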
@@ -1,36 +0,0 @@
-//
-// Copyright 2021 Vulcanize, Inc.
-//
-
-import { QueryRunner, FindConditions } from 'typeorm';
-
-import { Database } from '../src/database';
-import { BlockProgress } from '../src/entity/BlockProgress';
-import { Token } from '../src/entity/Token';
-
-export class TestDatabase extends Database {
-  async removeEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindConditions<Entity>): Promise<void> {
-    const repo = queryRunner.manager.getRepository(entity);
-
-    const entities = await repo.find(findConditions);
-    await repo.remove(entities);
-  }
-
-  async isEmpty (): Promise<boolean> {
-    const dbTx = await this.createTransactionRunner();
-    try {
-      const dataBP = await this.getEntities(dbTx, BlockProgress, {}, {}, { limit: 1 });
-      const dataToken = await this.getEntities(dbTx, Token, {}, {}, { limit: 1 });
-      const dataSyncStatus = await this.getSyncStatus(dbTx);
-      if (dataBP.length > 0 || dataToken.length > 0 || dataSyncStatus) {
-        return false;
-      }
-      return true;
-    } catch (error) {
-      await dbTx.rollbackTransaction();
-      throw error;
-    } finally {
-      await dbTx.release();
-    }
-  }
-}
@@ -7,7 +7,8 @@ import { ethers } from 'ethers';
 import { request } from 'graphql-request';
 import Decimal from 'decimal.js';
 import _ from 'lodash';
-import { DeepPartial } from 'typeorm';
+
+import { insertNDummyBlocks } from '@vulcanize/util/test';
 
 import {
   queryFactory,
@@ -20,10 +21,9 @@ import {
   queryTokenHourData,
   queryTransactions
 } from '../test/queries';
+import { Database } from '../src/database';
 import { Block } from '../src/events';
 import { Token } from '../src/entity/Token';
-import { BlockProgress } from '../src/entity/BlockProgress';
-import { TestDatabase } from './test-db';
 
 export const checkUniswapDayData = async (endpoint: string): Promise<void> => {
   // Checked values: date, tvlUSD.
@@ -169,68 +169,7 @@ export const fetchTransaction = async (endpoint: string): Promise<{transaction:
   return transaction;
 };
 
-export const insertDummyBlock = async (db: TestDatabase, parentBlock: Block): Promise<Block> => {
-  // Insert a dummy BlockProgress entity after parentBlock.
-
-  const dbTx = await db.createTransactionRunner();
-
-  try {
-    const randomByte = ethers.utils.randomBytes(10);
-    const blockHash = ethers.utils.sha256(randomByte);
-    const blockTimestamp = Math.floor(Date.now() / 1000);
-    const parentHash = parentBlock.hash;
-    const blockNumber = parentBlock.number + 1;
-
-    const block: DeepPartial<BlockProgress> = {
-      blockNumber,
-      blockHash,
-      blockTimestamp,
-      parentHash
-    };
-
-    await db.updateSyncStatusChainHead(dbTx, blockHash, blockNumber);
-    await db.saveEvents(dbTx, block, []);
-
-    await dbTx.commitTransaction();
-
-    return {
-      number: blockNumber,
-      hash: blockHash,
-      timestamp: blockTimestamp,
-      parentHash
-    };
-  } catch (error) {
-    await dbTx.rollbackTransaction();
-    throw error;
-  } finally {
-    await dbTx.release();
-  }
-};
-
-export const insertNDummyBlocks = async (db: TestDatabase, numberOfBlocks: number, parentBlock?: Block): Promise<Block[]> => {
-  // Insert n dummy BlockProgress serially after parentBlock.
-
-  const blocksArray: Block[] = [];
-  if (!parentBlock) {
-    const randomByte = ethers.utils.randomBytes(10);
-    const hash = ethers.utils.sha256(randomByte);
-    parentBlock = {
-      number: 0,
-      hash,
-      timestamp: -1,
-      parentHash: ''
-    };
-  }
-
-  let block = parentBlock;
-  for (let i = 0; i < numberOfBlocks; i++) {
-    block = await insertDummyBlock(db, block);
-    blocksArray.push(block);
-  }
-
-  return blocksArray;
-};
-
-export const createTestBlockTree = async (db: TestDatabase): Promise<Block[][]> => {
+export const createTestBlockTree = async (db: Database): Promise<Block[][]> => {
   // Create BlockProgress test data.
   //
   // +---+
@@ -239,7 +178,7 @@ export const createTestBlockTree = async (db: TestDatabase): Promise<Block[][]>
   //   |
   //   |
   // +---+     +---+
-  // | 20|     | 15|------Token (token44)
+  // | 20|     | 15|
   // +---+     +---+
   //   |      /
   //   |     /
@@ -265,8 +204,8 @@ export const createTestBlockTree = async (db: TestDatabase): Promise<Block[][]>
   //   |
   //   |
   // +---+
-  // tail----->| 1 |------Token (token00)
-  // +---+      (Target)
+  // tail----->| 1 |
+  // +---+
   //
 
   const blocks: Block[][] = [];
@@ -286,7 +225,7 @@ export const createTestBlockTree = async (db: TestDatabase): Promise<Block[][]>
   return blocks;
 };
 
-export const insertDummyToken = async (db: TestDatabase, block: Block, token?: Token): Promise<Token> => {
+export const insertDummyToken = async (db: Database, block: Block, token?: Token): Promise<Token> => {
   // Insert a dummy Token entity at block.
 
   if (!token) {
@@ -314,19 +253,3 @@ export const insertDummyToken = async (db: TestDatabase, block: Block, token?: T
     await dbTx.release();
   }
 };
-
-export async function removeEntities<Entity> (db: TestDatabase, entity: new () => Entity): Promise<void> {
-  // Remove all entries of the specified entity from database.
-
-  const dbTx = await db.createTransactionRunner();
-
-  try {
-    await db.removeEntities(dbTx, entity);
-    dbTx.commitTransaction();
-  } catch (error) {
-    await dbTx.rollbackTransaction();
-    throw error;
-  } finally {
-    await dbTx.release();
-  }
-}
@@ -14,7 +14,8 @@
     "build": "tsc",
     "watch:contract": "ts-node src/cli/watch-contract.ts --configFile environments/local.toml",
     "test:init": "ts-node test/init.ts",
-    "smoke-test": "yarn test:init && mocha src/smoke.test.ts"
+    "smoke-test": "yarn test:init && mocha src/smoke.test.ts",
+    "test:chain-pruning": "mocha src/chain-pruning.test.ts"
   },
   "repository": {
     "type": "git",
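With this script added, the new pruning suite can be run on its own from the package directory:

yarn test:chain-pruning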
packages/uni-watcher/src/chain-pruning.test.ts (new file, 416 lines)
@@ -0,0 +1,416 @@
+//
+// Copyright 2021 Vulcanize, Inc.
+//
+
+import { expect, assert } from 'chai';
+import { AssertionError } from 'assert';
+import 'mocha';
+import _ from 'lodash';
+
+import { getConfig, JobQueue, JobRunner } from '@vulcanize/util';
+import { getCache } from '@vulcanize/cache';
+import { EthClient } from '@vulcanize/ipld-eth-client';
+import { insertNDummyBlocks, removeEntities } from '@vulcanize/util/test';
+
+import { Indexer } from './indexer';
+import { Database } from './database';
+import { BlockProgress } from './entity/BlockProgress';
+import { SyncStatus } from './entity/SyncStatus';
+
+describe('chain pruning', () => {
+  let db: Database;
+  let indexer: Indexer;
+  let jobRunner: JobRunner;
+
+  before(async () => {
+    // Get config.
+    const configFile = './environments/local.toml';
+    const config = await getConfig(configFile);
+
+    const { upstream, database: dbConfig, jobQueue: jobQueueConfig } = config;
+
+    assert(dbConfig, 'Missing database config');
+
+    // Initialize database.
+    db = new Database(dbConfig);
+    await db.init();
+
+    // Check if database is empty.
+    const isBlockProgressEmpty = await db.isEntityEmpty(BlockProgress);
+    const isSyncStatusEmpty = await db.isEntityEmpty(SyncStatus);
+    const isDbEmptyBeforeTest = isBlockProgressEmpty && isSyncStatusEmpty;
+
+    assert(isDbEmptyBeforeTest, 'Abort: Database not empty.');
+
+    // Create an Indexer object.
+    assert(upstream, 'Missing upstream config');
+    const { ethServer: { gqlApiEndpoint, gqlPostgraphileEndpoint }, cache: cacheConfig } = upstream;
+    assert(gqlApiEndpoint, 'Missing upstream ethServer.gqlApiEndpoint');
+    assert(gqlPostgraphileEndpoint, 'Missing upstream ethServer.gqlPostgraphileEndpoint');
+
+    const cache = await getCache(cacheConfig);
+    const ethClient = new EthClient({
+      gqlEndpoint: gqlApiEndpoint,
+      gqlSubscriptionEndpoint: gqlPostgraphileEndpoint,
+      cache
+    });
+
+    const postgraphileClient = new EthClient({
+      gqlEndpoint: gqlPostgraphileEndpoint,
+      cache
+    });
+
+    indexer = new Indexer(config, db, ethClient, postgraphileClient);
+    assert(indexer, 'Could not create indexer object.');
+
+    const jobQueue = new JobQueue(jobQueueConfig);
+
+    jobRunner = new JobRunner(indexer, jobQueue);
+  });
+
+  afterEach(async () => {
+    await removeEntities(db, BlockProgress);
+    await removeEntities(db, SyncStatus);
+  });
+
+  after(async () => {
+    await db.close();
+  });
+
+  //
+  //            +---+
+  //  head----->| 20|
+  //            +---+
+  //              |
+  //              |
+  //            +---+
+  //            | 19|
+  //            +---+
+  //              |
+  //              |
+  //           12 Blocks
+  //              |
+  //              |
+  //            +---+
+  //            | 6 |
+  //            +---+
+  //              |
+  //              |
+  //            +---+
+  //            | 5 |
+  //            +---+
+  //              |
+  //              |
+  //            +---+
+  //            | 4 | ------> Block Height to be pruned
+  //            +---+
+  //              |
+  //              |
+  //            2 Blocks
+  //              |
+  //              |
+  //            +---+
+  //  tail----->| 1 |
+  //            +---+
+  //
+  it('should prune a block in chain without branches', async () => {
+    // Create BlockProgress test data.
+    await insertNDummyBlocks(db, 20);
+    const pruneBlockHeight = 4;
+
+    // Should return only one block as there are no branches.
+    const blocks = await indexer.getBlocksAtHeight(pruneBlockHeight, false);
+    expect(blocks).to.have.lengthOf(1);
+
+    const job = { data: { pruneBlockHeight } };
+    await jobRunner.pruneChain(job);
+
+    // Only one canonical (not pruned) block should exist at the pruned height.
+    const blocksAfterPruning = await indexer.getBlocksAtHeight(pruneBlockHeight, false);
+    expect(blocksAfterPruning).to.have.lengthOf(1);
+  });
+
+  //
+  //            +---+
+  //            | 20|
+  //            +---+
+  //              |
+  //              |
+  //            +---+
+  //            | 19|
+  //            +---+
+  //              |
+  //              |
+  //           13 Blocks
+  //              |
+  //              |
+  //            +---+        +---+
+  //            | 5 |        | 5 |
+  //            +---+        +---+
+  //              |         /
+  //              |        /
+  //            +---+  +---+  +---+
+  //            | 4 |  | 4 |  | 4 | ----> Block Height to be pruned
+  //            +---+  +---+  +---+
+  //               \     |     /
+  //                \    |    /
+  //                +---+  +---+
+  //                | 3 |  | 3 |
+  //                +---+  +---+
+  //                   \     |
+  //                    \    |
+  //                     +---+
+  //                     | 2 |
+  //                     +---+
+  //                       |
+  //                       |
+  //                     +---+
+  //                     | 1 |
+  //                     +---+
+  //
+  it('should prune block at height with branches', async () => {
+    // Create BlockProgress test data.
+    const firstSeg = await insertNDummyBlocks(db, 2);
+    const secondSeg = await insertNDummyBlocks(db, 2, _.last(firstSeg));
+    expect(_.last(secondSeg).number).to.equal(4);
+    const thirdSeg = await insertNDummyBlocks(db, 1, _.last(firstSeg));
+    const fourthSeg = await insertNDummyBlocks(db, 2, _.last(thirdSeg));
+    expect(_.last(fourthSeg).number).to.equal(5);
+    const fifthSeg = await insertNDummyBlocks(db, 17, _.last(thirdSeg));
+    expect(_.last(fifthSeg).number).to.equal(20);
+
+    const expectedCanonicalBlock = fifthSeg[0];
+    const expectedPrunedBlocks = [secondSeg[1], fourthSeg[0]];
+
+    const pruneBlockHeight = 4;
+
+    // Should return multiple blocks that are not pruned.
+    const blocksBeforePruning = await indexer.getBlocksAtHeight(pruneBlockHeight, false);
+    expect(blocksBeforePruning).to.have.lengthOf(3);
+
+    const job = { data: { pruneBlockHeight } };
+    await jobRunner.pruneChain(job);
+
+    // Only one canonical (not pruned) block should exist at the pruned height.
+    const blocksAfterPruning = await indexer.getBlocksAtHeight(pruneBlockHeight, false);
+    expect(blocksAfterPruning).to.have.lengthOf(1);
+
+    // Assert that correct block is canonical.
+    expect(blocksAfterPruning[0].blockHash).to.equal(expectedCanonicalBlock.hash);
+
+    // Assert that correct blocks are pruned.
+    const prunedBlocks = await indexer.getBlocksAtHeight(pruneBlockHeight, true);
+    expect(prunedBlocks).to.have.lengthOf(2);
+    const prunedBlockHashes = prunedBlocks.map(({ blockHash }) => blockHash);
+    const expectedPrunedBlockHashes = expectedPrunedBlocks.map(({ hash }) => hash);
+    expect(prunedBlockHashes).to.have.members(expectedPrunedBlockHashes);
+  });
+
+  //
+  //            +---+        +---+
+  //            | 20|        | 20|
+  //            +---+        +---+
+  //              |         /
+  //              |        /
+  //            +---+  +---+
+  //            | 19|  | 19|
+  //            +---+  +---+
+  //              |   /
+  //              |  /
+  //            +---+
+  //            | 18|
+  //            +---+
+  //              |
+  //              |
+  //            +---+
+  //            | 17|
+  //            +---+
+  //              |
+  //              |
+  //           11 Blocks
+  //              |
+  //              |
+  //            +---+        +---+
+  //            | 5 |        | 5 |
+  //            +---+        +---+
+  //              |         /
+  //              |        /
+  //            +---+  +---+
+  //            | 4 |  | 4 | ----> Block Height to be pruned
+  //            +---+  +---+
+  //              |   /
+  //              |  /
+  //            +---+
+  //            | 3 |
+  //            +---+
+  //              |
+  //              |
+  //            +---+
+  //            | 2 |
+  //            +---+
+  //              |
+  //              |
+  //            +---+
+  //            | 1 |
+  //            +---+
+  //
+  it('should prune block with multiple branches at chain head', async () => {
+    // Create BlockProgress test data.
+    const firstSeg = await insertNDummyBlocks(db, 3);
+    const secondSeg = await insertNDummyBlocks(db, 2, _.last(firstSeg));
+    expect(_.last(secondSeg).number).to.equal(5);
+    const thirdSeg = await insertNDummyBlocks(db, 15, _.last(firstSeg));
+    const fourthSeg = await insertNDummyBlocks(db, 2, _.last(thirdSeg));
+    expect(_.last(fourthSeg).number).to.equal(20);
+    const fifthSeg = await insertNDummyBlocks(db, 2, _.last(thirdSeg));
+    expect(_.last(fifthSeg).number).to.equal(20);
+
+    const expectedCanonicalBlock = thirdSeg[0];
+    const expectedPrunedBlock = secondSeg[0];
+
+    const pruneBlockHeight = 4;
+
+    // Should return multiple blocks that are not pruned.
+    const blocksBeforePruning = await indexer.getBlocksAtHeight(pruneBlockHeight, false);
+    expect(blocksBeforePruning).to.have.lengthOf(2);
+
+    const job = { data: { pruneBlockHeight } };
+    await jobRunner.pruneChain(job);
+
+    // Only one canonical (not pruned) block should exist at the pruned height.
+    const blocksAfterPruning = await indexer.getBlocksAtHeight(pruneBlockHeight, false);
+    expect(blocksAfterPruning).to.have.lengthOf(1);
+    expect(blocksAfterPruning[0].blockHash).to.equal(expectedCanonicalBlock.hash);
+
+    // Assert that correct blocks are pruned.
+    const prunedBlocks = await indexer.getBlocksAtHeight(pruneBlockHeight, true);
+    expect(prunedBlocks).to.have.lengthOf(1);
+    expect(prunedBlocks[0].blockHash).to.equal(expectedPrunedBlock.hash);
+  });
+
+  //
+  //            +---+
+  //            | 21| ----> Latest Indexed
+  //            +---+
+  //              |
+  //              |
+  //            +---+
+  //            | 20|
+  //            +---+
+  //              |
+  //              |
+  //           15 Blocks
+  //              |
+  //              |
+  //            +---+  +---+
+  //            | 4 |  | 4 |
+  //            +---+  +---+
+  //               \     |
+  //                \    |
+  //            +---+  +---+
+  //            | 3 |  | 3 | ----> Block Height to be pruned
+  //            +---+  +---+
+  //               \     |
+  //                \    |
+  //                  +---+
+  //                  | 2 |
+  //                  +---+
+  //                    |
+  //                    |
+  //                  +---+
+  //                  | 1 |
+  //                  +---+
+  //
+  it('should prune block at depth greater than max reorg depth from latest indexed block', async () => {
+    // Create BlockProgress test data.
+    const firstSeg = await insertNDummyBlocks(db, 2);
+    const secondSeg = await insertNDummyBlocks(db, 2, _.last(firstSeg));
+    expect(_.last(secondSeg).number).to.equal(4);
+    const thirdSeg = await insertNDummyBlocks(db, 19, _.last(firstSeg));
+    expect(_.last(thirdSeg).number).to.equal(21);
+
+    const expectedCanonicalBlock = thirdSeg[0];
+    const expectedPrunedBlock = secondSeg[0];
+
+    const pruneBlockHeight = 3;
+
+    // Should return multiple blocks that are not pruned.
+    const blocksBeforePruning = await indexer.getBlocksAtHeight(pruneBlockHeight, false);
+    expect(blocksBeforePruning).to.have.lengthOf(2);
+
+    const job = { data: { pruneBlockHeight } };
+    await jobRunner.pruneChain(job);
+
+    // Only one canonical (not pruned) block should exist at the pruned height.
+    const blocksAfterPruning = await indexer.getBlocksAtHeight(pruneBlockHeight, false);
+    expect(blocksAfterPruning).to.have.lengthOf(1);
+    expect(blocksAfterPruning[0].blockHash).to.equal(expectedCanonicalBlock.hash);
+
+    // Assert that correct blocks are pruned.
+    const prunedBlocks = await indexer.getBlocksAtHeight(pruneBlockHeight, true);
+    expect(prunedBlocks).to.have.lengthOf(1);
+    expect(prunedBlocks[0].blockHash).to.equal(expectedPrunedBlock.hash);
+  });
+
+  //
+  //            +---+
+  //            | 20|
+  //            +---+
+  //              |
+  //              |
+  //            +---+
+  //            | 19|
+  //            +---+
+  //              |
+  //              |
+  //            8 Blocks
+  //              |
+  //              |
+  //            +---+        +---+
+  //            | 10|        | 10|
+  //            +---+        +---+
+  //              |         /
+  //              |        /
+  //            +---+  +---+
+  //            | 9 |  | 9 | ----> Block Height to be pruned
+  //            +---+  +---+
+  //              |   /
+  //              |  /
+  //            +---+
+  //            | 8 |
+  //            +---+
+  //              |
+  //              |
+  //            6 Blocks
+  //              |
+  //              |
+  //            +---+
+  //            | 1 |
+  //            +---+
+  //
+  it('should avoid pruning block in frothy region', async () => {
+    // Create BlockProgress test data.
+    const firstSeg = await insertNDummyBlocks(db, 8);
+    const secondSeg = await insertNDummyBlocks(db, 2, _.last(firstSeg));
+    expect(_.last(secondSeg).number).to.equal(10);
+    const thirdSeg = await insertNDummyBlocks(db, 12, _.last(firstSeg));
+    expect(_.last(thirdSeg).number).to.equal(20);
+    const pruneBlockHeight = 9;
+
+    // Should return multiple blocks that are not pruned.
+    const blocksBeforePruning = await indexer.getBlocksAtHeight(pruneBlockHeight, false);
+    expect(blocksBeforePruning).to.have.lengthOf(2);
+
+    try {
+      const job = { data: { pruneBlockHeight } };
+      await jobRunner.pruneChain(job);
+      expect.fail('Job Runner should throw error for pruning at frothy region');
+    } catch (error) {
+      expect(error).to.be.instanceof(AssertionError);
+    }
+
+    // No blocks should be pruned at frothy region.
+    const blocksAfterPruning = await indexer.getBlocksAtHeight(pruneBlockHeight, true);
+    expect(blocksAfterPruning).to.have.lengthOf(0);
+  });
+});
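A note on the last test: the frothy region is the window of recent blocks within MAX_REORG_DEPTH of the latest indexed block, where reorgs are still plausible and pruning must refuse to run (MAX_REORG_DEPTH's value is defined in @vulcanize/util and not shown in this diff). The guard itself lives in the job runner rather than in this test file; a minimal sketch of the shape of such a check, where the helper name and the assert-based failure mode are assumptions inferred from the AssertionError the test expects:

import assert from 'assert';

// Hypothetical guard: pruning at pruneBlockHeight is only safe once the chain
// has been indexed at least maxReorgDepth blocks past that height.
function assertOutsideFrothyRegion (latestIndexedBlockNumber: number, pruneBlockHeight: number, maxReorgDepth: number): void {
  assert(
    latestIndexedBlockNumber - pruneBlockHeight >= maxReorgDepth,
    `Cannot prune at height ${pruneBlockHeight}: still within ${maxReorgDepth} blocks of the indexed head (${latestIndexedBlockNumber})`
  );
}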
@@ -4,16 +4,16 @@
 
 import assert from 'assert';
 import _ from 'lodash';
-import { Connection, ConnectionOptions, DeepPartial, QueryRunner } from 'typeorm';
+import { Connection, ConnectionOptions, DeepPartial, QueryRunner, FindConditions } from 'typeorm';
 
-import { Database as BaseDatabase } from '@vulcanize/util';
+import { Database as BaseDatabase, DatabaseInterface } from '@vulcanize/util';
 
 import { Event, UNKNOWN_EVENT_NAME } from './entity/Event';
 import { Contract } from './entity/Contract';
 import { BlockProgress } from './entity/BlockProgress';
 import { SyncStatus } from './entity/SyncStatus';
 
-export class Database {
+export class Database implements DatabaseInterface {
   _config: ConnectionOptions
   _conn!: Connection
   _baseDatabase: BaseDatabase
@@ -211,4 +211,16 @@ export class Database {
       await repo.save(entity);
     }
   }
+
+  async getEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindConditions<Entity>): Promise<Entity[]> {
+    return this._baseDatabase.getEntities(queryRunner, entity, findConditions);
+  }
+
+  async removeEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindConditions<Entity>): Promise<void> {
+    return this._baseDatabase.removeEntities(queryRunner, entity, findConditions);
+  }
+
+  async isEntityEmpty<Entity> (entity: new () => Entity): Promise<boolean> {
+    return this._baseDatabase.isEntityEmpty(entity);
+  }
 }
@@ -3,7 +3,7 @@
 //
 
 import assert from 'assert';
-import { Connection, ConnectionOptions, createConnection, QueryRunner, Repository } from 'typeorm';
+import { Connection, ConnectionOptions, createConnection, FindConditions, QueryRunner, Repository } from 'typeorm';
 import { SnakeNamingStrategy } from 'typeorm-naming-strategies';
 
 import { BlockProgressInterface, SyncStatusInterface } from './types';
@@ -94,4 +94,35 @@ export class Database {
     block.isPruned = true;
     return repo.save(block);
   }
+
+  async getEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindConditions<Entity>): Promise<Entity[]> {
+    const repo = queryRunner.manager.getRepository(entity);
+
+    const entities = await repo.find(findConditions);
+    return entities;
+  }
+
+  async removeEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindConditions<Entity>): Promise<void> {
+    const repo = queryRunner.manager.getRepository(entity);
+
+    const entities = await repo.find(findConditions);
+    await repo.remove(entities);
+  }
+
+  async isEntityEmpty<Entity> (entity: new () => Entity): Promise<boolean> {
+    const dbTx = await this.createTransactionRunner();
+    try {
+      const data = await this.getEntities(dbTx, entity);
+
+      if (data.length > 0) {
+        return false;
+      }
+      return true;
+    } catch (error) {
+      await dbTx.rollbackTransaction();
+      throw error;
+    } finally {
+      await dbTx.release();
+    }
+  }
 }
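These base-class helpers are what the watchers' thin wrappers and the test utilities build on; note that passing no find conditions reads (or removes) every row of the entity's table. A minimal usage sketch of the transaction discipline they expect, mirroring the removeEntities helper later in this diff (db stands in for any watcher database):

const dbTx = await db.createTransactionRunner();

try {
  // No find conditions: touches the whole BlockProgress table.
  const allBlocks = await db.getEntities(dbTx, BlockProgress);
  await db.removeEntities(dbTx, BlockProgress);

  await dbTx.commitTransaction();
} catch (error) {
  await dbTx.rollbackTransaction();
  throw error;
} finally {
  await dbTx.release();
}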
@@ -123,13 +123,15 @@ export class JobRunner {
     // Should be at least 1.
     assert(blocksAtHeight.length);
 
-    // We have more than one node at this height, so prune all nodes not reachable from head.
+    // We have more than one node at this height, so prune all nodes not reachable from indexed block at max reorg depth from prune height.
     // This will lead to orphaned nodes, which will get pruned at the next height.
     if (blocksAtHeight.length > 1) {
+      const [indexedBlock] = await this._indexer.getBlocksAtHeight(pruneBlockHeight + MAX_REORG_DEPTH, false);
+
       for (let i = 0; i < blocksAtHeight.length; i++) {
         const block = blocksAtHeight[i];
-        // If this block is not reachable from the latest indexed block, mark it as pruned.
-        const isAncestor = await this._indexer.blockIsAncestor(block.blockHash, syncStatus.latestIndexedBlockHash, MAX_REORG_DEPTH);
+        // If this block is not reachable from the indexed block at max reorg depth from prune height, mark it as pruned.
+        const isAncestor = await this._indexer.blockIsAncestor(block.blockHash, indexedBlock.blockHash, MAX_REORG_DEPTH);
         if (!isAncestor) {
           await this._indexer.markBlockAsPruned(block);
         }
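The change above anchors the reachability check at the block indexed MAX_REORG_DEPTH above the prune height instead of at the chain head. The indexer's blockIsAncestor itself is not part of this diff; a minimal sketch of how such a bounded ancestry walk can work (getBlockByHash and BlockNode are illustrative stand-ins, not the indexer's actual API):

interface BlockNode {
  blockHash: string;
  parentHash: string;
}

// Walk parent hashes down from the candidate descendant, giving up after maxDepth hops.
async function blockIsAncestor (
  getBlockByHash: (hash: string) => Promise<BlockNode | undefined>,
  ancestorHash: string,
  descendantHash: string,
  maxDepth: number
): Promise<boolean> {
  let current = await getBlockByHash(descendantHash);

  for (let depth = 0; current && depth < maxDepth; depth++) {
    if (current.parentHash === ancestorHash) {
      return true;
    }

    current = await getBlockByHash(current.parentHash);
  }

  return false;
}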
@@ -2,7 +2,7 @@
 // Copyright 2021 Vulcanize, Inc.
 //
 
-import { QueryRunner } from 'typeorm';
+import { DeepPartial, FindConditions, QueryRunner } from 'typeorm';
 
 export interface BlockProgressInterface {
   id: number;
@@ -68,4 +68,6 @@ export interface DatabaseInterface {
   updateSyncStatusIndexedBlock (queryRunner: QueryRunner, blockHash: string, blockNumber: number): Promise<SyncStatusInterface>;
   updateSyncStatusChainHead (queryRunner: QueryRunner, blockHash: string, blockNumber: number): Promise<SyncStatusInterface>;
   updateSyncStatusCanonicalBlock (queryRunner: QueryRunner, blockHash: string, blockNumber: number): Promise<SyncStatusInterface>;
+  saveEvents (queryRunner: QueryRunner, block: DeepPartial<BlockProgressInterface>, events: DeepPartial<EventInterface>[]): Promise<void>;
+  removeEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindConditions<Entity>): Promise<void>
 }
@@ -32,6 +32,8 @@ import {
   bytecode as WETH9_BYTECODE
 } from '../artifacts/test/contracts/WETH9.sol/WETH9.json';
 
+import { DatabaseInterface } from '../src/types';
+
 export { abi as NFPM_ABI } from '@uniswap/v3-periphery/artifacts/contracts/NonfungiblePositionManager.sol/NonfungiblePositionManager.json';
 export { abi as TESTERC20_ABI } from '../artifacts/test/contracts/TestERC20.sol/TestERC20.json';
 
@@ -147,3 +149,86 @@ export const deployNFPM = async (signer: Signer, factory: Contract, weth9Address
     signer);
   return await positionManagerFactory.deploy(factory.address, weth9Address, nftDescriptor.address);
 };
+
+export const insertDummyBlock = async (db: DatabaseInterface, parentBlock: any): Promise<any> => {
+  // Insert a dummy BlockProgress entity after parentBlock.
+
+  const dbTx = await db.createTransactionRunner();
+
+  try {
+    const randomByte = ethers.utils.randomBytes(10);
+    const blockHash = ethers.utils.sha256(randomByte);
+    const blockTimestamp = Math.floor(Date.now() / 1000);
+    const parentHash = parentBlock.hash;
+    const blockNumber = parentBlock.number + 1;
+
+    const block = {
+      blockNumber,
+      blockHash,
+      blockTimestamp,
+      parentHash
+    };
+
+    await db.updateSyncStatusChainHead(dbTx, blockHash, blockNumber);
+    await db.saveEvents(dbTx, block, []);
+    await db.updateSyncStatusIndexedBlock(dbTx, blockHash, blockNumber);
+
+    await dbTx.commitTransaction();
+
+    return {
+      number: blockNumber,
+      hash: blockHash,
+      timestamp: blockTimestamp,
+      parent: {
+        hash: parentHash
+      }
+    };
+  } catch (error) {
+    await dbTx.rollbackTransaction();
+    throw error;
+  } finally {
+    await dbTx.release();
+  }
+};
+
+export const insertNDummyBlocks = async (db: DatabaseInterface, numberOfBlocks: number, parentBlock?: any): Promise<any[]> => {
+  // Insert n dummy BlockProgress serially after parentBlock.
+
+  const blocksArray: any[] = [];
+  if (!parentBlock) {
+    const randomByte = ethers.utils.randomBytes(10);
+    const hash = ethers.utils.sha256(randomByte);
+    parentBlock = {
+      number: 0,
+      hash,
+      timestamp: -1,
+      parent: {
+        hash: ''
+      }
+    };
+  }
+
+  let block = parentBlock;
+  for (let i = 0; i < numberOfBlocks; i++) {
+    block = await insertDummyBlock(db, block);
+    blocksArray.push(block);
+  }
+
+  return blocksArray;
+};
+
+export async function removeEntities<Entity> (db: DatabaseInterface, entity: new () => Entity): Promise<void> {
+  // Remove all entries of the specified entity from database.
+
+  const dbTx = await db.createTransactionRunner();
+
+  try {
+    await db.removeEntities(dbTx, entity);
+    dbTx.commitTransaction();
+  } catch (error) {
+    await dbTx.rollbackTransaction();
+    throw error;
+  } finally {
+    await dbTx.release();
+  }
+}
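Usage example, mirroring the frothy-region test earlier in this commit: chains fork wherever two calls share the same parentBlock, so a branched tree is built by reusing the tail of an earlier segment.

// Build an 8-block trunk, then fork two branches from its tip (height 8).
const trunk = await insertNDummyBlocks(db, 8);
const shortBranch = await insertNDummyBlocks(db, 2, _.last(trunk));  // heights 9-10
const longBranch = await insertNDummyBlocks(db, 12, _.last(trunk));  // heights 9-20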