Tests for getPrevEntityVersion (#217)

* Insert and remove a dummy BlockProgress from db.
* Test to fetch a Token entity from pruned region.
* Check if db is empty initially.
* Test to fetch a Token entity from frothy region.
* Test to fetch a Token from frothy region (same block num).
* Test to fetch a Token entity from another branch.
* Tests to fetch a Token entity (multiple tokens).
* Test to fetch updated Token entity in pruned region.

Co-authored-by: prathamesh0 <prathamesh.musale0@gmail.com>

This commit is contained in:
parent ec92779c4e
commit 9e1ed70d18
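A note on the terminology used throughout the new test file: the scenarios distinguish a pruned region (older blocks on the canonical chain, where competing branches have already been discarded) from a frothy region (recent blocks near the head, where several branches with the same block numbers may still coexist). This reading is inferred from the test scenarios in the diff below rather than stated in the commit message, so treat it as orientation only.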
@@ -23,7 +23,8 @@
     "build": "tsc",
     "generate:schema": "get-graphql-schema https://api.thegraph.com/subgraphs/name/ianlapham/uniswap-v3-alt > docs/analysis/schema/full-schema.graphql",
     "lint:schema": "graphql-schema-linter",
-    "smoke-test": "mocha src/smoke.test.ts"
+    "smoke-test": "mocha src/smoke.test.ts",
+    "test:gpev": "mocha src/get-prev-entity.test.ts"
   },
   "devDependencies": {
     "@types/chance": "^1.1.2",
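With the added script, the new suite can presumably be run from the uni-info-watcher package directory as yarn test:gpev (or the npm equivalent). Note that the suite's before hook loads ./environments/local.toml and aborts unless the configured database is reachable and initially empty.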
packages/uni-info-watcher/src/get-prev-entity.test.ts (new file, 616 lines)
@@ -0,0 +1,616 @@
//
// Copyright 2021 Vulcanize, Inc.
//

import { expect, assert } from 'chai';
import 'mocha';
import _ from 'lodash';

import {
  getConfig
} from '@vulcanize/util';

import { TestDatabase } from '../test/test-db';
import { createTestBlockTree, insertDummyToken, removeEntities } from '../test/utils';
import { Block } from './events';
import { BlockProgress } from './entity/BlockProgress';
import { SyncStatus } from './entity/SyncStatus';
import { Token } from './entity/Token';

describe('getPrevEntityVersion', () => {
  let db: TestDatabase;
  let blocks: Block[][];
  let tail: Block;
  let head: Block;
  let isDbEmptyBeforeTest: boolean;

  before(async () => {
    // Get config.
    const configFile = './environments/local.toml';
    const config = await getConfig(configFile);

    const { database: dbConfig } = config;
    assert(dbConfig, 'Missing dbConfig.');

    // Initialize database.
    db = new TestDatabase(dbConfig);
    await db.init();

    // Check if database is empty.
    isDbEmptyBeforeTest = await db.isEmpty();
    assert(isDbEmptyBeforeTest, 'Abort: Database not empty.');

    // Create BlockProgress test data.
    blocks = await createTestBlockTree(db);
    tail = blocks[0][0];
    head = blocks[3][10];
  });

  after(async () => {
    if (isDbEmptyBeforeTest) {
      await removeEntities(db, BlockProgress);
      await removeEntities(db, SyncStatus);
    }
    await db.close();
  });

  afterEach(async () => {
    await removeEntities(db, Token);
  });

  //
  //                              +---+
  //                    head----->| 21|
  //                              +---+
  //                                |
  //                                |
  //                              +---+          +---+
  //                              | 20|          | 15|------Token (token44)
  //                              +---+          +---+
  //                                |             /
  //                                |            /
  //                            8 Blocks   3 Blocks
  //                                |          /
  //                                |         /
  //               +---+          +---+    +---+
  //               | 11|          | 11|    | 11|
  //               +---+          +---+    +---+
  //                   \            |      /
  //                    \           |     /
  //                     +---+    +---+
  //                     | 10|    | 10|
  //                     +---+    +---+
  //                          \     |
  //                           \    |
  //                              +---+
  //                              | 9 |
  //                              +---+
  //                                |
  //                                |
  //                            7 Blocks
  //                                |
  //                                |
  //                              +---+
  //                    tail----->| 1 |------Token (token00)
  //                              +---+      (Target)
  //
  it('should fetch Token in pruned region', async () => {
    // Insert a Token entity at the tail.
    const token00 = await insertDummyToken(db, tail);

    const token44 = _.cloneDeep(token00);
    token44.txCount++;
    await insertDummyToken(db, blocks[4][4], token44);

    const dbTx = await db.createTransactionRunner();
    try {
      const searchedToken = await db.getToken(dbTx, { id: token00.id, blockHash: head.hash });
      expect(searchedToken).to.not.be.empty;
      expect(searchedToken?.id).to.be.equal(token00.id);
      expect(searchedToken?.txCount).to.be.equal(token00.txCount.toString());
      expect(searchedToken?.blockNumber).to.be.equal(token00.blockNumber);
      expect(searchedToken?.blockHash).to.be.equal(token00.blockHash);

      dbTx.commitTransaction();
    } catch (error) {
      await dbTx.rollbackTransaction();
      throw error;
    } finally {
      await dbTx.release();
    }
  });

  //
  //                              +---+
  //                    head----->| 21|
  //                              +---+
  //                                |
  //                                |
  //                              +---+          +---+
  //                              | 20|          | 15|------Token (token44)
  //                              +---+          +---+
  //                                |             /
  //                                |            /
  //                            8 Blocks   3 Blocks
  //                                |          /
  //                                |         /
  //               +---+          +---+    +---+
  //               | 11|          | 11|    | 11|
  //               +---+          +---+    +---+
  //                   \            |      /
  //                    \           |     /
  //                     +---+    +---+
  //                     | 10|    | 10|
  //                     +---+    +---+
  //                          \     |
  //                           \    |
  //                              +---+
  //                              | 9 |
  //                              +---+
  //                                |
  //                                |
  //                            5 Blocks
  //                                |
  //                                |
  //                              +---+
  //                              | 3 |------Token (token02)
  //                              +---+      (Target)
  //                                |
  //                                |
  //                              +---+
  //                              | 2 |
  //                              +---+
  //                                |
  //                                |
  //                              +---+
  //                    tail----->| 1 |------Token (token00)
  //                              +---+
  //
  it('should fetch updated Token in pruned region', async () => {
    // Insert a Token entity at the tail and update in pruned region.
    const token00 = await insertDummyToken(db, tail);

    const token02 = _.cloneDeep(token00);
    token02.txCount++;
    await insertDummyToken(db, blocks[0][2], token02);

    const token44 = _.cloneDeep(token00);
    token44.txCount++;
    await insertDummyToken(db, blocks[4][4], token44);

    const dbTx = await db.createTransactionRunner();
    try {
      const searchedToken = await db.getToken(dbTx, { id: token00.id, blockHash: head.hash });
      expect(searchedToken).to.not.be.empty;
      expect(searchedToken?.id).to.be.equal(token02.id);
      expect(searchedToken?.txCount).to.be.equal(token02.txCount.toString());
      expect(searchedToken?.blockNumber).to.be.equal(token02.blockNumber);
      expect(searchedToken?.blockHash).to.be.equal(token02.blockHash);

      dbTx.commitTransaction();
    } catch (error) {
      await dbTx.rollbackTransaction();
      throw error;
    } finally {
      await dbTx.release();
    }
  });

  //
  //                              +---+
  //                    head----->| 21|
  //                              +---+
  //                                |
  //                                |
  //                              +---+          +---+
  //                              | 20|          | 15|------Token (token44)
  //                              +---+          +---+
  //                                |             /
  //  Token (token30)-------\       |            /
  //    (Target)             -\ 8 Blocks   3 Blocks
  //                           -\   |          /
  //                             -\ |         /
  //               +---+          +---+    +---+
  //               | 11|          | 11|    | 11|
  //               +---+          +---+    +---+
  //                   \            |      /
  //                    \           |     /
  //                     +---+    +---+
  //                     | 10|    | 10|
  //                     +---+    +---+
  //                          \     |
  //                           \    |
  //                              +---+
  //                              | 9 |
  //                              +---+
  //                                |
  //                                |
  //                            7 Blocks
  //                                |
  //                                |
  //                              +---+
  //                    tail----->| 1 |------Token (token00)
  //                              +---+
  //
  it('should fetch the Token in frothy region', async () => {
    // Insert a Token entity at tail and in the frothy region.
    const token00 = await insertDummyToken(db, tail);

    const token30 = _.cloneDeep(token00);
    token30.txCount++;
    await insertDummyToken(db, blocks[3][0], token30);

    const token44 = _.cloneDeep(token00);
    token44.txCount++;
    await insertDummyToken(db, blocks[4][4], token44);

    const dbTx = await db.createTransactionRunner();
    try {
      const searchedToken = await db.getToken(dbTx, { id: token00.id, blockHash: head.hash });
      expect(searchedToken).to.not.be.empty;
      expect(searchedToken?.id).to.be.equal(token30.id);
      expect(searchedToken?.txCount).to.be.equal(token30.txCount.toString());
      expect(searchedToken?.blockNumber).to.be.equal(token30.blockNumber);
      expect(searchedToken?.blockHash).to.be.equal(token30.blockHash);

      dbTx.commitTransaction();
    } catch (error) {
      await dbTx.rollbackTransaction();
      throw error;
    } finally {
      await dbTx.release();
    }
  });

  //
  //                              +---+
  //                    head----->| 21|
  //                              +---+
  //                                |
  //                                |
  //                              +---+          +---+
  //                              | 20|          | 15|
  //                              +---+          +---+
  //                                |             /
  //  Token (token30)-------\       |            /
  //    (Target)             -\ 8 Blocks   3 Blocks
  //                           -\   |          /
  //                             -\ |         /
  //               +---+          +---+    +---+
  //    Token------| 11|          | 11|    | 11|------Token (token40)
  //    (token11)  +---+          +---+    +---+
  //                   \            |      /
  //                    \           |     /
  //                     +---+    +---+
  //                     | 10|    | 10|
  //                     +---+    +---+
  //                          \     |
  //                           \    |
  //                              +---+
  //                              | 9 |------Token (token08)
  //                              +---+
  //                                |
  //                                |
  //                            7 Blocks
  //                                |
  //                                |
  //                              +---+
  //                    tail----->| 1 |
  //                              +---+
  //
  it('should fetch the Token in frothy region (same block number)', async () => {
    // Insert a Token entity in the frothy region at same block numbers.
    const token08 = await insertDummyToken(db, blocks[0][8]);

    const token11 = _.cloneDeep(token08);
    token11.txCount++;
    await insertDummyToken(db, blocks[1][1], token11);

    const token30 = _.cloneDeep(token08);
    token30.txCount++;
    await insertDummyToken(db, blocks[3][0], token30);

    const token40 = _.cloneDeep(token08);
    token40.txCount++;
    await insertDummyToken(db, blocks[4][0], token40);

    const dbTx = await db.createTransactionRunner();
    try {
      const searchedToken = await db.getToken(dbTx, { id: token08.id, blockHash: head.hash });
      expect(searchedToken).to.not.be.empty;
      expect(searchedToken?.id).to.be.equal(token30.id);
      expect(searchedToken?.txCount).to.be.equal(token30.txCount.toString());
      expect(searchedToken?.blockNumber).to.be.equal(token30.blockNumber);
      expect(searchedToken?.blockHash).to.be.equal(token30.blockHash);

      dbTx.commitTransaction();
    } catch (error) {
      await dbTx.rollbackTransaction();
      throw error;
    } finally {
      await dbTx.release();
    }
  });

  //
  //                              +---+
  //                    head----->| 21|
  //                              +---+
  //                                |
  //                                |
  //                              +---+          +---+
  //                              | 20|          | 15|------Token (token44)
  //                              +---+          +---+
  //                                |             /
  //                                |            /
  //                            8 Blocks   3 Blocks
  //                                |          /
  //                                |         /
  //               +---+          +---+    +---+
  //               | 11|          | 11|    | 11|
  //               +---+          +---+    +---+
  //                   \            |      /
  //                    \           |     /
  //                     +---+    +---+
  //                     | 10|    | 10|
  //                     +---+    +---+
  //                          \     |
  //                           \    |
  //                              +---+
  //                              | 9 |
  //                              +---+
  //                                |
  //                                |
  //                            7 Blocks
  //                                |
  //                                |
  //                              +---+
  //                    tail----->| 1 |
  //                              +---+
  //
  it('should not fetch the Token from a side branch in frothy region', async () => {
    // Insert a Token entity in the frothy region in a side branch.
    const token44 = await insertDummyToken(db, blocks[4][4]);

    const dbTx = await db.createTransactionRunner();
    try {
      const searchedToken = await db.getToken(dbTx, { id: token44.id, blockHash: head.hash });
      expect(searchedToken).to.be.undefined;

      dbTx.commitTransaction();
    } catch (error) {
      await dbTx.rollbackTransaction();
      throw error;
    } finally {
      await dbTx.release();
    }
  });

  //
  //                              +---+
  //                    head----->| 21|
  //                              +---+
  //                                |
  //                                |
  //                              +---+          +---+
  //                              | 20|          | 15|------TokenA (tokenA44)
  //                              +---+          +---+
  //                                |             /
  //                                |            /
  //                            8 Blocks   3 Blocks
  //                                |          /
  //                                |         /
  //               +---+          +---+    +---+
  //               | 11|          | 11|    | 11|
  //               +---+          +---+    +---+
  //                   \            |      /
  //                    \           |     /
  //                     +---+    +---+
  //                     | 10|    | 10|
  //                     +---+    +---+
  //                          \     |
  //                           \    |
  //                              +---+
  //                              | 9 |
  //                              +---+
  //                                |
  //                                |
  //                            6 Blocks
  //                                |
  //                                |
  //                              +---+
  //                              | 2 |------TokenB
  //                              +---+
  //                                |
  //                                |
  //                              +---+
  //                    tail----->| 1 |------TokenA (tokenA00)
  //                              +---+      (Target)
  //
  it('should fetch Token in pruned region (multiple tokens)', async () => {
    // Insert multiple Token entities in the pruned region.
    const tokenA00 = await insertDummyToken(db, tail);

    await insertDummyToken(db, blocks[0][1]);

    const tokenA44 = _.cloneDeep(tokenA00);
    tokenA44.txCount++;
    await insertDummyToken(db, blocks[4][4], tokenA44);

    const dbTx = await db.createTransactionRunner();
    try {
      const searchedToken = await db.getToken(dbTx, { id: tokenA00.id, blockHash: head.hash });
      expect(searchedToken).to.not.be.empty;
      expect(searchedToken?.id).to.be.equal(tokenA00.id);
      expect(searchedToken?.txCount).to.be.equal(tokenA00.txCount.toString());
      expect(searchedToken?.blockNumber).to.be.equal(tokenA00.blockNumber);
      expect(searchedToken?.blockHash).to.be.equal(tokenA00.blockHash);

      dbTx.commitTransaction();
    } catch (error) {
      await dbTx.rollbackTransaction();
      throw error;
    } finally {
      await dbTx.release();
    }
  });

  //
  //                              +---+
  //                    head----->| 21|
  //                              +---+
  //                                |
  //                                |
  //                              +---+          +---+
  //       TokenB (tokenB39)------| 20|          | 15|------TokenA (tokenA44)
  //                              +---+          +---+
  //                                |             /
  // TokenA (tokenA30)-------\      |            /
  //     (Target)            -\ 8 Blocks   3 Blocks
  //                           -\   |          /
  //                             -\ |         /
  //               +---+          +---+    +---+
  //               | 11|          | 11|    | 11|
  //               +---+          +---+    +---+
  //                   \            |      /
  //                    \           |     /
  //                     +---+    +---+
  //                     | 10|    | 10|
  //                     +---+    +---+
  //                          \     |
  //                           \    |
  //                              +---+
  //                              | 9 |
  //                              +---+
  //                                |
  //                                |
  //                            6 Blocks
  //                                |
  //                                |
  //                              +---+
  //                              | 2 |------TokenB (tokenB01)
  //                              +---+
  //                                |
  //                                |
  //                              +---+
  //                    tail----->| 1 |------TokenA (tokenA00)
  //                              +---+
  //
  it('should fetch the Token in frothy region (multiple tokens)', async () => {
    // Insert multiple Token entities in the pruned region and in the frothy region.
    const tokenA00 = await insertDummyToken(db, tail);

    const tokenB01 = await insertDummyToken(db, blocks[0][1]);

    const tokenA30 = _.cloneDeep(tokenA00);
    tokenA30.txCount++;
    await insertDummyToken(db, blocks[3][0], tokenA30);

    const tokenA44 = _.cloneDeep(tokenA00);
    tokenA44.txCount++;
    await insertDummyToken(db, blocks[4][4], tokenA44);

    const tokenB39 = _.cloneDeep(tokenB01);
    tokenB39.txCount++;
    await insertDummyToken(db, blocks[3][9], tokenB39);

    const dbTx = await db.createTransactionRunner();
    try {
      const searchedToken = await db.getToken(dbTx, { id: tokenA00.id, blockHash: head.hash });
      expect(searchedToken).to.not.be.empty;
      expect(searchedToken?.id).to.be.equal(tokenA30.id);
      expect(searchedToken?.txCount).to.be.equal(tokenA30.txCount.toString());
      expect(searchedToken?.blockNumber).to.be.equal(tokenA30.blockNumber);
      expect(searchedToken?.blockHash).to.be.equal(tokenA30.blockHash);

      dbTx.commitTransaction();
    } catch (error) {
      await dbTx.rollbackTransaction();
      throw error;
    } finally {
      await dbTx.release();
    }
  });

  //
  //                              +---+
  //                    head----->| 21|
  //                              +---+
  //                                |
  //                                |
  //                              +---+          +---+
  //                              | 20|          | 15|
  //                              +---+          +---+
  //                                |             /
  //                                |            /
  //                            7 Blocks   2 Blocks
  //                                |          /
  //                                |         /
  //            TokenB (tokenB31) +---+     +---+
  //       TokenA (tokenA31)------| 12|     | 12|------TokenA (tokenA41)
  //       (Target)               +---+     +---+
  //                                |        /
  //                                |       /
  //               +---+          +---+    +---+
  //               | 11|          | 11|    | 11|
  //               +---+          +---+    +---+
  //                   \            |      /
  //                    \           |     /
  //                     +---+    +---+
  //                     | 10|    | 10|
  //                     +---+    +---+
  //                          \     |
  //                           \    |
  //                              +---+
  //                              | 9 |------TokenA (tokenA08)
  //                              +---+
  //                                |
  //                                |
  //                              +---+
  //                              | 8 |------TokenB (tokenB07)
  //                              +---+
  //                                |
  //                                |
  //                            6 Blocks
  //                                |
  //                                |
  //                              +---+
  //                    tail----->| 1 |
  //                              +---+
  //
  it('should fetch the Token in frothy region (same block number) (multiple tokens)', async () => {
    // Insert multiple Token entities in the frothy region at same block numbers.
    const tokenB07 = await insertDummyToken(db, blocks[0][7]);

    const tokenA08 = await insertDummyToken(db, blocks[0][8]);

    const tokenA31 = _.cloneDeep(tokenA08);
    tokenA31.txCount++;
    await insertDummyToken(db, blocks[3][1], tokenA31);

    const tokenB31 = _.cloneDeep(tokenB07);
    tokenB31.txCount++;
    await insertDummyToken(db, blocks[3][1], tokenB31);

    const tokenA41 = _.cloneDeep(tokenA08);
    tokenA41.txCount++;
    await insertDummyToken(db, blocks[4][1], tokenA41);

    const dbTx = await db.createTransactionRunner();
    try {
      const searchedToken = await db.getToken(dbTx, { id: tokenA08.id, blockHash: head.hash });
      expect(searchedToken).to.not.be.empty;
      expect(searchedToken?.id).to.be.equal(tokenA31.id);
      expect(searchedToken?.txCount).to.be.equal(tokenA31.txCount.toString());
      expect(searchedToken?.blockNumber).to.be.equal(tokenA31.blockNumber);
      expect(searchedToken?.blockHash).to.be.equal(tokenA31.blockHash);

      dbTx.commitTransaction();
    } catch (error) {
      await dbTx.rollbackTransaction();
      throw error;
    } finally {
      await dbTx.release();
    }
  });
});
packages/uni-info-watcher/test/test-db.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
//
// Copyright 2021 Vulcanize, Inc.
//

import { QueryRunner, FindConditions } from 'typeorm';

import { Database } from '../src/database';
import { BlockProgress } from '../src/entity/BlockProgress';
import { Token } from '../src/entity/Token';

export class TestDatabase extends Database {
  async removeEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindConditions<Entity>): Promise<void> {
    const repo = queryRunner.manager.getRepository(entity);

    const entities = await repo.find(findConditions);
    await repo.remove(entities);
  }

  async isEmpty (): Promise<boolean> {
    const dbTx = await this.createTransactionRunner();
    try {
      const dataBP = await this.getEntities(dbTx, BlockProgress, {}, {}, { limit: 1 });
      const dataToken = await this.getEntities(dbTx, Token, {}, {}, { limit: 1 });
      const dataSyncStatus = await this.getSyncStatus(dbTx);
      if (dataBP.length > 0 || dataToken.length > 0 || dataSyncStatus) {
        return false;
      }
      return true;
    } catch (error) {
      await dbTx.rollbackTransaction();
      throw error;
    } finally {
      await dbTx.release();
    }
  }
}
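TestDatabase extends the watcher's Database class only with the two helpers the suite needs: removeEntities, which the test teardown uses (through the utility wrapper below) to delete Token, BlockProgress and SyncStatus rows, and isEmpty, which guards the suite so it never runs against a database that already holds data.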
packages/uni-info-watcher/test/utils.ts (existing file, changed)
@@ -3,8 +3,10 @@
//

import { expect } from 'chai';
import { ethers } from 'ethers';
import { request } from 'graphql-request';
import Decimal from 'decimal.js';
import _ from 'lodash';

import {
  queryFactory,
@@ -17,6 +19,9 @@ import {
  queryTokenHourData,
  queryTransactions
} from '../test/queries';
import { TestDatabase } from './test-db';
import { Block } from '../src/events';
import { Token } from '../src/entity/Token';

export const checkUniswapDayData = async (endpoint: string): Promise<void> => {
  // Checked values: date, tvlUSD.
@@ -161,3 +166,159 @@ export const fetchTransaction = async (endpoint: string): Promise<{transaction:

  return transaction;
};

export const insertDummyBlock = async (db: TestDatabase, parentBlock: Block): Promise<Block> => {
  // Insert a dummy BlockProgress entity after parentBlock.

  const dbTx = await db.createTransactionRunner();

  try {
    const randomByte = ethers.utils.randomBytes(10);
    const blockHash = ethers.utils.sha256(randomByte);
    const blockTimestamp = Math.floor(Date.now() / 1000);
    const parentHash = parentBlock.hash;
    const blockNumber = parentBlock.number + 1;

    const block: Block = {
      number: blockNumber,
      hash: blockHash,
      timestamp: blockTimestamp,
      parentHash
    };
    await db.updateSyncStatus(dbTx, blockHash, blockNumber);
    await db.saveEvents(dbTx, block, []);

    await dbTx.commitTransaction();
    return block;
  } catch (error) {
    await dbTx.rollbackTransaction();
    throw error;
  } finally {
    await dbTx.release();
  }
};

export const insertNDummyBlocks = async (db: TestDatabase, numberOfBlocks:number, parentBlock?: Block): Promise<Block[]> => {
  // Insert n dummy BlockProgress serially after parentBlock.

  const blocksArray: Block[] = [];
  if (!parentBlock) {
    const randomByte = ethers.utils.randomBytes(10);
    const hash = ethers.utils.sha256(randomByte);
    parentBlock = {
      number: 0,
      hash,
      timestamp: -1,
      parentHash: ''
    };
  }

  let block = parentBlock;
  for (let i = 0; i < numberOfBlocks; i++) {
    block = await insertDummyBlock(db, block);
    blocksArray.push(block);
  }

  return blocksArray;
};

export const createTestBlockTree = async (db: TestDatabase): Promise<Block[][]> => {
  // Create BlockProgress test data.
  //
  //                              +---+
  //                    head----->| 21|
  //                              +---+
  //                                |
  //                                |
  //                              +---+          +---+
  //                              | 20|          | 15|------Token (token44)
  //                              +---+          +---+
  //                                |             /
  //                                |            /
  //                            8 Blocks   3 Blocks
  //                                |          /
  //                                |         /
  //               +---+          +---+    +---+
  //               | 11|          | 11|    | 11|
  //               +---+          +---+    +---+
  //                   \            |      /
  //                    \           |     /
  //                     +---+    +---+
  //                     | 10|    | 10|
  //                     +---+    +---+
  //                          \     |
  //                           \    |
  //                              +---+
  //                              | 9 |
  //                              +---+
  //                                |
  //                                |
  //                            7 Blocks
  //                                |
  //                                |
  //                              +---+
  //                    tail----->| 1 |------Token (token00)
  //                              +---+      (Target)
  //

  const blocks: Block[][] = [];

  const firstSeg = await insertNDummyBlocks(db, 9);
  const secondSeg = await insertNDummyBlocks(db, 2, _.last(firstSeg));
  const thirdSeg = await insertNDummyBlocks(db, 1, _.last(firstSeg));
  const fourthSeg = await insertNDummyBlocks(db, 11, _.last(thirdSeg));
  const fifthSeg = await insertNDummyBlocks(db, 5, _.last(thirdSeg));

  blocks.push(firstSeg);
  blocks.push(secondSeg);
  blocks.push(thirdSeg);
  blocks.push(fourthSeg);
  blocks.push(fifthSeg);

  return blocks;
};

export const insertDummyToken = async (db: TestDatabase, block: Block, token?: Token): Promise<Token> => {
  // Insert a dummy Token entity at block.

  if (!token) {
    const randomByte = ethers.utils.randomBytes(20);
    const tokenAddress = ethers.utils.hexValue(randomByte);

    token = new Token();
    token.symbol = 'TEST';
    token.name = 'TestToken';
    token.id = tokenAddress;
    token.totalSupply = new Decimal(0);
    token.decimals = BigInt(0);
  }

  const dbTx = await db.createTransactionRunner();

  try {
    token = await db.saveToken(dbTx, token, block);
    dbTx.commitTransaction();
    return token;
  } catch (error) {
    await dbTx.rollbackTransaction();
    throw error;
  } finally {
    await dbTx.release();
  }
};

export async function removeEntities<Entity> (db: TestDatabase, entity: new () => Entity): Promise<void> {
  // Remove all entries of the specified entity from database.

  const dbTx = await db.createTransactionRunner();

  try {
    await db.removeEntities(dbTx, entity);
    dbTx.commitTransaction();
  } catch (error) {
    await dbTx.rollbackTransaction();
    throw error;
  } finally {
    await dbTx.release();
  }
}
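For readers of the suite, a minimal sketch of the tree that createTestBlockTree builds, with the segment-to-branch mapping read off the insertNDummyBlocks calls above; the describeTree helper is illustrative only and is not part of the commit:

import { TestDatabase } from './test-db';
import { createTestBlockTree } from './utils';

// Illustrative helper (not in the commit): prints the shape of the block tree used by the tests.
const describeTree = async (db: TestDatabase): Promise<void> => {
  const blocks = await createTestBlockTree(db);

  // blocks[0]: 9 blocks (numbers 1-9), the trunk; blocks[0][0] is the tail used by the suite.
  // blocks[1]: 2 blocks (10, 11) branching from block 9 - a dead-end side branch.
  // blocks[2]: 1 block (10) branching from block 9 - shared parent of the two upper branches.
  // blocks[3]: 11 blocks (11-21) above blocks[2][0]; blocks[3][10] (block 21) is the head.
  // blocks[4]: 5 blocks (11-15) above blocks[2][0] - the side branch in the frothy region.
  console.log('tail:', blocks[0][0].number, 'head:', blocks[3][10].number);
};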