CLI to compare eden-watcher entities and fix mapping code (#149)

* Make sumStaked variable local in eden network mapping

* Implement compare CLI to fetch and query by ids

* Set filterLogs to true for eden-watcher

* Use varchar for bigint array type in eden-watcher

* Store subgraph entities by id in IPLD state

* Store bigint values as string in IPLD state

* Update eden watcher hook to store single Block entity in IPLD checkpoint

* Fix entity enum type property

* Fix parsing big numbers in event params

* Fix event bigint params parsing in all watchers

* Set default limit to query result and process block after events
nikugogoi 2022-08-08 15:58:13 +05:30 committed by GitHub
parent 6990eb892a
commit 1a903fccc6
47 changed files with 366 additions and 135 deletions

View File

@@ -52,6 +52,7 @@ import { {{subgraphEntity.className}} } from './entity/{{subgraphEntity.classNam
{{/each}}
const log = debug('vulcanize:indexer');
+const JSONbigNative = JSONbig({ useNativeBigInt: true });
{{#each contracts as | contract |}}
const KIND_{{capitalize contract.contractName}} = '{{contract.contractKind}}';
@@ -177,8 +178,8 @@ export class Indexer implements IPLDIndexerInterface {
  getResultEvent (event: Event): ResultEvent {
    const block = event.block;
-    const eventFields = JSONbig.parse(event.eventInfo);
-    const { tx, eventSignature } = JSON.parse(event.extraInfo);
+    const eventFields = JSONbigNative.parse(event.eventInfo);
+    const { tx, eventSignature } = JSONbigNative.parse(event.extraInfo);
    return {
      block: {
@@ -288,7 +289,7 @@ export class Indexer implements IPLDIndexerInterface {
{{/if}}
    await this._db.{{query.saveQueryName}}({ blockHash, blockNumber, contractAddress
-    {{~#each query.params}}, {{this.name~}} {{/each}}, value: result.value, proof: JSONbig.stringify(result.proof) });
+    {{~#each query.params}}, {{this.name~}} {{/each}}, value: result.value, proof: JSONbigNative.stringify(result.proof) });
{{#if query.stateVariableType}}
{{#if (compare query.stateVariableType 'Mapping')}}
@@ -441,13 +442,15 @@ export class Indexer implements IPLDIndexerInterface {
  async processBlock (blockHash: string, blockNumber: number): Promise<void> {
    // Call a function to create initial state for contracts.
    await this._baseIndexer.createInit(this, blockHash, blockNumber);
-{{#if (subgraphPath)}}
-    // Call subgraph handler for block.
-    await this._graphWatcher.handleBlock(blockHash);
-{{/if}}
  }
+{{#if (subgraphPath)}}
+  async processBlockAfterEvents (blockHash: string): Promise<void> {
+    // Call subgraph handler for block.
+    await this._graphWatcher.handleBlock(blockHash);
+  }
+{{/if}}
  parseEventNameAndArgs (kind: string, logObj: any): any {
    const { topics, data } = logObj;
@@ -770,10 +773,10 @@ export class Indexer implements IPLDIndexerInterface {
      txHash,
      contract,
      eventName,
-      eventInfo: JSONbig.stringify(eventInfo),
-      extraInfo: JSONbig.stringify(extraInfo),
-      proof: JSONbig.stringify({
-        data: JSONbig.stringify({
+      eventInfo: JSONbigNative.stringify(eventInfo),
+      extraInfo: JSONbigNative.stringify(extraInfo),
+      proof: JSONbigNative.stringify({
+        data: JSONbigNative.stringify({
          blockHash,
          receiptCID,
          log: {
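The `JSONbig` → `JSONbigNative` switch above (repeated in the watcher indexers below) is what the "Fix parsing big numbers in event params" items refer to. A standalone sketch, not part of the diff, of what the `useNativeBigInt` option buys when event params carry uint256-sized values:

```ts
// Minimal sketch: JSON.parse silently loses precision above Number.MAX_SAFE_INTEGER,
// while json-bigint configured with useNativeBigInt parses to a native BigInt.
import JSONbig from 'json-bigint';

const JSONbigNative = JSONbig({ useNativeBigInt: true });

// A uint256 max value, far beyond 2^53 - 1.
const eventInfo = '{"amount":115792089237316195423570985008687907853269984665640564039457584007913129639935}';

console.log(JSON.parse(eventInfo).amount);          // ~1.157920892373162e+77 (precision lost)
console.log(JSONbigNative.parse(eventInfo).amount); // 115792...639935n (exact BigInt)

// stringify round-trips the exact digits, so eventInfo/extraInfo/proof stay lossless.
console.log(JSONbigNative.stringify(JSONbigNative.parse(eventInfo)));
```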

View File

@@ -10,13 +10,13 @@
  checkpointInterval = 2000
  # IPFS API address (can be taken from the output on running the IPFS daemon).
-  ipfsApiAddr = "/ip4/127.0.0.1/tcp/5001"
+  # ipfsApiAddr = "/ip4/127.0.0.1/tcp/5001"
  subgraphPath = "../graph-node/test/subgraph/eden"
  wasmRestartBlocksInterval = 20
  # Boolean to filter logs by contract.
-  filterLogs = false
+  filterLogs = true
  # Max block range for which to return events in eventsInRange GQL query.
  # Use -1 for skipping check on block range.
@@ -38,8 +38,8 @@
[upstream]
  [upstream.ethServer]
-    gqlApiEndpoint = "http://127.0.0.1:8082/graphql"
-    rpcProviderEndpoint = "http://127.0.0.1:8081"
+    gqlApiEndpoint = "http://127.0.0.1:8083/graphql"
+    rpcProviderEndpoint = "http://127.0.0.1:8082"
    blockDelayInMilliSecs = 2000
  [upstream.cache]

View File

@@ -35,6 +35,7 @@ export class Network {
  @Column('numeric', { transformer: bigintTransformer })
  totalStaked!: bigint;
-  @Column('numeric', { transformer: bigintArrayTransformer, array: true })
+  // https://github.com/brianc/node-postgres/issues/1943#issuecomment-520500053
+  @Column('varchar', { transformer: bigintArrayTransformer, array: true })
  stakedPercentiles!: bigint[];
}

View File

@@ -6,8 +6,8 @@ import { Entity, PrimaryColumn, Column } from 'typeorm';
import { bigintTransformer } from '@vulcanize/util';
enum ProducerSetChangeType {
-  Added,
-  Removed
+  Added = 'Added',
+  Removed = 'Removed'
}
@Entity()
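Giving the enum members explicit string values is the "Fix entity enum type property" item: the persisted and queried values stay `'Added'` / `'Removed'` instead of the numeric member indexes. A tiny illustration, not from the repo:

```ts
// Numeric enum members serialize as their index; string enum members keep the label
// that subgraph entities and GraphQL clients expect.
enum NumericChangeType { Added, Removed }
enum StringChangeType { Added = 'Added', Removed = 'Removed' }

console.log(JSON.stringify(NumericChangeType.Removed)); // 1
console.log(JSON.stringify(StringChangeType.Removed));  // "Removed"
```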

View File

@ -1,5 +1,5 @@
query account($id: String!, $blockHash: Bytes!){ query account($id: String!, $block: Block_height){
account(id: $id, block: { hash: $blockHash }){ account(id: $id, block: $block){
id id
totalClaimed totalClaimed
totalSlashed totalSlashed

View File

@ -1,5 +1,5 @@
query block($id: String!, $blockHash: Bytes!){ query block($id: String!, $block: Block_height){
block(id: $id, block: { hash: $blockHash }){ block(id: $id, block: $block){
id id
fromActiveProducer fromActiveProducer
hash hash

View File

@ -1,5 +1,5 @@
query claim($id: String!, $blockHash: Bytes!){ query claim($id: String!, $block: Block_height){
claim(id: $id, block: { hash: $blockHash }){ claim(id: $id, block: $block){
id id
timestamp timestamp
index index

View File

@ -1,5 +1,5 @@
query distribution($id: String!, $blockHash: Bytes!){ query distribution($id: String!, $block: Block_height){
distribution(id: $id, block: { hash: $blockHash }){ distribution(id: $id, block: $block){
id id
distributor{ distributor{
id id

View File

@ -1,5 +1,5 @@
query distributor($id: String!, $blockHash: Bytes!){ query distributor($id: String!, $block: Block_height){
distributor(id: $id, block: { hash: $blockHash }){ distributor(id: $id, block: $block){
id id
currentDistribution{ currentDistribution{
id id

View File

@ -1,5 +1,5 @@
query epoch($id: String!, $blockHash: Bytes!){ query epoch($id: String!, $block: Block_height){
epoch(id: $id, block: { hash: $blockHash }){ epoch(id: $id, block: $block){
id id
finalized finalized
epochNumber epochNumber

View File

@ -1,5 +1,5 @@
query network($id: String!, $blockHash: Bytes!){ query network($id: String!, $block: Block_height){
network(id: $id, block: { hash: $blockHash }){ network(id: $id, block: $block){
id id
slot0{ slot0{
id id

View File

@ -1,5 +1,5 @@
query producer($id: String!, $blockHash: Bytes!){ query producer($id: String!, $block: Block_height){
producer(id: $id, block: { hash: $blockHash }){ producer(id: $id, block: $block){
id id
active active
rewardCollector rewardCollector

View File

@ -1,5 +1,5 @@
query producerEpoch($id: String!, $blockHash: Bytes!){ query producerEpoch($id: String!, $block: Block_height){
producerEpoch(id: $id, block: { hash: $blockHash }){ producerEpoch(id: $id, block: $block){
id id
address address
epoch{ epoch{

View File

@ -1,5 +1,5 @@
query producerRewardCollectorChange($id: String!, $blockHash: Bytes!){ query producerRewardCollectorChange($id: String!, $block: Block_height){
producerRewardCollectorChange(id: $id, block: { hash: $blockHash }){ producerRewardCollectorChange(id: $id, block: $block){
id id
blockNumber blockNumber
producer producer

View File

@ -1,5 +1,5 @@
query producerSet($id: String!, $blockHash: Bytes!){ query producerSet($id: String!, $block: Block_height){
producerSet(id: $id, block: { hash: $blockHash }){ producerSet(id: $id, block: $block){
id id
producers{ producers{
id id

View File

@ -1,5 +1,5 @@
query producerSetChange($id: String!, $blockHash: Bytes!){ query producerSetChange($id: String!, $block: Block_height){
producerSetChange(id: $id, block: { hash: $blockHash }){ producerSetChange(id: $id, block: $block){
id id
blockNumber blockNumber
producer producer

View File

@ -1,5 +1,5 @@
query rewardSchedule($id: String!, $blockHash: Bytes!){ query rewardSchedule($id: String!, $block: Block_height){
rewardSchedule(id: $id, block: { hash: $blockHash }){ rewardSchedule(id: $id, block: $block){
id id
rewardScheduleEntries{ rewardScheduleEntries{
id id

View File

@ -1,5 +1,5 @@
query rewardScheduleEntry($id: String!, $blockHash: Bytes!){ query rewardScheduleEntry($id: String!, $block: Block_height){
rewardScheduleEntry(id: $id, block: { hash: $blockHash }){ rewardScheduleEntry(id: $id, block: $block){
id id
startTime startTime
epochDuration epochDuration

View File

@ -1,5 +1,5 @@
query slash($id: String!, $blockHash: Bytes!){ query slash($id: String!, $block: Block_height){
slash(id: $id, block: { hash: $blockHash }){ slash(id: $id, block: $block){
id id
timestamp timestamp
account{ account{

View File

@ -1,5 +1,5 @@
query slot($id: String!, $blockHash: Bytes!){ query slot($id: String!, $block: Block_height){
slot(id: $id, block: { hash: $blockHash }){ slot(id: $id, block: $block){
id id
owner owner
delegate delegate

View File

@ -1,5 +1,5 @@
query slotClaim($id: String!, $blockHash: Bytes!){ query slotClaim($id: String!, $block: Block_height){
slotClaim(id: $id, block: { hash: $blockHash }){ slotClaim(id: $id, block: $block){
id id
slot{ slot{
id id

View File

@ -1,5 +1,5 @@
query staker($id: String!, $blockHash: Bytes!){ query staker($id: String!, $block: Block_height){
staker(id: $id, block: { hash: $blockHash }){ staker(id: $id, block: $block){
id id
staked staked
rank rank
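All of the entity queries above switch from a bare `$blockHash: Bytes!` to the Graph-style `$block: Block_height` input, which accepts either a block hash or a block number. A hypothetical set of variables for the updated `account` query (ids and hashes are placeholders):

```ts
// Either variable set works against the same query document; getResult is the
// compare-CLI Client method shown further down in this commit.
const id = '0x3fdaedaf9ceb0d2ee50389e09c8e2f66ddf63908'; // illustrative entity id

const byHash = {
  id,
  block: { hash: '0x48e81c...' } // illustrative, truncated block hash
};

const byNumber = {
  id,
  block: { number: 12345 }
};

console.log(byHash, byNumber);
// e.g. await client.getResult('account', byHash);
//      await client.getResult('account', byNumber);
```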

View File

@@ -2,7 +2,10 @@
// Copyright 2021 Vulcanize, Inc.
//
+import { IPLDBlockInterface, StateKind } from '@vulcanize/util';
import assert from 'assert';
+import * as codec from '@ipld/dag-cbor';
+import _ from 'lodash';
import { Indexer, ResultEvent } from './indexer';
@@ -51,9 +54,60 @@ export async function createStateCheckpoint (indexer: Indexer, contractAddress:
  assert(blockHash);
  assert(contractAddress);
-  // Use indexer.createStateCheckpoint() method to create a custom checkpoint.
+  // TODO: Pass blockProgress instead of blockHash to hook method.
+  const block = await indexer.getBlockProgress(blockHash);
+  assert(block);
-  return false;
+  // Fetch the latest 'checkpoint' | 'init' for the contract to fetch diffs after it.
+  let prevNonDiffBlock: IPLDBlockInterface;
+  let getDiffBlockNumber: number;
+  const checkpointBlock = await indexer.getLatestIPLDBlock(contractAddress, StateKind.Checkpoint, block.blockNumber);
+  if (checkpointBlock) {
+    const checkpointBlockNumber = checkpointBlock.block.blockNumber;
+    prevNonDiffBlock = checkpointBlock;
+    getDiffBlockNumber = checkpointBlockNumber;
+    // Update IPLD status map with the latest checkpoint info.
+    // Essential while importing state as checkpoint at the snapshot block is added by import-state CLI.
+    // (job-runner won't have the updated ipld status)
+    indexer.updateIPLDStatusMap(contractAddress, { checkpoint: checkpointBlockNumber });
+  } else {
+    // There should be an initial state at least.
+    const initBlock = await indexer.getLatestIPLDBlock(contractAddress, StateKind.Init);
+    assert(initBlock, 'No initial state found');
+    prevNonDiffBlock = initBlock;
+    // Take block number previous to initial state block to include any diff state at that block.
+    getDiffBlockNumber = initBlock.block.blockNumber - 1;
+  }
+  // Fetching all diff blocks after the latest 'checkpoint' | 'init'.
+  const diffBlocks = await indexer.getDiffIPLDBlocksByBlocknumber(contractAddress, getDiffBlockNumber);
+  const prevNonDiffBlockData = codec.decode(Buffer.from(prevNonDiffBlock.data)) as any;
+  const data = {
+    state: prevNonDiffBlockData.state
+  };
+  // Merge all diff blocks after previous checkpoint.
+  for (const diffBlock of diffBlocks) {
+    const diff = codec.decode(Buffer.from(diffBlock.data)) as any;
+    data.state = _.merge(data.state, diff.state);
+  }
+  // Check if Block entity exists.
+  if (data.state.Block) {
+    // Store only block entity at checkpoint height instead of all entities.
+    data.state.Block = {
+      [blockHash]: data.state.Block[blockHash]
+    };
+  }
+  await indexer.createStateCheckpoint(contractAddress, blockHash, data);
+  return true;
}
/**
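A toy illustration, not from the repo, of what the loop above does: decoded diff states are folded into the previous checkpoint/init state with `_.merge`, and the `Block` entity is then trimmed to the single entry at the checkpoint block hash, per the "single Block entity in IPLD checkpoint" item.

```ts
import _ from 'lodash';

type EntityState = Record<string, Record<string, any>>;

// Hypothetical decoded states: one init/checkpoint state plus two later diffs.
const initState: EntityState = { Block: { '0xaaa': { id: '0xaaa' } }, Producer: { p1: { active: true } } };
const diff1: EntityState = { Block: { '0xbbb': { id: '0xbbb' } }, Producer: { p1: { active: false } } };
const diff2: EntityState = { Block: { '0xccc': { id: '0xccc' } }, Producer: { p2: { active: true } } };

// Fold each later diff into the base state; later values win on conflicts.
let state: EntityState = _.merge({}, initState);
for (const diff of [diff1, diff2]) {
  state = _.merge(state, diff);
}

// Keep only the Block entity at the checkpoint block hash ('0xccc' here).
const checkpointBlockHash = '0xccc';
state.Block = { [checkpointBlockHash]: state.Block[checkpointBlockHash] };

console.log(Object.keys(state.Block)); // [ '0xccc' ]
console.log(state.Producer);           // { p1: { active: false }, p2: { active: true } }
```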

View File

@@ -57,6 +57,7 @@ import { Account } from './entity/Account';
import { Slash } from './entity/Slash';
const log = debug('vulcanize:indexer');
+const JSONbigNative = JSONbig({ useNativeBigInt: true });
const KIND_EDENNETWORK = 'EdenNetwork';
const KIND_MERKLEDISTRIBUTOR = 'EdenNetworkDistribution';
@@ -176,8 +177,8 @@ export class Indexer implements IPLDIndexerInterface {
  getResultEvent (event: Event): ResultEvent {
    const block = event.block;
-    const eventFields = JSONbig.parse(event.eventInfo);
-    const { tx, eventSignature } = JSON.parse(event.extraInfo);
+    const eventFields = JSONbigNative.parse(event.eventInfo);
+    const { tx, eventSignature } = JSONbigNative.parse(event.extraInfo);
    return {
      block: {
@@ -287,6 +288,10 @@ export class Indexer implements IPLDIndexerInterface {
    return this._baseIndexer.getIPLDBlockByCid(cid);
  }
+  async getDiffIPLDBlocksByBlocknumber (contractAddress: string, blockNumber: number): Promise<IPLDBlock[]> {
+    return this._db.getDiffIPLDBlocksByBlocknumber(contractAddress, blockNumber);
+  }
  getIPLDData (ipldBlock: IPLDBlock): any {
    return this._baseIndexer.getIPLDData(ipldBlock);
  }
@@ -370,13 +375,15 @@ export class Indexer implements IPLDIndexerInterface {
    await this._baseIndexer.createInit(this, blockHash, blockNumber);
    console.timeEnd('time:indexer#processBlock-init_state');
+  }
-    console.time('time:indexer#processBlock-mapping_code');
+  async processBlockAfterEvents (blockHash: string): Promise<void> {
+    console.time('time:indexer#processBlockAfterEvents-mapping_code');
    // Call subgraph handler for block.
    await this._graphWatcher.handleBlock(blockHash);
-    console.timeEnd('time:indexer#processBlock-mapping_code');
+    console.timeEnd('time:indexer#processBlockAfterEvents-mapping_code');
  }
  parseEventNameAndArgs (kind: string, logObj: any): any {
@@ -1013,10 +1020,10 @@ export class Indexer implements IPLDIndexerInterface {
      txHash,
      contract,
      eventName,
-      eventInfo: JSONbig.stringify(eventInfo),
-      extraInfo: JSONbig.stringify(extraInfo),
-      proof: JSONbig.stringify({
-        data: JSONbig.stringify({
+      eventInfo: JSONbigNative.stringify(eventInfo),
+      extraInfo: JSONbigNative.stringify(extraInfo),
+      proof: JSONbigNative.stringify({
+        data: JSONbigNative.stringify({
          blockHash,
          receiptCID,
          log: {

View File

@@ -23,6 +23,7 @@ import { BlockProgress } from './entity/BlockProgress';
import { Contract } from './entity/Contract';
const log = debug('vulcanize:indexer');
+const JSONbigNative = JSONbig({ useNativeBigInt: true });
const ETH_CALL_MODE = 'eth_call';
@@ -108,7 +109,7 @@ export class Indexer implements IndexerInterface {
    }
    // https://github.com/GoogleChromeLabs/jsbi/issues/30#issuecomment-521460510
-    log(JSONbig.stringify(result, null, 2));
+    log(JSONbigNative.stringify(result, null, 2));
    return result;
  }
@@ -142,10 +143,10 @@ export class Indexer implements IndexerInterface {
      result = await this._baseIndexer.getStorageValue(this._storageLayout, blockHash, token, '_balances', owner);
    }
-    log(JSONbig.stringify(result, null, 2));
+    log(JSONbigNative.stringify(result, null, 2));
    const { value, proof } = result;
-    await this._db.saveBalance({ blockHash, blockNumber, token, owner, value: BigInt(value), proof: JSONbig.stringify(proof) });
+    await this._db.saveBalance({ blockHash, blockNumber, token, owner, value: BigInt(value), proof: JSONbigNative.stringify(proof) });
    return result;
  }
@@ -180,7 +181,7 @@ export class Indexer implements IndexerInterface {
    // log(JSONbig.stringify(result, null, 2));
    const { value, proof } = result;
-    await this._db.saveAllowance({ blockHash, blockNumber, token, owner, spender, value: BigInt(value), proof: JSONbig.stringify(proof) });
+    await this._db.saveAllowance({ blockHash, blockNumber, token, owner, spender, value: BigInt(value), proof: JSONbigNative.stringify(proof) });
    return result;
  }
@@ -406,10 +407,10 @@ export class Indexer implements IndexerInterface {
      txHash,
      contract,
      eventName,
-      eventInfo: JSONbig.stringify(eventInfo),
-      extraInfo: JSONbig.stringify(extraInfo),
-      proof: JSONbig.stringify({
-        data: JSONbig.stringify({
+      eventInfo: JSONbigNative.stringify(eventInfo),
+      extraInfo: JSONbigNative.stringify(extraInfo),
+      proof: JSONbigNative.stringify({
+        data: JSONbigNative.stringify({
          blockHash,
          receiptCID,
          log: {

View File

@@ -42,6 +42,7 @@ import { IPLDBlock } from './entity/IPLDBlock';
import { TransferCount } from './entity/TransferCount';
const log = debug('vulcanize:indexer');
+const JSONbigNative = JSONbig({ useNativeBigInt: true });
const KIND_ERC721 = 'ERC721';
@@ -134,8 +135,8 @@ export class Indexer implements IPLDIndexerInterface {
  getResultEvent (event: Event): ResultEvent {
    const block = event.block;
-    const eventFields = JSONbig.parse(event.eventInfo);
-    const { tx, eventSignature } = JSON.parse(event.extraInfo);
+    const eventFields = JSONbigNative.parse(event.eventInfo);
+    const { tx, eventSignature } = JSONbigNative.parse(event.extraInfo);
    return {
      block: {
@@ -211,7 +212,7 @@ export class Indexer implements IPLDIndexerInterface {
    const result: ValueResult = { value };
-    await this._db.saveSupportsInterface({ blockHash, blockNumber, contractAddress, interfaceId, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db.saveSupportsInterface({ blockHash, blockNumber, contractAddress, interfaceId, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    return result;
  }
@@ -242,7 +243,7 @@ export class Indexer implements IPLDIndexerInterface {
    const result: ValueResult = { value };
-    await this._db.saveBalanceOf({ blockHash, blockNumber, contractAddress, owner, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db.saveBalanceOf({ blockHash, blockNumber, contractAddress, owner, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    return result;
  }
@@ -271,7 +272,7 @@ export class Indexer implements IPLDIndexerInterface {
    const result: ValueResult = { value };
-    await this._db.saveOwnerOf({ blockHash, blockNumber, contractAddress, tokenId, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db.saveOwnerOf({ blockHash, blockNumber, contractAddress, tokenId, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    return result;
  }
@@ -300,7 +301,7 @@ export class Indexer implements IPLDIndexerInterface {
    const result: ValueResult = { value };
-    await this._db.saveGetApproved({ blockHash, blockNumber, contractAddress, tokenId, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db.saveGetApproved({ blockHash, blockNumber, contractAddress, tokenId, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    return result;
  }
@@ -329,7 +330,7 @@ export class Indexer implements IPLDIndexerInterface {
    const result: ValueResult = { value };
-    await this._db.saveIsApprovedForAll({ blockHash, blockNumber, contractAddress, owner, operator, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db.saveIsApprovedForAll({ blockHash, blockNumber, contractAddress, owner, operator, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    return result;
  }
@@ -358,7 +359,7 @@ export class Indexer implements IPLDIndexerInterface {
    const result: ValueResult = { value };
-    await this._db.saveName({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db.saveName({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    return result;
  }
@@ -387,7 +388,7 @@ export class Indexer implements IPLDIndexerInterface {
    const result: ValueResult = { value };
-    await this._db.saveSymbol({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db.saveSymbol({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    return result;
  }
@@ -416,7 +417,7 @@ export class Indexer implements IPLDIndexerInterface {
    const result: ValueResult = { value };
-    await this._db.saveTokenURI({ blockHash, blockNumber, contractAddress, tokenId, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db.saveTokenURI({ blockHash, blockNumber, contractAddress, tokenId, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    return result;
  }
@@ -477,7 +478,7 @@ export class Indexer implements IPLDIndexerInterface {
      '_name'
    );
-    await this._db._saveName({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db._saveName({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    if (diff) {
      const stateUpdate = updateStateForElementaryType({}, '_name', result.value.toString());
@@ -513,7 +514,7 @@ export class Indexer implements IPLDIndexerInterface {
      '_symbol'
    );
-    await this._db._saveSymbol({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db._saveSymbol({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    if (diff) {
      const stateUpdate = updateStateForElementaryType({}, '_symbol', result.value.toString());
@@ -550,7 +551,7 @@ export class Indexer implements IPLDIndexerInterface {
      key0
    );
-    await this._db._saveOwners({ blockHash, blockNumber, contractAddress, key0, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db._saveOwners({ blockHash, blockNumber, contractAddress, key0, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    if (diff) {
      const stateUpdate = updateStateForMappingType({}, '_owners', [key0.toString()], result.value.toString());
@@ -587,7 +588,7 @@ export class Indexer implements IPLDIndexerInterface {
      key0
    );
-    await this._db._saveBalances({ blockHash, blockNumber, contractAddress, key0, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db._saveBalances({ blockHash, blockNumber, contractAddress, key0, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    if (diff) {
      const stateUpdate = updateStateForMappingType({}, '_balances', [key0.toString()], result.value.toString());
@@ -624,7 +625,7 @@ export class Indexer implements IPLDIndexerInterface {
      key0
    );
-    await this._db._saveTokenApprovals({ blockHash, blockNumber, contractAddress, key0, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db._saveTokenApprovals({ blockHash, blockNumber, contractAddress, key0, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    if (diff) {
      const stateUpdate = updateStateForMappingType({}, '_tokenApprovals', [key0.toString()], result.value.toString());
@@ -662,7 +663,7 @@ export class Indexer implements IPLDIndexerInterface {
      key1
    );
-    await this._db._saveOperatorApprovals({ blockHash, blockNumber, contractAddress, key0, key1, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db._saveOperatorApprovals({ blockHash, blockNumber, contractAddress, key0, key1, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    if (diff) {
      const stateUpdate = updateStateForMappingType({}, '_operatorApprovals', [key0.toString(), key1.toString()], result.value.toString());
@@ -1057,10 +1058,10 @@ export class Indexer implements IPLDIndexerInterface {
      txHash,
      contract,
      eventName,
-      eventInfo: JSONbig.stringify(eventInfo),
-      extraInfo: JSONbig.stringify(extraInfo),
-      proof: JSONbig.stringify({
-        data: JSONbig.stringify({
+      eventInfo: JSONbigNative.stringify(eventInfo),
+      extraInfo: JSONbigNative.stringify(extraInfo),
+      proof: JSONbigNative.stringify({
+        data: JSONbigNative.stringify({
          blockHash,
          receiptCID,
          log: {

View File

@@ -55,3 +55,46 @@
  ```
* The program will exit with code `1` if the query results are not equal.
+* For comparing queries in a range of blocks:
+  * Config file should have the names of queries to be fired.
+    ```toml
+    [queries]
+    queryDir = "../graph-test-watcher/src/gql/queries"
+    names = [
+      "author",
+      "blog"
+    ]
+    ```
+  * Run the CLI:
+    ```bash
+    ./bin/compare-blocks --config-file environments/compare-cli-config.toml --start-block 1 --end-block 10
+    ```
+* For comparing entities after fetching ids from one of the endpoints and then querying individually by ids:
+  * Set the `idsEndpoint` to choose which endpoint the ids should be fetched from.
+    ```toml
+    [endpoints]
+    gqlEndpoint1 = "http://localhost:8000/subgraphs/name/example1"
+    gqlEndpoint2 = "http://localhost:3008/graphql"
+
+    [queries]
+    queryDir = "../graph-test-watcher/src/gql/queries"
+    names = [
+      "author",
+      "blog"
+    ]
+    idsEndpoint = "gqlEndpoint1"
+    ```
+  * Run the CLI with `fetch-ids` flag set to true:
+    ```bash
+    ./bin/compare-blocks --config-file environments/compare-cli-config.toml --start-block 1 --end-block 10 --fetch-ids
+    ```

View File

@@ -5,6 +5,7 @@
[queries]
  queryDir = "../graph-test-watcher/src/gql/queries"
  names = []
+  idsEndpoint = "gqlEndpoint1"
[cache]
  endpoint = "gqlEndpoint1"

View File

@@ -10,6 +10,7 @@
    "@types/chai": "^4.2.18",
    "@types/chai-spies": "^1.0.3",
    "@types/js-yaml": "^4.0.4",
+    "@types/pluralize": "^0.0.29",
    "@typescript-eslint/eslint-plugin": "^4.25.0",
    "@typescript-eslint/parser": "^4.25.0",
    "chai": "^4.3.4",
@@ -57,6 +58,7 @@
    "js-yaml": "^4.1.0",
    "json-bigint": "^1.0.0",
    "json-diff": "^0.5.4",
+    "pluralize": "^8.0.0",
    "reflect-metadata": "^0.1.13",
    "toml": "^3.0.0",
    "typeorm": "^0.2.32",

View File

@@ -5,6 +5,7 @@
import assert from 'assert';
import fs from 'fs';
import path from 'path';
+import pluralize from 'pluralize';
import { gql } from '@apollo/client/core';
import { GraphQLClient, Config } from '@vulcanize/ipld-eth-client';
@@ -15,6 +16,7 @@ export class Client {
  _graphqlClient: GraphQLClient;
  _queryDir: string;
  _cache: Cache | undefined;
+  _endpoint: string;
  constructor (config: Config, queryDir: string) {
    this._config = config;
@@ -22,16 +24,56 @@ export class Client {
    const { gqlEndpoint, cache } = config;
    assert(gqlEndpoint, 'Missing gql endpoint');
+    this._endpoint = gqlEndpoint;
    this._graphqlClient = new GraphQLClient(config);
    this._cache = cache;
  }
+  get endpoint () {
+    return this._endpoint;
+  }
  async getResult (queryName: string, params: { [key: string]: any }): Promise<any> {
    return this._getCachedOrFetch(queryName, params);
  }
+  async getIds (queryName: string, blockNumber: number): Promise<string[]> {
+    const keyObj = { queryName, blockNumber };
+    if (this._cache) {
+      const [value, found] = await this._cache.get(keyObj) || [undefined, false];
+      if (found) {
+        return value;
+      }
+    }
+    const result = await this._graphqlClient.query(
+      gql(
+        `query($blockNumber: Int){
+          ${pluralize(queryName)}(
+            block: { number: $blockNumber }
+          ) {
+            id
+          }
+        }`
+      ),
+      {
+        blockNumber
+      }
+    );
+    const ids = result[pluralize(queryName)].map((entity: { id: string }) => entity.id);
+    // Cache the result and return it, if cache is enabled.
+    if (this._cache) {
+      await this._cache.put(keyObj, ids);
+    }
+    return ids;
+  }
  async _getCachedOrFetch (queryName: string, params: {[key: string]: any}): Promise<any> {
    const keyObj = {
      queryName,
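`getIds` assumes the subgraph exposes collection fields under the pluralized entity query name, which is what the new `pluralize` dependency provides. A quick check of that assumption:

```ts
import pluralize from 'pluralize';

console.log(pluralize('author')); // 'authors'
console.log(pluralize('blog'));   // 'blogs'

// So for queryName 'blog' the generated ids query is effectively:
// query($blockNumber: Int){ blogs(block: { number: $blockNumber }) { id } }
```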

View File

@@ -5,8 +5,10 @@
import yargs from 'yargs';
import 'reflect-metadata';
import debug from 'debug';
+import assert from 'assert';
import { compareQuery, Config, getClients, getConfig } from './utils';
+import { Client } from './client';
const log = debug('vulcanize:compare-blocks');
@@ -40,12 +42,17 @@ export const main = async (): Promise<void> => {
      type: 'boolean',
      describe: 'Whether to print out raw diff object',
      default: false
+    },
+    fetchIds: {
+      type: 'boolean',
+      describe: 'Fetch ids and compare multiple entities',
+      default: false
    }
  }).argv;
  const config: Config = await getConfig(argv.configFile);
-  const { startBlock, endBlock, rawJson, queryDir } = argv;
+  const { startBlock, endBlock, rawJson, queryDir, fetchIds } = argv;
  const queryNames = config.queries.names;
  let diffFound = false;
@@ -58,13 +65,27 @@ export const main = async (): Promise<void> => {
    for (const queryName of queryNames) {
      try {
        log(`At block ${blockNumber} for query ${queryName}:`);
-        const resultDiff = await compareQuery(clients, queryName, { block }, rawJson);
-        if (resultDiff) {
-          diffFound = true;
-          log('Results mismatch:', resultDiff);
+        if (fetchIds) {
+          const { idsEndpoint } = config.queries;
+          assert(idsEndpoint, 'Specify endpoint for fetching ids when fetchId is true');
+          const client = Object.values(clients).find(client => client.endpoint === config.endpoints[idsEndpoint]);
+          assert(client);
+          const ids = await client.getIds(queryName, blockNumber);
+          for (const id of ids) {
+            const isDiff = await compareAndLog(clients, queryName, { block, id }, rawJson);
+            if (isDiff) {
+              diffFound = isDiff;
+            }
+          }
        } else {
-          log('Results match.');
+          const isDiff = await compareAndLog(clients, queryName, { block }, rawJson);
+          if (isDiff) {
+            diffFound = isDiff;
+          }
        }
      } catch (err: any) {
        log('Error:', err.message);
@@ -78,3 +99,25 @@ export const main = async (): Promise<void> => {
    process.exit(1);
  }
};
+const compareAndLog = async (
+  clients: { client1: Client, client2: Client },
+  queryName: string,
+  params: { [key: string]: any },
+  rawJson: boolean
+): Promise<boolean> => {
+  const resultDiff = await compareQuery(
+    clients,
+    queryName,
+    params,
+    rawJson
+  );
+  if (resultDiff) {
+    log('Results mismatch:', resultDiff);
+    return true;
+  }
+  log('Results match.');
+  return false;
+};

View File

@@ -54,11 +54,10 @@ export const main = async (): Promise<void> => {
  const queryName = argv.queryName;
  const id = argv.entityId;
-  const blockHash = argv.blockHash;
  const block = {
    number: argv.blockNumber,
-    hash: blockHash
+    hash: argv.blockHash
  };
  const clients = await getClients(config, argv.queryDir);

View File

@@ -21,13 +21,14 @@ interface EndpointConfig {
interface QueryConfig {
  queryDir: string;
  names: string[];
+  idsEndpoint: keyof EndpointConfig;
}
export interface Config {
  endpoints: EndpointConfig;
  queries: QueryConfig;
  cache: {
-    endpoint: string;
+    endpoint: keyof EndpointConfig;
    config: CacheConfig;
  }
}

View File

@@ -17,6 +17,8 @@ import {
import { Block, fromEntityValue, toEntityValue } from './utils';
+const DEFAULT_LIMIT = 100;
export class Database {
  _config: ConnectionOptions
  _conn!: Connection
@@ -123,7 +125,8 @@
      if (isArray) {
        selectQueryBuilder = selectQueryBuilder.distinctOn(['entity.id'])
-          .orderBy('entity.id');
+          .orderBy('entity.id')
+          .limit(DEFAULT_LIMIT);
      } else {
        selectQueryBuilder = selectQueryBuilder.limit(1);
      }
@@ -132,7 +135,8 @@
      // For one to many relational field.
      selectQueryBuilder = selectQueryBuilder.where('entity.id IN (:...ids)', { ids: entityData[field] })
        .distinctOn(['entity.id'])
-        .orderBy('entity.id');
+        .orderBy('entity.id')
+        .limit(DEFAULT_LIMIT);
      // Subquery example if distinctOn is not performant.
      //

View File

@@ -29,6 +29,8 @@ import {
} from './utils';
import { Database } from './database';
+const JSONbigString = JSONbig({ storeAsString: true });
// Endianness of BN used in bigInt store host API.
// Negative bigInt is being stored in wasm in 2's compliment, 'le' representation.
// (for eg. bigInt.fromString(negativeI32Value))
@@ -104,7 +106,12 @@
        // JSON stringify and parse data for handling unknown types when encoding.
        // For example, decimal.js values are converted to string in the diff data.
-        diffData.state[entityName] = JSONbig.parse(JSONbig.stringify(dbData));
+        diffData.state[entityName] = {
+          // Using JSONbigString to store bigints as string values to be encoded by IPLD dag-cbor.
+          // TODO: Parse and store as native bigint by using Type encoders in IPLD dag-cbor encode.
+          // https://github.com/rvagg/cborg#type-encoders
+          [dbData.id]: JSONbigString.parse(JSONbigString.stringify(dbData))
+        };
        // Create an auto-diff.
        assert(indexer.createDiffStaged);
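Since no type encoder is wired up for JS `BigInt` in the dag-cbor encode step yet (hence the TODO above), the diff round-trips entity data through json-bigint configured with `storeAsString`, so bigint fields reach the encoder as plain strings, and entities are keyed by `id` per the "store subgraph entities by id" item. A standalone sketch, assuming json-bigint@1.x behaviour:

```ts
import JSONbig from 'json-bigint';

const JSONbigString = JSONbig({ storeAsString: true });

// Hypothetical entity row loaded from the watcher database.
const dbData = { id: '0x01', totalStaked: BigInt('123456789012345678901234567890') };

// stringify emits the exact digits; parse with storeAsString keeps big values as strings.
const encodable = JSONbigString.parse(JSONbigString.stringify(dbData));
console.log(encodable);
// roughly: { id: '0x01', totalStaked: '123456789012345678901234567890' }

// Keyed by id, as the diff state above stores it:
const stateEntry = { [dbData.id]: encodable };
console.log(stateEntry);
```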

View File

@@ -123,7 +123,7 @@ export class GraphWatcher {
  async handleEvent (eventData: any) {
    const { contract, event, eventSignature, block, tx: { hash: txHash }, eventIndex } = eventData;
-    if (!this._context.block) {
+    if (!this._context.block || this._context.block.blockHash !== block.hash) {
      this._context.block = await getFullBlock(this._ethClient, this._ethProvider, block.hash);
    }

View File

@@ -46,6 +46,7 @@ import { Blog } from './entity/Blog';
import { Category } from './entity/Category';
const log = debug('vulcanize:indexer');
+const JSONbigNative = JSONbig({ useNativeBigInt: true });
const KIND_EXAMPLE1 = 'Example1';
const KIND_FACTORY = 'Factory';
@@ -167,8 +168,8 @@ export class Indexer implements IPLDIndexerInterface {
  getResultEvent (event: Event): ResultEvent {
    const block = event.block;
-    const eventFields = JSONbig.parse(event.eventInfo);
-    const { tx, eventSignature } = JSON.parse(event.extraInfo);
+    const eventFields = JSONbigNative.parse(event.eventInfo);
+    const { tx, eventSignature } = JSONbigNative.parse(event.extraInfo);
    return {
      block: {
@@ -244,7 +245,7 @@ export class Indexer implements IPLDIndexerInterface {
    const result: ValueResult = { value };
-    await this._db.saveGetMethod({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db.saveGetMethod({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    return result;
  }
@@ -275,7 +276,7 @@ export class Indexer implements IPLDIndexerInterface {
      '_test'
    );
-    await this._db._saveTest({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbig.stringify(result.proof) });
+    await this._db._saveTest({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbigNative.stringify(result.proof) });
    if (diff) {
      const stateUpdate = updateStateForElementaryType({}, '_test', result.value.toString());
@@ -406,7 +407,9 @@ export class Indexer implements IPLDIndexerInterface {
  async processBlock (blockHash: string, blockNumber: number): Promise<void> {
    // Call a function to create initial state for contracts.
    await this._baseIndexer.createInit(this, blockHash, blockNumber);
+  }
+  async processBlockAfterEvents (blockHash: string): Promise<void> {
    // Call subgraph handler for block.
    await this._graphWatcher.handleBlock(blockHash);
  }
@@ -749,10 +752,10 @@ export class Indexer implements IPLDIndexerInterface {
      txHash,
      contract,
      eventName,
-      eventInfo: JSONbig.stringify(eventInfo),
-      extraInfo: JSONbig.stringify(extraInfo),
-      proof: JSONbig.stringify({
-        data: JSONbig.stringify({
+      eventInfo: JSONbigNative.stringify(eventInfo),
+      extraInfo: JSONbigNative.stringify(extraInfo),
+      proof: JSONbigNative.stringify({
+        data: JSONbigNative.stringify({
          blockHash,
          receiptCID,
          log: {

View File

@@ -15,6 +15,7 @@ import lighthouseABI from './abi/Lighthouse.json';
export const UNKNOWN_EVENT_NAME = '__unknown__';
const log = debug('vulcanize:indexer');
+const JSONbigNative = JSONbig({ useNativeBigInt: true });
export type ResultEvent = {
  block: any;
@@ -160,7 +161,7 @@ export class Indexer {
        ...eventInfo
      },
      proof: {
-        data: JSONbig.stringify({
+        data: JSONbigNative.stringify({
          blockHash,
          receiptCID,
          log: {

View File

@@ -47,6 +47,7 @@ import { _Owner } from './entity/_Owner';
import { MultiNonce } from './entity/MultiNonce';
const log = debug('vulcanize:indexer');
+const JSONbigNative = JSONbig({ useNativeBigInt: true });
export const KIND_PHISHERREGISTRY = 'PhisherRegistry';
@@ -139,8 +140,8 @@ export class Indexer implements IPLDIndexerInterface {
  getResultEvent (event: Event): ResultEvent {
    const block = event.block;
-    const eventFields = JSONbig.parse(event.eventInfo);
-    const { tx, eventSignature } = JSON.parse(event.extraInfo);
+    const eventFields = JSONbigNative.parse(event.eventInfo);
+    const { tx, eventSignature } = JSONbigNative.parse(event.extraInfo);
    return {
      block: {
@@ -395,7 +396,7 @@ export class Indexer implements IPLDIndexerInterface {
      contractAddress,
      ...mappingKeys,
      value: result.value,
-      proof: result.proof ? JSONbig.stringify(result.proof) : null
+      proof: result.proof ? JSONbigNative.stringify(result.proof) : null
    } as any;
  }
@@ -801,10 +802,10 @@ export class Indexer implements IPLDIndexerInterface {
      txHash,
      contract,
      eventName,
-      eventInfo: JSONbig.stringify(eventInfo),
-      extraInfo: JSONbig.stringify(extraInfo),
-      proof: JSONbig.stringify({
-        data: JSONbig.stringify({
+      eventInfo: JSONbigNative.stringify(eventInfo),
+      extraInfo: JSONbigNative.stringify(extraInfo),
+      proof: JSONbigNative.stringify({
+        data: JSONbigNative.stringify({
          blockHash,
          receiptCID,
          log: {

View File

@@ -1073,10 +1073,10 @@ describe('Get value from storage', () => {
    before(async () => {
      ({ contract: testNestedArrays, storageLayout } = contracts.TestNestedArrays);
-      const transactions = [];
+      const transactions: Array<ContractTransaction> = [];
      const addresses = generateDummyAddresses(7);
-      const transactionPromises = [];
+      const transactionPromises: Promise<ContractTransaction>[] = [];
      // Set value for nestedStructArray.
      for (let i = 0; i < 5; i++) {

View File

@@ -36,6 +36,7 @@ import { Tick } from './entity/Tick';
const SYNC_DELTA = 5;
const log = debug('vulcanize:indexer');
+const JSONbigNative = JSONbig({ useNativeBigInt: true });
export { OrderDirection, BlockHeight };
@@ -69,8 +70,8 @@ export class Indexer implements IndexerInterface {
  getResultEvent (event: Event): ResultEvent {
    const block = event.block;
-    const eventFields = JSON.parse(event.eventInfo);
-    const { tx, eventIndex } = JSON.parse(event.extraInfo);
+    const eventFields = JSONbigNative.parse(event.eventInfo);
+    const { tx, eventIndex } = JSONbigNative.parse(event.extraInfo);
    return {
      block: {
@@ -394,9 +395,9 @@ export class Indexer implements IndexerInterface {
      txHash: tx.hash,
      contract,
      eventName,
-      eventInfo: JSONbig.stringify(eventInfo),
-      extraInfo: JSONbig.stringify(extraInfo),
-      proof: JSONbig.stringify(proof)
+      eventInfo: JSONbigNative.stringify(eventInfo),
+      extraInfo: JSONbigNative.stringify(extraInfo),
+      proof: JSONbigNative.stringify(proof)
    });
  }

View File

@@ -22,6 +22,7 @@ import { abi as nfpmABI, storageLayout as nfpmStorageLayout } from './artifacts/
import poolABI from './artifacts/pool.json';
const log = debug('vulcanize:indexer');
+const JSONbigNative = JSONbig({ useNativeBigInt: true });
type ResultEvent = {
  block: any;
@@ -68,8 +69,8 @@ export class Indexer implements IndexerInterface {
  getResultEvent (event: Event): ResultEvent {
    const block = event.block;
-    const eventFields = JSON.parse(event.eventInfo);
-    const { tx } = JSON.parse(event.extraInfo);
+    const eventFields = JSONbigNative.parse(event.eventInfo);
+    const { tx } = JSONbigNative.parse(event.extraInfo);
    return {
      block: {
@@ -521,10 +522,10 @@ export class Indexer implements IndexerInterface {
      txHash,
      contract,
      eventName,
-      eventInfo: JSONbig.stringify(eventInfo),
-      extraInfo: JSONbig.stringify(extraInfo),
-      proof: JSONbig.stringify({
-        data: JSONbig.stringify({
+      eventInfo: JSONbigNative.stringify(eventInfo),
+      extraInfo: JSONbigNative.stringify(extraInfo),
+      proof: JSONbigNative.stringify({
+        data: JSONbigNative.stringify({
          blockHash,
          receiptCID,
          log: {

View File

@@ -191,4 +191,8 @@ export const processBatchEvents = async (indexer: IndexerInterface, block: Block
    console.timeEnd('time:common#processBacthEvents-processing_events_batch');
  }
+  if (indexer.processBlockAfterEvents) {
+    await indexer.processBlockAfterEvents(block.blockHash);
+  }
};
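This is the hook behind the "process block after events" item: the subgraph block handler now runs once all events of the block have been handled, instead of inside `processBlock`. A sketch with hypothetical names of the resulting call order:

```ts
// Hypothetical, trimmed-down shape of the indexer hooks to show the ordering only.
interface IndexerWithHooks {
  processBlock?: (blockHash: string, blockNumber: number) => Promise<void>;
  processEvent: (eventId: string) => Promise<void>;
  processBlockAfterEvents?: (blockHash: string) => Promise<void>;
}

async function processBlockEvents (indexer: IndexerWithHooks, blockHash: string, blockNumber: number, eventIds: string[]): Promise<void> {
  // 1. Per-block initialization (e.g. contract init state).
  if (indexer.processBlock) {
    await indexer.processBlock(blockHash, blockNumber);
  }

  // 2. All events in the block.
  for (const eventId of eventIds) {
    await indexer.processEvent(eventId);
  }

  // 3. Subgraph block handler, now guaranteed to see entities updated by the events.
  if (indexer.processBlockAfterEvents) {
    await indexer.processBlockAfterEvents(blockHash);
  }
}
```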

View File

@@ -107,6 +107,7 @@ export interface IndexerInterface {
  processInitialState?: (contractAddress: string, blockHash: string) => Promise<any>
  processStateCheckpoint?: (contractAddress: string, blockHash: string) => Promise<boolean>
  processBlock?: (blockHash: string, blockNumber: number) => Promise<void>
+  processBlockAfterEvents?: (blockHash: string) => Promise<void>
}
export interface IPLDIndexerInterface extends IndexerInterface {

View File

@@ -2581,6 +2581,11 @@
  dependencies:
    "@types/node" "*"
+"@types/pluralize@^0.0.29":
+  version "0.0.29"
+  resolved "https://registry.yarnpkg.com/@types/pluralize/-/pluralize-0.0.29.tgz#6ffa33ed1fc8813c469b859681d09707eb40d03c"
+  integrity sha512-BYOID+l2Aco2nBik+iYS4SZX0Lf20KPILP5RGmM1IgzdwNdTs0eebiFriOPcej1sX9mLnSoiNte5zcFxssgpGA==
"@types/prettier@^2.1.1":
  version "2.2.3"
  resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.2.3.tgz#ef65165aea2924c9359205bf748865b8881753c0"
@@ -11561,6 +11566,11 @@ please-upgrade-node@^3.2.0:
  dependencies:
    semver-compare "^1.0.0"
+pluralize@^8.0.0:
+  version "8.0.0"
+  resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-8.0.0.tgz#1a6fa16a38d12a1901e0320fa017051c539ce3b1"
+  integrity sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==
posix-character-classes@^0.1.0:
  version "0.1.1"
  resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab"