Mirror of https://github.com/cerc-io/watcher-ts (synced 2025-07-31 20:12:06 +00:00)
Prefetch block and txs in historical processing instead of fetching them in events processing (#460)
* Set gzip true in ethersjs provider
* Add timer logs and use StaticJsonRpcProvider
* Fetch block data in historical processing and cache in map
* Fetch txs required for event logs in historical processing
* Process events with prefetched block and txs data in realtime processing
* Clear old TODOs
This commit is contained in:
parent c7bcd4c276
commit 695723955f
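The diff below implements this flow piece by piece. As a rough illustration of the prefetch-and-cache idea the commit message describes, here is a simplified, hypothetical TypeScript sketch (the names FullBlock, FullTransaction, Prefetcher and the fetch callbacks are illustrative only and are not the watcher-ts API):

// Minimal sketch (not the watcher-ts API): prefetch blocks and the transactions
// referenced by event logs once per batch, cache them in a map keyed by block hash,
// and let event processing read from the cache instead of refetching per event.
interface FullBlock { blockHash: string; blockNumber: number; }
interface FullTransaction { txHash: string; }
interface EventLog { blockHash: string; txHash: string; }

interface PrefetchedData {
  block: FullBlock;
  transactions: FullTransaction[];
}

class Prefetcher {
  // Cache shared between historical prefetching and event processing
  private cache = new Map<string, PrefetchedData>();

  constructor (
    private fetchBlock: (blockHash: string) => Promise<FullBlock>,
    private fetchTx: (txHash: string) => Promise<FullTransaction>
  ) {}

  // Historical processing: fetch each block and the unique txs its logs reference
  async prefetch (logs: EventLog[]): Promise<void> {
    const logsByBlock = new Map<string, EventLog[]>();
    for (const log of logs) {
      const blockLogs = logsByBlock.get(log.blockHash) ?? [];
      blockLogs.push(log);
      logsByBlock.set(log.blockHash, blockLogs);
    }

    await Promise.all(
      Array.from(logsByBlock.entries()).map(async ([blockHash, blockLogs]) => {
        const block = await this.fetchBlock(blockHash);
        // Deduplicate tx hashes so each transaction is fetched only once
        const txHashes = [...new Set(blockLogs.map(log => log.txHash))];
        const transactions = await Promise.all(txHashes.map(hash => this.fetchTx(hash)));
        this.cache.set(blockHash, { block, transactions });
      })
    );
  }

  // Event processing: read prefetched data instead of hitting the upstream server again
  get (blockHash: string): PrefetchedData | undefined {
    return this.cache.get(blockHash);
  }
}

Deduplicating transaction hashes per block mirrors the commit's goal: each block and transaction is fetched once during historical processing instead of once per event.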
@@ -129,8 +129,10 @@ export class CreateStateFromGQLCmd {
     }
 
     const blockProgress: Partial<BlockProgressInterface> = {
-      ...block,
-      blockNumber: Number(block.blockNumber)
+      cid: block.cid,
+      blockTimestamp: Number(block.timestamp),
+      blockNumber: Number(block.blockNumber),
+      blockHash: block.blockHash
     };
 
     // Get watched contracts using subgraph dataSources
@@ -54,7 +54,10 @@ export const initClients = async (config: Config): Promise<{
    });
  }
 
-  const ethProvider = getCustomProvider(rpcProviderEndpoint);
+  const ethProvider = getCustomProvider({
+    url: rpcProviderEndpoint,
+    allowGzip: true
+  });
 
   return {
     ethClient,
@@ -31,7 +31,9 @@ import {
   FILTER_CHANGE_BLOCK,
   Where,
   Filter,
-  OPERATOR_MAP
+  OPERATOR_MAP,
+  ExtraEventData,
+  EthFullTransaction
 } from '@cerc-io/util';
 
 import { Context, GraphData, instantiate } from './loader';
@@ -149,12 +151,12 @@ export class GraphWatcher {
     }
   }
 
-  async handleEvent (eventData: any) {
+  async handleEvent (eventData: any, extraData: ExtraEventData) {
     const { contract, event, eventSignature, block, tx: { hash: txHash }, eventIndex } = eventData;
 
     // Check if block data is already fetched by a previous event in the same block.
     if (!this._context.block || this._context.block.blockHash !== block.hash) {
-      this._context.block = await getFullBlock(this._ethClient, this._ethProvider, block.hash, block.number);
+      this._context.block = getFullBlock(extraData.ethFullBlock);
     }
 
     const blockData = this._context.block;
@@ -197,7 +199,7 @@ export class GraphWatcher {
 
     const eventFragment = contractInterface.getEvent(eventSignature);
 
-    const tx = await this._getTransactionData(txHash, Number(blockData.blockNumber));
+    const tx = this._getTransactionData(txHash, extraData.ethFullTransactions);
 
     const data = {
       block: blockData,
@@ -208,9 +210,13 @@
     };
 
     // Create ethereum event to be passed to the wasm event handler.
+    console.time(`time:graph-watcher#handleEvent-createEvent-block-${block.number}-event-${eventSignature}`);
     const ethereumEvent = await createEvent(instanceExports, contract, data);
+    console.timeEnd(`time:graph-watcher#handleEvent-createEvent-block-${block.number}-event-${eventSignature}`);
     try {
+      console.time(`time:graph-watcher#handleEvent-exec-${dataSource.name}-event-handler-${eventSignature}`);
       await this._handleMemoryError(instanceExports[eventHandler.handler](ethereumEvent), dataSource.name);
+      console.timeEnd(`time:graph-watcher#handleEvent-exec-${dataSource.name}-event-handler-${eventSignature}`);
     } catch (error) {
       this._clearCachedEntities();
       throw error;
@@ -237,10 +243,11 @@ export class GraphWatcher {
         continue;
       }
 
-      // Check if block data is already fetched in handleEvent method for the same block.
-      if (!this._context.block || this._context.block.blockHash !== blockHash) {
-        this._context.block = await getFullBlock(this._ethClient, this._ethProvider, blockHash, blockNumber);
-      }
+      // TODO: Use extraData full block
+      // // Check if block data is already fetched in handleEvent method for the same block.
+      // if (!this._context.block || this._context.block.blockHash !== blockHash) {
+      //   this._context.block = await getFullBlock(this._ethClient, this._ethProvider, blockHash, blockNumber);
+      // }
 
       const blockData = this._context.block;
       assert(blockData);
@@ -445,15 +452,14 @@ export class GraphWatcher {
     }
   }
 
-  async _getTransactionData (txHash: string, blockNumber: number): Promise<Transaction> {
+  _getTransactionData (txHash: string, ethFullTransactions: EthFullTransaction[]): Transaction {
     let transaction = this._transactionsMap.get(txHash);
 
     if (transaction) {
      return transaction;
     }
 
-    transaction = await getFullTransaction(this._ethClient, txHash, blockNumber);
-    assert(transaction);
+    transaction = getFullTransaction(txHash, ethFullTransactions);
     this._transactionsMap.set(txHash, transaction);
 
     return transaction;
@@ -18,7 +18,9 @@ import {
   ResultEvent,
   StateKind,
   EthClient,
-  UpstreamConfig
+  UpstreamConfig,
+  EthFullTransaction,
+  EthFullBlock
 } from '@cerc-io/util';
 import { GetStorageAt, getStorageValue, MappingKey, StorageLayout } from '@cerc-io/solidity-mapper';
 
@@ -117,7 +119,12 @@ export class Indexer implements IndexerInterface {
     return [];
   }
 
-  async fetchAndSaveFilteredEventsAndBlocks (startBlock: number, endBlock: number): Promise<{ blockProgress: BlockProgressInterface, events: DeepPartial<EventInterface>[] }[]> {
+  async fetchAndSaveFilteredEventsAndBlocks (startBlock: number, endBlock: number): Promise<{
+    blockProgress: BlockProgressInterface;
+    events: DeepPartial<EventInterface>[];
+    ethFullBlock: EthFullBlock;
+    ethFullTransactions: EthFullTransaction[];
+  }[]> {
     assert(startBlock);
     assert(endBlock);
 
@@ -132,8 +139,12 @@ export class Indexer implements IndexerInterface {
     return [];
   }
 
-  async saveBlockAndFetchEvents (block: BlockProgressInterface): Promise<[BlockProgressInterface, DeepPartial<EventInterface>[]]> {
-    return [block, []];
+  async saveBlockAndFetchEvents (block: BlockProgressInterface): Promise<[
+    BlockProgressInterface,
+    DeepPartial<EventInterface>[],
+    EthFullTransaction[]
+  ]> {
+    return [block, [], []];
   }
 
   async removeUnknownEvents (block: BlockProgressInterface): Promise<void> {
@@ -5,7 +5,7 @@
 import assert from 'assert';
 
 import { Cache } from '@cerc-io/cache';
-import { EthClient as EthClientInterface, FullTransaction } from '@cerc-io/util';
+import { EthClient as EthClientInterface, EthFullTransaction } from '@cerc-io/util';
 
 import ethQueries from './eth-queries';
 import { padKey } from './utils';
@@ -93,7 +93,7 @@ export class EthClient implements EthClientInterface {
 
   async getFullBlocks ({ blockNumber, blockHash }: { blockNumber?: number, blockHash?: string }): Promise<any> {
     console.time(`time:eth-client#getFullBlocks-${JSON.stringify({ blockNumber, blockHash })}`);
-    const result = await this._graphqlClient.query(
+    const { allEthHeaderCids } = await this._graphqlClient.query(
       ethQueries.getFullBlocks,
       {
         blockNumber: blockNumber?.toString(),
@@ -102,10 +102,10 @@ export class EthClient implements EthClientInterface {
     );
     console.timeEnd(`time:eth-client#getFullBlocks-${JSON.stringify({ blockNumber, blockHash })}`);
 
-    return result;
+    return allEthHeaderCids.nodes;
   }
 
-  async getFullTransaction (txHash: string, blockNumber?: number): Promise<FullTransaction> {
+  async getFullTransaction (txHash: string, blockNumber?: number): Promise<EthFullTransaction> {
     console.time(`time:eth-client#getFullTransaction-${JSON.stringify({ txHash, blockNumber })}`);
     const result = await this._graphqlClient.query(
       ethQueries.getFullTransaction,
@@ -75,6 +75,7 @@ query allEthHeaderCids($blockNumber: BigInt, $blockHash: String) {
   }
 `;
 
+// TODO: Get block size from ipld-eth-server
 export const getFullBlocks = gql`
 query allEthHeaderCids($blockNumber: BigInt, $blockHash: String) {
   allEthHeaderCids(condition: { blockNumber: $blockNumber, blockHash: $blockHash }) {
@@ -6,7 +6,7 @@ import assert from 'assert';
 import { errors, providers, utils } from 'ethers';
 
 import { Cache } from '@cerc-io/cache';
-import { encodeHeader, escapeHexString, EthClient as EthClientInterface, FullTransaction } from '@cerc-io/util';
+import { encodeHeader, escapeHexString, EthClient as EthClientInterface, EthFullTransaction } from '@cerc-io/util';
 import { padKey } from '@cerc-io/ipld-eth-client';
 
 export interface Config {
@@ -32,7 +32,10 @@ export class EthClient implements EthClientInterface {
   constructor (config: Config) {
     const { rpcEndpoint, cache } = config;
     assert(rpcEndpoint, 'Missing RPC endpoint');
-    this._provider = new providers.JsonRpcProvider(rpcEndpoint);
+    this._provider = new providers.StaticJsonRpcProvider({
+      url: rpcEndpoint,
+      allowGzip: true
+    });
 
     this._cache = cache;
   }
@@ -171,39 +174,33 @@ export class EthClient implements EthClientInterface {
 
     const rlpData = encodeHeader(header);
 
-    const allEthHeaderCids = {
-      nodes: [
-        {
-          blockNumber: this._provider.formatter.number(rawBlock.number).toString(),
-          blockHash: this._provider.formatter.hash(rawBlock.hash),
-          parentHash: this._provider.formatter.hash(rawBlock.parentHash),
-          timestamp: this._provider.formatter.number(rawBlock.timestamp).toString(),
-          stateRoot: this._provider.formatter.hash(rawBlock.stateRoot),
-          td: this._provider.formatter.bigNumber(rawBlock.totalDifficulty).toString(),
-          txRoot: this._provider.formatter.hash(rawBlock.transactionsRoot),
-          receiptRoot: this._provider.formatter.hash(rawBlock.receiptsRoot),
-          uncleRoot: this._provider.formatter.hash(rawBlock.sha3Uncles),
-          bloom: escapeHexString(this._provider.formatter.hex(rawBlock.logsBloom)),
-          blockByMhKey: {
-            data: escapeHexString(rlpData)
-          }
-        }
-      ]
-    };
-
-    return { allEthHeaderCids };
+    return [{
+      blockNumber: this._provider.formatter.number(rawBlock.number).toString(),
+      blockHash: this._provider.formatter.hash(rawBlock.hash),
+      parentHash: this._provider.formatter.hash(rawBlock.parentHash),
+      timestamp: this._provider.formatter.number(rawBlock.timestamp).toString(),
+      stateRoot: this._provider.formatter.hash(rawBlock.stateRoot),
+      td: this._provider.formatter.bigNumber(rawBlock.totalDifficulty).toString(),
+      txRoot: this._provider.formatter.hash(rawBlock.transactionsRoot),
+      receiptRoot: this._provider.formatter.hash(rawBlock.receiptsRoot),
+      uncleRoot: this._provider.formatter.hash(rawBlock.sha3Uncles),
+      bloom: escapeHexString(this._provider.formatter.hex(rawBlock.logsBloom)),
+      size: this._provider.formatter.number(rawBlock.size).toString(),
+      blockByMhKey: {
+        data: escapeHexString(rlpData)
+      }
+    }];
   }
 
-  async getFullTransaction (txHash: string): Promise<FullTransaction> {
+  async getFullTransaction (txHash: string): Promise<EthFullTransaction> {
     console.time(`time:eth-client#getFullTransaction-${JSON.stringify({ txHash })}`);
     const tx = await this._provider.getTransaction(txHash);
     console.timeEnd(`time:eth-client#getFullTransaction-${JSON.stringify({ txHash })}`);
-    const txReceipt = await tx.wait();
 
     return {
       ethTransactionCidByTxHash: {
         txHash: tx.hash,
-        index: txReceipt.transactionIndex,
+        index: (tx as any).transactionIndex,
         src: tx.from,
         dst: tx.to
       },
@@ -14,7 +14,7 @@ import {
   NULL_BLOCK_ERROR
 } from './constants';
 import { JobQueue } from './job-queue';
-import { BlockProgressInterface, IndexerInterface, EventInterface } from './types';
+import { BlockProgressInterface, IndexerInterface, EventInterface, EthFullTransaction, EthFullBlock } from './types';
 import { wait } from './misc';
 import { OrderDirection } from './database';
 import { JobQueueConfig } from './config';
@@ -27,6 +27,8 @@ const JSONbigNative = JSONbig({ useNativeBigInt: true });
 export interface PrefetchedBlock {
   block: BlockProgressInterface;
   events: DeepPartial<EventInterface>[];
+  ethFullBlock: EthFullBlock;
+  ethFullTransactions: EthFullTransaction[];
 }
 
 /**
@@ -104,6 +106,20 @@ export const fetchBlocksAtHeight = async (
     if (!blocks.length) {
       log(`No blocks fetched for block number ${blockNumber}, retrying after ${jobQueueConfig.blockDelayInMilliSecs} ms delay.`);
       await wait(jobQueueConfig.blockDelayInMilliSecs);
+    } else {
+      blocks.forEach(block => {
+        blockAndEventsMap.set(
+          block.blockHash,
+          {
+            // Block is set later in job-runner when saving to database
+            block: {} as BlockProgressInterface,
+            events: [],
+            ethFullBlock: block,
+            // Transactions are set later in job-runner when fetching events
+            ethFullTransactions: []
+          }
+        );
+      });
     }
   } catch (err: any) {
     // Handle null block error in case of Lotus EVM
@@ -153,15 +169,15 @@ export const fetchAndSaveFilteredLogsAndBlocks = async (
 ): Promise<BlockProgressInterface[]> => {
   // Fetch filtered logs and required blocks
   console.time('time:common#fetchAndSaveFilteredLogsAndBlocks-fetchAndSaveFilteredEventsAndBlocks');
-  const blocksWithEvents = await indexer.fetchAndSaveFilteredEventsAndBlocks(startBlock, endBlock);
+  const blocksData = await indexer.fetchAndSaveFilteredEventsAndBlocks(startBlock, endBlock);
   console.timeEnd('time:common#fetchAndSaveFilteredLogsAndBlocks-fetchAndSaveFilteredEventsAndBlocks');
 
   // Set blocks with events in blockAndEventsMap cache
-  blocksWithEvents.forEach(({ blockProgress, events }) => {
-    blockAndEventsMap.set(blockProgress.blockHash, { block: blockProgress, events });
+  blocksData.forEach(({ blockProgress, events, ethFullBlock, ethFullTransactions }) => {
+    blockAndEventsMap.set(blockProgress.blockHash, { block: blockProgress, events, ethFullBlock, ethFullTransactions });
   });
 
-  return blocksWithEvents.map(({ blockProgress }) => blockProgress);
+  return blocksData.map(({ blockProgress }) => blockProgress);
 };
 
 export const _prefetchBlocks = async (
@@ -181,7 +197,15 @@ export const _prefetchBlocks = async (
   );
 
   blocksWithEvents.forEach(({ blockProgress, events }) => {
-    blockAndEventsMap.set(blockProgress.blockHash, { block: blockProgress, events });
+    blockAndEventsMap.set(
+      blockProgress.blockHash,
+      {
+        block: blockProgress,
+        events,
+        // TODO: Set ethFullBlock and ethFullTransactions
+        ethFullBlock: {} as EthFullBlock,
+        ethFullTransactions: []
+      });
   });
 };
 
@@ -283,17 +307,23 @@ export const _fetchBatchBlocks = async (
  */
 export const processBatchEvents = async (
   indexer: IndexerInterface,
-  block: BlockProgressInterface,
-  eventsInBatch: number,
-  subgraphEventsOrder: boolean
+  data: {
+    block: BlockProgressInterface;
+    ethFullBlock: EthFullBlock;
+    ethFullTransactions: EthFullTransaction[];
+  },
+  { eventsInBatch, subgraphEventsOrder }: {
+    eventsInBatch: number;
+    subgraphEventsOrder: boolean;
+  }
 ): Promise<boolean> => {
   let dbBlock: BlockProgressInterface, updatedDbEvents: EventInterface[];
   let isNewContractWatched = false;
 
   if (subgraphEventsOrder) {
-    ({ dbBlock, updatedDbEvents, isNewContractWatched } = await _processEventsInSubgraphOrder(indexer, block, eventsInBatch || DEFAULT_EVENTS_IN_BATCH));
+    ({ dbBlock, updatedDbEvents, isNewContractWatched } = await _processEventsInSubgraphOrder(indexer, data, eventsInBatch || DEFAULT_EVENTS_IN_BATCH));
   } else {
-    ({ dbBlock, updatedDbEvents } = await _processEvents(indexer, block, eventsInBatch || DEFAULT_EVENTS_IN_BATCH));
+    ({ dbBlock, updatedDbEvents } = await _processEvents(indexer, data, eventsInBatch || DEFAULT_EVENTS_IN_BATCH));
   }
 
   if (indexer.processBlockAfterEvents) {
@@ -314,7 +344,15 @@ export const processBatchEvents = async (
   return isNewContractWatched;
 };
 
-const _processEvents = async (indexer: IndexerInterface, block: BlockProgressInterface, eventsInBatch: number): Promise<{ dbBlock: BlockProgressInterface, updatedDbEvents: EventInterface[] }> => {
+const _processEvents = async (
+  indexer: IndexerInterface,
+  { block, ethFullBlock, ethFullTransactions }: {
+    block: BlockProgressInterface;
+    ethFullBlock: EthFullBlock;
+    ethFullTransactions: EthFullTransaction[];
+  },
+  eventsInBatch: number
+): Promise<{ dbBlock: BlockProgressInterface, updatedDbEvents: EventInterface[] }> => {
   const updatedDbEvents: EventInterface[] = [];
 
   let page = 0;
@@ -356,7 +394,7 @@ const _processEvents = async (indexer: IndexerInterface, block: BlockProgressInt
         updatedDbEvents.push(event);
       }
 
-      await indexer.processEvent(event);
+      await indexer.processEvent(event, { ethFullBlock, ethFullTransactions });
     }
 
     block.lastProcessedEventIndex = event.index;
@@ -371,7 +409,15 @@ const _processEvents = async (indexer: IndexerInterface, block: BlockProgressInt
   return { dbBlock: block, updatedDbEvents: updatedDbEvents };
 };
 
-const _processEventsInSubgraphOrder = async (indexer: IndexerInterface, block: BlockProgressInterface, eventsInBatch: number): Promise<{ dbBlock: BlockProgressInterface, updatedDbEvents: EventInterface[], isNewContractWatched: boolean }> => {
+const _processEventsInSubgraphOrder = async (
+  indexer: IndexerInterface,
+  { block, ethFullBlock, ethFullTransactions }: {
+    block: BlockProgressInterface;
+    ethFullBlock: EthFullBlock;
+    ethFullTransactions: EthFullTransaction[];
+  },
+  eventsInBatch: number
+): Promise<{ dbBlock: BlockProgressInterface, updatedDbEvents: EventInterface[], isNewContractWatched: boolean }> => {
   // Create list of initially watched contracts
   const initiallyWatchedContracts: string[] = indexer.getWatchedContracts().map(contract => contract.address);
   const unwatchedContractEvents: EventInterface[] = [];
@@ -411,7 +457,9 @@ const _processEventsInSubgraphOrder = async (indexer: IndexerInterface, block: B
 
     // Process known events in a loop
     for (const event of watchedContractEvents) {
-      await indexer.processEvent(event);
+      console.time(`time:common#_processEventsInSubgraphOrder-block-${block.blockNumber}-processEvent-${event.eventName}`);
+      await indexer.processEvent(event, { ethFullBlock, ethFullTransactions });
+      console.timeEnd(`time:common#_processEventsInSubgraphOrder-block-${block.blockNumber}-processEvent-${event.eventName}`);
 
       block.lastProcessedEventIndex = event.index;
       block.numProcessedEvents++;
@@ -430,7 +478,9 @@ const _processEventsInSubgraphOrder = async (indexer: IndexerInterface, block: B
   if (indexer.upstreamConfig.ethServer.filterLogsByAddresses) {
     // Fetch and parse events for newly watched contracts
     const newContracts = watchedContracts.filter(contract => !initiallyWatchedContracts.includes(contract));
+    console.time(`time:common#_processEventsInSubgraphOrder-fetchEventsForContracts-block-${block.blockNumber}-unwatched-contract`);
     const events = await indexer.fetchEventsForContracts(block.blockHash, block.blockNumber, newContracts);
+    console.timeEnd(`time:common#_processEventsInSubgraphOrder-fetchEventsForContracts-block-${block.blockNumber}-unwatched-contract`);
 
     events.forEach(event => {
       event.block = block;
@@ -457,7 +507,9 @@ const _processEventsInSubgraphOrder = async (indexer: IndexerInterface, block: B
   console.time('time:common#processEventsInSubgraphOrder-processing_initially_unwatched_events');
   // In the end process events of newly watched contracts
   for (const updatedDbEvent of updatedDbEvents) {
-    await indexer.processEvent(updatedDbEvent);
+    console.time(`time:common#processEventsInSubgraphOrder-block-${block.blockNumber}-updated-processEvent-${updatedDbEvent.eventName}`);
+    await indexer.processEvent(updatedDbEvent, { ethFullBlock, ethFullTransactions });
+    console.timeEnd(`time:common#processEventsInSubgraphOrder-block-${block.blockNumber}-updated-processEvent-${updatedDbEvent.eventName}`);
 
     block.lastProcessedEventIndex = Math.max(block.lastProcessedEventIndex + 1, updatedDbEvent.index);
     block.numProcessedEvents++;
@@ -33,6 +33,7 @@ export class EventWatcher {
   _indexer: IndexerInterface;
   _pubsub: PubSub;
   _jobQueue: JobQueue;
+  _realtimeProcessingStarted = false;
 
   _shutDown = false;
   _signalCount = 0;
@@ -135,6 +136,13 @@ export class EventWatcher {
     log(`Starting realtime block processing from block ${startBlockNumber}`);
     await processBlockByNumber(this._jobQueue, startBlockNumber);
 
+    // Check if realtime processing already started and avoid resubscribing to block progress event
+    if (this._realtimeProcessingStarted) {
+      return;
+    }
+
+    this._realtimeProcessingStarted = true;
+
     // Creating an AsyncIterable from AsyncIterator to iterate over the values.
     // https://www.codementor.io/@tiagolopesferreira/asynchronous-iterators-in-javascript-jl1yg8la1#for-wait-of
     const blockProgressEventIterable = {
@@ -34,7 +34,7 @@ export interface Transaction {
   hash: string;
   index: number;
   from: string;
-  to: string;
+  to?: string;
   value: string;
   gasLimit: string;
   gasPrice?: string;
@@ -6,6 +6,7 @@ import assert from 'assert';
 
 import { BlockProgressInterface, IndexerInterface } from './types';
 import { processBatchEvents } from './common';
+import { EthFullBlock } from '.';
 
 export const indexBlock = async (
   indexer: IndexerInterface,
@@ -46,6 +47,14 @@ export const indexBlock = async (
     assert(indexer.processBlock);
     await indexer.processBlock(blockProgress);
 
-    await processBatchEvents(indexer, blockProgress, eventsInBatch, subgraphEventsOrder);
+    await processBatchEvents(
+      indexer,
+      {
+        block: blockProgress,
+        // TODO: Set ethFullBlock and ethFullTransactions
+        ethFullBlock: {} as EthFullBlock,
+        ethFullTransactions: []
+      },
+      { eventsInBatch, subgraphEventsOrder });
   }
 };
@@ -24,7 +24,9 @@ import {
   StateKind,
   EthClient,
   ContractJobData,
-  EventsQueueJobKind
+  EventsQueueJobKind,
+  EthFullBlock,
+  EthFullTransaction
 } from './types';
 import { UNKNOWN_EVENT_NAME, QUEUE_EVENT_PROCESSING, DIFF_MERGE_BATCH_SIZE } from './constants';
 import { JobQueue } from './job-queue';
@@ -100,6 +102,11 @@ export type ResultMeta = {
   hasIndexingErrors: boolean;
 };
 
+export type ExtraEventData = {
+  ethFullBlock: EthFullBlock;
+  ethFullTransactions: EthFullTransaction[];
+}
+
 export class Indexer {
   _serverConfig: ServerConfig;
   _upstreamConfig: UpstreamConfig;
@@ -284,10 +291,9 @@ export class Indexer {
     return res;
   }
 
-  async getBlocks (blockFilter: { blockNumber?: number, blockHash?: string }): Promise<any> {
+  async getBlocks (blockFilter: { blockNumber?: number, blockHash?: string }): Promise<EthFullBlock[]> {
     assert(blockFilter.blockHash || blockFilter.blockNumber);
-    const result = await this._ethClient.getBlocks(blockFilter);
-    const { allEthHeaderCids: { nodes: blocks } } = result;
+    const blocks = await this._ethClient.getFullBlocks(blockFilter);
 
     if (!blocks.length) {
       try {
@@ -461,7 +467,12 @@
     kind: string,
     logObj: { topics: string[]; data: string }
   ) => { eventName: string; eventInfo: {[key: string]: any}; eventSignature: string }
-  ): Promise<{ blockProgress: BlockProgressInterface, events: DeepPartial<EventInterface>[] }[]> {
+  ): Promise<{
+    blockProgress: BlockProgressInterface,
+    events: DeepPartial<EventInterface>[],
+    ethFullBlock: EthFullBlock,
+    ethFullTransactions: EthFullTransaction[]
+  }[]> {
     assert(this._ethClient.getLogsForBlockRange, 'getLogsForBlockRange() not implemented in ethClient');
 
     const { addresses, topics } = this._createLogsFilters(eventSignaturesMap);
@@ -474,45 +485,67 @@
     });
 
     const blockLogsMap = this._reduceLogsToBlockLogsMap(logs);
+    // Create unique list of tx required
+    const txHashes = Array.from([
+      ...new Set<string>(logs.map((log: any) => log.transaction.hash))
+    ]);
 
     // Fetch blocks with transactions for the logs returned
     console.time(`time:indexer#fetchAndSaveFilteredEventsAndBlocks-fetch-blocks-txs-${fromBlock}-${toBlock}`);
-    const blocksWithTxPromises = Array.from(blockLogsMap.keys()).map(async (blockHash) => {
-      const result = await this._ethClient.getBlockWithTransactions({ blockHash });
+    const blocksPromises = Array.from(blockLogsMap.keys()).map(async (blockHash) => {
+      const [fullBlock] = await this._ethClient.getFullBlocks({ blockHash });
 
-      const {
-        allEthHeaderCids: {
-          nodes: [
-            {
-              ethTransactionCidsByHeaderId: {
-                nodes: transactions
-              },
-              ...block
-            }
-          ]
-        }
-      } = result;
-
-      block.blockTimestamp = Number(block.timestamp);
-      block.blockNumber = Number(block.blockNumber);
-
-      return { block, transactions } as { block: DeepPartial<BlockProgressInterface>; transactions: any[] };
+      const block = {
+        ...fullBlock,
+        blockTimestamp: Number(fullBlock.timestamp),
+        blockNumber: Number(fullBlock.blockNumber)
+      };
+
+      return { block, fullBlock } as { block: DeepPartial<BlockProgressInterface>; fullBlock: EthFullBlock };
     });
 
-    const blockWithTxs = await Promise.all(blocksWithTxPromises);
+    const ethFullTxPromises = txHashes.map(async txHash => {
+      return this._ethClient.getFullTransaction(txHash);
+    });
+
+    const blocks = await Promise.all(blocksPromises);
+    const ethFullTxs = await Promise.all(ethFullTxPromises);
+
+    const ethFullTxsMap = ethFullTxs.reduce((acc: Map<string, EthFullTransaction>, ethFullTx) => {
+      acc.set(ethFullTx.ethTransactionCidByTxHash.txHash, ethFullTx);
+      return acc;
+    }, new Map());
+
     console.timeEnd(`time:indexer#fetchAndSaveFilteredEventsAndBlocks-fetch-blocks-txs-${fromBlock}-${toBlock}`);
 
     // Map db ready events according to blockhash
     console.time(`time:indexer#fetchAndSaveFilteredEventsAndBlocks-db-save-blocks-events-${fromBlock}-${toBlock}`);
-    const blockWithDbEventsPromises = blockWithTxs.map(async ({ block, transactions }) => {
+    const blockWithDbEventsPromises = blocks.map(async ({ block, fullBlock }) => {
       const blockHash = block.blockHash;
       assert(blockHash);
       const logs = blockLogsMap.get(blockHash) || [];
 
-      const events = this.createDbEventsFromLogsAndTxs(blockHash, logs, transactions, parseEventNameAndArgs);
+      const txHashes = Array.from([
+        ...new Set<string>(logs.map((log: any) => log.transaction.hash))
+      ]);
+
+      const blockEthFullTxs = txHashes.map(txHash => ethFullTxsMap.get(txHash)) as EthFullTransaction[];
+
+      const events = this.createDbEventsFromLogsAndTxs(
+        blockHash,
+        logs,
+        blockEthFullTxs.map(ethFullTx => ethFullTx?.ethTransactionCidByTxHash),
+        parseEventNameAndArgs
+      );
       const [blockProgress] = await this.saveBlockWithEvents(block, events);
 
-      return { blockProgress, events: [] };
+      return {
+        blockProgress,
+        ethFullBlock: fullBlock,
+        ethFullTransactions: blockEthFullTxs,
+        block,
+        events: []
+      };
     });
 
     const blocksWithDbEvents = await Promise.all(blockWithDbEventsPromises);
@@ -536,48 +569,57 @@ export class Indexer {
   }
 
   // Fetch events (to be saved to db) for a particular block
-  async fetchEvents (blockHash: string, blockNumber: number, eventSignaturesMap: Map<string, string[]>, parseEventNameAndArgs: (kind: string, logObj: any) => any): Promise<DeepPartial<EventInterface>[]> {
+  async fetchEvents (blockHash: string, blockNumber: number, eventSignaturesMap: Map<string, string[]>, parseEventNameAndArgs: (kind: string, logObj: any) => any): Promise<{ events: DeepPartial<EventInterface>[], transactions: EthFullTransaction[]}> {
     const { addresses, topics } = this._createLogsFilters(eventSignaturesMap);
     const { logs, transactions } = await this._fetchLogsAndTransactions(blockHash, blockNumber, addresses, topics);
 
-    return this.createDbEventsFromLogsAndTxs(blockHash, logs, transactions, parseEventNameAndArgs);
+    const events = this.createDbEventsFromLogsAndTxs(
+      blockHash,
+      logs,
+      transactions.map(tx => tx.ethTransactionCidByTxHash),
+      parseEventNameAndArgs
+    );
+
+    return { events, transactions };
   }
 
   async fetchEventsForContracts (blockHash: string, blockNumber: number, addresses: string[], eventSignaturesMap: Map<string, string[]>, parseEventNameAndArgs: (kind: string, logObj: any) => any): Promise<DeepPartial<EventInterface>[]> {
     const { topics } = this._createLogsFilters(eventSignaturesMap);
     const { logs, transactions } = await this._fetchLogsAndTransactions(blockHash, blockNumber, addresses, topics);
 
-    return this.createDbEventsFromLogsAndTxs(blockHash, logs, transactions, parseEventNameAndArgs);
+    return this.createDbEventsFromLogsAndTxs(
+      blockHash,
+      logs,
+      transactions.map(tx => tx.ethTransactionCidByTxHash),
+      parseEventNameAndArgs
+    );
   }
 
-  async _fetchLogsAndTransactions (blockHash: string, blockNumber: number, addresses?: string[], topics?: string[][]): Promise<{ logs: any[]; transactions: any[] }> {
-    const logsPromise = await this._ethClient.getLogs({
+  async _fetchLogsAndTransactions (blockHash: string, blockNumber: number, addresses?: string[], topics?: string[][]): Promise<{ logs: any[]; transactions: EthFullTransaction[] }> {
+    const { logs } = await this._ethClient.getLogs({
       blockHash,
       blockNumber: blockNumber.toString(),
       addresses,
       topics
     });
 
-    const transactionsPromise = this._ethClient.getBlockWithTransactions({ blockHash, blockNumber });
-
-    const [
-      { logs },
-      {
-        allEthHeaderCids: {
-          nodes: [
-            {
-              ethTransactionCidsByHeaderId: {
-                nodes: transactions
-              }
-            }
-          ]
-        }
-      }
-    ] = await Promise.all([logsPromise, transactionsPromise]);
+    const transactions = await this._fetchTxsFromLogs(logs);
 
     return { logs, transactions };
   }
 
+  async _fetchTxsFromLogs (logs: any[]): Promise<EthFullTransaction[]> {
+    const txHashes = Array.from([
+      ...new Set<string>(logs.map((log) => log.transaction.hash))
+    ]);
+
+    const ethFullTxPromises = txHashes.map(async txHash => {
+      return this._ethClient.getFullTransaction(txHash);
+    });
+
+    return Promise.all(ethFullTxPromises);
+  }
+
   // Create events to be saved to db for a block given blockHash, logs, transactions and a parser function
   createDbEventsFromLogsAndTxs (blockHash: string, logs: any, transactions: any, parseEventNameAndArgs: (kind: string, logObj: any) => any): DeepPartial<EventInterface>[] {
     const transactionMap = transactions.reduce((acc: {[key: string]: any}, transaction: {[key: string]: any}) => {
@@ -502,6 +502,20 @@ export class JobRunner {
       throw new Error(message);
     }
 
+    blocks.forEach(block => {
+      this._blockAndEventsMap.set(
+        block.blockHash,
+        {
+          // block is set later in job when saving to database
+          block: {} as BlockProgressInterface,
+          events: [],
+          ethFullBlock: block,
+          // Transactions are set later in job when fetching events
+          ethFullTransactions: []
+        }
+      );
+    });
+
     const [{ cid: parentCid, blockNumber: parentBlockNumber, parentHash: grandparentHash, timestamp: parentTimestamp }] = blocks;
 
     await this.jobQueue.pushJob(QUEUE_BLOCK_PROCESSING, {
@@ -549,7 +563,9 @@
     if (!blockProgress) {
       const prefetchedBlock = this._blockAndEventsMap.get(blockHash);
 
-      if (prefetchedBlock) {
+      // Check if prefetched block is set properly
+      // prefetchedBlock.block is an empty object when running in realtime processing
+      if (prefetchedBlock && prefetchedBlock.block.blockHash) {
         ({ block: blockProgress } = prefetchedBlock);
       } else {
         // Delay required to process block.
@@ -558,11 +574,20 @@
 
         console.time('time:job-runner#_indexBlock-saveBlockAndFetchEvents');
         log(`_indexBlock#saveBlockAndFetchEvents: fetching from upstream server ${blockHash}`);
-        [blockProgress] = await this._indexer.saveBlockAndFetchEvents({ cid, blockHash, blockNumber, parentHash, blockTimestamp });
+        let ethFullTransactions;
+        [blockProgress,, ethFullTransactions] = await this._indexer.saveBlockAndFetchEvents({ cid, blockHash, blockNumber, parentHash, blockTimestamp });
         log(`_indexBlock#saveBlockAndFetchEvents: fetched for block: ${blockProgress.blockHash} num events: ${blockProgress.numEvents}`);
         console.timeEnd('time:job-runner#_indexBlock-saveBlockAndFetchEvents');
+        const data = this._blockAndEventsMap.get(blockHash);
+        assert(data);
 
-        this._blockAndEventsMap.set(blockHash, { block: blockProgress, events: [] });
+        this._blockAndEventsMap.set(
+          blockHash,
+          {
+            ...data,
+            block: blockProgress,
+            ethFullTransactions
+          });
       }
     }
 
@@ -588,26 +613,33 @@
     const { blockHash, isRetryAttempt } = jobData;
 
     try {
-      if (!this._blockAndEventsMap.has(blockHash)) {
-        console.time('time:job-runner#_processEvents-get-block-progress');
-        const block = await this._indexer.getBlockProgress(blockHash);
-        console.timeEnd('time:job-runner#_processEvents-get-block-progress');
+      // NOTE: blockAndEventsMap should contain block as watcher is reset
+      // if (!this._blockAndEventsMap.has(blockHash)) {
+      //   console.time('time:job-runner#_processEvents-get-block-progress');
+      //   const block = await this._indexer.getBlockProgress(blockHash);
+      //   console.timeEnd('time:job-runner#_processEvents-get-block-progress');
 
-        assert(block);
-        this._blockAndEventsMap.set(blockHash, { block, events: [] });
-      }
+      //   assert(block);
+      //   this._blockAndEventsMap.set(blockHash, { block, events: [] });
+      // }
 
       const prefetchedBlock = this._blockAndEventsMap.get(blockHash);
       assert(prefetchedBlock);
-      const { block } = prefetchedBlock;
+      const { block, ethFullBlock, ethFullTransactions } = prefetchedBlock;
       log(`Processing events for block ${block.blockNumber}`);
 
       console.time(`time:job-runner#_processEvents-events-${block.blockNumber}`);
       const isNewContractWatched = await processBatchEvents(
         this._indexer,
-        block,
-        this._jobQueueConfig.eventsInBatch,
-        this._jobQueueConfig.subgraphEventsOrder
+        {
+          block,
+          ethFullBlock,
+          ethFullTransactions
+        },
+        {
+          eventsInBatch: this._jobQueueConfig.eventsInBatch,
+          subgraphEventsOrder: this._jobQueueConfig.subgraphEventsOrder
+        }
       );
       console.timeEnd(`time:job-runner#_processEvents-events-${block.blockNumber}`);
 
@@ -18,10 +18,10 @@ import { GQLCacheConfig, Config } from './config';
 import { JobQueue } from './job-queue';
 import { GraphDecimal } from './graph/graph-decimal';
 import * as EthDecoder from './eth';
-import { getCachedBlockSize } from './block-size-cache';
 import { ResultEvent } from './indexer';
-import { EventInterface, EthClient } from './types';
+import { EventInterface, EthFullBlock, EthFullTransaction } from './types';
 import { BlockHeight } from './database';
+import { Transaction } from './graph/utils';
 
 const JSONbigNative = JSONbig({ useNativeBigInt: true });
 
@@ -154,7 +154,7 @@ export const getResetYargs = (): yargs.Argv => {
 };
 
 export const getCustomProvider = (url?: utils.ConnectionInfo | string, network?: providers.Networkish): providers.JsonRpcProvider => {
-  const provider = new providers.JsonRpcProvider(url, network);
+  const provider = new providers.StaticJsonRpcProvider(url, network);
   provider.formatter = new CustomFormatter();
   return provider;
 };
@@ -182,52 +182,40 @@ class CustomFormatter extends providers.Formatter {
   }
 }
 
-export const getFullBlock = async (ethClient: EthClient, ethProvider: providers.BaseProvider, blockHash: string, blockNumber: number): Promise<any> => {
-  const {
-    allEthHeaderCids: {
-      nodes: [
-        fullBlock
-      ]
-    }
-  } = await ethClient.getFullBlocks({ blockHash, blockNumber });
-
-  assert(fullBlock.blockByMhKey);
-
+export const getFullBlock = (ethFullBlock: EthFullBlock): any => {
   // Decode the header data.
-  const header = EthDecoder.decodeHeader(EthDecoder.decodeData(fullBlock.blockByMhKey.data));
+  const header = EthDecoder.decodeHeader(EthDecoder.decodeData(ethFullBlock.blockByMhKey.data));
   assert(header);
 
-  // TODO: Calculate size from rlp encoded data.
-  // Get block info from JSON RPC API provided by ipld-eth-server.
-  const provider = ethProvider as providers.JsonRpcProvider;
-  const size = await getCachedBlockSize(provider, blockHash, Number(fullBlock.blockNumber));
-
   return {
-    headerId: fullBlock.id,
-    cid: fullBlock.cid,
-    blockNumber: fullBlock.blockNumber,
-    blockHash: fullBlock.blockHash,
-    parentHash: fullBlock.parentHash,
-    timestamp: fullBlock.timestamp,
-    stateRoot: fullBlock.stateRoot,
-    td: fullBlock.td,
-    txRoot: fullBlock.txRoot,
-    receiptRoot: fullBlock.receiptRoot,
-    uncleHash: fullBlock.uncleRoot,
+    headerId: ethFullBlock.id,
+    cid: ethFullBlock.cid,
+    blockNumber: ethFullBlock.blockNumber,
+    blockHash: ethFullBlock.blockHash,
+    parentHash: ethFullBlock.parentHash,
+    timestamp: ethFullBlock.timestamp,
+    stateRoot: ethFullBlock.stateRoot,
+    td: ethFullBlock.td,
+    txRoot: ethFullBlock.txRoot,
+    receiptRoot: ethFullBlock.receiptRoot,
+    uncleHash: ethFullBlock.uncleRoot,
     difficulty: header.Difficulty.toString(),
     gasLimit: header.GasLimit.toString(),
     gasUsed: header.GasUsed.toString(),
     author: header.Beneficiary,
-    size: BigInt(size).toString(),
+    size: ethFullBlock.size,
     baseFee: header.BaseFee?.toString()
   };
 };
 
-export const getFullTransaction = async (ethClient: EthClient, txHash: string, blockNumber: number): Promise<any> => {
+export const getFullTransaction = (txHash: string, ethFullTransactions: EthFullTransaction[]): Transaction => {
+  const ethFullTransaction = ethFullTransactions.find(ethFullTransaction => ethFullTransaction.ethTransactionCidByTxHash.txHash === txHash);
+  assert(ethFullTransaction);
+
   let {
     ethTransactionCidByTxHash: fullTx,
     data: txData
-  } = await ethClient.getFullTransaction(txHash, blockNumber);
+  } = ethFullTransaction;
 
   // Check if txData does not exist when using ipld-eth-client
   if (!txData) {
@@ -9,7 +9,7 @@ import { MappingKey, StorageLayout } from '@cerc-io/solidity-mapper';
 
 import { ServerConfig, UpstreamConfig } from './config';
 import { Where, QueryOptions, Database } from './database';
-import { ValueResult, StateStatus } from './indexer';
+import { ValueResult, StateStatus, ExtraEventData } from './indexer';
 import { JOB_KIND_CONTRACT, JOB_KIND_EVENTS } from './constants';
 
 export enum StateKind {
@@ -84,6 +84,77 @@ export interface StateInterface {
   data: Buffer;
 }

+export interface EthFullTransaction {
+  ethTransactionCidByTxHash: {
+    txHash: string;
+    index: number;
+    src: string;
+    dst?: string;
+    blockByMhKey?: {
+      data: string;
+    }
+  },
+  data?: Transaction;
+}
+
+export interface EthFullBlock {
+  id?: string,
+  cid?: string;
+  blockNumber: string;
+  blockHash: string;
+  parentHash: string;
+  timestamp: string;
+  stateRoot: string;
+  td: string;
+  txRoot: string;
+  receiptRoot: string;
+  uncleRoot: string;
+  bloom: string;
+  size: string;
+  blockByMhKey: {
+    data: string;
+  }
+}
+
+export interface EthClient {
+  getStorageAt({ blockHash, contract, slot }: {
+    blockHash: string;
+    contract: string;
+    slot: string;
+  }): Promise<{
+    value: string;
+    proof: {
+      data: string;
+    };
+  }>;
+  getBlockWithTransactions({ blockNumber, blockHash }: {
+    blockNumber?: number;
+    blockHash?: string;
+  }): Promise<any>;
+  getBlocks({ blockNumber, blockHash }: {
+    blockNumber?: number;
+    blockHash?: string;
+  }): Promise<any>;
+  getFullBlocks({ blockNumber, blockHash }: {
+    blockNumber?: number;
+    blockHash?: string;
+  }): Promise<EthFullBlock[]>;
+  getFullTransaction(txHash: string, blockNumber?: number): Promise<EthFullTransaction>;
+  getBlockByHash(blockHash?: string): Promise<any>;
+  getLogs(vars: {
+    blockHash: string,
+    blockNumber: string,
+    addresses?: string[],
+    topics?: string[][]
+  }): Promise<any>;
+  getLogsForBlockRange?: (vars: {
+    fromBlock?: number,
+    toBlock?: number,
+    addresses?: string[],
+    topics?: string[][]
+  }) => Promise<any>;
+}
+
 export interface IndexerInterface {
   readonly serverConfig: ServerConfig
   readonly upstreamConfig: UpstreamConfig
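For illustration, a sketch of how the new `getFullBlocks` method could be used to prefetch block data up front and keep it in a map keyed by block hash. This assumes `EthClient` and `EthFullBlock` are exported from `@cerc-io/util`; the function itself is hypothetical and not part of this change:

import { EthClient, EthFullBlock } from '@cerc-io/util';

// Hypothetical prefetch helper: fetch full block data once per height and
// cache it so later event processing can read it without extra upstream calls.
const prefetchFullBlocks = async (ethClient: EthClient, blockNumbers: number[]): Promise<Map<string, EthFullBlock>> => {
  const blocksByHash = new Map<string, EthFullBlock>();

  for (const blockNumber of blockNumbers) {
    // getFullBlocks may return more than one block at a height (e.g. around a fork).
    const fullBlocks = await ethClient.getFullBlocks({ blockNumber });
    fullBlocks.forEach(fullBlock => blocksByHash.set(fullBlock.blockHash, fullBlock));
  }

  return blocksByHash;
};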
@@ -95,15 +166,24 @@ export interface IndexerInterface {
   getEvent (id: string): Promise<EventInterface | undefined>
   getSyncStatus (): Promise<SyncStatusInterface | undefined>
   getStateSyncStatus (): Promise<StateSyncStatusInterface | undefined>
-  getBlocks (blockFilter: { blockHash?: string, blockNumber?: number }): Promise<any>
+  getBlocks (blockFilter: { blockHash?: string, blockNumber?: number }): Promise<EthFullBlock[]>
   getBlocksAtHeight (height: number, isPruned: boolean): Promise<BlockProgressInterface[]>
   getLatestCanonicalBlock (): Promise<BlockProgressInterface | undefined>
   getLatestStateIndexedBlock (): Promise<BlockProgressInterface>
   getBlockEvents (blockHash: string, where: Where, queryOptions: QueryOptions): Promise<Array<EventInterface>>
   getAncestorAtDepth (blockHash: string, depth: number): Promise<string>
   fetchEventsAndSaveBlocks (blocks: DeepPartial<BlockProgressInterface>[]): Promise<{ blockProgress: BlockProgressInterface, events: DeepPartial<EventInterface>[] }[]>
-  saveBlockAndFetchEvents (block: DeepPartial<BlockProgressInterface>): Promise<[BlockProgressInterface, DeepPartial<EventInterface>[]]>
-  fetchAndSaveFilteredEventsAndBlocks (startBlock: number, endBlock: number): Promise<{ blockProgress: BlockProgressInterface, events: DeepPartial<EventInterface>[] }[]>
+  saveBlockAndFetchEvents (block: DeepPartial<BlockProgressInterface>): Promise<[
+    BlockProgressInterface,
+    DeepPartial<EventInterface>[],
+    EthFullTransaction[]
+  ]>
+  fetchAndSaveFilteredEventsAndBlocks (startBlock: number, endBlock: number): Promise<{
+    blockProgress: BlockProgressInterface,
+    events: DeepPartial<EventInterface>[],
+    ethFullBlock: EthFullBlock,
+    ethFullTransactions: EthFullTransaction[]
+  }[]>
   fetchEventsForContracts (blockHash: string, blockNumber: number, addresses: string[]): Promise<DeepPartial<EventInterface>[]>
   removeUnknownEvents (block: BlockProgressInterface): Promise<void>
   updateBlockProgress (block: BlockProgressInterface, lastProcessedEventIndex: number): Promise<BlockProgressInterface>
@@ -117,7 +197,7 @@ export interface IndexerInterface {
   markBlocksAsPruned (blocks: BlockProgressInterface[]): Promise<void>
   saveEventEntity (dbEvent: EventInterface): Promise<EventInterface>
   saveEvents (dbEvents: DeepPartial<EventInterface>[]): Promise<void>
-  processEvent (event: EventInterface): Promise<void>
+  processEvent (event: EventInterface, extraData: ExtraEventData): Promise<void>
   parseEventNameAndArgs?: (kind: string, logObj: any) => any
   isWatchedContract: (address: string) => ContractInterface | undefined;
   getWatchedContracts: () => ContractInterface[]
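Putting the interface changes together, a rough sketch of how a consumer might drive historical processing with the prefetched data. The loop below is illustrative only, and it assumes `ExtraEventData` carries `ethFullBlock` and `ethFullTransactions` fields (that type's definition is not shown in this diff):

import { IndexerInterface, EventInterface } from '@cerc-io/util';

// Illustrative only: process a block range using data prefetched by
// fetchAndSaveFilteredEventsAndBlocks, handing it to processEvent as extraData.
const processBlockRange = async (indexer: IndexerInterface, startBlock: number, endBlock: number): Promise<void> => {
  const results = await indexer.fetchAndSaveFilteredEventsAndBlocks(startBlock, endBlock);

  for (const { events, ethFullBlock, ethFullTransactions } of results) {
    for (const event of events) {
      // Field names on ExtraEventData are assumed here.
      await indexer.processEvent(event as EventInterface, { ethFullBlock, ethFullTransactions } as any);
    }
  }
};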
@@ -204,58 +284,6 @@ export interface GraphWatcherInterface {
   setIndexer (indexer: IndexerInterface): void;
 }

-export interface FullTransaction {
-  ethTransactionCidByTxHash: {
-    txHash: string;
-    index: number;
-    src: string;
-    dst?: string;
-    blockByMhKey?: {
-      data: string;
-    }
-  },
-  data?: Transaction;
-}
-
-export interface EthClient {
-  getStorageAt({ blockHash, contract, slot }: {
-    blockHash: string;
-    contract: string;
-    slot: string;
-  }): Promise<{
-    value: string;
-    proof: {
-      data: string;
-    };
-  }>;
-  getBlockWithTransactions({ blockNumber, blockHash }: {
-    blockNumber?: number;
-    blockHash?: string;
-  }): Promise<any>;
-  getBlocks({ blockNumber, blockHash }: {
-    blockNumber?: number;
-    blockHash?: string;
-  }): Promise<any>;
-  getFullBlocks({ blockNumber, blockHash }: {
-    blockNumber?: number;
-    blockHash?: string;
-  }): Promise<any>;
-  getFullTransaction(txHash: string, blockNumber?: number): Promise<FullTransaction>;
-  getBlockByHash(blockHash?: string): Promise<any>;
-  getLogs(vars: {
-    blockHash: string,
-    blockNumber: string,
-    addresses?: string[],
-    topics?: string[][]
-  }): Promise<any>;
-  getLogsForBlockRange?: (vars: {
-    fromBlock?: number,
-    toBlock?: number,
-    addresses?: string[],
-    topics?: string[][]
-  }) => Promise<any>;
-}
-
 export type Clients = {
   ethClient: EthClient;
   [key: string]: any;