Mirror of https://github.com/cerc-io/watcher-ts (synced 2025-01-06 19:38:05 +00:00)

Refactor export-state and verify checkpoint CLIs to cli package (#252)

* Refactor export-state CLI to cli package
* Refactor verify checkpoint CLI to cli package
* Update mock indexer object

parent ace52d9da3
commit 122a64c2f9
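
The refactor moves the shared export-state and verify-checkpoint logic into command classes in the @cerc-io/cli package, so each watcher only supplies its own Database and Indexer implementations. A minimal sketch of the resulting watcher-side export-state entrypoint, assembled from the added lines in the hunks below; the import paths and the main() wrapper shape are watcher-specific assumptions:

// Sketch: a watcher's export-state entrypoint after this refactor.
// Database and Indexer are the watcher's own implementations (assumed paths).
import { ExportStateCmd } from '@cerc-io/cli';

import { Database } from '../database';
import { Indexer } from '../indexer';

const main = async (): Promise<void> => {
  // ExportStateCmd parses its own CLI options (configFile, exportFile, blockNumber).
  const exportStateCmd = new ExportStateCmd();
  await exportStateCmd.init(Database, Indexer);

  await exportStateCmd.exec();
};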
@@ -30,8 +30,6 @@ interface Arguments {
export class CreateCheckpointCmd {
  _argv?: Arguments
  _baseCmd: BaseCmd
  _database?: DatabaseInterface
  _indexer?: IndexerInterface

  constructor () {
    this._baseCmd = new BaseCmd();
packages/cli/src/checkpoint/verify.ts (new file, 86 lines)
@@ -0,0 +1,86 @@
//
// Copyright 2022 Vulcanize, Inc.
//

import debug from 'debug';
import 'reflect-metadata';
import assert from 'assert';
import { ConnectionOptions } from 'typeorm';

import { JsonRpcProvider } from '@ethersproject/providers';
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
import {
  JobQueue,
  DatabaseInterface,
  IndexerInterface,
  ServerConfig,
  Clients,
  verifyCheckpointData
} from '@cerc-io/util';

import { BaseCmd } from '../base';

const log = debug('vulcanize:checkpoint-verify');

interface Arguments {
  configFile: string;
  cid: string;
}

export class VerifyCheckpointCmd {
  _argv?: Arguments
  _baseCmd: BaseCmd

  constructor () {
    this._baseCmd = new BaseCmd();
  }

  async initConfig<ConfigType> (configFile: string): Promise<ConfigType> {
    return this._baseCmd.initConfig(configFile);
  }

  async init (
    argv: any,
    Database: new (
      config: ConnectionOptions,
      serverConfig?: ServerConfig
    ) => DatabaseInterface,
    Indexer: new (
      serverConfig: ServerConfig,
      db: DatabaseInterface,
      clients: Clients,
      ethProvider: JsonRpcProvider,
      jobQueue: JobQueue,
      graphWatcher?: GraphWatcher
    ) => IndexerInterface,
    clients: { [key: string]: any } = {}
  ): Promise<void> {
    this._argv = argv;
    await this.initConfig(argv.configFile);

    await this._baseCmd.init(Database, Indexer, clients);
  }

  async exec (): Promise<void> {
    assert(this._argv);

    const database = this._baseCmd.database;
    const indexer = this._baseCmd.indexer;

    assert(database);
    assert(indexer);

    const graphDb: GraphDatabase | undefined = this._baseCmd.graphDb || database.graphDatabase;
    assert(graphDb);

    const state = await indexer.getStateByCID(this._argv.cid);
    assert(state, 'State for the provided CID doesn\'t exist.');
    const data = indexer.getStateData(state);

    log(`Verifying checkpoint data for contract ${state.contractAddress}`);
    await verifyCheckpointData(graphDb, state.block, data);
    log('Checkpoint data verified');

    await database.close();
  }
}
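
With this class in place, a watcher's `checkpoint verify` yargs handler shrinks to a few lines. A sketch based on the watcher-side hunk later in this diff; the variable name and imports here are illustrative:

// Sketch: watcher-side yargs handler for `checkpoint verify`.
// Database and Indexer are the watcher's own implementations (assumed imports).
export const handler = async (argv: any): Promise<void> => {
  const verifyCheckpointCmd = new VerifyCheckpointCmd();
  await verifyCheckpointCmd.init(argv, Database, Indexer);

  // Loads the state for argv.cid and runs verifyCheckpointData against the graph database.
  await verifyCheckpointCmd.exec();
};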
packages/cli/src/export-state.ts (new file, 182 lines)
@@ -0,0 +1,182 @@
//
// Copyright 2022 Vulcanize, Inc.
//

import yargs from 'yargs';
import 'reflect-metadata';
import assert from 'assert';
import path from 'path';
import fs from 'fs';
import debug from 'debug';
import { ConnectionOptions } from 'typeorm';

import { JsonRpcProvider } from '@ethersproject/providers';
import { GraphWatcher } from '@cerc-io/graph-node';
import {
  DEFAULT_CONFIG_PATH,
  JobQueue,
  DatabaseInterface,
  IndexerInterface,
  ServerConfig,
  StateKind,
  Clients
} from '@cerc-io/util';
import * as codec from '@ipld/dag-cbor';

import { BaseCmd } from './base';

const log = debug('vulcanize:export-state');

interface Arguments {
  configFile: string;
  exportFile: string;
  blockNumber: number;
}

export class ExportStateCmd {
  _argv?: Arguments
  _baseCmd: BaseCmd;

  constructor () {
    this._baseCmd = new BaseCmd();
  }

  async initConfig<ConfigType> (): Promise<ConfigType> {
    this._argv = this._getArgv();
    assert(this._argv);

    return this._baseCmd.initConfig(this._argv.configFile);
  }

  async init (
    Database: new (config: ConnectionOptions,
      serverConfig?: ServerConfig
    ) => DatabaseInterface,
    Indexer: new (
      serverConfig: ServerConfig,
      db: DatabaseInterface,
      clients: Clients,
      ethProvider: JsonRpcProvider,
      jobQueue: JobQueue,
      graphWatcher?: GraphWatcher
    ) => IndexerInterface,
    clients: { [key: string]: any } = {}
  ): Promise<void> {
    await this.initConfig();

    await this._baseCmd.init(Database, Indexer, clients);
  }

  async exec (): Promise<void> {
    assert(this._argv);

    const database = this._baseCmd.database;
    const indexer = this._baseCmd.indexer;

    assert(database);
    assert(indexer);

    const exportData: any = {
      snapshotBlock: {},
      contracts: [],
      stateCheckpoints: []
    };

    const contracts = await database.getContracts();
    let block = await indexer.getLatestStateIndexedBlock();
    assert(block);

    if (this._argv.blockNumber) {
      if (this._argv.blockNumber > block.blockNumber) {
        throw new Error(`Export snapshot block height ${this._argv.blockNumber} should be less than latest state indexed block height ${block.blockNumber}`);
      }

      const blocksAtSnapshotHeight = await indexer.getBlocksAtHeight(this._argv.blockNumber, false);

      if (!blocksAtSnapshotHeight.length) {
        throw new Error(`No blocks at snapshot height ${this._argv.blockNumber}`);
      }

      block = blocksAtSnapshotHeight[0];
    }

    log(`Creating export snapshot at block height ${block.blockNumber}`);

    // Export snapshot block.
    exportData.snapshotBlock = {
      blockNumber: block.blockNumber,
      blockHash: block.blockHash
    };

    // Export contracts and checkpoints.
    for (const contract of contracts) {
      if (contract.startingBlock > block.blockNumber) {
        continue;
      }

      exportData.contracts.push({
        address: contract.address,
        kind: contract.kind,
        checkpoint: contract.checkpoint,
        startingBlock: block.blockNumber
      });

      // Create and export checkpoint if checkpointing is on for the contract.
      if (contract.checkpoint) {
        await indexer.createCheckpoint(contract.address, block.blockHash);

        const state = await indexer.getLatestState(contract.address, StateKind.Checkpoint, block.blockNumber);
        assert(state);

        const data = indexer.getStateData(state);

        exportData.stateCheckpoints.push({
          contractAddress: state.contractAddress,
          cid: state.cid,
          kind: state.kind,
          data
        });
      }
    }

    if (this._argv.exportFile) {
      const encodedExportData = codec.encode(exportData);

      const filePath = path.resolve(this._argv.exportFile);
      const fileDir = path.dirname(filePath);

      if (!fs.existsSync(fileDir)) fs.mkdirSync(fileDir, { recursive: true });

      fs.writeFileSync(filePath, encodedExportData);
    } else {
      log(exportData);
    }

    log(`Export completed at height ${block.blockNumber}`);
    await database.close();
  }

  _getArgv (): any {
    return yargs.parserConfiguration({
      'parse-numbers': false
    }).options({
      configFile: {
        alias: 'f',
        type: 'string',
        require: true,
        demandOption: true,
        describe: 'Configuration file path (toml)',
        default: DEFAULT_CONFIG_PATH
      },
      exportFile: {
        alias: 'o',
        type: 'string',
        describe: 'Export file path'
      },
      blockNumber: {
        type: 'number',
        describe: 'Block number to create snapshot at'
      }
    }).argv;
  }
}
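
Because the snapshot is encoded with @ipld/dag-cbor, it can be decoded with the same codec on the import side. This is not part of the diff; a minimal sketch assuming a file written by ExportStateCmd above (the file path is illustrative):

// Sketch (not in this diff): reading back an export snapshot written by ExportStateCmd.
import fs from 'fs';
import * as codec from '@ipld/dag-cbor';

const exportFilePath = './state-snapshot.bin'; // illustrative path
const importData: any = codec.decode(fs.readFileSync(exportFilePath));

// The decoded object mirrors exportData: snapshotBlock, contracts, stateCheckpoints.
console.log(importData.snapshotBlock, importData.contracts.length, importData.stateCheckpoints.length);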
@@ -6,5 +6,7 @@ export * from './watch-contract';
export * from './reset/watcher';
export * from './reset/state';
export * from './checkpoint/create';
export * from './checkpoint/verify';
export * from './inspect-cid';
export * from './import-state';
export * from './export-state';
@@ -2,64 +2,27 @@
// Copyright 2022 Vulcanize, Inc.
//

import debug from 'debug';
import assert from 'assert';
import { VerifyCheckpointCmd } from '@cerc-io/cli';

import { getConfig, initClients, JobQueue, Config, verifyCheckpointData } from '@cerc-io/util';
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';

import { Database, ENTITY_TO_LATEST_ENTITY_MAP } from '../../database';
import { Database } from '../../database';
import { Indexer } from '../../indexer';

const log = debug('vulcanize:checkpoint-verify');

export const command = 'verify';

export const desc = 'Verify checkpoint';

export const builder = {
  cid: {
    alias: 'c',
    type: 'string',
    alias: 'c',
    demandOption: true,
    describe: 'Checkpoint CID to be verified'
  }
};

export const handler = async (argv: any): Promise<void> => {
  const config: Config = await getConfig(argv.configFile);
  const { ethClient, ethProvider } = await initClients(config);
  const createCheckpointCmd = new VerifyCheckpointCmd();
  await createCheckpointCmd.init(argv, Database, Indexer);

  const db = new Database(config.database);
  await db.init();

  const graphDb = new GraphDatabase(config.server, db.baseDatabase, ENTITY_TO_LATEST_ENTITY_MAP);
  await graphDb.init();

  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);

  const jobQueueConfig = config.jobQueue;
  assert(jobQueueConfig, 'Missing job queue config');

  const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
  assert(dbConnectionString, 'Missing job queue db connection string');

  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
  await jobQueue.start();

  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
  await indexer.init();

  graphWatcher.setIndexer(indexer);
  await graphWatcher.init();

  const state = await indexer.getStateByCID(argv.cid);
  assert(state, 'State for the provided CID doesn\'t exist.');
  const data = indexer.getStateData(state);

  log(`Verifying checkpoint data for contract ${state.contractAddress}`);
  await verifyCheckpointData(graphDb, state.block, data);
  log('Checkpoint data verified');

  await db.close();
  await createCheckpointCmd.exec();
};
@@ -2,146 +2,21 @@
// Copyright 2021 Vulcanize, Inc.
//

import assert from 'assert';
import yargs from 'yargs';
import 'reflect-metadata';
import debug from 'debug';
import fs from 'fs';
import path from 'path';

import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue, StateKind } from '@cerc-io/util';
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
import * as codec from '@ipld/dag-cbor';
import { ExportStateCmd } from '@cerc-io/cli';

import { Database, ENTITY_TO_LATEST_ENTITY_MAP } from '../database';
import { Database } from '../database';
import { Indexer } from '../indexer';

const log = debug('vulcanize:export-state');

const main = async (): Promise<void> => {
  const argv = await yargs.parserConfiguration({
    'parse-numbers': false
  }).options({
    configFile: {
      alias: 'f',
      type: 'string',
      require: true,
      demandOption: true,
      describe: 'Configuration file path (toml)',
      default: DEFAULT_CONFIG_PATH
    },
    exportFile: {
      alias: 'o',
      type: 'string',
      describe: 'Export file path'
    },
    blockNumber: {
      type: 'number',
      describe: 'Block number to create snapshot at'
    }
  }).argv;
  const exportStateCmd = new ExportStateCmd();
  await exportStateCmd.init(Database, Indexer);

  const config: Config = await getConfig(argv.configFile);
  const { ethClient, ethProvider } = await initClients(config);

  const db = new Database(config.database);
  await db.init();

  const graphDb = new GraphDatabase(config.server, db.baseDatabase, ENTITY_TO_LATEST_ENTITY_MAP);
  await graphDb.init();

  const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);

  const jobQueueConfig = config.jobQueue;
  assert(jobQueueConfig, 'Missing job queue config');

  const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
  assert(dbConnectionString, 'Missing job queue db connection string');

  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
  await jobQueue.start();

  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue, graphWatcher);
  await indexer.init();

  graphWatcher.setIndexer(indexer);
  await graphWatcher.init();

  const exportData: any = {
    snapshotBlock: {},
    contracts: [],
    stateCheckpoints: []
  };

  const contracts = await db.getContracts();
  let block = await indexer.getLatestStateIndexedBlock();
  assert(block);

  if (argv.blockNumber) {
    if (argv.blockNumber > block.blockNumber) {
      throw new Error(`Export snapshot block height ${argv.blockNumber} should be less than latest state indexed block height ${block.blockNumber}`);
    }

    const blocksAtSnapshotHeight = await indexer.getBlocksAtHeight(argv.blockNumber, false);

    if (!blocksAtSnapshotHeight.length) {
      throw new Error(`No blocks at snapshot height ${argv.blockNumber}`);
    }

    block = blocksAtSnapshotHeight[0];
  }

  log(`Creating export snapshot at block height ${block.blockNumber}`);

  // Export snapshot block.
  exportData.snapshotBlock = {
    blockNumber: block.blockNumber,
    blockHash: block.blockHash
  };

  // Export contracts and checkpoints.
  for (const contract of contracts) {
    if (contract.startingBlock > block.blockNumber) {
      continue;
    }

    exportData.contracts.push({
      address: contract.address,
      kind: contract.kind,
      checkpoint: contract.checkpoint,
      startingBlock: block.blockNumber
    });

    // Create and export checkpoint if checkpointing is on for the contract.
    if (contract.checkpoint) {
      await indexer.createCheckpoint(contract.address, block.blockHash);

      const state = await indexer.getLatestState(contract.address, StateKind.Checkpoint, block.blockNumber);
      assert(state);

      const data = indexer.getStateData(state);

      exportData.stateCheckpoints.push({
        contractAddress: state.contractAddress,
        cid: state.cid,
        kind: state.kind,
        data
      });
    }
  }

  if (argv.exportFile) {
    const encodedExportData = codec.encode(exportData);

    const filePath = path.resolve(argv.exportFile);
    const fileDir = path.dirname(filePath);

    if (!fs.existsSync(fileDir)) fs.mkdirSync(fileDir, { recursive: true });

    fs.writeFileSync(filePath, encodedExportData);
  } else {
    log(exportData);
  }
  await exportStateCmd.exec();
};

main().catch(err => {
@@ -280,11 +280,22 @@ export class Indexer implements IndexerInterface {
    return undefined;
  }

  async getLatestState (contractAddress: string, kind: StateKind | null, blockNumber?: number): Promise<State | undefined> {
    // TODO Implement
    return undefined;
  }

  async getStateByCID (cid: string): Promise<State | undefined> {
    // TODO Implement
    return undefined;
  }

  // Method to be used by export-state CLI.
  async createCheckpoint (contractAddress: string, blockHash: string): Promise<string | undefined> {
    // TODO Implement
    return undefined;
  }

  async saveOrUpdateState (state: State): Promise<State> {
    return {} as State;
  }
@@ -357,6 +368,11 @@ export class Indexer implements IndexerInterface {
    return {} as StateSyncStatus;
  }

  async getLatestStateIndexedBlock (): Promise<BlockProgress> {
    // TODO Implement
    return {} as BlockProgress;
  }

  async getLatestCanonicalBlock (): Promise<BlockProgress> {
    const syncStatus = await this.getSyncStatus();
    assert(syncStatus);
@@ -2,15 +2,10 @@
// Copyright 2021 Vulcanize, Inc.
//

import assert from 'assert';
import yargs from 'yargs';
import 'reflect-metadata';
import debug from 'debug';
import fs from 'fs';
import path from 'path';

import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue, StateKind } from '@cerc-io/util';
import * as codec from '@ipld/dag-cbor';
import { ExportStateCmd } from '@cerc-io/cli';

import { Database } from '../database';
import { Indexer } from '../indexer';
@@ -18,121 +13,10 @@ import { Indexer } from '../indexer';
const log = debug('vulcanize:export-state');

const main = async (): Promise<void> => {
  const argv = await yargs.parserConfiguration({
    'parse-numbers': false
  }).options({
    configFile: {
      alias: 'f',
      type: 'string',
      require: true,
      demandOption: true,
      describe: 'Configuration file path (toml)',
      default: DEFAULT_CONFIG_PATH
    },
    exportFile: {
      alias: 'o',
      type: 'string',
      describe: 'Export file path'
    },
    blockNumber: {
      type: 'number',
      describe: 'Block number to create snapshot at'
    }
  }).argv;
  const exportStateCmd = new ExportStateCmd();
  await exportStateCmd.init(Database, Indexer);

  const config: Config = await getConfig(argv.configFile);
  const { ethClient, ethProvider } = await initClients(config);

  const db = new Database(config.database);
  await db.init();

  const jobQueueConfig = config.jobQueue;
  assert(jobQueueConfig, 'Missing job queue config');

  const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
  assert(dbConnectionString, 'Missing job queue db connection string');

  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
  await jobQueue.start();

  const indexer = new Indexer(config.server, db, { ethClient }, ethProvider, jobQueue);
  await indexer.init();

  const exportData: any = {
    snapshotBlock: {},
    contracts: [],
    stateCheckpoints: []
  };

  const contracts = await db.getContracts();
  let block = await indexer.getLatestStateIndexedBlock();
  assert(block);

  if (argv.blockNumber) {
    if (argv.blockNumber > block.blockNumber) {
      throw new Error(`Export snapshot block height ${argv.blockNumber} should be less than latest state indexed block height ${block.blockNumber}`);
    }

    const blocksAtSnapshotHeight = await indexer.getBlocksAtHeight(argv.blockNumber, false);

    if (!blocksAtSnapshotHeight.length) {
      throw new Error(`No blocks at snapshot height ${argv.blockNumber}`);
    }

    block = blocksAtSnapshotHeight[0];
  }

  log(`Creating export snapshot at block height ${block.blockNumber}`);

  // Export snapshot block.
  exportData.snapshotBlock = {
    blockNumber: block.blockNumber,
    blockHash: block.blockHash
  };

  // Export contracts and checkpoints.
  for (const contract of contracts) {
    if (contract.startingBlock > block.blockNumber) {
      continue;
    }

    exportData.contracts.push({
      address: contract.address,
      kind: contract.kind,
      checkpoint: contract.checkpoint,
      startingBlock: block.blockNumber
    });

    // Create and export checkpoint if checkpointing is on for the contract.
    if (contract.checkpoint) {
      await indexer.createCheckpoint(contract.address, block.blockHash);

      const state = await indexer.getLatestState(contract.address, StateKind.Checkpoint, block.blockNumber);
      assert(state);

      const data = indexer.getStateData(state);

      exportData.stateCheckpoints.push({
        contractAddress: state.contractAddress,
        cid: state.cid,
        kind: state.kind,
        data
      });
    }
  }

  if (argv.exportFile) {
    const encodedExportData = codec.encode(exportData);

    const filePath = path.resolve(argv.exportFile);
    const fileDir = path.dirname(filePath);

    if (!fs.existsSync(fileDir)) fs.mkdirSync(fileDir, { recursive: true });

    fs.writeFileSync(filePath, encodedExportData);
  } else {
    log(exportData);
  }
  await exportStateCmd.exec();
};

main().catch(err => {
@@ -209,10 +209,22 @@ export class Indexer implements IndexerInterface {
    return undefined;
  }

  async getLatestState (contractAddress: string, kind: StateKind | null, blockNumber?: number): Promise<StateInterface | undefined> {
    return undefined;
  }

  async getStateByCID (cid: string): Promise<StateInterface | undefined> {
    return undefined;
  }

  async createCheckpoint (contractAddress: string, blockHash: string): Promise<string | undefined> {
    return undefined;
  }

  async getLatestStateIndexedBlock (): Promise<BlockProgressInterface> {
    return {} as BlockProgressInterface;
  }

  async saveOrUpdateState (state: StateInterface): Promise<StateInterface> {
    return {} as StateInterface;
  }
@@ -92,6 +92,7 @@ export interface IndexerInterface {
  getBlocks (blockFilter: { blockHash?: string, blockNumber?: number }): Promise<any>
  getBlocksAtHeight (height: number, isPruned: boolean): Promise<BlockProgressInterface[]>
  getLatestCanonicalBlock (): Promise<BlockProgressInterface>
  getLatestStateIndexedBlock (): Promise<BlockProgressInterface>
  getBlockEvents (blockHash: string, where: Where, queryOptions: QueryOptions): Promise<Array<EventInterface>>
  getAncestorAtDepth (blockHash: string, depth: number): Promise<string>
  saveBlockAndFetchEvents (block: DeepPartial<BlockProgressInterface>): Promise<[BlockProgressInterface, DeepPartial<EventInterface>[]]>
@@ -120,11 +121,13 @@ export interface IndexerInterface {
  processCanonicalBlock (blockHash: string, blockNumber: number): Promise<void>
  processCheckpoint (blockHash: string): Promise<void>
  processCLICheckpoint (contractAddress: string, blockHash?: string): Promise<string | undefined>
  createCheckpoint (contractAddress: string, blockHash: string): Promise<string | undefined>
  getStorageValue (storageLayout: StorageLayout, blockHash: string, contractAddress: string, variable: string, ...mappingKeys: MappingKey[]): Promise<ValueResult>
  updateSubgraphState?: (contractAddress: string, data: any) => void
  updateStateStatusMap (address: string, stateStatus: StateStatus): void
  getStateData (state: StateInterface): any
  getStateByCID (cid: string): Promise<StateInterface | undefined>
  getLatestState (contractAddress: string, kind: StateKind | null, blockNumber?: number): Promise<StateInterface | undefined>
  saveOrUpdateState (state: StateInterface): Promise<StateInterface>
  removeStates (blockNumber: number, kind: StateKind): Promise<void>
  resetWatcherToBlock (blockNumber: number): Promise<void>
@@ -164,8 +167,8 @@ export interface DatabaseInterface {
  saveEventEntity (queryRunner: QueryRunner, entity: EventInterface): Promise<EventInterface>;
  removeEntities<Entity> (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindManyOptions<Entity> | FindConditions<Entity>): Promise<void>;
  deleteEntitiesByConditions<Entity> (queryRunner: QueryRunner, entity: EntityTarget<Entity>, findConditions: FindConditions<Entity>): Promise<void>
  getContracts?: () => Promise<ContractInterface[]>
  saveContract?: (queryRunner: QueryRunner, contractAddress: string, kind: string, checkpoint: boolean, startingBlock: number) => Promise<ContractInterface>
  getContracts: () => Promise<ContractInterface[]>
  saveContract: (queryRunner: QueryRunner, contractAddress: string, kind: string, checkpoint: boolean, startingBlock: number) => Promise<ContractInterface>
  getLatestState (contractAddress: string, kind: StateKind | null, blockNumber?: number): Promise<StateInterface | undefined>
  getStates (where: FindConditions<StateInterface>): Promise<StateInterface[]>
  getDiffStatesInRange (contractAddress: string, startBlock: number, endBlock: number): Promise<StateInterface[]>