Accommodate CLI refactoring changes to codegen (#261)

* Use watch-contract CLI from cli package in codegen

* Use reset CLIs from cli package in codegen

* Use create checkpoint CLI from cli package in codegen

* Use inspect-cid CLI from cli package in codegen

* Use import state CLI from cli package in codegen

* Use export-state and verify-checkpoint CLI from cli package in codegen

* Use server CLI from cli package in codegen

* Use job-runner CLI from cli package in codegen

* Use index-block CLI from cli package in codegen

* Use fill CLI from cli package in codegen

* Minor codegen fixes

* Update watcher instructions to run job-runner before server
This commit is contained in:
prathamesh0 2022-11-25 00:01:20 -06:00 committed by GitHub
parent 590482ee53
commit 63a2c5804e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
49 changed files with 249 additions and 1259 deletions

View File

@ -15,15 +15,15 @@ const VERIFY_TEMPLATE_FILE = './templates/checkpoint-verify-template.handlebars'
* Writes the checkpoint file generated from a template to a stream. * Writes the checkpoint file generated from a template to a stream.
* @param outStream A writable output stream to write the checkpoint file to. * @param outStream A writable output stream to write the checkpoint file to.
*/ */
export function exportCheckpoint (checkpointOutStream: Writable, checkpointCreateOutStream: Writable, checkpointVerifyOutStream: Writable | undefined, subgraphPath: string): void { export function exportCheckpoint (checkpointOutStream: Writable, checkpointCreateOutStream: Writable, checkpointVerifyOutStream: Writable | undefined): void {
const checkpointTemplateString = fs.readFileSync(path.resolve(__dirname, CHECKPOINT_TEMPLATE_FILE)).toString(); const checkpointTemplateString = fs.readFileSync(path.resolve(__dirname, CHECKPOINT_TEMPLATE_FILE)).toString();
const checkpointTemplate = Handlebars.compile(checkpointTemplateString); const checkpointTemplate = Handlebars.compile(checkpointTemplateString);
const checkpoint = checkpointTemplate({ subgraphPath }); const checkpoint = checkpointTemplate({});
checkpointOutStream.write(checkpoint); checkpointOutStream.write(checkpoint);
const createCheckpointTemplateString = fs.readFileSync(path.resolve(__dirname, CREATE_TEMPLATE_FILE)).toString(); const createCheckpointTemplateString = fs.readFileSync(path.resolve(__dirname, CREATE_TEMPLATE_FILE)).toString();
const createCheckpointTemplate = Handlebars.compile(createCheckpointTemplateString); const createCheckpointTemplate = Handlebars.compile(createCheckpointTemplateString);
const createCheckpoint = createCheckpointTemplate({ subgraphPath }); const createCheckpoint = createCheckpointTemplate({});
checkpointCreateOutStream.write(createCheckpoint); checkpointCreateOutStream.write(createCheckpoint);
if (checkpointVerifyOutStream) { if (checkpointVerifyOutStream) {

View File

@ -12,6 +12,9 @@ columns:
pgType: integer pgType: integer
tsType: number tsType: number
columnType: Column columnType: Column
columnOptions:
- option: nullable
value: true
imports: imports:
- toImport: - toImport:
- Entity - Entity

View File

@ -13,9 +13,9 @@ const TEMPLATE_FILE = './templates/export-state-template.handlebars';
* Writes the export-state file generated from a template to a stream. * Writes the export-state file generated from a template to a stream.
* @param outStream A writable output stream to write the export-state file to. * @param outStream A writable output stream to write the export-state file to.
*/ */
export function exportState (outStream: Writable, subgraphPath: string): void { export function exportState (outStream: Writable): void {
const templateString = fs.readFileSync(path.resolve(__dirname, TEMPLATE_FILE)).toString(); const templateString = fs.readFileSync(path.resolve(__dirname, TEMPLATE_FILE)).toString();
const template = Handlebars.compile(templateString); const template = Handlebars.compile(templateString);
const exportState = template({ subgraphPath }); const exportState = template({});
outStream.write(exportState); outStream.write(exportState);
} }

View File

@ -8,23 +8,14 @@ import Handlebars from 'handlebars';
import { Writable } from 'stream'; import { Writable } from 'stream';
const FILL_TEMPLATE_FILE = './templates/fill-template.handlebars'; const FILL_TEMPLATE_FILE = './templates/fill-template.handlebars';
const FILL_STATE_TEMPLATE_FILE = './templates/fill-state-template.handlebars';
/** /**
* Writes the fill file generated from a template to a stream. * Writes the fill file generated from a template to a stream.
* @param fillOutStream A writable output stream to write the fill file to. * @param fillOutStream A writable output stream to write the fill file to.
* @param fillStateOutStream A writable output stream to write the fill state file to.
*/ */
export function exportFill (fillOutStream: Writable, fillStateOutStream: Writable | undefined, subgraphPath: string): void { export function exportFill (fillOutStream: Writable, subgraphPath: string): void {
const templateString = fs.readFileSync(path.resolve(__dirname, FILL_TEMPLATE_FILE)).toString(); const templateString = fs.readFileSync(path.resolve(__dirname, FILL_TEMPLATE_FILE)).toString();
const template = Handlebars.compile(templateString); const template = Handlebars.compile(templateString);
const fill = template({ subgraphPath }); const fill = template({ subgraphPath });
fillOutStream.write(fill); fillOutStream.write(fill);
if (fillStateOutStream) {
const templateString = fs.readFileSync(path.resolve(__dirname, FILL_STATE_TEMPLATE_FILE)).toString();
const template = Handlebars.compile(templateString);
const fillState = template({});
fillStateOutStream.write(fillState);
}
} }

View File

@ -38,6 +38,7 @@ import { exportInspectCID } from './inspect-cid';
import { getSubgraphConfig } from './utils/subgraph'; import { getSubgraphConfig } from './utils/subgraph';
import { exportIndexBlock } from './index-block'; import { exportIndexBlock } from './index-block';
import { exportSubscriber } from './subscriber'; import { exportSubscriber } from './subscriber';
import { exportReset } from './reset';
const main = async (): Promise<void> => { const main = async (): Promise<void> => {
const argv = await yargs(hideBin(process.argv)) const argv = await yargs(hideBin(process.argv))
@ -238,7 +239,14 @@ function generateWatcher (visitor: Visitor, contracts: any[], config: any) {
outStream = outputDir outStream = outputDir
? fs.createWriteStream(path.join(outputDir, 'src/cli/watch-contract.ts')) ? fs.createWriteStream(path.join(outputDir, 'src/cli/watch-contract.ts'))
: process.stdout; : process.stdout;
exportWatchContract(outStream, config.subgraphPath); exportWatchContract(outStream);
const resetOutStream = fs.createWriteStream(path.join(outputDir, 'src/cli/reset.ts'));
const resetJQOutStream = fs.createWriteStream(path.join(outputDir, 'src/cli/reset-cmds/job-queue.ts'));
const resetWatcherOutStream = fs.createWriteStream(path.join(outputDir, 'src/cli/reset-cmds/watcher.ts'));
const resetStateOutStream = fs.createWriteStream(path.join(outputDir, 'src/cli/reset-cmds/state.ts'));
exportReset(resetOutStream, resetJQOutStream, resetWatcherOutStream, resetStateOutStream);
let checkpointOutStream, checkpointCreateOutStream, checkpointVerifyOutStream; let checkpointOutStream, checkpointCreateOutStream, checkpointVerifyOutStream;
@ -256,7 +264,7 @@ function generateWatcher (visitor: Visitor, contracts: any[], config: any) {
} }
} }
exportCheckpoint(checkpointOutStream, checkpointCreateOutStream, checkpointVerifyOutStream, config.subgraphPath); exportCheckpoint(checkpointOutStream, checkpointCreateOutStream, checkpointVerifyOutStream);
outStream = outputDir outStream = outputDir
? fs.createWriteStream(path.join(outputDir, 'src/hooks.ts')) ? fs.createWriteStream(path.join(outputDir, 'src/hooks.ts'))
@ -266,15 +274,7 @@ function generateWatcher (visitor: Visitor, contracts: any[], config: any) {
const fillOutStream = outputDir const fillOutStream = outputDir
? fs.createWriteStream(path.join(outputDir, 'src/fill.ts')) ? fs.createWriteStream(path.join(outputDir, 'src/fill.ts'))
: process.stdout; : process.stdout;
exportFill(fillOutStream, config.subgraphPath);
let fillStateOutStream;
if (config.subgraphPath) {
fillStateOutStream = outputDir
? fs.createWriteStream(path.join(outputDir, 'src/fill-state.ts'))
: process.stdout;
}
exportFill(fillOutStream, fillStateOutStream, config.subgraphPath);
outStream = outputDir outStream = outputDir
? fs.createWriteStream(path.join(outputDir, 'src/types.ts')) ? fs.createWriteStream(path.join(outputDir, 'src/types.ts'))
@ -298,27 +298,20 @@ function generateWatcher (visitor: Visitor, contracts: any[], config: any) {
: process.stdout; : process.stdout;
visitor.exportClient(outStream, schemaContent, path.join(outputDir, 'src/gql')); visitor.exportClient(outStream, schemaContent, path.join(outputDir, 'src/gql'));
const resetOutStream = fs.createWriteStream(path.join(outputDir, 'src/cli/reset.ts'));
const resetJQOutStream = fs.createWriteStream(path.join(outputDir, 'src/cli/reset-cmds/job-queue.ts'));
const resetWatcherOutStream = fs.createWriteStream(path.join(outputDir, 'src/cli/reset-cmds/watcher.ts'));
const resetStateOutStream = fs.createWriteStream(path.join(outputDir, 'src/cli/reset-cmds/state.ts'));
visitor.exportReset(resetOutStream, resetJQOutStream, resetWatcherOutStream, resetStateOutStream, config.subgraphPath);
outStream = outputDir outStream = outputDir
? fs.createWriteStream(path.join(outputDir, 'src/cli/export-state.ts')) ? fs.createWriteStream(path.join(outputDir, 'src/cli/export-state.ts'))
: process.stdout; : process.stdout;
exportState(outStream, config.subgraphPath); exportState(outStream);
outStream = outputDir outStream = outputDir
? fs.createWriteStream(path.join(outputDir, 'src/cli/import-state.ts')) ? fs.createWriteStream(path.join(outputDir, 'src/cli/import-state.ts'))
: process.stdout; : process.stdout;
importState(outStream, config.subgraphPath); importState(outStream);
outStream = outputDir outStream = outputDir
? fs.createWriteStream(path.join(outputDir, 'src/cli/inspect-cid.ts')) ? fs.createWriteStream(path.join(outputDir, 'src/cli/inspect-cid.ts'))
: process.stdout; : process.stdout;
exportInspectCID(outStream, config.subgraphPath); exportInspectCID(outStream);
outStream = outputDir outStream = outputDir
? fs.createWriteStream(path.join(outputDir, 'src/cli/index-block.ts')) ? fs.createWriteStream(path.join(outputDir, 'src/cli/index-block.ts'))

View File

@ -13,9 +13,9 @@ const TEMPLATE_FILE = './templates/import-state-template.handlebars';
* Writes the import-state file generated from a template to a stream. * Writes the import-state file generated from a template to a stream.
* @param outStream A writable output stream to write the import-state file to. * @param outStream A writable output stream to write the import-state file to.
*/ */
export function importState (outStream: Writable, subgraphPath: string): void { export function importState (outStream: Writable): void {
const templateString = fs.readFileSync(path.resolve(__dirname, TEMPLATE_FILE)).toString(); const templateString = fs.readFileSync(path.resolve(__dirname, TEMPLATE_FILE)).toString();
const template = Handlebars.compile(templateString); const template = Handlebars.compile(templateString);
const importState = template({ subgraphPath }); const importState = template({});
outStream.write(importState); outStream.write(importState);
} }

View File

@ -13,9 +13,9 @@ const TEMPLATE_FILE = './templates/inspect-cid-template.handlebars';
* Writes the inspect-cid file generated from a template to a stream. * Writes the inspect-cid file generated from a template to a stream.
* @param outStream A writable output stream to write the inspect-cid file to. * @param outStream A writable output stream to write the inspect-cid file to.
*/ */
export function exportInspectCID (outStream: Writable, subgraphPath: string): void { export function exportInspectCID (outStream: Writable): void {
const templateString = fs.readFileSync(path.resolve(__dirname, TEMPLATE_FILE)).toString(); const templateString = fs.readFileSync(path.resolve(__dirname, TEMPLATE_FILE)).toString();
const template = Handlebars.compile(templateString); const template = Handlebars.compile(templateString);
const inspectCid = template({ subgraphPath }); const inspectCid = template({});
outStream.write(inspectCid); outStream.write(inspectCid);
} }

View File

@ -12,90 +12,31 @@ const RESET_JQ_TEMPLATE_FILE = './templates/reset-job-queue-template.handlebars'
const RESET_WATCHER_TEMPLATE_FILE = './templates/reset-watcher-template.handlebars'; const RESET_WATCHER_TEMPLATE_FILE = './templates/reset-watcher-template.handlebars';
const RESET_STATE_TEMPLATE_FILE = './templates/reset-state-template.handlebars'; const RESET_STATE_TEMPLATE_FILE = './templates/reset-state-template.handlebars';
export class Reset { /**
_queries: Array<any>; * Writes the reset.ts, job-queue.ts, watcher.ts, state.ts files generated from templates to respective streams.
_resetTemplateString: string; * @param resetOutStream A writable output stream to write the reset file to.
_resetJQTemplateString: string; * @param resetJQOutStream A writable output stream to write the reset job-queue file to.
_resetWatcherTemplateString: string; * @param resetWatcherOutStream A writable output stream to write the reset watcher file to.
_resetStateTemplateString: string; * @param resetStateOutStream A writable output stream to write the reset state file to.
*/
export function exportReset (resetOutStream: Writable, resetJQOutStream: Writable, resetWatcherOutStream: Writable, resetStateOutStream: Writable): void {
const resetTemplateString = fs.readFileSync(path.resolve(__dirname, RESET_TEMPLATE_FILE)).toString();
const resetTemplate = Handlebars.compile(resetTemplateString);
const resetString = resetTemplate({});
resetOutStream.write(resetString);
constructor () { const resetJQTemplateString = fs.readFileSync(path.resolve(__dirname, RESET_JQ_TEMPLATE_FILE)).toString();
this._queries = []; const resetJQTemplate = Handlebars.compile(resetJQTemplateString);
this._resetTemplateString = fs.readFileSync(path.resolve(__dirname, RESET_TEMPLATE_FILE)).toString(); const resetJQString = resetJQTemplate({});
this._resetJQTemplateString = fs.readFileSync(path.resolve(__dirname, RESET_JQ_TEMPLATE_FILE)).toString(); resetJQOutStream.write(resetJQString);
this._resetWatcherTemplateString = fs.readFileSync(path.resolve(__dirname, RESET_WATCHER_TEMPLATE_FILE)).toString();
this._resetStateTemplateString = fs.readFileSync(path.resolve(__dirname, RESET_STATE_TEMPLATE_FILE)).toString();
}
/** const resetWatcherTemplateString = fs.readFileSync(path.resolve(__dirname, RESET_WATCHER_TEMPLATE_FILE)).toString();
* Stores the query to be passed to the template. const resetWatcherTemplate = Handlebars.compile(resetWatcherTemplateString);
* @param name Name of the query. const resetWatcher = resetWatcherTemplate({});
*/ resetWatcherOutStream.write(resetWatcher);
addQuery (name: string): void {
// Check if the query is already added.
if (this._queries.some(query => query.name === name)) {
return;
}
const queryObject = { const resetStateTemplateString = fs.readFileSync(path.resolve(__dirname, RESET_STATE_TEMPLATE_FILE)).toString();
name, const resetStateTemplate = Handlebars.compile(resetStateTemplateString);
entityName: '' const resetState = resetStateTemplate({});
}; resetStateOutStream.write(resetState);
// eth_call mode: Capitalize first letter of entity name (balanceOf -> BalanceOf).
// storage mode: Capitalize second letter of entity name (_balances -> _Balances).
queryObject.entityName = (name.charAt(0) === '_')
? `_${name.charAt(1).toUpperCase()}${name.slice(2)}`
: `${name.charAt(0).toUpperCase()}${name.slice(1)}`;
this._queries.push(queryObject);
}
addSubgraphEntities (subgraphSchemaDocument: any): void {
const subgraphTypeDefs = subgraphSchemaDocument.definitions;
subgraphTypeDefs.forEach((def: any) => {
if (def.kind !== 'ObjectTypeDefinition') {
return;
}
this._queries.push({
entityName: def.name.value
});
});
}
/**
* Writes the reset.ts, job-queue.ts, state.ts files generated from templates to respective streams.
* @param outStream A writable output stream to write the database file to.
*/
/**
* Writes the reset.ts, job-queue.ts, watcher.ts, state.ts files generated from templates to respective streams.
* @param resetOutStream A writable output stream to write the reset file to.
* @param resetJQOutStream A writable output stream to write the reset job-queue file to.
* @param resetWatcherOutStream A writable output stream to write the reset watcher file to.
* @param resetStateOutStream A writable output stream to write the reset state file to.
*/
exportReset (resetOutStream: Writable, resetJQOutStream: Writable, resetWatcherOutStream: Writable, resetStateOutStream: Writable, subgraphPath: string): void {
const resetTemplate = Handlebars.compile(this._resetTemplateString);
const resetString = resetTemplate({});
resetOutStream.write(resetString);
const resetJQTemplate = Handlebars.compile(this._resetJQTemplateString);
const resetJQString = resetJQTemplate({});
resetJQOutStream.write(resetJQString);
const resetWatcherTemplate = Handlebars.compile(this._resetWatcherTemplateString);
const obj = {
queries: this._queries,
subgraphPath
};
const resetWatcher = resetWatcherTemplate(obj);
resetWatcherOutStream.write(resetWatcher);
const resetStateTemplate = Handlebars.compile(this._resetStateTemplateString);
const resetState = resetStateTemplate({});
resetStateOutStream.write(resetState);
}
} }

View File

@ -2,19 +2,11 @@
// Copyright 2022 Vulcanize, Inc. // Copyright 2022 Vulcanize, Inc.
// //
import debug from 'debug'; import { CreateCheckpointCmd } from '@cerc-io/cli';
import assert from 'assert';
import { getConfig, initClients, JobQueue, Config } from '@cerc-io/util';
{{#if (subgraphPath)}}
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
{{/if}}
import { Database } from '../../database'; import { Database } from '../../database';
import { Indexer } from '../../indexer'; import { Indexer } from '../../indexer';
const log = debug('vulcanize:checkpoint-create');
export const command = 'create'; export const command = 'create';
export const desc = 'Create checkpoint'; export const desc = 'Create checkpoint';
@ -33,39 +25,8 @@ export const builder = {
}; };
export const handler = async (argv: any): Promise<void> => { export const handler = async (argv: any): Promise<void> => {
const config: Config = await getConfig(argv.configFile); const createCheckpointCmd = new CreateCheckpointCmd();
const { ethClient, ethProvider } = await initClients(config); await createCheckpointCmd.init(argv, Database, Indexer);
const db = new Database(config.database); await createCheckpointCmd.exec();
await db.init();
{{#if (subgraphPath)}}
const graphDb = new GraphDatabase(config.server, db.baseDatabase);
await graphDb.init();
const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
{{/if}}
const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
assert(dbConnectionString, 'Missing job queue db connection string');
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
await jobQueue.start();
const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
await indexer.init();
{{#if (subgraphPath)}}
graphWatcher.setIndexer(indexer);
await graphWatcher.init();
{{/if}}
const blockHash = await indexer.processCLICheckpoint(argv.address, argv.blockHash);
log(`Created a checkpoint for contract ${argv.address} at block-hash ${blockHash}`);
await db.close();
}; };

View File

@ -2,65 +2,27 @@
// Copyright 2022 Vulcanize, Inc. // Copyright 2022 Vulcanize, Inc.
// //
import path from 'path'; import { VerifyCheckpointCmd } from '@cerc-io/cli';
import debug from 'debug';
import assert from 'assert';
import { getConfig, initClients, JobQueue, Config, verifyCheckpointData } from '@cerc-io/util'; import { Database } from '../../database';
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
import { Database, ENTITY_TO_LATEST_ENTITY_MAP, ENTITY_QUERY_TYPE_MAP } from '../../database';
import { Indexer } from '../../indexer'; import { Indexer } from '../../indexer';
const log = debug('vulcanize:checkpoint-verify');
export const command = 'verify'; export const command = 'verify';
export const desc = 'Verify checkpoint'; export const desc = 'Verify checkpoint';
export const builder = { export const builder = {
cid: { cid: {
alias: 'c',
type: 'string', type: 'string',
alias: 'c',
demandOption: true, demandOption: true,
describe: 'Checkpoint CID to be verified' describe: 'Checkpoint CID to be verified'
} }
}; };
export const handler = async (argv: any): Promise<void> => { export const handler = async (argv: any): Promise<void> => {
const config: Config = await getConfig(argv.configFile); const verifyCheckpointCmd = new VerifyCheckpointCmd();
const { ethClient, ethProvider } = await initClients(config); await verifyCheckpointCmd.init(argv, Database, Indexer);
const db = new Database(config.database); await verifyCheckpointCmd.exec();
await db.init();
const graphDb = new GraphDatabase(config.server, db.baseDatabase, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP);
await graphDb.init();
const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
assert(dbConnectionString, 'Missing job queue db connection string');
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
await jobQueue.start();
const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher);
await indexer.init();
graphWatcher.setIndexer(indexer);
await graphWatcher.init();
const state = await indexer.getStateByCID(argv.cid);
assert(state, 'State for the provided CID doesn\'t exist.');
const data = indexer.getStateData(state);
log(`Verifying checkpoint data for contract ${state.contractAddress}`);
await verifyCheckpointData(graphDb, state.block, data);
log('Checkpoint data verified');
await db.close();
}; };

View File

@ -11,7 +11,8 @@ import {
EventWatcher as BaseEventWatcher, EventWatcher as BaseEventWatcher,
EventWatcherInterface, EventWatcherInterface,
QUEUE_BLOCK_PROCESSING, QUEUE_BLOCK_PROCESSING,
QUEUE_EVENT_PROCESSING QUEUE_EVENT_PROCESSING,
IndexerInterface
} from '@cerc-io/util'; } from '@cerc-io/util';
import { Indexer } from './indexer'; import { Indexer } from './indexer';
@ -24,12 +25,12 @@ export class EventWatcher implements EventWatcherInterface {
_pubsub: PubSub _pubsub: PubSub
_jobQueue: JobQueue _jobQueue: JobQueue
constructor (ethClient: EthClient, indexer: Indexer, pubsub: PubSub, jobQueue: JobQueue) { constructor (ethClient: EthClient, indexer: IndexerInterface, pubsub: PubSub, jobQueue: JobQueue) {
assert(ethClient); assert(ethClient);
assert(indexer); assert(indexer);
this._ethClient = ethClient; this._ethClient = ethClient;
this._indexer = indexer; this._indexer = indexer as Indexer;
this._pubsub = pubsub; this._pubsub = pubsub;
this._jobQueue = jobQueue; this._jobQueue = jobQueue;
this._baseEventWatcher = new BaseEventWatcher(this._ethClient, this._indexer, this._pubsub, this._jobQueue); this._baseEventWatcher = new BaseEventWatcher(this._ethClient, this._indexer, this._pubsub, this._jobQueue);

View File

@ -2,163 +2,21 @@
// Copyright 2021 Vulcanize, Inc. // Copyright 2021 Vulcanize, Inc.
// //
import assert from 'assert';
import yargs from 'yargs';
import 'reflect-metadata'; import 'reflect-metadata';
import debug from 'debug'; import debug from 'debug';
import fs from 'fs';
import path from 'path';
import { import { ExportStateCmd } from '@cerc-io/cli';
Config,
DEFAULT_CONFIG_PATH,
getConfig,
initClients,
JobQueue,
{{#if (subgraphPath)}}
{{/if}}
StateKind
} from '@cerc-io/util';
{{#if (subgraphPath)}}
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
{{/if}}
import * as codec from '@ipld/dag-cbor';
import { Database{{#if (subgraphPath)}}, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP{{/if}} } from '../database'; import { Database } from '../database';
import { Indexer } from '../indexer'; import { Indexer } from '../indexer';
const log = debug('vulcanize:export-state'); const log = debug('vulcanize:export-state');
const main = async (): Promise<void> => { const main = async (): Promise<void> => {
const argv = await yargs.parserConfiguration({ const exportStateCmd = new ExportStateCmd();
'parse-numbers': false await exportStateCmd.init(Database, Indexer);
}).options({
configFile: {
alias: 'f',
type: 'string',
require: true,
demandOption: true,
describe: 'Configuration file path (toml)',
default: DEFAULT_CONFIG_PATH
},
exportFile: {
alias: 'o',
type: 'string',
describe: 'Export file path'
},
blockNumber: {
type: 'number',
describe: 'Block number to create snapshot at'
}
}).argv;
const config: Config = await getConfig(argv.configFile); await exportStateCmd.exec();
const { ethClient, ethProvider } = await initClients(config);
const db = new Database(config.database);
await db.init();
{{#if (subgraphPath)}}
const graphDb = new GraphDatabase(config.server, db.baseDatabase, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP);
await graphDb.init();
const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
{{/if}}
const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
assert(dbConnectionString, 'Missing job queue db connection string');
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
await jobQueue.start();
const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
await indexer.init();
{{#if (subgraphPath)}}
graphWatcher.setIndexer(indexer);
await graphWatcher.init();
{{/if}}
const exportData: any = {
snapshotBlock: {},
contracts: [],
stateCheckpoints: []
};
const contracts = await db.getContracts();
// Get latest block with hooks processed.
let block = await indexer.getLatestStateIndexedBlock();
assert(block);
if (argv.blockNumber) {
if (argv.blockNumber > block.blockNumber) {
throw new Error(`Export snapshot block height ${argv.blockNumber} should be less than latest state indexed block height ${block.blockNumber}`);
}
const blocksAtSnapshotHeight = await indexer.getBlocksAtHeight(argv.blockNumber, false);
if (!blocksAtSnapshotHeight.length) {
throw new Error(`No blocks at snapshot height ${argv.blockNumber}`);
}
block = blocksAtSnapshotHeight[0];
}
log(`Creating export snapshot at block height ${block.blockNumber}`);
// Export snapshot block.
exportData.snapshotBlock = {
blockNumber: block.blockNumber,
blockHash: block.blockHash
};
// Export contracts and checkpoints.
for (const contract of contracts) {
if (contract.startingBlock > block.blockNumber) {
continue;
}
exportData.contracts.push({
address: contract.address,
kind: contract.kind,
checkpoint: contract.checkpoint,
startingBlock: block.blockNumber
});
// Create and export checkpoint if checkpointing is on for the contract.
if (contract.checkpoint) {
await indexer.createCheckpoint(contract.address, block.blockHash);
const state = await indexer.getLatestState(contract.address, StateKind.Checkpoint, block.blockNumber);
assert(state);
const data = indexer.getStateData(state);
exportData.stateCheckpoints.push({
contractAddress: state.contractAddress,
cid: state.cid,
kind: state.kind,
data
});
}
}
if (argv.exportFile) {
const encodedExportData = codec.encode(exportData);
const filePath = path.resolve(argv.exportFile);
const fileDir = path.dirname(filePath);
if (!fs.existsSync(fileDir)) fs.mkdirSync(fileDir, { recursive: true });
fs.writeFileSync(filePath, encodedExportData);
} else {
log(exportData);
}
}; };
main().catch(err => { main().catch(err => {

View File

@ -1,104 +0,0 @@
//
// Copyright 2022 Vulcanize, Inc.
//
import 'reflect-metadata';
import debug from 'debug';
import { Between } from 'typeorm';
import { Database as GraphDatabase, prepareEntityState } from '@cerc-io/graph-node';
import { Indexer } from './indexer';
const log = debug('vulcanize:fill-state');
export const fillState = async (
indexer: Indexer,
graphDb: GraphDatabase,
dataSources: any[],
argv: {
startBlock: number,
endBlock: number
}
): Promise<void> => {
const { startBlock, endBlock } = argv;
if (startBlock > endBlock) {
log('endBlock should be greater than or equal to startBlock');
process.exit(1);
}
// NOTE: Assuming all blocks in the given range are in the pruned region
log(`Filling state for subgraph entities in range: [${startBlock}, ${endBlock}]`);
// Check that there are no existing diffs in this range
const existingStates = await indexer.getStates({ block: { blockNumber: Between(startBlock, endBlock) } });
if (existingStates.length > 0) {
log('found existing state(s) in the given range');
process.exit(1);
}
// Map: contractAddress -> entities updated
const contractEntitiesMap: Map<string, string[]> = new Map();
// Populate contractEntitiesMap using data sources from subgraph
// NOTE: Assuming each entity type is only mapped to a single contract
// This is true for eden subgraph; may not be the case for other subgraphs
dataSources.forEach((dataSource: any) => {
const { source: { address: contractAddress }, mapping: { entities } } = dataSource;
contractEntitiesMap.set(contractAddress, entities as string[]);
});
// Fill state for blocks in the given range
for (let blockNumber = startBlock; blockNumber <= endBlock; blockNumber++) {
// Get the canonical block hash at current height
const blocks = await indexer.getBlocksAtHeight(blockNumber, false);
if (blocks.length === 0) {
log(`block not found at height ${blockNumber}`);
process.exit(1);
} else if (blocks.length > 1) {
log(`found more than one non-pruned block at height ${blockNumber}`);
process.exit(1);
}
const blockHash = blocks[0].blockHash;
// Create initial state for contracts
await indexer.createInit(blockHash, blockNumber);
// Fill state for each contract in contractEntitiesMap
const contractStatePromises = Array.from(contractEntitiesMap.entries())
.map(async ([contractAddress, entities]): Promise<void> => {
// Get all the updated entities at this block
const updatedEntitiesListPromises = entities.map(async (entity): Promise<any[]> => {
return graphDb.getEntitiesForBlock(blockHash, entity);
});
const updatedEntitiesList = await Promise.all(updatedEntitiesListPromises);
// Populate state with all the updated entities of each entity type
updatedEntitiesList.forEach((updatedEntities, index) => {
const entityName = entities[index];
updatedEntities.forEach((updatedEntity) => {
// Prepare diff data for the entity update
const diffData = prepareEntityState(updatedEntity, entityName, indexer.getRelationsMap());
// Update the in-memory subgraph state
indexer.updateSubgraphState(contractAddress, diffData);
});
});
});
await Promise.all(contractStatePromises);
// Persist subgraph state to the DB
await indexer.dumpSubgraphState(blockHash, true);
await indexer.updateStateSyncStatusIndexedBlock(blockNumber);
// Create checkpoints
await indexer.processCheckpoint(blockHash);
await indexer.updateStateSyncStatusCheckpointBlock(blockNumber);
}
log(`Filled state for subgraph entities in range: [${startBlock}, ${endBlock}]`);
};

View File

@ -2,113 +2,37 @@
// Copyright 2021 Vulcanize, Inc. // Copyright 2021 Vulcanize, Inc.
// //
import assert from 'assert';
import 'reflect-metadata';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import debug from 'debug';
import { PubSub } from 'graphql-subscriptions';
import { Config, getConfig, fillBlocks, JobQueue, DEFAULT_CONFIG_PATH, initClients } from '@cerc-io/util';
{{#if (subgraphPath)}} {{#if (subgraphPath)}}
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node'; import assert from 'assert';
{{/if}}
import 'reflect-metadata';
import debug from 'debug';
import { FillCmd } from '@cerc-io/cli';
{{#if (subgraphPath)}}
import { getContractEntitiesMap } from '@cerc-io/graph-node';
{{/if}} {{/if}}
import { Database{{#if (subgraphPath)}}, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP{{/if}} } from './database'; import { Database{{#if (subgraphPath)}}, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP{{/if}} } from './database';
import { Indexer } from './indexer'; import { Indexer } from './indexer';
import { EventWatcher } from './events'; import { EventWatcher } from './events';
{{#if (subgraphPath)}}
import { fillState } from './fill-state';
{{/if}}
const log = debug('vulcanize:server'); const log = debug('vulcanize:fill');
export const main = async (): Promise<any> => { export const main = async (): Promise<any> => {
const argv = await yargs(hideBin(process.argv)).parserConfiguration({ const fillCmd = new FillCmd();
'parse-numbers': false await fillCmd.init(Database, Indexer, EventWatcher{{#if (subgraphPath)}}, {}, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP{{/if}});
}).env(
'FILL'
).options({
configFile: {
alias: 'f',
type: 'string',
demandOption: true,
describe: 'configuration file path (toml)',
default: DEFAULT_CONFIG_PATH
},
startBlock: {
type: 'number',
demandOption: true,
describe: 'Block number to start processing at'
},
{{#if (subgraphPath)}}
state: {
type: 'boolean',
default: false,
describe: 'Fill state for subgraph entities'
},
{{/if}}
endBlock: {
type: 'number',
demandOption: true,
describe: 'Block number to stop processing at'
},
prefetch: {
type: 'boolean',
default: false,
describe: 'Block and events prefetch mode'
},
batchBlocks: {
type: 'number',
default: 10,
describe: 'Number of blocks prefetched in batch'
}
}).argv;
const config: Config = await getConfig(argv.configFile);
const { ethClient, ethProvider } = await initClients(config);
const db = new Database(config.database);
await db.init();
{{#if (subgraphPath)}} {{#if (subgraphPath)}}
const indexer = fillCmd.indexer as Indexer;
assert(indexer);
const graphDb = new GraphDatabase(config.server, db.baseDatabase, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP); // Get contractEntitiesMap required for fill-state
await graphDb.init(); // NOTE: Assuming each entity type is only mapped to a single contract
const contractEntitiesMap = getContractEntitiesMap(indexer.graphWatcher.dataSources);
const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
{{/if}} {{/if}}
await fillCmd.exec({{#if (subgraphPath)}}contractEntitiesMap{{/if}});
const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
assert(dbConnectionString, 'Missing job queue db connection string');
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
await jobQueue.start();
const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
await indexer.init();
{{#if (subgraphPath)}}
graphWatcher.setIndexer(indexer);
await graphWatcher.init();
if (argv.state) {
assert(config.server.enableState, 'State creation disabled');
await fillState(indexer, graphDb, graphWatcher.dataSources, argv);
return;
}
{{/if}}
// Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
// Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
const pubsub = new PubSub();
const eventWatcher = new EventWatcher(ethClient, indexer, pubsub, jobQueue);
await fillBlocks(jobQueue, indexer, eventWatcher, jobQueueConfig.blockDelayInMilliSecs, argv);
}; };
main().catch(err => { main().catch(err => {

View File

@ -37,7 +37,7 @@ export async function createInitialState (indexer: Indexer, contractAddress: str
/** /**
* Hook function to create state diff. * Hook function to create state diff.
* @param indexer Indexer instance that contains methods to fetch the contract varaiable values. * @param indexer Indexer instance that contains methods to fetch the contract variable values.
* @param blockHash Block hash of the concerned block. * @param blockHash Block hash of the concerned block.
*/ */
export async function createStateDiff (indexer: Indexer, blockHash: string): Promise<void> { export async function createStateDiff (indexer: Indexer, blockHash: string): Promise<void> {

View File

@ -2,22 +2,12 @@
// Copyright 2021 Vulcanize, Inc. // Copyright 2021 Vulcanize, Inc.
// //
import assert from 'assert';
import 'reflect-metadata'; import 'reflect-metadata';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import debug from 'debug'; import debug from 'debug';
import { PubSub } from 'graphql-subscriptions';
import fs from 'fs';
import path from 'path';
import { getConfig, fillBlocks, JobQueue, DEFAULT_CONFIG_PATH, Config, initClients, StateKind } from '@cerc-io/util'; import { ImportStateCmd } from '@cerc-io/cli';
{{#if (subgraphPath)}}
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
{{/if}}
import * as codec from '@ipld/dag-cbor';
import { Database{{#if (subgraphPath)}}, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP{{/if}} } from '../database'; import { Database } from '../database';
import { Indexer } from '../indexer'; import { Indexer } from '../indexer';
import { EventWatcher } from '../events'; import { EventWatcher } from '../events';
import { State } from '../entity/State'; import { State } from '../entity/State';
@ -25,115 +15,10 @@ import { State } from '../entity/State';
const log = debug('vulcanize:import-state'); const log = debug('vulcanize:import-state');
export const main = async (): Promise<any> => { export const main = async (): Promise<any> => {
const argv = await yargs(hideBin(process.argv)).parserConfiguration({ const importStateCmd = new ImportStateCmd();
'parse-numbers': false await importStateCmd.init(Database, Indexer, EventWatcher);
}).options({
configFile: {
alias: 'f',
type: 'string',
demandOption: true,
describe: 'configuration file path (toml)',
default: DEFAULT_CONFIG_PATH
},
importFile: {
alias: 'i',
type: 'string',
demandOption: true,
describe: 'Import file path (JSON)'
}
}).argv;
const config: Config = await getConfig(argv.configFile); await importStateCmd.exec(State);
const { ethClient, ethProvider } = await initClients(config);
const db = new Database(config.database);
await db.init();
{{#if (subgraphPath)}}
const graphDb = new GraphDatabase(config.server, db.baseDatabase, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP);
await graphDb.init();
const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
{{/if}}
// Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
// Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
const pubsub = new PubSub();
const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
assert(dbConnectionString, 'Missing job queue db connection string');
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
await jobQueue.start();
const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
await indexer.init();
{{#if (subgraphPath)}}
graphWatcher.setIndexer(indexer);
await graphWatcher.init();
{{/if}}
const eventWatcher = new EventWatcher(ethClient, indexer, pubsub, jobQueue);
// Import data.
const importFilePath = path.resolve(argv.importFile);
const encodedImportData = fs.readFileSync(importFilePath);
const importData = codec.decode(Buffer.from(encodedImportData)) as any;
// Fill the snapshot block.
await fillBlocks(
jobQueue,
indexer,
eventWatcher,
jobQueueConfig.blockDelayInMilliSecs,
{
prefetch: true,
startBlock: importData.snapshotBlock.blockNumber,
endBlock: importData.snapshotBlock.blockNumber
}
);
// Fill the Contracts.
for (const contract of importData.contracts) {
await indexer.watchContract(contract.address, contract.kind, contract.checkpoint, contract.startingBlock);
}
// Get the snapshot block.
const block = await indexer.getBlockProgress(importData.snapshotBlock.blockHash);
assert(block);
// Fill the States.
for (const checkpoint of importData.stateCheckpoints) {
let state = new State();
state = Object.assign(state, checkpoint);
state.block = block;
state.data = Buffer.from(codec.encode(state.data));
state = await indexer.saveOrUpdateState(state);
{{#if (subgraphPath)}}
await graphWatcher.updateEntitiesFromState(state);
{{/if}}
}
// Mark snapshot block as completely processed.
block.isComplete = true;
await indexer.updateBlockProgress(block, block.lastProcessedEventIndex);
await indexer.updateSyncStatusChainHead(block.blockHash, block.blockNumber);
await indexer.updateSyncStatusIndexedBlock(block.blockHash, block.blockNumber);
await indexer.updateStateSyncStatusIndexedBlock(block.blockNumber);
await indexer.updateStateSyncStatusCheckpointBlock(block.blockNumber);
// The 'diff_staged' and 'init' State entries are unnecessary as checkpoints have been already created for the snapshot block.
await indexer.removeStates(block.blockNumber, StateKind.Init);
await indexer.removeStates(block.blockNumber, StateKind.DiffStaged);
log(`Import completed for snapshot block at height ${block.blockNumber}`);
}; };
main().catch(err => { main().catch(err => {

View File

@ -2,73 +2,21 @@
// Copyright 2022 Vulcanize, Inc. // Copyright 2022 Vulcanize, Inc.
// //
import yargs from 'yargs';
import 'reflect-metadata'; import 'reflect-metadata';
import debug from 'debug'; import debug from 'debug';
import assert from 'assert';
import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue, indexBlock } from '@cerc-io/util'; import { IndexBlockCmd } from '@cerc-io/cli';
{{#if (subgraphPath)}}
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
{{/if}}
import { Database{{#if (subgraphPath)}}, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP{{/if}} } from '../database'; import { Database } from '../database';
import { Indexer } from '../indexer'; import { Indexer } from '../indexer';
const log = debug('vulcanize:index-block'); const log = debug('vulcanize:index-block');
const main = async (): Promise<void> => { const main = async (): Promise<void> => {
const argv = await yargs.parserConfiguration({ const indexBlockCmd = new IndexBlockCmd();
'parse-numbers': false await indexBlockCmd.init(Database, Indexer);
}).options({
configFile: {
alias: 'f',
type: 'string',
require: true,
demandOption: true,
describe: 'Configuration file path (toml)',
default: DEFAULT_CONFIG_PATH
},
block: {
type: 'number',
require: true,
demandOption: true,
describe: 'Block number to index'
}
}).argv;
const config: Config = await getConfig(argv.configFile); await indexBlockCmd.exec();
const { ethClient, ethProvider } = await initClients(config);
const db = new Database(config.database);
await db.init();
{{#if (subgraphPath)}}
const graphDb = new GraphDatabase(config.server, db.baseDatabase, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP);
await graphDb.init();
const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
{{/if}}
const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
assert(dbConnectionString, 'Missing job queue db connection string');
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
await indexer.init();
{{#if (subgraphPath)}}
graphWatcher.setIndexer(indexer);
await graphWatcher.init();
{{/if}}
await indexBlock(indexer, jobQueueConfig.eventsInBatch, argv);
await db.close();
}; };
main().catch(err => { main().catch(err => {

View File

@ -31,7 +31,9 @@ import {
StateKind, StateKind,
StateStatus, StateStatus,
ResultEvent, ResultEvent,
getResultEvent getResultEvent,
DatabaseInterface,
Clients
} from '@cerc-io/util'; } from '@cerc-io/util';
{{#if (subgraphPath)}} {{#if (subgraphPath)}}
import { GraphWatcher, updateSubgraphState, dumpSubgraphState } from '@cerc-io/graph-node'; import { GraphWatcher, updateSubgraphState, dumpSubgraphState } from '@cerc-io/graph-node';
@ -87,16 +89,17 @@ export class Indexer implements IndexerInterface {
_subgraphStateMap: Map<string, any> _subgraphStateMap: Map<string, any>
{{/if}} {{/if}}
constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, ethProvider: BaseProvider, jobQueue: JobQueue{{#if (subgraphPath)}}, graphWatcher: GraphWatcher{{/if}}) { constructor (serverConfig: ServerConfig, db: DatabaseInterface, clients: Clients, ethProvider: BaseProvider, jobQueue: JobQueue{{#if (subgraphPath)}}, graphWatcher?: GraphWatcher{{/if}}) {
assert(db); assert(db);
assert(ethClient); assert(clients.ethClient);
this._db = db; this._db = db as Database;
this._ethClient = ethClient; this._ethClient = clients.ethClient;
this._ethProvider = ethProvider; this._ethProvider = ethProvider;
this._serverConfig = serverConfig; this._serverConfig = serverConfig;
this._baseIndexer = new BaseIndexer(this._serverConfig, this._db, this._ethClient, this._ethProvider, jobQueue); this._baseIndexer = new BaseIndexer(this._serverConfig, this._db, this._ethClient, this._ethProvider, jobQueue);
{{#if (subgraphPath)}} {{#if (subgraphPath)}}
assert(graphWatcher);
this._graphWatcher = graphWatcher; this._graphWatcher = graphWatcher;
{{/if}} {{/if}}
@ -105,12 +108,7 @@ export class Indexer implements IndexerInterface {
this._contractMap = new Map(); this._contractMap = new Map();
{{#each contracts as | contract |}} {{#each contracts as | contract |}}
const { const { abi: {{contract.contractName}}ABI{{#if contract.contractStorageLayout}}, storageLayout: {{contract.contractName}}StorageLayout{{/if}} } = {{contract.contractName}}Artifacts;
abi: {{contract.contractName}}ABI,
{{#if contract.contractStorageLayout}}
storageLayout: {{contract.contractName}}StorageLayout
{{/if}}
} = {{contract.contractName}}Artifacts;
{{/each}} {{/each}}
{{#each contracts as | contract |}} {{#each contracts as | contract |}}

View File

@ -2,16 +2,10 @@
// Copyright 2021 Vulcanize, Inc. // Copyright 2021 Vulcanize, Inc.
// //
import assert from 'assert';
import yargs from 'yargs';
import 'reflect-metadata'; import 'reflect-metadata';
import debug from 'debug'; import debug from 'debug';
import util from 'util';
import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@cerc-io/util'; import { InspectCIDCmd } from '@cerc-io/cli';
{{#if (subgraphPath)}}
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
{{/if}}
import { Database } from '../database'; import { Database } from '../database';
import { Indexer } from '../indexer'; import { Indexer } from '../indexer';
@ -19,61 +13,10 @@ import { Indexer } from '../indexer';
const log = debug('vulcanize:inspect-cid'); const log = debug('vulcanize:inspect-cid');
const main = async (): Promise<void> => { const main = async (): Promise<void> => {
const argv = await yargs.parserConfiguration({ const inspectCIDCmd = new InspectCIDCmd();
'parse-numbers': false await inspectCIDCmd.init(Database, Indexer);
}).options({
configFile: {
alias: 'f',
type: 'string',
require: true,
demandOption: true,
describe: 'Configuration file path (toml)',
default: DEFAULT_CONFIG_PATH
},
cid: {
alias: 'c',
type: 'string',
demandOption: true,
describe: 'CID to be inspected'
}
}).argv;
const config: Config = await getConfig(argv.configFile); await inspectCIDCmd.exec();
const { ethClient, ethProvider } = await initClients(config);
const db = new Database(config.database);
await db.init();
{{#if (subgraphPath)}}
const graphDb = new GraphDatabase(config.server, db.baseDatabase);
await graphDb.init();
const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
{{/if}}
const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
assert(dbConnectionString, 'Missing job queue db connection string');
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
await jobQueue.start();
const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
await indexer.init();
{{#if (subgraphPath)}}
graphWatcher.setIndexer(indexer);
await graphWatcher.init();
{{/if}}
const state = await indexer.getStateByCID(argv.cid);
assert(state, 'State for the provided CID doesn\'t exist.');
const stateData = await indexer.getStateData(state);
log(util.inspect(stateData, false, null));
}; };
main().catch(err => { main().catch(err => {

View File

@ -2,130 +2,26 @@
// Copyright 2021 Vulcanize, Inc. // Copyright 2021 Vulcanize, Inc.
// //
import assert from 'assert';
import 'reflect-metadata';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import debug from 'debug'; import debug from 'debug';
import { import { JobRunnerCmd } from '@cerc-io/cli';
getConfig, import { JobRunner } from '@cerc-io/util';
Config,
JobQueue,
JobRunner as BaseJobRunner,
QUEUE_BLOCK_PROCESSING,
QUEUE_EVENT_PROCESSING,
QUEUE_BLOCK_CHECKPOINT,
QUEUE_HOOKS,
JOB_KIND_PRUNE,
JobQueueConfig,
DEFAULT_CONFIG_PATH,
initClients,
startMetricsServer
} from '@cerc-io/util';
{{#if (subgraphPath)}}
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
{{/if}}
import { Indexer } from './indexer'; import { Indexer } from './indexer';
import { Database{{#if (subgraphPath)}}, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP{{/if}} } from './database'; import { Database{{#if (subgraphPath)}}, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP{{/if}} } from './database';
const log = debug('vulcanize:job-runner'); const log = debug('vulcanize:job-runner');
export class JobRunner {
_indexer: Indexer
_jobQueue: JobQueue
_baseJobRunner: BaseJobRunner
_jobQueueConfig: JobQueueConfig
constructor (jobQueueConfig: JobQueueConfig, indexer: Indexer, jobQueue: JobQueue) {
this._jobQueueConfig = jobQueueConfig;
this._indexer = indexer;
this._jobQueue = jobQueue;
this._baseJobRunner = new BaseJobRunner(this._jobQueueConfig, this._indexer, this._jobQueue);
}
async start (): Promise<void> {
await this._jobQueue.deleteAllJobs();
await this._baseJobRunner.resetToPrevIndexedBlock();
await this.subscribeBlockProcessingQueue();
await this.subscribeEventProcessingQueue();
await this.subscribeBlockCheckpointQueue();
await this.subscribeHooksQueue();
}
async subscribeBlockProcessingQueue (): Promise<void> {
await this._jobQueue.subscribe(QUEUE_BLOCK_PROCESSING, async (job) => {
await this._baseJobRunner.processBlock(job);
});
}
async subscribeEventProcessingQueue (): Promise<void> {
await this._jobQueue.subscribe(QUEUE_EVENT_PROCESSING, async (job) => {
await this._baseJobRunner.processEvent(job);
});
}
async subscribeHooksQueue (): Promise<void> {
await this._jobQueue.subscribe(QUEUE_HOOKS, async (job) => {
await this._baseJobRunner.processHooks(job);
});
}
async subscribeBlockCheckpointQueue (): Promise<void> {
await this._jobQueue.subscribe(QUEUE_BLOCK_CHECKPOINT, async (job) => {
await this._baseJobRunner.processCheckpoint(job);
});
}
}
export const main = async (): Promise<any> => { export const main = async (): Promise<any> => {
const argv = await yargs(hideBin(process.argv)) const jobRunnerCmd = new JobRunnerCmd();
.option('f', { await jobRunnerCmd.init(Database, Indexer{{#if (subgraphPath)}}, {}, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP{{/if}});
alias: 'config-file',
demandOption: true,
describe: 'configuration file path (toml)',
type: 'string',
default: DEFAULT_CONFIG_PATH
})
.argv;
const config: Config = await getConfig(argv.f); await jobRunnerCmd.exec(async (jobRunner: JobRunner): Promise<void> => {
const { ethClient, ethProvider } = await initClients(config); await jobRunner.subscribeBlockProcessingQueue();
await jobRunner.subscribeEventProcessingQueue();
const db = new Database(config.database); await jobRunner.subscribeBlockCheckpointQueue();
await db.init(); await jobRunner.subscribeHooksQueue();
{{#if (subgraphPath)}} });
const graphDb = new GraphDatabase(config.server, db.baseDatabase, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP);
await graphDb.init();
const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
{{/if}}
const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
assert(dbConnectionString, 'Missing job queue db connection string');
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
await jobQueue.start();
const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
await indexer.init();
{{#if (subgraphPath)}}
graphWatcher.setIndexer(indexer);
await graphWatcher.init();
await indexer.addContracts();
{{/if}}
const jobRunner = new JobRunner(jobQueueConfig, indexer, jobQueue);
await jobRunner.start();
startMetricsServer(config, indexer);
}; };
main().then(() => { main().then(() => {

View File

@ -42,18 +42,18 @@
"@apollo/client": "^3.3.19", "@apollo/client": "^3.3.19",
"@ethersproject/providers": "^5.4.4", "@ethersproject/providers": "^5.4.4",
"@ipld/dag-cbor": "^6.0.12", "@ipld/dag-cbor": "^6.0.12",
"@cerc-io/ipld-eth-client": "^0.2.13", "@cerc-io/cli": "^0.2.15",
"@cerc-io/solidity-mapper": "^0.2.13", "@cerc-io/ipld-eth-client": "^0.2.15",
"@cerc-io/util": "^0.2.13", "@cerc-io/solidity-mapper": "^0.2.15",
"@cerc-io/util": "^0.2.15",
{{#if (subgraphPath)}} {{#if (subgraphPath)}}
"@cerc-io/graph-node": "^0.2.13", "@cerc-io/graph-node": "^0.2.15",
{{/if}} {{/if}}
"apollo-type-bigint": "^0.1.3", "apollo-type-bigint": "^0.1.3",
"debug": "^4.3.1", "debug": "^4.3.1",
"ethers": "^5.4.4", "ethers": "^5.4.4",
"express": "^4.18.2", "express": "^4.18.2",
"graphql": "^15.5.0", "graphql": "^15.5.0",
"graphql-import-node": "^0.0.4",
"graphql-subscriptions": "^2.0.0", "graphql-subscriptions": "^2.0.0",
"json-bigint": "^1.0.0", "json-bigint": "^1.0.0",
"reflect-metadata": "^0.1.13", "reflect-metadata": "^0.1.13",

View File

@ -73,13 +73,15 @@ To enable GQL requests caching:
## Run ## Run
* Run the watcher: * If the watcher is a `lazy` watcher:
```bash * Run the server:
yarn server
```
GQL console: http://localhost:{{port}}/graphql ```bash
yarn server
```
GQL console: http://localhost:{{port}}/graphql
* If the watcher is an `active` watcher: * If the watcher is an `active` watcher:
@ -89,6 +91,14 @@ GQL console: http://localhost:{{port}}/graphql
yarn job-runner yarn job-runner
``` ```
* Run the server:
```bash
yarn server
```
GQL console: http://localhost:{{port}}/graphql
* To watch a contract: * To watch a contract:
```bash ```bash

View File

@ -2,14 +2,10 @@
// Copyright 2022 Vulcanize, Inc. // Copyright 2022 Vulcanize, Inc.
// //
import debug from 'debug'; import { ResetStateCmd } from '@cerc-io/cli';
import { getConfig, Config } from '@cerc-io/util';
import { Database } from '../../database'; import { Database } from '../../database';
const log = debug('vulcanize:reset-state');
export const command = 'state'; export const command = 'state';
export const desc = 'Reset State to a given block number'; export const desc = 'Reset State to a given block number';
@ -21,42 +17,8 @@ export const builder = {
}; };
export const handler = async (argv: any): Promise<void> => { export const handler = async (argv: any): Promise<void> => {
const { blockNumber } = argv; const resetStateCmd = new ResetStateCmd();
const config: Config = await getConfig(argv.configFile); await resetStateCmd.init(argv, Database);
// Initialize database await resetStateCmd.exec();
const db = new Database(config.database);
await db.init();
// Create a DB transaction
const dbTx = await db.createTransactionRunner();
console.time('time:reset-state');
try {
// Delete all State entries in the given range
await db.removeStatesAfterBlock(dbTx, blockNumber);
// Reset the stateSyncStatus.
const stateSyncStatus = await db.getStateSyncStatus();
if (stateSyncStatus) {
if (stateSyncStatus.latestIndexedBlockNumber > blockNumber) {
await db.updateStateSyncStatusIndexedBlock(dbTx, blockNumber, true);
}
if (stateSyncStatus.latestCheckpointBlockNumber > blockNumber) {
await db.updateStateSyncStatusCheckpointBlock(dbTx, blockNumber, true);
}
}
dbTx.commitTransaction();
} catch (error) {
await dbTx.rollbackTransaction();
throw error;
} finally {
await dbTx.release();
}
console.timeEnd('time:reset-state');
log(`Reset State successfully to block ${blockNumber}`);
}; };

View File

@ -2,24 +2,10 @@
// Copyright 2021 Vulcanize, Inc. // Copyright 2021 Vulcanize, Inc.
// //
import debug from 'debug'; import { ResetWatcherCmd } from '@cerc-io/cli';
import { MoreThan } from 'typeorm';
import assert from 'assert';
import { getConfig, initClients, resetJobs, JobQueue, Config } from '@cerc-io/util';
{{#if (subgraphPath)}}
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
{{/if}}
import { Database } from '../../database'; import { Database } from '../../database';
import { Indexer } from '../../indexer'; import { Indexer } from '../../indexer';
import { BlockProgress } from '../../entity/BlockProgress';
{{#each queries as | query |}}
import { {{query.entityName}} } from '../../entity/{{query.entityName}}';
{{/each}}
const log = debug('vulcanize:reset-watcher');
export const command = 'watcher'; export const command = 'watcher';
@ -32,38 +18,8 @@ export const builder = {
}; };
export const handler = async (argv: any): Promise<void> => { export const handler = async (argv: any): Promise<void> => {
const config: Config = await getConfig(argv.configFile); const resetWatcherCmd = new ResetWatcherCmd();
await resetJobs(config); await resetWatcherCmd.init(argv, Database, Indexer);
const { ethClient, ethProvider } = await initClients(config);
// Initialize database. await resetWatcherCmd.exec();
const db = new Database(config.database);
await db.init();
{{#if (subgraphPath)}}
const graphDb = new GraphDatabase(config.server, db.baseDatabase);
await graphDb.init();
const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
{{/if}}
const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
assert(dbConnectionString, 'Missing job queue db connection string');
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
await jobQueue.start();
const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
await indexer.init();
{{#if (subgraphPath)}}
graphWatcher.setIndexer(indexer);
await graphWatcher.init();
{{/if}}
await indexer.resetWatcherToBlock(argv.blockNumber);
log('Reset watcher successfully');
}; };

View File

@ -8,7 +8,17 @@ import debug from 'debug';
import Decimal from 'decimal.js'; import Decimal from 'decimal.js';
import { GraphQLResolveInfo, GraphQLScalarType } from 'graphql'; import { GraphQLResolveInfo, GraphQLScalarType } from 'graphql';
import { ValueResult, BlockHeight, gqlTotalQueryCount, gqlQueryCount, jsonBigIntStringReplacer, getResultState, setGQLCacheHints } from '@cerc-io/util'; import {
ValueResult,
BlockHeight,
gqlTotalQueryCount,
gqlQueryCount,
jsonBigIntStringReplacer,
getResultState,
setGQLCacheHints,
IndexerInterface,
EventWatcherInterface
} from '@cerc-io/util';
import { Indexer } from './indexer'; import { Indexer } from './indexer';
import { EventWatcher } from './events'; import { EventWatcher } from './events';
@ -21,8 +31,9 @@ import { {{query.entityName}} } from './entity/{{query.entityName}}';
const log = debug('vulcanize:resolver'); const log = debug('vulcanize:resolver');
export const createResolvers = async (indexer: Indexer, eventWatcher: EventWatcher): Promise<any> => { export const createResolvers = async (indexerArg: IndexerInterface, eventWatcherArg: EventWatcherInterface): Promise<any> => {
assert(indexer); const indexer = indexerArg as Indexer;
const eventWatcher = eventWatcherArg as EventWatcher;
const gqlCacheConfig = indexer.serverConfig.gqlCache; const gqlCacheConfig = indexer.serverConfig.gqlCache;

View File

@ -4,19 +4,10 @@
import fs from 'fs'; import fs from 'fs';
import path from 'path'; import path from 'path';
import assert from 'assert';
import 'reflect-metadata'; import 'reflect-metadata';
import express, { Application } from 'express';
import { PubSub } from 'graphql-subscriptions';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import debug from 'debug'; import debug from 'debug';
import 'graphql-import-node';
import { DEFAULT_CONFIG_PATH, getConfig, Config, JobQueue, KIND_ACTIVE, initClients, startGQLMetricsServer, createAndStartServer } from '@cerc-io/util'; import { ServerCmd } from '@cerc-io/cli';
{{#if (subgraphPath)}}
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
{{/if}}
import { createResolvers } from './resolvers'; import { createResolvers } from './resolvers';
import { Indexer } from './indexer'; import { Indexer } from './indexer';
@ -26,70 +17,12 @@ import { EventWatcher } from './events';
const log = debug('vulcanize:server'); const log = debug('vulcanize:server');
export const main = async (): Promise<any> => { export const main = async (): Promise<any> => {
const argv = await yargs(hideBin(process.argv)) const serverCmd = new ServerCmd();
.option('f', { await serverCmd.init(Database, Indexer, EventWatcher{{#if (subgraphPath)}}, {}, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP{{/if}});
alias: 'config-file',
demandOption: true,
describe: 'configuration file path (toml)',
type: 'string',
default: DEFAULT_CONFIG_PATH
})
.argv;
const config: Config = await getConfig(argv.f);
const { ethClient, ethProvider } = await initClients(config);
const { kind: watcherKind } = config.server;
const db = new Database(config.database);
await db.init();
{{#if (subgraphPath)}}
const graphDb = new GraphDatabase(config.server, db.baseDatabase, ENTITY_QUERY_TYPE_MAP, ENTITY_TO_LATEST_ENTITY_MAP);
await graphDb.init();
const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
{{/if}}
// Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
// Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
const pubsub = new PubSub();
const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
assert(dbConnectionString, 'Missing job queue db connection string');
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
await indexer.init();
{{#if (subgraphPath)}}
graphWatcher.setIndexer(indexer);
await graphWatcher.init();
{{/if}}
const eventWatcher = new EventWatcher(ethClient, indexer, pubsub, jobQueue);
if (watcherKind === KIND_ACTIVE) {
await jobQueue.start();
// Delete jobs to prevent creating jobs after completion of processing previous block.
await jobQueue.deleteAllJobs();
await eventWatcher.start();
}
const resolvers = await createResolvers(indexer, eventWatcher);
const typeDefs = fs.readFileSync(path.join(__dirname, 'schema.gql')).toString(); const typeDefs = fs.readFileSync(path.join(__dirname, 'schema.gql')).toString();
// Create an Express app return serverCmd.exec(createResolvers, typeDefs);
const app: Application = express();
const server = createAndStartServer(app, typeDefs, resolvers, config.server);
startGQLMetricsServer(config);
return { app, server };
}; };
main().then(() => { main().then(() => {

View File

@ -2,15 +2,10 @@
// Copyright 2021 Vulcanize, Inc. // Copyright 2021 Vulcanize, Inc.
// //
import yargs from 'yargs';
import 'reflect-metadata'; import 'reflect-metadata';
import debug from 'debug'; import debug from 'debug';
import assert from 'assert';
import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@cerc-io/util'; import { WatchContractCmd } from '@cerc-io/cli';
{{#if (subgraphPath)}}
import { GraphWatcher, Database as GraphDatabase } from '@cerc-io/graph-node';
{{/if}}
import { Database } from '../database'; import { Database } from '../database';
import { Indexer } from '../indexer'; import { Indexer } from '../indexer';
@ -18,76 +13,10 @@ import { Indexer } from '../indexer';
const log = debug('vulcanize:watch-contract'); const log = debug('vulcanize:watch-contract');
const main = async (): Promise<void> => { const main = async (): Promise<void> => {
const argv = await yargs.parserConfiguration({ const watchContractCmd = new WatchContractCmd();
'parse-numbers': false await watchContractCmd.init(Database, Indexer);
}).options({
configFile: {
alias: 'f',
type: 'string',
require: true,
demandOption: true,
describe: 'Configuration file path (toml)',
default: DEFAULT_CONFIG_PATH
},
address: {
type: 'string',
require: true,
demandOption: true,
describe: 'Address of the deployed contract'
},
kind: {
type: 'string',
require: true,
demandOption: true,
describe: 'Kind of contract'
},
checkpoint: {
type: 'boolean',
require: true,
demandOption: true,
describe: 'Turn checkpointing on'
},
startingBlock: {
type: 'number',
default: 1,
describe: 'Starting block'
}
}).argv;
const config: Config = await getConfig(argv.configFile); await watchContractCmd.exec();
const { ethClient, ethProvider } = await initClients(config);
const db = new Database(config.database);
await db.init();
{{#if (subgraphPath)}}
const graphDb = new GraphDatabase(config.server, db.baseDatabase);
await graphDb.init();
const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server);
{{/if}}
const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
assert(dbConnectionString, 'Missing job queue db connection string');
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs });
await jobQueue.start();
const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue{{#if (subgraphPath)}}, graphWatcher{{/if}});
await indexer.init();
{{#if (subgraphPath)}}
graphWatcher.setIndexer(indexer);
await graphWatcher.init();
{{/if}}
await indexer.watchContract(argv.address, argv.kind, argv.checkpoint, argv.startingBlock);
await db.close();
await jobQueue.stop();
}; };
main().catch(err => { main().catch(err => {

View File

@ -12,7 +12,6 @@ import { Indexer } from './indexer';
import { Resolvers } from './resolvers'; import { Resolvers } from './resolvers';
import { Schema } from './schema'; import { Schema } from './schema';
import { Client } from './client'; import { Client } from './client';
import { Reset } from './reset';
import { Param } from './utils/types'; import { Param } from './utils/types';
import { MODE_ETH_CALL, MODE_STORAGE } from './utils/constants'; import { MODE_ETH_CALL, MODE_STORAGE } from './utils/constants';
import { parseSubgraphSchema } from './utils/subgraph'; import { parseSubgraphSchema } from './utils/subgraph';
@ -25,7 +24,6 @@ export class Visitor {
_entity: Entity; _entity: Entity;
_database: Database; _database: Database;
_client: Client; _client: Client;
_reset: Reset;
_types: Types; _types: Types;
_contract?: { name: string, kind: string }; _contract?: { name: string, kind: string };
@ -37,7 +35,6 @@ export class Visitor {
this._entity = new Entity(); this._entity = new Entity();
this._database = new Database(); this._database = new Database();
this._client = new Client(); this._client = new Client();
this._reset = new Reset();
this._types = new Types(); this._types = new Types();
} }
@ -75,7 +72,6 @@ export class Visitor {
this._entity.addQuery(name, params, returnType); this._entity.addQuery(name, params, returnType);
this._database.addQuery(name, params, returnType); this._database.addQuery(name, params, returnType);
this._client.addQuery(name, params, returnType); this._client.addQuery(name, params, returnType);
this._reset.addQuery(name);
assert(this._contract); assert(this._contract);
this._indexer.addQuery(this._contract.name, MODE_ETH_CALL, name, params, returnType); this._indexer.addQuery(this._contract.name, MODE_ETH_CALL, name, params, returnType);
@ -123,7 +119,6 @@ export class Visitor {
this._entity.addQuery(name, params, returnType); this._entity.addQuery(name, params, returnType);
this._database.addQuery(name, params, returnType); this._database.addQuery(name, params, returnType);
this._client.addQuery(name, params, returnType); this._client.addQuery(name, params, returnType);
this._reset.addQuery(name);
assert(this._contract); assert(this._contract);
this._indexer.addQuery(this._contract.name, MODE_STORAGE, name, params, returnType, stateVariableType); this._indexer.addQuery(this._contract.name, MODE_STORAGE, name, params, returnType, stateVariableType);
@ -153,7 +148,6 @@ export class Visitor {
this._types.addSubgraphTypes(subgraphSchemaDocument); this._types.addSubgraphTypes(subgraphSchemaDocument);
this._entity.addSubgraphEntities(subgraphSchemaDocument); this._entity.addSubgraphEntities(subgraphSchemaDocument);
this._resolvers.addSubgraphResolvers(subgraphSchemaDocument); this._resolvers.addSubgraphResolvers(subgraphSchemaDocument);
this._reset.addSubgraphEntities(subgraphSchemaDocument);
this._indexer.addSubgraphEntities(subgraphSchemaDocument); this._indexer.addSubgraphEntities(subgraphSchemaDocument);
this._database.addSubgraphEntities(subgraphSchemaDocument); this._database.addSubgraphEntities(subgraphSchemaDocument);
} }
@ -210,17 +204,6 @@ export class Visitor {
this._client.exportClient(outStream, schemaContent, gqlDir); this._client.exportClient(outStream, schemaContent, gqlDir);
} }
/**
* Writes the reset.ts, job-queue.ts, watcher.ts, state.ts files generated from templates to respective streams.
* @param resetOutStream A writable output stream to write the reset file to.
* @param resetJQOutStream A writable output stream to write the reset job-queue file to.
* @param resetWatcherOutStream A writable output stream to write the reset watcher file to.
* @param resetStateOutStream A writable output stream to write the reset state file to.
*/
exportReset (resetOutStream: Writable, resetJQOutStream: Writable, resetWatcherOutStream: Writable, resetStateOutStream: Writable, subgraphPath: string): void {
this._reset.exportReset(resetOutStream, resetJQOutStream, resetWatcherOutStream, resetStateOutStream, subgraphPath);
}
/** /**
* Writes the types file generated from a template to a stream. * Writes the types file generated from a template to a stream.
* @param outStream A writable output stream to write the database file to. * @param outStream A writable output stream to write the database file to.

View File

@ -13,9 +13,9 @@ const TEMPLATE_FILE = './templates/watch-contract-template.handlebars';
* Writes the watch-contract file generated from a template to a stream. * Writes the watch-contract file generated from a template to a stream.
* @param outStream A writable output stream to write the watch-contract file to. * @param outStream A writable output stream to write the watch-contract file to.
*/ */
export function exportWatchContract (outStream: Writable, subgraphPath: string): void { export function exportWatchContract (outStream: Writable): void {
const templateString = fs.readFileSync(path.resolve(__dirname, TEMPLATE_FILE)).toString(); const templateString = fs.readFileSync(path.resolve(__dirname, TEMPLATE_FILE)).toString();
const template = Handlebars.compile(templateString); const template = Handlebars.compile(templateString);
const events = template({ subgraphPath }); const events = template({});
outStream.write(events); outStream.write(events);
} }

View File

@ -61,13 +61,15 @@
## Run ## Run
* Run the watcher: * If the watcher is a `lazy` watcher:
```bash * Run the server:
yarn server
```
GQL console: http://localhost:3012/graphql ```bash
yarn server
```
GQL console: http://localhost:3012/graphql
* If the watcher is an `active` watcher: * If the watcher is an `active` watcher:
@ -77,6 +79,14 @@ GQL console: http://localhost:3012/graphql
yarn job-runner yarn job-runner
``` ```
* Run the server:
```bash
yarn server
```
GQL console: http://localhost:3012/graphql
* To watch a contract: * To watch a contract:
```bash ```bash

View File

@ -51,7 +51,6 @@
"ethers": "^5.4.4", "ethers": "^5.4.4",
"express": "^4.18.2", "express": "^4.18.2",
"graphql": "^15.5.0", "graphql": "^15.5.0",
"graphql-import-node": "^0.0.4",
"graphql-subscriptions": "^2.0.0", "graphql-subscriptions": "^2.0.0",
"json-bigint": "^1.0.0", "json-bigint": "^1.0.0",
"reflect-metadata": "^0.1.13", "reflect-metadata": "^0.1.13",

View File

@ -21,8 +21,8 @@ export const builder = {
}; };
export const handler = async (argv: any): Promise<void> => { export const handler = async (argv: any): Promise<void> => {
const createCheckpointCmd = new VerifyCheckpointCmd(); const verifyCheckpointCmd = new VerifyCheckpointCmd();
await createCheckpointCmd.init(argv, Database, Indexer); await verifyCheckpointCmd.init(argv, Database, Indexer);
await createCheckpointCmd.exec(); await verifyCheckpointCmd.exec();
}; };

View File

@ -6,7 +6,6 @@ import fs from 'fs';
import path from 'path'; import path from 'path';
import 'reflect-metadata'; import 'reflect-metadata';
import debug from 'debug'; import debug from 'debug';
import 'graphql-import-node';
import { ServerCmd } from '@cerc-io/cli'; import { ServerCmd } from '@cerc-io/cli';

View File

@ -70,16 +70,6 @@ yarn build
Run the watcher: Run the watcher:
```bash
$ yarn server
# For development.
$ yarn server:dev
# For specifying config file.
$ yarn server -f environments/local.toml
```
Start the job runner: Start the job runner:
```bash ```bash
@ -92,6 +82,18 @@ $ yarn job-runner:dev
$ yarn job-runner -f environments/local.toml $ yarn job-runner -f environments/local.toml
``` ```
Start the server:
```bash
$ yarn server
# For development.
$ yarn server:dev
# For specifying config file.
$ yarn server -f environments/local.toml
```
GQL console: http://localhost:3001/graphql GQL console: http://localhost:3001/graphql
Deploy an ERC20 token: Deploy an ERC20 token:

View File

@ -52,7 +52,6 @@
"ethers": "^5.4.4", "ethers": "^5.4.4",
"express": "^4.18.2", "express": "^4.18.2",
"graphql": "^15.5.0", "graphql": "^15.5.0",
"graphql-import-node": "^0.0.4",
"graphql-request": "^3.4.0", "graphql-request": "^3.4.0",
"graphql-subscriptions": "^2.0.0", "graphql-subscriptions": "^2.0.0",
"json-bigint": "^1.0.0", "json-bigint": "^1.0.0",

View File

@ -3,7 +3,6 @@
// //
import debug from 'debug'; import debug from 'debug';
import 'graphql-import-node';
import { ServerCmd } from '@cerc-io/cli'; import { ServerCmd } from '@cerc-io/cli';

View File

@ -71,11 +71,15 @@ Follow the steps below or follow the [Demo](./demo.md)
* Run the watcher: * Run the watcher:
```bash * If the watcher is a `lazy` watcher:
yarn server
```
GQL console: http://localhost:3006/graphql * Run the server:
```bash
yarn server
```
GQL console: http://localhost:3006/graphql
* If the watcher is an `active` watcher: * If the watcher is an `active` watcher:
@ -85,6 +89,14 @@ GQL console: http://localhost:3006/graphql
yarn job-runner yarn job-runner
``` ```
* Run the server:
```bash
yarn server
```
GQL console: http://localhost:3012/graphql
* To watch a contract: * To watch a contract:
```bash ```bash

View File

@ -58,7 +58,6 @@
"ethers": "^5.4.4", "ethers": "^5.4.4",
"express": "^4.18.2", "express": "^4.18.2",
"graphql": "^15.5.0", "graphql": "^15.5.0",
"graphql-import-node": "^0.0.4",
"graphql-subscriptions": "^2.0.0", "graphql-subscriptions": "^2.0.0",
"json-bigint": "^1.0.0", "json-bigint": "^1.0.0",
"reflect-metadata": "^0.1.13", "reflect-metadata": "^0.1.13",

View File

@ -6,7 +6,6 @@ import fs from 'fs';
import path from 'path'; import path from 'path';
import 'reflect-metadata'; import 'reflect-metadata';
import debug from 'debug'; import debug from 'debug';
import 'graphql-import-node';
import { ServerCmd } from '@cerc-io/cli'; import { ServerCmd } from '@cerc-io/cli';

View File

@ -194,18 +194,6 @@ export class Database {
} }
} }
async getEntitiesForBlock (blockHash: string, tableName: string): Promise<any[]> {
const repo = this._conn.getRepository(tableName);
const entities = await repo.find({
where: {
blockHash
}
});
return entities;
}
async getEntityIdsAtBlockNumber (blockNumber: number, tableName: string): Promise<string[]> { async getEntityIdsAtBlockNumber (blockNumber: number, tableName: string): Promise<string[]> {
const repo = this._conn.getRepository(tableName); const repo = this._conn.getRepository(tableName);

View File

@ -12,10 +12,9 @@ import { SelectionNode } from 'graphql';
import { ResultObject } from '@vulcanize/assemblyscript/lib/loader'; import { ResultObject } from '@vulcanize/assemblyscript/lib/loader';
import { EthClient } from '@cerc-io/ipld-eth-client'; import { EthClient } from '@cerc-io/ipld-eth-client';
import { getFullBlock, BlockHeight, ServerConfig, getFullTransaction, QueryOptions, StateInterface, IndexerInterface, BlockProgressInterface } from '@cerc-io/util'; import { getFullBlock, BlockHeight, ServerConfig, getFullTransaction, QueryOptions, IndexerInterface, BlockProgressInterface } from '@cerc-io/util';
import { createBlock, createEvent, getSubgraphConfig, resolveEntityFieldConflicts, Transaction } from './utils'; import { createBlock, createEvent, getSubgraphConfig, resolveEntityFieldConflicts, Transaction } from './utils';
import { updateEntitiesFromState } from './state-utils';
import { Context, GraphData, instantiate } from './loader'; import { Context, GraphData, instantiate } from './loader';
import { Database, DEFAULT_LIMIT } from './database'; import { Database, DEFAULT_LIMIT } from './database';
@ -349,12 +348,6 @@ export class GraphWatcher {
} }
} }
// TODO Remove after updating codegen CLIs
async updateEntitiesFromState (state: StateInterface) {
assert(this._indexer);
await updateEntitiesFromState(this._database, this._indexer, state);
}
updateEntityCacheFrothyBlocks (blockProgress: BlockProgressInterface): void { updateEntityCacheFrothyBlocks (blockProgress: BlockProgressInterface): void {
assert(this._indexer); assert(this._indexer);
this._database.updateEntityCacheFrothyBlocks(blockProgress, this._indexer.serverConfig.clearEntitiesCacheInterval); this._database.updateEntityCacheFrothyBlocks(blockProgress, this._indexer.serverConfig.clearEntitiesCacheInterval);

View File

@ -63,13 +63,13 @@
## Run ## Run
* Run the watcher: * Run the server:
```bash ```bash
yarn server yarn server
``` ```
GQL console: http://localhost:3008/graphql GQL console: http://localhost:3008/graphql
* If the watcher is an `active` watcher: * If the watcher is an `active` watcher:
@ -79,6 +79,14 @@ GQL console: http://localhost:3008/graphql
yarn job-runner yarn job-runner
``` ```
* Run the server:
```bash
yarn server
```
GQL console: http://localhost:3008/graphql
* To watch a contract: * To watch a contract:
```bash ```bash

View File

@ -51,7 +51,6 @@
"ethers": "^5.4.4", "ethers": "^5.4.4",
"express": "^4.18.2", "express": "^4.18.2",
"graphql": "^15.5.0", "graphql": "^15.5.0",
"graphql-import-node": "^0.0.4",
"graphql-subscriptions": "^2.0.0", "graphql-subscriptions": "^2.0.0",
"json-bigint": "^1.0.0", "json-bigint": "^1.0.0",
"lodash": "^4.17.21", "lodash": "^4.17.21",

View File

@ -21,8 +21,8 @@ export const builder = {
}; };
export const handler = async (argv: any): Promise<void> => { export const handler = async (argv: any): Promise<void> => {
const createCheckpointCmd = new VerifyCheckpointCmd(); const verifyCheckpointCmd = new VerifyCheckpointCmd();
await createCheckpointCmd.init(argv, Database, Indexer); await verifyCheckpointCmd.init(argv, Database, Indexer);
await createCheckpointCmd.exec(); await verifyCheckpointCmd.exec();
}; };

View File

@ -6,7 +6,6 @@ import fs from 'fs';
import path from 'path'; import path from 'path';
import 'reflect-metadata'; import 'reflect-metadata';
import debug from 'debug'; import debug from 'debug';
import 'graphql-import-node';
import { ServerCmd } from '@cerc-io/cli'; import { ServerCmd } from '@cerc-io/cli';

View File

@ -63,13 +63,13 @@
Follow the steps below or follow the [Demo](./demo.md) Follow the steps below or follow the [Demo](./demo.md)
* Run the watcher: * Run the server:
```bash ```bash
yarn server yarn server
``` ```
GQL console: http://localhost:3010/graphql GQL console: http://localhost:3010/graphql
* If the watcher is an `active` watcher: * If the watcher is an `active` watcher:
@ -79,6 +79,14 @@ GQL console: http://localhost:3010/graphql
yarn job-runner yarn job-runner
``` ```
* Run the server:
```bash
yarn server
```
GQL console: http://localhost:3010/graphql
* To watch a contract: * To watch a contract:
```bash ```bash

View File

@ -49,7 +49,6 @@
"ethers": "^5.4.4", "ethers": "^5.4.4",
"express": "^4.18.2", "express": "^4.18.2",
"graphql": "^15.5.0", "graphql": "^15.5.0",
"graphql-import-node": "^0.0.4",
"graphql-subscriptions": "^2.0.0", "graphql-subscriptions": "^2.0.0",
"json-bigint": "^1.0.0", "json-bigint": "^1.0.0",
"reflect-metadata": "^0.1.13", "reflect-metadata": "^0.1.13",

View File

@ -6,7 +6,6 @@ import fs from 'fs';
import path from 'path'; import path from 'path';
import 'reflect-metadata'; import 'reflect-metadata';
import debug from 'debug'; import debug from 'debug';
import 'graphql-import-node';
import { ServerCmd } from '@cerc-io/cli'; import { ServerCmd } from '@cerc-io/cli';

View File

@ -7965,11 +7965,6 @@ graphql-compose@^9.0.3:
graphql-type-json "0.3.2" graphql-type-json "0.3.2"
object-path "0.11.7" object-path "0.11.7"
graphql-import-node@^0.0.4:
version "0.0.4"
resolved "https://registry.yarnpkg.com/graphql-import-node/-/graphql-import-node-0.0.4.tgz#0522f058978c7e1b99d1e6be1b851ee17007b111"
integrity sha512-okpdABQIgIM0qdx9Mfgdu6fTsFEazZmHZdEU34cijkUj9D1db1SyPRGHPxbXmbacamhEF41ckxpCAgHiGliryQ==
graphql-request@^3.4.0: graphql-request@^3.4.0:
version "3.4.0" version "3.4.0"
resolved "https://registry.yarnpkg.com/graphql-request/-/graphql-request-3.4.0.tgz#3a400cd5511eb3c064b1873afb059196bbea9c2b" resolved "https://registry.yarnpkg.com/graphql-request/-/graphql-request-3.4.0.tgz#3a400cd5511eb3c064b1873afb059196bbea9c2b"