Generate GQL API for subgraph entities and auto-diff based on store set (#38)
* Add subgraph schema types to the generated schema
* Add queries for subgraph entities
* Add entity generation for subgraph entities
* Call subgraph event handler in indexer
* Refactor subgraph schema and entity generation
* Add resolvers generation for subgraph entities
* Get event signature in the event
* Add NonNullType check for field type in entity generation
* Auto-diff based on store set
* Use contract address from data source in loader
* Change subgraph-schema arg to subgraph-path arg
Parent: 83775608ec
Commit: 06ba24e38f
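Before the file-by-file diff, a condensed sketch of the wiring this commit repeats across the generated CLI entry points (checkpoint, export-state, inspect-cid, watch-contract, fill, server, job-runner, reset). The helper function is illustrative only — the generated code inlines these steps, and the `config`, `db` and client objects are whatever each entry point already constructs:

```typescript
import path from 'path';

import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';

// Illustrative helper; the generated entry points inline this exact sequence.
async function wireGraphWatcher (config: any, db: any, ethClient: any, postgraphileClient: any, ethProvider: any, Indexer: any): Promise<any> {
  // A separate graph-node database over the generated entity classes.
  const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
  await graphDb.init();

  const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, config.server.subgraphPath);

  // The indexer now takes the graph watcher as an extra constructor argument...
  const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, graphWatcher);

  // ...and the watcher gets the indexer back, so store.set can stage auto-diffs.
  graphWatcher.setIndexer(indexer);
  await graphWatcher.init();

  return indexer;
}
```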
@@ -21,7 +21,7 @@

* Run the following command to generate a watcher from a contract file:

```bash
yarn codegen --input-file <input-file-path> --contract-name <contract-name> --output-folder [output-folder] --mode [eth_call | storage | all] --flatten [true | false] --kind [lazy | active] --port [server-port]
yarn codegen --input-file <input-file-path> --contract-name <contract-name> --output-folder [output-folder] --mode [eth_call | storage | all] --flatten [true | false] --kind [lazy | active] --port [server-port] --subgraph-path [subgraph-build-path]
```

* `input-file` (alias: `i`): Input contract file path or a URL (required).
@@ -31,6 +31,7 @@

* `flatten` (alias: `f`): Flatten the input contract file (default: `true`).
* `kind` (alias: `k`): Kind of watcher (default: `active`).
* `port` (alias: `p`): Server port (default: `3008`).
* `subgraph-path` (alias: `s`): Path to the subgraph build.

**Note**: When passed a *URL* as `input-file`, it is assumed that it points to an already flattened contract file.
@@ -78,12 +79,14 @@

ipfs daemon
```

* In the config file (`environments/local.toml`):

* Update the state checkpoint settings.

* Update the IPFS API address in `environments/local.toml`.

* Create the databases configured in `environments/local.toml`.

* Update the state checkpoint settings in `environments/local.toml`.

* Update the IPFS API address in `environments/local.toml`.

### Customize

* Indexing on an event:
@@ -19,6 +19,7 @@

},
"homepage": "https://github.com/vulcanize/watcher-ts#readme",
"dependencies": {
"@graphql-tools/load-files": "^6.5.2",
"@poanet/solidity-flattener": "https://github.com/vulcanize/solidity-flattener.git",
"@solidity-parser/parser": "^0.13.2",
"@vulcanize/util": "^0.1.0",
@@ -16,13 +16,14 @@ const TEMPLATE_FILE = './templates/config-template.handlebars';

* @param folderName Watcher folder name to be passed to the template.
* @param outStream A writable output stream to write the config file to.
*/
export function exportConfig (watcherKind: string, port: number, folderName: string, outStream: Writable): void {
export function exportConfig (watcherKind: string, port: number, folderName: string, outStream: Writable, subgraphPath?: string): void {
const templateString = fs.readFileSync(path.resolve(__dirname, TEMPLATE_FILE)).toString();
const template = Handlebars.compile(templateString);
const config = template({
watcherKind,
port,
folderName
folderName,
subgraphPath
});
outStream.write(config);
}
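For context, the updated call site in `generate-code.ts` (see the `@@ -167,7 +174,7 @@` hunk below) simply threads the optional CLI argument through; when `--subgraph-path` is not passed, `subgraphPath` stays `undefined` and the config template's `{{#if subgraphPath}}` block renders nothing:

```typescript
// Condensed excerpt from the generateWatcher flow shown later in this diff.
exportConfig(argv.kind, argv.port, path.basename(outputDir), outStream, argv['subgraph-path']);
```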
@@ -9,6 +9,9 @@ indexOn:

- columns:
- parentHash
columns:
- name: id
tsType: number
columnType: PrimaryGeneratedColumn
- name: cid
pgType: varchar
tsType: string

@@ -4,6 +4,9 @@ indexOn:

- address
unique: true
columns:
- name: id
tsType: number
columnType: PrimaryGeneratedColumn
- name: address
pgType: varchar
tsType: string

@@ -8,6 +8,9 @@ indexOn:

- contract
- eventName
columns:
- name: id
tsType: number
columnType: PrimaryGeneratedColumn
- name: block
tsType: BlockProgress
columnType: ManyToOne

@@ -1,6 +1,9 @@

className: HookStatus
indexOn: []
columns:
- name: id
tsType: number
columnType: PrimaryGeneratedColumn
- name: latestProcessedBlockNumber
pgType: integer
tsType: number

@@ -12,6 +12,9 @@ indexOn:

- kind
unique: true
columns:
- name: id
tsType: number
columnType: PrimaryGeneratedColumn
- name: block
tsType: BlockProgress
columnType: ManyToOne

@@ -2,6 +2,9 @@ className: SyncStatus

implements: SyncStatusInterface
indexOn: []
columns:
- name: id
tsType: number
columnType: PrimaryGeneratedColumn
- name: chainHeadBlockHash
pgType: varchar
tsType: string
@@ -9,7 +9,7 @@ import yaml from 'js-yaml';

import Handlebars from 'handlebars';
import { Writable } from 'stream';

import { getTsForSol, getPgForTs } from './utils/type-mappings';
import { getTsForSol, getPgForTs, getTsForGql } from './utils/type-mappings';
import { Param } from './utils/types';

const TEMPLATE_FILE = './templates/entity-template.handlebars';

@@ -67,6 +67,12 @@ export class Entity {

);
entityObject.indexOn.push(indexObject);

entityObject.columns.push({
name: 'id',
tsType: 'number',
columnType: 'PrimaryGeneratedColumn',
columnOptions: []
});
entityObject.columns.push({
name: 'blockHash',
pgType: 'varchar',
@@ -156,30 +162,8 @@ export class Entity {

]
});

entityObject.columns.forEach((column: any) => {
if (column.tsType === 'bigint') {
column.columnOptions.push(
{
option: 'transformer',
value: 'bigintTransformer'
}
);
const importObject = entityObject.imports.find((element: any) => {
return element.from === '@vulcanize/util';
});

if (importObject) {
importObject.toImport.add('bigintTransformer');
} else {
entityObject.imports.push(
{
toImport: new Set(['bigintTransformer']),
from: '@vulcanize/util'
}
);
}
}
});
// Add bigintTransformer column option if required.
this._addBigIntTransformerOption(entityObject);

this._entities.push(entityObject);
}
@@ -206,6 +190,66 @@ export class Entity {

});
}

addSubgraphEntities (subgraphSchemaDocument: any): void {
const subgraphTypeDefs = subgraphSchemaDocument.definitions;

subgraphTypeDefs.forEach((def: any) => {
// TODO Handle enum types.
if (def.kind !== 'ObjectTypeDefinition') {
return;
}

let entityObject: any = {
className: def.name.value,
indexOn: [],
columns: [],
imports: []
};

entityObject.imports.push(
{
toImport: new Set(['Entity', 'PrimaryColumn', 'Column']),
from: 'typeorm'
}
);

// Add common columns.
entityObject.columns.push({
name: 'id',
pgType: 'varchar',
tsType: 'string',
columnType: 'PrimaryColumn',
columnOptions: []
});
entityObject.columns.push({
name: 'blockHash',
pgType: 'varchar',
tsType: 'string',
columnType: 'PrimaryColumn',
columnOptions: [
{
option: 'length',
value: 66
}
]
});
entityObject.columns.push({
name: 'blockNumber',
pgType: 'integer',
tsType: 'number',
columnType: 'Column'
});

// Add subgraph entity specific columns.
entityObject = this._addSubgraphColumns(entityObject, def);

// Add bigintTransformer column option if required.
this._addBigIntTransformerOption(entityObject);

this._entities.push(entityObject);
});
}

_addEventEntity (): void {
const entity = yaml.load(fs.readFileSync(path.resolve(__dirname, TABLES_DIR, 'Event.yaml'), 'utf8'));
this._entities.push(entity);

@@ -235,4 +279,123 @@ export class Entity {

const entity = yaml.load(fs.readFileSync(path.resolve(__dirname, TABLES_DIR, 'HookStatus.yaml'), 'utf8'));
this._entities.push(entity);
}

_addBigIntTransformerOption (entityObject: any): void {
entityObject.columns.forEach((column: any) => {
if (column.tsType === 'bigint') {
column.columnOptions.push(
{
option: 'transformer',
value: 'bigintTransformer'
}
);

const importObject = entityObject.imports.find((element: any) => {
return element.from === '@vulcanize/util';
});

if (importObject) {
importObject.toImport.add('bigintTransformer');
} else {
entityObject.imports.push(
{
toImport: new Set(['bigintTransformer']),
from: '@vulcanize/util'
}
);
}
}
});
}

_addSubgraphColumns (entityObject: any, def: any): any {
def.fields.forEach((field: any) => {
const name = field.name.value;

// Filter out already added columns.
if (['id', 'blockHash', 'blockNumber'].includes(name)) {
return;
}

const columnObject: any = {
name,
columnOptions: []
};

const { typeName, array, nullable } = this._getFieldType(field.type);
let tsType = getTsForGql(typeName);

if (tsType) {
// Handle basic array types.
if (array) {
columnObject.columnOptions.push({
option: 'array',
value: 'true'
});

columnObject.tsType = `${tsType}[]`;
} else {
columnObject.tsType = tsType;
}
} else {
// TODO Handle array of custom types.
tsType = typeName;
columnObject.tsType = tsType;
}

const pgType = getPgForTs(tsType);

// If basic type: create a column. If unknown: create a relation.
if (pgType) {
columnObject.columnType = 'Column';
columnObject.pgType = pgType;
} else {
columnObject.columnType = 'ManyToOne';
columnObject.lhs = '()';
columnObject.rhs = tsType;

entityObject.imports[0].toImport.add('ManyToOne');

// Check if type import already added.
const importObject = entityObject.imports.find((element: any) => {
return element.from === `./${tsType}`;
});

if (!importObject) {
entityObject.imports.push(
{
toImport: new Set([tsType]),
from: `./${tsType}`
}
);
}
}

if (nullable) {
columnObject.columnOptions.push({
option: 'nullable',
value: 'true'
});
}

entityObject.columns.push(columnObject);
});

return entityObject;
}

_getFieldType (typeNode: any): { typeName: string, array: boolean, nullable: boolean } {
if (typeNode.kind === 'ListType') {
return { typeName: this._getFieldType(typeNode.type).typeName, array: true, nullable: true };
}

if (typeNode.kind === 'NonNullType') {
const fieldType = this._getFieldType(typeNode.type);

return { typeName: fieldType.typeName, array: fieldType.array, nullable: false };
}

// If 'NamedType'.
return { typeName: typeNode.name.value, array: false, nullable: true };
}
}
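To make `_getFieldType` concrete, here is a small standalone sketch (using the `graphql` package and a made-up `Pool` type) showing how the `NonNullType`/`ListType` wrappers are peeled off recursively:

```typescript
import { parse, ObjectTypeDefinitionNode } from 'graphql';

const doc = parse('type Pool { tokens: [Token!]! owner: Account }');
const pool = doc.definitions[0] as ObjectTypeDefinitionNode;

// Mirrors Entity._getFieldType above (hypothetical standalone version).
function getFieldType (typeNode: any): { typeName: string, array: boolean, nullable: boolean } {
  if (typeNode.kind === 'ListType') {
    return { typeName: getFieldType(typeNode.type).typeName, array: true, nullable: true };
  }

  if (typeNode.kind === 'NonNullType') {
    const fieldType = getFieldType(typeNode.type);

    return { typeName: fieldType.typeName, array: fieldType.array, nullable: false };
  }

  // 'NamedType' terminates the recursion.
  return { typeName: typeNode.name.value, array: false, nullable: true };
}

console.log(getFieldType(pool.fields![0].type)); // { typeName: 'Token', array: true, nullable: false }
console.log(getFieldType(pool.fields![1].type)); // { typeName: 'Account', array: false, nullable: true }
```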
@@ -78,6 +78,11 @@ const main = async (): Promise<void> => {

type: 'boolean',
default: true
})
.option('subgraph-path', {
alias: 's',
describe: 'Path to the subgraph build.',
type: 'string'
})
.argv;

let data: string;

@@ -144,6 +149,8 @@ function generateWatcher (data: string, visitor: Visitor, argv: any) {

registerHandlebarHelpers();

visitor.visitSubgraph(argv['subgraph-path']);

let outStream = outputDir
? fs.createWriteStream(path.join(outputDir, 'src/schema.gql'))
: process.stdout;

@@ -167,7 +174,7 @@ function generateWatcher (data: string, visitor: Visitor, argv: any) {

outStream = outputDir
? fs.createWriteStream(path.join(outputDir, 'environments/local.toml'))
: process.stdout;
exportConfig(argv.kind, argv.port, path.basename(outputDir), outStream);
exportConfig(argv.kind, argv.port, path.basename(outputDir), outStream, argv['subgraph-path']);

outStream = outputDir
? fs.createWriteStream(path.join(outputDir, 'src/artifacts/', `${inputFileName}.json`))
@@ -16,10 +16,12 @@ const TEMPLATE_FILE = './templates/resolvers-template.handlebars';

export class Resolvers {
_queries: Array<any>;
_subgraphQueries: Array<any>;
_templateString: string;

constructor () {
this._queries = [];
this._subgraphQueries = [];
this._templateString = fs.readFileSync(path.resolve(__dirname, TEMPLATE_FILE)).toString();
}

@@ -51,6 +53,26 @@ export class Resolvers {

this._queries.push(queryObject);
}

addSubgraphResolvers (subgraphSchemaDocument: any): void {
const subgraphTypeDefs = subgraphSchemaDocument.definitions;

for (const subgraphTypeDef of subgraphTypeDefs) {
if (subgraphTypeDef.kind !== 'ObjectTypeDefinition') {
continue;
}

const entityName = subgraphTypeDef.name.value;
const queryName = `${entityName.charAt(0).toLowerCase()}${entityName.slice(1)}`;

const queryObject = {
entityName,
queryName
};

this._subgraphQueries.push(queryObject);
}
}
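The generated query name is simply the entity name with its first letter lower-cased; a tiny sketch with a hypothetical `Factory` entity:

```typescript
const entityName = 'Factory'; // hypothetical subgraph entity
const queryName = `${entityName.charAt(0).toLowerCase()}${entityName.slice(1)}`;

console.log(queryName); // factory
```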
/**
* Writes the resolvers file generated from a template to a stream.
* @param outStream A writable output stream to write the resolvers file to.

@@ -58,7 +80,8 @@ export class Resolvers {

exportResolvers (outStream: Writable): void {
const template = Handlebars.compile(this._templateString);
const obj = {
queries: this._queries
queries: this._queries,
subgraphQueries: this._subgraphQueries
};
const resolvers = template(obj);
outStream.write(resolvers);
@@ -3,9 +3,10 @@

//

import assert from 'assert';
import { GraphQLSchema, printSchema } from 'graphql';
import { GraphQLSchema, parse, printSchema, print } from 'graphql';
import { SchemaComposer } from 'graphql-compose';
import { Writable } from 'stream';
import _ from 'lodash';

import { getTsForSol, getGqlForTs } from './utils/type-mappings';
import { Param } from './utils/types';

@@ -28,6 +29,11 @@ export class Schema {

* @param returnType Return type for the query.
*/
addQuery (name: string, params: Array<Param>, returnType: string): void {
// Check if the query is already added.
if (this._composer.Query.hasField(name)) {
return;
}

// TODO: Handle cases where returnType/params type is an array.
const tsReturnType = getTsForSol(returnType);
assert(tsReturnType);

@@ -63,6 +69,11 @@ export class Schema {

addEventType (name: string, params: Array<Param>): void {
name = `${name}Event`;

// Check if the type is already added.
if (this._composer.has(name)) {
return;
}

const typeObject: any = {};
typeObject.name = name;
typeObject.fields = {};

@@ -97,15 +108,18 @@ export class Schema {

// Add a mutation for watching a contract.
this._addWatchContractMutation();

// Add IPLDBlock type and queries.
this._addIPLDType();
this._addIPLDQuery();

// Build the schema.
return this._composer.buildSchema();
}

/**
* Writes schema to a stream.
* @param outStream A writable output stream to write the schema to.
* @returns The schema string.
*/
exportSchema (outStream: Writable): string {
// Get schema as a string from GraphQLSchema.
@@ -115,48 +129,112 @@ export class Schema {

return schemaString;
}

addSubgraphSchema (subgraphSchemaDocument: any): void {
// Generating the current types.
const schema = this._composer.buildSchema();

const schemaString = printSchema(schema);

// Parse the schema into a DocumentNode.
const schemaDocument = parse(schemaString);

// Get schema types.
const schemaTypes = schemaDocument.definitions.map((def: any) => {
return def.name.value;
});

// Filtering out existing types from subgraph types.
const subgraphTypeDefs = subgraphSchemaDocument.definitions.filter((def: any) => {
return !schemaTypes.includes(def.name.value);
});

// Re-assigning the typeDefs.
const modifiedSchemaDocument = _.cloneDeep(subgraphSchemaDocument);
modifiedSchemaDocument.definitions = subgraphTypeDefs;

// Adding subgraph-schema types to the schema composer.
const subgraphTypeDefsString = print(modifiedSchemaDocument);
this._composer.addTypeDefs(subgraphTypeDefsString);

// Add subgraph-schema entity queries to the schema composer.
this._addSubgraphSchemaQueries(subgraphTypeDefs);
}

_addSubgraphSchemaQueries (subgraphTypeDefs: any): void {
for (const subgraphTypeDef of subgraphTypeDefs) {
// Filtering out enums.
if (subgraphTypeDef.kind !== 'ObjectTypeDefinition') {
continue;
}

const subgraphType = subgraphTypeDef.name.value;

// Lowercase first letter for query name.
const queryName = `${subgraphType.charAt(0).toLowerCase()}${subgraphType.slice(1)}`;

const queryObject: { [key: string]: any; } = {};
queryObject[queryName] = {
// Get type composer object for return type from the schema composer.
type: this._composer.getAnyTC(subgraphType).NonNull,
args: {
id: 'String!',
blockHash: 'String!'
}
};

this._composer.Query.addFields(queryObject);
}
}
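A condensed sketch of what `addSubgraphSchema` and `_addSubgraphSchemaQueries` accomplish with graphql-compose, assuming a toy subgraph schema containing a single `Token` type:

```typescript
import { SchemaComposer } from 'graphql-compose';

const composer = new SchemaComposer();

// Subgraph types (already directive-stripped and scalar-mapped) are merged in.
composer.addTypeDefs('type Token { id: String! name: String! }');

// Each object type gets a query keyed by entity id and block hash.
composer.Query.addFields({
  token: {
    type: composer.getAnyTC('Token').NonNull,
    args: {
      id: 'String!',
      blockHash: 'String!'
    }
  }
});

const schema = composer.buildSchema();
console.log(schema.getQueryType()?.getFields().token.name); // token
```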
/**
* Adds basic types to the schema and typemapping.
*/
_addBasicTypes (): void {
let typeComposer;

// Create a scalar type composer to add the scalar BigInt in the schema composer.
this._composer.createScalarTC({
typeComposer = this._composer.createScalarTC({
name: 'BigInt'
});
this._composer.addSchemaMustHaveType(typeComposer);

// Create a type composer to add the type Proof in the schema composer.
this._composer.createObjectTC({
typeComposer = this._composer.createObjectTC({
name: 'Proof',
fields: {
data: 'String!'
}
});
this._composer.addSchemaMustHaveType(typeComposer);

this._composer.createObjectTC({
typeComposer = this._composer.createObjectTC({
name: 'ResultBoolean',
fields: {
value: 'Boolean!',
proof: () => this._composer.getOTC('Proof')
}
});
this._composer.addSchemaMustHaveType(typeComposer);

this._composer.createObjectTC({
typeComposer = this._composer.createObjectTC({
name: 'ResultString',
fields: {
value: 'String!',
proof: () => this._composer.getOTC('Proof')
}
});
this._composer.addSchemaMustHaveType(typeComposer);

this._composer.createObjectTC({
typeComposer = this._composer.createObjectTC({
name: 'ResultInt',
fields: {
value: () => 'Int!',
proof: () => this._composer.getOTC('Proof')
}
});
this._composer.addSchemaMustHaveType(typeComposer);

this._composer.createObjectTC({
typeComposer = this._composer.createObjectTC({
name: 'ResultBigInt',
fields: {
// Get type composer object for BigInt scalar from the schema composer.

@@ -164,16 +242,19 @@ export class Schema {

proof: () => this._composer.getOTC('Proof')
}
});
this._composer.addSchemaMustHaveType(typeComposer);
}

/**
* Adds types 'ResultEvent' and 'WatchedEvent' to the schema.
*/
_addEventsRelatedTypes (): void {
let typeComposer;

// Create Ethereum types.
// Create the Block type.
const blockName = 'Block';
this._composer.createObjectTC({
typeComposer = this._composer.createObjectTC({
name: blockName,
fields: {
cid: 'String!',

@@ -183,10 +264,11 @@ export class Schema {

parentHash: 'String!'
}
});
this._composer.addSchemaMustHaveType(typeComposer);

// Create the Transaction type.
const transactionName = 'Transaction';
this._composer.createObjectTC({
typeComposer = this._composer.createObjectTC({
name: transactionName,
fields: {
hash: 'String!',

@@ -195,10 +277,11 @@ export class Schema {

to: 'String!'
}
});
this._composer.addSchemaMustHaveType(typeComposer);

// Create the ResultEvent type.
const resultEventName = 'ResultEvent';
this._composer.createObjectTC({
typeComposer = this._composer.createObjectTC({
name: resultEventName,
fields: {
// Get type composer object for 'blockName' type from the schema composer.

@@ -210,6 +293,7 @@ export class Schema {

proof: () => this._composer.getOTC('Proof')
}
});
this._composer.addSchemaMustHaveType(typeComposer);
}

/**

@@ -239,7 +323,7 @@ export class Schema {

}

_addIPLDType (): void {
this._composer.createObjectTC({
const typeComposer = this._composer.createObjectTC({
name: 'ResultIPLDBlock',
fields: {
block: () => this._composer.getOTC('Block').NonNull,

@@ -249,6 +333,7 @@ export class Schema {

data: 'String!'
}
});
this._composer.addSchemaMustHaveType(typeComposer);
}

_addIPLDQuery (): void {
@@ -2,11 +2,13 @@

// Copyright 2021 Vulcanize, Inc.
//

import path from 'path';
import yargs from 'yargs';
import 'reflect-metadata';
import debug from 'debug';

import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients } from '@vulcanize/util';
import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';

import { Database } from '../database';
import { Indexer } from '../indexer';

@@ -43,7 +45,16 @@ const main = async (): Promise<void> => {

const db = new Database(config.database);
await db.init();

const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider);
const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
await graphDb.init();

const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, config.server.subgraphPath);

const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, graphWatcher);

graphWatcher.setIndexer(indexer);
await graphWatcher.init();

const blockHash = await indexer.processCLICheckpoint(argv.address, argv.blockHash);

log(`Created a checkpoint for contract ${argv.address} at block-hash ${blockHash}`);

@@ -11,6 +11,10 @@

# IPFS API address (can be taken from the output on running the IPFS daemon).
ipfsApiAddr = "/ip4/127.0.0.1/tcp/5001"

{{#if subgraphPath}}
subgraphPath = "{{subgraphPath}}"
{{/if}}

[database]
type = "postgres"
@@ -17,18 +17,17 @@ import { {{~#each import.toImport}} {{this}} {{~#unless @last}}, {{~/unless}} {{

{{/if}}
{{/each}}
export class {{className}} {{~#if implements}} implements {{implements}} {{~/if}} {
@PrimaryGeneratedColumn()
id!: number;

{{#each columns as | column |}}
{{#if (compare column.columnType 'ManyToOne')}}
@{{column.columnType}}({{column.lhs}} => {{column.rhs}}
{{~else}}
@{{column.columnType}}('{{column.pgType}}'
@{{column.columnType}}(
{{~#if column.pgType~}} '{{column.pgType}}'
{{~/if}}
{{~/if}}
{{~#if column.columnOptions}}, {
{{~#each column.columnOptions}} {{this.option}}: {{{this.value}}}
{{~#unless @last}}, {{/unless}}
{{~#unless @last}},{{/unless}}
{{~/each}} }
{{~/if}})
{{column.name}}!: {{column.tsType}};
@@ -10,6 +10,7 @@ import fs from 'fs';

import path from 'path';

import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients } from '@vulcanize/util';
import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
import * as codec from '@ipld/dag-cbor';

import { Database } from '../database';

@@ -42,7 +43,15 @@ const main = async (): Promise<void> => {

const db = new Database(config.database);
await db.init();

const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider);
const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
await graphDb.init();

const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, config.server.subgraphPath);

const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, graphWatcher);

graphWatcher.setIndexer(indexer);
await graphWatcher.init();

const exportData: any = {
snapshotBlock: {},
@@ -2,6 +2,7 @@

// Copyright 2021 Vulcanize, Inc.
//

import path from 'path';
import assert from 'assert';
import 'reflect-metadata';
import yargs from 'yargs';

@@ -10,6 +11,7 @@ import debug from 'debug';

import { PubSub } from 'apollo-server-express';

import { Config, getConfig, fillBlocks, JobQueue, DEFAULT_CONFIG_PATH, initClients } from '@vulcanize/util';
import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';

import { Database } from './database';
import { Indexer } from './indexer';

@@ -46,10 +48,18 @@ export const main = async (): Promise<any> => {

const db = new Database(config.database);
await db.init();

const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
await graphDb.init();

const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, config.server.subgraphPath);

// Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
// Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
const pubsub = new PubSub();
const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider);
const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, graphWatcher);

graphWatcher.setIndexer(indexer);
await graphWatcher.init();

const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
@@ -12,6 +12,7 @@ import fs from 'fs';

import path from 'path';

import { getConfig, fillBlocks, JobQueue, DEFAULT_CONFIG_PATH, Config, initClients } from '@vulcanize/util';
import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';
import * as codec from '@ipld/dag-cbor';

import { Database } from '../database';

@@ -46,10 +47,18 @@ export const main = async (): Promise<any> => {

const db = new Database(config.database);
await db.init();

const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
await graphDb.init();

const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, config.server.subgraphPath);

// Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
// Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
const pubsub = new PubSub();
const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider);
const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, graphWatcher);

graphWatcher.setIndexer(indexer);
await graphWatcher.init();

const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
@@ -17,6 +17,7 @@ import * as codec from '@ipld/dag-cbor';

import { EthClient } from '@vulcanize/ipld-eth-client';
import { StorageLayout } from '@vulcanize/solidity-mapper';
import { Indexer as BaseIndexer, IndexerInterface, ValueResult, UNKNOWN_EVENT_NAME, ServerConfig, Where, QueryOptions, updateStateForElementaryType, updateStateForMappingType } from '@vulcanize/util';
import { GraphWatcher } from '@vulcanize/graph-node';

import { Database } from './database';
import { Contract } from './entity/Contract';

@@ -53,6 +54,7 @@ export type ResultEvent = {

contract: string;

eventIndex: number;
eventSignature: string;
event: any;

proof: string;

@@ -79,6 +81,7 @@ export class Indexer implements IndexerInterface {

_postgraphileClient: EthClient
_baseIndexer: BaseIndexer
_serverConfig: ServerConfig
_graphWatcher: GraphWatcher;

_abi: JsonFragment[]
_storageLayout: StorageLayout

@@ -86,7 +89,7 @@ export class Indexer implements IndexerInterface {

_ipfsClient: IPFSClient

constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, postgraphileClient: EthClient, ethProvider: BaseProvider) {
constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, postgraphileClient: EthClient, ethProvider: BaseProvider, graphWatcher: GraphWatcher) {
assert(db);
assert(ethClient);
assert(postgraphileClient);

@@ -97,6 +100,7 @@ export class Indexer implements IndexerInterface {

this._ethProvider = ethProvider;
this._serverConfig = serverConfig;
this._baseIndexer = new BaseIndexer(this._db, this._ethClient, this._postgraphileClient, this._ethProvider);
this._graphWatcher = graphWatcher;

const { abi, storageLayout } = artifacts;

@@ -114,7 +118,7 @@ export class Indexer implements IndexerInterface {

getResultEvent (event: Event): ResultEvent {
const block = event.block;
const eventFields = JSONbig.parse(event.eventInfo);
const { tx } = JSON.parse(event.extraInfo);
const { tx, eventSignature } = JSON.parse(event.extraInfo);

return {
block: {

@@ -135,6 +139,7 @@ export class Indexer implements IndexerInterface {

contract: event.contract,

eventIndex: event.index,
eventSignature,
event: {
__typename: `${event.eventName}Event`,
...eventFields

@@ -552,9 +557,16 @@ export class Indexer implements IndexerInterface {

return (ipfsAddr !== undefined && ipfsAddr !== null && ipfsAddr !== '');
}

async getSubgraphEntity<Entity> (entity: new () => Entity, id: string, blockHash: string): Promise<Entity | undefined> {
return this._graphWatcher.getEntity(entity, id, blockHash);
}

async triggerIndexingOnEvent (event: Event): Promise<void> {
const resultEvent = this.getResultEvent(event);

// Call subgraph handler for event.
await this._graphWatcher.handleEvent(resultEvent);

// Call custom hook function for indexing on event.
await handleEvent(this, resultEvent);
}

@@ -592,7 +604,11 @@ export class Indexer implements IndexerInterface {

{{/each}}
}

return { eventName, eventInfo };
return {
eventName,
eventInfo,
eventSignature: logDescription.signature
};
}
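Where `eventSignature` ultimately comes from: ethers' `parseLog` returns a `LogDescription` whose `signature` field is the canonical event signature. A standalone sketch with an assumed ERC-20-style event:

```typescript
import { ethers } from 'ethers';

const iface = new ethers.utils.Interface([
  'event Transfer(address indexed from, address indexed to, uint256 value)'
]);

// Build a synthetic log entry for illustration.
const logDescription = iface.parseLog({
  topics: [
    iface.getEventTopic('Transfer'),
    ethers.utils.hexZeroPad('0x11', 32),
    ethers.utils.hexZeroPad('0x22', 32)
  ],
  data: ethers.utils.defaultAbiCoder.encode(['uint256'], [1])
});

console.log(logDescription.signature); // Transfer(address,address,uint256)
```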
async watchContract (address: string, kind: string, checkpoint: boolean, startingBlock?: number): Promise<boolean> {

@@ -775,7 +791,7 @@ export class Indexer implements IndexerInterface {

let eventName = UNKNOWN_EVENT_NAME;
let eventInfo = {};
const tx = transactionMap[txHash];
const extraInfo = { topics, data, tx };
const extraInfo: { [key: string]: any } = { topics, data, tx };

const contract = ethers.utils.getAddress(address);
const watchedContract = await this.isWatchedContract(contract);

@@ -784,6 +800,7 @@ export class Indexer implements IndexerInterface {

const eventDetails = this.parseEventNameAndArgs(watchedContract.kind, logObj);
eventName = eventDetails.eventName;
eventInfo = eventDetails.eventInfo;
extraInfo.eventSignature = eventDetails.eventSignature;
}

dbEvents.push({
@@ -2,6 +2,7 @@

// Copyright 2021 Vulcanize, Inc.
//

import path from 'path';
import assert from 'assert';
import yargs from 'yargs';
import 'reflect-metadata';

@@ -9,6 +10,7 @@ import debug from 'debug';

import util from 'util';

import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients } from '@vulcanize/util';
import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';

import { Database } from '../database';
import { Indexer } from '../indexer';

@@ -41,7 +43,15 @@ const main = async (): Promise<void> => {

const db = new Database(config.database);
await db.init();

const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider);
const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
await graphDb.init();

const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, config.server.subgraphPath);

const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, graphWatcher);

graphWatcher.setIndexer(indexer);
await graphWatcher.init();

const ipldBlock = await indexer.getIPLDBlockByCid(argv.cid);
assert(ipldBlock, 'IPLDBlock for the provided CID doesn\'t exist.');
@@ -2,11 +2,13 @@

// Copyright 2021 Vulcanize, Inc.
//

import path from 'path';
import assert from 'assert';
import 'reflect-metadata';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import debug from 'debug';

import {
getConfig,
Config,

@@ -21,6 +23,7 @@ import {

DEFAULT_CONFIG_PATH,
initClients
} from '@vulcanize/util';
import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';

import { Indexer } from './indexer';
import { Database } from './database';

@@ -127,7 +130,15 @@ export const main = async (): Promise<any> => {

const db = new Database(config.database);
await db.init();

const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider);
const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
await graphDb.init();

const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, config.server.subgraphPath);

const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, graphWatcher);

graphWatcher.setIndexer(indexer);
await graphWatcher.init();

const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
@@ -41,11 +41,13 @@

{{folderName}}-job-queue=# exit
```

* Update the [config](./environments/local.toml) with database connection settings.
* In the [config file](./environments/local.toml):

* Update the `upstream` config in the [config file](./environments/local.toml) and provide the `ipld-eth-server` GQL API and the `indexer-db` postgraphile endpoints.
* Update the database connection settings.

* Update the `server` config in the [config file](./environments/local.toml) with state checkpoint settings and provide the IPFS API address.
* Update the `upstream` config and provide the `ipld-eth-server` GQL API and the `indexer-db` postgraphile endpoints.

* Update the `server` config with state checkpoint settings and provide the IPFS API address.

## Customize
@@ -2,11 +2,13 @@

// Copyright 2021 Vulcanize, Inc.
//

import path from 'path';
import debug from 'debug';
import { MoreThan } from 'typeorm';
import assert from 'assert';

import { getConfig, initClients, resetJobs } from '@vulcanize/util';
import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';

import { Database } from '../../database';
import { Indexer } from '../../indexer';

@@ -37,7 +39,15 @@ export const handler = async (argv: any): Promise<void> => {

const db = new Database(config.database);
await db.init();

const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider);
const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
await graphDb.init();

const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, config.server.subgraphPath);

const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, graphWatcher);

graphWatcher.setIndexer(indexer);
await graphWatcher.init();

const syncStatus = await indexer.getSyncStatus();
assert(syncStatus, 'Missing syncStatus');
@@ -11,6 +11,10 @@ import { ValueResult } from '@vulcanize/util';

import { Indexer } from './indexer';
import { EventWatcher } from './events';

{{#each subgraphQueries as | query |}}
import { {{query.entityName}} } from './entity/{{query.entityName}}';
{{/each}}

const log = debug('vulcanize:resolver');

export const createResolvers = async (indexer: Indexer, eventWatcher: EventWatcher): Promise<any> => {

@@ -52,6 +56,15 @@ export const createResolvers = async (indexer: Indexer, eventWatcher: EventWatch

{{~#each this.params}}, {{this.name~}} {{/each}});
},

{{/each}}

{{~#each subgraphQueries}}
{{this.queryName}}: async (_: any, { id, blockHash }: { id: string, blockHash: string }): Promise<{{this.entityName}} | undefined> => {
log('{{this.queryName}}', id, blockHash);

return indexer.getSubgraphEntity({{this.entityName}}, id, blockHash);
},

{{/each}}
events: async (_: any, { blockHash, contractAddress, name }: { blockHash: string, contractAddress: string, name?: string }) => {
log('events', blockHash, contractAddress, name);
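For a hypothetical `Factory` entity, the `subgraphQueries` block above expands to roughly the following fragment of the generated resolvers object (names are illustrative; `indexer` and `log` come from the surrounding generated module):

```typescript
import { Factory } from './entity/Factory'; // generated entity class (hypothetical)

// ...inside the object returned by createResolvers, alongside the eth_call/storage resolvers:
factory: async (_: any, { id, blockHash }: { id: string, blockHash: string }): Promise<Factory | undefined> => {
  log('factory', id, blockHash);

  return indexer.getSubgraphEntity(Factory, id, blockHash);
},
```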
@@ -15,6 +15,7 @@ import 'graphql-import-node';

import { createServer } from 'http';

import { DEFAULT_CONFIG_PATH, getConfig, Config, JobQueue, KIND_ACTIVE, initClients } from '@vulcanize/util';
import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';

import { createResolvers } from './resolvers';
import { Indexer } from './indexer';

@@ -42,10 +43,18 @@ export const main = async (): Promise<any> => {

const db = new Database(config.database);
await db.init();

const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
await graphDb.init();

const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, config.server.subgraphPath);

// Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
// Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
const pubsub = new PubSub();
const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider);
const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, graphWatcher);

graphWatcher.setIndexer(indexer);
await graphWatcher.init();

const jobQueueConfig = config.jobQueue;
assert(jobQueueConfig, 'Missing job queue config');
@@ -2,11 +2,13 @@

// Copyright 2021 Vulcanize, Inc.
//

import path from 'path';
import yargs from 'yargs';
import 'reflect-metadata';
import debug from 'debug';

import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients } from '@vulcanize/util';
import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';

import { Database } from '../database';
import { Indexer } from '../indexer';

@@ -55,7 +57,16 @@ const main = async (): Promise<void> => {

const db = new Database(config.database);
await db.init();

const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider);
const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*'));
await graphDb.init();

const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, config.server.subgraphPath);

const indexer = new Indexer(config.server, db, ethClient, postgraphileClient, ethProvider, graphWatcher);

graphWatcher.setIndexer(indexer);
await graphWatcher.init();

await indexer.watchContract(argv.address, argv.kind, argv.checkpoint, argv.startingBlock);

await db.close();
packages/codegen/src/utils/subgraph.ts (new file, 58 lines)

@@ -0,0 +1,58 @@
import path from 'path';
import assert from 'assert';
import fs from 'fs';

import { loadFilesSync } from '@graphql-tools/load-files';

const SCALAR_MAPPING: any = {
BigDecimal: 'String',
Bytes: 'String'
};

export function parseSubgraphSchema (subgraphPath: string): any {
const subgraphSchemaPath = path.join(path.resolve(subgraphPath), '/schema.graphql');

assert(fs.existsSync(subgraphSchemaPath));
const typesArray = loadFilesSync(subgraphSchemaPath);

// Get a subgraph-schema DocumentNode with existing types.
const subgraphSchemaDocument = typesArray[0];
const subgraphTypeDefs = subgraphSchemaDocument.definitions;

subgraphTypeDefs.forEach((def: any) => {
// Remove type directives.
def.directives = [];

if (def.kind === 'ObjectTypeDefinition') {
def.fields.forEach((field: any) => {
// Remove field directives.
field.directives = [];

// Parse the field type.
field.type = parseType(field.type);
});
}
});

subgraphSchemaDocument.definitions = subgraphTypeDefs;

// Return a modified subgraph-schema DocumentNode.
return subgraphSchemaDocument;
}

function parseType (typeNode: any): any {
// Check if 'NamedType' is reached.
if (typeNode.kind === 'NamedType') {
const typeName: string = typeNode.name.value;

// TODO Handle extra types provided by the graph.
// Replace unknown scalars using SCALAR_MAPPING.
if (typeName in SCALAR_MAPPING) {
typeNode.name.value = SCALAR_MAPPING[typeName];
}
} else {
typeNode.type = parseType(typeNode.type);
}

return typeNode;
}
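A usage sketch (the subgraph build path is hypothetical): the document comes back with directives removed and the unsupported scalars `BigDecimal`/`Bytes` already mapped to `String`:

```typescript
import { print } from 'graphql';

import { parseSubgraphSchema } from './utils/subgraph';

const subgraphSchemaDocument = parseSubgraphSchema('./my-subgraph/build'); // hypothetical path
console.log(print(subgraphSchemaDocument));
// A field declared as "amount: BigDecimal!" prints as "amount: String!".
```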
@@ -5,6 +5,7 @@

const _solToTs: Map<string, string> = new Map();
const _tsToGql: Map<string, string> = new Map();
const _tsToPg: Map<string, string> = new Map();
const _gqlToTs: Map<string, string> = new Map();

// TODO Get typemapping from ethersjs.
// Solidity to Typescript type-mapping.

@@ -23,10 +24,16 @@ _tsToGql.set('boolean', 'Boolean');

// Typescript to Postgres type-mapping.
_tsToPg.set('string', 'varchar');
_tsToPg.set('number', 'numeric');
_tsToPg.set('bigint', 'numeric');
_tsToPg.set('number', 'integer');
_tsToPg.set('bigint', 'bigint');
_tsToPg.set('boolean', 'boolean');

// Graphql to Typescript type-mapping.
_gqlToTs.set('String', 'string');
_gqlToTs.set('Int', 'number');
_gqlToTs.set('BigInt', 'bigint');
_gqlToTs.set('Boolean', 'boolean');

function getTsForSol (solType: string): string | undefined {
return _solToTs.get(solType);
}

@@ -39,4 +46,8 @@ function getPgForTs (tsType: string): string | undefined {

return _tsToPg.get(tsType);
}

export { getTsForSol, getGqlForTs, getPgForTs };
function getTsForGql (gqlType: string): string | undefined {
return _gqlToTs.get(gqlType);
}

export { getTsForSol, getGqlForTs, getPgForTs, getTsForGql };
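How the two lookups chain during entity generation, with values per the updated tables above:

```typescript
import { getTsForGql, getPgForTs } from './utils/type-mappings';

const tsType = getTsForGql('BigInt');        // 'bigint'
const pgType = tsType && getPgForTs(tsType); // 'bigint' ('numeric' before this change)

console.log(tsType, pgType);
```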
@@ -5,14 +5,15 @@

import { Writable } from 'stream';

import { Database } from './database';
import { Param } from './utils/types';
import { MODE_ETH_CALL, MODE_STORAGE } from './utils/constants';
import { Entity } from './entity';
import { Indexer } from './indexer';
import { Resolvers } from './resolvers';
import { Schema } from './schema';
import { Client } from './client';
import { Reset } from './reset';
import { Param } from './utils/types';
import { MODE_ETH_CALL, MODE_STORAGE } from './utils/constants';
import { parseSubgraphSchema } from './utils/subgraph';

export class Visitor {
_schema: Schema;

@@ -106,6 +107,19 @@ export class Visitor {

this._indexer.addEvent(name, params);
}

visitSubgraph (subgraphPath?: string): void {
if (!subgraphPath) {
return;
}

// Parse subgraph schema to get subgraphSchemaDocument.
const subgraphSchemaDocument = parseSubgraphSchema(subgraphPath);

this._schema.addSubgraphSchema(subgraphSchemaDocument);
this._entity.addSubgraphEntities(subgraphSchemaDocument);
this._resolvers.addSubgraphResolvers(subgraphSchemaDocument);
}

/**
* Writes schema to a stream.
* @param outStream A writable output stream to write the schema to.
@@ -40,7 +40,7 @@ export class Database {

return this._baseDatabase.close();
}

async getEntity (blockHash: string, entity: string, id: string): Promise<any> {
async getEntity<Entity> (entity: (new () => Entity) | string, id: string, blockHash: string): Promise<Entity | undefined> {
const queryRunner = this._conn.createQueryRunner();
const repo = queryRunner.manager.getRepository(entity);
const whereOptions: { [key: string]: any } = { id };
@@ -13,6 +13,8 @@ import {

ContractInterface
} from 'ethers';

import { IndexerInterface } from '@vulcanize/util';

import { TypeId } from './types';
import { Block, fromEthereumValue, toEthereumValue } from './utils';
import { Database } from './database';

@@ -36,7 +38,7 @@ export interface Context {

}
}

export const instantiate = async (database: Database, context: Context, filePath: string, data: GraphData = {}): Promise<loader.ResultObject & { exports: any }> => {
export const instantiate = async (database: Database, indexer: IndexerInterface, context: Context, filePath: string, data: GraphData = {}): Promise<loader.ResultObject & { exports: any }> => {
const { abis = {}, dataSource } = data;
const buffer = await fs.readFile(filePath);
const provider = getDefaultProvider(NETWORK_URL);

@@ -48,7 +50,7 @@ export const instantiate = async (database: Database, context: Context, filePath

const entityId = __getString(id);

assert(context.event.block);
const entityData = await database.getEntity(context.event.block.blockHash, entityName, entityId);
const entityData = await database.getEntity(entityName, entityId, context.event.block.blockHash);

if (!entityData) {
return null;

@@ -64,6 +66,19 @@ export const instantiate = async (database: Database, context: Context, filePath

assert(context.event.block);
const dbData = await database.fromGraphEntity(exports, context.event.block, entityName, entityInstance);
await database.saveEntity(entityName, dbData);

// Remove blockNumber and blockHash from dbData for auto-diff.
delete dbData.blockNumber;
delete dbData.blockHash;

// Prepare the diff data.
const diffData: any = { state: {} };
diffData.state[entityName] = dbData;

// Create an auto-diff.
assert(indexer.createDiffStaged);
assert(dataSource?.address);
await indexer.createDiffStaged(dataSource.address, context.event.block.blockHash, diffData);
},

'typeConversion.stringToH160': () => {
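The staged diff payload `store.set` now produces, sketched for a hypothetical `Token` entity (field values are illustrative; `blockHash` and `blockNumber` have already been stripped from `dbData`):

```typescript
const diffData: any = { state: {} };

// dbData minus blockNumber/blockHash, keyed by entity name.
diffData.state.Token = {
  id: 'token-0x01', // hypothetical entity id
  supply: '1000'
};

// Then: await indexer.createDiffStaged(dataSource.address, blockHash, diffData);
console.log(JSON.stringify(diffData));
```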
@@ -2,6 +2,7 @@

// Copyright 2021 Vulcanize, Inc.
//

import assert from 'assert';
import 'reflect-metadata';
import debug from 'debug';
import path from 'path';

@@ -10,6 +11,7 @@ import { ContractInterface, utils } from 'ethers';

import { ResultObject } from '@vulcanize/assemblyscript/lib/loader';
import { EthClient } from '@vulcanize/ipld-eth-client';
import { IndexerInterface } from '@vulcanize/util';

import { createEvent, getSubgraphConfig } from './utils';
import { Context, instantiate } from './loader';

@@ -24,6 +26,7 @@ interface DataSource {

export class GraphWatcher {
_database: Database;
_indexer?: IndexerInterface;
_postgraphileClient: EthClient;
_subgraphPath: string;
_dataSources: any[] = [];

@@ -67,8 +70,10 @@ export class GraphWatcher {

const filePath = path.join(this._subgraphPath, file);

assert(this._indexer);

return {
instance: await instantiate(this._database, this._context, filePath, data),
instance: await instantiate(this._database, this._indexer, this._context, filePath, data),
contractInterface
};
}, {});

@@ -144,7 +149,11 @@ export class GraphWatcher {

await exports[eventHandler.handler](ethereumEvent);
}

async getEntity (blockHash: string, entity: string, id: string): Promise<any> {
return this._database.getEntity(blockHash, entity, id);
setIndexer (indexer: IndexerInterface): void {
this._indexer = indexer;
}

async getEntity<Entity> (entity: new () => Entity, id: string, blockHash: string): Promise<Entity | undefined> {
return this._database.getEntity(entity, id, blockHash);
}
}
@@ -72,6 +72,7 @@ export interface IndexerInterface {

parseEventNameAndArgs?: (kind: string, logObj: any) => any;
isWatchedContract?: (address: string) => Promise<ContractInterface | undefined>;
cacheContract?: (contract: ContractInterface) => void;
createDiffStaged?: (contractAddress: string, blockHash: string, data: any) => Promise<void>
}

export interface EventWatcherInterface {
yarn.lock (42 lines changed)
@@ -1102,6 +1102,15 @@

dependencies:
assemblyscript "0.19.10"

"@graphql-tools/load-files@^6.5.2":
version "6.5.2"
resolved "https://registry.yarnpkg.com/@graphql-tools/load-files/-/load-files-6.5.2.tgz#9c7890b62d7c2958dc128b1d0c1dd84bb366b71f"
integrity sha512-ZU/v0HA7L3jCgizK5r3JHTg4ZQg+b+t3lSakU1cYT78kHT98milhlU+YF2giS7XP9KcS6jGTAalQbbX2yQA1sg==
dependencies:
globby "11.0.4"
tslib "~2.3.0"
unixify "1.0.0"

"@graphql-typed-document-node/core@^3.0.0":
version "3.1.0"
resolved "https://registry.yarnpkg.com/@graphql-typed-document-node/core/-/core-3.1.0.tgz#0eee6373e11418bfe0b5638f654df7a4ca6a3950"

@@ -7585,6 +7594,18 @@ globals@^9.18.0:

resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a"
integrity sha512-S0nG3CLEQiY/ILxqtztTWH/3iRRdyBLw6KMDxnKMchrtbj2OFmehVh0WUCfW3DUrIgx/qFrJPICrq4Z4sTR9UQ==

globby@11.0.4:
version "11.0.4"
resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.4.tgz#2cbaff77c2f2a62e71e9b2813a67b97a3a3001a5"
integrity sha512-9O4MVG9ioZJ08ffbcyVYyLOJLk5JQ688pJ4eMGLpdWLHq/Wr1D9BlriLQyL0E+jbkuePVZXYFj47QM/v093wHg==
dependencies:
array-union "^2.1.0"
dir-glob "^3.0.1"
fast-glob "^3.1.1"
ignore "^5.1.4"
merge2 "^1.3.0"
slash "^3.0.0"

globby@^11.0.1, globby@^11.0.2:
version "11.0.3"
resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.3.tgz#9b1f0cb523e171dd1ad8c7b2a9fb4b644b9593cb"

@@ -10449,6 +10470,13 @@ normalize-package-data@^3.0.0, normalize-package-data@^3.0.2:

semver "^7.3.4"
validate-npm-package-license "^3.0.1"

normalize-path@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9"
integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=
dependencies:
remove-trailing-separator "^1.0.1"

normalize-path@^3.0.0, normalize-path@~3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"

@@ -12071,6 +12099,11 @@ regjsparser@^0.1.4:

dependencies:
jsesc "~0.5.0"

remove-trailing-separator@^1.0.1:
version "1.1.0"
resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8=

repeat-element@^1.1.2:
version "1.1.4"
resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.4.tgz#be681520847ab58c7568ac75fbfad28ed42d39e9"

@@ -13525,7 +13558,7 @@ tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3:

resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==

tslib@^2.1.0:
tslib@^2.1.0, tslib@~2.3.0:
version "2.3.1"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.1.tgz#e8a335add5ceae51aa261d32a490158ef042ef01"
integrity sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==

@@ -13802,6 +13835,13 @@ universalify@^2.0.0:

resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717"
integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==

unixify@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/unixify/-/unixify-1.0.0.tgz#3a641c8c2ffbce4da683a5c70f03a462940c2090"
integrity sha1-OmQcjC/7zk2mg6XHDwOkYpQMIJA=
dependencies:
normalize-path "^2.1.1"

unorm@^1.3.3:
version "1.6.0"
resolved "https://registry.yarnpkg.com/unorm/-/unorm-1.6.0.tgz#029b289661fba714f1a9af439eb51d9b16c205af"