Mirror of https://github.com/cerc-io/watcher-ts (synced 2024-11-19 20:36:19 +00:00)
Implement subgraph store host API (#35)
* Implement store get api without blockHash and blockNumber
* Pass database instance to GraphWatcher
* Implement store set without block data
* Store blockHash and blockNumber in database entity table
* Implement getting entity in subgraph from store.get
* Add block data present in postgraphile
* Pass db and context to instantiate method in tests
* GQL API in graph-test-watcher to test store.set
* Remove contract address from subgraph file
* Fix block in dummy event data
* Pass just blockHash to get an entity from the database
* Review changes and add TODOs

Co-authored-by: prathamesh <prathamesh.musale0@gmail.com>
This commit is contained in:
parent 43d64f9e4b
commit 83775608ec
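
Not part of the commit itself: a minimal sketch of how the pieces introduced below fit together, drawn only from the changes in this diff. The dbConfig, postgraphileClient and subgraphPath values are assumed to be supplied by the host watcher's config.

    import path from 'path';

    import { EthClient } from '@vulcanize/ipld-eth-client';
    import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';

    export const setupGraphWatcher = async (dbConfig: any, postgraphileClient: EthClient, subgraphPath: string): Promise<GraphWatcher> => {
      // The graph-node Database wraps a TypeORM connection over the watcher's entity classes.
      const graphDb = new GraphDatabase(dbConfig, path.resolve(__dirname, 'entity/*'));
      await graphDb.init();

      // GraphWatcher now takes the database and a postgraphile client so that the
      // store.get / store.set host calls can read and write entities at the event's block.
      const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, subgraphPath);
      await graphWatcher.init();

      return graphWatcher;
    };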
@@ -5,9 +5,13 @@
  "license": "AGPL-3.0",
  "devDependencies": {
    "@graphprotocol/graph-ts": "^0.22.0",
    "@types/chai": "^4.2.18",
    "@types/chai-spies": "^1.0.3",
    "@types/js-yaml": "^4.0.4",
    "@typescript-eslint/eslint-plugin": "^4.25.0",
    "@typescript-eslint/parser": "^4.25.0",
    "chai": "^4.3.4",
    "chai-spies": "^1.0.0",
    "eslint": "^7.27.0",
    "eslint-config-semistandard": "^15.0.1",
    "eslint-config-standard": "^16.0.3",
@@ -32,6 +36,7 @@
  },
  "dependencies": {
    "@vulcanize/assemblyscript": "0.0.1",
    "js-yaml": "^4.1.0"
    "js-yaml": "^4.1.0",
    "typeorm": "^0.2.32"
  }
}
@@ -3,17 +3,54 @@
//

import path from 'path';
import chai, { assert, expect } from 'chai';
import spies from 'chai-spies';

import { getDummyEventData } from '../test/utils';
import { getDummyEventData, getTestDatabase } from '../test/utils';
import { instantiate } from './loader';
import { createEvent } from './utils';
import { createEvent, Block } from './utils';
import { Database } from './database';

chai.use(spies);

const sandbox = chai.spy.sandbox();

describe('call handler in mapping code', () => {
  let exports: any;
  let db: Database;

  const eventData = getDummyEventData();

  before(async () => {
    db = getTestDatabase();

    sandbox.on(db, 'getEntity', (blockHash: string, entityString: string, idString: string) => {
      assert(blockHash);
      assert(entityString);
      assert(idString);
    });

    sandbox.on(db, 'fromGraphEntity', async (instanceExports: any, block: Block, entity: string, entityInstance: any) => {
      const entityFields = [
        { type: 'varchar', propertyName: 'blockHash' },
        { type: 'integer', propertyName: 'blockNumber' },
        { type: 'bigint', propertyName: 'count' },
        { type: 'varchar', propertyName: 'param1' },
        { type: 'integer', propertyName: 'param2' }
      ];

      return db.getEntityValues(instanceExports, block, entityInstance, entityFields);
    });

    sandbox.on(db, 'saveEntity', (entity: string, data: any) => {
      assert(entity);
      assert(data);
    });
  });

  it('should load the subgraph example wasm', async () => {
    const filePath = path.resolve(__dirname, '../test/subgraph/example1/build/Example1/Example1.wasm');
    const instance = await instantiate(filePath);
    const instance = await instantiate(db, { event: { block: eventData.block } }, filePath);
    exports = instance.exports;
  });

@@ -27,8 +64,6 @@ describe('call handler in mapping code', () => {
    // TODO: Check api version https://github.com/graphprotocol/graph-node/blob/6098daa8955bdfac597cec87080af5449807e874/runtime/wasm/src/module/mod.rs#L533
    _start();

    const eventData = getDummyEventData();

    // Create event params data.
    eventData.eventParams = [
      {
@@ -50,5 +85,13 @@ describe('call handler in mapping code', () => {
    const test = await createEvent(exports, contractAddress, eventData);

    await handleTest(test);

    expect(db.getEntity).to.have.been.called();
    expect(db.fromGraphEntity).to.have.been.called();
    expect(db.saveEntity).to.have.been.called();
  });

  after(() => {
    sandbox.restore();
  });
});
packages/graph-node/src/database.ts (new file)
@@ -0,0 +1,136 @@
//
// Copyright 2021 Vulcanize, Inc.
//

import assert from 'assert';
import {
  Connection,
  ConnectionOptions,
  FindOneOptions
} from 'typeorm';

import {
  Database as BaseDatabase
} from '@vulcanize/util';

import { Block, fromEntityValue, toEntityValue } from './utils';

export class Database {
  _config: ConnectionOptions
  _conn!: Connection
  _baseDatabase: BaseDatabase

  constructor (config: ConnectionOptions, entitiesPath: string) {
    assert(config);

    this._config = {
      name: 'subgraph',
      ...config,
      entities: [entitiesPath]
    };

    this._baseDatabase = new BaseDatabase(this._config);
  }

  async init (): Promise<void> {
    this._conn = await this._baseDatabase.init();
  }

  async close (): Promise<void> {
    return this._baseDatabase.close();
  }

  async getEntity (blockHash: string, entity: string, id: string): Promise<any> {
    const queryRunner = this._conn.createQueryRunner();
    const repo = queryRunner.manager.getRepository(entity);
    const whereOptions: { [key: string]: any } = { id };

    if (blockHash) {
      whereOptions.blockHash = blockHash;
    }

    const findOptions = {
      where: whereOptions,
      order: {
        blockNumber: 'DESC'
      }
    };

    let entityData = await repo.findOne(findOptions as FindOneOptions<any>);

    if (!entityData && findOptions.where.blockHash) {
      entityData = await this._baseDatabase.getPrevEntityVersion(queryRunner, repo, findOptions);
    }

    return entityData;
  }

  async saveEntity (entity: string, data: any): Promise<void> {
    const repo = this._conn.getRepository(entity);

    const dbEntity: any = repo.create(data);
    await repo.save(dbEntity);
  }

  async toGraphEntity (instanceExports: any, entity: string, data: any): Promise<any> {
    // TODO: Cache schema/columns.
    const repo = this._conn.getRepository(entity);
    const entityFields = repo.metadata.columns;

    const { Entity } = instanceExports;
    const entityInstance = await Entity.__new();

    const entityValuePromises = entityFields.filter(field => {
      const { propertyName } = field;

      // TODO: Will clash if entity has blockHash and blockNumber fields.
      if (propertyName === 'blockHash' || propertyName === 'blockNumber') {
        return false;
      }

      return true;
    }).map(async (field) => {
      const { type, propertyName } = field;

      return toEntityValue(instanceExports, entityInstance, data, type.toString(), propertyName);
    }, {});

    await Promise.all(entityValuePromises);

    return entityInstance;
  }

  async fromGraphEntity (instanceExports: any, block: Block, entity: string, entityInstance: any): Promise<{ [key: string]: any }> {
    // TODO: Cache schema/columns.
    const repo = this._conn.getRepository(entity);
    const entityFields = repo.metadata.columns;

    return this.getEntityValues(instanceExports, block, entityInstance, entityFields);
  }

  async getEntityValues (instanceExports: any, block: Block, entityInstance: any, entityFields: any): Promise<{ [key: string]: any }> {
    const entityValuePromises = entityFields.map(async (field: any) => {
      const { type, propertyName } = field;

      // TODO: Will clash if entity has blockHash and blockNumber fields.
      if (propertyName === 'blockHash') {
        return block.blockHash;
      }

      if (propertyName === 'blockNumber') {
        return block.blockNumber;
      }

      return fromEntityValue(instanceExports, entityInstance, type.toString(), propertyName);
    }, {});

    const entityValues = await Promise.all(entityValuePromises);

    return entityFields.reduce((acc: { [key: string]: any }, field: any, index: number) => {
      const { propertyName } = field;
      acc[propertyName] = entityValues[index];

      return acc;
    }, {});
  }
}
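
Not part of the diff: a minimal sketch of how this new Database class might be used on its own, assuming a reachable Postgres instance and the ExampleEntity TypeORM entity added for graph-test-watcher further below. The connection options, block hash and id values are placeholders.

    import path from 'path';

    import { Database } from '@vulcanize/graph-node';

    const main = async (blockHash: string, id: string): Promise<void> => {
      // Wraps a TypeORM connection named 'subgraph' over the watcher's entity classes.
      const db = new Database({ type: 'postgres', database: 'graph-test-watcher' }, path.resolve(__dirname, 'entity/*'));
      await db.init();

      // Fetch the entity version stored at the given block hash; getEntity falls back to
      // getPrevEntityVersion when no row exists for that exact block hash.
      const entityData = await db.getEntity(blockHash, 'ExampleEntity', id);
      console.log(entityData);

      // Persist a plain object as a new row; blockHash and blockNumber are supplied by the caller.
      await db.saveEntity('ExampleEntity', { id, blockHash, blockNumber: 0, count: '1', param1: '', param2: 0 });

      await db.close();
    };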
@@ -5,19 +5,48 @@
import assert from 'assert';
import { ethers } from 'ethers';
import path from 'path';
import chai from 'chai';
import spies from 'chai-spies';

import { instantiate } from './loader';
import { createEvent } from './utils';
import { createEvent, Block } from './utils';
import edenNetworkAbi from '../test/subgraph/eden/EdenNetwork/abis/EdenNetwork.json';
import merkleDistributorAbi from '../test/subgraph/eden/EdenNetworkDistribution/abis/MerkleDistributor.json';
import distributorGovernanceAbi from '../test/subgraph/eden/EdenNetworkGovernance/abis/DistributorGovernance.json';
import { getDummyEventData } from '../test/utils';
import { getDummyEventData, getTestDatabase } from '../test/utils';
import { Database } from './database';

const ZERO_ADDRESS = '0x0000000000000000000000000000000000000000';

describe('eden wasm loader tests', () => {
chai.use(spies);

const sandbox = chai.spy.sandbox();

describe('eden wasm loader tests', async () => {
  let db: Database;
  const eventData = getDummyEventData();

  before(async () => {
    db = getTestDatabase();

    sandbox.on(db, 'getEntity', (blockHash: string, entityString: string, idString: string) => {
      assert(blockHash);
      assert(entityString);
      assert(idString);
    });

    sandbox.on(db, 'fromGraphEntity', async (instanceExports: any, block: Block, entity: string, entityInstance: any) => {
      const entityFields: any = [];

      return db.getEntityValues(instanceExports, block, entityInstance, entityFields);
    });

    sandbox.on(db, 'saveEntity', (entity: string, data: any) => {
      assert(entity);
      assert(data);
    });
  });

  describe('EdenNetwork wasm', () => {
    let exports: any;

@@ -36,7 +65,7 @@ describe('eden wasm loader tests', () => {

    it('should load the subgraph network wasm', async () => {
      const filePath = path.resolve(__dirname, '../test/subgraph/eden/EdenNetwork/EdenNetwork.wasm');
      ({ exports } = await instantiate(filePath, data));
      ({ exports } = await instantiate(db, { event: { block: eventData.block } }, filePath, data));
      const { _start } = exports;
      _start();
    });
@@ -195,7 +224,7 @@ describe('eden wasm loader tests', () => {

    it('should load the subgraph network distribution wasm', async () => {
      const filePath = path.resolve(__dirname, '../test/subgraph/eden/EdenNetworkDistribution/EdenNetworkDistribution.wasm');
      ({ exports } = await instantiate(filePath, data));
      ({ exports } = await instantiate(db, { event: { block: eventData.block } }, filePath, data));
      const { _start } = exports;
      _start();
    });
@@ -339,7 +368,7 @@ describe('eden wasm loader tests', () => {

    it('should load the subgraph network governance wasm', async () => {
      const filePath = path.resolve(__dirname, '../test/subgraph/eden/EdenNetworkGovernance/EdenNetworkGovernance.wasm');
      ({ exports } = await instantiate(filePath, data));
      ({ exports } = await instantiate(db, { event: { block: eventData.block } }, filePath, data));
      const { _start } = exports;
      _start();
    });
@@ -427,4 +456,8 @@ describe('eden wasm loader tests', () => {
      await rewardScheduleChanged(rewardScheduleChangedEvent);
    });
  });

  after(() => {
    sandbox.restore();
  });
});
@@ -7,9 +7,12 @@ import path from 'path';

import { instantiate } from './loader';
import exampleAbi from '../test/subgraph/example1/build/Example1/abis/Example1.json';
import { getTestDatabase } from '../test/utils';
import { Database } from './database';

describe('eth-call wasm tests', () => {
  let exports: any;
  let db: Database;

  const contractAddress = process.env.EXAMPLE_CONTRACT_ADDRESS;
  assert(contractAddress);
@@ -23,9 +26,13 @@ describe('eth-call wasm tests', () => {
    }
  };

  before(async () => {
    db = getTestDatabase();
  });

  it('should load the subgraph example wasm', async () => {
    const filePath = path.resolve(__dirname, '../test/subgraph/example1/build/Example1/Example1.wasm');
    const instance = await instantiate(filePath, data);
    const instance = await instantiate(db, { event: {} }, filePath, data);
    exports = instance.exports;
  });
@@ -1 +1,2 @@
export * from './watcher';
export * from './database';
@@ -6,15 +6,21 @@ import path from 'path';
import { expect } from 'chai';

import { instantiate } from './loader';
import { getTestDatabase } from '../test/utils';
import { Database } from './database';

const WASM_FILE_PATH = '../build/debug.wasm';

describe('wasm loader tests', () => {
  let exports: any;
  let db: Database;

  before(async () => {
    db = getTestDatabase();

    const filePath = path.resolve(__dirname, WASM_FILE_PATH);
    const instance = await instantiate(filePath);
    const instance = await instantiate(db, { event: {} }, filePath);

    exports = instance.exports;
  });
@@ -14,7 +14,8 @@ import {
} from 'ethers';

import { TypeId } from './types';
import { fromEthereumValue, toEthereumValue } from './utils';
import { Block, fromEthereumValue, toEthereumValue } from './utils';
import { Database } from './database';

const NETWORK_URL = 'http://127.0.0.1:8081';

@@ -29,7 +30,13 @@ interface GraphData {
  dataSource?: DataSource;
}

export const instantiate = async (filePath: string, data: GraphData = {}): Promise<loader.ResultObject & { exports: any }> => {
export interface Context {
  event: {
    block?: Block
  }
}

export const instantiate = async (database: Database, context: Context, filePath: string, data: GraphData = {}): Promise<loader.ResultObject & { exports: any }> => {
  const { abis = {}, dataSource } = data;
  const buffer = await fs.readFile(filePath);
  const provider = getDefaultProvider(NETWORK_URL);
@@ -37,31 +44,26 @@ export const instantiate = async (filePath: string, data: GraphData = {}): Promi
  const imports: WebAssembly.Imports = {
    index: {
      'store.get': async (entity: number, id: number) => {
        console.log('store.get');
        const entityName = __getString(entity);
        const entityId = __getString(id);

        const entityString = __getString(entity);
        console.log('entity:', entityString);
        const idString = __getString(id);
        console.log('id:', idString);
        assert(context.event.block);
        const entityData = await database.getEntity(context.event.block.blockHash, entityName, entityId);

        // TODO: Implement store get to fetch from DB using entity and id.

        // TODO: Fill entity with field values.
        // return Entity.__new()
        if (!entityData) {
          return null;
        }

        return database.toGraphEntity(exports, entityName, entityData);
      },
      'store.set': async (entity: number, id: number, data: number) => {
        console.log('store.set');
        const entityName = __getString(entity);

        const entityString = __getString(entity);
        console.log('entity:', entityString);
        const idString = __getString(id);
        console.log('id:', idString);
        const entityInstance = await Entity.wrap(data);
        const entityInstanceId = __getString(await entityInstance.getString(await __newString('id')));
        console.log('entity instance id:', entityInstanceId);

        // TODO: Implement store set to save entity in db with values from entityInstance.
        assert(context.event.block);
        const dbData = await database.fromGraphEntity(exports, context.event.block, entityName, entityInstance);
        await database.saveEntity(entityName, dbData);
      },

      'typeConversion.stringToH160': () => {
@@ -6,15 +6,20 @@ import path from 'path';
import { expect } from 'chai';

import { instantiate } from './loader';
import { getTestDatabase } from '../test/utils';
import { Database } from './database';

const EXAMPLE_WASM_FILE_PATH = '../test/subgraph/example1/build/Example1/Example1.wasm';

describe('numbers wasm tests', () => {
  let exports: any;
  let db: Database;

  before(async () => {
    db = getTestDatabase();

    const filePath = path.resolve(__dirname, EXAMPLE_WASM_FILE_PATH);
    const instance = await instantiate(filePath);
    const instance = await instantiate(db, { event: {} }, filePath);
    exports = instance.exports;
    const { _start } = exports;
@@ -6,15 +6,20 @@ import path from 'path';
import { expect } from 'chai';

import { instantiate } from './loader';
import { getTestDatabase } from '../test/utils';
import { Database } from './database';

const EXAMPLE_WASM_FILE_PATH = '../test/subgraph/example1/build/Example1/Example1.wasm';

describe('typeConversion wasm tests', () => {
  let exports: any;
  let db: Database;

  before(async () => {
    db = getTestDatabase();

    const filePath = path.resolve(__dirname, EXAMPLE_WASM_FILE_PATH);
    const instance = await instantiate(filePath);
    const instance = await instantiate(db, { event: {} }, filePath);
    exports = instance.exports;
    const { _start } = exports;
@@ -14,13 +14,6 @@ interface EventParam {
  kind: string;
}

interface Block {
  hash: string;
  number: number;
  timestamp: number;
  parentHash: string;
}

interface Transaction {
  hash: string;
  index: number;
@@ -28,6 +21,17 @@ interface Transaction {
  to: string;
}

export interface Block {
  blockHash: string;
  blockNumber: string;
  timestamp: string;
  parentHash: string;
  stateRoot: string;
  td: string;
  txRoot: string;
  receiptRoot: string;
}

export interface EventData {
  block: Block;
  tx: Transaction;
@@ -161,41 +165,48 @@ export const createEvent = async (instanceExports: any, contractAddress: string,
  } = instanceExports;

  // Fill block data.
  const blockHashByteArray = await ByteArray.fromHexString(await __newString(blockData.hash));
  const blockHashByteArray = await ByteArray.fromHexString(await __newString(blockData.blockHash));
  const blockHash = await Bytes.fromByteArray(blockHashByteArray);

  const parentHashByteArray = await ByteArray.fromHexString(await __newString(blockData.parentHash));
  const parentHash = await Bytes.fromByteArray(parentHashByteArray);

  const blockNumber = await BigInt.fromI32(blockData.number);
  const blockNumber = await BigInt.fromString(await __newString(blockData.blockNumber));

  const blockTimestamp = await BigInt.fromI32(blockData.timestamp);
  const blockTimestamp = await BigInt.fromString(await __newString(blockData.timestamp));

  const stateRootByteArray = await ByteArray.fromHexString(await __newString(blockData.stateRoot));
  const stateRoot = await Bytes.fromByteArray(stateRootByteArray);

  const transactionsRootByteArray = await ByteArray.fromHexString(await __newString(blockData.txRoot));
  const transactionsRoot = await Bytes.fromByteArray(transactionsRootByteArray);

  const receiptsRootByteArray = await ByteArray.fromHexString(await __newString(blockData.receiptRoot));
  const receiptsRoot = await Bytes.fromByteArray(receiptsRootByteArray);

  const totalDifficulty = await BigInt.fromString(await __newString(blockData.td));

  // Missing fields from watcher in block data:
  // unclesHash
  // author
  // stateRoot
  // transactionsRoot
  // receiptsRoot
  // gasUsed
  // gasLimit
  // difficulty
  // totalDifficulty
  // size
  const block = await ethereum.Block.__new(
    blockHash,
    parentHash,
    await Bytes.empty(),
    await Address.zero(),
    await Bytes.empty(),
    await Bytes.empty(),
    await Bytes.empty(),
    stateRoot,
    transactionsRoot,
    receiptsRoot,
    blockNumber,
    await BigInt.fromI32(0),
    await BigInt.fromI32(0),
    blockTimestamp,
    await BigInt.fromI32(0),
    await BigInt.fromI32(0),
    totalDifficulty,
    null
  );

@@ -266,3 +277,55 @@ export const getSubgraphConfig = async (subgraphPath: string): Promise<any> => {

  return config;
};

export const toEntityValue = async (instanceExports: any, entityInstance: any, data: any, type: string, key: string) => {
  const { __newString, BigInt: ExportBigInt } = instanceExports;
  const entityKey = await __newString(key);
  const value = data[key];

  switch (type) {
    case 'varchar': {
      const entityValue = await __newString(value);

      return entityInstance.setString(entityKey, entityValue);
    }

    case 'integer': {
      return entityInstance.setI32(entityKey, value);
    }

    case 'bigint': {
      const bigInt = await ExportBigInt.fromString(await __newString(value.toString()));

      return entityInstance.setBigInt(entityKey, bigInt);
    }

    // TODO: Support more types.
    default:
      throw new Error(`Unsupported type: ${type}`);
  }
};

export const fromEntityValue = async (instanceExports: any, entityInstance: any, type: string, key: string): Promise<any> => {
  const { __newString, __getString, BigInt: ExportBigInt } = instanceExports;
  const entityKey = await __newString(key);

  switch (type) {
    case 'varchar': {
      return __getString(await entityInstance.getString(entityKey));
    }

    case 'integer': {
      return entityInstance.getI32(entityKey);
    }

    case 'bigint': {
      const bigInt = ExportBigInt.wrap(await entityInstance.getBigInt(entityKey));
      return BigInt(__getString(await bigInt.toString()));
    }

    // TODO: Support more types.
    default:
      throw new Error(`Unsupported type: ${type}`);
  }
};
@@ -9,9 +9,11 @@ import fs from 'fs';
import { ContractInterface, utils } from 'ethers';

import { ResultObject } from '@vulcanize/assemblyscript/lib/loader';
import { EthClient } from '@vulcanize/ipld-eth-client';

import { createEvent, getSubgraphConfig } from './utils';
import { instantiate } from './loader';
import { Context, instantiate } from './loader';
import { Database } from './database';

const log = debug('vulcanize:graph-watcher');

@@ -21,11 +23,19 @@ interface DataSource {
}

export class GraphWatcher {
  _database: Database;
  _postgraphileClient: EthClient;
  _subgraphPath: string;
  _dataSources: any[] = [];
  _dataSourceMap: { [key: string]: DataSource } = {};

  constructor (subgraphPath: string) {
  _context: Context = {
    event: {}
  }

  constructor (database: Database, postgraphileClient: EthClient, subgraphPath: string) {
    this._database = database;
    this._postgraphileClient = postgraphileClient;
    this._subgraphPath = subgraphPath;
  }

@@ -58,7 +68,7 @@ export class GraphWatcher {
    const filePath = path.join(this._subgraphPath, file);

    return {
      instance: await instantiate(filePath, data),
      instance: await instantiate(this._database, this._context, filePath, data),
      contractInterface
    };
  }, {});
@@ -83,6 +93,16 @@ export class GraphWatcher {
  async handleEvent (eventData: any) {
    const { contract, event, eventSignature, block, tx, eventIndex } = eventData;

    const {
      allEthHeaderCids: {
        nodes: [
          blockData
        ]
      }
    } = await this._postgraphileClient.getBlocks({ blockHash: block.hash });

    this._context.event.block = blockData;

    // Get dataSource in subgraph yaml based on contract address.
    const dataSource = this._dataSources.find(dataSource => dataSource.source.address === contract);

@@ -113,7 +133,7 @@ export class GraphWatcher {

    const data = {
      eventParams: eventParams,
      block,
      block: blockData,
      tx,
      eventIndex
    };
@@ -123,4 +143,8 @@ export class GraphWatcher {

    await exports[eventHandler.handler](ethereumEvent);
  }

  async getEntity (blockHash: string, entity: string, id: string): Promise<any> {
    return this._database.getEntity(blockHash, entity, id);
  }
}
@@ -4,36 +4,37 @@ import {
  Example1,
  Test
} from '../generated/Example1/Example1';
// import { ExampleEntity } from '../generated/schema';
import { ExampleEntity } from '../generated/schema';

export function handleTest (event: Test): void {
  log.debug('event.address: {}', [event.address.toHexString()]);
  log.debug('event.params.param1: {}', [event.params.param1]);
  log.debug('event.params.param2: {}', [event.params.param2.toString()]);
  log.debug('event.block.hash: {}', [event.block.hash.toHexString()]);
  log.debug('event.block.stateRoot: {}', [event.block.stateRoot.toHexString()]);

  // Entities can be loaded from the store using a string ID; this ID
  // needs to be unique across all entities of the same type
  // let entity = ExampleEntity.load(event.transaction.from.toHex());
  let entity = ExampleEntity.load(event.transaction.from.toHex());

  // Entities only exist after they have been saved to the store;
  // `null` checks allow to create entities on demand
  // if (!entity) {
  //   entity = new ExampleEntity(event.transaction.from.toHex());
  if (!entity) {
    entity = new ExampleEntity(event.transaction.from.toHex());

  //   // Entity fields can be set using simple assignments
  //   entity.count = BigInt.fromI32(0);
  // }
    // Entity fields can be set using simple assignments
    entity.count = BigInt.fromString('0');
  }

  // BigInt and BigDecimal math are supported
  // entity.count = entity.count + BigInt.fromI32(1)
  entity.count = entity.count + BigInt.fromString('1');

  // Entity fields can be set based on event parameters
  // entity.param1 = event.params.param1;
  // entity.param2 = event.params.param2;
  entity.param1 = event.params.param1;
  entity.param2 = event.params.param2;

  // Entities can be written to the store with `.save()`
  // entity.save();
  entity.save();

  // Note: If a handler doesn't require existing field values, it is faster
  // _not_ to load the entity from the store. Instead, create it fresh with
@@ -6,7 +6,7 @@ dataSources:
    name: Example1
    network: mainnet
    source:
      address: "0x4Ab7aE18973491Df21d6103dfA55170fdB2CCC98"
      address: ""
      abi: Example1
    mapping:
      kind: ethereum/events
@@ -3,16 +3,21 @@
//

import { EventData } from '../../src/utils';
import { Database } from '../../src/database';

export const ZERO_ADDRESS = '0x0000000000000000000000000000000000000000';
export const ZERO_HASH = '0x0000000000000000000000000000000000000000000000000000000000000000';

export const getDummyEventData = (): EventData => {
  const block = {
    hash: ZERO_HASH,
    number: 0,
    timestamp: 0,
    parentHash: ZERO_HASH
    blockHash: ZERO_HASH,
    blockNumber: '0',
    timestamp: '0',
    parentHash: ZERO_HASH,
    stateRoot: ZERO_HASH,
    td: ZERO_HASH,
    txRoot: ZERO_HASH,
    receiptRoot: ZERO_HASH
  };

  const tx = {
@@ -29,3 +34,7 @@ export const getDummyEventData = (): EventData => {
    eventIndex: 0
  };
};

export const getTestDatabase = (): Database => {
  return new Database({ type: 'postgres' }, '');
};
packages/graph-test-watcher/src/entity/ExampleEntity.ts (new file)
@@ -0,0 +1,27 @@
//
// Copyright 2021 Vulcanize, Inc.
//

import { Entity, PrimaryColumn, Column } from 'typeorm';

@Entity()
export class ExampleEntity {
  @PrimaryColumn('varchar')
  id!: string;

  // https://typeorm.io/#/entities/primary-columns
  @PrimaryColumn('varchar', { length: 66 })
  blockHash!: string

  @Column('integer')
  blockNumber!: number;

  @Column('bigint')
  count!: bigint

  @Column('varchar')
  param1!: string

  @Column('integer')
  param2!: number
}
@@ -8,10 +8,12 @@ import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import debug from 'debug';
import { PubSub } from 'apollo-server-express';
import path from 'path';

import { getCache } from '@vulcanize/cache';
import { EthClient } from '@vulcanize/ipld-eth-client';
import { getConfig, fillBlocks, JobQueue, DEFAULT_CONFIG_PATH, getCustomProvider } from '@vulcanize/util';
import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';

import { Database } from './database';
import { Indexer } from './indexer';
@@ -46,7 +48,7 @@ export const main = async (): Promise<any> => {

  assert(config.server, 'Missing server config');

  const { upstream, database: dbConfig, jobQueue: jobQueueConfig } = config;
  const { upstream, database: dbConfig, jobQueue: jobQueueConfig, server: { subgraphPath } } = config;

  assert(dbConfig, 'Missing database config');

@@ -72,10 +74,16 @@ export const main = async (): Promise<any> => {

  const ethProvider = getCustomProvider(rpcProviderEndpoint);

  const graphDb = new GraphDatabase(dbConfig, path.resolve(__dirname, 'entity/*'));
  await graphDb.init();

  const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, subgraphPath);
  await graphWatcher.init();

  // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
  // Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
  const pubsub = new PubSub();
  const indexer = new Indexer(db, ethClient, postgraphileClient, ethProvider);
  const indexer = new Indexer(db, ethClient, postgraphileClient, ethProvider, graphWatcher);

  const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig;
  assert(dbConnectionString, 'Missing job queue db connection string');
@@ -167,6 +167,10 @@
    return result;
  }

  async getExampleEntity (blockHash: string, id: string): Promise<string> {
    return this._graphWatcher.getEntity(blockHash, 'ExampleEntity', id);
  }

  async triggerIndexingOnEvent (event: Event): Promise<void> {
    const resultEvent = this.getResultEvent(event);
@@ -7,6 +7,7 @@ import 'reflect-metadata';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import debug from 'debug';
import path from 'path';

import { getCache } from '@vulcanize/cache';
import { EthClient } from '@vulcanize/ipld-eth-client';
@@ -20,7 +21,7 @@ import {
  DEFAULT_CONFIG_PATH,
  getCustomProvider
} from '@vulcanize/util';
import { GraphWatcher } from '@vulcanize/graph-node';
import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';

import { Indexer } from './indexer';
import { Database } from './database';
@@ -107,7 +108,10 @@ export const main = async (): Promise<any> => {
    cache
  });

  const graphWatcher = new GraphWatcher(subgraphPath);
  const graphDb = new GraphDatabase(dbConfig, path.resolve(__dirname, 'entity/*'));
  await graphDb.init();

  const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, subgraphPath);
  await graphWatcher.init();

  const ethProvider = getCustomProvider(rpcProviderEndpoint);
@@ -73,6 +73,14 @@ export const createResolvers = async (indexer: Indexer, eventWatcher: EventWatch

      const events = await indexer.getEventsInRange(fromBlockNumber, toBlockNumber);
      return events.map(event => indexer.getResultEvent(event));
    },

    exampleEntity: async (_: any, { blockHash, id }: { blockHash: string, id: string }) => {
      log('exampleEntity', blockHash, id);

      const exampleEntity = await indexer.getExampleEntity(blockHash, id);

      return JSON.stringify(exampleEntity, undefined, 2);
    }
  }
};
@@ -3,6 +3,7 @@ type Query {
  eventsInRange(fromBlockNumber: Int!, toBlockNumber: Int!): [ResultEvent!]
  getMethod(blockHash: String!, contractAddress: String!): ResultString!
  _test(blockHash: String!, contractAddress: String!): ResultBigInt!
  exampleEntity(blockHash: String!, id: String!): String!
}

type ResultEvent {
@@ -17,7 +17,7 @@ import { createServer } from 'http';
import { getCache } from '@vulcanize/cache';
import { EthClient } from '@vulcanize/ipld-eth-client';
import { DEFAULT_CONFIG_PATH, getConfig, JobQueue, KIND_ACTIVE, getCustomProvider } from '@vulcanize/util';
import { GraphWatcher } from '@vulcanize/graph-node';
import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node';

import { createResolvers } from './resolvers';
import { Indexer } from './indexer';
@@ -70,7 +70,10 @@ export const main = async (): Promise<any> => {

  const ethProvider = getCustomProvider(rpcProviderEndpoint);

  const graphWatcher = new GraphWatcher(subgraphPath);
  const graphDb = new GraphDatabase(dbConfig, path.resolve(__dirname, 'entity/*'));
  await graphDb.init();

  const graphWatcher = new GraphWatcher(graphDb, postgraphileClient, subgraphPath);
  await graphWatcher.init();

  const indexer = new Indexer(db, ethClient, postgraphileClient, ethProvider, graphWatcher);
yarn.lock
@@ -2287,6 +2287,13 @@
    "@types/connect" "*"
    "@types/node" "*"

"@types/chai-spies@^1.0.3":
  version "1.0.3"
  resolved "https://registry.yarnpkg.com/@types/chai-spies/-/chai-spies-1.0.3.tgz#a52dc61af3853ec9b80965040811d15dfd401542"
  integrity sha512-RBZjhVuK7vrg4rWMt04UF5zHYwfHnpk5mIWu3nQvU3AKGDixXzSjZ6v0zke6pBcaJqMv3IBZ5ibLWPMRDL0sLw==
  dependencies:
    "@types/chai" "*"

"@types/chai@*", "@types/chai@^4.2.18":
  version "4.2.18"
  resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.18.tgz#0c8e298dbff8205e2266606c1ea5fbdba29b46e4"
@@ -4617,6 +4624,11 @@ catering@^2.0.0:
  resolved "https://registry.yarnpkg.com/catering/-/catering-2.0.0.tgz#15ce31bcbffafbf62855ea7677b0e5d23581233d"
  integrity sha512-aD/WmxhGwUGsVPrj8C80vH7C7GphJilYVSdudoV4u16XdrLF7CVyfBmENsc4tLTVsJJzCRid8GbwJ7mcPLee6Q==

chai-spies@^1.0.0:
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/chai-spies/-/chai-spies-1.0.0.tgz#d16b39336fb316d03abf8c375feb23c0c8bb163d"
  integrity sha512-elF2ZUczBsFoP07qCfMO/zeggs8pqCf3fZGyK5+2X4AndS8jycZYID91ztD9oQ7d/0tnS963dPkd0frQEThDsg==

chai@^4.3.4:
  version "4.3.4"
  resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.4.tgz#b55e655b31e1eac7099be4c08c21964fce2e6c49"