Implement TypeScript strict mode and ESLint in watcher package (#40)

* Set up TypeScript build and ESLint.

* Automatic lint fixes.

* Fix argument type warnings.

* Fix function return types.

* Fix TypeScript errors in watcher.

Co-authored-by: nikugogoi <95nikass@gmail.com>
Ashwin Phatak 2021-06-04 18:08:26 +05:30 committed by GitHub
parent 3aaa9dd7f8
commit 945cba9392
19 changed files with 402 additions and 204 deletions


@@ -8,7 +8,7 @@ import debug from 'debug';
 const log = debug('vulcanize:cache');

-interface Config {
+export interface Config {
   name: string;
   enabled: boolean;
   deleteOnStart: boolean;


@@ -0,0 +1,5 @@
# Don't lint node_modules.
node_modules
# Don't lint build output.
dist


@@ -0,0 +1,27 @@
{
"env": {
"browser": true,
"es2021": true
},
"extends": [
"semistandard",
"plugin:@typescript-eslint/recommended"
],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 12,
"sourceType": "module"
},
"plugins": [
"@typescript-eslint"
],
"rules": {
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/explicit-module-boundary-types": [
"warn",
{
"allowArgumentsExplicitlyTypedAsAny": true
}
]
}
}
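For context, a minimal sketch (not part of this commit) of what the two rule overrides above mean in practice: `no-explicit-any` is disabled, so `any` may be used freely, and `explicit-module-boundary-types` warns when an exported function omits explicit types at its module boundary, except that arguments explicitly typed as `any` are allowed. The function names below are made up for illustration.

// Hypothetical example, not from the commit.
// Accepted by the config above: the argument is explicitly typed (even as `any`)
// and the return type is declared.
export function toJson (value: any): string {
  return JSON.stringify(value);
}

// Would produce an `explicit-module-boundary-types` warning: the exported
// function has no explicit return type.
// export function toJsonLoose (value: any) {
//   return JSON.stringify(value);
// }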


@@ -6,7 +6,9 @@
   "scripts": {
     "server": "DEBUG=vulcanize:* nodemon src/server.ts -f environments/local.toml",
     "server:mock": "MOCK=1 nodemon src/server.ts -f environments/local.toml",
-    "test": "mocha -r ts-node/register src/**/*.spec.ts"
+    "test": "mocha -r ts-node/register src/**/*.spec.ts",
+    "lint": "eslint .",
+    "build": "tsc"
   },
   "repository": {
     "type": "git",
@@ -50,6 +52,15 @@
     "@types/fs-extra": "^9.0.11",
     "@types/yargs": "^17.0.0",
     "nodemon": "^2.0.7",
-    "ts-node": "^10.0.0"
+    "ts-node": "^10.0.0",
+    "@typescript-eslint/eslint-plugin": "^4.25.0",
+    "@typescript-eslint/parser": "^4.25.0",
+    "eslint": "^7.27.0",
+    "eslint-config-semistandard": "^15.0.1",
+    "eslint-config-standard": "^16.0.3",
+    "eslint-plugin-import": "^2.23.3",
+    "eslint-plugin-node": "^11.1.0",
+    "eslint-plugin-promise": "^5.1.0",
+    "eslint-plugin-standard": "^5.0.0"
   }
 }


@@ -1,23 +1,22 @@
-import assert from "assert";
-import { Connection, createConnection } from "typeorm";
+import assert from 'assert';
+import { Connection, ConnectionOptions, createConnection, DeepPartial } from 'typeorm';
 import { SnakeNamingStrategy } from 'typeorm-naming-strategies';

-import { Allowance } from "./entity/Allowance";
-import { Balance } from "./entity/Balance";
-import { Event } from "./entity/Event";
-import { EventSyncProgress } from "./entity/EventProgress";
+import { Allowance } from './entity/Allowance';
+import { Balance } from './entity/Balance';
+import { Event } from './entity/Event';
+import { EventSyncProgress } from './entity/EventProgress';

 export class Database {
-  _config: any
-  _conn: Connection
+  _config: ConnectionOptions
+  _conn!: Connection

-  constructor(config) {
+  constructor (config: ConnectionOptions) {
     assert(config);
     this._config = config;
   }

-  async init() {
+  async init (): Promise<void> {
     assert(!this._conn);
     this._conn = await createConnection({
@@ -26,10 +25,14 @@ export class Database {
     });
   }

-  async getBalance({ blockHash, token, owner }) {
+  async getBalance ({ blockHash, token, owner }: { blockHash: string, token: string, owner: string }): Promise<Balance | undefined> {
+    if (!this._conn) {
+      return;
+    }
+
     return this._conn.getRepository(Balance)
-      .createQueryBuilder("balance")
-      .where("block_hash = :blockHash AND token = :token AND owner = :owner", {
+      .createQueryBuilder('balance')
+      .where('block_hash = :blockHash AND token = :token AND owner = :owner', {
         blockHash,
         token,
         owner
@@ -37,10 +40,14 @@ export class Database {
       .getOne();
   }

-  async getAllowance({ blockHash, token, owner, spender }) {
+  async getAllowance ({ blockHash, token, owner, spender }: { blockHash: string, token: string, owner: string, spender: string }): Promise<Allowance | undefined> {
+    if (!this._conn) {
+      return;
+    }
+
     return this._conn.getRepository(Allowance)
-      .createQueryBuilder("allowance")
-      .where("block_hash = :blockHash AND token = :token AND owner = :owner AND spender = :spender", {
+      .createQueryBuilder('allowance')
+      .where('block_hash = :blockHash AND token = :token AND owner = :owner AND spender = :spender', {
         blockHash,
         token,
         owner,
@@ -49,45 +56,61 @@ export class Database {
       .getOne();
   }

-  async saveBalance({ blockHash, token, owner, value, proof }) {
+  async saveBalance ({ blockHash, token, owner, value, proof }: DeepPartial<Balance>): Promise<Balance | undefined> {
+    if (!this._conn) {
+      return;
+    }
+
     const repo = this._conn.getRepository(Balance);
     const entity = repo.create({ blockHash, token, owner, value, proof });
     return repo.save(entity);
   }

-  async saveAllowance({ blockHash, token, owner, spender, value, proof }) {
+  async saveAllowance ({ blockHash, token, owner, spender, value, proof }: DeepPartial<Allowance>): Promise<Allowance | undefined> {
+    if (!this._conn) {
+      return;
+    }
+
     const repo = this._conn.getRepository(Allowance);
     const entity = repo.create({ blockHash, token, owner, spender, value, proof });
     return repo.save(entity);
   }

   // Returns true if events have already been synced for the (block, token) combination.
-  async didSyncEvents({ blockHash, token }) {
+  async didSyncEvents ({ blockHash, token }: { blockHash: string, token: string }): Promise<boolean | undefined> {
+    if (!this._conn) {
+      return;
+    }
+
     const numRows = await this._conn.getRepository(EventSyncProgress)
       .createQueryBuilder()
-      .where("block_hash = :blockHash AND token = :token", {
+      .where('block_hash = :blockHash AND token = :token', {
         blockHash,
-        token,
+        token
       })
       .getCount();

     return numRows > 0;
   }

-  async getEvents({ blockHash, token }) {
+  async getEvents ({ blockHash, token }: { blockHash: string, token: string }): Promise<Event[]> {
     return this._conn.getRepository(Event)
-      .createQueryBuilder("event")
-      .where("block_hash = :blockHash AND token = :token", {
+      .createQueryBuilder('event')
+      .where('block_hash = :blockHash AND token = :token', {
         blockHash,
-        token,
+        token
       })
       .getMany();
   }

-  async getEventsByName({ blockHash, token, eventName }) {
+  async getEventsByName ({ blockHash, token, eventName }: { blockHash: string, token: string, eventName: string }): Promise<Event[] | undefined> {
+    if (!this._conn) {
+      return;
+    }
+
     return this._conn.getRepository(Event)
-      .createQueryBuilder("event")
-      .where("block_hash = :blockHash AND token = :token AND :eventName = :eventName", {
+      .createQueryBuilder('event')
+      .where('block_hash = :blockHash AND token = :token AND :eventName = :eventName', {
         blockHash,
         token,
         eventName
@@ -95,7 +118,11 @@ export class Database {
       .getMany();
   }

-  async saveEvents({ blockHash, token, events }) {
+  async saveEvents ({ blockHash, token, events }: { blockHash: string, token: string, events: DeepPartial<Event>[] }): Promise<void> {
+    if (!this._conn) {
+      return;
+    }
+
     // TODO: Using the same connection doesn't work when > 1 inserts are attempted at the same time (e.g. simultaneous GQL requests).
     // In a transaction:


@@ -1,27 +1,26 @@
-import { Entity, PrimaryGeneratedColumn, Column, Index } from "typeorm";
+import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';

 @Entity()
-@Index(["blockHash", "token", "owner", "spender"], { unique: true })
+@Index(['blockHash', 'token', 'owner', 'spender'], { unique: true })
 export class Allowance {
   @PrimaryGeneratedColumn()
-  id: number;
+  id!: number;

-  @Column("varchar", { length: 66 })
-  blockHash: string;
+  @Column('varchar', { length: 66 })
+  blockHash!: string;

-  @Column("varchar", { length: 42 })
-  token: string;
+  @Column('varchar', { length: 42 })
+  token!: string;

-  @Column("varchar", { length: 42 })
-  owner: string;
+  @Column('varchar', { length: 42 })
+  owner!: string;

-  @Column("varchar", { length: 42 })
-  spender: string;
+  @Column('varchar', { length: 42 })
+  spender!: string;

-  @Column("numeric")
-  value: number;
+  @Column('numeric')
+  value!: BigInt;

-  @Column("text")
-  proof: string;
+  @Column('text')
+  proof!: string;
 }
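The `!` added to the entity fields above is TypeScript's definite assignment assertion. With `"strict": true` in the new tsconfig (which enables `strictPropertyInitialization`), class properties must either be initialized or asserted as definitely assigned; TypeORM populates these columns at runtime, so the assertion is the usual way to satisfy the compiler. A small illustrative sketch, not part of the commit:

// Hypothetical example, not from the commit.
class Example {
  // Error under strictPropertyInitialization: no initializer and not assigned
  // in the constructor.
  // broken: string;

  // OK: the definite assignment assertion promises the value is set elsewhere
  // (here, by the ORM when it hydrates the entity).
  hydrated!: string;
}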


@@ -1,24 +1,23 @@
-import { Entity, PrimaryGeneratedColumn, Column, Index } from "typeorm";
+import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';

 @Entity()
-@Index(["blockHash", "token", "owner"], { unique: true })
+@Index(['blockHash', 'token', 'owner'], { unique: true })
 export class Balance {
   @PrimaryGeneratedColumn()
-  id: number;
+  id!: number;

-  @Column("varchar", { length: 66 })
-  blockHash: string;
+  @Column('varchar', { length: 66 })
+  blockHash!: string;

-  @Column("varchar", { length: 42 })
-  token: string;
+  @Column('varchar', { length: 42 })
+  token!: string;

-  @Column("varchar", { length: 42 })
-  owner: string;
+  @Column('varchar', { length: 42 })
+  owner!: string;

-  @Column("numeric")
-  value: number;
+  @Column('numeric')
+  value!: BigInt;

-  @Column("text")
-  proof: string;
+  @Column('text')
+  proof!: string;
 }


@@ -1,46 +1,45 @@
-import { Entity, PrimaryGeneratedColumn, Column, Index } from "typeorm";
+import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';

 @Entity()
 // Index to query all events for a contract efficiently.
-@Index(["blockHash", "token"])
+@Index(['blockHash', 'token'])
 // Index to query 'Transfer' events efficiently.
-@Index(["blockHash", "token", "eventName", "transferFrom", "transferTo"])
+@Index(['blockHash', 'token', 'eventName', 'transferFrom', 'transferTo'])
 // Index to query 'Approval' events efficiently.
-@Index(["blockHash", "token", "eventName", "approvalOwner", "approvalSpender"])
+@Index(['blockHash', 'token', 'eventName', 'approvalOwner', 'approvalSpender'])
 export class Event {
   @PrimaryGeneratedColumn()
-  id: number;
+  id!: number;

-  @Column("varchar", { length: 66 })
-  blockHash: string;
+  @Column('varchar', { length: 66 })
+  blockHash!: string;

-  @Column("varchar", { length: 42 })
-  token: string;
+  @Column('varchar', { length: 42 })
+  token!: string;

-  @Column("varchar", { length: 256 })
-  eventName: string;
+  @Column('varchar', { length: 256 })
+  eventName!: string;

-  @Column("text")
-  proof: string;
+  @Column('text')
+  proof!: string;

   // Transfer event columns.
-  @Column("varchar", { length: 42, nullable: true })
-  transferFrom: string;
+  @Column('varchar', { length: 42, nullable: true })
+  transferFrom!: string;

-  @Column("varchar", { length: 42, nullable: true })
-  transferTo: string;
+  @Column('varchar', { length: 42, nullable: true })
+  transferTo!: string;

-  @Column("numeric", { nullable: true })
-  transferValue: number;
+  @Column('numeric', { nullable: true })
+  transferValue!: BigInt;

   // Approval event columns.
-  @Column("varchar", { length: 42, nullable: true })
-  approvalOwner: string;
+  @Column('varchar', { length: 42, nullable: true })
+  approvalOwner!: string;

-  @Column("varchar", { length: 42, nullable: true })
-  approvalSpender: string;
+  @Column('varchar', { length: 42, nullable: true })
+  approvalSpender!: string;

-  @Column("numeric", { nullable: true })
-  approvalValue: number;
+  @Column('numeric', { nullable: true })
+  approvalValue!: BigInt;
 }


@@ -1,4 +1,4 @@
-import { Entity, PrimaryGeneratedColumn, Column, Index } from "typeorm";
+import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';

 // Stores a row if events for a (block, token) combination have already been fetched.
 //
@@ -7,15 +7,14 @@ import { Entity, PrimaryGeneratedColumn, Column, Index } from "typeorm";
 // yet been synced from upstream.
 //
 @Entity()
-@Index(["blockHash", "token"], { unique: true })
+@Index(['blockHash', 'token'], { unique: true })
 export class EventSyncProgress {
   @PrimaryGeneratedColumn()
-  id: number;
+  id!: number;

-  @Column("varchar", { length: 66 })
-  blockHash: string;
+  @Column('varchar', { length: 66 })
+  blockHash!: string;

-  @Column("varchar", { length: 42 })
-  token: string;
+  @Column('varchar', { length: 42 })
+  token!: string;
 }


@@ -1,12 +1,13 @@
 import 'graphql-import-node';
 import { makeExecutableSchema } from '@graphql-tools/schema';
+import { GraphQLSchema } from 'graphql';

 import * as typeDefs from './erc20.graphql';
 import { createResolvers as createMockResolvers } from './mock/resolvers';
-import { createResolvers } from './resolvers';
+import { Config, createResolvers } from './resolvers';

-export const createSchema = async (config) => {
-  const resolvers = process.env.MOCK ? await createMockResolvers(config) : await createResolvers(config);
+export const createSchema = async (config: Config): Promise<GraphQLSchema> => {
+  const resolvers = process.env.MOCK ? await createMockResolvers() : await createResolvers(config);

   return makeExecutableSchema({
     typeDefs,

@@ -1,24 +1,50 @@
-import assert from "assert";
+import assert from 'assert';
 import debug from 'debug';
-import { invert } from "lodash";
+import { invert } from 'lodash';
+import { JsonFragment } from '@ethersproject/abi';
+import { DeepPartial } from 'typeorm';

-import { EthClient, getMappingSlot, topictoAddress } from "@vulcanize/ipld-eth-client";
-import { getStorageInfo, getEventNameTopics, getStorageValue, GetStorageAt } from '@vulcanize/solidity-mapper';
+import { EthClient, getMappingSlot, topictoAddress } from '@vulcanize/ipld-eth-client';
+import { getStorageInfo, getEventNameTopics, getStorageValue, GetStorageAt, StorageLayout } from '@vulcanize/solidity-mapper';

 import { Database } from './database';
+import { Event } from './entity/Event';

 const log = debug('vulcanize:indexer');

-export class Indexer {
+interface Artifacts {
+  abi: JsonFragment[];
+  storageLayout: StorageLayout;
+}
+
+export interface ValueResult {
+  value: string | BigInt;
+  proof: {
+    data: string;
+  }
+}
+
+type EventsResult = Array<{
+  event: {
+    from?: string;
+    to?: string;
+    owner?: string;
+    spender?: string;
+    value?: BigInt;
+    __typename: string;
+  }
+  proof: string;
+}>
+
+export class Indexer {
   _db: Database
   _ethClient: EthClient
   _getStorageAt: GetStorageAt
-  _abi: any
-  _storageLayout: any
+  _abi: JsonFragment[]
+  _storageLayout: StorageLayout

-  constructor(db, ethClient, artifacts) {
+  constructor (db: Database, ethClient: EthClient, artifacts: Artifacts) {
     assert(db);
     assert(ethClient);
     assert(artifacts);
@@ -36,7 +62,7 @@ export class Indexer {
     this._storageLayout = storageLayout;
   }

-  async totalSupply(blockHash, token) {
+  async totalSupply (blockHash: string, token: string): Promise<ValueResult> {
     // TODO: Use getStorageValue when it supports uint256 values.
     const { slot } = getStorageInfo(this._storageLayout, '_totalSupply');
@@ -52,13 +78,13 @@ export class Indexer {
     return result;
   }

-  async balanceOf(blockHash, token, owner) {
+  async balanceOf (blockHash: string, token: string, owner: string): Promise<ValueResult> {
     const entity = await this._db.getBalance({ blockHash, token, owner });
     if (entity) {
       return {
         value: entity.value,
         proof: JSON.parse(entity.proof)
-      }
+      };
     }

     // TODO: Use getStorageValue when it supports mappings.
@@ -80,13 +106,13 @@ export class Indexer {
     return result;
   }

-  async allowance(blockHash, token, owner, spender) {
+  async allowance (blockHash: string, token: string, owner: string, spender: string): Promise<ValueResult> {
     const entity = await this._db.getAllowance({ blockHash, token, owner, spender });
     if (entity) {
       return {
         value: entity.value,
         proof: JSON.parse(entity.proof)
-      }
+      };
     }

     // TODO: Use getStorageValue when it supports nested mappings.
@@ -108,7 +134,7 @@ export class Indexer {
     return result;
   }

-  async name(blockHash, token) {
+  async name (blockHash: string, token: string): Promise<ValueResult> {
     const result = await this._getStorageValue(blockHash, token, '_name');

     log(JSON.stringify(result, null, 2));
@@ -116,7 +142,7 @@ export class Indexer {
     return result;
   }

-  async symbol(blockHash, token) {
+  async symbol (blockHash: string, token: string): Promise<ValueResult> {
     const result = await this._getStorageValue(blockHash, token, '_symbol');

     log(JSON.stringify(result, null, 2));
@@ -124,14 +150,14 @@ export class Indexer {
     return result;
   }

-  async decimals(blockHash, token) {
+  async decimals (): Promise<void> {
     // Not a state variable, uses hardcoded return value in contract function.
     // See https://github.com/OpenZeppelin/openzeppelin-contracts/blob/master/contracts/token/ERC20/ERC20.sol#L86
     throw new Error('Not implemented.');
   }

-  async getEvents(blockHash, token, name) {
+  async getEvents (blockHash: string, token: string, name: string): Promise<EventsResult> {
     const didSyncEvents = await this._db.didSyncEvents({ blockHash, token });
     if (!didSyncEvents) {
       // Fetch and save events first and make a note in the event sync progress table.
@@ -147,21 +173,27 @@ export class Indexer {
       // TODO: Filter using db WHERE condition when name is not empty.
       .filter(event => !name || name === event.eventName)
       .map(e => {
-        const eventFields = {};
+        const eventFields: {
+          from?: string,
+          to?: string,
+          value?: BigInt,
+          owner?: string,
+          spender?: string,
+        } = {};

         switch (e.eventName) {
           case 'Transfer': {
-            eventFields['from'] = e.transferFrom;
-            eventFields['to'] = e.transferTo;
-            eventFields['value'] = e.transferValue;
+            eventFields.from = e.transferFrom;
+            eventFields.to = e.transferTo;
+            eventFields.value = e.transferValue;
             break;
-          };
+          }
           case 'Approval': {
-            eventFields['owner'] = e.approvalOwner;
-            eventFields['spender'] = e.approvalSpender;
-            eventFields['value'] = e.approvalValue;
+            eventFields.owner = e.approvalOwner;
+            eventFields.spender = e.approvalSpender;
+            eventFields.value = e.approvalValue;
             break;
-          };
+          }
         }

         return {
@@ -171,7 +203,7 @@ export class Indexer {
           },
           // TODO: Return proof only if requested.
           proof: JSON.parse(e.proof)
-        }
+        };
       });

     log(JSON.stringify(result, null, 2));
@@ -180,7 +212,7 @@ export class Indexer {
   }

   // TODO: Move into base/class or framework package.
-  async _getStorageValue(blockHash, token, variable) {
+  async _getStorageValue (blockHash: string, token: string, variable: string): Promise<ValueResult> {
     return getStorageValue(
       this._storageLayout,
       this._getStorageAt,
@@ -190,14 +222,14 @@ export class Indexer {
     );
   }

-  async _fetchAndSaveEvents({ blockHash, token }) {
+  async _fetchAndSaveEvents ({ blockHash, token }: { blockHash: string, token: string }): Promise<void> {
     const logs = await this._ethClient.getLogs({ blockHash, contract: token });
     log(JSON.stringify(logs, null, 2));

     const eventNameToTopic = getEventNameTopics(this._abi);
     const logTopicToEventName = invert(eventNameToTopic);

-    const dbEvents = logs.map(log => {
+    const dbEvents = logs.map((log: any) => {
       const { topics, data: value, cid, ipldBlock } = log;

       const [topic0, topic1, topic2] = topics;
@@ -206,7 +238,7 @@ export class Indexer {
       const address1 = topictoAddress(topic1);
       const address2 = topictoAddress(topic2);

-      const event = {
+      const event: DeepPartial<Event> = {
         blockHash,
         token,
         eventName,
@@ -219,22 +251,22 @@ export class Indexer {
             ipldBlock
           }
         })
-        }),
+        })
       };

       switch (eventName) {
         case 'Transfer': {
-          event['transferFrom'] = address1;
-          event['transferTo'] = address2;
-          event['transferValue'] = BigInt(value);
+          event.transferFrom = address1;
+          event.transferTo = address2;
+          event.transferValue = BigInt(value);
           break;
-        };
+        }
         case 'Approval': {
-          event['approvalOwner'] = address1;
-          event['approvalSpender'] = address2;
-          event['approvalValue'] = BigInt(value);
+          event.approvalOwner = address1;
+          event.approvalSpender = address2;
+          event.approvalValue = BigInt(value);
           break;
-        };
+        }
       }

       return event;
@@ -242,4 +274,4 @@ export class Indexer {
     await this._db.saveEvents({ blockHash, token, events: dbEvents });
   }
 }


@@ -1,6 +1,6 @@
 // TODO: Pull mock data for 5 tokens from rinkeby.
-export const tokens = {
+export const tokens: {[address: string]: {[variable: string]: string}} = {
   '0xd87fea54f506972e3267239ec8e159548892074a': {
     name: 'ChainLink Token',
     symbol: 'LINK',
@@ -9,7 +9,7 @@ export const tokens = {
   }
 };

-export const blocks = {
+export const blocks: {[blockHash: string]: {[address: string]: any}} = {
   // Block hash.
   '0x77b5479a5856dd8ec63df6aabf9ce0913071a6dda3a3d54f3c9c940574bcb8ab': {

@@ -5,13 +5,12 @@ import { blocks } from './data';
 const log = debug('test');

-export const createResolvers = async (config) => {
+export const createResolvers = async (): Promise<any> => {
   return {
     BigInt: new BigInt('bigInt'),

     TokenEvent: {
-      __resolveType: (obj) => {
+      __resolveType: (obj: any) => {
         if (obj.owner) {
           return 'ApprovalEvent';
         }
@@ -22,65 +21,65 @@ export const createResolvers = async (config) => {
     Query: {
-      totalSupply: (_, { blockHash, token }) => {
+      totalSupply: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
         log('totalSupply', blockHash, token);
         return {
           value: blocks[blockHash][token].totalSupply,
           proof: { data: '' }
-        }
+        };
       },

-      balanceOf: (_, { blockHash, token, owner }) => {
+      balanceOf: (_: any, { blockHash, token, owner }: { blockHash: string, token: string, owner: string }) => {
         log('balanceOf', blockHash, token, owner);
         return {
           value: blocks[blockHash][token].balanceOf[owner],
           proof: { data: '' }
-        }
+        };
       },

-      allowance: (_, { blockHash, token, owner, spender }) => {
+      allowance: (_: any, { blockHash, token, owner, spender }: { blockHash: string, token: string, owner: string, spender: string }) => {
         log('allowance', blockHash, token, owner, spender);
         return {
           value: blocks[blockHash][token].allowance[owner][spender],
           proof: { data: '' }
-        }
+        };
       },

-      name: (_, { blockHash, token }) => {
+      name: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
         log('name', blockHash, token);
         return {
           value: blocks[blockHash][token].name,
           proof: { data: '' }
-        }
+        };
       },

-      symbol: (_, { blockHash, token }) => {
+      symbol: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
         log('symbol', blockHash, token);
         return {
           value: blocks[blockHash][token].symbol,
           proof: { data: '' }
-        }
+        };
       },

-      decimals: (_, { blockHash, token }) => {
+      decimals: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
         log('decimals', blockHash, token);
         return {
           value: blocks[blockHash][token].decimals,
           proof: { data: '' }
-        }
+        };
       },

-      events: (_, { blockHash, token, name }) => {
+      events: (_: any, { blockHash, token, name }: { blockHash: string, token: string, name: string }) => {
         log('events', blockHash, token, name);
         return blocks[blockHash][token].events
-          .filter(e => !name || name === e.name)
-          .map(e => ({ 'event': e }));
+          .filter((e: any) => !name || name === e.name)
+          .map((e: any) => ({ event: e }));
       }
     }
   };


@@ -16,11 +16,16 @@ import {
 import { blocks, tokens as tokenInfo } from './data';

-const testCases = {
-  'balanceOf': [],
-  'allowance': [],
-  'events': [],
-  'tokens': []
+const testCases: {
+  balanceOf: any[],
+  allowance: any[],
+  events: any[],
+  tokens: any[]
+} = {
+  balanceOf: [],
+  allowance: [],
+  events: [],
+  tokens: []
 };

 const blockHashes = _.keys(blocks);
@@ -45,7 +50,7 @@ blockHashes.forEach(blockHash => {
   });

   // Balance test cases.
-  const balanceOfOwners = _.keys(tokenObj['balanceOf']);
+  const balanceOfOwners = _.keys(tokenObj.balanceOf);
   balanceOfOwners.forEach(owner => {
     testCases.balanceOf.push({
       blockHash,
@@ -56,7 +61,7 @@ blockHashes.forEach(blockHash => {
   });

   // Allowance test cases.
-  const allowanceOwners = _.keys(tokenObj['allowance']);
+  const allowanceOwners = _.keys(tokenObj.allowance);
   allowanceOwners.forEach(owner => {
     const allowanceObj = tokenObj.allowance[owner];
     const spenders = _.keys(allowanceObj);
@@ -74,8 +79,7 @@ blockHashes.forEach(blockHash => {
 });

 describe('server', () => {
-
-  const client = new GraphQLClient("http://localhost:3001/graphql");
+  const client = new GraphQLClient('http://localhost:3001/graphql');

   it('query token info', async () => {
     const tests = testCases.tokens;
@@ -144,10 +148,10 @@ describe('server', () => {
       const testCase = tests[i];
       const result = await client.request(queryEvents, testCase);

-      const resultEvents = result.events.map(record => record.event);
+      const resultEvents = result.events.map((record: any) => record.event);
       expect(resultEvents.length).to.equal(testCase.events.length);

-      resultEvents.forEach((resultEvent, index) => {
+      resultEvents.forEach((resultEvent: any, index: number) => {
         const { name, ...testCaseEvent } = testCase.events[index];

         if (name === 'Transfer') {

@@ -1,40 +1,50 @@
 import assert from 'assert';
 import BigInt from 'apollo-type-bigint';
 import debug from 'debug';
-import fs from 'fs-extra';
-import path from 'path';
-import "reflect-metadata";
+import 'reflect-metadata';
+import { ConnectionOptions } from 'typeorm';

-import { getCache } from '@vulcanize/cache';
+import { getCache, Config as CacheConfig } from '@vulcanize/cache';
 import { EthClient } from '@vulcanize/ipld-eth-client';

 import artifacts from './artifacts/ERC20.json';
-import { Indexer } from './indexer';
+import { Indexer, ValueResult } from './indexer';
 import { Database } from './database';

+export interface Config {
+  server: {
+    host: string;
+    port: string;
+  };
+  database: ConnectionOptions;
+  upstream: {
+    gqlEndpoint: string;
+    cache: CacheConfig
+  }
+}
+
 const log = debug('vulcanize:resolver');

-export const createResolvers = async (config) => {
+export const createResolvers = async (config: Config): Promise<any> => {
   const { upstream, database } = config;

   assert(database, 'Missing database config');

-  const ormConfig = {
+  const ormConfig: ConnectionOptions = {
     ...database,
     entities: [
-      "src/entity/**/*.ts"
+      'src/entity/**/*.ts'
     ],
     migrations: [
-      "src/migration/**/*.ts"
+      'src/migration/**/*.ts'
     ],
     subscribers: [
-      "src/subscriber/**/*.ts"
+      'src/subscriber/**/*.ts'
     ],
     cli: {
-      entitiesDir: "src/entity",
-      migrationsDir: "src/migration",
-      subscribersDir: "src/subscriber"
+      entitiesDir: 'src/entity',
+      migrationsDir: 'src/migration',
+      subscribersDir: 'src/subscriber'
     }
   };
@@ -47,6 +57,8 @@ export const createResolvers = async (config) => {
   assert(upstream, 'Missing upstream gqlEndpoint');

   const cache = await getCache(cacheConfig);
+  assert(cache, 'Missing cache');
+
   const ethClient = new EthClient({ gqlEndpoint, cache });

   const indexer = new Indexer(db, ethClient, artifacts);
@@ -55,7 +67,7 @@ export const createResolvers = async (config) => {
     BigInt: new BigInt('bigInt'),

     TokenEvent: {
-      __resolveType: (obj) => {
+      __resolveType: (obj: any) => {
         if (obj.owner) {
           return 'ApprovalEvent';
         }
@@ -66,37 +78,37 @@ export const createResolvers = async (config) => {
     Query: {
-      totalSupply: (_, { blockHash, token }) => {
+      totalSupply: (_: any, { blockHash, token }: { blockHash: string, token: string }): Promise<ValueResult> => {
         log('totalSupply', blockHash, token);
         return indexer.totalSupply(blockHash, token);
       },

-      balanceOf: async (_, { blockHash, token, owner }) => {
+      balanceOf: async (_: any, { blockHash, token, owner }: { blockHash: string, token: string, owner: string }) => {
         log('balanceOf', blockHash, token, owner);
         return indexer.balanceOf(blockHash, token, owner);
       },

-      allowance: async (_, { blockHash, token, owner, spender }) => {
+      allowance: async (_: any, { blockHash, token, owner, spender }: { blockHash: string, token: string, owner: string, spender: string }) => {
         log('allowance', blockHash, token, owner, spender);
         return indexer.allowance(blockHash, token, owner, spender);
       },

-      name: (_, { blockHash, token }) => {
+      name: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
         log('name', blockHash, token);
         return indexer.name(blockHash, token);
       },

-      symbol: (_, { blockHash, token }) => {
+      symbol: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
         log('symbol', blockHash, token);
         return indexer.symbol(blockHash, token);
       },

-      decimals: (_, { blockHash, token }) => {
+      decimals: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
         log('decimals', blockHash, token);
-        return indexer.decimals(blockHash, token);
+        return indexer.decimals();
       },

-      events: async (_, { blockHash, token, name }) => {
+      events: async (_: any, { blockHash, token, name }: { blockHash: string, token: string, name: string }) => {
         log('events', blockHash, token, name);
         return indexer.getEvents(blockHash, token, name);
       }


@@ -5,32 +5,32 @@ import fs from 'fs-extra';
 import path from 'path';
 import toml from 'toml';
 import yargs from 'yargs';
-import { hideBin } from 'yargs/helpers'
+import { hideBin } from 'yargs/helpers';
 import debug from 'debug';

 import { createSchema } from './gql';

 const log = debug('vulcanize:server');

-export const createServer = async () => {
-  const argv = yargs(hideBin(process.argv))
+export const createServer = async (): Promise<Application> => {
+  const argv = await yargs(hideBin(process.argv))
     .option('f', {
       alias: 'config-file',
       demandOption: true,
       describe: 'configuration file path (toml)',
       type: 'string'
     })
-    .argv
+    .argv;

-  const configFile = argv['configFile'];
+  const configFile = argv.f;
   const configFilePath = path.resolve(configFile);
   const fileExists = await fs.pathExists(configFilePath);
   if (!fileExists) {
     throw new Error(`Config file not found: ${configFilePath}`);
   }

-  var config = toml.parse(await fs.readFile(configFilePath, 'utf8'));
-  log("config", JSON.stringify(config, null, 2));
+  const config = toml.parse(await fs.readFile(configFilePath, 'utf8'));
+  log('config', JSON.stringify(config, null, 2));

   assert(config.server, 'Missing server config');
@@ -44,8 +44,8 @@ export const createServer = async () => {
     '/graphql',
     graphqlHTTP({
       schema,
-      graphiql: true,
-    }),
+      graphiql: true
+    })
   );

   app.get('/', (req: Request, res: Response) => {


@@ -0,0 +1,2 @@
// https://medium.com/@steveruiz/using-a-javascript-library-without-type-declarations-in-a-typescript-project-3643490015f3
declare module 'canonical-json'


@@ -0,0 +1,5 @@
{
"name": "common",
"version": "0.1.0",
"typings": "main.d.ts"
}
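The two files above add an ambient declaration for the untyped `canonical-json` package so it can be imported under strict mode; the declaration gives the whole module an implicit `any` type. A hedged usage sketch (the default-export stringify function is an assumption about the package's shape, not something shown in this commit):

// Hypothetical usage, not from the commit. With `declare module 'canonical-json'`
// in scope, this import compiles even though the package ships no types;
// the imported value is typed as `any`.
import canonicalStringify from 'canonical-json';

const serialized = canonicalStringify({ b: 1, a: 2 });
console.log(serialized);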


@@ -0,0 +1,77 @@
{
"compilerOptions": {
/* Visit https://aka.ms/tsconfig.json to read more about this file */
/* Basic Options */
// "incremental": true, /* Enable incremental compilation */
"target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
"lib": [ "ES5", "ES6" ], /* Specify library files to be included in the compilation. */
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */
"declaration": true, /* Generates corresponding '.d.ts' file. */
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
// "sourceMap": true, /* Generates corresponding '.map' file. */
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "dist", /* Redirect output structure to the directory. */
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "composite": true, /* Enable project compilation */
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
// "removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
"strict": true, /* Enable all strict type-checking options. */
// "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* Enable strict null checks. */
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
// "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
/* Additional Checks */
// "noUnusedLocals": true, /* Report errors on unused locals. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
// "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an 'override' modifier. */
// "noPropertyAccessFromIndexSignature": true, /* Require undeclared properties from index signatures to use element accesses. */
/* Module Resolution Options */
"moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
"typeRoots": [
"./src/types"
], /* List of folders to include type definitions from. */
// "types": [], /* Type declaration files to be included in compilation. */
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
"esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
/* Source Map Options */
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
/* Experimental Options */
"experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
/* Advanced Options */
"skipLibCheck": true, /* Skip type checking of declaration files. */
"forceConsistentCasingInFileNames": true, /* Disallow inconsistently-cased references to the same file. */
"resolveJsonModule": true /* Enabling the option allows importing JSON, and validating the types in that JSON file. */
},
"include": ["src"],
"exclude": ["dist"]
}
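The key switch above is "strict": true, which turns on noImplicitAny, strictNullChecks, strictPropertyInitialization and the other strict checks at once; that is what drives most of the changes in this commit (explicit parameter types, `Type | undefined` return types, and `!` assertions on entity fields). A small illustration, not from the commit, of the kind of error strictNullChecks surfaces:

// Hypothetical example, not from the commit.
function findUser (names: string[], name: string): string | undefined {
  return names.find(n => n === name);
}

const match = findUser(['alice', 'bob'], 'carol');
// console.log(match.length);    // compile error: 'match' is possibly 'undefined'
console.log(match?.length ?? 0); // OK: the undefined case is handled explicitly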