Mirror of https://github.com/cerc-io/watcher-ts (synced 2025-01-07 20:08:06 +00:00)
Implement typescript strict mode and ESLint in watcher package (#40)
* Set up typescript build and eslint.
* Automatic lint fixes.
* Fix argument type warnings.
* Fix function return types.
* Fix typescript errors in watcher.

Co-authored-by: nikugogoi <95nikass@gmail.com>
Parent: 3aaa9dd7f8
Commit: 945cba9392
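The pattern applied throughout the diff below is the usual one for turning on TypeScript strict mode: annotate parameters, declare explicit return types, and give loose results a named shape. A condensed sketch of that before/after pattern (not the watcher's actual implementation, just the shape the commit moves the code toward):

// Before (rejected once "strict"/noImplicitAny is on): parameters are implicitly `any`
// and the return shape is undeclared.
// async function balanceOf (blockHash, token, owner) { ... }

// After: explicit parameter types, an explicit return type, and a typed result shape.
interface ValueResult {
  value: string | BigInt;
  proof: { data: string };
}

export async function balanceOf (blockHash: string, token: string, owner: string): Promise<ValueResult> {
  return { value: '0', proof: { data: '' } };
}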
packages/cache/src/cache.ts (vendored, 2 changes)
@@ -8,7 +8,7 @@ import debug from 'debug';
 
 const log = debug('vulcanize:cache');
 
-interface Config {
+export interface Config {
   name: string;
   enabled: boolean;
   deleteOnStart: boolean;
packages/watcher/.eslintignore (new file, 5 lines)
@@ -0,0 +1,5 @@
+# Don't lint node_modules.
+node_modules
+
+# Don't lint build output.
+dist
packages/watcher/.eslintrc.json (new file, 27 lines)
@@ -0,0 +1,27 @@
+{
+  "env": {
+    "browser": true,
+    "es2021": true
+  },
+  "extends": [
+    "semistandard",
+    "plugin:@typescript-eslint/recommended"
+  ],
+  "parser": "@typescript-eslint/parser",
+  "parserOptions": {
+    "ecmaVersion": 12,
+    "sourceType": "module"
+  },
+  "plugins": [
+    "@typescript-eslint"
+  ],
+  "rules": {
+    "@typescript-eslint/no-explicit-any": "off",
+    "@typescript-eslint/explicit-module-boundary-types": [
+      "warn",
+      {
+        "allowArgumentsExplicitlyTypedAsAny": true
+      }
+    ]
+  }
+}
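With this config, `any` is tolerated but exported functions are expected to declare return types. A small illustrative sketch (the resolver-style function below is an example, not code from this commit), assuming the config above:

// Lints cleanly: `any` is allowed, and an argument explicitly typed as `any`
// satisfies explicit-module-boundary-types (allowArgumentsExplicitlyTypedAsAny).
export const resolveTokenEvent = (obj: any): string => {
  return obj.owner ? 'ApprovalEvent' : 'TransferEvent';
};

// This variant would emit a warning instead: an exported function without an
// explicit return type.
// export const resolveTokenEventLoose = (obj: any) => (obj.owner ? 'ApprovalEvent' : 'TransferEvent');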
packages/watcher/package.json
@@ -6,7 +6,9 @@
   "scripts": {
     "server": "DEBUG=vulcanize:* nodemon src/server.ts -f environments/local.toml",
     "server:mock": "MOCK=1 nodemon src/server.ts -f environments/local.toml",
-    "test": "mocha -r ts-node/register src/**/*.spec.ts"
+    "test": "mocha -r ts-node/register src/**/*.spec.ts",
+    "lint": "eslint .",
+    "build": "tsc"
   },
   "repository": {
     "type": "git",
@@ -50,6 +52,15 @@
     "@types/fs-extra": "^9.0.11",
     "@types/yargs": "^17.0.0",
     "nodemon": "^2.0.7",
-    "ts-node": "^10.0.0"
+    "ts-node": "^10.0.0",
+    "@typescript-eslint/eslint-plugin": "^4.25.0",
+    "@typescript-eslint/parser": "^4.25.0",
+    "eslint": "^7.27.0",
+    "eslint-config-semistandard": "^15.0.1",
+    "eslint-config-standard": "^16.0.3",
+    "eslint-plugin-import": "^2.23.3",
+    "eslint-plugin-node": "^11.1.0",
+    "eslint-plugin-promise": "^5.1.0",
+    "eslint-plugin-standard": "^5.0.0"
   }
 }
packages/watcher/src/database.ts
@@ -1,23 +1,22 @@
-import assert from "assert";
-import { Connection, createConnection } from "typeorm";
+import assert from 'assert';
+import { Connection, ConnectionOptions, createConnection, DeepPartial } from 'typeorm';
 import { SnakeNamingStrategy } from 'typeorm-naming-strategies';
 
-import { Allowance } from "./entity/Allowance";
-import { Balance } from "./entity/Balance";
-import { Event } from "./entity/Event";
-import { EventSyncProgress } from "./entity/EventProgress";
+import { Allowance } from './entity/Allowance';
+import { Balance } from './entity/Balance';
+import { Event } from './entity/Event';
+import { EventSyncProgress } from './entity/EventProgress';
 
 export class Database {
-  _config: any
-  _conn: Connection
+  _config: ConnectionOptions
+  _conn!: Connection
 
-  constructor(config) {
+  constructor (config: ConnectionOptions) {
     assert(config);
     this._config = config;
   }
 
-  async init() {
+  async init (): Promise<void> {
     assert(!this._conn);
 
     this._conn = await createConnection({
@@ -26,10 +25,14 @@ export class Database {
     });
   }
 
-  async getBalance({ blockHash, token, owner }) {
+  async getBalance ({ blockHash, token, owner }: { blockHash: string, token: string, owner: string }): Promise<Balance | undefined> {
+    if (!this._conn) {
+      return;
+    }
+
     return this._conn.getRepository(Balance)
-      .createQueryBuilder("balance")
-      .where("block_hash = :blockHash AND token = :token AND owner = :owner", {
+      .createQueryBuilder('balance')
+      .where('block_hash = :blockHash AND token = :token AND owner = :owner', {
         blockHash,
         token,
         owner
@@ -37,10 +40,14 @@ export class Database {
       .getOne();
   }
 
-  async getAllowance({ blockHash, token, owner, spender }) {
+  async getAllowance ({ blockHash, token, owner, spender }: { blockHash: string, token: string, owner: string, spender: string }): Promise<Allowance | undefined> {
+    if (!this._conn) {
+      return;
+    }
+
     return this._conn.getRepository(Allowance)
-      .createQueryBuilder("allowance")
-      .where("block_hash = :blockHash AND token = :token AND owner = :owner AND spender = :spender", {
+      .createQueryBuilder('allowance')
+      .where('block_hash = :blockHash AND token = :token AND owner = :owner AND spender = :spender', {
         blockHash,
         token,
         owner,
@@ -49,45 +56,61 @@ export class Database {
       .getOne();
   }
 
-  async saveBalance({ blockHash, token, owner, value, proof }) {
+  async saveBalance ({ blockHash, token, owner, value, proof }: DeepPartial<Balance>): Promise<Balance | undefined> {
+    if (!this._conn) {
+      return;
+    }
+
     const repo = this._conn.getRepository(Balance);
     const entity = repo.create({ blockHash, token, owner, value, proof });
     return repo.save(entity);
   }
 
-  async saveAllowance({ blockHash, token, owner, spender, value, proof }) {
+  async saveAllowance ({ blockHash, token, owner, spender, value, proof }: DeepPartial<Allowance>): Promise<Allowance | undefined> {
+    if (!this._conn) {
+      return;
+    }
+
     const repo = this._conn.getRepository(Allowance);
     const entity = repo.create({ blockHash, token, owner, spender, value, proof });
     return repo.save(entity);
   }
 
   // Returns true if events have already been synced for the (block, token) combination.
-  async didSyncEvents({ blockHash, token }) {
+  async didSyncEvents ({ blockHash, token }: { blockHash: string, token: string }): Promise<boolean | undefined> {
+    if (!this._conn) {
+      return;
+    }
+
     const numRows = await this._conn.getRepository(EventSyncProgress)
       .createQueryBuilder()
-      .where("block_hash = :blockHash AND token = :token", {
+      .where('block_hash = :blockHash AND token = :token', {
         blockHash,
-        token,
+        token
       })
       .getCount();
 
     return numRows > 0;
   }
 
-  async getEvents({ blockHash, token }) {
+  async getEvents ({ blockHash, token }: { blockHash: string, token: string }): Promise<Event[]> {
     return this._conn.getRepository(Event)
-      .createQueryBuilder("event")
-      .where("block_hash = :blockHash AND token = :token", {
+      .createQueryBuilder('event')
+      .where('block_hash = :blockHash AND token = :token', {
         blockHash,
-        token,
+        token
      })
       .getMany();
   }
 
-  async getEventsByName({ blockHash, token, eventName }) {
+  async getEventsByName ({ blockHash, token, eventName }: { blockHash: string, token: string, eventName: string }): Promise<Event[] | undefined> {
+    if (!this._conn) {
+      return;
+    }
+
     return this._conn.getRepository(Event)
-      .createQueryBuilder("event")
-      .where("block_hash = :blockHash AND token = :token AND :eventName = :eventName", {
+      .createQueryBuilder('event')
+      .where('block_hash = :blockHash AND token = :token AND :eventName = :eventName', {
         blockHash,
         token,
         eventName
@@ -95,7 +118,11 @@ export class Database {
       .getMany();
   }
 
-  async saveEvents({ blockHash, token, events }) {
+  async saveEvents ({ blockHash, token, events }: { blockHash: string, token: string, events: DeepPartial<Event>[] }): Promise<void> {
+    if (!this._conn) {
+      return;
+    }
+
     // TODO: Using the same connection doesn't work when > 1 inserts are attempted at the same time (e.g. simultaneous GQL requests).
 
     // In a transaction:
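The read methods now return results like Promise<Balance | undefined>, so callers must narrow before dereferencing. A hedged usage sketch (the connection values below are placeholders; the real ones come from environments/local.toml):

import { Database } from './database';

// Illustrative connection options.
const db = new Database({ type: 'postgres', host: 'localhost', database: 'erc20-watcher' });

export const printBalance = async (blockHash: string, token: string, owner: string): Promise<void> => {
  await db.init();

  const balance = await db.getBalance({ blockHash, token, owner });

  // The result is Balance | undefined, so strict null checks force a narrow here.
  if (balance) {
    console.log(balance.value.toString(), JSON.parse(balance.proof));
  }
};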
packages/watcher/src/entity/Allowance.ts
@@ -1,27 +1,26 @@
-import { Entity, PrimaryGeneratedColumn, Column, Index } from "typeorm";
+import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';
 
 @Entity()
-@Index(["blockHash", "token", "owner", "spender"], { unique: true })
+@Index(['blockHash', 'token', 'owner', 'spender'], { unique: true })
 export class Allowance {
   @PrimaryGeneratedColumn()
-  id: number;
+  id!: number;
 
-  @Column("varchar", { length: 66 })
-  blockHash: string;
+  @Column('varchar', { length: 66 })
+  blockHash!: string;
 
-  @Column("varchar", { length: 42 })
-  token: string;
+  @Column('varchar', { length: 42 })
+  token!: string;
 
-  @Column("varchar", { length: 42 })
-  owner: string;
+  @Column('varchar', { length: 42 })
+  owner!: string;
 
-  @Column("varchar", { length: 42 })
-  spender: string;
+  @Column('varchar', { length: 42 })
+  spender!: string;
 
-  @Column("numeric")
-  value: number;
+  @Column('numeric')
+  value!: BigInt;
 
-  @Column("text")
-  proof: string;
+  @Column('text')
+  proof!: string;
 }
packages/watcher/src/entity/Balance.ts
@@ -1,24 +1,23 @@
-import { Entity, PrimaryGeneratedColumn, Column, Index } from "typeorm";
+import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';
 
 @Entity()
-@Index(["blockHash", "token", "owner"], { unique: true })
+@Index(['blockHash', 'token', 'owner'], { unique: true })
 export class Balance {
   @PrimaryGeneratedColumn()
-  id: number;
+  id!: number;
 
-  @Column("varchar", { length: 66 })
-  blockHash: string;
+  @Column('varchar', { length: 66 })
+  blockHash!: string;
 
-  @Column("varchar", { length: 42 })
-  token: string;
+  @Column('varchar', { length: 42 })
+  token!: string;
 
-  @Column("varchar", { length: 42 })
-  owner: string;
+  @Column('varchar', { length: 42 })
+  owner!: string;
 
-  @Column("numeric")
-  value: number;
+  @Column('numeric')
+  value!: BigInt;
 
-  @Column("text")
-  proof: string;
+  @Column('text')
+  proof!: string;
 }
packages/watcher/src/entity/Event.ts
@@ -1,46 +1,45 @@
-import { Entity, PrimaryGeneratedColumn, Column, Index } from "typeorm";
+import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';
 
 @Entity()
 // Index to query all events for a contract efficiently.
-@Index(["blockHash", "token"])
+@Index(['blockHash', 'token'])
 // Index to query 'Transfer' events efficiently.
-@Index(["blockHash", "token", "eventName", "transferFrom", "transferTo"])
+@Index(['blockHash', 'token', 'eventName', 'transferFrom', 'transferTo'])
 // Index to query 'Approval' events efficiently.
-@Index(["blockHash", "token", "eventName", "approvalOwner", "approvalSpender"])
+@Index(['blockHash', 'token', 'eventName', 'approvalOwner', 'approvalSpender'])
 export class Event {
   @PrimaryGeneratedColumn()
-  id: number;
+  id!: number;
 
-  @Column("varchar", { length: 66 })
-  blockHash: string;
+  @Column('varchar', { length: 66 })
+  blockHash!: string;
 
-  @Column("varchar", { length: 42 })
-  token: string;
+  @Column('varchar', { length: 42 })
+  token!: string;
 
-  @Column("varchar", { length: 256 })
-  eventName: string;
+  @Column('varchar', { length: 256 })
+  eventName!: string;
 
-  @Column("text")
-  proof: string;
+  @Column('text')
+  proof!: string;
 
   // Transfer event columns.
-  @Column("varchar", { length: 42, nullable: true })
-  transferFrom: string;
+  @Column('varchar', { length: 42, nullable: true })
+  transferFrom!: string;
 
-  @Column("varchar", { length: 42, nullable: true })
-  transferTo: string;
+  @Column('varchar', { length: 42, nullable: true })
+  transferTo!: string;
 
-  @Column("numeric", { nullable: true })
-  transferValue: number;
+  @Column('numeric', { nullable: true })
+  transferValue!: BigInt;
 
   // Approval event columns.
-  @Column("varchar", { length: 42, nullable: true })
-  approvalOwner: string;
+  @Column('varchar', { length: 42, nullable: true })
+  approvalOwner!: string;
 
-  @Column("varchar", { length: 42, nullable: true })
-  approvalSpender: string;
+  @Column('varchar', { length: 42, nullable: true })
+  approvalSpender!: string;
 
-  @Column("numeric", { nullable: true })
-  approvalValue: number;
+  @Column('numeric', { nullable: true })
+  approvalValue!: BigInt;
 }
packages/watcher/src/entity/EventProgress.ts
@@ -1,4 +1,4 @@
-import { Entity, PrimaryGeneratedColumn, Column, Index } from "typeorm";
+import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';
 
 // Stores a row if events for a (block, token) combination have already been fetched.
 //
@@ -7,15 +7,14 @@ import { Entity, PrimaryGeneratedColumn, Column, Index } from "typeorm";
 // yet been synced from upstream.
 //
 @Entity()
-@Index(["blockHash", "token"], { unique: true })
+@Index(['blockHash', 'token'], { unique: true })
 export class EventSyncProgress {
   @PrimaryGeneratedColumn()
-  id: number;
+  id!: number;
 
-  @Column("varchar", { length: 66 })
-  blockHash: string;
+  @Column('varchar', { length: 66 })
+  blockHash!: string;
 
-  @Column("varchar", { length: 42 })
-  token: string;
+  @Column('varchar', { length: 42 })
+  token!: string;
 }
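All entity columns gain a definite assignment assertion (`!`) because TypeORM populates them at runtime, which strictPropertyInitialization cannot see, and the Postgres 'numeric' columns are retyped as BigInt rather than number. A minimal entity outside this diff showing the same pattern:

import { Entity, PrimaryGeneratedColumn, Column } from 'typeorm';

@Entity()
export class Example {
  // Without `!`, strict mode reports: "Property 'id' has no initializer and is
  // not definitely assigned in the constructor."
  @PrimaryGeneratedColumn()
  id!: number;

  // 'numeric' columns are wider than a JS number, hence the BigInt-typed value.
  @Column('numeric')
  value!: BigInt;
}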
packages/watcher/src/gql.ts
@@ -1,12 +1,13 @@
 import 'graphql-import-node';
 import { makeExecutableSchema } from '@graphql-tools/schema';
+import { GraphQLSchema } from 'graphql';
 
 import * as typeDefs from './erc20.graphql';
 import { createResolvers as createMockResolvers } from './mock/resolvers';
-import { createResolvers } from './resolvers';
+import { Config, createResolvers } from './resolvers';
 
-export const createSchema = async (config) => {
-  const resolvers = process.env.MOCK ? await createMockResolvers(config) : await createResolvers(config);
+export const createSchema = async (config: Config): Promise<GraphQLSchema> => {
+  const resolvers = process.env.MOCK ? await createMockResolvers() : await createResolvers(config);
 
   return makeExecutableSchema({
     typeDefs,
packages/watcher/src/indexer.ts
@@ -1,24 +1,50 @@
-import assert from "assert";
+import assert from 'assert';
 import debug from 'debug';
-import { invert } from "lodash";
+import { invert } from 'lodash';
+import { JsonFragment } from '@ethersproject/abi';
+import { DeepPartial } from 'typeorm';
 
-import { EthClient, getMappingSlot, topictoAddress } from "@vulcanize/ipld-eth-client";
-import { getStorageInfo, getEventNameTopics, getStorageValue, GetStorageAt } from '@vulcanize/solidity-mapper';
+import { EthClient, getMappingSlot, topictoAddress } from '@vulcanize/ipld-eth-client';
+import { getStorageInfo, getEventNameTopics, getStorageValue, GetStorageAt, StorageLayout } from '@vulcanize/solidity-mapper';
 
 import { Database } from './database';
 import { Event } from './entity/Event';
 
 const log = debug('vulcanize:indexer');
 
-export class Indexer {
+interface Artifacts {
+  abi: JsonFragment[];
+  storageLayout: StorageLayout;
+}
+
+export interface ValueResult {
+  value: string | BigInt;
+  proof: {
+    data: string;
+  }
+}
+
+type EventsResult = Array<{
+  event: {
+    from?: string;
+    to?: string;
+    owner?: string;
+    spender?: string;
+    value?: BigInt;
+    __typename: string;
+  }
+  proof: string;
+}>
+
+export class Indexer {
   _db: Database
   _ethClient: EthClient
   _getStorageAt: GetStorageAt
 
-  _abi: any
-  _storageLayout: any
+  _abi: JsonFragment[]
+  _storageLayout: StorageLayout
 
-  constructor(db, ethClient, artifacts) {
+  constructor (db: Database, ethClient: EthClient, artifacts: Artifacts) {
     assert(db);
     assert(ethClient);
     assert(artifacts);
@@ -36,7 +62,7 @@ export class Indexer {
     this._storageLayout = storageLayout;
   }
 
-  async totalSupply(blockHash, token) {
+  async totalSupply (blockHash: string, token: string): Promise<ValueResult> {
     // TODO: Use getStorageValue when it supports uint256 values.
     const { slot } = getStorageInfo(this._storageLayout, '_totalSupply');
 
@@ -52,13 +78,13 @@ export class Indexer {
     return result;
   }
 
-  async balanceOf(blockHash, token, owner) {
+  async balanceOf (blockHash: string, token: string, owner: string): Promise<ValueResult> {
     const entity = await this._db.getBalance({ blockHash, token, owner });
     if (entity) {
       return {
         value: entity.value,
         proof: JSON.parse(entity.proof)
-      }
+      };
     }
 
     // TODO: Use getStorageValue when it supports mappings.
@@ -80,13 +106,13 @@ export class Indexer {
     return result;
   }
 
-  async allowance(blockHash, token, owner, spender) {
+  async allowance (blockHash: string, token: string, owner: string, spender: string): Promise<ValueResult> {
     const entity = await this._db.getAllowance({ blockHash, token, owner, spender });
     if (entity) {
       return {
         value: entity.value,
         proof: JSON.parse(entity.proof)
-      }
+      };
     }
 
     // TODO: Use getStorageValue when it supports nested mappings.
@@ -108,7 +134,7 @@ export class Indexer {
     return result;
   }
 
-  async name(blockHash, token) {
+  async name (blockHash: string, token: string): Promise<ValueResult> {
     const result = await this._getStorageValue(blockHash, token, '_name');
 
     log(JSON.stringify(result, null, 2));
@@ -116,7 +142,7 @@ export class Indexer {
     return result;
   }
 
-  async symbol(blockHash, token) {
+  async symbol (blockHash: string, token: string): Promise<ValueResult> {
     const result = await this._getStorageValue(blockHash, token, '_symbol');
 
     log(JSON.stringify(result, null, 2));
@@ -124,14 +150,14 @@ export class Indexer {
     return result;
   }
 
-  async decimals(blockHash, token) {
+  async decimals (): Promise<void> {
     // Not a state variable, uses hardcoded return value in contract function.
     // See https://github.com/OpenZeppelin/openzeppelin-contracts/blob/master/contracts/token/ERC20/ERC20.sol#L86
 
     throw new Error('Not implemented.');
   }
 
-  async getEvents(blockHash, token, name) {
+  async getEvents (blockHash: string, token: string, name: string): Promise<EventsResult> {
     const didSyncEvents = await this._db.didSyncEvents({ blockHash, token });
     if (!didSyncEvents) {
       // Fetch and save events first and make a note in the event sync progress table.
@@ -147,21 +173,27 @@ export class Indexer {
       // TODO: Filter using db WHERE condition when name is not empty.
       .filter(event => !name || name === event.eventName)
       .map(e => {
-        const eventFields = {};
+        const eventFields: {
+          from?: string,
+          to?: string,
+          value?: BigInt,
+          owner?: string,
+          spender?: string,
+        } = {};
 
         switch (e.eventName) {
          case 'Transfer': {
-            eventFields['from'] = e.transferFrom;
-            eventFields['to'] = e.transferTo;
-            eventFields['value'] = e.transferValue;
+            eventFields.from = e.transferFrom;
+            eventFields.to = e.transferTo;
+            eventFields.value = e.transferValue;
             break;
-          };
+          }
          case 'Approval': {
-            eventFields['owner'] = e.approvalOwner;
-            eventFields['spender'] = e.approvalSpender;
-            eventFields['value'] = e.approvalValue;
+            eventFields.owner = e.approvalOwner;
+            eventFields.spender = e.approvalSpender;
+            eventFields.value = e.approvalValue;
             break;
-          };
+          }
         }
 
         return {
@@ -171,7 +203,7 @@ export class Indexer {
         },
         // TODO: Return proof only if requested.
         proof: JSON.parse(e.proof)
-      }
+      };
     });
 
     log(JSON.stringify(result, null, 2));
@@ -180,7 +212,7 @@ export class Indexer {
   }
 
   // TODO: Move into base/class or framework package.
-  async _getStorageValue(blockHash, token, variable) {
+  async _getStorageValue (blockHash: string, token: string, variable: string): Promise<ValueResult> {
     return getStorageValue(
       this._storageLayout,
       this._getStorageAt,
@@ -190,14 +222,14 @@ export class Indexer {
     );
   }
 
-  async _fetchAndSaveEvents({ blockHash, token }) {
+  async _fetchAndSaveEvents ({ blockHash, token }: { blockHash: string, token: string }): Promise<void> {
     const logs = await this._ethClient.getLogs({ blockHash, contract: token });
     log(JSON.stringify(logs, null, 2));
 
     const eventNameToTopic = getEventNameTopics(this._abi);
     const logTopicToEventName = invert(eventNameToTopic);
 
-    const dbEvents = logs.map(log => {
+    const dbEvents = logs.map((log: any) => {
       const { topics, data: value, cid, ipldBlock } = log;
 
       const [topic0, topic1, topic2] = topics;
@@ -206,7 +238,7 @@ export class Indexer {
       const address1 = topictoAddress(topic1);
       const address2 = topictoAddress(topic2);
 
-      const event = {
+      const event: DeepPartial<Event> = {
         blockHash,
         token,
         eventName,
@@ -219,22 +251,22 @@ export class Indexer {
             ipldBlock
           }
         })
        }),
        })
      };
 
       switch (eventName) {
         case 'Transfer': {
-          event['transferFrom'] = address1;
-          event['transferTo'] = address2;
-          event['transferValue'] = BigInt(value);
+          event.transferFrom = address1;
+          event.transferTo = address2;
+          event.transferValue = BigInt(value);
           break;
-        };
+        }
         case 'Approval': {
-          event['approvalOwner'] = address1;
-          event['approvalSpender'] = address2;
-          event['approvalValue'] = BigInt(value);
+          event.approvalOwner = address1;
+          event.approvalSpender = address2;
+          event.approvalValue = BigInt(value);
          break;
-        };
+        }
       }
 
       return event;
@@ -242,4 +274,4 @@ export class Indexer {
 
     await this._db.saveEvents({ blockHash, token, events: dbEvents });
   }
 }
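The indexer changes replace dynamic bracket assignments (eventFields['from'] = ...) with a typed object and dot access, which is the usual way to keep noImplicitAny happy without an index signature. A standalone sketch of that pattern, not the indexer's actual helper:

// All fields optional, so the switch can fill in only what applies to each event kind.
interface EventFields {
  from?: string;
  to?: string;
  owner?: string;
  spender?: string;
  value?: BigInt;
}

export const toEventFields = (eventName: string, address1: string, address2: string, value: BigInt): EventFields => {
  const fields: EventFields = {};

  switch (eventName) {
    case 'Transfer':
      fields.from = address1;
      fields.to = address2;
      fields.value = value;
      break;
    case 'Approval':
      fields.owner = address1;
      fields.spender = address2;
      fields.value = value;
      break;
  }

  return fields;
};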
packages/watcher/src/mock/data.ts
@@ -1,6 +1,6 @@
 // TODO: Pull mock data for 5 tokens from rinkeby.
 
-export const tokens = {
+export const tokens: {[address: string]: {[variable: string]: string}} = {
   '0xd87fea54f506972e3267239ec8e159548892074a': {
     name: 'ChainLink Token',
     symbol: 'LINK',
@@ -9,7 +9,7 @@ export const tokens = {
   }
 };
 
-export const blocks = {
+export const blocks: {[blockHash: string]: {[address: string]: any}} = {
   // Block hash.
   '0x77b5479a5856dd8ec63df6aabf9ce0913071a6dda3a3d54f3c9c940574bcb8ab': {
packages/watcher/src/mock/resolvers.ts
@@ -5,13 +5,12 @@ import { blocks } from './data';
 
 const log = debug('test');
 
-export const createResolvers = async (config) => {
+export const createResolvers = async (): Promise<any> => {
   return {
     BigInt: new BigInt('bigInt'),
 
     TokenEvent: {
-      __resolveType: (obj) => {
+      __resolveType: (obj: any) => {
         if (obj.owner) {
           return 'ApprovalEvent';
         }
@@ -22,65 +21,65 @@ export const createResolvers = async (config) => {
 
     Query: {
-      totalSupply: (_, { blockHash, token }) => {
+      totalSupply: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
        log('totalSupply', blockHash, token);
 
         return {
           value: blocks[blockHash][token].totalSupply,
           proof: { data: '' }
-        }
+        };
       },
 
-      balanceOf: (_, { blockHash, token, owner }) => {
+      balanceOf: (_: any, { blockHash, token, owner }: { blockHash: string, token: string, owner: string }) => {
        log('balanceOf', blockHash, token, owner);
 
         return {
           value: blocks[blockHash][token].balanceOf[owner],
           proof: { data: '' }
-        }
+        };
       },
 
-      allowance: (_, { blockHash, token, owner, spender }) => {
+      allowance: (_: any, { blockHash, token, owner, spender }: { blockHash: string, token: string, owner: string, spender: string }) => {
        log('allowance', blockHash, token, owner, spender);
 
         return {
           value: blocks[blockHash][token].allowance[owner][spender],
           proof: { data: '' }
-        }
+        };
       },
 
-      name: (_, { blockHash, token }) => {
+      name: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
        log('name', blockHash, token);
 
         return {
           value: blocks[blockHash][token].name,
           proof: { data: '' }
-        }
+        };
       },
 
-      symbol: (_, { blockHash, token }) => {
+      symbol: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
        log('symbol', blockHash, token);
 
         return {
           value: blocks[blockHash][token].symbol,
           proof: { data: '' }
-        }
+        };
       },
 
-      decimals: (_, { blockHash, token }) => {
+      decimals: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
        log('decimals', blockHash, token);
 
         return {
           value: blocks[blockHash][token].decimals,
           proof: { data: '' }
-        }
+        };
       },
 
-      events: (_, { blockHash, token, name }) => {
+      events: (_: any, { blockHash, token, name }: { blockHash: string, token: string, name: string }) => {
        log('events', blockHash, token, name);
        return blocks[blockHash][token].events
-          .filter(e => !name || name === e.name)
-          .map(e => ({ 'event': e }));
+          .filter((e: any) => !name || name === e.name)
+          .map((e: any) => ({ event: e }));
       }
     }
   };
packages/watcher server test spec (src/**/*.spec.ts)
@@ -16,11 +16,16 @@ import {
 
 import { blocks, tokens as tokenInfo } from './data';
 
-const testCases = {
-  'balanceOf': [],
-  'allowance': [],
-  'events': [],
-  'tokens': []
+const testCases: {
+  balanceOf: any[],
+  allowance: any[],
+  events: any[],
+  tokens: any[]
+} = {
+  balanceOf: [],
+  allowance: [],
+  events: [],
+  tokens: []
 };
 
 const blockHashes = _.keys(blocks);
@@ -45,7 +50,7 @@ blockHashes.forEach(blockHash => {
   });
 
   // Balance test cases.
-  const balanceOfOwners = _.keys(tokenObj['balanceOf']);
+  const balanceOfOwners = _.keys(tokenObj.balanceOf);
   balanceOfOwners.forEach(owner => {
     testCases.balanceOf.push({
       blockHash,
@@ -56,7 +61,7 @@ blockHashes.forEach(blockHash => {
   });
 
   // Allowance test cases.
-  const allowanceOwners = _.keys(tokenObj['allowance']);
+  const allowanceOwners = _.keys(tokenObj.allowance);
   allowanceOwners.forEach(owner => {
     const allowanceObj = tokenObj.allowance[owner];
     const spenders = _.keys(allowanceObj);
@@ -74,8 +79,7 @@ blockHashes.forEach(blockHash => {
 });
 
 describe('server', () => {
-  const client = new GraphQLClient("http://localhost:3001/graphql");
+  const client = new GraphQLClient('http://localhost:3001/graphql');
 
   it('query token info', async () => {
     const tests = testCases.tokens;
@@ -144,10 +148,10 @@ describe('server', () => {
       const testCase = tests[i];
       const result = await client.request(queryEvents, testCase);
 
-      const resultEvents = result.events.map(record => record.event);
+      const resultEvents = result.events.map((record: any) => record.event);
       expect(resultEvents.length).to.equal(testCase.events.length);
 
-      resultEvents.forEach((resultEvent, index) => {
+      resultEvents.forEach((resultEvent: any, index: number) => {
        const { name, ...testCaseEvent } = testCase.events[index];
 
        if (name === 'Transfer') {
packages/watcher/src/resolvers.ts
@@ -1,40 +1,50 @@
 import assert from 'assert';
 import BigInt from 'apollo-type-bigint';
 import debug from 'debug';
 import fs from 'fs-extra';
 import path from 'path';
-import "reflect-metadata";
+import 'reflect-metadata';
+import { ConnectionOptions } from 'typeorm';
 
-import { getCache } from '@vulcanize/cache';
+import { getCache, Config as CacheConfig } from '@vulcanize/cache';
 import { EthClient } from '@vulcanize/ipld-eth-client';
 
 import artifacts from './artifacts/ERC20.json';
-import { Indexer } from './indexer';
+import { Indexer, ValueResult } from './indexer';
 import { Database } from './database';
 
+export interface Config {
+  server: {
+    host: string;
+    port: string;
+  };
+  database: ConnectionOptions;
+  upstream: {
+    gqlEndpoint: string;
+    cache: CacheConfig
+  }
+}
+
 const log = debug('vulcanize:resolver');
 
-export const createResolvers = async (config) => {
+export const createResolvers = async (config: Config): Promise<any> => {
   const { upstream, database } = config;
 
   assert(database, 'Missing database config');
 
-  const ormConfig = {
+  const ormConfig: ConnectionOptions = {
     ...database,
     entities: [
-      "src/entity/**/*.ts"
+      'src/entity/**/*.ts'
     ],
     migrations: [
-      "src/migration/**/*.ts"
+      'src/migration/**/*.ts'
     ],
     subscribers: [
-      "src/subscriber/**/*.ts"
+      'src/subscriber/**/*.ts'
     ],
     cli: {
-      entitiesDir: "src/entity",
-      migrationsDir: "src/migration",
-      subscribersDir: "src/subscriber"
+      entitiesDir: 'src/entity',
+      migrationsDir: 'src/migration',
+      subscribersDir: 'src/subscriber'
     }
   };
 
@@ -47,6 +57,8 @@ export const createResolvers = async (config) => {
   assert(upstream, 'Missing upstream gqlEndpoint');
 
   const cache = await getCache(cacheConfig);
+  assert(cache, 'Missing cache');
+
   const ethClient = new EthClient({ gqlEndpoint, cache });
 
   const indexer = new Indexer(db, ethClient, artifacts);
@@ -55,7 +67,7 @@ export const createResolvers = async (config) => {
     BigInt: new BigInt('bigInt'),
 
     TokenEvent: {
-      __resolveType: (obj) => {
+      __resolveType: (obj: any) => {
        if (obj.owner) {
          return 'ApprovalEvent';
        }
@@ -66,37 +78,37 @@ export const createResolvers = async (config) => {
 
     Query: {
-      totalSupply: (_, { blockHash, token }) => {
+      totalSupply: (_: any, { blockHash, token }: { blockHash: string, token: string }): Promise<ValueResult> => {
        log('totalSupply', blockHash, token);
        return indexer.totalSupply(blockHash, token);
       },
 
-      balanceOf: async (_, { blockHash, token, owner }) => {
+      balanceOf: async (_: any, { blockHash, token, owner }: { blockHash: string, token: string, owner: string }) => {
        log('balanceOf', blockHash, token, owner);
        return indexer.balanceOf(blockHash, token, owner);
       },
 
-      allowance: async (_, { blockHash, token, owner, spender }) => {
+      allowance: async (_: any, { blockHash, token, owner, spender }: { blockHash: string, token: string, owner: string, spender: string }) => {
        log('allowance', blockHash, token, owner, spender);
        return indexer.allowance(blockHash, token, owner, spender);
       },
 
-      name: (_, { blockHash, token }) => {
+      name: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
        log('name', blockHash, token);
        return indexer.name(blockHash, token);
       },
 
-      symbol: (_, { blockHash, token }) => {
+      symbol: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
        log('symbol', blockHash, token);
        return indexer.symbol(blockHash, token);
       },
 
-      decimals: (_, { blockHash, token }) => {
+      decimals: (_: any, { blockHash, token }: { blockHash: string, token: string }) => {
        log('decimals', blockHash, token);
-        return indexer.decimals(blockHash, token);
+        return indexer.decimals();
       },
 
-      events: async (_, { blockHash, token, name }) => {
+      events: async (_: any, { blockHash, token, name }: { blockHash: string, token: string, name: string }) => {
        log('events', blockHash, token, name);
        return indexer.getEvents(blockHash, token, name);
       }
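The new exported Config interface ties the diff together: database is a TypeORM ConnectionOptions, and upstream.cache reuses the Config now exported from packages/cache (the first hunk above). A hedged sketch of a value that satisfies it, with made-up hosts and endpoints; in the running server this object comes from the parsed TOML config file:

import { Config } from './resolvers';

export const config: Config = {
  server: { host: '127.0.0.1', port: '3001' },
  database: { type: 'postgres', host: 'localhost', database: 'erc20-watcher' },
  upstream: {
    gqlEndpoint: 'http://localhost:8083/graphql',
    // Matches the cache Config shape: { name, enabled, deleteOnStart }.
    cache: { name: 'requests', enabled: true, deleteOnStart: false }
  }
};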
packages/watcher/src/server.ts
@@ -5,32 +5,32 @@ import fs from 'fs-extra';
 import path from 'path';
 import toml from 'toml';
 import yargs from 'yargs';
-import { hideBin } from 'yargs/helpers'
+import { hideBin } from 'yargs/helpers';
 import debug from 'debug';
 
 import { createSchema } from './gql';
 
 const log = debug('vulcanize:server');
 
-export const createServer = async () => {
-  const argv = yargs(hideBin(process.argv))
+export const createServer = async (): Promise<Application> => {
+  const argv = await yargs(hideBin(process.argv))
     .option('f', {
       alias: 'config-file',
       demandOption: true,
       describe: 'configuration file path (toml)',
       type: 'string'
     })
-    .argv
+    .argv;
 
-  const configFile = argv['configFile'];
+  const configFile = argv.f;
   const configFilePath = path.resolve(configFile);
   const fileExists = await fs.pathExists(configFilePath);
   if (!fileExists) {
     throw new Error(`Config file not found: ${configFilePath}`);
   }
 
-  var config = toml.parse(await fs.readFile(configFilePath, 'utf8'));
-  log("config", JSON.stringify(config, null, 2));
+  const config = toml.parse(await fs.readFile(configFilePath, 'utf8'));
+  log('config', JSON.stringify(config, null, 2));
 
   assert(config.server, 'Missing server config');
 
@@ -44,8 +44,8 @@ export const createServer = async () => {
     '/graphql',
     graphqlHTTP({
       schema,
-      graphiql: true,
-    }),
+      graphiql: true
+    })
   );
 
   app.get('/', (req: Request, res: Response) => {
packages/watcher/src/types/common/main.d.ts (new file, vendored, 2 lines)
@@ -0,0 +1,2 @@
+// https://medium.com/@steveruiz/using-a-javascript-library-without-type-declarations-in-a-typescript-project-3643490015f3
+declare module 'canonical-json'
packages/watcher/src/types/common/package.json (new file, 5 lines)
@@ -0,0 +1,5 @@
+{
+  "name": "common",
+  "version": "0.1.0",
+  "typings": "main.d.ts"
+}
packages/watcher/tsconfig.json (new file, 77 lines)
@@ -0,0 +1,77 @@
+{
+  "compilerOptions": {
+    /* Visit https://aka.ms/tsconfig.json to read more about this file */
+
+    /* Basic Options */
+    // "incremental": true,  /* Enable incremental compilation */
+    "target": "es5",  /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */
+    "module": "commonjs",  /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
+    "lib": [ "ES5", "ES6" ],  /* Specify library files to be included in the compilation. */
+    // "allowJs": true,  /* Allow javascript files to be compiled. */
+    // "checkJs": true,  /* Report errors in .js files. */
+    // "jsx": "preserve",  /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */
+    "declaration": true,  /* Generates corresponding '.d.ts' file. */
+    // "declarationMap": true,  /* Generates a sourcemap for each corresponding '.d.ts' file. */
+    // "sourceMap": true,  /* Generates corresponding '.map' file. */
+    // "outFile": "./",  /* Concatenate and emit output to single file. */
+    "outDir": "dist",  /* Redirect output structure to the directory. */
+    // "rootDir": "./",  /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
+    // "composite": true,  /* Enable project compilation */
+    // "tsBuildInfoFile": "./",  /* Specify file to store incremental compilation information */
+    // "removeComments": true,  /* Do not emit comments to output. */
+    // "noEmit": true,  /* Do not emit outputs. */
+    // "importHelpers": true,  /* Import emit helpers from 'tslib'. */
+    // "downlevelIteration": true,  /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
+    // "isolatedModules": true,  /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
+
+    /* Strict Type-Checking Options */
+    "strict": true,  /* Enable all strict type-checking options. */
+    // "noImplicitAny": true,  /* Raise error on expressions and declarations with an implied 'any' type. */
+    // "strictNullChecks": true,  /* Enable strict null checks. */
+    // "strictFunctionTypes": true,  /* Enable strict checking of function types. */
+    // "strictBindCallApply": true,  /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
+    // "strictPropertyInitialization": true,  /* Enable strict checking of property initialization in classes. */
+    // "noImplicitThis": true,  /* Raise error on 'this' expressions with an implied 'any' type. */
+    // "alwaysStrict": true,  /* Parse in strict mode and emit "use strict" for each source file. */
+
+    /* Additional Checks */
+    // "noUnusedLocals": true,  /* Report errors on unused locals. */
+    // "noUnusedParameters": true,  /* Report errors on unused parameters. */
+    // "noImplicitReturns": true,  /* Report error when not all code paths in function return a value. */
+    // "noFallthroughCasesInSwitch": true,  /* Report errors for fallthrough cases in switch statement. */
+    // "noUncheckedIndexedAccess": true,  /* Include 'undefined' in index signature results */
+    // "noImplicitOverride": true,  /* Ensure overriding members in derived classes are marked with an 'override' modifier. */
+    // "noPropertyAccessFromIndexSignature": true,  /* Require undeclared properties from index signatures to use element accesses. */
+
+    /* Module Resolution Options */
+    "moduleResolution": "node",  /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
+    // "baseUrl": "./",  /* Base directory to resolve non-absolute module names. */
+    // "paths": {},  /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
+    // "rootDirs": [],  /* List of root folders whose combined content represents the structure of the project at runtime. */
+    "typeRoots": [
+      "./src/types"
+    ],  /* List of folders to include type definitions from. */
+    // "types": [],  /* Type declaration files to be included in compilation. */
+    // "allowSyntheticDefaultImports": true,  /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
+    "esModuleInterop": true,  /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
+    // "preserveSymlinks": true,  /* Do not resolve the real path of symlinks. */
+    // "allowUmdGlobalAccess": true,  /* Allow accessing UMD globals from modules. */
+
+    /* Source Map Options */
+    // "sourceRoot": "",  /* Specify the location where debugger should locate TypeScript files instead of source locations. */
+    // "mapRoot": "",  /* Specify the location where debugger should locate map files instead of generated locations. */
+    // "inlineSourceMap": true,  /* Emit a single file with source maps instead of having a separate file. */
+    // "inlineSources": true,  /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
+
+    /* Experimental Options */
+    "experimentalDecorators": true,  /* Enables experimental support for ES7 decorators. */
+    // "emitDecoratorMetadata": true,  /* Enables experimental support for emitting type metadata for decorators. */
+
+    /* Advanced Options */
+    "skipLibCheck": true,  /* Skip type checking of declaration files. */
+    "forceConsistentCasingInFileNames": true,  /* Disallow inconsistently-cased references to the same file. */
+    "resolveJsonModule": true  /* Enabling the option allows importing JSON, and validating the types in that JSON file. */
+  },
+  "include": ["src"],
+  "exclude": ["dist"]
+}
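The "typeRoots": ["./src/types"] entry is what makes the tiny src/types/common/main.d.ts shim above work: its shorthand ambient declaration gives the untyped 'canonical-json' package an implicit `any` module type, so imports of it compile under strict mode. A sketch, assuming the package is installed in the watcher workspace:

// With "typeRoots": ["./src/types"], the ambient declaration
// `declare module 'canonical-json'` is picked up, so this import type-checks
// even though the package ships no type definitions; the value is typed `any`.
import canonicalJson from 'canonical-json';

console.log(typeof canonicalJson);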