Mirror of https://github.com/cerc-io/watcher-ts (synced 2025-01-08 12:28:05 +00:00)
uniswap-info-watcher package with frontend schema (#114)
* Implement mock bundle query in watcher.
* Use chance js for generating mock data.
* Add bundles query.
* Setup resolvers with uniswap info schema.
* Add mock query for Burn entity.
* Add mock query for Factory entity.
* Add mock queries for entities Mint, Pool and Transaction.
* Add mock query for Swap entity.
* Add mock queries for day data entities.
* Implement mock ticks query.
* Add mock tokenHourDatas query.
* Add mock queries for uni-info-watcher to readme.

Co-authored-by: nikugogoi <95nikass@gmail.com>
This commit is contained in:
parent 1b6d4511f7
commit b7ffb7c672
5  packages/uni-info-watcher/.eslintignore  Normal file
@@ -0,0 +1,5 @@
# Don't lint node_modules.
node_modules

# Don't lint build output.
dist
27  packages/uni-info-watcher/.eslintrc.json  Normal file
@@ -0,0 +1,27 @@
{
  "env": {
    "browser": true,
    "es2021": true
  },
  "extends": [
    "semistandard",
    "plugin:@typescript-eslint/recommended"
  ],
  "parser": "@typescript-eslint/parser",
  "parserOptions": {
    "ecmaVersion": 12,
    "sourceType": "module"
  },
  "plugins": [
    "@typescript-eslint"
  ],
  "rules": {
    "@typescript-eslint/no-explicit-any": "off",
    "@typescript-eslint/explicit-module-boundary-types": [
      "warn",
      {
        "allowArgumentsExplicitlyTypedAsAny": true
      }
    ]
  }
}
6  packages/uni-info-watcher/.gitignore  vendored  Normal file
@@ -0,0 +1,6 @@
.idea/
.vscode/
node_modules/
build/
tmp/
temp/
205  packages/uni-info-watcher/README.md  Normal file
@@ -0,0 +1,205 @@
# uni-info-watcher

## Instructions

* To start the server, run `yarn server`.

* Run `yarn server:mock` to run the server with mock data.
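
The same data can also be queried programmatically once the mock server is up; a minimal sketch using `graphql-request` (the client used in `src/mock/server.spec.ts`), assuming the default endpoint from `environments/local.toml`:

```typescript
import { GraphQLClient, gql } from 'graphql-request';

// Default mock server endpoint (host/port from environments/local.toml).
const client = new GraphQLClient('http://localhost:3003/graphql');

// Fetch the mock Bundle entity at block 2.
const query = gql`
  {
    bundle(id: "1", block: { number: 2 }) {
      id
      ethPriceUSD
    }
  }
`;

client.request(query).then(result => console.log(result.bundle));
```
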
## Mock Queries

```graphql
{
  bundle(id: "1", block: { number: 2 }) {
    id
    ethPriceUSD
  }

  bundles(first: 1, block: { number: 2 }) {
    id
    ethPriceUSD
  }

  burns(first: 2, orderBy: timestamp) {
    amount0
    amount1
    amountUSD
    id
    origin
    owner
    pool {
      id
    }
    timestamp
    transaction {
      id
    }
  }

  factories(first: 1, block: { number: 2 }) {
    id
    totalFeesUSD
    totalValueLockedUSD
    totalVolumeUSD
    txCount
  }

  mints(first: 2) {
    amount0
    amount1
    amountUSD
    id
    origin
    owner
    pool {
      id
    }
    timestamp
    transaction {
      id
    }
    sender
  }

  pools(first: 2, block: { number: 2 }) {
    feeTier
    id
    liquidity
    sqrtPrice
    tick
    token0 {
      name
    }
    token0Price
    token1 {
      name
    }
    token1Price
    totalValueLockedToken0
    totalValueLockedToken1
    totalValueLockedUSD
    txCount
    volumeUSD
  }

  tokens {
    derivedETH
    feesUSD
    id
    name
    symbol
    totalValueLocked
    totalValueLockedUSD
    txCount
    volume
    volumeUSD
  }

  transactions(first: 2) {
    burns {
      id
    }
    id
    mints {
      id
    }
    swaps {
      id
    }
    timestamp
  }

  swaps(first: 2) {
    amount0
    amount1
    amountUSD
    id
    origin
    pool {
      id
    }
    timestamp
    transaction {
      id
    }
  }

  poolDayDatas(skip: 1, first: 2) {
    date
    id
    tvlUSD
    volumeUSD
  }

  tokenDayDatas(first: 2, where: {}) {
    date
    id
    totalValueLockedUSD
    volumeUSD
  }

  uniswapDayDatas(skip: 1, first: 2) {
    date
    id
    tvlUSD
    volumeUSD
  }

  ticks(skip: 1, first: 2, block: { number: 2 }) {
    id
    liquidityGross
    liquidityNet
    price0
    price1
    tickIdx
  }

  tokenHourDatas(skip: 1, first: 2) {
    close
    high
    id
    low
    open
    periodStartUnix
  }
}
```

Queries with ID param

```graphql
{
  pool(id: "0x38bb4e5eb41aeaeec59e60ba075298f4d4dfd2a2") {
    feeTier
    id
    liquidity
    sqrtPrice
    tick
    token0 {
      name
    }
    token0Price
    token1 {
      name
    }
    token1Price
    totalValueLockedToken0
    totalValueLockedToken1
    totalValueLockedUSD
    txCount
    volumeUSD
  }

  token(id: "0xb87ddd8af3242e56e52318bacf27fe9dcc75c15a", block: { number: 2 }) {
    derivedETH
    feesUSD
    id
    name
    symbol
    totalValueLocked
    totalValueLockedUSD
    txCount
    volume
    volumeUSD
  }
}
```
31  packages/uni-info-watcher/environments/local.toml  Normal file
@@ -0,0 +1,31 @@
[server]
  host = "127.0.0.1"
  port = 3003

[database]
  type = "postgres"
  host = "localhost"
  port = 5432
  database = "uni-info-watcher"
  username = "postgres"
  password = "postgres"
  synchronize = true
  logging = false

  entities = [ "src/entity/**/*.ts" ]
  migrations = [ "src/migration/**/*.ts" ]
  subscribers = [ "src/subscriber/**/*.ts" ]

  [database.cli]
    entitiesDir = "src/entity"
    migrationsDir = "src/migration"
    subscribersDir = "src/subscriber"

[upstream]
  gqlEndpoint = "http://127.0.0.1:8083/graphql"
  gqlSubscriptionEndpoint = "http://127.0.0.1:5000/graphql"

  [upstream.cache]
    name = "requests"
    enabled = false
    deleteOnStart = false
37  packages/uni-info-watcher/package.json  Normal file
@@ -0,0 +1,37 @@
{
  "name": "@vulcanize/uni-info-watcher",
  "version": "0.1.0",
  "main": "index.js",
  "license": "UNLICENSED",
  "private": true,
  "dependencies": {
    "@vulcanize/cache": "^0.1.0",
    "@vulcanize/ipld-eth-client": "^0.1.0",
    "apollo-server-express": "^2.25.0",
    "apollo-type-bigint": "^0.1.3"
  },
  "scripts": {
    "server": "DEBUG=vulcanize:* nodemon src/server.ts -f environments/local.toml",
    "server:mock": "MOCK=1 nodemon src/server.ts -f environments/local.toml",
    "test": "mocha -r ts-node/register src/**/*.spec.ts",
    "lint": "eslint .",
    "build": "tsc"
  },
  "devDependencies": {
    "@types/chance": "^1.1.2",
    "@types/express": "^4.17.11",
    "@typescript-eslint/eslint-plugin": "^4.25.0",
    "@typescript-eslint/parser": "^4.25.0",
    "chai": "^4.3.4",
    "chance": "^1.1.7",
    "eslint": "^7.27.0",
    "eslint-config-semistandard": "^15.0.1",
    "eslint-config-standard": "^16.0.3",
    "eslint-plugin-import": "^2.23.3",
    "eslint-plugin-node": "^11.1.0",
    "eslint-plugin-promise": "^5.1.0",
    "eslint-plugin-standard": "^5.0.0",
    "mocha": "^8.4.0",
    "nodemon": "^2.0.7"
  }
}
40  packages/uni-info-watcher/src/config.ts  Normal file
@@ -0,0 +1,40 @@
import fs from 'fs-extra';
import path from 'path';
import toml from 'toml';
import debug from 'debug';
import { ConnectionOptions } from 'typeorm';

import { Config as CacheConfig } from '@vulcanize/cache';

const log = debug('vulcanize:config');

export interface Config {
  server: {
    host: string;
    port: number;
  };
  database: ConnectionOptions;
  upstream: {
    gqlEndpoint: string;
    gqlSubscriptionEndpoint: string;
    traceProviderEndpoint: string;
    cache: CacheConfig
  }
  jobQueue: {
    dbConnectionString: string;
    maxCompletionLag: number;
  }
}

export const getConfig = async (configFile: string): Promise<Config> => {
  const configFilePath = path.resolve(configFile);
  const fileExists = await fs.pathExists(configFilePath);
  if (!fileExists) {
    throw new Error(`Config file not found: ${configFilePath}`);
  }

  const config = toml.parse(await fs.readFile(configFilePath, 'utf8'));
  log('config', JSON.stringify(config, null, 2));

  return config;
};
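
A usage sketch for `getConfig`, assuming it is called from within the package with the `environments/local.toml` file added above:

```typescript
import { getConfig } from './config';

const run = async (): Promise<void> => {
  // Path is resolved relative to the current working directory
  // (the yarn scripts run from the package root).
  const config = await getConfig('environments/local.toml');

  // With the local.toml above this logs: 127.0.0.1 3003
  console.log(config.server.host, config.server.port);
};

run();
```
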
96  packages/uni-info-watcher/src/database.ts  Normal file
@@ -0,0 +1,96 @@
import assert from 'assert';
import { Connection, ConnectionOptions, createConnection, DeepPartial } from 'typeorm';
import { SnakeNamingStrategy } from 'typeorm-naming-strategies';

import { Event } from './entity/Event';
import { EventSyncProgress } from './entity/EventProgress';

export class Database {
  _config: ConnectionOptions
  _conn!: Connection

  constructor (config: ConnectionOptions) {
    assert(config);
    this._config = config;
  }

  async init (): Promise<void> {
    assert(!this._conn);

    this._conn = await createConnection({
      ...this._config,
      namingStrategy: new SnakeNamingStrategy()
    });
  }

  async close (): Promise<void> {
    return this._conn.close();
  }

  // Returns true if events have already been synced for the (block, token) combination.
  async didSyncEvents ({ blockHash, token }: { blockHash: string, token: string }): Promise<boolean> {
    const numRows = await this._conn.getRepository(EventSyncProgress)
      .createQueryBuilder()
      .where('block_hash = :blockHash AND token = :token', {
        blockHash,
        token
      })
      .getCount();

    return numRows > 0;
  }

  async getEvents ({ blockHash, token }: { blockHash: string, token: string }): Promise<Event[]> {
    return this._conn.getRepository(Event)
      .createQueryBuilder('event')
      .where('block_hash = :blockHash AND token = :token', {
        blockHash,
        token
      })
      .addOrderBy('id', 'ASC')
      .getMany();
  }

  async getEventsByName ({ blockHash, token, eventName }: { blockHash: string, token: string, eventName: string }): Promise<Event[] | undefined> {
    return this._conn.getRepository(Event)
      .createQueryBuilder('event')
      .where('block_hash = :blockHash AND token = :token AND event_name = :eventName', {
        blockHash,
        token,
        eventName
      })
      .getMany();
  }

  async saveEvents ({ blockHash, token, events }: { blockHash: string, token: string, events: DeepPartial<Event>[] }): Promise<void> {
    // In a transaction:
    // (1) Save all the events in the database.
    // (2) Add an entry to the event progress table.

    await this._conn.transaction(async (tx) => {
      const repo = tx.getRepository(EventSyncProgress);

      // Check sync progress inside the transaction.
      const numRows = await repo
        .createQueryBuilder()
        .where('block_hash = :blockHash AND token = :token', {
          blockHash,
          token
        })
        .getCount();

      if (numRows === 0) {
        // Bulk insert events.
        await tx.createQueryBuilder()
          .insert()
          .into(Event)
          .values(events)
          .execute();

        // Update event sync progress.
        const progress = repo.create({ blockHash, token });
        await repo.save(progress);
      }
    });
  }
}
21  packages/uni-info-watcher/src/entity/Event.ts  Normal file
@@ -0,0 +1,21 @@
import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';

@Entity()
// Index to query all events for a contract efficiently.
@Index(['blockHash', 'token'])
export class Event {
  @PrimaryGeneratedColumn()
  id!: number;

  @Column('varchar', { length: 66 })
  blockHash!: string;

  @Column('varchar', { length: 42 })
  token!: string;

  @Column('varchar', { length: 256 })
  eventName!: string;

  @Column('text')
  proof!: string;
}
20  packages/uni-info-watcher/src/entity/EventProgress.ts  Normal file
@@ -0,0 +1,20 @@
import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';

// Stores a row if events for a (block, token) combination have already been fetched.
//
// Required as a particular block may not have events from a particular contract,
// and we need to differentiate between that case and the case where data hasn't
// yet been synced from upstream.
//
@Entity()
@Index(['blockHash', 'token'], { unique: true })
export class EventSyncProgress {
  @PrimaryGeneratedColumn()
  id!: number;

  @Column('varchar', { length: 66 })
  blockHash!: string;

  @Column('varchar', { length: 42 })
  token!: string;
}
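
The comment above explains why this progress table exists; a minimal sketch of the check-then-fetch pattern it enables (the same pattern `Indexer.getEvents` below follows; `fetchEventsFromUpstream` is a hypothetical stand-in for `Indexer._fetchAndSaveEvents`):

```typescript
import { DeepPartial } from 'typeorm';

import { Database } from './database';
import { Event } from './entity/Event';

// Hypothetical helper: fetches logs from the upstream node and maps them to Event rows.
declare function fetchEventsFromUpstream (blockHash: string, token: string): Promise<DeepPartial<Event>[]>;

async function ensureEventsSynced (db: Database, blockHash: string, token: string): Promise<void> {
  // EventSyncProgress tells us whether this (block, token) pair was fetched before,
  // even if the fetch returned zero events.
  if (await db.didSyncEvents({ blockHash, token })) {
    return;
  }

  const events = await fetchEventsFromUpstream(blockHash, token);

  // saveEvents inserts the events and records progress in a single transaction.
  await db.saveEvents({ blockHash, token, events });
}
```
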
59  packages/uni-info-watcher/src/events.ts  Normal file
@@ -0,0 +1,59 @@
import assert from 'assert';
import debug from 'debug';
import _ from 'lodash';

import { EthClient } from '@vulcanize/ipld-eth-client';

import { Indexer } from './indexer';

const log = debug('vulcanize:events');

export class EventWatcher {
  _ethClient: EthClient
  _indexer: Indexer
  _subscription: ZenObservable.Subscription | undefined

  constructor (ethClient: EthClient, indexer: Indexer) {
    assert(ethClient);
    assert(indexer);

    this._ethClient = ethClient;
    this._indexer = indexer;
  }

  async start (): Promise<void> {
    assert(!this._subscription, 'subscription already started');

    log('Started watching upstream logs...');

    this._subscription = await this._ethClient.watchLogs(async (value) => {
      const receipt = _.get(value, 'data.listen.relatedNode');
      log('watchLogs', JSON.stringify(receipt, null, 2));

      // Check if this log is for a contract we care about.
      const { logContracts } = receipt;
      if (logContracts && logContracts.length) {
        for (let logIndex = 0; logIndex < logContracts.length; logIndex++) {
          const contractAddress = logContracts[logIndex];
          const isWatchedContract = await this._indexer.isUniswapContract(contractAddress);
          if (isWatchedContract) {
            // TODO: Move processing to background task runner.

            const { ethTransactionCidByTxId: { ethHeaderCidByHeaderId: { blockHash } } } = receipt;
            await this._indexer.getEvents(blockHash, contractAddress, null);

            // Trigger other indexer methods based on event topic.
            await this._indexer.processEvent(blockHash, contractAddress, receipt, logIndex);
          }
        }
      }
    });
  }

  async stop (): Promise<void> {
    if (this._subscription) {
      log('Stopped watching upstream logs');
      this._subscription.unsubscribe();
    }
  }
}
228  packages/uni-info-watcher/src/indexer.ts  Normal file
@@ -0,0 +1,228 @@
|
||||
import assert from 'assert';
|
||||
import debug from 'debug';
|
||||
import { invert } from 'lodash';
|
||||
import { DeepPartial } from 'typeorm';
|
||||
import JSONbig from 'json-bigint';
|
||||
import { PubSub } from 'apollo-server-express';
|
||||
|
||||
import { EthClient } from '@vulcanize/ipld-eth-client';
|
||||
import {
|
||||
GetStorageAt
|
||||
// StorageLayout
|
||||
} from '@vulcanize/solidity-mapper';
|
||||
|
||||
import { Database } from './database';
|
||||
import { Event } from './entity/Event';
|
||||
|
||||
const log = debug('vulcanize:indexer');
|
||||
|
||||
export interface ValueResult {
|
||||
value: string | bigint;
|
||||
proof: {
|
||||
data: string;
|
||||
}
|
||||
}
|
||||
|
||||
type EventsResult = Array<{
|
||||
event: {
|
||||
from?: string;
|
||||
to?: string;
|
||||
owner?: string;
|
||||
spender?: string;
|
||||
value?: BigInt;
|
||||
__typename: string;
|
||||
}
|
||||
proof: string;
|
||||
}>
|
||||
|
||||
export class Indexer {
|
||||
_db: Database
|
||||
_ethClient: EthClient
|
||||
_pubsub: PubSub
|
||||
_getStorageAt: GetStorageAt
|
||||
|
||||
// _abi: JsonFragment[]
|
||||
// _storageLayout: StorageLayout
|
||||
// _contract: ethers.utils.Interface
|
||||
|
||||
constructor (db: Database, ethClient: EthClient, pubsub: PubSub) {
|
||||
assert(db);
|
||||
assert(ethClient);
|
||||
assert(pubsub);
|
||||
|
||||
// const { abi, storageLayout } = artifacts;
|
||||
|
||||
// assert(abi);
|
||||
// assert(storageLayout);
|
||||
|
||||
this._db = db;
|
||||
this._ethClient = ethClient;
|
||||
this._pubsub = pubsub;
|
||||
this._getStorageAt = this._ethClient.getStorageAt.bind(this._ethClient);
|
||||
|
||||
// this._abi = abi;
|
||||
// this._storageLayout = storageLayout;
|
||||
|
||||
// this._contract = new ethers.utils.Interface(this._abi);
|
||||
}
|
||||
|
||||
getEventIterator (): AsyncIterator<any> {
|
||||
return this._pubsub.asyncIterator(['event']);
|
||||
}
|
||||
|
||||
async getEvents (blockHash: string, token: string, name: string | null): Promise<EventsResult> {
|
||||
const didSyncEvents = await this._db.didSyncEvents({ blockHash, token });
|
||||
if (!didSyncEvents) {
|
||||
// Fetch and save events first and make a note in the event sync progress table.
|
||||
await this._fetchAndSaveEvents({ blockHash, token });
|
||||
log('getEvents: db miss, fetching from upstream server');
|
||||
}
|
||||
|
||||
assert(await this._db.didSyncEvents({ blockHash, token }));
|
||||
|
||||
const events = await this._db.getEvents({ blockHash, token });
|
||||
log('getEvents: db hit');
|
||||
|
||||
const result = events
|
||||
// TODO: Filter using db WHERE condition when name is not empty.
|
||||
.filter(event => !name || name === event.eventName)
|
||||
.map(e => {
|
||||
const eventFields: {
|
||||
from?: string,
|
||||
to?: string,
|
||||
value?: BigInt,
|
||||
owner?: string,
|
||||
spender?: string,
|
||||
} = {};
|
||||
|
||||
// switch (e.eventName) {
|
||||
// // TODO: Handle events.
|
||||
// }
|
||||
|
||||
return {
|
||||
event: {
|
||||
__typename: `${e.eventName}Event`,
|
||||
...eventFields
|
||||
},
|
||||
// TODO: Return proof only if requested.
|
||||
proof: JSON.parse(e.proof)
|
||||
};
|
||||
});
|
||||
|
||||
// log(JSONbig.stringify(result, null, 2));
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
async triggerIndexingOnEvent (blockHash: string, token: string, receipt: any, logIndex: number): Promise<void> {
|
||||
const topics = [];
|
||||
|
||||
// We only care about the event type for now.
|
||||
// const data = '0x0000000000000000000000000000000000000000000000000000000000000000';
|
||||
|
||||
topics.push(receipt.topic0S[logIndex]);
|
||||
topics.push(receipt.topic1S[logIndex]);
|
||||
topics.push(receipt.topic2S[logIndex]);
|
||||
|
||||
// const { name: eventName, args } = this._contract.parseLog({ topics, data });
|
||||
// log(`trigger indexing on event: ${eventName} ${args}`);
|
||||
|
||||
// What data we index depends on the kind of event.
|
||||
// switch (eventName) {
|
||||
// TODO: Index event.
|
||||
// }
|
||||
}
|
||||
|
||||
async publishEventToSubscribers (blockHash: string, token: string, logIndex: number): Promise<void> {
|
||||
// TODO: Optimize this fetching of events.
|
||||
const events = await this.getEvents(blockHash, token, null);
|
||||
const event = events[logIndex];
|
||||
|
||||
log(`pushing event to GQL subscribers: ${event.event.__typename}`);
|
||||
|
||||
// Publishing the event here will result in pushing the payload to GQL subscribers for `onTokenEvent`.
|
||||
await this._pubsub.publish('event', {
|
||||
onTokenEvent: {
|
||||
blockHash,
|
||||
token,
|
||||
event
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async isUniswapContract (address: string): Promise<boolean> {
|
||||
// TODO: Return true for uniswap contracts of interest to the indexer (from config?).
|
||||
return address != null;
|
||||
}
|
||||
|
||||
async processEvent (blockHash: string, token: string, receipt: any, logIndex: number): Promise<void> {
|
||||
// Trigger indexing of data based on the event.
|
||||
await this.triggerIndexingOnEvent(blockHash, token, receipt, logIndex);
|
||||
|
||||
// Also trigger downstream event watcher subscriptions.
|
||||
await this.publishEventToSubscribers(blockHash, token, logIndex);
|
||||
}
|
||||
|
||||
// TODO: Move into base/class or framework package.
|
||||
async _getStorageValue (
|
||||
// blockHash: string,
|
||||
// token: string,
|
||||
// variable: string,
|
||||
// ...mappingKeys: string[]
|
||||
): Promise<ValueResult> {
|
||||
return {
|
||||
value: '',
|
||||
proof: {
|
||||
data: ''
|
||||
}
|
||||
};
|
||||
|
||||
// return getStorageValue(
|
||||
// this._storageLayout,
|
||||
// this._getStorageAt,
|
||||
// blockHash,
|
||||
// token,
|
||||
// variable,
|
||||
// ...mappingKeys
|
||||
// );
|
||||
}
|
||||
|
||||
async _fetchAndSaveEvents ({ blockHash, token }: { blockHash: string, token: string }): Promise<void> {
|
||||
const logs = await this._ethClient.getLogs({ blockHash, contract: token });
|
||||
|
||||
const eventNameToTopic = {}; // getEventNameTopics(this._abi);
|
||||
const logTopicToEventName = invert(eventNameToTopic);
|
||||
|
||||
const dbEvents = logs.map((log: any) => {
|
||||
const { topics, cid, ipldBlock } = log;
|
||||
|
||||
const [topic0] = topics;
|
||||
|
||||
const eventName = logTopicToEventName[topic0];
|
||||
|
||||
const event: DeepPartial<Event> = {
|
||||
blockHash,
|
||||
token,
|
||||
eventName,
|
||||
|
||||
proof: JSONbig.stringify({
|
||||
data: JSONbig.stringify({
|
||||
blockHash,
|
||||
receipt: {
|
||||
cid,
|
||||
ipldBlock
|
||||
}
|
||||
})
|
||||
})
|
||||
};
|
||||
|
||||
// switch (eventName) {
|
||||
// // TODO: Handle event.
|
||||
// }
|
||||
|
||||
return event;
|
||||
});
|
||||
|
||||
await this._db.saveEvents({ blockHash, token, events: dbEvents });
|
||||
}
|
||||
}
|
252  packages/uni-info-watcher/src/mock/data.ts  Normal file
@@ -0,0 +1,252 @@
|
||||
import Chance from 'chance';
|
||||
import { ethers } from 'ethers';
|
||||
|
||||
export const NO_OF_BLOCKS = 3;
|
||||
|
||||
export interface Entity {
|
||||
blockNumber: number
|
||||
id: string
|
||||
[field: string]: any
|
||||
}
|
||||
|
||||
export class Data {
|
||||
static _instance: Data;
|
||||
|
||||
_entities: {[key: string]: Array<Entity>} = {
|
||||
bundles: [],
|
||||
burns: [],
|
||||
transactions: [],
|
||||
pools: [],
|
||||
tokens: [],
|
||||
factories: [],
|
||||
mints: [],
|
||||
swaps: [],
|
||||
poolDayDatas: [],
|
||||
tokenDayDatas: [],
|
||||
uniswapDayDatas: [],
|
||||
ticks: [],
|
||||
tokenHourDatas: []
|
||||
}
|
||||
|
||||
_chance: Chance.Chance
|
||||
|
||||
constructor () {
|
||||
this._chance = new Chance();
|
||||
this._generateData();
|
||||
}
|
||||
|
||||
static getInstance (): Data {
|
||||
if (!this._instance) {
|
||||
this._instance = new Data();
|
||||
}
|
||||
return this._instance;
|
||||
}
|
||||
|
||||
get entities (): {[key: string]: Array<Entity>} {
|
||||
return this._entities;
|
||||
}
|
||||
|
||||
_generateData (): void {
|
||||
const factoryAddress = this._getRandomAddress();
|
||||
|
||||
// Generate data for each block.
|
||||
Array.from(Array(NO_OF_BLOCKS))
|
||||
.forEach((_, blockNumber) => {
|
||||
// Generate data for Factory.
|
||||
this._entities.factories.push({
|
||||
blockNumber,
|
||||
id: factoryAddress,
|
||||
totalFeesUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
totalValueLockedUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
totalVolumeUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
txCount: this._chance.integer({ min: 1 })
|
||||
});
|
||||
|
||||
// Generate Bundle.
|
||||
this._entities.bundles.push({
|
||||
blockNumber,
|
||||
id: '1',
|
||||
ethPriceUSD: this._chance.floating({ min: 1, fixed: 2 })
|
||||
});
|
||||
|
||||
// Generate Pools.
|
||||
Array.from(Array(3))
|
||||
.forEach(() => {
|
||||
const token0 = {
|
||||
blockNumber: blockNumber,
|
||||
id: this._getRandomAddress(),
|
||||
symbol: this._chance.string({ length: 3, casing: 'upper', alpha: false }),
|
||||
name: this._chance.word({ syllables: 1 }),
|
||||
volume: this._chance.integer({ min: 1 }),
|
||||
volumeUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
feesUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
txCount: this._chance.integer({ min: 1 }),
|
||||
totalValueLocked: this._chance.integer({ min: 1 }),
|
||||
totalValueLockedUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
derivedETH: this._chance.floating({ min: 1, fixed: 2 })
|
||||
};
|
||||
|
||||
const token1 = {
|
||||
blockNumber: blockNumber,
|
||||
id: this._getRandomAddress(),
|
||||
symbol: this._chance.string({ length: 3, casing: 'upper', alpha: false }),
|
||||
name: this._chance.word({ syllables: 1 }),
|
||||
volume: this._chance.integer({ min: 1 }),
|
||||
volumeUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
feesUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
txCount: this._chance.integer({ min: 1 }),
|
||||
totalValueLocked: this._chance.integer({ min: 1 }),
|
||||
totalValueLockedUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
derivedETH: this._chance.floating({ min: 1, fixed: 2 })
|
||||
};
|
||||
|
||||
const pool = {
|
||||
blockNumber: blockNumber,
|
||||
id: this._getRandomAddress(),
|
||||
token0: token0.id,
|
||||
token1: token1.id,
|
||||
feeTier: this._chance.integer({ min: 1 }),
|
||||
liquidity: this._chance.integer({ min: 1 }),
|
||||
sqrtPrice: this._chance.integer({ min: 1 }),
|
||||
token0Price: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
token1Price: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
tick: this._chance.integer({ min: 1 }),
|
||||
volumeUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
txCount: this._chance.integer({ min: 1 }),
|
||||
totalValueLockedToken0: this._chance.integer({ min: 1 }),
|
||||
totalValueLockedToken1: this._chance.integer({ min: 1 }),
|
||||
totalValueLockedUSD: this._chance.floating({ min: 1, fixed: 2 })
|
||||
};
|
||||
|
||||
const timestamp = this._chance.timestamp();
|
||||
|
||||
this._entities.poolDayDatas.push({
|
||||
blockNumber,
|
||||
date: timestamp,
|
||||
id: String(timestamp),
|
||||
tvlUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
volumeUSD: this._chance.floating({ min: 1, fixed: 2 })
|
||||
});
|
||||
|
||||
this._entities.tokenDayDatas.push(
|
||||
{
|
||||
blockNumber,
|
||||
date: timestamp,
|
||||
id: `${token0.id}-${timestamp}`,
|
||||
totalValueLockedUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
volumeUSD: this._chance.floating({ min: 1, fixed: 2 })
|
||||
},
|
||||
{
|
||||
blockNumber,
|
||||
date: timestamp,
|
||||
id: `${token1.id}-${timestamp}`,
|
||||
totalValueLockedUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
volumeUSD: this._chance.floating({ min: 1, fixed: 2 })
|
||||
}
|
||||
);
|
||||
|
||||
this._entities.uniswapDayDatas.push({
|
||||
blockNumber,
|
||||
date: timestamp,
|
||||
id: String(timestamp),
|
||||
tvlUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
volumeUSD: this._chance.floating({ min: 1, fixed: 2 })
|
||||
});
|
||||
|
||||
this._entities.ticks.push({
|
||||
blockNumber,
|
||||
id: `${pool.id}#${this._chance.integer({ min: 1 })}`,
|
||||
liquidityGross: this._chance.integer({ min: 1 }),
|
||||
liquidityNet: this._chance.integer({ min: 1 }),
|
||||
price0: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
price1: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
tickIdx: this._chance.integer({ min: 1 })
|
||||
});
|
||||
|
||||
this._entities.tokenHourDatas.push(
|
||||
{
|
||||
blockNumber,
|
||||
close: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
high: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
id: `${token0.id}-${timestamp}`,
|
||||
low: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
open: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
periodStartUnix: timestamp
|
||||
},
|
||||
{
|
||||
blockNumber,
|
||||
close: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
high: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
id: `${token1.id}-${timestamp}`,
|
||||
low: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
open: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
periodStartUnix: timestamp
|
||||
}
|
||||
);
|
||||
|
||||
this._entities.tokens.push(token0, token1);
|
||||
this._entities.pools.push(pool);
|
||||
|
||||
// Generate Transactions.
|
||||
Array.from(Array(3))
|
||||
.forEach((_, transactionIndex) => {
|
||||
const transactionHash = ethers.utils.hexlify(ethers.utils.randomBytes(32));
|
||||
|
||||
const transaction = {
|
||||
blockNumber,
|
||||
id: transactionHash,
|
||||
timestamp: this._chance.timestamp()
|
||||
};
|
||||
|
||||
this._entities.transactions.push(transaction);
|
||||
|
||||
// Generate Burns
|
||||
this._entities.burns.push({
|
||||
id: `${transaction.id}#${transactionIndex}`,
|
||||
blockNumber,
|
||||
transaction: transaction.id,
|
||||
pool: pool.id,
|
||||
timestamp: this._chance.timestamp(),
|
||||
owner: this._getRandomAddress(),
|
||||
origin: this._getRandomAddress(),
|
||||
amount0: this._chance.integer({ min: 1 }),
|
||||
amount1: this._chance.integer({ min: 1 }),
|
||||
amountUSD: this._chance.floating({ min: 1, fixed: 2 })
|
||||
});
|
||||
|
||||
// Generate Mints
|
||||
this._entities.mints.push({
|
||||
id: `${transaction.id}#${transactionIndex}`,
|
||||
blockNumber,
|
||||
transaction: transaction.id,
|
||||
pool: pool.id,
|
||||
timestamp: this._chance.timestamp(),
|
||||
owner: this._getRandomAddress(),
|
||||
origin: this._getRandomAddress(),
|
||||
amount0: this._chance.integer({ min: 1 }),
|
||||
amount1: this._chance.integer({ min: 1 }),
|
||||
amountUSD: this._chance.floating({ min: 1, fixed: 2 }),
|
||||
sender: this._getRandomAddress()
|
||||
});
|
||||
|
||||
// Generate Swaps
|
||||
this._entities.swaps.push({
|
||||
id: `${transaction.id}#${transactionIndex}`,
|
||||
blockNumber,
|
||||
transaction: transaction.id,
|
||||
pool: pool.id,
|
||||
timestamp: this._chance.timestamp(),
|
||||
origin: this._getRandomAddress(),
|
||||
amount0: this._chance.integer({ min: 1 }),
|
||||
amount1: this._chance.integer({ min: 1 }),
|
||||
amountUSD: this._chance.floating({ min: 1, fixed: 2 })
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
_getRandomAddress (): string {
|
||||
return ethers.utils.hexlify(ethers.utils.randomBytes(20));
|
||||
}
|
||||
}
|
465  packages/uni-info-watcher/src/mock/resolvers.ts  Normal file
@@ -0,0 +1,465 @@
|
||||
/* eslint-disable camelcase */
|
||||
import debug from 'debug';
|
||||
import BigInt from 'apollo-type-bigint';
|
||||
import { Data, Entity, NO_OF_BLOCKS } from './data';
|
||||
|
||||
const log = debug('vulcanize:test');
|
||||
|
||||
interface BlockHeight {
|
||||
number: number;
|
||||
hash: string;
|
||||
}
|
||||
|
||||
enum OrderDirection {
|
||||
asc,
|
||||
desc
|
||||
}
|
||||
|
||||
enum BurnOrderBy {
|
||||
timestamp
|
||||
}
|
||||
|
||||
interface BurnFilter {
|
||||
pool: string;
|
||||
token0: string;
|
||||
token1: string;
|
||||
}
|
||||
|
||||
enum MintOrderBy {
|
||||
timestamp
|
||||
}
|
||||
|
||||
interface MintFilter {
|
||||
pool: string;
|
||||
token0: string;
|
||||
token1: string;
|
||||
}
|
||||
|
||||
enum PoolOrderBy {
|
||||
totalValueLockedUSD
|
||||
}
|
||||
|
||||
interface PoolFilter {
|
||||
id: string;
|
||||
id_in: [string];
|
||||
token0: string;
|
||||
token0_in: [string];
|
||||
token1: string;
|
||||
token1_in: [string];
|
||||
}
|
||||
|
||||
enum TokenOrderBy {
|
||||
totalValueLockedUSD
|
||||
}
|
||||
|
||||
interface TokenFilter {
|
||||
id: string;
|
||||
id_in: [string];
|
||||
name_contains: string;
|
||||
symbol_contains: string;
|
||||
}
|
||||
|
||||
enum TransactionOrderBy {
|
||||
timestamp
|
||||
}
|
||||
|
||||
interface SwapFilter {
|
||||
pool: string;
|
||||
token0: string;
|
||||
token1: string;
|
||||
}
|
||||
|
||||
enum SwapOrderBy {
|
||||
timestamp
|
||||
}
|
||||
|
||||
enum DayDataOrderBy {
|
||||
date
|
||||
}
|
||||
|
||||
interface DayDataFilter {
|
||||
date_gt: number;
|
||||
pool: string;
|
||||
}
|
||||
|
||||
interface TickFilter {
|
||||
poolAddress: string;
|
||||
tickIdx_gte: number;
|
||||
tickIdx_lte: number;
|
||||
}
|
||||
|
||||
enum TokenHourDataOrderBy {
|
||||
periodStartUnix
|
||||
}
|
||||
|
||||
interface TokenHourDataFilter {
|
||||
periodStartUnix_gt: number;
|
||||
token: string;
|
||||
}
|
||||
|
||||
export const createResolvers = async (): Promise<any> => {
|
||||
const latestBlockNumber = NO_OF_BLOCKS - 1;
|
||||
const data = Data.getInstance();
|
||||
const { bundles, burns, pools, transactions, factories, mints, tokens, swaps, poolDayDatas, tokenDayDatas, uniswapDayDatas, ticks, tokenHourDatas } = data.entities;
|
||||
|
||||
return {
|
||||
BigInt: new BigInt('bigInt'),
|
||||
|
||||
Query: {
|
||||
bundle: (_: any, { id: bundleId, block }: { id: string, block: BlockHeight }) => {
|
||||
log('bundle', bundleId, block);
|
||||
const res = bundles.find((bundle: Entity) => bundle.blockNumber === block.number && bundle.id === bundleId);
|
||||
|
||||
if (res) {
|
||||
const { ethPriceUSD, id } = res;
|
||||
return { ethPriceUSD, id };
|
||||
}
|
||||
},
|
||||
|
||||
bundles: (_: any, { first, block }: { first: number, block: BlockHeight }) => {
|
||||
log('bundles', first, block);
|
||||
|
||||
const res = bundles.filter((bundle: Entity) => bundle.blockNumber === block.number)
|
||||
.slice(0, first)
|
||||
.map(({ ethPriceUSD, id }) => ({ ethPriceUSD, id }));
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
burns: (_: any, { first, orderBy, orderDirection, where }: { first: number, orderBy: BurnOrderBy, orderDirection: OrderDirection, where: BurnFilter }) => {
|
||||
log('burns', first, orderBy, orderDirection, where);
|
||||
|
||||
const res = burns.filter((burn: Entity) => {
|
||||
if (burn.blockNumber === latestBlockNumber) {
|
||||
return Object.entries(where || {})
|
||||
.every(([field, value]) => burn[field] === value);
|
||||
}
|
||||
|
||||
return false;
|
||||
}).slice(0, first)
|
||||
.sort((a: any, b: any) => {
|
||||
a = a[orderBy];
|
||||
b = b[orderBy];
|
||||
return orderDirection === OrderDirection.asc ? (a - b) : (b - a);
|
||||
})
|
||||
.map(burn => {
|
||||
return {
|
||||
...burn,
|
||||
pool: pools.find(pool => pool.id === burn.pool),
|
||||
transaction: transactions.find(transaction => transaction.id === burn.transaction)
|
||||
};
|
||||
});
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
factories: (_: any, { first, block }: { first: number, block: BlockHeight }) => {
|
||||
log('factories', first, block);
|
||||
|
||||
const res = factories.filter((factory: Entity) => factory.blockNumber === block.number)
|
||||
.slice(0, first);
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
mints: (_: any, { first, orderBy, orderDirection, where }: { first: number, orderBy: MintOrderBy, orderDirection: OrderDirection, where: MintFilter }) => {
|
||||
log('mints', first, orderBy, orderDirection, where);
|
||||
|
||||
const res = mints.filter((mint: Entity) => {
|
||||
if (mint.blockNumber === latestBlockNumber) {
|
||||
return Object.entries(where || {})
|
||||
.every(([field, value]) => mint[field] === value);
|
||||
}
|
||||
|
||||
return false;
|
||||
}).slice(0, first)
|
||||
.sort((a: any, b: any) => {
|
||||
a = a[orderBy];
|
||||
b = b[orderBy];
|
||||
return orderDirection === OrderDirection.asc ? (a - b) : (b - a);
|
||||
})
|
||||
.map(mint => {
|
||||
return {
|
||||
...mint,
|
||||
pool: pools.find(pool => pool.id === mint.pool),
|
||||
transaction: transactions.find(transaction => transaction.id === mint.transaction)
|
||||
};
|
||||
});
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
pool: (_: any, { id: poolId }: { id: string }) => {
|
||||
log('pool', poolId);
|
||||
const res = pools.find((pool: Entity) => pool.id === poolId);
|
||||
|
||||
if (res) {
|
||||
return {
|
||||
...res,
|
||||
token0: tokens.find(token => token.id === res.token0),
|
||||
token1: tokens.find(token => token.id === res.token1)
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
pools: (_: any, { first, orderBy, orderDirection, where, block }: { first: number, orderBy: PoolOrderBy, orderDirection: OrderDirection, where: PoolFilter, block: BlockHeight }) => {
|
||||
log('pools', first, orderBy, orderDirection, where, block);
|
||||
|
||||
const res = pools.filter((pool: Entity) => {
|
||||
if (pool.blockNumber === latestBlockNumber) {
|
||||
return Object.entries(where || {})
|
||||
.every(([filter, value]) => {
|
||||
if (filter.endsWith('_in')) {
|
||||
const field = filter.substring(0, filter.length - 3);
|
||||
|
||||
return value.some((el: any) => el === pool[field]);
|
||||
}
|
||||
|
||||
return pool[filter] === value;
|
||||
});
|
||||
}
|
||||
|
||||
return false;
|
||||
}).slice(0, first)
|
||||
.sort((a: any, b: any) => {
|
||||
a = a[orderBy];
|
||||
b = b[orderBy];
|
||||
return orderDirection === OrderDirection.asc ? (a - b) : (b - a);
|
||||
})
|
||||
.map(pool => {
|
||||
return {
|
||||
...pool,
|
||||
token0: tokens.find(token => token.id === pool.token0),
|
||||
token1: tokens.find(token => token.id === pool.token1)
|
||||
};
|
||||
});
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
token: (_: any, { id: tokenId, block }: { id: string, block: BlockHeight }) => {
|
||||
log('token', tokenId, block);
|
||||
const res = tokens.find((token: Entity) => token.blockNumber === block.number && token.id === tokenId);
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
tokens: (_: any, { orderBy, orderDirection, where }: { orderBy: TokenOrderBy, orderDirection: OrderDirection, where: TokenFilter }) => {
|
||||
log('tokens', orderBy, orderDirection, where);
|
||||
|
||||
const res = tokens.filter((token: Entity) => {
|
||||
if (token.blockNumber === latestBlockNumber) {
|
||||
return Object.entries(where || {})
|
||||
.every(([filter, value]) => {
|
||||
if (filter.endsWith('_in')) {
|
||||
const field = filter.substring(0, filter.length - 3);
|
||||
|
||||
return value.some((el: any) => el === token[field]);
|
||||
}
|
||||
|
||||
return token[filter] === value;
|
||||
});
|
||||
}
|
||||
|
||||
return false;
|
||||
}).sort((a: any, b: any) => {
|
||||
a = a[orderBy];
|
||||
b = b[orderBy];
|
||||
return orderDirection === OrderDirection.asc ? (a - b) : (b - a);
|
||||
});
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
transactions: (_: any, { first, orderBy, orderDirection }: { first: number, orderBy: TransactionOrderBy, orderDirection: OrderDirection }) => {
|
||||
log('transactions', first, orderBy, orderDirection);
|
||||
|
||||
const res = transactions.filter((transaction: Entity) => transaction.blockNumber === latestBlockNumber)
|
||||
.slice(0, first)
|
||||
.sort((a: any, b: any) => {
|
||||
a = a[orderBy];
|
||||
b = b[orderBy];
|
||||
return orderDirection === OrderDirection.asc ? (a - b) : (b - a);
|
||||
})
|
||||
.map(transaction => {
|
||||
return {
|
||||
...transaction,
|
||||
burns: burns.filter(burn => burn.transaction === transaction.id),
|
||||
mints: mints.filter(mint => mint.transaction === transaction.id),
|
||||
swaps: swaps.filter(swap => swap.transaction === transaction.id)
|
||||
};
|
||||
});
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
swaps: (_: any, { first, orderBy, orderDirection, where }: { first: number, orderBy: SwapOrderBy, orderDirection: OrderDirection, where: SwapFilter }) => {
|
||||
log('swaps', first, orderBy, orderDirection, where);
|
||||
|
||||
const res = swaps.filter((swap: Entity) => {
|
||||
if (swap.blockNumber === latestBlockNumber) {
|
||||
return Object.entries(where || {})
|
||||
.every(([field, value]) => swap[field] === value);
|
||||
}
|
||||
|
||||
return false;
|
||||
}).slice(0, first)
|
||||
.sort((a: any, b: any) => {
|
||||
a = a[orderBy];
|
||||
b = b[orderBy];
|
||||
return orderDirection === OrderDirection.asc ? (a - b) : (b - a);
|
||||
})
|
||||
.map(swap => {
|
||||
return {
|
||||
...swap,
|
||||
pool: pools.find(pool => pool.id === swap.pool),
|
||||
transaction: transactions.find(transaction => transaction.id === swap.transaction)
|
||||
};
|
||||
});
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
poolDayDatas: (_: any, { skip, first, orderBy, orderDirection, where }: { skip: number, first: number, orderBy: DayDataOrderBy, orderDirection: OrderDirection, where: DayDataFilter }) => {
|
||||
log('poolDayDatas', skip, first, orderBy, orderDirection, where);
|
||||
|
||||
const res = poolDayDatas.filter((poolDayData: Entity) => {
|
||||
if (poolDayData.blockNumber === latestBlockNumber) {
|
||||
return Object.entries(where || {})
|
||||
.every(([filter, value]) => {
|
||||
if (filter.endsWith('_gt')) {
|
||||
const field = filter.substring(0, filter.length - 3);
|
||||
|
||||
return poolDayData[field] > value;
|
||||
}
|
||||
|
||||
return poolDayData[filter] === value;
|
||||
});
|
||||
}
|
||||
|
||||
return false;
|
||||
}).slice(skip, skip + first)
|
||||
.sort((a: any, b: any) => {
|
||||
a = a[orderBy];
|
||||
b = b[orderBy];
|
||||
return orderDirection === OrderDirection.asc ? (a - b) : (b - a);
|
||||
});
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
tokenDayDatas: (_: any, { skip, first, orderBy, orderDirection, where }: { skip: number, first: number, orderBy: DayDataOrderBy, orderDirection: OrderDirection, where: DayDataFilter }) => {
|
||||
log('tokenDayDatas', skip, first, orderBy, orderDirection, where);
|
||||
|
||||
const res = tokenDayDatas.filter((tokenDayData: Entity) => {
|
||||
if (tokenDayData.blockNumber === latestBlockNumber) {
|
||||
return Object.entries(where || {})
|
||||
.every(([filter, value]) => {
|
||||
if (filter.endsWith('_gt')) {
|
||||
const field = filter.substring(0, filter.length - 3);
|
||||
|
||||
return tokenDayData[field] > value;
|
||||
}
|
||||
|
||||
return tokenDayData[filter] === value;
|
||||
});
|
||||
}
|
||||
|
||||
return false;
|
||||
}).slice(skip, skip + first)
|
||||
.sort((a: any, b: any) => {
|
||||
a = a[orderBy];
|
||||
b = b[orderBy];
|
||||
return orderDirection === OrderDirection.asc ? (a - b) : (b - a);
|
||||
});
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
uniswapDayDatas: (_: any, { skip, first, orderBy, orderDirection, where }: { skip: number, first: number, orderBy: DayDataOrderBy, orderDirection: OrderDirection, where: DayDataFilter }) => {
|
||||
log('uniswapDayDatas', skip, first, orderBy, orderDirection, where);
|
||||
|
||||
const res = uniswapDayDatas.filter((uniswapDayData: Entity) => {
|
||||
if (uniswapDayData.blockNumber === latestBlockNumber) {
|
||||
return Object.entries(where || {})
|
||||
.every(([filter, value]) => {
|
||||
if (filter.endsWith('_gt')) {
|
||||
const field = filter.substring(0, filter.length - 3);
|
||||
|
||||
return uniswapDayData[field] > value;
|
||||
}
|
||||
|
||||
return uniswapDayData[filter] === value;
|
||||
});
|
||||
}
|
||||
|
||||
return false;
|
||||
}).slice(skip, skip + first)
|
||||
.sort((a: any, b: any) => {
|
||||
a = a[orderBy];
|
||||
b = b[orderBy];
|
||||
return orderDirection === OrderDirection.asc ? (a - b) : (b - a);
|
||||
});
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
ticks: (_: any, { skip, first, where, block }: { skip: number, first: number, where: TickFilter, block: BlockHeight }) => {
|
||||
log('ticks', skip, first, where, block);
|
||||
|
||||
const res = ticks.filter((tick: Entity) => {
|
||||
if (tick.blockNumber === block.number) {
|
||||
return Object.entries(where || {})
|
||||
.every(([filter, value]) => {
|
||||
if (filter.endsWith('_gte')) {
|
||||
const field = filter.substring(0, filter.length - 3);
|
||||
|
||||
return tick[field] >= value;
|
||||
}
|
||||
|
||||
if (filter.endsWith('_lte')) {
|
||||
const field = filter.substring(0, filter.length - 3);
|
||||
|
||||
return tick[field] <= value;
|
||||
}
|
||||
|
||||
return tick[filter] === value;
|
||||
});
|
||||
}
|
||||
|
||||
return false;
|
||||
}).slice(skip, skip + first);
|
||||
|
||||
return res;
|
||||
},
|
||||
|
||||
tokenHourDatas: (_: any, { skip, first, orderBy, orderDirection, where }: { skip: number, first: number, orderBy: TokenHourDataOrderBy, orderDirection: OrderDirection, where: TokenHourDataFilter }) => {
|
||||
log('tokenHourDatas', skip, first, orderBy, orderDirection, where);
|
||||
|
||||
const res = tokenHourDatas.filter((tokenHourData: Entity) => {
|
||||
if (tokenHourData.blockNumber === latestBlockNumber) {
|
||||
return Object.entries(where || {})
|
||||
.every(([filter, value]) => {
|
||||
if (filter.endsWith('_gt')) {
|
||||
const field = filter.substring(0, filter.length - 3);
|
||||
|
||||
return tokenHourData[field] > value;
|
||||
}
|
||||
|
||||
return tokenHourData[filter] === value;
|
||||
});
|
||||
}
|
||||
|
||||
return false;
|
||||
}).slice(skip, skip + first)
|
||||
.sort((a: any, b: any) => {
|
||||
a = a[orderBy];
|
||||
b = b[orderBy];
|
||||
return orderDirection === OrderDirection.asc ? (a - b) : (b - a);
|
||||
});
|
||||
|
||||
return res;
|
||||
}
|
||||
}
|
||||
};
|
||||
};
|
25  packages/uni-info-watcher/src/mock/server.spec.ts  Normal file
@@ -0,0 +1,25 @@
import 'mocha';
import { expect } from 'chai';
import { GraphQLClient } from 'graphql-request';

import { queryBundle } from '../queries';
import { Data } from './data';

describe('server', () => {
  const client = new GraphQLClient('http://localhost:3003/graphql');
  const data = Data.getInstance();

  it('query bundle', async () => {
    const { bundles } = data.entities;
    expect(bundles.length).to.be.greaterThan(0);

    for (let i = 0; i < bundles.length; i++) {
      const { id, blockNumber, ethPriceUSD } = bundles[i];

      // Bundle query.
      const result = await client.request(queryBundle, { id, blockNumber });
      expect(result.bundle.id).to.equal(id);
      expect(result.bundle.ethPriceUSD).to.equal(ethPriceUSD);
    }
  });
});
10  packages/uni-info-watcher/src/queries.ts  Normal file
@@ -0,0 +1,10 @@
import { gql } from 'graphql-request';

export const queryBundle = gql`
  query getBundle($id: ID!, $blockNumber: Int!) {
    bundle(id: $id, block: { number: $blockNumber }) {
      id
      ethPriceUSD
    }
  }
`;
14  packages/uni-info-watcher/src/resolvers.ts  Normal file
@@ -0,0 +1,14 @@
import assert from 'assert';
import BigInt from 'apollo-type-bigint';

import { Indexer } from './indexer';

export const createResolvers = async (indexer: Indexer): Promise<any> => {
  assert(indexer);

  return {
    BigInt: new BigInt('bigInt'),

    Query: {}
  };
};
386  packages/uni-info-watcher/src/schema.ts  Normal file
@@ -0,0 +1,386 @@
|
||||
import { gql } from '@apollo/client/core';
|
||||
|
||||
export default gql`
|
||||
scalar BigDecimal
|
||||
|
||||
scalar BigInt
|
||||
|
||||
scalar Bytes
|
||||
|
||||
input Block_height {
|
||||
hash: Bytes
|
||||
number: Int
|
||||
}
|
||||
|
||||
type Pool {
|
||||
feeTier: BigInt!
|
||||
id: ID!
|
||||
liquidity: BigInt!
|
||||
sqrtPrice: BigInt!
|
||||
tick: BigInt
|
||||
token0: Token!
|
||||
token0Price: BigDecimal!
|
||||
token1: Token!
|
||||
token1Price: BigDecimal!
|
||||
totalValueLockedToken0: BigDecimal!
|
||||
totalValueLockedToken1: BigDecimal!
|
||||
totalValueLockedUSD: BigDecimal!
|
||||
txCount: BigInt!
|
||||
volumeUSD: BigDecimal!
|
||||
}
|
||||
|
||||
type PoolDayData {
|
||||
date: Int!
|
||||
id: ID!
|
||||
tvlUSD: BigDecimal!
|
||||
volumeUSD: BigDecimal!
|
||||
}
|
||||
|
||||
type Tick {
|
||||
id: ID!
|
||||
liquidityGross: BigInt!
|
||||
liquidityNet: BigInt!
|
||||
price0: BigDecimal!
|
||||
price1: BigDecimal!
|
||||
tickIdx: BigInt!
|
||||
}
|
||||
|
||||
type Mint {
|
||||
amount0: BigDecimal!
|
||||
amount1: BigDecimal!
|
||||
amountUSD: BigDecimal
|
||||
id: ID!
|
||||
origin: Bytes!
|
||||
owner: Bytes!
|
||||
pool: Pool!
|
||||
sender: Bytes
|
||||
timestamp: BigInt!
|
||||
transaction: Transaction!
|
||||
}
|
||||
|
||||
type Swap {
|
||||
amount0: BigDecimal!
|
||||
amount1: BigDecimal!
|
||||
amountUSD: BigDecimal!
|
||||
id: ID!
|
||||
origin: Bytes!
|
||||
pool: Pool!
|
||||
timestamp: BigInt!
|
||||
transaction: Transaction!
|
||||
}
|
||||
|
||||
type Burn {
|
||||
amount0: BigDecimal!
|
||||
amount1: BigDecimal!
|
||||
amountUSD: BigDecimal
|
||||
id: ID!
|
||||
origin: Bytes!
|
||||
owner: Bytes
|
||||
pool: Pool!
|
||||
timestamp: BigInt!
|
||||
transaction: Transaction!
|
||||
}
|
||||
|
||||
type UniswapDayData {
|
||||
date: Int!
|
||||
id: ID!
|
||||
tvlUSD: BigDecimal!
|
||||
volumeUSD: BigDecimal!
|
||||
}
|
||||
|
||||
type Factory {
|
||||
id: ID!
|
||||
totalFeesUSD: BigDecimal!
|
||||
totalValueLockedUSD: BigDecimal!
|
||||
totalVolumeUSD: BigDecimal!
|
||||
txCount: BigInt!
|
||||
}
|
||||
|
||||
type Transaction {
|
||||
burns(skip: Int = 0, first: Int = 100, orderBy: Burn_orderBy, orderDirection: OrderDirection, where: Burn_filter): [Burn]!
|
||||
id: ID!
|
||||
mints(skip: Int = 0, first: Int = 100, orderBy: Mint_orderBy, orderDirection: OrderDirection, where: Mint_filter): [Mint]!
|
||||
swaps(skip: Int = 0, first: Int = 100, orderBy: Swap_orderBy, orderDirection: OrderDirection, where: Swap_filter): [Swap]!
|
||||
timestamp: BigInt!
|
||||
}
|
||||
|
||||
type Token {
|
||||
derivedETH: BigDecimal!
|
||||
feesUSD: BigDecimal!
|
||||
id: ID!
|
||||
name: String!
|
||||
symbol: String!
|
||||
totalValueLocked: BigDecimal!
|
||||
totalValueLockedUSD: BigDecimal!
|
||||
txCount: BigInt!
|
||||
volume: BigDecimal!
|
||||
volumeUSD: BigDecimal!
|
||||
}
|
||||
|
||||
type TokenDayData {
|
||||
date: Int!
|
||||
id: ID!
|
||||
totalValueLockedUSD: BigDecimal!
|
||||
volumeUSD: BigDecimal!
|
||||
}
|
||||
|
||||
type Bundle {
|
||||
ethPriceUSD: BigDecimal!
|
||||
id: ID!
|
||||
}
|
||||
|
||||
type TokenHourData {
|
||||
close: BigDecimal!
|
||||
high: BigDecimal!
|
||||
id: ID!
|
||||
low: BigDecimal!
|
||||
open: BigDecimal!
|
||||
periodStartUnix: Int!
|
||||
}
|
||||
|
||||
enum OrderDirection {
|
||||
asc
|
||||
desc
|
||||
}
|
||||
|
||||
input PoolDayData_filter {
|
||||
date_gt: Int
|
||||
pool: String
|
||||
}
|
||||
|
||||
enum PoolDayData_orderBy {
|
||||
date
|
||||
}
|
||||
|
||||
input Pool_filter {
|
||||
id: ID
|
||||
id_in: [ID!]
|
||||
token0: String
|
||||
token0_in: [String!]
|
||||
token1: String
|
||||
token1_in: [String!]
|
||||
}
|
||||
|
||||
enum Pool_orderBy {
|
||||
totalValueLockedUSD
|
||||
}
|
||||
|
||||
input Tick_filter {
|
||||
poolAddress: String
|
||||
tickIdx_gte: BigInt
|
||||
tickIdx_lte: BigInt
|
||||
}
|
||||
|
||||
input Mint_filter {
|
||||
pool: String
|
||||
token0: String
|
||||
token1: String
|
||||
}
|
||||
|
||||
enum Mint_orderBy {
|
||||
timestamp
|
||||
}
|
||||
|
||||
input Swap_filter {
|
||||
pool: String
|
||||
token0: String
|
||||
token1: String
|
||||
}
|
||||
|
||||
enum Swap_orderBy {
|
||||
timestamp
|
||||
}
|
||||
|
||||
input Burn_filter {
|
||||
pool: String
|
||||
token0: String
|
||||
token1: String
|
||||
}
|
||||
|
||||
enum Burn_orderBy {
|
||||
timestamp
|
||||
}
|
||||
|
||||
enum UniswapDayData_orderBy {
|
||||
date
|
||||
}
|
||||
|
||||
input UniswapDayData_filter {
|
||||
date_gt: Int
|
||||
}
|
||||
|
||||
enum Transaction_orderBy {
|
||||
timestamp
|
||||
}
|
||||
|
||||
input Token_filter {
|
||||
id: ID
|
||||
id_in: [ID!]
|
||||
name_contains: String
|
||||
symbol_contains: String
|
||||
}
|
||||
|
||||
enum Token_orderBy {
|
||||
totalValueLockedUSD
|
||||
}
|
||||
|
||||
input TokenDayData_filter {
  date_gt: Int
  token: String
}

enum TokenDayData_orderBy {
  date
}

input TokenHourData_filter {
  periodStartUnix_gt: Int
  token: String
}

enum TokenHourData_orderBy {
  periodStartUnix
}

type Query {
  bundle(
    id: ID!

    """
    The block at which the query should be executed. Can either be an '{ number: Int }'
    containing the block number or a '{ hash: Bytes }' value containing a block hash.
    Defaults to the latest block when omitted.
    """
    block: Block_height
  ): Bundle

  bundles(
    first: Int = 100

    """
    The block at which the query should be executed. Can either be an '{ number: Int }'
    containing the block number or a '{ hash: Bytes }' value containing a block hash.
    Defaults to the latest block when omitted.
    """
    block: Block_height
  ): [Bundle!]!

  burns(
    first: Int = 100
    orderBy: Burn_orderBy
    orderDirection: OrderDirection
    where: Burn_filter
  ): [Burn!]!

  factories(
    first: Int = 100

    """
    The block at which the query should be executed. Can either be an '{ number: Int }'
    containing the block number or a '{ hash: Bytes }' value containing a block hash.
    Defaults to the latest block when omitted.
    """
    block: Block_height
  ): [Factory!]!

  mints(
    first: Int = 100
    orderBy: Mint_orderBy
    orderDirection: OrderDirection
    where: Mint_filter
  ): [Mint!]!

  pool(
    id: ID!
  ): Pool

  poolDayDatas(
    skip: Int = 0
    first: Int = 100
    orderBy: PoolDayData_orderBy
    orderDirection: OrderDirection
    where: PoolDayData_filter
  ): [PoolDayData!]!

  pools(
    first: Int = 100
    orderBy: Pool_orderBy
    orderDirection: OrderDirection
    where: Pool_filter

    """
    The block at which the query should be executed. Can either be an '{ number: Int }'
    containing the block number or a '{ hash: Bytes }' value containing a block hash.
    Defaults to the latest block when omitted.
    """
    block: Block_height
  ): [Pool!]!

  swaps(
    first: Int = 100
    orderBy: Swap_orderBy
    orderDirection: OrderDirection
    where: Swap_filter
  ): [Swap!]!

  ticks(
    skip: Int = 0
    first: Int = 100
    where: Tick_filter

    """
    The block at which the query should be executed. Can either be an '{ number: Int }'
    containing the block number or a '{ hash: Bytes }' value containing a block hash.
    Defaults to the latest block when omitted.
    """
    block: Block_height
  ): [Tick!]!

  token(
    id: ID!

    """
    The block at which the query should be executed. Can either be an '{ number: Int }'
    containing the block number or a '{ hash: Bytes }' value containing a block hash.
    Defaults to the latest block when omitted.
    """
    block: Block_height
  ): Token

  tokenDayDatas(
    skip: Int = 0
    first: Int = 100
    orderBy: TokenDayData_orderBy
    orderDirection: OrderDirection
    where: TokenDayData_filter
  ): [TokenDayData!]!

  tokenHourDatas(
    skip: Int = 0
    first: Int = 100
    orderBy: TokenHourData_orderBy
    orderDirection: OrderDirection
    where: TokenHourData_filter
  ): [TokenHourData!]!

  tokens(
    orderBy: Token_orderBy
    orderDirection: OrderDirection
    where: Token_filter
  ): [Token!]!

  transactions(
    first: Int = 100
    orderBy: Transaction_orderBy
    orderDirection: OrderDirection
  ): [Transaction!]!

  uniswapDayDatas(
    skip: Int = 0
    first: Int = 100
    orderBy: UniswapDayData_orderBy
    orderDirection: OrderDirection
    where: UniswapDayData_filter
  ): [UniswapDayData!]!
}
`;
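The `Query` fields above follow the same pagination and filtering conventions as the rest of the frontend schema: `skip`/`first` for paging, `orderBy`/`orderDirection` for sorting, `where` for entity filters, and an optional `block` argument on some fields for historical reads. The snippet below is a minimal, hypothetical smoke test of those arguments; the endpoint URL, block number, filter value, and field selections are placeholders chosen for illustration, not values taken from this commit.

```typescript
// Hypothetical smoke test for the Query arguments declared above.
// Assumptions: the watcher is already running and serving GraphQL at ENDPOINT
// (a placeholder URL), and Node 18+ is used so a global fetch is available
// (on older Node versions, substitute node-fetch).
const ENDPOINT = 'http://localhost:3003/graphql'; // placeholder; use the host/port from your config

const query = `
  query {
    ticks(skip: 0, first: 2, block: { number: 2 }) {
      id
    }
    tokenHourDatas(first: 3, orderBy: periodStartUnix, where: { periodStartUnix_gt: 0 }) {
      periodStartUnix
    }
  }
`;

const check = async (): Promise<void> => {
  const res = await fetch(ENDPOINT, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ query })
  });

  // Print the raw GraphQL response ({ data, errors }).
  console.log(JSON.stringify(await res.json(), null, 2));
};

check().catch(console.error);
```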
90
packages/uni-info-watcher/src/server.ts
Normal file
@ -0,0 +1,90 @@
import assert from 'assert';
import 'reflect-metadata';
import express, { Application } from 'express';
import { ApolloServer, PubSub } from 'apollo-server-express';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import debug from 'debug';
import 'graphql-import-node';
import { createServer } from 'http';

import { getCache } from '@vulcanize/cache';
import { EthClient } from '@vulcanize/ipld-eth-client';

import typeDefs from './schema';

import { createResolvers as createMockResolvers } from './mock/resolvers';
import { createResolvers } from './resolvers';
import { Indexer } from './indexer';
import { Database } from './database';
import { EventWatcher } from './events';
import { getConfig } from './config';

const log = debug('vulcanize:server');

export const main = async (): Promise<any> => {
  const argv = await yargs(hideBin(process.argv))
    .option('f', {
      alias: 'config-file',
      demandOption: true,
      describe: 'configuration file path (toml)',
      type: 'string'
    })
    .argv;

  const config = await getConfig(argv.f);

  assert(config.server, 'Missing server config');

  const { host, port } = config.server;

  const { upstream, database: dbConfig } = config;

  assert(dbConfig, 'Missing database config');

  const db = new Database(dbConfig);
  await db.init();

  assert(upstream, 'Missing upstream config');
  const { gqlEndpoint, gqlSubscriptionEndpoint, cache: cacheConfig } = upstream;
  assert(gqlEndpoint, 'Missing upstream gqlEndpoint');
  assert(gqlSubscriptionEndpoint, 'Missing upstream gqlSubscriptionEndpoint');

  const cache = await getCache(cacheConfig);

  const ethClient = new EthClient({ gqlEndpoint, gqlSubscriptionEndpoint, cache });

  // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
  // Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
  const pubsub = new PubSub();
  const indexer = new Indexer(db, ethClient, pubsub);

  const eventWatcher = new EventWatcher(ethClient, indexer);
  await eventWatcher.start();

  const resolvers = process.env.MOCK ? await createMockResolvers() : await createResolvers(indexer);

  const app: Application = express();
  const server = new ApolloServer({
    typeDefs,
    resolvers
  });

  await server.start();
  server.applyMiddleware({ app });

  const httpServer = createServer(app);
  server.installSubscriptionHandlers(httpServer);

  httpServer.listen(port, host, () => {
    log(`Server is listening on host ${host} port ${port}`);
  });

  return { app, server };
};

main().then(() => {
  log('Starting server...');
}).catch(err => {
  log(err);
});
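server.ts only relies on a handful of fields from the object returned by getConfig(); the actual Config type lives in ./config and is not part of this diff. As a rough reference, the shape implied by the asserts and destructuring above is sketched here. The property types are assumptions for illustration, not the real definitions.

```typescript
// Hypothetical sketch of the config shape consumed by main() above, inferred only
// from the fields server.ts destructures. The real type is defined in ./config.
export interface WatcherConfigSketch {
  server: {
    host: string;                      // HTTP listen host
    port: number;                      // HTTP listen port
  };
  database: Record<string, unknown>;   // passed straight through to new Database(...)
  upstream: {
    gqlEndpoint: string;               // upstream GraphQL endpoint (asserted present)
    gqlSubscriptionEndpoint: string;   // upstream GraphQL subscription endpoint (asserted present)
    cache: Record<string, unknown>;    // passed to getCache(...)
  };
}
```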
2
packages/uni-info-watcher/src/types/common/main.d.ts
vendored
Normal file
@ -0,0 +1,2 @@
// https://medium.com/@steveruiz/using-a-javascript-library-without-type-declarations-in-a-typescript-project-3643490015f3
declare module 'canonical-json'
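The ambient declaration above exists so the compiler accepts importing the untyped canonical-json package (the import is simply typed as any). A minimal usage sketch follows, assuming the package's default export is a JSON.stringify-style function with deterministically sorted keys; that behaviour is an assumption here, not something shown in this commit.

```typescript
import canonicalJson from 'canonical-json';

// With the ambient `declare module 'canonical-json'` in place this import compiles,
// but the function is untyped (any). It is assumed to serialize objects with keys in
// sorted order, so logically equal objects produce identical strings.
const a = canonicalJson({ b: 2, a: 1 });
const b = canonicalJson({ a: 1, b: 2 });
console.log(a, a === b); // expected (under the assumption above): {"a":1,"b":2} true
```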
5
packages/uni-info-watcher/src/types/common/package.json
Normal file
@ -0,0 +1,5 @@
{
  "name": "common",
  "version": "0.1.0",
  "typings": "main.d.ts"
}
77
packages/uni-info-watcher/tsconfig.json
Normal file
@ -0,0 +1,77 @@
{
  "compilerOptions": {
    /* Visit https://aka.ms/tsconfig.json to read more about this file */

    /* Basic Options */
    // "incremental": true,                   /* Enable incremental compilation */
    "target": "es5",                          /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */
    "module": "commonjs",                     /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
    "lib": [ "ES5", "ES6", "ES2020" ],        /* Specify library files to be included in the compilation. */
    // "allowJs": true,                       /* Allow javascript files to be compiled. */
    // "checkJs": true,                       /* Report errors in .js files. */
    // "jsx": "preserve",                     /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */
    "declaration": true,                      /* Generates corresponding '.d.ts' file. */
    // "declarationMap": true,                /* Generates a sourcemap for each corresponding '.d.ts' file. */
    // "sourceMap": true,                     /* Generates corresponding '.map' file. */
    // "outFile": "./",                       /* Concatenate and emit output to single file. */
    "outDir": "dist",                         /* Redirect output structure to the directory. */
    // "rootDir": "./",                       /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
    // "composite": true,                     /* Enable project compilation */
    // "tsBuildInfoFile": "./",               /* Specify file to store incremental compilation information */
    // "removeComments": true,                /* Do not emit comments to output. */
    // "noEmit": true,                        /* Do not emit outputs. */
    // "importHelpers": true,                 /* Import emit helpers from 'tslib'. */
    "downlevelIteration": true,               /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
    // "isolatedModules": true,               /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */

    /* Strict Type-Checking Options */
    "strict": true,                           /* Enable all strict type-checking options. */
    // "noImplicitAny": true,                 /* Raise error on expressions and declarations with an implied 'any' type. */
    // "strictNullChecks": true,              /* Enable strict null checks. */
    // "strictFunctionTypes": true,           /* Enable strict checking of function types. */
    // "strictBindCallApply": true,           /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
    // "strictPropertyInitialization": true,  /* Enable strict checking of property initialization in classes. */
    // "noImplicitThis": true,                /* Raise error on 'this' expressions with an implied 'any' type. */
    // "alwaysStrict": true,                  /* Parse in strict mode and emit "use strict" for each source file. */

    /* Additional Checks */
    // "noUnusedLocals": true,                /* Report errors on unused locals. */
    // "noUnusedParameters": true,            /* Report errors on unused parameters. */
    // "noImplicitReturns": true,             /* Report error when not all code paths in function return a value. */
    // "noFallthroughCasesInSwitch": true,    /* Report errors for fallthrough cases in switch statement. */
    // "noUncheckedIndexedAccess": true,      /* Include 'undefined' in index signature results */
    // "noImplicitOverride": true,            /* Ensure overriding members in derived classes are marked with an 'override' modifier. */
    // "noPropertyAccessFromIndexSignature": true, /* Require undeclared properties from index signatures to use element accesses. */

    /* Module Resolution Options */
    "moduleResolution": "node",               /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
    // "baseUrl": "./",                       /* Base directory to resolve non-absolute module names. */
    // "paths": {},                           /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
    // "rootDirs": [],                        /* List of root folders whose combined content represents the structure of the project at runtime. */
    "typeRoots": [
      "./src/types"
    ],                                        /* List of folders to include type definitions from. */
    // "types": [],                           /* Type declaration files to be included in compilation. */
    // "allowSyntheticDefaultImports": true,  /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
    "esModuleInterop": true,                  /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
    // "preserveSymlinks": true,              /* Do not resolve the real path of symlinks. */
    // "allowUmdGlobalAccess": true,          /* Allow accessing UMD globals from modules. */

    /* Source Map Options */
    // "sourceRoot": "",                      /* Specify the location where debugger should locate TypeScript files instead of source locations. */
    // "mapRoot": "",                         /* Specify the location where debugger should locate map files instead of generated locations. */
    // "inlineSourceMap": true,               /* Emit a single file with source maps instead of having a separate file. */
    // "inlineSources": true,                 /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */

    /* Experimental Options */
    "experimentalDecorators": true,           /* Enables experimental support for ES7 decorators. */
    // "emitDecoratorMetadata": true,         /* Enables experimental support for emitting type metadata for decorators. */

    /* Advanced Options */
    "skipLibCheck": true,                     /* Skip type checking of declaration files. */
    "forceConsistentCasingInFileNames": true, /* Disallow inconsistently-cased references to the same file. */
    "resolveJsonModule": true                 /* Enabling the option allows importing JSON, and validating the types in that JSON file. */
  },
  "include": ["src"],
  "exclude": ["dist"]
}
80
yarn.lock
@ -2160,6 +2160,11 @@
  resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.19.tgz#80f286b515897413c7a35bdda069cc80f2344233"
  integrity sha512-jRJgpRBuY+7izT7/WNXP/LsMO9YonsstuL+xuvycDyESpoDoIAsMd7suwpB4h9oEWB+ZlPTqJJ8EHomzNhwTPQ==

"@types/chance@^1.1.2":
  version "1.1.2"
  resolved "https://registry.yarnpkg.com/@types/chance/-/chance-1.1.2.tgz#0f397c17e9d5a9e83914e767ca6f419b2ded09dd"
  integrity sha512-OYwnnh2D7QAleRpPWnBQBfDZMlapMHoNeuvyEg7WrDiMApgcKOnXgyiVAl+OzBvhyQmfYkx7YtFXOm8E9IYsNw==

"@types/connect@*":
  version "3.4.34"
  resolved "https://registry.yarnpkg.com/@types/connect/-/connect-3.4.34.tgz#170a40223a6d666006d93ca128af2beb1d9b1901"
@ -4301,6 +4306,11 @@ chalk@^4.0.0, chalk@^4.1.0:
    ansi-styles "^4.1.0"
    supports-color "^7.1.0"

chance@^1.1.7:
  version "1.1.7"
  resolved "https://registry.yarnpkg.com/chance/-/chance-1.1.7.tgz#e99dde5ac16681af787b5ba94c8277c090d6cfe8"
  integrity sha512-bua/2cZEfzS6qPm0vi3JEvGNbriDLcMj9lKxCQOjUcCJRcyjA7umP0zZm6bKWWlBN04vA0L99QGH/CZQawr0eg==

chardet@^0.7.0:
  version "0.7.0"
  resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e"
@ -6977,7 +6987,7 @@ glob@7.1.6:
    once "^1.3.0"
    path-is-absolute "^1.0.0"

glob@^7.0.0, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@~7.1.6:
glob@7.1.7, glob@^7.0.0, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@~7.1.6:
  version "7.1.7"
  resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90"
  integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==
@ -8021,6 +8031,11 @@ is-typedarray@^1.0.0, is-typedarray@~1.0.0:
  resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
  integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=

is-unicode-supported@^0.1.0:
  version "0.1.0"
  resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz#3f26c76a809593b52bfa2ecb5710ed2779b522a7"
  integrity sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==

is-url@^1.2.4:
  version "1.2.4"
  resolved "https://registry.yarnpkg.com/is-url/-/is-url-1.2.4.tgz#04a4df46d28c4cff3d73d01ff06abeb318a1aa52"
@ -8128,6 +8143,13 @@ js-yaml@4.0.0:
  dependencies:
    argparse "^2.0.1"

js-yaml@4.1.0, js-yaml@^4.0.0:
  version "4.1.0"
  resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602"
  integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==
  dependencies:
    argparse "^2.0.1"

js-yaml@^3.13.1:
  version "3.14.1"
  resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537"
@ -8136,13 +8158,6 @@ js-yaml@^3.13.1:
    argparse "^1.0.7"
    esprima "^4.0.0"

js-yaml@^4.0.0:
  version "4.1.0"
  resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602"
  integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==
  dependencies:
    argparse "^2.0.1"

jsbn@~0.1.0:
  version "0.1.1"
  resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513"
@ -8838,6 +8853,14 @@ log-symbols@4.0.0:
  dependencies:
    chalk "^4.0.0"

log-symbols@4.1.0:
  version "4.1.0"
  resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.1.0.tgz#3fbdbb95b4683ac9fc785111e792e558d4abd503"
  integrity sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==
  dependencies:
    chalk "^4.1.0"
    is-unicode-supported "^0.1.0"

loglevel@^1.6.7:
  version "1.7.1"
  resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.7.1.tgz#005fde2f5e6e47068f935ff28573e125ef72f197"
@ -9455,6 +9478,37 @@ mocha@^8.4.0:
    yargs-parser "20.2.4"
    yargs-unparser "2.0.0"

mocha@^9.0.1:
  version "9.0.1"
  resolved "https://registry.yarnpkg.com/mocha/-/mocha-9.0.1.tgz#01e66b7af0012330c0a38c4b6eaa6d92b8a81bf9"
  integrity sha512-9zwsavlRO+5csZu6iRtl3GHImAbhERoDsZwdRkdJ/bE+eVplmoxNKE901ZJ9LdSchYBjSCPbjKc5XvcAri2ylw==
  dependencies:
    "@ungap/promise-all-settled" "1.1.2"
    ansi-colors "4.1.1"
    browser-stdout "1.3.1"
    chokidar "3.5.1"
    debug "4.3.1"
    diff "5.0.0"
    escape-string-regexp "4.0.0"
    find-up "5.0.0"
    glob "7.1.7"
    growl "1.10.5"
    he "1.2.0"
    js-yaml "4.1.0"
    log-symbols "4.1.0"
    minimatch "3.0.4"
    ms "2.1.3"
    nanoid "3.1.23"
    serialize-javascript "5.0.1"
    strip-json-comments "3.1.1"
    supports-color "8.1.1"
    which "2.0.2"
    wide-align "1.1.3"
    workerpool "6.1.4"
    yargs "16.2.0"
    yargs-parser "20.2.4"
    yargs-unparser "2.0.0"

mock-fs@^4.1.0:
  version "4.14.0"
  resolved "https://registry.yarnpkg.com/mock-fs/-/mock-fs-4.14.0.tgz#ce5124d2c601421255985e6e94da80a7357b1b18"
@ -9560,6 +9614,11 @@ nanoid@3.1.20:
  resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.20.tgz#badc263c6b1dcf14b71efaa85f6ab4c1d6cfc788"
  integrity sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==

nanoid@3.1.23:
  version "3.1.23"
  resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.23.tgz#f744086ce7c2bc47ee0a8472574d5c78e4183a81"
  integrity sha512-FiB0kzdP0FFVGDKlRLEQ1BgDzU87dy5NnzjeW9YZNt+/c3+q82EQDUwniSAUxp/F0gFNI1ZhKU1FqYsMuqZVnw==

nanomatch@^1.2.9:
  version "1.2.13"
  resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119"
@ -13427,6 +13486,11 @@ workerpool@6.1.0:
  resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.1.0.tgz#a8e038b4c94569596852de7a8ea4228eefdeb37b"
  integrity sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg==

workerpool@6.1.4:
  version "6.1.4"
  resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.1.4.tgz#6a972b6df82e38d50248ee2820aa98e2d0ad3090"
  integrity sha512-jGWPzsUqzkow8HoAvqaPWTUPCrlPJaJ5tY8Iz7n1uCz3tTp6s3CDG0FF1NsX42WNlkRSW6Mr+CDZGnNoSsKa7g==

wrap-ansi@^2.0.0:
  version "2.1.0"
  resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85"