diff --git a/packages/erc721-watcher/.eslintignore b/packages/erc721-watcher/.eslintignore
new file mode 100644
index 00000000..55cb5225
--- /dev/null
+++ b/packages/erc721-watcher/.eslintignore
@@ -0,0 +1,2 @@
+# Don't lint build output.
+dist
diff --git a/packages/erc721-watcher/.eslintrc.json b/packages/erc721-watcher/.eslintrc.json
new file mode 100644
index 00000000..476d529d
--- /dev/null
+++ b/packages/erc721-watcher/.eslintrc.json
@@ -0,0 +1,27 @@
+{
+  "env": {
+    "browser": true,
+    "es2021": true
+  },
+  "extends": [
+    "semistandard",
+    "plugin:@typescript-eslint/recommended"
+  ],
+  "parser": "@typescript-eslint/parser",
+  "parserOptions": {
+    "ecmaVersion": 12,
+    "sourceType": "module"
+  },
+  "plugins": [
+    "@typescript-eslint"
+  ],
+  "rules": {
+    "@typescript-eslint/no-explicit-any": "off",
+    "@typescript-eslint/explicit-module-boundary-types": [
+      "warn",
+      {
+        "allowArgumentsExplicitlyTypedAsAny": true
+      }
+    ]
+  }
+}
diff --git a/packages/erc721-watcher/README.md b/packages/erc721-watcher/README.md
new file mode 100644
index 00000000..528a7fb1
--- /dev/null
+++ b/packages/erc721-watcher/README.md
@@ -0,0 +1,189 @@
+# erc721-watcher
+
+## Setup
+
+* Run the following command to install the required packages:
+
+  ```bash
+  yarn
+  ```
+
+* Run the IPFS (go-ipfs version 0.12.2) daemon:
+
+  ```bash
+  ipfs daemon
+  ```
+
+* Create a Postgres 12 database for the watcher:
+
+  ```bash
+  sudo su - postgres
+  createdb erc721-watcher
+  ```
+
+* If the watcher is an `active` watcher:
+
+  Create a database for the job queue and enable the `pgcrypto` extension on it (https://github.com/timgit/pg-boss/blob/master/docs/usage.md#intro):
+
+  ```
+  createdb erc721-watcher-job-queue
+  ```
+
+  ```
+  postgres@tesla:~$ psql -U postgres -h localhost erc721-watcher-job-queue
+  Password for user postgres:
+  psql (12.7 (Ubuntu 12.7-1.pgdg18.04+1))
+  SSL connection (protocol: TLSv1.3, cipher: TLS_AES_256_GCM_SHA384, bits: 256, compression: off)
+  Type "help" for help.
+
+  erc721-watcher-job-queue=# CREATE EXTENSION pgcrypto;
+  CREATE EXTENSION
+  erc721-watcher-job-queue=# exit
+  ```
+
+* In the [config file](./environments/local.toml):
+
+  * Update the database connection settings.
+
+  * Update the `upstream` config and provide the `ipld-eth-server` GQL API endpoint.
+
+  * Update the `server` config with state checkpoint settings and provide the IPFS API address.
+
+## Customize
+
+* Indexing on an event:
+
+  * Edit the custom hook function `handleEvent` (triggered on an event) in [hooks.ts](./src/hooks.ts) to perform the corresponding indexing using the `Indexer` object (see the sketch below).
+
+  * When using the indexer storage methods for indexing, pass `diff` as `true` if the default state should be generated from the state variables being indexed.
+
+* Generating state:
+
+  * Edit the custom hook function `createInitialState` (triggered when the watcher passes the starting block of a contract that has checkpointing on) in [hooks.ts](./src/hooks.ts) to save an initial state `IPLDBlock` using the `Indexer` object.
+
+  * Edit the custom hook function `createStateDiff` (triggered on a block) in [hooks.ts](./src/hooks.ts) to save the state in a `diff` `IPLDBlock` using the `Indexer` object. The default state (if it exists) is updated.
+
+  * Edit the custom hook function `createStateCheckpoint` (triggered just before a default or CLI-requested checkpoint) in [hooks.ts](./src/hooks.ts) to save the state in a `checkpoint` `IPLDBlock` using the `Indexer` object.
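The hooks described above ship as stubs in the generated code. Below is a minimal, illustrative sketch of a `handleEvent` implementation for this watcher; it is not part of this changeset. The `ResultEvent` shape, the per-variable `Indexer` storage methods (`_owners`, `_balances`) and their trailing `diff` flag are assumptions inferred from the generated `client.ts` and `database.ts` in this diff, so verify them against the generated `indexer.ts` before reusing.

```typescript
// hooks.ts (sketch) — assumes the generated Indexer exposes _balances()/_owners()
// storage methods mirroring the ERC721 state variables, each accepting a trailing
// `diff` flag, and that ResultEvent is exported by ./indexer.
import { Indexer, ResultEvent } from './indexer';

const ZERO_ADDRESS = '0x0000000000000000000000000000000000000000';

export async function handleEvent (indexer: Indexer, eventData: ResultEvent): Promise<void> {
  const { block, contract, event } = eventData;

  // Index only Transfer events in this example.
  if (event.__typename !== 'TransferEvent') {
    return;
  }

  const { from, to, tokenId } = event;

  // Update the sender's and receiver's balance entries; passing `diff` as true
  // folds these values into the default state diff generated for the block.
  if (from !== ZERO_ADDRESS) {
    await indexer._balances(block.hash, contract, from, true);
  }

  if (to !== ZERO_ADDRESS) {
    await indexer._balances(block.hash, contract, to, true);
  }

  // Update the owner mapping entry for the transferred token.
  await indexer._owners(block.hash, contract, BigInt(tokenId), true);
}
```

Passing `diff` as `true` is what lets the `createStateDiff`/checkpoint machinery pick these values up as the default state, per the Customize notes above.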
+
+## Run
+
+* Run the watcher:
+
+  ```bash
+  yarn server
+  ```
+
+  GQL console: http://localhost:3006/graphql
+
+* If the watcher is an `active` watcher:
+
+  * Run the job-runner:
+
+    ```bash
+    yarn job-runner
+    ```
+
+  * To watch a contract:
+
+    ```bash
+    yarn watch:contract --address <contract-address> --kind <contract-kind> --checkpoint <true | false> --starting-block [block-number]
+    ```
+
+    * `address`: Address or identifier of the contract to be watched.
+    * `kind`: Kind of the contract.
+    * `checkpoint`: Turn checkpointing on (`true` | `false`).
+    * `starting-block`: Starting block for the contract (default: `1`).
+
+    Examples:
+
+    Watch a contract with its address and checkpointing on:
+
+    ```bash
+    yarn watch:contract --address 0x1F78641644feB8b64642e833cE4AFE93DD6e7833 --kind ERC721 --checkpoint true
+    ```
+
+    Watch a contract with its identifier and checkpointing on:
+
+    ```bash
+    yarn watch:contract --address MyProtocol --kind protocol --checkpoint true
+    ```
+
+  * To fill a block range:
+
+    ```bash
+    yarn fill --start-block <from-block> --end-block <to-block>
+    ```
+
+    * `start-block`: Block number to start filling from.
+    * `end-block`: Block number till which to fill.
+
+  * To create a checkpoint for a contract:
+
+    ```bash
+    yarn checkpoint --address <contract-address> --block-hash [block-hash]
+    ```
+
+    * `address`: Address or identifier of the contract for which to create a checkpoint.
+    * `block-hash`: Hash of a block (in the pruned region) at which to create the checkpoint (default: latest canonical block hash).
+
+  * To reset the watcher to a previous block number:
+
+    * Reset state:
+
+      ```bash
+      yarn reset state --block-number <block-number>
+      ```
+
+    * Reset job-queue:
+
+      ```bash
+      yarn reset job-queue --block-number <block-number>
+      ```
+
+    * `block-number`: Block number to which to reset the watcher.
+
+  * To export and import the watcher state:
+
+    * In the source watcher, export the watcher state:
+
+      ```bash
+      yarn export-state --export-file [export-file-path]
+      ```
+
+      * `export-file`: Path of the JSON file to which to export the watcher data.
+
+    * In the target watcher, run the job-runner:
+
+      ```bash
+      yarn job-runner
+      ```
+
+    * Import the watcher state:
+
+      ```bash
+      yarn import-state --import-file <import-file-path>
+      ```
+
+      * `import-file`: Path of the JSON file from which to import the watcher data.
+
+    * Run fill:
+
+      ```bash
+      yarn fill --start-block <snapshot-block> --end-block <to-block>
+      ```
+
+      * `snapshot-block`: Block number at which the watcher state was exported.
+
+    * Run the server:
+
+      ```bash
+      yarn server
+      ```
+
+  * To inspect a CID:
+
+    ```bash
+    yarn inspect-cid --cid <cid>
+    ```
+
+    * `cid`: CID to be inspected.
diff --git a/packages/erc721-watcher/environments/local.toml b/packages/erc721-watcher/environments/local.toml
new file mode 100644
index 00000000..9b6e1e7f
--- /dev/null
+++ b/packages/erc721-watcher/environments/local.toml
@@ -0,0 +1,41 @@
+[server]
+  host = "127.0.0.1"
+  port = 3006
+  kind = "active"
+
+  # Checkpointing state.
+  checkpointing = true
+
+  # Checkpoint interval in number of blocks.
+  checkpointInterval = 2000
+
+  # IPFS API address (can be taken from the output on running the IPFS daemon).
+ # ipfsApiAddr = "/ip4/127.0.0.1/tcp/5001" + + +[database] + type = "postgres" + host = "localhost" + port = 5432 + database = "erc721-watcher" + username = "postgres" + password = "postgres" + synchronize = true + logging = false + +[upstream] + [upstream.ethServer] + gqlApiEndpoint = "http://127.0.0.1:8082/graphql" + rpcProviderEndpoint = "http://127.0.0.1:8081" + blockDelayInMilliSecs = 2000 + + [upstream.cache] + name = "requests" + enabled = false + deleteOnStart = false + +[jobQueue] + dbConnectionString = "postgres://postgres:postgres@localhost/erc721-watcher-job-queue" + maxCompletionLagInSecs = 300 + jobDelayInMilliSecs = 100 + eventsInBatch = 50 diff --git a/packages/erc721-watcher/package.json b/packages/erc721-watcher/package.json new file mode 100644 index 00000000..4068fb3d --- /dev/null +++ b/packages/erc721-watcher/package.json @@ -0,0 +1,67 @@ +{ + "name": "@vulcanize/erc721-watcher", + "version": "0.1.0", + "description": "erc721-watcher", + "private": true, + "main": "dist/index.js", + "scripts": { + "lint": "eslint .", + "build": "tsc", + "server": "DEBUG=vulcanize:* ts-node src/server.ts", + "job-runner": "DEBUG=vulcanize:* ts-node src/job-runner.ts", + "watch:contract": "DEBUG=vulcanize:* ts-node src/cli/watch-contract.ts", + "fill": "DEBUG=vulcanize:* ts-node src/fill.ts", + "reset": "DEBUG=vulcanize:* ts-node src/cli/reset.ts", + "checkpoint": "DEBUG=vulcanize:* ts-node src/cli/checkpoint.ts", + "export-state": "DEBUG=vulcanize:* ts-node src/cli/export-state.ts", + "import-state": "DEBUG=vulcanize:* ts-node src/cli/import-state.ts", + "inspect-cid": "DEBUG=vulcanize:* ts-node src/cli/inspect-cid.ts" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/vulcanize/watcher-ts.git" + }, + "author": "", + "license": "AGPL-3.0", + "bugs": { + "url": "https://github.com/vulcanize/watcher-ts/issues" + }, + "homepage": "https://github.com/vulcanize/watcher-ts#readme", + "dependencies": { + "@apollo/client": "^3.3.19", + "@ethersproject/providers": "5.3.0", + "@ipld/dag-cbor": "^6.0.12", + "@vulcanize/ipld-eth-client": "^0.1.0", + "@vulcanize/solidity-mapper": "^0.1.0", + "@vulcanize/util": "^0.1.0", + "@vulcanize/graph-node": "^0.1.0", + "apollo-server-express": "^2.25.0", + "apollo-type-bigint": "^0.1.3", + "debug": "^4.3.1", + "ethers": "^5.2.0", + "express": "^4.17.1", + "graphql": "^15.5.0", + "graphql-import-node": "^0.0.4", + "json-bigint": "^1.0.0", + "reflect-metadata": "^0.1.13", + "typeorm": "^0.2.32", + "yargs": "^17.0.1", + "decimal.js": "^10.3.1" + }, + "devDependencies": { + "@ethersproject/abi": "^5.3.0", + "@types/express": "^4.17.11", + "@types/yargs": "^17.0.0", + "@typescript-eslint/eslint-plugin": "^4.25.0", + "@typescript-eslint/parser": "^4.25.0", + "eslint": "^7.27.0", + "eslint-config-semistandard": "^15.0.1", + "eslint-config-standard": "^16.0.3", + "eslint-plugin-import": "^2.23.3", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.0", + "eslint-plugin-standard": "^5.0.0", + "ts-node": "^10.0.0", + "typescript": "^4.3.2" + } +} diff --git a/packages/erc721-watcher/src/artifacts/ERC721.json b/packages/erc721-watcher/src/artifacts/ERC721.json new file mode 100644 index 00000000..ebebbd41 --- /dev/null +++ b/packages/erc721-watcher/src/artifacts/ERC721.json @@ -0,0 +1,452 @@ +{ + "abi": [ + { + "inputs": [ + { + "internalType": "string", + "name": "name_", + "type": "string" + }, + { + "internalType": "string", + "name": "symbol_", + "type": "string" + } + ], + "stateMutability": "nonpayable", + "type": 
"constructor" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "approved", + "type": "address" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "tokenId", + "type": "uint256" + } + ], + "name": "Approval", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "operator", + "type": "address" + }, + { + "indexed": false, + "internalType": "bool", + "name": "approved", + "type": "bool" + } + ], + "name": "ApprovalForAll", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "indexed": true, + "internalType": "uint256", + "name": "tokenId", + "type": "uint256" + } + ], + "name": "Transfer", + "type": "event" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256" + } + ], + "name": "approve", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "owner", + "type": "address" + } + ], + "name": "balanceOf", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256" + } + ], + "name": "getApproved", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "owner", + "type": "address" + }, + { + "internalType": "address", + "name": "operator", + "type": "address" + } + ], + "name": "isApprovedForAll", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "name", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256" + } + ], + "name": "ownerOf", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256" + } + ], + "name": "safeTransferFrom", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256" + }, + { + "internalType": "bytes", + "name": "_data", + "type": "bytes" + } + ], + "name": "safeTransferFrom", + "outputs": [], + "stateMutability": "nonpayable", + "type": 
"function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "operator", + "type": "address" + }, + { + "internalType": "bool", + "name": "approved", + "type": "bool" + } + ], + "name": "setApprovalForAll", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bytes4", + "name": "interfaceId", + "type": "bytes4" + } + ], + "name": "supportsInterface", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "symbol", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256" + } + ], + "name": "tokenURI", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "from", + "type": "address" + }, + { + "internalType": "address", + "name": "to", + "type": "address" + }, + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256" + } + ], + "name": "transferFrom", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + } + ], + "storageLayout": { + "storage": [ + { + "astId": 718, + "contract": "ERC721.sol:ERC721", + "label": "_name", + "offset": 0, + "slot": "0", + "type": "t_string_storage" + }, + { + "astId": 720, + "contract": "ERC721.sol:ERC721", + "label": "_symbol", + "offset": 0, + "slot": "1", + "type": "t_string_storage" + }, + { + "astId": 724, + "contract": "ERC721.sol:ERC721", + "label": "_owners", + "offset": 0, + "slot": "2", + "type": "t_mapping(t_uint256,t_address)" + }, + { + "astId": 728, + "contract": "ERC721.sol:ERC721", + "label": "_balances", + "offset": 0, + "slot": "3", + "type": "t_mapping(t_address,t_uint256)" + }, + { + "astId": 732, + "contract": "ERC721.sol:ERC721", + "label": "_tokenApprovals", + "offset": 0, + "slot": "4", + "type": "t_mapping(t_uint256,t_address)" + }, + { + "astId": 738, + "contract": "ERC721.sol:ERC721", + "label": "_operatorApprovals", + "offset": 0, + "slot": "5", + "type": "t_mapping(t_address,t_mapping(t_address,t_bool))" + } + ], + "types": { + "t_address": { + "encoding": "inplace", + "label": "address", + "numberOfBytes": "20" + }, + "t_bool": { + "encoding": "inplace", + "label": "bool", + "numberOfBytes": "1" + }, + "t_mapping(t_address,t_bool)": { + "encoding": "mapping", + "key": "t_address", + "label": "mapping(address => bool)", + "numberOfBytes": "32", + "value": "t_bool" + }, + "t_mapping(t_address,t_mapping(t_address,t_bool))": { + "encoding": "mapping", + "key": "t_address", + "label": "mapping(address => mapping(address => bool))", + "numberOfBytes": "32", + "value": "t_mapping(t_address,t_bool)" + }, + "t_mapping(t_address,t_uint256)": { + "encoding": "mapping", + "key": "t_address", + "label": "mapping(address => uint256)", + "numberOfBytes": "32", + "value": "t_uint256" + }, + "t_mapping(t_uint256,t_address)": { + "encoding": "mapping", + "key": "t_uint256", + "label": "mapping(uint256 => address)", + "numberOfBytes": "32", + "value": "t_address" + }, + "t_string_storage": { + "encoding": "bytes", + "label": "string", + "numberOfBytes": "32" + }, + "t_uint256": { + "encoding": "inplace", + "label": "uint256", + "numberOfBytes": "32" + } + } + } +} \ No newline at end of file diff 
--git a/packages/erc721-watcher/src/cli/checkpoint.ts b/packages/erc721-watcher/src/cli/checkpoint.ts new file mode 100644 index 00000000..84230167 --- /dev/null +++ b/packages/erc721-watcher/src/cli/checkpoint.ts @@ -0,0 +1,79 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import path from 'path'; +import yargs from 'yargs'; +import 'reflect-metadata'; +import debug from 'debug'; +import assert from 'assert'; + +import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@vulcanize/util'; +import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node'; + +import { Database } from '../database'; +import { Indexer } from '../indexer'; + +const log = debug('vulcanize:checkpoint'); + +const main = async (): Promise => { + const argv = await yargs.parserConfiguration({ + 'parse-numbers': false + }).options({ + configFile: { + alias: 'f', + type: 'string', + require: true, + demandOption: true, + describe: 'Configuration file path (toml)', + default: DEFAULT_CONFIG_PATH + }, + address: { + type: 'string', + require: true, + demandOption: true, + describe: 'Contract address to create the checkpoint for.' + }, + blockHash: { + type: 'string', + describe: 'Blockhash at which to create the checkpoint.' + } + }).argv; + + const config: Config = await getConfig(argv.configFile); + const { ethClient, ethProvider } = await initClients(config); + + const db = new Database(config.database); + await db.init(); + + const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*')); + await graphDb.init(); + + const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server); + + const jobQueueConfig = config.jobQueue; + assert(jobQueueConfig, 'Missing job queue config'); + + const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig; + assert(dbConnectionString, 'Missing job queue db connection string'); + + const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs }); + await jobQueue.start(); + + const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher); + await indexer.init(); + + graphWatcher.setIndexer(indexer); + + const blockHash = await indexer.processCLICheckpoint(argv.address, argv.blockHash); + + log(`Created a checkpoint for contract ${argv.address} at block-hash ${blockHash}`); + + await db.close(); +}; + +main().catch(err => { + log(err); +}).finally(() => { + process.exit(0); +}); diff --git a/packages/erc721-watcher/src/cli/export-state.ts b/packages/erc721-watcher/src/cli/export-state.ts new file mode 100644 index 00000000..a42c6aa3 --- /dev/null +++ b/packages/erc721-watcher/src/cli/export-state.ts @@ -0,0 +1,132 @@ +// +// Copyright 2021 Vulcanize, Inc. 
+// + +import assert from 'assert'; +import yargs from 'yargs'; +import 'reflect-metadata'; +import debug from 'debug'; +import fs from 'fs'; +import path from 'path'; + +import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue, StateKind } from '@vulcanize/util'; +import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node'; +import * as codec from '@ipld/dag-cbor'; + +import { Database } from '../database'; +import { Indexer } from '../indexer'; + +const log = debug('vulcanize:export-state'); + +const main = async (): Promise => { + const argv = await yargs.parserConfiguration({ + 'parse-numbers': false + }).options({ + configFile: { + alias: 'f', + type: 'string', + require: true, + demandOption: true, + describe: 'Configuration file path (toml)', + default: DEFAULT_CONFIG_PATH + }, + exportFile: { + alias: 'o', + type: 'string', + describe: 'Export file path' + } + }).argv; + + const config: Config = await getConfig(argv.configFile); + const { ethClient, ethProvider } = await initClients(config); + + const db = new Database(config.database); + await db.init(); + + const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*')); + await graphDb.init(); + + const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server); + + const jobQueueConfig = config.jobQueue; + assert(jobQueueConfig, 'Missing job queue config'); + + const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig; + assert(dbConnectionString, 'Missing job queue db connection string'); + + const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs }); + await jobQueue.start(); + + const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher); + await indexer.init(); + + graphWatcher.setIndexer(indexer); + + const exportData: any = { + snapshotBlock: {}, + contracts: [], + ipldCheckpoints: [] + }; + + const contracts = await db.getContracts(); + + // Get latest block with hooks processed. + const block = await indexer.getLatestHooksProcessedBlock(); + assert(block); + + // Export snapshot block. + exportData.snapshotBlock = { + blockNumber: block.blockNumber, + blockHash: block.blockHash + }; + + // Export contracts and checkpoints. + for (const contract of contracts) { + exportData.contracts.push({ + address: contract.address, + kind: contract.kind, + checkpoint: contract.checkpoint, + startingBlock: block.blockNumber + }); + + // Create and export checkpoint if checkpointing is on for the contract. 
+ if (contract.checkpoint) { + await indexer.createCheckpoint(contract.address, block.blockHash); + + const ipldBlock = await indexer.getLatestIPLDBlock(contract.address, StateKind.Checkpoint, block.blockNumber); + assert(ipldBlock); + + const data = indexer.getIPLDData(ipldBlock); + + if (indexer.isIPFSConfigured()) { + await indexer.pushToIPFS(data); + } + + exportData.ipldCheckpoints.push({ + contractAddress: ipldBlock.contractAddress, + cid: ipldBlock.cid, + kind: ipldBlock.kind, + data + }); + } + } + + if (argv.exportFile) { + const encodedExportData = codec.encode(exportData); + + const filePath = path.resolve(argv.exportFile); + const fileDir = path.dirname(filePath); + + if (!fs.existsSync(fileDir)) fs.mkdirSync(fileDir, { recursive: true }); + + fs.writeFileSync(filePath, encodedExportData); + } else { + log(exportData); + } +}; + +main().catch(err => { + log(err); +}).finally(() => { + process.exit(0); +}); diff --git a/packages/erc721-watcher/src/cli/import-state.ts b/packages/erc721-watcher/src/cli/import-state.ts new file mode 100644 index 00000000..f3fa932f --- /dev/null +++ b/packages/erc721-watcher/src/cli/import-state.ts @@ -0,0 +1,122 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import assert from 'assert'; +import 'reflect-metadata'; +import yargs from 'yargs'; +import { hideBin } from 'yargs/helpers'; +import debug from 'debug'; +import { PubSub } from 'apollo-server-express'; +import fs from 'fs'; +import path from 'path'; + +import { getConfig, fillBlocks, JobQueue, DEFAULT_CONFIG_PATH, Config, initClients, StateKind } from '@vulcanize/util'; +import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node'; +import * as codec from '@ipld/dag-cbor'; + +import { Database } from '../database'; +import { Indexer } from '../indexer'; +import { EventWatcher } from '../events'; +import { IPLDBlock } from '../entity/IPLDBlock'; + +const log = debug('vulcanize:import-state'); + +export const main = async (): Promise => { + const argv = await yargs(hideBin(process.argv)).parserConfiguration({ + 'parse-numbers': false + }).options({ + configFile: { + alias: 'f', + type: 'string', + demandOption: true, + describe: 'configuration file path (toml)', + default: DEFAULT_CONFIG_PATH + }, + importFile: { + alias: 'i', + type: 'string', + demandOption: true, + describe: 'Import file path (JSON)' + } + }).argv; + + const config: Config = await getConfig(argv.configFile); + const { ethClient, ethProvider } = await initClients(config); + + const db = new Database(config.database); + await db.init(); + + const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*')); + await graphDb.init(); + + const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server); + + // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway. 
+ // Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries + const pubsub = new PubSub(); + + const jobQueueConfig = config.jobQueue; + assert(jobQueueConfig, 'Missing job queue config'); + + const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig; + assert(dbConnectionString, 'Missing job queue db connection string'); + + const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs }); + await jobQueue.start(); + + const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher); + await indexer.init(); + + graphWatcher.setIndexer(indexer); + + const eventWatcher = new EventWatcher(config.upstream, ethClient, indexer, pubsub, jobQueue); + + // Import data. + const importFilePath = path.resolve(argv.importFile); + const encodedImportData = fs.readFileSync(importFilePath); + const importData = codec.decode(Buffer.from(encodedImportData)) as any; + + // Fill the snapshot block. + await fillBlocks( + jobQueue, + indexer, + eventWatcher, + config.upstream.ethServer.blockDelayInMilliSecs, + { + startBlock: importData.snapshotBlock.blockNumber, + endBlock: importData.snapshotBlock.blockNumber + } + ); + + // Fill the Contracts. + for (const contract of importData.contracts) { + await indexer.watchContract(contract.address, contract.kind, contract.checkpoint, contract.startingBlock); + } + + // Get the snapshot block. + const block = await indexer.getBlockProgress(importData.snapshotBlock.blockHash); + assert(block); + + // Fill the IPLDBlocks. + for (const checkpoint of importData.ipldCheckpoints) { + let ipldBlock = new IPLDBlock(); + + ipldBlock = Object.assign(ipldBlock, checkpoint); + ipldBlock.block = block; + + ipldBlock.data = Buffer.from(codec.encode(ipldBlock.data)); + + await indexer.saveOrUpdateIPLDBlock(ipldBlock); + } + + // The 'diff_staged' and 'init' IPLD blocks are unnecessary as checkpoints have been already created for the snapshot block. + await indexer.removeIPLDBlocks(block.blockNumber, StateKind.Init); + await indexer.removeIPLDBlocks(block.blockNumber, StateKind.DiffStaged); +}; + +main().catch(err => { + log(err); +}).finally(() => { + process.exit(0); +}); diff --git a/packages/erc721-watcher/src/cli/inspect-cid.ts b/packages/erc721-watcher/src/cli/inspect-cid.ts new file mode 100644 index 00000000..4abc919b --- /dev/null +++ b/packages/erc721-watcher/src/cli/inspect-cid.ts @@ -0,0 +1,77 @@ +// +// Copyright 2021 Vulcanize, Inc. 
+// + +import path from 'path'; +import assert from 'assert'; +import yargs from 'yargs'; +import 'reflect-metadata'; +import debug from 'debug'; +import util from 'util'; + +import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@vulcanize/util'; +import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node'; + +import { Database } from '../database'; +import { Indexer } from '../indexer'; + +const log = debug('vulcanize:inspect-cid'); + +const main = async (): Promise => { + const argv = await yargs.parserConfiguration({ + 'parse-numbers': false + }).options({ + configFile: { + alias: 'f', + type: 'string', + require: true, + demandOption: true, + describe: 'Configuration file path (toml)', + default: DEFAULT_CONFIG_PATH + }, + cid: { + alias: 'c', + type: 'string', + demandOption: true, + describe: 'CID to be inspected' + } + }).argv; + + const config: Config = await getConfig(argv.configFile); + const { ethClient, ethProvider } = await initClients(config); + + const db = new Database(config.database); + await db.init(); + + const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*')); + await graphDb.init(); + + const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server); + + const jobQueueConfig = config.jobQueue; + assert(jobQueueConfig, 'Missing job queue config'); + + const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig; + assert(dbConnectionString, 'Missing job queue db connection string'); + + const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs }); + await jobQueue.start(); + + const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher); + await indexer.init(); + + graphWatcher.setIndexer(indexer); + + const ipldBlock = await indexer.getIPLDBlockByCid(argv.cid); + assert(ipldBlock, 'IPLDBlock for the provided CID doesn\'t exist.'); + + const ipldData = await indexer.getIPLDData(ipldBlock); + + log(util.inspect(ipldData, false, null)); +}; + +main().catch(err => { + log(err); +}).finally(() => { + process.exit(0); +}); diff --git a/packages/erc721-watcher/src/cli/reset-cmds/job-queue.ts b/packages/erc721-watcher/src/cli/reset-cmds/job-queue.ts new file mode 100644 index 00000000..a8766bcf --- /dev/null +++ b/packages/erc721-watcher/src/cli/reset-cmds/job-queue.ts @@ -0,0 +1,22 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import debug from 'debug'; + +import { getConfig, resetJobs } from '@vulcanize/util'; + +const log = debug('vulcanize:reset-job-queue'); + +export const command = 'job-queue'; + +export const desc = 'Reset job queue'; + +export const builder = {}; + +export const handler = async (argv: any): Promise => { + const config = await getConfig(argv.configFile); + await resetJobs(config); + + log('Job queue reset successfully'); +}; diff --git a/packages/erc721-watcher/src/cli/reset-cmds/state.ts b/packages/erc721-watcher/src/cli/reset-cmds/state.ts new file mode 100644 index 00000000..f711b770 --- /dev/null +++ b/packages/erc721-watcher/src/cli/reset-cmds/state.ts @@ -0,0 +1,126 @@ +// +// Copyright 2021 Vulcanize, Inc. 
+// + +import path from 'path'; +import debug from 'debug'; +import { MoreThan } from 'typeorm'; +import assert from 'assert'; + +import { getConfig, initClients, resetJobs, JobQueue } from '@vulcanize/util'; +import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node'; + +import { Database } from '../../database'; +import { Indexer } from '../../indexer'; +import { BlockProgress } from '../../entity/BlockProgress'; + +import { SupportsInterface } from '../../entity/SupportsInterface'; +import { BalanceOf } from '../../entity/BalanceOf'; +import { OwnerOf } from '../../entity/OwnerOf'; +import { GetApproved } from '../../entity/GetApproved'; +import { IsApprovedForAll } from '../../entity/IsApprovedForAll'; +import { Name } from '../../entity/Name'; +import { Symbol } from '../../entity/Symbol'; +import { TokenURI } from '../../entity/TokenURI'; +import { _Name } from '../../entity/_Name'; +import { _Symbol } from '../../entity/_Symbol'; +import { _Owners } from '../../entity/_Owners'; +import { _Balances } from '../../entity/_Balances'; +import { _TokenApprovals } from '../../entity/_TokenApprovals'; +import { _OperatorApprovals } from '../../entity/_OperatorApprovals'; + +const log = debug('vulcanize:reset-state'); + +export const command = 'state'; + +export const desc = 'Reset state to block number'; + +export const builder = { + blockNumber: { + type: 'number' + } +}; + +export const handler = async (argv: any): Promise => { + const config = await getConfig(argv.configFile); + await resetJobs(config); + const { ethClient, ethProvider } = await initClients(config); + + // Initialize database. + const db = new Database(config.database); + await db.init(); + + const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*')); + await graphDb.init(); + + const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server); + + const jobQueueConfig = config.jobQueue; + assert(jobQueueConfig, 'Missing job queue config'); + + const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig; + assert(dbConnectionString, 'Missing job queue db connection string'); + + const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs }); + await jobQueue.start(); + + const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher); + await indexer.init(); + + graphWatcher.setIndexer(indexer); + + const blockProgresses = await indexer.getBlocksAtHeight(argv.blockNumber, false); + assert(blockProgresses.length, `No blocks at specified block number ${argv.blockNumber}`); + assert(!blockProgresses.some(block => !block.isComplete), `Incomplete block at block number ${argv.blockNumber} with unprocessed events`); + const [blockProgress] = blockProgresses; + + const dbTx = await db.createTransactionRunner(); + + try { + const entities = [BlockProgress, SupportsInterface, BalanceOf, OwnerOf, GetApproved, IsApprovedForAll, Name, Symbol, TokenURI, _Name, _Symbol, _Owners, _Balances, _TokenApprovals, _OperatorApprovals]; + + const removeEntitiesPromise = entities.map(async entityClass => { + return db.removeEntities(dbTx, entityClass, { blockNumber: MoreThan(argv.blockNumber) }); + }); + + await Promise.all(removeEntitiesPromise); + + const syncStatus = await indexer.getSyncStatus(); + assert(syncStatus, 'Missing syncStatus'); + + if (syncStatus.latestIndexedBlockNumber > blockProgress.blockNumber) { + await indexer.updateSyncStatusIndexedBlock(blockProgress.blockHash, 
blockProgress.blockNumber, true); + } + + if (syncStatus.latestCanonicalBlockNumber > blockProgress.blockNumber) { + await indexer.updateSyncStatusCanonicalBlock(blockProgress.blockHash, blockProgress.blockNumber, true); + } + + const ipldStatus = await indexer.getIPLDStatus(); + + if (ipldStatus) { + if (ipldStatus.latestHooksBlockNumber > blockProgress.blockNumber) { + await indexer.updateIPLDStatusHooksBlock(blockProgress.blockNumber, true); + } + + if (ipldStatus.latestCheckpointBlockNumber > blockProgress.blockNumber) { + await indexer.updateIPLDStatusCheckpointBlock(blockProgress.blockNumber, true); + } + + if (ipldStatus.latestIPFSBlockNumber > blockProgress.blockNumber) { + await indexer.updateIPLDStatusIPFSBlock(blockProgress.blockNumber, true); + } + } + + await indexer.updateSyncStatusChainHead(blockProgress.blockHash, blockProgress.blockNumber, true); + + dbTx.commitTransaction(); + } catch (error) { + await dbTx.rollbackTransaction(); + throw error; + } finally { + await dbTx.release(); + } + + log('Reset state successfully'); +}; diff --git a/packages/erc721-watcher/src/cli/reset.ts b/packages/erc721-watcher/src/cli/reset.ts new file mode 100644 index 00000000..2ddebf10 --- /dev/null +++ b/packages/erc721-watcher/src/cli/reset.ts @@ -0,0 +1,24 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import 'reflect-metadata'; +import debug from 'debug'; + +import { getResetYargs } from '@vulcanize/util'; + +const log = debug('vulcanize:reset'); + +const main = async () => { + return getResetYargs() + .commandDir('reset-cmds', { extensions: ['ts', 'js'], exclude: /([a-zA-Z0-9\s_\\.\-:])+(.d.ts)$/ }) + .demandCommand(1) + .help() + .argv; +}; + +main().then(() => { + process.exit(); +}).catch(err => { + log(err); +}); diff --git a/packages/erc721-watcher/src/cli/watch-contract.ts b/packages/erc721-watcher/src/cli/watch-contract.ts new file mode 100644 index 00000000..63bf09d3 --- /dev/null +++ b/packages/erc721-watcher/src/cli/watch-contract.ts @@ -0,0 +1,91 @@ +// +// Copyright 2021 Vulcanize, Inc. 
+// + +import path from 'path'; +import yargs from 'yargs'; +import 'reflect-metadata'; +import debug from 'debug'; +import assert from 'assert'; + +import { Config, DEFAULT_CONFIG_PATH, getConfig, initClients, JobQueue } from '@vulcanize/util'; +import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node'; + +import { Database } from '../database'; +import { Indexer } from '../indexer'; + +const log = debug('vulcanize:watch-contract'); + +const main = async (): Promise => { + const argv = await yargs.parserConfiguration({ + 'parse-numbers': false + }).options({ + configFile: { + alias: 'f', + type: 'string', + require: true, + demandOption: true, + describe: 'Configuration file path (toml)', + default: DEFAULT_CONFIG_PATH + }, + address: { + type: 'string', + require: true, + demandOption: true, + describe: 'Address of the deployed contract' + }, + kind: { + type: 'string', + require: true, + demandOption: true, + describe: 'Kind of contract' + }, + checkpoint: { + type: 'boolean', + require: true, + demandOption: true, + describe: 'Turn checkpointing on' + }, + startingBlock: { + type: 'number', + default: 1, + describe: 'Starting block' + } + }).argv; + + const config: Config = await getConfig(argv.configFile); + const { ethClient, ethProvider } = await initClients(config); + + const db = new Database(config.database); + await db.init(); + + const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*')); + await graphDb.init(); + + const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server); + + const jobQueueConfig = config.jobQueue; + assert(jobQueueConfig, 'Missing job queue config'); + + const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig; + assert(dbConnectionString, 'Missing job queue db connection string'); + + const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs }); + await jobQueue.start(); + + const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher); + await indexer.init(); + + graphWatcher.setIndexer(indexer); + + await indexer.watchContract(argv.address, argv.kind, argv.checkpoint, argv.startingBlock); + + await db.close(); + await jobQueue.stop(); +}; + +main().catch(err => { + log(err); +}).finally(() => { + process.exit(0); +}); diff --git a/packages/erc721-watcher/src/client.ts b/packages/erc721-watcher/src/client.ts new file mode 100644 index 00000000..616a8c94 --- /dev/null +++ b/packages/erc721-watcher/src/client.ts @@ -0,0 +1,181 @@ +// +// Copyright 2021 Vulcanize, Inc. 
+// + +import { gql } from '@apollo/client/core'; +import { GraphQLClient, GraphQLConfig } from '@vulcanize/ipld-eth-client'; + +import { queries, mutations, subscriptions } from './gql'; + +export class Client { + _config: GraphQLConfig; + _client: GraphQLClient; + + constructor (config: GraphQLConfig) { + this._config = config; + + this._client = new GraphQLClient(config); + } + + async getSupportsInterface (blockHash: string, contractAddress: string, interfaceId: string): Promise { + const { supportsInterface } = await this._client.query( + gql(queries.supportsInterface), + { blockHash, contractAddress, interfaceId } + ); + + return supportsInterface; + } + + async getBalanceOf (blockHash: string, contractAddress: string, owner: string): Promise { + const { balanceOf } = await this._client.query( + gql(queries.balanceOf), + { blockHash, contractAddress, owner } + ); + + return balanceOf; + } + + async getOwnerOf (blockHash: string, contractAddress: string, tokenId: bigint): Promise { + const { ownerOf } = await this._client.query( + gql(queries.ownerOf), + { blockHash, contractAddress, tokenId } + ); + + return ownerOf; + } + + async getGetApproved (blockHash: string, contractAddress: string, tokenId: bigint): Promise { + const { getApproved } = await this._client.query( + gql(queries.getApproved), + { blockHash, contractAddress, tokenId } + ); + + return getApproved; + } + + async getIsApprovedForAll (blockHash: string, contractAddress: string, owner: string, operator: string): Promise { + const { isApprovedForAll } = await this._client.query( + gql(queries.isApprovedForAll), + { blockHash, contractAddress, owner, operator } + ); + + return isApprovedForAll; + } + + async getName (blockHash: string, contractAddress: string): Promise { + const { name } = await this._client.query( + gql(queries.name), + { blockHash, contractAddress } + ); + + return name; + } + + async getSymbol (blockHash: string, contractAddress: string): Promise { + const { symbol } = await this._client.query( + gql(queries.symbol), + { blockHash, contractAddress } + ); + + return symbol; + } + + async getTokenURI (blockHash: string, contractAddress: string, tokenId: bigint): Promise { + const { tokenURI } = await this._client.query( + gql(queries.tokenURI), + { blockHash, contractAddress, tokenId } + ); + + return tokenURI; + } + + async _getName (blockHash: string, contractAddress: string): Promise { + const { _name } = await this._client.query( + gql(queries._name), + { blockHash, contractAddress } + ); + + return _name; + } + + async _getSymbol (blockHash: string, contractAddress: string): Promise { + const { _symbol } = await this._client.query( + gql(queries._symbol), + { blockHash, contractAddress } + ); + + return _symbol; + } + + async _getOwners (blockHash: string, contractAddress: string, key0: bigint): Promise { + const { _owners } = await this._client.query( + gql(queries._owners), + { blockHash, contractAddress, key0 } + ); + + return _owners; + } + + async _getBalances (blockHash: string, contractAddress: string, key0: string): Promise { + const { _balances } = await this._client.query( + gql(queries._balances), + { blockHash, contractAddress, key0 } + ); + + return _balances; + } + + async _getTokenApprovals (blockHash: string, contractAddress: string, key0: bigint): Promise { + const { _tokenApprovals } = await this._client.query( + gql(queries._tokenApprovals), + { blockHash, contractAddress, key0 } + ); + + return _tokenApprovals; + } + + async _getOperatorApprovals (blockHash: string, 
contractAddress: string, key0: string, key1: string): Promise { + const { _operatorApprovals } = await this._client.query( + gql(queries._operatorApprovals), + { blockHash, contractAddress, key0, key1 } + ); + + return _operatorApprovals; + } + + async getEvents (blockHash: string, contractAddress: string, name: string): Promise { + const { events } = await this._client.query( + gql(queries.events), + { blockHash, contractAddress, name } + ); + + return events; + } + + async getEventsInRange (fromBlockNumber: number, toBlockNumber: number): Promise { + const { eventsInRange } = await this._client.query( + gql(queries.eventsInRange), + { fromBlockNumber, toBlockNumber } + ); + + return eventsInRange; + } + + async watchContract (contractAddress: string, startingBlock?: number): Promise { + const { watchContract } = await this._client.mutate( + gql(mutations.watchContract), + { contractAddress, startingBlock } + ); + + return watchContract; + } + + async watchEvents (onNext: (value: any) => void): Promise { + return this._client.subscribe( + gql(subscriptions.onEvent), + ({ data }) => { + onNext(data.onEvent); + } + ); + } +} diff --git a/packages/erc721-watcher/src/database.ts b/packages/erc721-watcher/src/database.ts new file mode 100644 index 00000000..877e021a --- /dev/null +++ b/packages/erc721-watcher/src/database.ts @@ -0,0 +1,469 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import assert from 'assert'; +import { Connection, ConnectionOptions, DeepPartial, FindConditions, QueryRunner, FindManyOptions } from 'typeorm'; +import path from 'path'; + +import { IPLDDatabase as BaseDatabase, IPLDDatabaseInterface, QueryOptions, StateKind, Where } from '@vulcanize/util'; + +import { Contract } from './entity/Contract'; +import { Event } from './entity/Event'; +import { SyncStatus } from './entity/SyncStatus'; +import { IpldStatus } from './entity/IpldStatus'; +import { BlockProgress } from './entity/BlockProgress'; +import { IPLDBlock } from './entity/IPLDBlock'; +import { SupportsInterface } from './entity/SupportsInterface'; +import { BalanceOf } from './entity/BalanceOf'; +import { OwnerOf } from './entity/OwnerOf'; +import { GetApproved } from './entity/GetApproved'; +import { IsApprovedForAll } from './entity/IsApprovedForAll'; +import { Name } from './entity/Name'; +import { Symbol } from './entity/Symbol'; +import { TokenURI } from './entity/TokenURI'; +import { _Name } from './entity/_Name'; +import { _Symbol } from './entity/_Symbol'; +import { _Owners } from './entity/_Owners'; +import { _Balances } from './entity/_Balances'; +import { _TokenApprovals } from './entity/_TokenApprovals'; +import { _OperatorApprovals } from './entity/_OperatorApprovals'; + +export class Database implements IPLDDatabaseInterface { + _config: ConnectionOptions; + _conn!: Connection; + _baseDatabase: BaseDatabase; + _propColMaps: { [key: string]: Map; } + + constructor (config: ConnectionOptions) { + assert(config); + + this._config = { + ...config, + entities: [path.join(__dirname, 'entity/*')] + }; + + this._baseDatabase = new BaseDatabase(this._config); + this._propColMaps = {}; + } + + async init (): Promise { + this._conn = await this._baseDatabase.init(); + this._setPropColMaps(); + } + + async close (): Promise { + return this._baseDatabase.close(); + } + + async getSupportsInterface ({ blockHash, contractAddress, interfaceId }: { blockHash: string, contractAddress: string, interfaceId: string }): Promise { + return this._conn.getRepository(SupportsInterface) + .findOne({ + blockHash, + 
contractAddress, + interfaceId + }); + } + + async getBalanceOf ({ blockHash, contractAddress, owner }: { blockHash: string, contractAddress: string, owner: string }): Promise { + return this._conn.getRepository(BalanceOf) + .findOne({ + blockHash, + contractAddress, + owner + }); + } + + async getOwnerOf ({ blockHash, contractAddress, tokenId }: { blockHash: string, contractAddress: string, tokenId: bigint }): Promise { + return this._conn.getRepository(OwnerOf) + .findOne({ + blockHash, + contractAddress, + tokenId + }); + } + + async getGetApproved ({ blockHash, contractAddress, tokenId }: { blockHash: string, contractAddress: string, tokenId: bigint }): Promise { + return this._conn.getRepository(GetApproved) + .findOne({ + blockHash, + contractAddress, + tokenId + }); + } + + async getIsApprovedForAll ({ blockHash, contractAddress, owner, operator }: { blockHash: string, contractAddress: string, owner: string, operator: string }): Promise { + return this._conn.getRepository(IsApprovedForAll) + .findOne({ + blockHash, + contractAddress, + owner, + operator + }); + } + + async getName ({ blockHash, contractAddress }: { blockHash: string, contractAddress: string }): Promise { + return this._conn.getRepository(Name) + .findOne({ + blockHash, + contractAddress + }); + } + + // eslint-disable-next-line @typescript-eslint/ban-types + async getSymbol ({ blockHash, contractAddress }: { blockHash: string, contractAddress: string }): Promise { + return this._conn.getRepository(Symbol) + .findOne({ + blockHash, + contractAddress + }); + } + + async getTokenURI ({ blockHash, contractAddress, tokenId }: { blockHash: string, contractAddress: string, tokenId: bigint }): Promise { + return this._conn.getRepository(TokenURI) + .findOne({ + blockHash, + contractAddress, + tokenId + }); + } + + async _getName ({ blockHash, contractAddress }: { blockHash: string, contractAddress: string }): Promise<_Name | undefined> { + return this._conn.getRepository(_Name) + .findOne({ + blockHash, + contractAddress + }); + } + + async _getSymbol ({ blockHash, contractAddress }: { blockHash: string, contractAddress: string }): Promise<_Symbol | undefined> { + return this._conn.getRepository(_Symbol) + .findOne({ + blockHash, + contractAddress + }); + } + + async _getOwners ({ blockHash, contractAddress, key0 }: { blockHash: string, contractAddress: string, key0: bigint }): Promise<_Owners | undefined> { + return this._conn.getRepository(_Owners) + .findOne({ + blockHash, + contractAddress, + key0 + }); + } + + async _getBalances ({ blockHash, contractAddress, key0 }: { blockHash: string, contractAddress: string, key0: string }): Promise<_Balances | undefined> { + return this._conn.getRepository(_Balances) + .findOne({ + blockHash, + contractAddress, + key0 + }); + } + + async _getTokenApprovals ({ blockHash, contractAddress, key0 }: { blockHash: string, contractAddress: string, key0: bigint }): Promise<_TokenApprovals | undefined> { + return this._conn.getRepository(_TokenApprovals) + .findOne({ + blockHash, + contractAddress, + key0 + }); + } + + async _getOperatorApprovals ({ blockHash, contractAddress, key0, key1 }: { blockHash: string, contractAddress: string, key0: string, key1: string }): Promise<_OperatorApprovals | undefined> { + return this._conn.getRepository(_OperatorApprovals) + .findOne({ + blockHash, + contractAddress, + key0, + key1 + }); + } + + async saveSupportsInterface ({ blockHash, blockNumber, contractAddress, interfaceId, value, proof }: DeepPartial): Promise { + const repo = 
this._conn.getRepository(SupportsInterface); + const entity = repo.create({ blockHash, blockNumber, contractAddress, interfaceId, value, proof }); + return repo.save(entity); + } + + async saveBalanceOf ({ blockHash, blockNumber, contractAddress, owner, value, proof }: DeepPartial): Promise { + const repo = this._conn.getRepository(BalanceOf); + const entity = repo.create({ blockHash, blockNumber, contractAddress, owner, value, proof }); + return repo.save(entity); + } + + async saveOwnerOf ({ blockHash, blockNumber, contractAddress, tokenId, value, proof }: DeepPartial): Promise { + const repo = this._conn.getRepository(OwnerOf); + const entity = repo.create({ blockHash, blockNumber, contractAddress, tokenId, value, proof }); + return repo.save(entity); + } + + async saveGetApproved ({ blockHash, blockNumber, contractAddress, tokenId, value, proof }: DeepPartial): Promise { + const repo = this._conn.getRepository(GetApproved); + const entity = repo.create({ blockHash, blockNumber, contractAddress, tokenId, value, proof }); + return repo.save(entity); + } + + async saveIsApprovedForAll ({ blockHash, blockNumber, contractAddress, owner, operator, value, proof }: DeepPartial): Promise { + const repo = this._conn.getRepository(IsApprovedForAll); + const entity = repo.create({ blockHash, blockNumber, contractAddress, owner, operator, value, proof }); + return repo.save(entity); + } + + async saveName ({ blockHash, blockNumber, contractAddress, value, proof }: DeepPartial): Promise { + const repo = this._conn.getRepository(Name); + const entity = repo.create({ blockHash, blockNumber, contractAddress, value, proof }); + return repo.save(entity); + } + + // eslint-disable-next-line @typescript-eslint/ban-types + async saveSymbol ({ blockHash, blockNumber, contractAddress, value, proof }: DeepPartial): Promise { + const repo = this._conn.getRepository(Symbol); + const entity = repo.create({ blockHash, blockNumber, contractAddress, value, proof }); + return repo.save(entity); + } + + async saveTokenURI ({ blockHash, blockNumber, contractAddress, tokenId, value, proof }: DeepPartial): Promise { + const repo = this._conn.getRepository(TokenURI); + const entity = repo.create({ blockHash, blockNumber, contractAddress, tokenId, value, proof }); + return repo.save(entity); + } + + async _saveName ({ blockHash, blockNumber, contractAddress, value, proof }: DeepPartial<_Name>): Promise<_Name> { + const repo = this._conn.getRepository(_Name); + const entity = repo.create({ blockHash, blockNumber, contractAddress, value, proof }); + return repo.save(entity); + } + + async _saveSymbol ({ blockHash, blockNumber, contractAddress, value, proof }: DeepPartial<_Symbol>): Promise<_Symbol> { + const repo = this._conn.getRepository(_Symbol); + const entity = repo.create({ blockHash, blockNumber, contractAddress, value, proof }); + return repo.save(entity); + } + + async _saveOwners ({ blockHash, blockNumber, contractAddress, key0, value, proof }: DeepPartial<_Owners>): Promise<_Owners> { + const repo = this._conn.getRepository(_Owners); + const entity = repo.create({ blockHash, blockNumber, contractAddress, key0, value, proof }); + return repo.save(entity); + } + + async _saveBalances ({ blockHash, blockNumber, contractAddress, key0, value, proof }: DeepPartial<_Balances>): Promise<_Balances> { + const repo = this._conn.getRepository(_Balances); + const entity = repo.create({ blockHash, blockNumber, contractAddress, key0, value, proof }); + return repo.save(entity); + } + + async _saveTokenApprovals ({ blockHash, 
blockNumber, contractAddress, key0, value, proof }: DeepPartial<_TokenApprovals>): Promise<_TokenApprovals> { + const repo = this._conn.getRepository(_TokenApprovals); + const entity = repo.create({ blockHash, blockNumber, contractAddress, key0, value, proof }); + return repo.save(entity); + } + + async _saveOperatorApprovals ({ blockHash, blockNumber, contractAddress, key0, key1, value, proof }: DeepPartial<_OperatorApprovals>): Promise<_OperatorApprovals> { + const repo = this._conn.getRepository(_OperatorApprovals); + const entity = repo.create({ blockHash, blockNumber, contractAddress, key0, key1, value, proof }); + return repo.save(entity); + } + + getNewIPLDBlock (): IPLDBlock { + return new IPLDBlock(); + } + + async getIPLDBlocks (where: FindConditions): Promise { + const repo = this._conn.getRepository(IPLDBlock); + + return this._baseDatabase.getIPLDBlocks(repo, where); + } + + async getLatestIPLDBlock (contractAddress: string, kind: StateKind | null, blockNumber?: number): Promise { + const repo = this._conn.getRepository(IPLDBlock); + + return this._baseDatabase.getLatestIPLDBlock(repo, contractAddress, kind, blockNumber); + } + + async getPrevIPLDBlock (blockHash: string, contractAddress: string, kind?: string): Promise { + const repo = this._conn.getRepository(IPLDBlock); + + return this._baseDatabase.getPrevIPLDBlock(repo, blockHash, contractAddress, kind); + } + + // Fetch all diff IPLDBlocks after the specified block number. + async getDiffIPLDBlocksByBlocknumber (contractAddress: string, blockNumber: number): Promise { + const repo = this._conn.getRepository(IPLDBlock); + + return this._baseDatabase.getDiffIPLDBlocksByBlocknumber(repo, contractAddress, blockNumber); + } + + async saveOrUpdateIPLDBlock (dbTx: QueryRunner, ipldBlock: IPLDBlock): Promise { + const repo = dbTx.manager.getRepository(IPLDBlock); + + return this._baseDatabase.saveOrUpdateIPLDBlock(repo, ipldBlock); + } + + async removeIPLDBlocks (dbTx: QueryRunner, blockNumber: number, kind: string): Promise { + const repo = dbTx.manager.getRepository(IPLDBlock); + + await this._baseDatabase.removeIPLDBlocks(repo, blockNumber, kind); + } + + async getIPLDStatus (): Promise { + const repo = this._conn.getRepository(IpldStatus); + + return this._baseDatabase.getIPLDStatus(repo); + } + + async updateIPLDStatusHooksBlock (queryRunner: QueryRunner, blockNumber: number, force?: boolean): Promise { + const repo = queryRunner.manager.getRepository(IpldStatus); + + return this._baseDatabase.updateIPLDStatusHooksBlock(repo, blockNumber, force); + } + + async updateIPLDStatusCheckpointBlock (queryRunner: QueryRunner, blockNumber: number, force?: boolean): Promise { + const repo = queryRunner.manager.getRepository(IpldStatus); + + return this._baseDatabase.updateIPLDStatusCheckpointBlock(repo, blockNumber, force); + } + + async updateIPLDStatusIPFSBlock (queryRunner: QueryRunner, blockNumber: number, force?: boolean): Promise { + const repo = queryRunner.manager.getRepository(IpldStatus); + + return this._baseDatabase.updateIPLDStatusIPFSBlock(repo, blockNumber, force); + } + + async getContracts (): Promise { + const repo = this._conn.getRepository(Contract); + + return this._baseDatabase.getContracts(repo); + } + + async createTransactionRunner (): Promise { + return this._baseDatabase.createTransactionRunner(); + } + + async getProcessedBlockCountForRange (fromBlockNumber: number, toBlockNumber: number): Promise<{ expected: number, actual: number }> { + const repo = this._conn.getRepository(BlockProgress); + + return 
this._baseDatabase.getProcessedBlockCountForRange(repo, fromBlockNumber, toBlockNumber); + } + + async getEventsInRange (fromBlockNumber: number, toBlockNumber: number): Promise> { + const repo = this._conn.getRepository(Event); + + return this._baseDatabase.getEventsInRange(repo, fromBlockNumber, toBlockNumber); + } + + async saveEventEntity (queryRunner: QueryRunner, entity: Event): Promise { + const repo = queryRunner.manager.getRepository(Event); + return this._baseDatabase.saveEventEntity(repo, entity); + } + + async getBlockEvents (blockHash: string, where: Where, queryOptions: QueryOptions): Promise { + const repo = this._conn.getRepository(Event); + + return this._baseDatabase.getBlockEvents(repo, blockHash, where, queryOptions); + } + + async saveEvents (queryRunner: QueryRunner, block: DeepPartial, events: DeepPartial[]): Promise { + const blockRepo = queryRunner.manager.getRepository(BlockProgress); + const eventRepo = queryRunner.manager.getRepository(Event); + + return this._baseDatabase.saveEvents(blockRepo, eventRepo, block, events); + } + + async saveContract (queryRunner: QueryRunner, address: string, kind: string, checkpoint: boolean, startingBlock: number): Promise { + const repo = queryRunner.manager.getRepository(Contract); + + return this._baseDatabase.saveContract(repo, address, kind, checkpoint, startingBlock); + } + + async updateSyncStatusIndexedBlock (queryRunner: QueryRunner, blockHash: string, blockNumber: number, force = false): Promise { + const repo = queryRunner.manager.getRepository(SyncStatus); + + return this._baseDatabase.updateSyncStatusIndexedBlock(repo, blockHash, blockNumber, force); + } + + async updateSyncStatusCanonicalBlock (queryRunner: QueryRunner, blockHash: string, blockNumber: number, force = false): Promise { + const repo = queryRunner.manager.getRepository(SyncStatus); + + return this._baseDatabase.updateSyncStatusCanonicalBlock(repo, blockHash, blockNumber, force); + } + + async updateSyncStatusChainHead (queryRunner: QueryRunner, blockHash: string, blockNumber: number, force = false): Promise { + const repo = queryRunner.manager.getRepository(SyncStatus); + + return this._baseDatabase.updateSyncStatusChainHead(repo, blockHash, blockNumber, force); + } + + async getSyncStatus (queryRunner: QueryRunner): Promise { + const repo = queryRunner.manager.getRepository(SyncStatus); + + return this._baseDatabase.getSyncStatus(repo); + } + + async getEvent (id: string): Promise { + const repo = this._conn.getRepository(Event); + + return this._baseDatabase.getEvent(repo, id); + } + + async getBlocksAtHeight (height: number, isPruned: boolean): Promise { + const repo = this._conn.getRepository(BlockProgress); + + return this._baseDatabase.getBlocksAtHeight(repo, height, isPruned); + } + + async markBlocksAsPruned (queryRunner: QueryRunner, blocks: BlockProgress[]): Promise { + const repo = queryRunner.manager.getRepository(BlockProgress); + + return this._baseDatabase.markBlocksAsPruned(repo, blocks); + } + + async getBlockProgress (blockHash: string): Promise { + const repo = this._conn.getRepository(BlockProgress); + return this._baseDatabase.getBlockProgress(repo, blockHash); + } + + async getBlockProgressEntities (where: FindConditions, options: FindManyOptions): Promise { + const repo = this._conn.getRepository(BlockProgress); + + return this._baseDatabase.getBlockProgressEntities(repo, where, options); + } + + async updateBlockProgress (queryRunner: QueryRunner, block: BlockProgress, lastProcessedEventIndex: number): Promise { + const repo = 
queryRunner.manager.getRepository(BlockProgress); + + return this._baseDatabase.updateBlockProgress(repo, block, lastProcessedEventIndex); + } + + async removeEntities (queryRunner: QueryRunner, entity: new () => Entity, findConditions?: FindManyOptions | FindConditions): Promise { + return this._baseDatabase.removeEntities(queryRunner, entity, findConditions); + } + + async getAncestorAtDepth (blockHash: string, depth: number): Promise { + return this._baseDatabase.getAncestorAtDepth(blockHash, depth); + } + + _getPropertyColumnMapForEntity (entityName: string): Map { + return this._conn.getMetadata(entityName).ownColumns.reduce((acc, curr) => { + return acc.set(curr.propertyName, curr.databaseName); + }, new Map()); + } + + _setPropColMaps (): void { + this._propColMaps.SupportsInterface = this._getPropertyColumnMapForEntity('SupportsInterface'); + this._propColMaps.BalanceOf = this._getPropertyColumnMapForEntity('BalanceOf'); + this._propColMaps.OwnerOf = this._getPropertyColumnMapForEntity('OwnerOf'); + this._propColMaps.GetApproved = this._getPropertyColumnMapForEntity('GetApproved'); + this._propColMaps.IsApprovedForAll = this._getPropertyColumnMapForEntity('IsApprovedForAll'); + this._propColMaps.Name = this._getPropertyColumnMapForEntity('Name'); + this._propColMaps.Symbol = this._getPropertyColumnMapForEntity('Symbol'); + this._propColMaps.TokenURI = this._getPropertyColumnMapForEntity('TokenURI'); + this._propColMaps._Name = this._getPropertyColumnMapForEntity('_Name'); + this._propColMaps._Symbol = this._getPropertyColumnMapForEntity('_Symbol'); + this._propColMaps._Owners = this._getPropertyColumnMapForEntity('_Owners'); + this._propColMaps._Balances = this._getPropertyColumnMapForEntity('_Balances'); + this._propColMaps._TokenApprovals = this._getPropertyColumnMapForEntity('_TokenApprovals'); + this._propColMaps._OperatorApprovals = this._getPropertyColumnMapForEntity('_OperatorApprovals'); + } +} diff --git a/packages/erc721-watcher/src/entity/BalanceOf.ts b/packages/erc721-watcher/src/entity/BalanceOf.ts new file mode 100644 index 00000000..77f91af1 --- /dev/null +++ b/packages/erc721-watcher/src/entity/BalanceOf.ts @@ -0,0 +1,31 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; +import { bigintTransformer } from '@vulcanize/util'; + +@Entity() +@Index(['blockHash', 'contractAddress', 'owner'], { unique: true }) +export class BalanceOf { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('varchar', { length: 42 }) + owner!: string; + + @Column('numeric', { transformer: bigintTransformer }) + value!: bigint; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/BlockProgress.ts b/packages/erc721-watcher/src/entity/BlockProgress.ts new file mode 100644 index 00000000..a4ba34c1 --- /dev/null +++ b/packages/erc721-watcher/src/entity/BlockProgress.ts @@ -0,0 +1,48 @@ +// +// Copyright 2021 Vulcanize, Inc. 
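// Tracks per-block indexing progress for the watcher: event counts, the last processed event index, and completion/pruning status.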
+// + +import { Entity, PrimaryGeneratedColumn, Column, Index, CreateDateColumn } from 'typeorm'; +import { BlockProgressInterface } from '@vulcanize/util'; + +@Entity() +@Index(['blockHash'], { unique: true }) +@Index(['blockNumber']) +@Index(['parentHash']) +export class BlockProgress implements BlockProgressInterface { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar') + cid!: string; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('varchar', { length: 66 }) + parentHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('integer') + blockTimestamp!: number; + + @Column('integer') + numEvents!: number; + + @Column('integer') + numProcessedEvents!: number; + + @Column('integer') + lastProcessedEventIndex!: number; + + @Column('boolean') + isComplete!: boolean; + + @Column('boolean', { default: false }) + isPruned!: boolean; + + @CreateDateColumn() + createdAt!: Date; +} diff --git a/packages/erc721-watcher/src/entity/Contract.ts b/packages/erc721-watcher/src/entity/Contract.ts new file mode 100644 index 00000000..0727c538 --- /dev/null +++ b/packages/erc721-watcher/src/entity/Contract.ts @@ -0,0 +1,24 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; + +@Entity() +@Index(['address'], { unique: true }) +export class Contract { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 42 }) + address!: string; + + @Column('varchar') + kind!: string; + + @Column('boolean') + checkpoint!: boolean; + + @Column('integer') + startingBlock!: number; +} diff --git a/packages/erc721-watcher/src/entity/Event.ts b/packages/erc721-watcher/src/entity/Event.ts new file mode 100644 index 00000000..c7c09d6b --- /dev/null +++ b/packages/erc721-watcher/src/entity/Event.ts @@ -0,0 +1,38 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column, Index, ManyToOne } from 'typeorm'; +import { BlockProgress } from './BlockProgress'; + +@Entity() +@Index(['block', 'contract']) +@Index(['block', 'contract', 'eventName']) +export class Event { + @PrimaryGeneratedColumn() + id!: number; + + @ManyToOne(() => BlockProgress, { onDelete: 'CASCADE' }) + block!: BlockProgress; + + @Column('varchar', { length: 66 }) + txHash!: string; + + @Column('integer') + index!: number; + + @Column('varchar', { length: 42 }) + contract!: string; + + @Column('varchar', { length: 256 }) + eventName!: string; + + @Column('text') + eventInfo!: string; + + @Column('text') + extraInfo!: string; + + @Column('text') + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/GetApproved.ts b/packages/erc721-watcher/src/entity/GetApproved.ts new file mode 100644 index 00000000..727c539b --- /dev/null +++ b/packages/erc721-watcher/src/entity/GetApproved.ts @@ -0,0 +1,31 @@ +// +// Copyright 2021 Vulcanize, Inc. 
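// Caches results of getApproved(tokenId) eth_calls made by the Indexer, keyed by (blockHash, contractAddress, tokenId).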
+// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; +import { bigintTransformer } from '@vulcanize/util'; + +@Entity() +@Index(['blockHash', 'contractAddress', 'tokenId'], { unique: true }) +export class GetApproved { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('numeric', { transformer: bigintTransformer }) + tokenId!: bigint; + + @Column('varchar') + value!: string; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/IPLDBlock.ts b/packages/erc721-watcher/src/entity/IPLDBlock.ts new file mode 100644 index 00000000..25e5ca21 --- /dev/null +++ b/packages/erc721-watcher/src/entity/IPLDBlock.ts @@ -0,0 +1,31 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column, Index, ManyToOne } from 'typeorm'; +import { StateKind } from '@vulcanize/util'; +import { BlockProgress } from './BlockProgress'; + +@Entity() +@Index(['cid'], { unique: true }) +@Index(['block', 'contractAddress']) +@Index(['block', 'contractAddress', 'kind'], { unique: true }) +export class IPLDBlock { + @PrimaryGeneratedColumn() + id!: number; + + @ManyToOne(() => BlockProgress, { onDelete: 'CASCADE' }) + block!: BlockProgress; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('varchar') + cid!: string; + + @Column({ type: 'enum', enum: StateKind }) + kind!: StateKind; + + @Column('bytea') + data!: Buffer; +} diff --git a/packages/erc721-watcher/src/entity/IpldStatus.ts b/packages/erc721-watcher/src/entity/IpldStatus.ts new file mode 100644 index 00000000..d99eebf0 --- /dev/null +++ b/packages/erc721-watcher/src/entity/IpldStatus.ts @@ -0,0 +1,20 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column } from 'typeorm'; + +@Entity() +export class IpldStatus { + @PrimaryGeneratedColumn() + id!: number; + + @Column('integer') + latestHooksBlockNumber!: number; + + @Column('integer') + latestCheckpointBlockNumber!: number; + + @Column('integer') + latestIPFSBlockNumber!: number; +} diff --git a/packages/erc721-watcher/src/entity/IsApprovedForAll.ts b/packages/erc721-watcher/src/entity/IsApprovedForAll.ts new file mode 100644 index 00000000..7f0c2423 --- /dev/null +++ b/packages/erc721-watcher/src/entity/IsApprovedForAll.ts @@ -0,0 +1,33 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; + +@Entity() +@Index(['blockHash', 'contractAddress', 'owner', 'operator'], { unique: true }) +export class IsApprovedForAll { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('varchar', { length: 42 }) + owner!: string; + + @Column('varchar', { length: 42 }) + operator!: string; + + @Column('boolean') + value!: boolean; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/Name.ts b/packages/erc721-watcher/src/entity/Name.ts new file mode 100644 index 00000000..19850a1e --- /dev/null +++ b/packages/erc721-watcher/src/entity/Name.ts @@ -0,0 +1,27 @@ +// +// Copyright 2021 Vulcanize, Inc. 
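// Caches results of name() eth_calls made by the Indexer, keyed by (blockHash, contractAddress).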
+// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; + +@Entity() +@Index(['blockHash', 'contractAddress'], { unique: true }) +export class Name { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('varchar') + value!: string; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/OwnerOf.ts b/packages/erc721-watcher/src/entity/OwnerOf.ts new file mode 100644 index 00000000..35e48727 --- /dev/null +++ b/packages/erc721-watcher/src/entity/OwnerOf.ts @@ -0,0 +1,31 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; +import { bigintTransformer } from '@vulcanize/util'; + +@Entity() +@Index(['blockHash', 'contractAddress', 'tokenId'], { unique: true }) +export class OwnerOf { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('numeric', { transformer: bigintTransformer }) + tokenId!: bigint; + + @Column('varchar') + value!: string; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/SupportsInterface.ts b/packages/erc721-watcher/src/entity/SupportsInterface.ts new file mode 100644 index 00000000..f668540b --- /dev/null +++ b/packages/erc721-watcher/src/entity/SupportsInterface.ts @@ -0,0 +1,30 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; + +@Entity() +@Index(['blockHash', 'contractAddress', 'interfaceId'], { unique: true }) +export class SupportsInterface { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('varchar') + interfaceId!: string; + + @Column('boolean') + value!: boolean; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/Symbol.ts b/packages/erc721-watcher/src/entity/Symbol.ts new file mode 100644 index 00000000..e77aa200 --- /dev/null +++ b/packages/erc721-watcher/src/entity/Symbol.ts @@ -0,0 +1,27 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; + +@Entity() +@Index(['blockHash', 'contractAddress'], { unique: true }) +export class Symbol { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('varchar') + value!: string; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/SyncStatus.ts b/packages/erc721-watcher/src/entity/SyncStatus.ts new file mode 100644 index 00000000..74983ed5 --- /dev/null +++ b/packages/erc721-watcher/src/entity/SyncStatus.ts @@ -0,0 +1,30 @@ +// +// Copyright 2021 Vulcanize, Inc. 
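// Tracks the watcher's sync status: chain head, latest indexed and latest canonical block (hash and number for each).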
+// + +import { Entity, PrimaryGeneratedColumn, Column } from 'typeorm'; +import { SyncStatusInterface } from '@vulcanize/util'; + +@Entity() +export class SyncStatus implements SyncStatusInterface { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + chainHeadBlockHash!: string; + + @Column('integer') + chainHeadBlockNumber!: number; + + @Column('varchar', { length: 66 }) + latestIndexedBlockHash!: string; + + @Column('integer') + latestIndexedBlockNumber!: number; + + @Column('varchar', { length: 66 }) + latestCanonicalBlockHash!: string; + + @Column('integer') + latestCanonicalBlockNumber!: number; +} diff --git a/packages/erc721-watcher/src/entity/TokenURI.ts b/packages/erc721-watcher/src/entity/TokenURI.ts new file mode 100644 index 00000000..d783f321 --- /dev/null +++ b/packages/erc721-watcher/src/entity/TokenURI.ts @@ -0,0 +1,31 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; +import { bigintTransformer } from '@vulcanize/util'; + +@Entity() +@Index(['blockHash', 'contractAddress', 'tokenId'], { unique: true }) +export class TokenURI { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('numeric', { transformer: bigintTransformer }) + tokenId!: bigint; + + @Column('varchar') + value!: string; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/_Balances.ts b/packages/erc721-watcher/src/entity/_Balances.ts new file mode 100644 index 00000000..3b7c9c70 --- /dev/null +++ b/packages/erc721-watcher/src/entity/_Balances.ts @@ -0,0 +1,31 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; +import { bigintTransformer } from '@vulcanize/util'; + +@Entity() +@Index(['blockHash', 'contractAddress', 'key0'], { unique: true }) +export class _Balances { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('varchar', { length: 42 }) + key0!: string; + + @Column('numeric', { transformer: bigintTransformer }) + value!: bigint; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/_Name.ts b/packages/erc721-watcher/src/entity/_Name.ts new file mode 100644 index 00000000..13ac2945 --- /dev/null +++ b/packages/erc721-watcher/src/entity/_Name.ts @@ -0,0 +1,27 @@ +// +// Copyright 2021 Vulcanize, Inc. 
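// Caches the _name storage variable read via the contract storage layout (see Indexer._name), keyed by (blockHash, contractAddress).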
+// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; + +@Entity() +@Index(['blockHash', 'contractAddress'], { unique: true }) +export class _Name { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('varchar') + value!: string; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/_OperatorApprovals.ts b/packages/erc721-watcher/src/entity/_OperatorApprovals.ts new file mode 100644 index 00000000..cd152900 --- /dev/null +++ b/packages/erc721-watcher/src/entity/_OperatorApprovals.ts @@ -0,0 +1,33 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; + +@Entity() +@Index(['blockHash', 'contractAddress', 'key0', 'key1'], { unique: true }) +export class _OperatorApprovals { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('varchar', { length: 42 }) + key0!: string; + + @Column('varchar', { length: 42 }) + key1!: string; + + @Column('boolean') + value!: boolean; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/_Owners.ts b/packages/erc721-watcher/src/entity/_Owners.ts new file mode 100644 index 00000000..3ee73ea4 --- /dev/null +++ b/packages/erc721-watcher/src/entity/_Owners.ts @@ -0,0 +1,31 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; +import { bigintTransformer } from '@vulcanize/util'; + +@Entity() +@Index(['blockHash', 'contractAddress', 'key0'], { unique: true }) +export class _Owners { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('numeric', { transformer: bigintTransformer }) + key0!: bigint; + + @Column('varchar') + value!: string; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/_Symbol.ts b/packages/erc721-watcher/src/entity/_Symbol.ts new file mode 100644 index 00000000..e39a64d3 --- /dev/null +++ b/packages/erc721-watcher/src/entity/_Symbol.ts @@ -0,0 +1,27 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; + +@Entity() +@Index(['blockHash', 'contractAddress'], { unique: true }) +export class _Symbol { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('varchar') + value!: string; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/entity/_TokenApprovals.ts b/packages/erc721-watcher/src/entity/_TokenApprovals.ts new file mode 100644 index 00000000..032a16b3 --- /dev/null +++ b/packages/erc721-watcher/src/entity/_TokenApprovals.ts @@ -0,0 +1,31 @@ +// +// Copyright 2021 Vulcanize, Inc. 
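// Caches entries of the _tokenApprovals storage mapping (tokenId as key0, approved address as value) read via the storage layout (see Indexer._tokenApprovals).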
+// + +import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm'; +import { bigintTransformer } from '@vulcanize/util'; + +@Entity() +@Index(['blockHash', 'contractAddress', 'key0'], { unique: true }) +export class _TokenApprovals { + @PrimaryGeneratedColumn() + id!: number; + + @Column('varchar', { length: 66 }) + blockHash!: string; + + @Column('integer') + blockNumber!: number; + + @Column('varchar', { length: 42 }) + contractAddress!: string; + + @Column('numeric', { transformer: bigintTransformer }) + key0!: bigint; + + @Column('varchar') + value!: string; + + @Column('text', { nullable: true }) + proof!: string; +} diff --git a/packages/erc721-watcher/src/events.ts b/packages/erc721-watcher/src/events.ts new file mode 100644 index 00000000..a23a9f19 --- /dev/null +++ b/packages/erc721-watcher/src/events.ts @@ -0,0 +1,121 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import assert from 'assert'; +import debug from 'debug'; +import { PubSub } from 'apollo-server-express'; + +import { EthClient } from '@vulcanize/ipld-eth-client'; +import { + JobQueue, + EventWatcher as BaseEventWatcher, + EventWatcherInterface, + QUEUE_BLOCK_PROCESSING, + QUEUE_EVENT_PROCESSING, + UNKNOWN_EVENT_NAME, + UpstreamConfig +} from '@vulcanize/util'; + +import { Indexer } from './indexer'; +import { Event } from './entity/Event'; + +const EVENT = 'event'; + +const log = debug('vulcanize:events'); + +export class EventWatcher implements EventWatcherInterface { + _ethClient: EthClient + _indexer: Indexer + _subscription: ZenObservable.Subscription | undefined + _baseEventWatcher: BaseEventWatcher + _pubsub: PubSub + _jobQueue: JobQueue + + constructor (upstreamConfig: UpstreamConfig, ethClient: EthClient, indexer: Indexer, pubsub: PubSub, jobQueue: JobQueue) { + assert(ethClient); + assert(indexer); + + this._ethClient = ethClient; + this._indexer = indexer; + this._pubsub = pubsub; + this._jobQueue = jobQueue; + this._baseEventWatcher = new BaseEventWatcher(upstreamConfig, this._ethClient, this._indexer, this._pubsub, this._jobQueue); + } + + getEventIterator (): AsyncIterator { + return this._pubsub.asyncIterator([EVENT]); + } + + getBlockProgressEventIterator (): AsyncIterator { + return this._baseEventWatcher.getBlockProgressEventIterator(); + } + + async start (): Promise { + assert(!this._subscription, 'subscription already started'); + + await this.initBlockProcessingOnCompleteHandler(); + await this.initEventProcessingOnCompleteHandler(); + this._baseEventWatcher.startBlockProcessing(); + } + + async stop (): Promise { + this._baseEventWatcher.stop(); + } + + async initBlockProcessingOnCompleteHandler (): Promise { + this._jobQueue.onComplete(QUEUE_BLOCK_PROCESSING, async (job) => { + const { id, data: { failed } } = job; + + if (failed) { + log(`Job ${id} for queue ${QUEUE_BLOCK_PROCESSING} failed`); + return; + } + + await this._baseEventWatcher.blockProcessingCompleteHandler(job); + }); + } + + async initEventProcessingOnCompleteHandler (): Promise { + await this._jobQueue.onComplete(QUEUE_EVENT_PROCESSING, async (job) => { + const { id, data: { request, failed, state, createdOn } } = job; + + if (failed) { + log(`Job ${id} for queue ${QUEUE_EVENT_PROCESSING} failed`); + return; + } + + const dbEvents = await this._baseEventWatcher.eventProcessingCompleteHandler(job); + const timeElapsedInSeconds = (Date.now() - Date.parse(createdOn)) / 1000; + + // Cannot publish individual event as they are processed together in a single job. 
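// Instead, the completed events are iterated below and published one by one, subject to the max completion lag check.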
+ // TODO: Use a different pubsub to publish event from job-runner. + // https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries + for (const dbEvent of dbEvents) { + log(`Job onComplete event ${dbEvent.id} publish ${!!request.data.publish}`); + + if (!failed && state === 'completed' && request.data.publish) { + // Check for max acceptable lag time between request and sending results to live subscribers. + if (timeElapsedInSeconds <= this._jobQueue.maxCompletionLag) { + await this.publishEventToSubscribers(dbEvent, timeElapsedInSeconds); + } else { + log(`event ${dbEvent.id} is too old (${timeElapsedInSeconds}s), not broadcasting to live subscribers`); + } + } + } + }); + } + + async publishEventToSubscribers (dbEvent: Event, timeElapsedInSeconds: number): Promise { + if (dbEvent && dbEvent.eventName !== UNKNOWN_EVENT_NAME) { + const resultEvent = this._indexer.getResultEvent(dbEvent); + + log(`pushing event to GQL subscribers (${timeElapsedInSeconds}s elapsed): ${resultEvent.event.__typename}`); + + // Publishing the event here will result in pushing the payload to GQL subscribers for `onEvent`. + await this._pubsub.publish(EVENT, { + onEvent: resultEvent + }); + } + } +} diff --git a/packages/erc721-watcher/src/fill.ts b/packages/erc721-watcher/src/fill.ts new file mode 100644 index 00000000..4a95f247 --- /dev/null +++ b/packages/erc721-watcher/src/fill.ts @@ -0,0 +1,98 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import path from 'path'; +import assert from 'assert'; +import 'reflect-metadata'; +import yargs from 'yargs'; +import { hideBin } from 'yargs/helpers'; +import debug from 'debug'; +import { PubSub } from 'apollo-server-express'; + +import { Config, getConfig, fillBlocks, JobQueue, DEFAULT_CONFIG_PATH, initClients } from '@vulcanize/util'; +import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node'; + +import { Database } from './database'; +import { Indexer } from './indexer'; +import { EventWatcher } from './events'; + +const log = debug('vulcanize:server'); + +export const main = async (): Promise => { + const argv = await yargs(hideBin(process.argv)).parserConfiguration({ + 'parse-numbers': false + }).options({ + configFile: { + alias: 'f', + type: 'string', + demandOption: true, + describe: 'configuration file path (toml)', + default: DEFAULT_CONFIG_PATH + }, + startBlock: { + type: 'number', + demandOption: true, + describe: 'Block number to start processing at' + }, + endBlock: { + type: 'number', + demandOption: true, + describe: 'Block number to stop processing at' + }, + prefetch: { + type: 'boolean', + default: false, + describe: 'Block and events prefetch mode' + }, + batchBlocks: { + type: 'number', + default: 10, + describe: 'Number of blocks prefetched in batch' + } + }).argv; + + const config: Config = await getConfig(argv.configFile); + const { ethClient, ethProvider } = await initClients(config); + + const db = new Database(config.database); + await db.init(); + + const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*')); + await graphDb.init(); + + const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server); + + const jobQueueConfig = config.jobQueue; + assert(jobQueueConfig, 'Missing job queue config'); + + const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig; + assert(dbConnectionString, 'Missing job queue db connection string'); + + const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: 
maxCompletionLagInSecs }); + await jobQueue.start(); + + const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher); + await indexer.init(); + + graphWatcher.setIndexer(indexer); + + // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway. + // Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries + const pubsub = new PubSub(); + + const eventWatcher = new EventWatcher(config.upstream, ethClient, indexer, pubsub, jobQueue); + + await fillBlocks(jobQueue, indexer, eventWatcher, config.upstream.ethServer.blockDelayInMilliSecs, argv); +}; + +main().catch(err => { + log(err); +}).finally(() => { + process.exit(); +}); + +process.on('SIGINT', () => { + log(`Exiting process ${process.pid} with code 0`); + process.exit(0); +}); diff --git a/packages/erc721-watcher/src/gql/index.ts b/packages/erc721-watcher/src/gql/index.ts new file mode 100644 index 00000000..4732f682 --- /dev/null +++ b/packages/erc721-watcher/src/gql/index.ts @@ -0,0 +1,3 @@ +export * as mutations from './mutations'; +export * as queries from './queries'; +export * as subscriptions from './subscriptions'; diff --git a/packages/erc721-watcher/src/gql/mutations/index.ts b/packages/erc721-watcher/src/gql/mutations/index.ts new file mode 100644 index 00000000..0c3bd853 --- /dev/null +++ b/packages/erc721-watcher/src/gql/mutations/index.ts @@ -0,0 +1,4 @@ +import fs from 'fs'; +import path from 'path'; + +export const watchContract = fs.readFileSync(path.join(__dirname, 'watchContract.gql'), 'utf8'); diff --git a/packages/erc721-watcher/src/gql/mutations/watchContract.gql b/packages/erc721-watcher/src/gql/mutations/watchContract.gql new file mode 100644 index 00000000..2ecc74f7 --- /dev/null +++ b/packages/erc721-watcher/src/gql/mutations/watchContract.gql @@ -0,0 +1,3 @@ +mutation watchContract($address: String!, $kind: String!, $checkpoint: Boolean!, $startingBlock: Int){ + watchContract(address: $address, kind: $kind, checkpoint: $checkpoint, startingBlock: $startingBlock) +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/_balances.gql b/packages/erc721-watcher/src/gql/queries/_balances.gql new file mode 100644 index 00000000..2dd7805e --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/_balances.gql @@ -0,0 +1,8 @@ +query _balances($blockHash: String!, $contractAddress: String!, $key0: String!){ + _balances(blockHash: $blockHash, contractAddress: $contractAddress, key0: $key0){ + value + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/_name.gql b/packages/erc721-watcher/src/gql/queries/_name.gql new file mode 100644 index 00000000..58ba0554 --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/_name.gql @@ -0,0 +1,8 @@ +query _name($blockHash: String!, $contractAddress: String!){ + _name(blockHash: $blockHash, contractAddress: $contractAddress){ + value + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/_operatorApprovals.gql b/packages/erc721-watcher/src/gql/queries/_operatorApprovals.gql new file mode 100644 index 00000000..3d89b84a --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/_operatorApprovals.gql @@ -0,0 +1,8 @@ +query _operatorApprovals($blockHash: String!, $contractAddress: String!, $key0: String!, $key1: String!){ + _operatorApprovals(blockHash: $blockHash, contractAddress: $contractAddress, key0: $key0, key1: $key1){ + value + 
proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/_owners.gql b/packages/erc721-watcher/src/gql/queries/_owners.gql new file mode 100644 index 00000000..8022b8b0 --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/_owners.gql @@ -0,0 +1,8 @@ +query _owners($blockHash: String!, $contractAddress: String!, $key0: BigInt!){ + _owners(blockHash: $blockHash, contractAddress: $contractAddress, key0: $key0){ + value + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/_symbol.gql b/packages/erc721-watcher/src/gql/queries/_symbol.gql new file mode 100644 index 00000000..a5794794 --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/_symbol.gql @@ -0,0 +1,8 @@ +query _symbol($blockHash: String!, $contractAddress: String!){ + _symbol(blockHash: $blockHash, contractAddress: $contractAddress){ + value + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/_tokenApprovals.gql b/packages/erc721-watcher/src/gql/queries/_tokenApprovals.gql new file mode 100644 index 00000000..15199a97 --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/_tokenApprovals.gql @@ -0,0 +1,8 @@ +query _tokenApprovals($blockHash: String!, $contractAddress: String!, $key0: BigInt!){ + _tokenApprovals(blockHash: $blockHash, contractAddress: $contractAddress, key0: $key0){ + value + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/balanceOf.gql b/packages/erc721-watcher/src/gql/queries/balanceOf.gql new file mode 100644 index 00000000..40af44c4 --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/balanceOf.gql @@ -0,0 +1,8 @@ +query balanceOf($blockHash: String!, $contractAddress: String!, $owner: String!){ + balanceOf(blockHash: $blockHash, contractAddress: $contractAddress, owner: $owner){ + value + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/events.gql b/packages/erc721-watcher/src/gql/queries/events.gql new file mode 100644 index 00000000..cd71e6ff --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/events.gql @@ -0,0 +1,39 @@ +query events($blockHash: String!, $contractAddress: String!, $name: String){ + events(blockHash: $blockHash, contractAddress: $contractAddress, name: $name){ + block{ + cid + hash + number + timestamp + parentHash + } + tx{ + hash + index + from + to + } + contract + eventIndex + event{ + ... on ApprovalEvent { + owner + approved + tokenId + } + ... on ApprovalForAllEvent { + owner + operator + approved + } + ... on TransferEvent { + from + to + tokenId + } + } + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/eventsInRange.gql b/packages/erc721-watcher/src/gql/queries/eventsInRange.gql new file mode 100644 index 00000000..2cec4398 --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/eventsInRange.gql @@ -0,0 +1,39 @@ +query eventsInRange($fromBlockNumber: Int!, $toBlockNumber: Int!){ + eventsInRange(fromBlockNumber: $fromBlockNumber, toBlockNumber: $toBlockNumber){ + block{ + cid + hash + number + timestamp + parentHash + } + tx{ + hash + index + from + to + } + contract + eventIndex + event{ + ... on ApprovalEvent { + owner + approved + tokenId + } + ... on ApprovalForAllEvent { + owner + operator + approved + } + ... 
on TransferEvent { + from + to + tokenId + } + } + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/getApproved.gql b/packages/erc721-watcher/src/gql/queries/getApproved.gql new file mode 100644 index 00000000..c290206d --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/getApproved.gql @@ -0,0 +1,8 @@ +query getApproved($blockHash: String!, $contractAddress: String!, $tokenId: BigInt!){ + getApproved(blockHash: $blockHash, contractAddress: $contractAddress, tokenId: $tokenId){ + value + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/getState.gql b/packages/erc721-watcher/src/gql/queries/getState.gql new file mode 100644 index 00000000..3b8f6050 --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/getState.gql @@ -0,0 +1,15 @@ +query getState($blockHash: String!, $contractAddress: String!, $kind: String){ + getState(blockHash: $blockHash, contractAddress: $contractAddress, kind: $kind){ + block{ + cid + hash + number + timestamp + parentHash + } + contractAddress + cid + kind + data + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/getStateByCID.gql b/packages/erc721-watcher/src/gql/queries/getStateByCID.gql new file mode 100644 index 00000000..6c3c4fd8 --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/getStateByCID.gql @@ -0,0 +1,15 @@ +query getStateByCID($cid: String!){ + getStateByCID(cid: $cid){ + block{ + cid + hash + number + timestamp + parentHash + } + contractAddress + cid + kind + data + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/index.ts b/packages/erc721-watcher/src/gql/queries/index.ts new file mode 100644 index 00000000..7f33700c --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/index.ts @@ -0,0 +1,21 @@ +import fs from 'fs'; +import path from 'path'; + +export const events = fs.readFileSync(path.join(__dirname, 'events.gql'), 'utf8'); +export const eventsInRange = fs.readFileSync(path.join(__dirname, 'eventsInRange.gql'), 'utf8'); +export const supportsInterface = fs.readFileSync(path.join(__dirname, 'supportsInterface.gql'), 'utf8'); +export const balanceOf = fs.readFileSync(path.join(__dirname, 'balanceOf.gql'), 'utf8'); +export const ownerOf = fs.readFileSync(path.join(__dirname, 'ownerOf.gql'), 'utf8'); +export const getApproved = fs.readFileSync(path.join(__dirname, 'getApproved.gql'), 'utf8'); +export const isApprovedForAll = fs.readFileSync(path.join(__dirname, 'isApprovedForAll.gql'), 'utf8'); +export const name = fs.readFileSync(path.join(__dirname, 'name.gql'), 'utf8'); +export const symbol = fs.readFileSync(path.join(__dirname, 'symbol.gql'), 'utf8'); +export const tokenURI = fs.readFileSync(path.join(__dirname, 'tokenURI.gql'), 'utf8'); +export const _name = fs.readFileSync(path.join(__dirname, '_name.gql'), 'utf8'); +export const _symbol = fs.readFileSync(path.join(__dirname, '_symbol.gql'), 'utf8'); +export const _owners = fs.readFileSync(path.join(__dirname, '_owners.gql'), 'utf8'); +export const _balances = fs.readFileSync(path.join(__dirname, '_balances.gql'), 'utf8'); +export const _tokenApprovals = fs.readFileSync(path.join(__dirname, '_tokenApprovals.gql'), 'utf8'); +export const _operatorApprovals = fs.readFileSync(path.join(__dirname, '_operatorApprovals.gql'), 'utf8'); +export const getStateByCID = fs.readFileSync(path.join(__dirname, 'getStateByCID.gql'), 'utf8'); +export const getState = 
fs.readFileSync(path.join(__dirname, 'getState.gql'), 'utf8'); diff --git a/packages/erc721-watcher/src/gql/queries/isApprovedForAll.gql b/packages/erc721-watcher/src/gql/queries/isApprovedForAll.gql new file mode 100644 index 00000000..6ea8ee5f --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/isApprovedForAll.gql @@ -0,0 +1,8 @@ +query isApprovedForAll($blockHash: String!, $contractAddress: String!, $owner: String!, $operator: String!){ + isApprovedForAll(blockHash: $blockHash, contractAddress: $contractAddress, owner: $owner, operator: $operator){ + value + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/name.gql b/packages/erc721-watcher/src/gql/queries/name.gql new file mode 100644 index 00000000..0431f0d6 --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/name.gql @@ -0,0 +1,8 @@ +query name($blockHash: String!, $contractAddress: String!){ + name(blockHash: $blockHash, contractAddress: $contractAddress){ + value + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/ownerOf.gql b/packages/erc721-watcher/src/gql/queries/ownerOf.gql new file mode 100644 index 00000000..0c281503 --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/ownerOf.gql @@ -0,0 +1,8 @@ +query ownerOf($blockHash: String!, $contractAddress: String!, $tokenId: BigInt!){ + ownerOf(blockHash: $blockHash, contractAddress: $contractAddress, tokenId: $tokenId){ + value + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/supportsInterface.gql b/packages/erc721-watcher/src/gql/queries/supportsInterface.gql new file mode 100644 index 00000000..7cc2a2cd --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/supportsInterface.gql @@ -0,0 +1,8 @@ +query supportsInterface($blockHash: String!, $contractAddress: String!, $interfaceId: String!){ + supportsInterface(blockHash: $blockHash, contractAddress: $contractAddress, interfaceId: $interfaceId){ + value + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/symbol.gql b/packages/erc721-watcher/src/gql/queries/symbol.gql new file mode 100644 index 00000000..fa278b80 --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/symbol.gql @@ -0,0 +1,8 @@ +query symbol($blockHash: String!, $contractAddress: String!){ + symbol(blockHash: $blockHash, contractAddress: $contractAddress){ + value + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/queries/tokenURI.gql b/packages/erc721-watcher/src/gql/queries/tokenURI.gql new file mode 100644 index 00000000..80679694 --- /dev/null +++ b/packages/erc721-watcher/src/gql/queries/tokenURI.gql @@ -0,0 +1,8 @@ +query tokenURI($blockHash: String!, $contractAddress: String!, $tokenId: BigInt!){ + tokenURI(blockHash: $blockHash, contractAddress: $contractAddress, tokenId: $tokenId){ + value + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/gql/subscriptions/index.ts b/packages/erc721-watcher/src/gql/subscriptions/index.ts new file mode 100644 index 00000000..f12910c5 --- /dev/null +++ b/packages/erc721-watcher/src/gql/subscriptions/index.ts @@ -0,0 +1,4 @@ +import fs from 'fs'; +import path from 'path'; + +export const onEvent = fs.readFileSync(path.join(__dirname, 'onEvent.gql'), 'utf8'); diff --git a/packages/erc721-watcher/src/gql/subscriptions/onEvent.gql 
b/packages/erc721-watcher/src/gql/subscriptions/onEvent.gql new file mode 100644 index 00000000..25cf0c8a --- /dev/null +++ b/packages/erc721-watcher/src/gql/subscriptions/onEvent.gql @@ -0,0 +1,39 @@ +subscription onEvent{ + onEvent{ + block{ + cid + hash + number + timestamp + parentHash + } + tx{ + hash + index + from + to + } + contract + eventIndex + event{ + ... on ApprovalEvent { + owner + approved + tokenId + } + ... on ApprovalForAllEvent { + owner + operator + approved + } + ... on TransferEvent { + from + to + tokenId + } + } + proof{ + data + } + } +} \ No newline at end of file diff --git a/packages/erc721-watcher/src/hooks.ts b/packages/erc721-watcher/src/hooks.ts new file mode 100644 index 00000000..09298b7c --- /dev/null +++ b/packages/erc721-watcher/src/hooks.ts @@ -0,0 +1,80 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import assert from 'assert'; + +// import { updateStateForMappingType, updateStateForElementaryType } from '@vulcanize/util'; + +import { Indexer, ResultEvent } from './indexer'; + +/** + * Hook function to store an initial state. + * @param indexer Indexer instance. + * @param blockHash Hash of the concerned block. + * @param contractAddress Address of the concerned contract. + * @returns Data block to be stored. + */ +export async function createInitialState (indexer: Indexer, contractAddress: string, blockHash: string): Promise<any> { + assert(indexer); + assert(blockHash); + assert(contractAddress); + + // Store the desired initial state in an IPLDBlock. + const ipldBlockData: any = { + state: {} + }; + + // Use updateStateForElementaryType to update initial state with an elementary property. + // Eg. const ipldBlockData = updateStateForElementaryType(ipldBlockData, '_totalBalance', result.value.toString()); + + // Use updateStateForMappingType to update initial state with a nested property. + // Eg. const ipldBlockData = updateStateForMappingType(ipldBlockData, '_allowances', [owner, spender], allowance.value.toString()); + + // Return initial state data to be saved. + return ipldBlockData; +} + +/** + * Hook function to create state diff. + * @param indexer Indexer instance that contains methods to fetch the contract variable values. + * @param blockHash Block hash of the concerned block. + */ +export async function createStateDiff (indexer: Indexer, blockHash: string): Promise<void> { + assert(indexer); + assert(blockHash); + + // Use indexer.createStateDiff() method to save custom state diff(s). +} + +/** + * Hook function to create state checkpoint. + * @param indexer Indexer instance. + * @param contractAddress Address of the concerned contract. + * @param blockHash Block hash of the concerned block. + * @returns Whether to disable default checkpoint. If false, the state from this hook is updated with that from default checkpoint. + */ +export async function createStateCheckpoint (indexer: Indexer, contractAddress: string, blockHash: string): Promise<boolean> { + assert(indexer); + assert(blockHash); + assert(contractAddress); + + // Use indexer.createStateCheckpoint() method to create a custom checkpoint. + + // Return false to update the state created by this hook with the auto-generated checkpoint state. + // Return true to disable updating the state created by this hook with the auto-generated checkpoint state. + return false; +} + +/** + * Event hook function. + * @param indexer Indexer instance that contains methods to fetch and update the contract values in the database. + * @param eventData ResultEvent object containing event information.
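 *
 * A minimal illustrative sketch (an assumption, not generated code) of indexing on a Transfer
 * event with the Indexer storage accessors, passing `diff` as true so an auto-generated state
 * diff is staged from the indexed values; field names follow the TransferEvent in the GQL schema:
 *
 *   switch (eventData.event.__typename) {
 *     case 'TransferEvent': {
 *       const { from, to, tokenId } = eventData.event;
 *       await indexer._balances(eventData.block.hash, eventData.contract, from, true);
 *       await indexer._balances(eventData.block.hash, eventData.contract, to, true);
 *       await indexer._owners(eventData.block.hash, eventData.contract, BigInt(tokenId), true);
 *       break;
 *     }
 *   }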
+ */ +export async function handleEvent (indexer: Indexer, eventData: ResultEvent): Promise { + assert(indexer); + assert(eventData); + + // Use indexer methods to index data. + // Pass `diff` parameter to indexer methods as true to save an auto-generated state from the indexed data. +} diff --git a/packages/erc721-watcher/src/indexer.ts b/packages/erc721-watcher/src/indexer.ts new file mode 100644 index 00000000..e6fbb34d --- /dev/null +++ b/packages/erc721-watcher/src/indexer.ts @@ -0,0 +1,1132 @@ +// +// Copyright 2021 Vulcanize, Inc. +// + +import assert from 'assert'; +import debug from 'debug'; +import { DeepPartial, FindConditions, FindManyOptions } from 'typeorm'; +import JSONbig from 'json-bigint'; +import { ethers } from 'ethers'; + +import { JsonFragment } from '@ethersproject/abi'; +import { BaseProvider } from '@ethersproject/providers'; +import * as codec from '@ipld/dag-cbor'; +import { EthClient } from '@vulcanize/ipld-eth-client'; +import { StorageLayout } from '@vulcanize/solidity-mapper'; +import { + IPLDIndexer as BaseIndexer, + IPLDIndexerInterface, + ValueResult, + UNKNOWN_EVENT_NAME, + ServerConfig, + JobQueue, + Where, + QueryOptions, + updateStateForElementaryType, + updateStateForMappingType, + BlockHeight, + IPFSClient, + StateKind, + IpldStatus as IpldStatusInterface +} from '@vulcanize/util'; +import { GraphWatcher } from '@vulcanize/graph-node'; + +import ERC721Artifacts from './artifacts/ERC721.json'; +import { Database } from './database'; +import { createInitialState, handleEvent, createStateDiff, createStateCheckpoint } from './hooks'; +import { Contract } from './entity/Contract'; +import { Event } from './entity/Event'; +import { SyncStatus } from './entity/SyncStatus'; +import { IpldStatus } from './entity/IpldStatus'; +import { BlockProgress } from './entity/BlockProgress'; +import { IPLDBlock } from './entity/IPLDBlock'; + +const log = debug('vulcanize:indexer'); + +const KIND_ERC721 = 'ERC721'; + +const APPROVAL_EVENT = 'Approval'; +const APPROVALFORALL_EVENT = 'ApprovalForAll'; +const TRANSFER_EVENT = 'Transfer'; + +export type ResultEvent = { + block: { + cid: string; + hash: string; + number: number; + timestamp: number; + parentHash: string; + }; + tx: { + hash: string; + from: string; + to: string; + index: number; + }; + + contract: string; + + eventIndex: number; + eventSignature: string; + event: any; + + proof: string; +}; + +export type ResultIPLDBlock = { + block: { + cid: string; + hash: string; + number: number; + timestamp: number; + parentHash: string; + }; + contractAddress: string; + cid: string; + kind: string; + data: string; +}; + +export class Indexer implements IPLDIndexerInterface { + _db: Database + _ethClient: EthClient + _ethProvider: BaseProvider + _baseIndexer: BaseIndexer + _serverConfig: ServerConfig + _graphWatcher: GraphWatcher; + + _abiMap: Map + _storageLayoutMap: Map + _contractMap: Map + + _ipfsClient: IPFSClient + + _entityTypesMap: Map + _relationsMap: Map + + constructor (serverConfig: ServerConfig, db: Database, ethClient: EthClient, ethProvider: BaseProvider, jobQueue: JobQueue, graphWatcher: GraphWatcher) { + assert(db); + assert(ethClient); + + this._db = db; + this._ethClient = ethClient; + this._ethProvider = ethProvider; + this._serverConfig = serverConfig; + this._ipfsClient = new IPFSClient(this._serverConfig.ipfsApiAddr); + this._baseIndexer = new BaseIndexer(this._serverConfig, this._db, this._ethClient, this._ethProvider, jobQueue, this._ipfsClient); + this._graphWatcher = graphWatcher; + + 
this._abiMap = new Map(); + this._storageLayoutMap = new Map(); + this._contractMap = new Map(); + + const { + abi: ERC721ABI, + storageLayout: ERC721StorageLayout + } = ERC721Artifacts; + + assert(ERC721ABI); + this._abiMap.set(KIND_ERC721, ERC721ABI); + assert(ERC721StorageLayout); + this._storageLayoutMap.set(KIND_ERC721, ERC721StorageLayout); + this._contractMap.set(KIND_ERC721, new ethers.utils.Interface(ERC721ABI)); + + this._entityTypesMap = new Map(); + this._populateEntityTypesMap(); + + this._relationsMap = new Map(); + this._populateRelationsMap(); + } + + async init (): Promise { + await this._baseIndexer.fetchContracts(); + await this._baseIndexer.fetchIPLDStatus(); + } + + getResultEvent (event: Event): ResultEvent { + const block = event.block; + const eventFields = JSONbig.parse(event.eventInfo); + const { tx, eventSignature } = JSON.parse(event.extraInfo); + + return { + block: { + cid: block.cid, + hash: block.blockHash, + number: block.blockNumber, + timestamp: block.blockTimestamp, + parentHash: block.parentHash + }, + + tx: { + hash: event.txHash, + from: tx.src, + to: tx.dst, + index: tx.index + }, + + contract: event.contract, + + eventIndex: event.index, + eventSignature, + event: { + __typename: `${event.eventName}Event`, + ...eventFields + }, + + // TODO: Return proof only if requested. + proof: JSON.parse(event.proof) + }; + } + + getResultIPLDBlock (ipldBlock: IPLDBlock): ResultIPLDBlock { + const block = ipldBlock.block; + + const data = codec.decode(Buffer.from(ipldBlock.data)) as any; + + return { + block: { + cid: block.cid, + hash: block.blockHash, + number: block.blockNumber, + timestamp: block.blockTimestamp, + parentHash: block.parentHash + }, + contractAddress: ipldBlock.contractAddress, + cid: ipldBlock.cid, + kind: ipldBlock.kind, + data: JSON.stringify(data) + }; + } + + async supportsInterface (blockHash: string, contractAddress: string, interfaceId: string): Promise { + const entity = await this._db.getSupportsInterface({ blockHash, contractAddress, interfaceId }); + if (entity) { + log('supportsInterface: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('supportsInterface: db miss, fetching from upstream server'); + + const { block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const abi = this._abiMap.get(KIND_ERC721); + assert(abi); + + const contract = new ethers.Contract(contractAddress, abi, this._ethProvider); + const value = await contract.supportsInterface(interfaceId, { blockTag: blockHash }); + + const result: ValueResult = { value }; + + await this._db.saveSupportsInterface({ blockHash, blockNumber, contractAddress, interfaceId, value: result.value, proof: JSONbig.stringify(result.proof) }); + + return result; + } + + async balanceOf (blockHash: string, contractAddress: string, owner: string): Promise { + const entity = await this._db.getBalanceOf({ blockHash, contractAddress, owner }); + if (entity) { + log('balanceOf: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('balanceOf: db miss, fetching from upstream server'); + + const { block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const abi = this._abiMap.get(KIND_ERC721); + assert(abi); + + const contract = new ethers.Contract(contractAddress, abi, this._ethProvider); + let value = await contract.balanceOf(owner, { 
blockTag: blockHash }); + value = value.toString(); + value = BigInt(value); + + const result: ValueResult = { value }; + + await this._db.saveBalanceOf({ blockHash, blockNumber, contractAddress, owner, value: result.value, proof: JSONbig.stringify(result.proof) }); + + return result; + } + + async ownerOf (blockHash: string, contractAddress: string, tokenId: bigint): Promise { + const entity = await this._db.getOwnerOf({ blockHash, contractAddress, tokenId }); + if (entity) { + log('ownerOf: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('ownerOf: db miss, fetching from upstream server'); + + const { block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const abi = this._abiMap.get(KIND_ERC721); + assert(abi); + + const contract = new ethers.Contract(contractAddress, abi, this._ethProvider); + const value = await contract.ownerOf(tokenId, { blockTag: blockHash }); + + const result: ValueResult = { value }; + + await this._db.saveOwnerOf({ blockHash, blockNumber, contractAddress, tokenId, value: result.value, proof: JSONbig.stringify(result.proof) }); + + return result; + } + + async getApproved (blockHash: string, contractAddress: string, tokenId: bigint): Promise { + const entity = await this._db.getGetApproved({ blockHash, contractAddress, tokenId }); + if (entity) { + log('getApproved: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('getApproved: db miss, fetching from upstream server'); + + const { block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const abi = this._abiMap.get(KIND_ERC721); + assert(abi); + + const contract = new ethers.Contract(contractAddress, abi, this._ethProvider); + const value = await contract.getApproved(tokenId, { blockTag: blockHash }); + + const result: ValueResult = { value }; + + await this._db.saveGetApproved({ blockHash, blockNumber, contractAddress, tokenId, value: result.value, proof: JSONbig.stringify(result.proof) }); + + return result; + } + + async isApprovedForAll (blockHash: string, contractAddress: string, owner: string, operator: string): Promise { + const entity = await this._db.getIsApprovedForAll({ blockHash, contractAddress, owner, operator }); + if (entity) { + log('isApprovedForAll: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('isApprovedForAll: db miss, fetching from upstream server'); + + const { block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const abi = this._abiMap.get(KIND_ERC721); + assert(abi); + + const contract = new ethers.Contract(contractAddress, abi, this._ethProvider); + const value = await contract.isApprovedForAll(owner, operator, { blockTag: blockHash }); + + const result: ValueResult = { value }; + + await this._db.saveIsApprovedForAll({ blockHash, blockNumber, contractAddress, owner, operator, value: result.value, proof: JSONbig.stringify(result.proof) }); + + return result; + } + + async name (blockHash: string, contractAddress: string): Promise { + const entity = await this._db.getName({ blockHash, contractAddress }); + if (entity) { + log('name: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('name: db miss, fetching from upstream server'); + + const { 
block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const abi = this._abiMap.get(KIND_ERC721); + assert(abi); + + const contract = new ethers.Contract(contractAddress, abi, this._ethProvider); + const value = await contract.name({ blockTag: blockHash }); + + const result: ValueResult = { value }; + + await this._db.saveName({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbig.stringify(result.proof) }); + + return result; + } + + async symbol (blockHash: string, contractAddress: string): Promise { + const entity = await this._db.getSymbol({ blockHash, contractAddress }); + if (entity) { + log('symbol: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('symbol: db miss, fetching from upstream server'); + + const { block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const abi = this._abiMap.get(KIND_ERC721); + assert(abi); + + const contract = new ethers.Contract(contractAddress, abi, this._ethProvider); + const value = await contract.symbol({ blockTag: blockHash }); + + const result: ValueResult = { value }; + + await this._db.saveSymbol({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbig.stringify(result.proof) }); + + return result; + } + + async tokenURI (blockHash: string, contractAddress: string, tokenId: bigint): Promise { + const entity = await this._db.getTokenURI({ blockHash, contractAddress, tokenId }); + if (entity) { + log('tokenURI: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('tokenURI: db miss, fetching from upstream server'); + + const { block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const abi = this._abiMap.get(KIND_ERC721); + assert(abi); + + const contract = new ethers.Contract(contractAddress, abi, this._ethProvider); + const value = await contract.tokenURI(tokenId, { blockTag: blockHash }); + + const result: ValueResult = { value }; + + await this._db.saveTokenURI({ blockHash, blockNumber, contractAddress, tokenId, value: result.value, proof: JSONbig.stringify(result.proof) }); + + return result; + } + + async _name (blockHash: string, contractAddress: string, diff = false): Promise { + const entity = await this._db._getName({ blockHash, contractAddress }); + if (entity) { + log('_name: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('_name: db miss, fetching from upstream server'); + + const { block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const storageLayout = this._storageLayoutMap.get(KIND_ERC721); + assert(storageLayout); + + const result = await this._baseIndexer.getStorageValue( + storageLayout, + blockHash, + contractAddress, + '_name' + ); + + await this._db._saveName({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbig.stringify(result.proof) }); + + if (diff) { + const stateUpdate = updateStateForElementaryType({}, '_name', result.value.toString()); + await this.createDiffStaged(contractAddress, blockHash, stateUpdate); + } + + return result; + } + + async _symbol (blockHash: string, contractAddress: string, diff = false): Promise { + const entity = await this._db._getSymbol({ 
blockHash, contractAddress }); + if (entity) { + log('_symbol: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('_symbol: db miss, fetching from upstream server'); + + const { block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const storageLayout = this._storageLayoutMap.get(KIND_ERC721); + assert(storageLayout); + + const result = await this._baseIndexer.getStorageValue( + storageLayout, + blockHash, + contractAddress, + '_symbol' + ); + + await this._db._saveSymbol({ blockHash, blockNumber, contractAddress, value: result.value, proof: JSONbig.stringify(result.proof) }); + + if (diff) { + const stateUpdate = updateStateForElementaryType({}, '_symbol', result.value.toString()); + await this.createDiffStaged(contractAddress, blockHash, stateUpdate); + } + + return result; + } + + async _owners (blockHash: string, contractAddress: string, key0: bigint, diff = false): Promise { + const entity = await this._db._getOwners({ blockHash, contractAddress, key0 }); + if (entity) { + log('_owners: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('_owners: db miss, fetching from upstream server'); + + const { block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const storageLayout = this._storageLayoutMap.get(KIND_ERC721); + assert(storageLayout); + + const result = await this._baseIndexer.getStorageValue( + storageLayout, + blockHash, + contractAddress, + '_owners', + key0 + ); + + await this._db._saveOwners({ blockHash, blockNumber, contractAddress, key0, value: result.value, proof: JSONbig.stringify(result.proof) }); + + if (diff) { + const stateUpdate = updateStateForMappingType({}, '_owners', [key0.toString()], result.value.toString()); + await this.createDiffStaged(contractAddress, blockHash, stateUpdate); + } + + return result; + } + + async _balances (blockHash: string, contractAddress: string, key0: string, diff = false): Promise { + const entity = await this._db._getBalances({ blockHash, contractAddress, key0 }); + if (entity) { + log('_balances: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('_balances: db miss, fetching from upstream server'); + + const { block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const storageLayout = this._storageLayoutMap.get(KIND_ERC721); + assert(storageLayout); + + const result = await this._baseIndexer.getStorageValue( + storageLayout, + blockHash, + contractAddress, + '_balances', + key0 + ); + + await this._db._saveBalances({ blockHash, blockNumber, contractAddress, key0, value: result.value, proof: JSONbig.stringify(result.proof) }); + + if (diff) { + const stateUpdate = updateStateForMappingType({}, '_balances', [key0.toString()], result.value.toString()); + await this.createDiffStaged(contractAddress, blockHash, stateUpdate); + } + + return result; + } + + async _tokenApprovals (blockHash: string, contractAddress: string, key0: bigint, diff = false): Promise { + const entity = await this._db._getTokenApprovals({ blockHash, contractAddress, key0 }); + if (entity) { + log('_tokenApprovals: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('_tokenApprovals: db miss, fetching from upstream server'); + + 
const { block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const storageLayout = this._storageLayoutMap.get(KIND_ERC721); + assert(storageLayout); + + const result = await this._baseIndexer.getStorageValue( + storageLayout, + blockHash, + contractAddress, + '_tokenApprovals', + key0 + ); + + await this._db._saveTokenApprovals({ blockHash, blockNumber, contractAddress, key0, value: result.value, proof: JSONbig.stringify(result.proof) }); + + if (diff) { + const stateUpdate = updateStateForMappingType({}, '_tokenApprovals', [key0.toString()], result.value.toString()); + await this.createDiffStaged(contractAddress, blockHash, stateUpdate); + } + + return result; + } + + async _operatorApprovals (blockHash: string, contractAddress: string, key0: string, key1: string, diff = false): Promise { + const entity = await this._db._getOperatorApprovals({ blockHash, contractAddress, key0, key1 }); + if (entity) { + log('_operatorApprovals: db hit.'); + + return { + value: entity.value, + proof: JSON.parse(entity.proof) + }; + } + + log('_operatorApprovals: db miss, fetching from upstream server'); + + const { block: { number } } = await this._ethClient.getBlockByHash(blockHash); + const blockNumber = ethers.BigNumber.from(number).toNumber(); + + const storageLayout = this._storageLayoutMap.get(KIND_ERC721); + assert(storageLayout); + + const result = await this._baseIndexer.getStorageValue( + storageLayout, + blockHash, + contractAddress, + '_operatorApprovals', + key0, + key1 + ); + + await this._db._saveOperatorApprovals({ blockHash, blockNumber, contractAddress, key0, key1, value: result.value, proof: JSONbig.stringify(result.proof) }); + + if (diff) { + const stateUpdate = updateStateForMappingType({}, '_operatorApprovals', [key0.toString(), key1.toString()], result.value.toString()); + await this.createDiffStaged(contractAddress, blockHash, stateUpdate); + } + + return result; + } + + async pushToIPFS (data: any): Promise { + await this._baseIndexer.pushToIPFS(data); + } + + async processInitialState (contractAddress: string, blockHash: string): Promise { + // Call initial state hook. + return createInitialState(this, contractAddress, blockHash); + } + + async processStateCheckpoint (contractAddress: string, blockHash: string): Promise { + // Call checkpoint hook. + return createStateCheckpoint(this, contractAddress, blockHash); + } + + async processCanonicalBlock (blockHash: string): Promise { + // Finalize staged diff blocks if any. + await this._baseIndexer.finalizeDiffStaged(blockHash); + + // Call custom stateDiff hook. + await createStateDiff(this, blockHash); + } + + async processCheckpoint (blockHash: string): Promise { + // Return if checkpointInterval is <= 0. 
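+    // Otherwise, checkpoint processing for this block is delegated to the base indexer along with the configured interval.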
+ const checkpointInterval = this._serverConfig.checkpointInterval; + if (checkpointInterval <= 0) return; + + await this._baseIndexer.processCheckpoint(this, blockHash, checkpointInterval); + } + + async processCLICheckpoint (contractAddress: string, blockHash?: string): Promise { + return this._baseIndexer.processCLICheckpoint(this, contractAddress, blockHash); + } + + async getPrevIPLDBlock (blockHash: string, contractAddress: string, kind?: string): Promise { + return this._db.getPrevIPLDBlock(blockHash, contractAddress, kind); + } + + async getLatestIPLDBlock (contractAddress: string, kind: StateKind | null, blockNumber?: number): Promise { + return this._db.getLatestIPLDBlock(contractAddress, kind, blockNumber); + } + + async getIPLDBlocksByHash (blockHash: string): Promise { + return this._baseIndexer.getIPLDBlocksByHash(blockHash); + } + + async getIPLDBlockByCid (cid: string): Promise { + return this._baseIndexer.getIPLDBlockByCid(cid); + } + + getIPLDData (ipldBlock: IPLDBlock): any { + return this._baseIndexer.getIPLDData(ipldBlock); + } + + isIPFSConfigured (): boolean { + return this._baseIndexer.isIPFSConfigured(); + } + + // Method used to create auto diffs (diff_staged). + async createDiffStaged (contractAddress: string, blockHash: string, data: any): Promise { + await this._baseIndexer.createDiffStaged(contractAddress, blockHash, data); + } + + // Method to be used by createStateDiff hook. + async createDiff (contractAddress: string, blockHash: string, data: any): Promise { + const block = await this.getBlockProgress(blockHash); + assert(block); + + await this._baseIndexer.createDiff(contractAddress, block, data); + } + + // Method to be used by createStateCheckpoint hook. + async createStateCheckpoint (contractAddress: string, blockHash: string, data: any): Promise { + const block = await this.getBlockProgress(blockHash); + assert(block); + + return this._baseIndexer.createStateCheckpoint(contractAddress, block, data); + } + + // Method to be used by checkpoint CLI. + async createCheckpoint (contractAddress: string, blockHash: string): Promise { + const block = await this.getBlockProgress(blockHash); + assert(block); + + return this._baseIndexer.createCheckpoint(this, contractAddress, block); + } + + async saveOrUpdateIPLDBlock (ipldBlock: IPLDBlock): Promise { + return this._baseIndexer.saveOrUpdateIPLDBlock(ipldBlock); + } + + async removeIPLDBlocks (blockNumber: number, kind: StateKind): Promise { + await this._baseIndexer.removeIPLDBlocks(blockNumber, kind); + } + + async getSubgraphEntity (entity: new () => Entity, id: string, block?: BlockHeight): Promise { + const relations = this._relationsMap.get(entity) || {}; + + const data = await this._graphWatcher.getEntity(entity, id, relations, block); + + return data; + } + + async triggerIndexingOnEvent (event: Event): Promise { + const resultEvent = this.getResultEvent(event); + + // Call subgraph handler for event. + await this._graphWatcher.handleEvent(resultEvent); + + // Call custom hook function for indexing on event. + await handleEvent(this, resultEvent); + } + + async processEvent (event: Event): Promise { + // Trigger indexing of data based on the event. + await this.triggerIndexingOnEvent(event); + } + + async processBlock (blockHash: string, blockNumber: number): Promise { + // Call a function to create initial state for contracts. + await this._baseIndexer.createInit(this, blockHash, blockNumber); + + // Call subgraph handler for block. 
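+    // (This is where any block handlers defined in the subgraph mappings, if present, are invoked.)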
+ await this._graphWatcher.handleBlock(blockHash); + } + + parseEventNameAndArgs (kind: string, logObj: any): any { + let eventName = UNKNOWN_EVENT_NAME; + let eventInfo = {}; + + const { topics, data } = logObj; + + const contract = this._contractMap.get(kind); + assert(contract); + + const logDescription = contract.parseLog({ data, topics }); + + switch (kind) { + case KIND_ERC721: { + ({ eventName, eventInfo } = this.parseERC721Event(logDescription)); + + break; + } + } + + return { + eventName, + eventInfo, + eventSignature: logDescription.signature + }; + } + + parseERC721Event (logDescription: ethers.utils.LogDescription): { eventName: string, eventInfo: any } { + let eventName = UNKNOWN_EVENT_NAME; + let eventInfo = {}; + + switch (logDescription.name) { + case APPROVAL_EVENT: { + eventName = logDescription.name; + const { owner, approved, tokenId } = logDescription.args; + eventInfo = { + owner, + approved, + tokenId: BigInt(tokenId.toString()) + }; + + break; + } + case APPROVALFORALL_EVENT: { + eventName = logDescription.name; + const { owner, operator, approved } = logDescription.args; + eventInfo = { + owner, + operator, + approved + }; + + break; + } + case TRANSFER_EVENT: { + eventName = logDescription.name; + const { from, to, tokenId } = logDescription.args; + eventInfo = { + from, + to, + tokenId: BigInt(tokenId.toString()) + }; + + break; + } + } + + return { + eventName, + eventInfo + }; + } + + async getIPLDStatus (): Promise { + return this._db.getIPLDStatus(); + } + + async updateIPLDStatusHooksBlock (blockNumber: number, force?: boolean): Promise { + const dbTx = await this._db.createTransactionRunner(); + let res; + + try { + res = await this._db.updateIPLDStatusHooksBlock(dbTx, blockNumber, force); + await dbTx.commitTransaction(); + } catch (error) { + await dbTx.rollbackTransaction(); + throw error; + } finally { + await dbTx.release(); + } + + return res; + } + + async updateIPLDStatusCheckpointBlock (blockNumber: number, force?: boolean): Promise { + const dbTx = await this._db.createTransactionRunner(); + let res; + + try { + res = await this._db.updateIPLDStatusCheckpointBlock(dbTx, blockNumber, force); + await dbTx.commitTransaction(); + } catch (error) { + await dbTx.rollbackTransaction(); + throw error; + } finally { + await dbTx.release(); + } + + return res; + } + + async updateIPLDStatusIPFSBlock (blockNumber: number, force?: boolean): Promise { + const dbTx = await this._db.createTransactionRunner(); + let res; + + try { + res = await this._db.updateIPLDStatusIPFSBlock(dbTx, blockNumber, force); + await dbTx.commitTransaction(); + } catch (error) { + await dbTx.rollbackTransaction(); + throw error; + } finally { + await dbTx.release(); + } + + return res; + } + + async getLatestCanonicalBlock (): Promise { + const syncStatus = await this.getSyncStatus(); + assert(syncStatus); + + const latestCanonicalBlock = await this.getBlockProgress(syncStatus.latestCanonicalBlockHash); + assert(latestCanonicalBlock); + + return latestCanonicalBlock; + } + + async getLatestHooksProcessedBlock (): Promise { + return this._baseIndexer.getLatestHooksProcessedBlock(); + } + + async watchContract (address: string, kind: string, checkpoint: boolean, startingBlock: number): Promise { + await this.updateIPLDStatusMap(address, {}); + + return this._baseIndexer.watchContract(address, kind, checkpoint, startingBlock); + } + + async updateIPLDStatusMap (address: string, ipldStatus: IpldStatusInterface): Promise { + await this._baseIndexer.updateIPLDStatusMap(address, 
ipldStatus); + } + + cacheContract (contract: Contract): void { + return this._baseIndexer.cacheContract(contract); + } + + async saveEventEntity (dbEvent: Event): Promise { + return this._baseIndexer.saveEventEntity(dbEvent); + } + + async getEventsByFilter (blockHash: string, contract?: string, name?: string): Promise> { + return this._baseIndexer.getEventsByFilter(blockHash, contract, name); + } + + async isWatchedContract (address : string): Promise { + return this._baseIndexer.isWatchedContract(address); + } + + getContractsByKind (kind: string): Contract[] { + return this._baseIndexer.getContractsByKind(kind); + } + + async getProcessedBlockCountForRange (fromBlockNumber: number, toBlockNumber: number): Promise<{ expected: number, actual: number }> { + return this._baseIndexer.getProcessedBlockCountForRange(fromBlockNumber, toBlockNumber); + } + + async getEventsInRange (fromBlockNumber: number, toBlockNumber: number): Promise> { + return this._baseIndexer.getEventsInRange(fromBlockNumber, toBlockNumber); + } + + async getSyncStatus (): Promise { + return this._baseIndexer.getSyncStatus(); + } + + async getBlocks (blockFilter: { blockHash?: string, blockNumber?: number }): Promise { + return this._baseIndexer.getBlocks(blockFilter); + } + + async updateSyncStatusIndexedBlock (blockHash: string, blockNumber: number, force = false): Promise { + return this._baseIndexer.updateSyncStatusIndexedBlock(blockHash, blockNumber, force); + } + + async updateSyncStatusChainHead (blockHash: string, blockNumber: number, force = false): Promise { + return this._baseIndexer.updateSyncStatusChainHead(blockHash, blockNumber, force); + } + + async updateSyncStatusCanonicalBlock (blockHash: string, blockNumber: number, force = false): Promise { + return this._baseIndexer.updateSyncStatusCanonicalBlock(blockHash, blockNumber, force); + } + + async getEvent (id: string): Promise { + return this._baseIndexer.getEvent(id); + } + + async getBlockProgress (blockHash: string): Promise { + return this._baseIndexer.getBlockProgress(blockHash); + } + + async getBlockProgressEntities (where: FindConditions, options: FindManyOptions): Promise { + return this._baseIndexer.getBlockProgressEntities(where, options); + } + + async getBlocksAtHeight (height: number, isPruned: boolean): Promise { + return this._baseIndexer.getBlocksAtHeight(height, isPruned); + } + + async fetchBlockEvents (block: DeepPartial): Promise { + return this._baseIndexer.fetchBlockEvents(block, this._fetchAndSaveEvents.bind(this)); + } + + async getBlockEvents (blockHash: string, where: Where, queryOptions: QueryOptions): Promise> { + return this._baseIndexer.getBlockEvents(blockHash, where, queryOptions); + } + + async removeUnknownEvents (block: BlockProgress): Promise { + return this._baseIndexer.removeUnknownEvents(Event, block); + } + + async markBlocksAsPruned (blocks: BlockProgress[]): Promise { + return this._baseIndexer.markBlocksAsPruned(blocks); + } + + async updateBlockProgress (block: BlockProgress, lastProcessedEventIndex: number): Promise { + return this._baseIndexer.updateBlockProgress(block, lastProcessedEventIndex); + } + + async getAncestorAtDepth (blockHash: string, depth: number): Promise { + return this._baseIndexer.getAncestorAtDepth(blockHash, depth); + } + + getEntityTypesMap (): Map { + return this._entityTypesMap; + } + + async _fetchAndSaveEvents ({ cid: blockCid, blockHash }: DeepPartial): Promise { + assert(blockHash); + + const logsPromise = this._ethClient.getLogs({ blockHash }); + const transactionsPromise = 
this._ethClient.getBlockWithTransactions({ blockHash }); + + let [ + { block, logs }, + { + allEthHeaderCids: { + nodes: [ + { + ethTransactionCidsByHeaderId: { + nodes: transactions + } + } + ] + } + } + ] = await Promise.all([logsPromise, transactionsPromise]); + + const transactionMap = transactions.reduce((acc: {[key: string]: any}, transaction: {[key: string]: any}) => { + acc[transaction.txHash] = transaction; + return acc; + }, {}); + + const dbEvents: Array> = []; + + for (let li = 0; li < logs.length; li++) { + const logObj = logs[li]; + const { + topics, + data, + index: logIndex, + cid, + ipldBlock, + account: { + address + }, + transaction: { + hash: txHash + }, + receiptCID, + status + } = logObj; + + if (status) { + let eventName = UNKNOWN_EVENT_NAME; + let eventInfo = {}; + const tx = transactionMap[txHash]; + const extraInfo: { [key: string]: any } = { topics, data, tx }; + + const contract = ethers.utils.getAddress(address); + const watchedContract = await this.isWatchedContract(contract); + + if (watchedContract) { + const eventDetails = this.parseEventNameAndArgs(watchedContract.kind, logObj); + eventName = eventDetails.eventName; + eventInfo = eventDetails.eventInfo; + extraInfo.eventSignature = eventDetails.eventSignature; + } + + dbEvents.push({ + index: logIndex, + txHash, + contract, + eventName, + eventInfo: JSONbig.stringify(eventInfo), + extraInfo: JSONbig.stringify(extraInfo), + proof: JSONbig.stringify({ + data: JSONbig.stringify({ + blockHash, + receiptCID, + log: { + cid, + ipldBlock + } + }) + }) + }); + } else { + log(`Skipping event for receipt ${receiptCID} due to failed transaction.`); + } + } + + const dbTx = await this._db.createTransactionRunner(); + + try { + block = { + cid: blockCid, + blockHash, + blockNumber: block.number, + blockTimestamp: block.timestamp, + parentHash: block.parent.hash + }; + + const blockProgress = await this._db.saveEvents(dbTx, block, dbEvents); + await dbTx.commitTransaction(); + + return blockProgress; + } catch (error) { + await dbTx.rollbackTransaction(); + throw error; + } finally { + await dbTx.release(); + } + } +} diff --git a/packages/erc721-watcher/src/job-runner.ts b/packages/erc721-watcher/src/job-runner.ts new file mode 100644 index 00000000..6e934621 --- /dev/null +++ b/packages/erc721-watcher/src/job-runner.ts @@ -0,0 +1,287 @@ +// +// Copyright 2021 Vulcanize, Inc. 
+// + +import path from 'path'; +import assert from 'assert'; +import 'reflect-metadata'; +import yargs from 'yargs'; +import { hideBin } from 'yargs/helpers'; +import debug from 'debug'; + +import { + getConfig, + Config, + JobQueue, + JobRunner as BaseJobRunner, + QUEUE_BLOCK_PROCESSING, + QUEUE_EVENT_PROCESSING, + QUEUE_BLOCK_CHECKPOINT, + QUEUE_HOOKS, + QUEUE_IPFS, + JOB_KIND_PRUNE, + JobQueueConfig, + DEFAULT_CONFIG_PATH, + initClients +} from '@vulcanize/util'; +import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node'; + +import { Indexer } from './indexer'; +import { Database } from './database'; + +const log = debug('vulcanize:job-runner'); + +export class JobRunner { + _indexer: Indexer + _jobQueue: JobQueue + _baseJobRunner: BaseJobRunner + _jobQueueConfig: JobQueueConfig + + constructor (jobQueueConfig: JobQueueConfig, indexer: Indexer, jobQueue: JobQueue) { + this._jobQueueConfig = jobQueueConfig; + this._indexer = indexer; + this._jobQueue = jobQueue; + this._baseJobRunner = new BaseJobRunner(this._jobQueueConfig, this._indexer, this._jobQueue); + } + + async start (): Promise { + await this.subscribeBlockProcessingQueue(); + await this.subscribeEventProcessingQueue(); + await this.subscribeBlockCheckpointQueue(); + await this.subscribeHooksQueue(); + await this.subscribeIPFSQueue(); + } + + async subscribeBlockProcessingQueue (): Promise { + await this._jobQueue.subscribe(QUEUE_BLOCK_PROCESSING, async (job) => { + await this._baseJobRunner.processBlock(job); + + const { data: { kind } } = job; + + // If it's a pruning job: Create a hooks job. + if (kind === JOB_KIND_PRUNE) { + await this.createHooksJob(); + } + + await this._jobQueue.markComplete(job); + }); + } + + async subscribeEventProcessingQueue (): Promise { + await this._jobQueue.subscribe(QUEUE_EVENT_PROCESSING, async (job) => { + await this._baseJobRunner.processEvent(job); + }); + } + + async subscribeHooksQueue (): Promise { + await this._jobQueue.subscribe(QUEUE_HOOKS, async (job) => { + const { data: { blockHash, blockNumber } } = job; + + // Get the current IPLD Status. + const ipldStatus = await this._indexer.getIPLDStatus(); + + if (ipldStatus) { + if (ipldStatus.latestHooksBlockNumber < (blockNumber - 1)) { + // Create hooks job for parent block. + const [parentBlock] = await this._indexer.getBlocksAtHeight(blockNumber - 1, false); + await this.createHooksJob(parentBlock.blockHash, parentBlock.blockNumber); + + const message = `Hooks for blockNumber ${blockNumber - 1} not processed yet, aborting`; + log(message); + + throw new Error(message); + } + + if (ipldStatus.latestHooksBlockNumber > (blockNumber - 1)) { + log(`Hooks for blockNumber ${blockNumber} already processed`); + + return; + } + } + + // Process the hooks for the given block number. + await this._indexer.processCanonicalBlock(blockHash); + + // Update the IPLD status. + await this._indexer.updateIPLDStatusHooksBlock(blockNumber); + + // Create a checkpoint job after completion of a hook job. + await this.createCheckpointJob(blockHash, blockNumber); + + await this._jobQueue.markComplete(job); + }); + } + + async subscribeBlockCheckpointQueue (): Promise { + await this._jobQueue.subscribe(QUEUE_BLOCK_CHECKPOINT, async (job) => { + const { data: { blockHash, blockNumber } } = job; + + // Get the current IPLD Status. 
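+      // As with the hooks queue, a block's checkpoint is only processed once the parent block's checkpoint is done.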
+      const ipldStatus = await this._indexer.getIPLDStatus();
+      assert(ipldStatus);
+
+      if (ipldStatus.latestCheckpointBlockNumber >= 0) {
+        if (ipldStatus.latestCheckpointBlockNumber < (blockNumber - 1)) {
+          // Create a checkpoint job for the parent block.
+          const [parentBlock] = await this._indexer.getBlocksAtHeight(blockNumber - 1, false);
+          await this.createCheckpointJob(parentBlock.blockHash, parentBlock.blockNumber);
+
+          const message = `Checkpoints for blockNumber ${blockNumber - 1} not processed yet, aborting`;
+          log(message);
+
+          throw new Error(message);
+        }
+
+        if (ipldStatus.latestCheckpointBlockNumber > (blockNumber - 1)) {
+          log(`Checkpoints for blockNumber ${blockNumber} already processed`);
+
+          return;
+        }
+      }
+
+      // Process checkpoints for the given block.
+      await this._indexer.processCheckpoint(blockHash);
+
+      // Update the IPLD status.
+      await this._indexer.updateIPLDStatusCheckpointBlock(blockNumber);
+
+      // Create an IPFS job after completion of a checkpoint job.
+      if (this._indexer.isIPFSConfigured()) {
+        await this.createIPFSPutJob(blockHash, blockNumber);
+      }
+
+      await this._jobQueue.markComplete(job);
+    });
+  }
+
+  async subscribeIPFSQueue (): Promise<void> {
+    await this._jobQueue.subscribe(QUEUE_IPFS, async (job) => {
+      const { data: { blockHash, blockNumber } } = job;
+
+      const ipldStatus = await this._indexer.getIPLDStatus();
+      assert(ipldStatus);
+
+      if (ipldStatus.latestIPFSBlockNumber >= 0) {
+        if (ipldStatus.latestIPFSBlockNumber < (blockNumber - 1)) {
+          // Create an IPFS job for the parent block.
+          const [parentBlock] = await this._indexer.getBlocksAtHeight(blockNumber - 1, false);
+          await this.createIPFSPutJob(parentBlock.blockHash, parentBlock.blockNumber);
+
+          const message = `IPFS for blockNumber ${blockNumber - 1} not processed yet, aborting`;
+          log(message);
+
+          throw new Error(message);
+        }
+
+        if (ipldStatus.latestIPFSBlockNumber > (blockNumber - 1)) {
+          log(`IPFS for blockNumber ${blockNumber} already processed`);
+
+          return;
+        }
+      }
+
+      // Get IPLDBlocks for the given blockHash.
+      const ipldBlocks = await this._indexer.getIPLDBlocksByHash(blockHash);
+
+      // Push all the IPLDBlocks to IPFS.
+      for (const ipldBlock of ipldBlocks) {
+        const data = this._indexer.getIPLDData(ipldBlock);
+        await this._indexer.pushToIPFS(data);
+      }
+
+      // Update the IPLD status.
+      await this._indexer.updateIPLDStatusIPFSBlock(blockNumber);
+
+      await this._jobQueue.markComplete(job);
+    });
+  }
+
+  async createHooksJob (blockHash?: string, blockNumber?: number): Promise<void> {
+    if (!blockNumber || !blockHash) {
+      // Get the latest canonical block.
+      const latestCanonicalBlock = await this._indexer.getLatestCanonicalBlock();
+
+      // Create a hooks job for the parent block of latestCanonicalBlock, because pruning for the first block is skipped (it is assumed to be canonical).
+ blockHash = latestCanonicalBlock.parentHash; + blockNumber = latestCanonicalBlock.blockNumber - 1; + } + + await this._jobQueue.pushJob( + QUEUE_HOOKS, + { + blockHash, + blockNumber + } + ); + } + + async createCheckpointJob (blockHash: string, blockNumber: number): Promise { + await this._jobQueue.pushJob( + QUEUE_BLOCK_CHECKPOINT, + { + blockHash, + blockNumber + } + ); + } + + async createIPFSPutJob (blockHash: string, blockNumber: number): Promise { + await this._jobQueue.pushJob( + QUEUE_IPFS, + { + blockHash, + blockNumber + } + ); + } +} + +export const main = async (): Promise => { + const argv = await yargs(hideBin(process.argv)) + .option('f', { + alias: 'config-file', + demandOption: true, + describe: 'configuration file path (toml)', + type: 'string', + default: DEFAULT_CONFIG_PATH + }) + .argv; + + const config: Config = await getConfig(argv.f); + const { ethClient, ethProvider } = await initClients(config); + + const db = new Database(config.database); + await db.init(); + + const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*')); + await graphDb.init(); + + const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server); + + const jobQueueConfig = config.jobQueue; + assert(jobQueueConfig, 'Missing job queue config'); + + const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig; + assert(dbConnectionString, 'Missing job queue db connection string'); + + const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs }); + await jobQueue.start(); + + const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher); + await indexer.init(); + + graphWatcher.setIndexer(indexer); + + const jobRunner = new JobRunner(jobQueueConfig, indexer, jobQueue); + await jobRunner.start(); +}; + +main().then(() => { + log('Starting job runner...'); +}).catch(err => { + log(err); +}); + +process.on('uncaughtException', err => { + log('uncaughtException', err); +}); diff --git a/packages/erc721-watcher/src/resolvers.ts b/packages/erc721-watcher/src/resolvers.ts new file mode 100644 index 00000000..5d662ad7 --- /dev/null +++ b/packages/erc721-watcher/src/resolvers.ts @@ -0,0 +1,172 @@ +// +// Copyright 2021 Vulcanize, Inc. 
+// + +import assert from 'assert'; +import BigInt from 'apollo-type-bigint'; +import debug from 'debug'; +import Decimal from 'decimal.js'; +import { GraphQLScalarType } from 'graphql'; + +import { ValueResult, BlockHeight, StateKind } from '@vulcanize/util'; + +import { Indexer } from './indexer'; +import { EventWatcher } from './events'; + +const log = debug('vulcanize:resolver'); + +export const createResolvers = async (indexer: Indexer, eventWatcher: EventWatcher): Promise => { + assert(indexer); + + return { + BigInt: new BigInt('bigInt'), + + BigDecimal: new GraphQLScalarType({ + name: 'BigDecimal', + description: 'BigDecimal custom scalar type', + parseValue (value) { + // value from the client + return new Decimal(value); + }, + serialize (value: Decimal) { + // value sent to the client + return value.toFixed(); + } + }), + + Event: { + __resolveType: (obj: any) => { + assert(obj.__typename); + + return obj.__typename; + } + }, + + Subscription: { + onEvent: { + subscribe: () => eventWatcher.getEventIterator() + } + }, + + Mutation: { + watchContract: async (_: any, { address, kind, checkpoint, startingBlock = 1 }: { address: string, kind: string, checkpoint: boolean, startingBlock: number }): Promise => { + log('watchContract', address, kind, checkpoint, startingBlock); + await indexer.watchContract(address, kind, checkpoint, startingBlock); + + return true; + } + }, + + Query: { + supportsInterface: (_: any, { blockHash, contractAddress, interfaceId }: { blockHash: string, contractAddress: string, interfaceId: string }): Promise => { + log('supportsInterface', blockHash, contractAddress, interfaceId); + return indexer.supportsInterface(blockHash, contractAddress, interfaceId); + }, + + balanceOf: (_: any, { blockHash, contractAddress, owner }: { blockHash: string, contractAddress: string, owner: string }): Promise => { + log('balanceOf', blockHash, contractAddress, owner); + return indexer.balanceOf(blockHash, contractAddress, owner); + }, + + ownerOf: (_: any, { blockHash, contractAddress, tokenId }: { blockHash: string, contractAddress: string, tokenId: bigint }): Promise => { + log('ownerOf', blockHash, contractAddress, tokenId); + return indexer.ownerOf(blockHash, contractAddress, tokenId); + }, + + getApproved: (_: any, { blockHash, contractAddress, tokenId }: { blockHash: string, contractAddress: string, tokenId: bigint }): Promise => { + log('getApproved', blockHash, contractAddress, tokenId); + return indexer.getApproved(blockHash, contractAddress, tokenId); + }, + + isApprovedForAll: (_: any, { blockHash, contractAddress, owner, operator }: { blockHash: string, contractAddress: string, owner: string, operator: string }): Promise => { + log('isApprovedForAll', blockHash, contractAddress, owner, operator); + return indexer.isApprovedForAll(blockHash, contractAddress, owner, operator); + }, + + name: (_: any, { blockHash, contractAddress }: { blockHash: string, contractAddress: string }): Promise => { + log('name', blockHash, contractAddress); + return indexer.name(blockHash, contractAddress); + }, + + symbol: (_: any, { blockHash, contractAddress }: { blockHash: string, contractAddress: string }): Promise => { + log('symbol', blockHash, contractAddress); + return indexer.symbol(blockHash, contractAddress); + }, + + tokenURI: (_: any, { blockHash, contractAddress, tokenId }: { blockHash: string, contractAddress: string, tokenId: bigint }): Promise => { + log('tokenURI', blockHash, contractAddress, tokenId); + return indexer.tokenURI(blockHash, contractAddress, tokenId); + 
}, + + _name: (_: any, { blockHash, contractAddress }: { blockHash: string, contractAddress: string }): Promise => { + log('_name', blockHash, contractAddress); + return indexer._name(blockHash, contractAddress); + }, + + _symbol: (_: any, { blockHash, contractAddress }: { blockHash: string, contractAddress: string }): Promise => { + log('_symbol', blockHash, contractAddress); + return indexer._symbol(blockHash, contractAddress); + }, + + _owners: (_: any, { blockHash, contractAddress, key0 }: { blockHash: string, contractAddress: string, key0: bigint }): Promise => { + log('_owners', blockHash, contractAddress, key0); + return indexer._owners(blockHash, contractAddress, key0); + }, + + _balances: (_: any, { blockHash, contractAddress, key0 }: { blockHash: string, contractAddress: string, key0: string }): Promise => { + log('_balances', blockHash, contractAddress, key0); + return indexer._balances(blockHash, contractAddress, key0); + }, + + _tokenApprovals: (_: any, { blockHash, contractAddress, key0 }: { blockHash: string, contractAddress: string, key0: bigint }): Promise => { + log('_tokenApprovals', blockHash, contractAddress, key0); + return indexer._tokenApprovals(blockHash, contractAddress, key0); + }, + + _operatorApprovals: (_: any, { blockHash, contractAddress, key0, key1 }: { blockHash: string, contractAddress: string, key0: string, key1: string }): Promise => { + log('_operatorApprovals', blockHash, contractAddress, key0, key1); + return indexer._operatorApprovals(blockHash, contractAddress, key0, key1); + }, + + events: async (_: any, { blockHash, contractAddress, name }: { blockHash: string, contractAddress: string, name?: string }) => { + log('events', blockHash, contractAddress, name); + + const block = await indexer.getBlockProgress(blockHash); + if (!block || !block.isComplete) { + throw new Error(`Block hash ${blockHash} number ${block?.blockNumber} not processed yet`); + } + + const events = await indexer.getEventsByFilter(blockHash, contractAddress, name); + return events.map(event => indexer.getResultEvent(event)); + }, + + eventsInRange: async (_: any, { fromBlockNumber, toBlockNumber }: { fromBlockNumber: number, toBlockNumber: number }) => { + log('eventsInRange', fromBlockNumber, toBlockNumber); + + const { expected, actual } = await indexer.getProcessedBlockCountForRange(fromBlockNumber, toBlockNumber); + if (expected !== actual) { + throw new Error(`Range not available, expected ${expected}, got ${actual} blocks in range`); + } + + const events = await indexer.getEventsInRange(fromBlockNumber, toBlockNumber); + return events.map(event => indexer.getResultEvent(event)); + }, + + getStateByCID: async (_: any, { cid }: { cid: string }) => { + log('getStateByCID', cid); + + const ipldBlock = await indexer.getIPLDBlockByCid(cid); + + return ipldBlock && ipldBlock.block.isComplete ? indexer.getResultIPLDBlock(ipldBlock) : undefined; + }, + + getState: async (_: any, { blockHash, contractAddress, kind = StateKind.Diff }: { blockHash: string, contractAddress: string, kind: string }) => { + log('getState', blockHash, contractAddress, kind); + + const ipldBlock = await indexer.getPrevIPLDBlock(blockHash, contractAddress, kind); + + return ipldBlock && ipldBlock.block.isComplete ? 
indexer.getResultIPLDBlock(ipldBlock) : undefined; + } + } + }; +}; diff --git a/packages/erc721-watcher/src/schema.gql b/packages/erc721-watcher/src/schema.gql new file mode 100644 index 00000000..b345f2b3 --- /dev/null +++ b/packages/erc721-watcher/src/schema.gql @@ -0,0 +1,110 @@ +scalar BigInt + +scalar BigDecimal + +scalar Bytes + +type Proof { + data: String! +} + +type ResultBoolean { + value: Boolean! + proof: Proof +} + +type ResultString { + value: String! + proof: Proof +} + +type ResultInt { + value: Int! + proof: Proof +} + +type ResultBigInt { + value: BigInt! + proof: Proof +} + +type _Block_ { + cid: String! + hash: String! + number: Int! + timestamp: Int! + parentHash: String! +} + +type _Transaction_ { + hash: String! + index: Int! + from: String! + to: String! +} + +type ResultEvent { + block: _Block_! + tx: _Transaction_! + contract: String! + eventIndex: Int! + event: Event! + proof: Proof +} + +union Event = ApprovalEvent | ApprovalForAllEvent | TransferEvent + +type ApprovalEvent { + owner: String! + approved: String! + tokenId: BigInt! +} + +type ApprovalForAllEvent { + owner: String! + operator: String! + approved: Boolean! +} + +type TransferEvent { + from: String! + to: String! + tokenId: BigInt! +} + +type ResultIPLDBlock { + block: _Block_! + contractAddress: String! + cid: String! + kind: String! + data: String! +} + +type Query { + events(blockHash: String!, contractAddress: String!, name: String): [ResultEvent!] + eventsInRange(fromBlockNumber: Int!, toBlockNumber: Int!): [ResultEvent!] + supportsInterface(blockHash: String!, contractAddress: String!, interfaceId: String!): ResultBoolean! + balanceOf(blockHash: String!, contractAddress: String!, owner: String!): ResultBigInt! + ownerOf(blockHash: String!, contractAddress: String!, tokenId: BigInt!): ResultString! + getApproved(blockHash: String!, contractAddress: String!, tokenId: BigInt!): ResultString! + isApprovedForAll(blockHash: String!, contractAddress: String!, owner: String!, operator: String!): ResultBoolean! + name(blockHash: String!, contractAddress: String!): ResultString! + symbol(blockHash: String!, contractAddress: String!): ResultString! + tokenURI(blockHash: String!, contractAddress: String!, tokenId: BigInt!): ResultString! + _name(blockHash: String!, contractAddress: String!): ResultString! + _symbol(blockHash: String!, contractAddress: String!): ResultString! + _owners(blockHash: String!, contractAddress: String!, key0: BigInt!): ResultString! + _balances(blockHash: String!, contractAddress: String!, key0: String!): ResultBigInt! + _tokenApprovals(blockHash: String!, contractAddress: String!, key0: BigInt!): ResultString! + _operatorApprovals(blockHash: String!, contractAddress: String!, key0: String!, key1: String!): ResultBoolean! + getStateByCID(cid: String!): ResultIPLDBlock + getState(blockHash: String!, contractAddress: String!, kind: String): ResultIPLDBlock +} + +type Mutation { + watchContract(address: String!, kind: String!, checkpoint: Boolean!, startingBlock: Int): Boolean! +} + +type Subscription { + onEvent: ResultEvent! +} diff --git a/packages/erc721-watcher/src/server.ts b/packages/erc721-watcher/src/server.ts new file mode 100644 index 00000000..1dce3dbe --- /dev/null +++ b/packages/erc721-watcher/src/server.ts @@ -0,0 +1,106 @@ +// +// Copyright 2021 Vulcanize, Inc. 
+// + +import fs from 'fs'; +import path from 'path'; +import assert from 'assert'; +import 'reflect-metadata'; +import express, { Application } from 'express'; +import { ApolloServer, PubSub } from 'apollo-server-express'; +import yargs from 'yargs'; +import { hideBin } from 'yargs/helpers'; +import debug from 'debug'; +import 'graphql-import-node'; +import { createServer } from 'http'; + +import { DEFAULT_CONFIG_PATH, getConfig, Config, JobQueue, KIND_ACTIVE, initClients } from '@vulcanize/util'; +import { GraphWatcher, Database as GraphDatabase } from '@vulcanize/graph-node'; + +import { createResolvers } from './resolvers'; +import { Indexer } from './indexer'; +import { Database } from './database'; +import { EventWatcher } from './events'; + +const log = debug('vulcanize:server'); + +export const main = async (): Promise => { + const argv = await yargs(hideBin(process.argv)) + .option('f', { + alias: 'config-file', + demandOption: true, + describe: 'configuration file path (toml)', + type: 'string', + default: DEFAULT_CONFIG_PATH + }) + .argv; + + const config: Config = await getConfig(argv.f); + const { ethClient, ethProvider } = await initClients(config); + + const { host, port, kind: watcherKind } = config.server; + + const db = new Database(config.database); + await db.init(); + + const graphDb = new GraphDatabase(config.database, path.resolve(__dirname, 'entity/*')); + await graphDb.init(); + + const graphWatcher = new GraphWatcher(graphDb, ethClient, ethProvider, config.server); + + // Note: In-memory pubsub works fine for now, as each watcher is a single process anyway. + // Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries + const pubsub = new PubSub(); + + const jobQueueConfig = config.jobQueue; + assert(jobQueueConfig, 'Missing job queue config'); + + const { dbConnectionString, maxCompletionLagInSecs } = jobQueueConfig; + assert(dbConnectionString, 'Missing job queue db connection string'); + + const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag: maxCompletionLagInSecs }); + + const indexer = new Indexer(config.server, db, ethClient, ethProvider, jobQueue, graphWatcher); + await indexer.init(); + + graphWatcher.setIndexer(indexer); + + const eventWatcher = new EventWatcher(config.upstream, ethClient, indexer, pubsub, jobQueue); + + if (watcherKind === KIND_ACTIVE) { + await jobQueue.start(); + await eventWatcher.start(); + } + + const resolvers = await createResolvers(indexer, eventWatcher); + + const app: Application = express(); + const typeDefs = fs.readFileSync(path.join(__dirname, 'schema.gql')).toString(); + const server = new ApolloServer({ + typeDefs, + resolvers + }); + + await server.start(); + server.applyMiddleware({ app }); + + const httpServer = createServer(app); + server.installSubscriptionHandlers(httpServer); + + httpServer.listen(port, host, () => { + log(`Server is listening on host ${host} port ${port}`); + }); + + return { app, server }; +}; + +main().then(() => { + log('Starting server...'); +}).catch(err => { + log(err); +}); + +process.on('SIGINT', () => { + log(`Exiting process ${process.pid} with code 0`); + process.exit(0); +}); diff --git a/packages/erc721-watcher/src/types.ts b/packages/erc721-watcher/src/types.ts new file mode 100644 index 00000000..c4562172 --- /dev/null +++ b/packages/erc721-watcher/src/types.ts @@ -0,0 +1,3 @@ +// +// Copyright 2021 Vulcanize, Inc. 
+// diff --git a/packages/erc721-watcher/tsconfig.json b/packages/erc721-watcher/tsconfig.json new file mode 100644 index 00000000..99712bdf --- /dev/null +++ b/packages/erc721-watcher/tsconfig.json @@ -0,0 +1,74 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig.json to read more about this file */ + + /* Basic Options */ + // "incremental": true, /* Enable incremental compilation */ + "target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */ + "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */ + // "lib": [], /* Specify library files to be included in the compilation. */ + // "allowJs": true, /* Allow javascript files to be compiled. */ + // "checkJs": true, /* Report errors in .js files. */ + // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */ + // "declaration": true, /* Generates corresponding '.d.ts' file. */ + // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ + // "sourceMap": true, /* Generates corresponding '.map' file. */ + // "outFile": "./", /* Concatenate and emit output to single file. */ + "outDir": "dist", /* Redirect output structure to the directory. */ + // "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ + // "composite": true, /* Enable project compilation */ + // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */ + // "removeComments": true, /* Do not emit comments to output. */ + // "noEmit": true, /* Do not emit outputs. */ + // "importHelpers": true, /* Import emit helpers from 'tslib'. */ + // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ + // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ + + /* Strict Type-Checking Options */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* Enable strict null checks. */ + // "strictFunctionTypes": true, /* Enable strict checking of function types. */ + // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */ + // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ + // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ + // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ + + /* Additional Checks */ + // "noUnusedLocals": true, /* Report errors on unused locals. */ + // "noUnusedParameters": true, /* Report errors on unused parameters. */ + // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ + // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ + // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an 'override' modifier. 
*/ + // "noPropertyAccessFromIndexSignature": true, /* Require undeclared properties from index signatures to use element accesses. */ + + /* Module Resolution Options */ + // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ + // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ + // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ + // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ + // "typeRoots": [], /* List of folders to include type definitions from. */ + // "types": [], /* Type declaration files to be included in compilation. */ + // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ + "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ + // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + + /* Source Map Options */ + // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ + // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ + + /* Experimental Options */ + "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */ + // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ + + /* Advanced Options */ + "skipLibCheck": true, /* Skip type checking of declaration files. */ + "forceConsistentCasingInFileNames": true, /* Disallow inconsistently-cased references to the same file. */ + "resolveJsonModule": true /* Enabling the option allows importing JSON, and validating the types in that JSON file. */ + }, + "include": ["src/**/*"] +}
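
Once the watcher is running, the GraphQL API exposes the queries declared in schema.gql above. As a usage sketch (the block hash, contract address, and owner below are placeholders), a balanceOf query looks like:

    query {
      balanceOf(
        blockHash: "0x..."
        contractAddress: "0x..."
        owner: "0x..."
      ) {
        value
        proof {
          data
        }
      }
    }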