Add a CLI to compare entity query results from two GQL endpoints (#57)

* Add CLI to compare entities from two GQL endpoints

* Print out result diffs in compare-entity CLI

* Get the colorized result diff in compare-entity CLI

* Read query dir from config file or as an arg

* Make config file arg required

* Make queries in Example schema similar to that in graph-node

* Get non-colorized output on choosing raw-json diff option

* Make queries in eden-watcher similar to that in graph-node
prathamesh0 2021-11-23 16:15:42 +05:30 committed by nabarun
parent 31b302c9b5
commit d979e51723
35 changed files with 535 additions and 413 deletions

View File

@ -1,5 +1,5 @@
query account($id: String!, $blockHash: String!){
account(id: $id, blockHash: $blockHash){
query account($id: String!, $blockHash: Bytes!){
account(id: $id, block: { hash: $blockHash }){
id
totalClaimed
totalSlashed
@ -7,23 +7,13 @@ query account($id: String!, $blockHash: String!){
id
timestamp
index
account{
id
totalClaimed
totalSlashed
slashes{
id
timestamp
account{
id
totalClaimed
totalSlashed
}
slashed
}
}
totalEarned
claimed
}
slashes{
id
timestamp
slashed
}
}
}

View File

@ -0,0 +1,20 @@
query block($id: String!, $blockHash: Bytes!){
block(id: $id, block: { hash: $blockHash }){
id
fromActiveProducer
hash
parentHash
unclesHash
author
stateRoot
transactionsRoot
receiptsRoot
number
gasUsed
gasLimit
timestamp
difficulty
totalDifficulty
size
}
}

View File

@ -1,5 +1,5 @@
query claim($id: String!, $blockHash: String!){
claim(id: $id, blockHash: $blockHash){
query claim($id: String!, $blockHash: Bytes!){
claim(id: $id, block: { hash: $blockHash }){
id
timestamp
index
@ -7,28 +7,6 @@ query claim($id: String!, $blockHash: String!){
id
totalClaimed
totalSlashed
claims{
id
timestamp
index
account{
id
totalClaimed
totalSlashed
slashes{
id
timestamp
account{
id
totalClaimed
totalSlashed
}
slashed
}
}
totalEarned
claimed
}
}
totalEarned
claimed

View File

@ -1,18 +1,8 @@
query distribution($id: String!, $blockHash: String!){
distribution(id: $id, blockHash: $blockHash){
query distribution($id: String!, $blockHash: Bytes!){
distribution(id: $id, block: { hash: $blockHash }){
id
distributor{
id
currentDistribution{
id
distributor{
id
}
timestamp
distributionNumber
merkleRoot
metadataURI
}
}
timestamp
distributionNumber

View File

@ -1,11 +1,8 @@
query distributor($id: String!, $blockHash: String!){
distributor(id: $id, blockHash: $blockHash){
query distributor($id: String!, $blockHash: Bytes!){
distributor(id: $id, block: { hash: $blockHash }){
id
currentDistribution{
id
distributor{
id
}
timestamp
distributionNumber
merkleRoot

View File

@ -1,21 +1,43 @@
query epoch($id: String!, $blockHash: String!){
epoch(id: $id, blockHash: $blockHash){
query epoch($id: String!, $blockHash: Bytes!){
epoch(id: $id, block: { hash: $blockHash }){
id
finalized
epochNumber
startBlock{
cid
id
fromActiveProducer
hash
number
timestamp
parentHash
unclesHash
author
stateRoot
transactionsRoot
receiptsRoot
number
gasUsed
gasLimit
timestamp
difficulty
totalDifficulty
size
}
endBlock{
cid
id
fromActiveProducer
hash
number
timestamp
parentHash
unclesHash
author
stateRoot
transactionsRoot
receiptsRoot
number
gasUsed
gasLimit
timestamp
difficulty
totalDifficulty
size
}
producerBlocks
allBlocks
@ -23,14 +45,6 @@ query epoch($id: String!, $blockHash: String!){
producerRewards{
id
address
epoch{
id
finalized
epochNumber
producerBlocks
allBlocks
producerBlocksRatio
}
totalRewards
blocksProduced
blocksProducedRatio

View File

@ -1,5 +1,5 @@
query network($id: String!, $blockHash: String!){
network(id: $id, blockHash: $blockHash){
query network($id: String!, $blockHash: Bytes!){
network(id: $id, block: { hash: $blockHash }){
id
slot0{
id
@ -10,34 +10,6 @@ query network($id: String!, $blockHash: String!){
startTime
expirationTime
taxRatePerDay
claims{
id
slot{
id
owner
delegate
winningBid
oldBid
startTime
expirationTime
taxRatePerDay
claims{
id
owner
winningBid
oldBid
startTime
expirationTime
taxRatePerDay
}
}
owner
winningBid
oldBid
startTime
expirationTime
taxRatePerDay
}
}
slot1{
id
@ -48,15 +20,6 @@ query network($id: String!, $blockHash: String!){
startTime
expirationTime
taxRatePerDay
claims{
id
owner
winningBid
oldBid
startTime
expirationTime
taxRatePerDay
}
}
slot2{
id
@ -67,15 +30,6 @@ query network($id: String!, $blockHash: String!){
startTime
expirationTime
taxRatePerDay
claims{
id
owner
winningBid
oldBid
startTime
expirationTime
taxRatePerDay
}
}
stakers{
id

View File

@ -1,5 +1,5 @@
query producer($id: String!, $blockHash: String!){
producer(id: $id, blockHash: $blockHash){
query producer($id: String!, $blockHash: Bytes!){
producer(id: $id, block: { hash: $blockHash }){
id
active
rewardCollector

View File

@ -1,43 +1,14 @@
query producerEpoch($id: String!, $blockHash: String!){
producerEpoch(id: $id, blockHash: $blockHash){
query producerEpoch($id: String!, $blockHash: Bytes!){
producerEpoch(id: $id, block: { hash: $blockHash }){
id
address
epoch{
id
finalized
epochNumber
startBlock{
cid
hash
number
timestamp
parentHash
}
endBlock{
cid
hash
number
timestamp
parentHash
}
producerBlocks
allBlocks
producerBlocksRatio
producerRewards{
id
address
epoch{
id
finalized
epochNumber
producerBlocks
allBlocks
producerBlocksRatio
}
totalRewards
blocksProduced
blocksProducedRatio
}
}
totalRewards
blocksProduced

View File

@ -1,5 +1,5 @@
query producerRewardCollectorChange($id: String!, $blockHash: String!){
producerRewardCollectorChange(id: $id, blockHash: $blockHash){
query producerRewardCollectorChange($id: String!, $blockHash: Bytes!){
producerRewardCollectorChange(id: $id, block: { hash: $blockHash }){
id
blockNumber
producer

View File

@ -1,5 +1,5 @@
query producerSet($id: String!, $blockHash: String!){
producerSet(id: $id, blockHash: $blockHash){
query producerSet($id: String!, $blockHash: Bytes!){
producerSet(id: $id, block: { hash: $blockHash }){
id
producers{
id

View File

@ -1,5 +1,5 @@
query producerSetChange($id: String!, $blockHash: String!){
producerSetChange(id: $id, blockHash: $blockHash){
query producerSetChange($id: String!, $blockHash: Bytes!){
producerSetChange(id: $id, block: { hash: $blockHash }){
id
blockNumber
producer

View File

@ -1,5 +1,5 @@
query rewardSchedule($id: String!, $blockHash: String!){
rewardSchedule(id: $id, blockHash: $blockHash){
query rewardSchedule($id: String!, $blockHash: Bytes!){
rewardSchedule(id: $id, block: { hash: $blockHash }){
id
rewardScheduleEntries{
id
@ -11,88 +11,17 @@ query rewardSchedule($id: String!, $blockHash: String!){
id
finalized
epochNumber
startBlock{
cid
hash
number
timestamp
parentHash
}
endBlock{
cid
hash
number
timestamp
parentHash
}
producerBlocks
allBlocks
producerBlocksRatio
producerRewards{
id
address
epoch{
id
finalized
epochNumber
startBlock{
cid
hash
number
timestamp
parentHash
}
endBlock{
cid
hash
number
timestamp
parentHash
}
producerBlocks
allBlocks
producerBlocksRatio
producerRewards{
id
address
totalRewards
blocksProduced
blocksProducedRatio
}
}
totalRewards
blocksProduced
blocksProducedRatio
}
}
pendingEpoch{
id
finalized
epochNumber
startBlock{
cid
hash
number
timestamp
parentHash
}
endBlock{
cid
hash
number
timestamp
parentHash
}
producerBlocks
allBlocks
producerBlocksRatio
producerRewards{
id
address
totalRewards
blocksProduced
blocksProducedRatio
}
}
activeRewardScheduleEntry{
id

View File

@ -1,5 +1,5 @@
query rewardScheduleEntry($id: String!, $blockHash: String!){
rewardScheduleEntry(id: $id, blockHash: $blockHash){
query rewardScheduleEntry($id: String!, $blockHash: Bytes!){
rewardScheduleEntry(id: $id, block: { hash: $blockHash }){
id
startTime
epochDuration

View File

@ -1,33 +1,11 @@
query slash($id: String!, $blockHash: String!){
slash(id: $id, blockHash: $blockHash){
query slash($id: String!, $blockHash: Bytes!){
slash(id: $id, block: { hash: $blockHash }){
id
timestamp
account{
id
totalClaimed
totalSlashed
claims{
id
timestamp
index
account{
id
totalClaimed
totalSlashed
slashes{
id
timestamp
account{
id
totalClaimed
totalSlashed
}
slashed
}
}
totalEarned
claimed
}
}
slashed
}

View File

@ -1,5 +1,5 @@
query slot($id: String!, $blockHash: String!){
slot(id: $id, blockHash: $blockHash){
query slot($id: String!, $blockHash: Bytes!){
slot(id: $id, block: { hash: $blockHash }){
id
owner
delegate
@ -10,16 +10,6 @@ query slot($id: String!, $blockHash: String!){
taxRatePerDay
claims{
id
slot{
id
owner
delegate
winningBid
oldBid
startTime
expirationTime
taxRatePerDay
}
owner
winningBid
oldBid

View File

@ -1,5 +1,5 @@
query slotClaim($id: String!, $blockHash: String!){
slotClaim(id: $id, blockHash: $blockHash){
query slotClaim($id: String!, $blockHash: Bytes!){
slotClaim(id: $id, block: { hash: $blockHash }){
id
slot{
id
@ -10,25 +10,6 @@ query slotClaim($id: String!, $blockHash: String!){
startTime
expirationTime
taxRatePerDay
claims{
id
slot{
id
owner
delegate
winningBid
oldBid
startTime
expirationTime
taxRatePerDay
}
owner
winningBid
oldBid
startTime
expirationTime
taxRatePerDay
}
}
owner
winningBid

View File

@ -1,5 +1,5 @@
query staker($id: String!, $blockHash: String!){
staker(id: $id, blockHash: $blockHash){
query staker($id: String!, $blockHash: Bytes!){
staker(id: $id, block: { hash: $blockHash }){
id
staked
rank

View File

@ -547,10 +547,10 @@ export class Indexer implements IndexerInterface {
return (ipfsAddr !== undefined && ipfsAddr !== null && ipfsAddr !== '');
}
async getSubgraphEntity<Entity> (entity: new () => Entity, id: string, blockHash: string): Promise<any> {
async getSubgraphEntity<Entity> (entity: new () => Entity, id: string, blockHash?: string): Promise<any> {
const relations = this._relationsMap.get(entity) || {};
const data = await this._graphWatcher.getEntity(entity, id, blockHash, relations);
const data = await this._graphWatcher.getEntity(entity, id, relations, blockHash);
return data;
}

View File

@ -6,6 +6,8 @@ import assert from 'assert';
import BigInt from 'apollo-type-bigint';
import debug from 'debug';
import { BlockHeight } from '@vulcanize/util';
import { Indexer } from './indexer';
import { EventWatcher } from './events';
@ -60,112 +62,112 @@ export const createResolvers = async (indexer: Indexer, eventWatcher: EventWatch
},
Query: {
producer: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('producer', id, blockHash);
producer: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('producer', id, block);
return indexer.getSubgraphEntity(Producer, id, blockHash);
return indexer.getSubgraphEntity(Producer, id, block.hash);
},
producerSet: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('producerSet', id, blockHash);
producerSet: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('producerSet', id, block);
return indexer.getSubgraphEntity(ProducerSet, id, blockHash);
return indexer.getSubgraphEntity(ProducerSet, id, block.hash);
},
producerSetChange: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('producerSetChange', id, blockHash);
producerSetChange: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('producerSetChange', id, block);
return indexer.getSubgraphEntity(ProducerSetChange, id, blockHash);
return indexer.getSubgraphEntity(ProducerSetChange, id, block.hash);
},
producerRewardCollectorChange: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('producerRewardCollectorChange', id, blockHash);
producerRewardCollectorChange: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('producerRewardCollectorChange', id, block);
return indexer.getSubgraphEntity(ProducerRewardCollectorChange, id, blockHash);
return indexer.getSubgraphEntity(ProducerRewardCollectorChange, id, block.hash);
},
rewardScheduleEntry: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('rewardScheduleEntry', id, blockHash);
rewardScheduleEntry: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('rewardScheduleEntry', id, block);
return indexer.getSubgraphEntity(RewardScheduleEntry, id, blockHash);
return indexer.getSubgraphEntity(RewardScheduleEntry, id, block.hash);
},
rewardSchedule: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('rewardSchedule', id, blockHash);
rewardSchedule: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('rewardSchedule', id, block);
return indexer.getSubgraphEntity(RewardSchedule, id, blockHash);
return indexer.getSubgraphEntity(RewardSchedule, id, block.hash);
},
producerEpoch: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('producerEpoch', id, blockHash);
producerEpoch: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('producerEpoch', id, block);
return indexer.getSubgraphEntity(ProducerEpoch, id, blockHash);
return indexer.getSubgraphEntity(ProducerEpoch, id, block.hash);
},
block: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('block', id, blockHash);
block: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('block', id, block);
return indexer.getSubgraphEntity(Block, id, blockHash);
return indexer.getSubgraphEntity(Block, id, block.hash);
},
epoch: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('epoch', id, blockHash);
epoch: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('epoch', id, block);
return indexer.getSubgraphEntity(Epoch, id, blockHash);
return indexer.getSubgraphEntity(Epoch, id, block.hash);
},
slotClaim: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('slotClaim', id, blockHash);
slotClaim: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('slotClaim', id, block);
return indexer.getSubgraphEntity(SlotClaim, id, blockHash);
return indexer.getSubgraphEntity(SlotClaim, id, block.hash);
},
slot: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('slot', id, blockHash);
slot: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('slot', id, block);
return indexer.getSubgraphEntity(Slot, id, blockHash);
return indexer.getSubgraphEntity(Slot, id, block.hash);
},
staker: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('staker', id, blockHash);
staker: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('staker', id, block);
return indexer.getSubgraphEntity(Staker, id, blockHash);
return indexer.getSubgraphEntity(Staker, id, block.hash);
},
network: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('network', id, blockHash);
network: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('network', id, block);
return indexer.getSubgraphEntity(Network, id, blockHash);
return indexer.getSubgraphEntity(Network, id, block.hash);
},
distributor: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('distributor', id, blockHash);
distributor: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('distributor', id, block);
return indexer.getSubgraphEntity(Distributor, id, blockHash);
return indexer.getSubgraphEntity(Distributor, id, block.hash);
},
distribution: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('distribution', id, blockHash);
distribution: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('distribution', id, block);
return indexer.getSubgraphEntity(Distribution, id, blockHash);
return indexer.getSubgraphEntity(Distribution, id, block.hash);
},
claim: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('claim', id, blockHash);
claim: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('claim', id, block);
return indexer.getSubgraphEntity(Claim, id, blockHash);
return indexer.getSubgraphEntity(Claim, id, block.hash);
},
slash: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('slash', id, blockHash);
slash: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('slash', id, block);
return indexer.getSubgraphEntity(Slash, id, blockHash);
return indexer.getSubgraphEntity(Slash, id, block.hash);
},
account: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
log('account', id, blockHash);
account: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
log('account', id, block);
return indexer.getSubgraphEntity(Account, id, blockHash);
return indexer.getSubgraphEntity(Account, id, block.hash);
},
events: async (_: any, { blockHash, contractAddress, name }: { blockHash: string, contractAddress: string, name?: string }) => {
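Every entity resolver in the block above follows the same shape: destructure `{ id, block = {} }` typed as `{ id: string, block: BlockHeight }`, log it, and call `indexer.getSubgraphEntity(Entity, id, block.hash)`. A minimal sketch of a helper factory that captures this pattern (hypothetical, not part of this commit):

```typescript
import debug from 'debug';

import { BlockHeight } from '@vulcanize/util';

import { Indexer } from './indexer';

const log = debug('vulcanize:resolver');

// Hypothetical helper: builds a Query resolver for a single subgraph entity type
// following the pattern used by the resolvers above.
const makeEntityResolver = <Entity> (indexer: Indexer, entity: new () => Entity, name: string) =>
  async (_: any, { id, block = {} }: { id: string, block: BlockHeight }): Promise<any> => {
    log(name, id, block);

    // block.hash is undefined when no block filter is passed, in which case the
    // indexer falls back to the latest entity state.
    return indexer.getSubgraphEntity(entity, id, block.hash);
  };
```

With such a helper the Query map would reduce to entries like `producer: makeEntityResolver(indexer, Producer, 'producer')`; the commit keeps the resolvers spelled out individually.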

View File

@ -4,6 +4,11 @@ scalar Bytes
scalar BigDecimal
input Block_height {
hash: Bytes
number: Int
}
type Proof {
data: String!
}
@ -218,24 +223,24 @@ type ResultIPLDBlock {
type Query {
events(blockHash: String!, contractAddress: String!, name: String): [ResultEvent!]
eventsInRange(fromBlockNumber: Int!, toBlockNumber: Int!): [ResultEvent!]
producer(id: String!, blockHash: String!): Producer!
producerSet(id: String!, blockHash: String!): ProducerSet!
producerSetChange(id: String!, blockHash: String!): ProducerSetChange!
producerRewardCollectorChange(id: String!, blockHash: String!): ProducerRewardCollectorChange!
rewardScheduleEntry(id: String!, blockHash: String!): RewardScheduleEntry!
rewardSchedule(id: String!, blockHash: String!): RewardSchedule!
producerEpoch(id: String!, blockHash: String!): ProducerEpoch!
block(id: String!, blockHash: String!): Block!
epoch(id: String!, blockHash: String!): Epoch!
slotClaim(id: String!, blockHash: String!): SlotClaim!
slot(id: String!, blockHash: String!): Slot!
staker(id: String!, blockHash: String!): Staker!
network(id: String!, blockHash: String!): Network!
distributor(id: String!, blockHash: String!): Distributor!
distribution(id: String!, blockHash: String!): Distribution!
claim(id: String!, blockHash: String!): Claim!
slash(id: String!, blockHash: String!): Slash!
account(id: String!, blockHash: String!): Account!
producer(id: String!, block: Block_height): Producer!
producerSet(id: String!, block: Block_height): ProducerSet!
producerSetChange(id: String!, block: Block_height): ProducerSetChange!
producerRewardCollectorChange(id: String!, block: Block_height): ProducerRewardCollectorChange!
rewardScheduleEntry(id: String!, block: Block_height): RewardScheduleEntry!
rewardSchedule(id: String!, block: Block_height): RewardSchedule!
producerEpoch(id: String!, block: Block_height): ProducerEpoch!
block(id: String!, block: Block_height): Block!
epoch(id: String!, block: Block_height): Epoch!
slotClaim(id: String!, block: Block_height): SlotClaim!
slot(id: String!, block: Block_height): Slot!
staker(id: String!, block: Block_height): Staker!
network(id: String!, block: Block_height): Network!
distributor(id: String!, block: Block_height): Distributor!
distribution(id: String!, block: Block_height): Distribution!
claim(id: String!, block: Block_height): Claim!
slash(id: String!, block: Block_height): Slash!
account(id: String!, block: Block_height): Account!
getStateByCID(cid: String!): ResultIPLDBlock
getState(blockHash: String!, contractAddress: String!, kind: String): ResultIPLDBlock
}
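With the new `Block_height` input, clients pass the block filter as an object (`block: { hash: ... }`, or by `number`) instead of a bare `blockHash` argument. A client-side sketch using the `GraphQLClient` from `@vulcanize/ipld-eth-client` (endpoint URL and id value are placeholders):

```typescript
import { gql } from '@apollo/client/core';
import { GraphQLClient } from '@vulcanize/ipld-eth-client';

const PRODUCER_QUERY = gql`
  query producer($id: String!, $blockHash: Bytes!) {
    producer(id: $id, block: { hash: $blockHash }) {
      id
      active
      rewardCollector
    }
  }
`;

const fetchProducer = async (id: string, blockHash: string): Promise<any> => {
  // Placeholder endpoint; point this at the watcher's GQL server.
  const client = new GraphQLClient({ gqlEndpoint: 'http://localhost:3008/graphql' });

  // The variables map onto the Block_height input via block: { hash: $blockHash }.
  return client.query(PRODUCER_QUERY, { id, blockHash });
};
```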

View File

@ -36,3 +36,45 @@
6. Run `yarn build:example` to build the wasm files.
7. Run `yarn test`.
## Run
* Compare query results from two different GQL endpoints:
* In a config file (sample: `environments/compare-cli-config.toml`):
* Specify the two GQL endpoints in the endpoints config.
* Specify the query directory in the queries config, or pass it as an argument to the CLI.
* Example:
```
[endpoints]
gqlEndpoint1 = "http://localhost:8000/subgraphs/name/example1"
gqlEndpoint2 = "http://localhost:3008/graphql"
[queries]
queryDir = "../graph-test-watcher/src/gql/queries"
```
* Fire a query and get the diff of the results from the two GQL endpoints:
```bash
yarn compare-entity --config-file <config-file-path> --query-dir [query-dir] --query-name <query-name> --block-hash <block-hash> --entity-id <entity-id> --raw-json [true | false]
```
* `config-file` (alias: `cf`): Configuration file path (toml) (required).
* `query-dir` (alias: `qf`): Path to the queries directory (default: taken from the config file).
* `query-name` (alias: `q`): Query to be fired (required).
* `block-hash` (alias: `b`): Block hash (required).
* `entity-id` (alias: `i`): Entity ID (required).
* `raw-json` (alias: `j`): Whether to print out a raw diff object (default: `false`).
Example:
```bash
yarn compare-entity --config-file environments/compare-cli-config.toml --query-name exampleEntity --block-hash 0xceed7ee9d3de97c99db12e42433cae9115bb311c516558539fb7114fa17d545b --entity-id 0x2886bae64814bd959aec4282f86f3a97bf1e16e4111b39fd7bdd592b516c66c6
```
* The program will exit with code `1` if the query results are not equal.
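Under the hood, the CLI fetches the same entity from both endpoints and diffs the JSON results. A condensed sketch of that flow (simplified from `src/cli/compare/compare-entity.ts` added in this commit, using the sample values above):

```typescript
import { diffString } from 'json-diff';

// Client wraps a GQL endpoint plus a query directory (see src/cli/compare/client.ts).
import { Client } from './client';

const compare = async (): Promise<void> => {
  const queryDir = '../graph-test-watcher/src/gql/queries';

  // The two endpoints from the [endpoints] section of the config file.
  const client1 = new Client({ gqlEndpoint: 'http://localhost:8000/subgraphs/name/example1' }, queryDir);
  const client2 = new Client({ gqlEndpoint: 'http://localhost:3008/graphql' }, queryDir);

  const params = {
    queryName: 'exampleEntity',
    id: '0x2886bae64814bd959aec4282f86f3a97bf1e16e4111b39fd7bdd592b516c66c6',
    blockHash: '0xceed7ee9d3de97c99db12e42433cae9115bb311c516558539fb7114fa17d545b'
  };

  // Fire the same entity query against both endpoints.
  const result1 = await client1.getEntity(params);
  const result2 = await client2.getEntity(params);

  // diffString produces a colorized, human-readable diff; it is empty when the results match.
  const resultDiff = diffString(result1, result2);

  if (resultDiff) {
    console.log(resultDiff);
    process.exit(1);
  }
};

compare().catch(console.error);
```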

View File

@ -0,0 +1,6 @@
[endpoints]
gqlEndpoint1 = "http://localhost:8000/subgraphs/name/example1"
gqlEndpoint2 = "http://localhost:3008/graphql"
[queries]
queryDir = "../graph-test-watcher/src/gql/queries"

View File

@ -32,13 +32,25 @@
"asbuild": "yarn asbuild:debug && yarn asbuild:release",
"test": "yarn asbuild:debug && mocha src/**/*.test.ts",
"build:example": "cd test/subgraph/example1 && yarn && yarn build",
"watch": "DEBUG=vulcanize:* nodemon --watch src src/watcher.ts"
"watch": "DEBUG=vulcanize:* nodemon --watch src src/watcher.ts",
"compare-entity": "DEBUG=vulcanize:* ts-node src/cli/compare/compare-entity.ts"
},
"dependencies": {
"@apollo/client": "^3.3.19",
"@types/json-diff": "^0.5.2",
"@vulcanize/assemblyscript": "0.0.1",
"@vulcanize/ipld-eth-client": "^0.1.0",
"@vulcanize/util": "^0.1.0",
"debug": "^4.3.1",
"decimal.js": "^10.3.1",
"fs-extra": "^10.0.0",
"js-yaml": "^4.1.0",
"json-bigint": "^1.0.0",
"json-diff": "^0.5.4",
"lodash": "^4.17.21",
"reflect-metadata": "^0.1.13",
"toml": "^3.0.0",
"typeorm": "^0.2.32",
"decimal.js": "^10.3.1"
"yargs": "^17.0.1"
}
}

View File

@ -0,0 +1,38 @@
//
// Copyright 2021 Vulcanize, Inc.
//
import assert from 'assert';
import fs from 'fs';
import path from 'path';
import { gql } from '@apollo/client/core';
import { GraphQLClient, GraphQLConfig } from '@vulcanize/ipld-eth-client';
export class Client {
_config: GraphQLConfig;
_graphqlClient: GraphQLClient;
_queryDir: string;
constructor (config: GraphQLConfig, queryDir: string) {
this._config = config;
this._queryDir = path.resolve(process.cwd(), queryDir);
const { gqlEndpoint } = config;
assert(gqlEndpoint, 'Missing gql endpoint');
this._graphqlClient = new GraphQLClient(config);
}
async getEntity ({ blockHash, queryName, id }: { blockHash: string, queryName: string, id: string }): Promise<any> {
const entityQuery = fs.readFileSync(path.resolve(this._queryDir, `${queryName}.gql`), 'utf8');
return this._graphqlClient.query(
gql(entityQuery),
{
id,
blockHash
}
);
}
}
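A hedged usage sketch of this `Client` class (the endpoint and query directory are the sample values from `environments/compare-cli-config.toml`; the id and block hash are the README's example values):

```typescript
import { Client } from './client';

const run = async (): Promise<void> => {
  const client = new Client({ gqlEndpoint: 'http://localhost:3008/graphql' }, '../graph-test-watcher/src/gql/queries');

  // Loads exampleEntity.gql from the query directory and executes it with the given variables.
  const result = await client.getEntity({
    queryName: 'exampleEntity',
    id: '0x2886bae64814bd959aec4282f86f3a97bf1e16e4111b39fd7bdd592b516c66c6',
    blockHash: '0xceed7ee9d3de97c99db12e42433cae9115bb311c516558539fb7114fa17d545b'
  });

  console.log(JSON.stringify(result, null, 2));
};

run().catch(console.error);
```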

View File

@ -0,0 +1,150 @@
//
// Copyright 2021 Vulcanize, Inc.
//
import yargs from 'yargs';
import 'reflect-metadata';
import path from 'path';
import toml from 'toml';
import fs from 'fs-extra';
import assert from 'assert';
import util from 'util';
import { diffString, diff } from 'json-diff';
import { Client } from './client';
interface EndpointConfig {
gqlEndpoint1: string;
gqlEndpoint2: string;
}
interface QueryConfig {
queryDir: string;
}
interface Config {
endpoints: EndpointConfig;
queries: QueryConfig;
}
const main = async (): Promise<void> => {
const argv = await yargs.parserConfiguration({
'parse-numbers': false
}).options({
configFile: {
alias: 'cf',
type: 'string',
demandOption: true,
describe: 'Configuration file path (toml)'
},
queryDir: {
alias: 'qf',
type: 'string',
describe: 'Path to queries directory'
},
blockHash: {
alias: 'b',
type: 'string',
demandOption: true,
describe: 'Blockhash'
},
queryName: {
alias: 'q',
type: 'string',
demandOption: true,
describe: 'Query name'
},
entityId: {
alias: 'i',
type: 'string',
demandOption: true,
describe: 'Id of the entity to be queried'
},
rawJson: {
alias: 'j',
type: 'boolean',
describe: 'Whether to print out raw diff object',
default: false
}
}).argv;
const config: Config = await getConfig(argv.configFile);
const { client1, client2 } = await getClients(config, argv.queryDir);
const queryName = argv.queryName;
const id = argv.entityId;
const blockHash = argv.blockHash;
const result1 = await client1.getEntity({ blockHash, queryName, id });
const result2 = await client2.getEntity({ blockHash, queryName, id });
// Getting the diff of two result objects.
let resultDiff;
if (argv.rawJson) {
resultDiff = diff(result1, result2);
if (resultDiff) {
// Use util.inspect to extend depth limit in the output.
resultDiff = util.inspect(diff(result1, result2), false, null);
}
} else {
resultDiff = diffString(result1, result2);
}
if (resultDiff) {
console.log(resultDiff);
process.exit(1);
}
};
async function getConfig (configFile: string): Promise<Config> {
const configFilePath = path.resolve(configFile);
const fileExists = await fs.pathExists(configFilePath);
if (!fileExists) {
throw new Error(`Config file not found: ${configFilePath}`);
}
const config = toml.parse(await fs.readFile(configFilePath, 'utf8'));
return config;
}
async function getClients (config: Config, queryDir?: string): Promise<{
client1: Client,
client2: Client
}> {
assert(config.endpoints, 'Missing endpoints config');
const gqlEndpoint1 = config.endpoints.gqlEndpoint1;
const gqlEndpoint2 = config.endpoints.gqlEndpoint2;
assert(gqlEndpoint1, 'Missing endpoint one');
assert(gqlEndpoint2, 'Missing endpoint two');
if (!queryDir) {
assert(config.queries, 'Missing queries config');
queryDir = config.queries.queryDir;
}
assert(queryDir, 'Query directory not provided');
const client1 = new Client({
gqlEndpoint: gqlEndpoint1
}, queryDir);
const client2 = new Client({
gqlEndpoint: gqlEndpoint2
}, queryDir);
return {
client1,
client2
};
}
main().catch(err => {
console.log(err);
}).finally(() => {
process.exit(0);
});

View File

@ -40,7 +40,8 @@ export class Database {
return this._baseDatabase.close();
}
async getEntity<Entity> (entity: (new () => Entity) | string, id: string, blockHash: string): Promise<Entity | undefined> {
async getEntity<Entity> (entity: (new () => Entity) | string, id: string, blockHash?: string): Promise<Entity | undefined> {
// TODO: Take block number as an optional argument
const queryRunner = this._conn.createQueryRunner();
try {
@ -73,33 +74,36 @@ export class Database {
}
}
async getEntityWithRelations<Entity> (entity: (new () => Entity) | string, id: string, blockHash: string, relations: { [key: string]: any }): Promise<Entity | undefined> {
async getEntityWithRelations<Entity> (entity: (new () => Entity) | string, id: string, relations: { [key: string]: any }, blockHash?: string): Promise<Entity | undefined> {
const queryRunner = this._conn.createQueryRunner();
try {
const repo = queryRunner.manager.getRepository(entity);
// Fetching blockHash for previous entity in frothy region.
const { blockHash: entityblockHash, blockNumber, id: frothyId } = await this._baseDatabase.getFrothyEntity(queryRunner, repo, { blockHash, id });
let selectQueryBuilder = repo.createQueryBuilder('entity');
if (frothyId) {
// If entity found in frothy region.
selectQueryBuilder = selectQueryBuilder.where('entity.block_hash = :entityblockHash', { entityblockHash });
} else {
// If entity not in frothy region.
const canonicalBlockNumber = blockNumber + 1;
selectQueryBuilder = selectQueryBuilder.where('entity.id = :id', { id })
.orderBy('entity.block_number', 'DESC')
.limit(1);
selectQueryBuilder = selectQueryBuilder.innerJoinAndSelect('block_progress', 'block', 'block.block_hash = entity.block_hash')
.where('block.is_pruned = false')
.andWhere('entity.block_number <= :canonicalBlockNumber', { canonicalBlockNumber })
.orderBy('entity.block_number', 'DESC')
.limit(1);
// Use blockHash if provided.
if (blockHash) {
// Fetching blockHash for previous entity in frothy region.
const { blockHash: entityblockHash, blockNumber, id: frothyId } = await this._baseDatabase.getFrothyEntity(queryRunner, repo, { blockHash, id });
if (frothyId) {
// If entity found in frothy region.
selectQueryBuilder = selectQueryBuilder.andWhere('entity.block_hash = :entityblockHash', { entityblockHash });
} else {
// If entity not in frothy region.
const canonicalBlockNumber = blockNumber + 1;
selectQueryBuilder = selectQueryBuilder.innerJoinAndSelect('block_progress', 'block', 'block.block_hash = entity.block_hash')
.andWhere('block.is_pruned = false')
.andWhere('entity.block_number <= :canonicalBlockNumber', { canonicalBlockNumber });
}
}
selectQueryBuilder = selectQueryBuilder.andWhere('entity.id = :id', { id });
// TODO: Implement query for nested relations.
Object.entries(relations).forEach(([field, data], index) => {
const { entity: relatedEntity, isArray } = data;
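With `blockHash` now optional (and moved after `relations`), callers can either fetch the latest entity state or pin the lookup to a block. A hedged sketch of both call styles (the entity class is illustrative, and the interface below just restates the method signature shown above):

```typescript
// Structural type restating the signature of Database.getEntityWithRelations.
interface EntityDatabase {
  getEntityWithRelations<Entity> (entity: (new () => Entity) | string, id: string, relations: { [key: string]: any }, blockHash?: string): Promise<Entity | undefined>;
}

// Illustrative entity class; any generated subgraph entity works here.
class Producer {
  id!: string;
  active!: boolean;
}

const fetchProducer = async (database: EntityDatabase, id: string, blockHash?: string): Promise<Producer | undefined> => {
  const relations = {};

  // With a blockHash the lookup is frothy-region aware; without one, the latest
  // un-pruned entity version (highest block number) is returned.
  return database.getEntityWithRelations(Producer, id, relations, blockHash);
};
```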

View File

@ -177,9 +177,9 @@ export class GraphWatcher {
this._indexer = indexer;
}
async getEntity<Entity> (entity: new () => Entity, id: string, blockHash: string, relations: { [key: string]: any }): Promise<any> {
async getEntity<Entity> (entity: new () => Entity, id: string, relations: { [key: string]: any }, blockHash?: string): Promise<any> {
// Get entity from the database.
const result = await this._database.getEntityWithRelations(entity, id, blockHash, relations) as any;
const result = await this._database.getEntityWithRelations(entity, id, relations, blockHash) as any;
// Resolve any field name conflicts in the entity result.
return resolveEntityFieldConflicts(result);

View File

@ -1,8 +1,21 @@
query exampleEntity($id: String!, $blockHash: String!){
exampleEntity(id: $id, blockHash: $blockHash){
query exampleEntity($id: String!, $blockHash: Bytes!){
exampleEntity(id: $id, block: { hash: $blockHash }){
id
count
param1
param2
paramString
paramInt
paramBoolean
paramBytes
paramEnum
paramBigDecimal
related {
id
paramBigInt
bigIntArray
}
manyRelated {
id
count
}
}
}
}

View File

@ -0,0 +1,6 @@
query manyRelatedEntity($id: String!, $blockHash: Bytes!){
manyRelatedEntity(id: $id, block: { hash: $blockHash }){
id
count
}
}

View File

@ -0,0 +1,7 @@
query relatedEntity($id: String!, $blockHash: Bytes!){
relatedEntity(id: $id, block: { hash: $blockHash }){
id
paramBigInt
bigIntArray
}
}

View File

@ -544,10 +544,10 @@ export class Indexer implements IndexerInterface {
return (ipfsAddr !== undefined && ipfsAddr !== null && ipfsAddr !== '');
}
async getSubgraphEntity<Entity> (entity: new () => Entity, id: string, blockHash: string): Promise<Entity | undefined> {
async getSubgraphEntity<Entity> (entity: new () => Entity, id: string, blockHash?: string): Promise<Entity | undefined> {
const relations = this._relationsMap.get(entity) || {};
const data = await this._graphWatcher.getEntity(entity, id, blockHash, relations);
const data = await this._graphWatcher.getEntity(entity, id, relations, blockHash);
return data;
}

View File

@ -6,7 +6,7 @@ import assert from 'assert';
import BigInt from 'apollo-type-bigint';
import debug from 'debug';
import { ValueResult } from '@vulcanize/util';
import { ValueResult, BlockHeight } from '@vulcanize/util';
import { Indexer } from './indexer';
import { EventWatcher } from './events';
@ -57,22 +57,22 @@ export const createResolvers = async (indexer: Indexer, eventWatcher: EventWatch
return indexer._test(blockHash, contractAddress);
},
exampleEntity: async (_: any, { id, blockHash }: { id: string, blockHash: string }): Promise<ExampleEntity | undefined> => {
log('exampleEntity', id, blockHash);
relatedEntity: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }): Promise<RelatedEntity | undefined> => {
log('relatedEntity', id, block);
return indexer.getSubgraphEntity(ExampleEntity, id, blockHash);
return indexer.getSubgraphEntity(RelatedEntity, id, block.hash);
},
relatedEntity: async (_: any, { id, blockHash }: { id: string, blockHash: string }): Promise<RelatedEntity | undefined> => {
log('relatedEntity', id, blockHash);
manyRelatedEntity: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }): Promise<ManyRelatedEntity | undefined> => {
log('relatedEntity', id, block);
return indexer.getSubgraphEntity(RelatedEntity, id, blockHash);
return indexer.getSubgraphEntity(ManyRelatedEntity, id, block.hash);
},
manyRelatedEntity: async (_: any, { id, blockHash }: { id: string, blockHash: string }): Promise<ManyRelatedEntity | undefined> => {
log('relatedEntity', id, blockHash);
exampleEntity: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }): Promise<ExampleEntity | undefined> => {
log('exampleEntity', id, block);
return indexer.getSubgraphEntity(ManyRelatedEntity, id, blockHash);
return indexer.getSubgraphEntity(ExampleEntity, id, block.hash);
},
events: async (_: any, { blockHash, contractAddress, name }: { blockHash: string, contractAddress: string, name?: string }) => {

View File

@ -4,6 +4,11 @@ scalar Bytes
scalar BigDecimal
input Block_height {
hash: Bytes
number: Int
}
type Proof {
data: String!
}
@ -72,9 +77,9 @@ type Query {
eventsInRange(fromBlockNumber: Int!, toBlockNumber: Int!): [ResultEvent!]
getMethod(blockHash: String!, contractAddress: String!): ResultString!
_test(blockHash: String!, contractAddress: String!): ResultBigInt!
relatedEntity(id: String!, blockHash: String!): RelatedEntity!
exampleEntity(id: String!, blockHash: String!): ExampleEntity!
manyRelatedEntity(id: String!, blockHash: String!): ManyRelatedEntity!
relatedEntity(id: String!, block: Block_height): RelatedEntity!
exampleEntity(id: String!, block: Block_height): ExampleEntity!
manyRelatedEntity(id: String!, block: Block_height): ManyRelatedEntity!
getStateByCID(cid: String!): ResultIPLDBlock
getState(blockHash: String!, contractAddress: String!, kind: String): ResultIPLDBlock
}

View File

@ -2458,6 +2458,11 @@
resolved "https://registry.yarnpkg.com/@types/json-bigint/-/json-bigint-1.0.0.tgz#7a4726540cc6fe47cfa54b9b3022b89cf7fe1517"
integrity sha512-WW+0cfH3ovFN6ROV+p/Xfw36dT6s16hbXBYIG49PYw6+j6e+AkpqYccctgxwyicBmC8CZDBnPhOH94shFhXgHQ==
"@types/json-diff@^0.5.2":
version "0.5.2"
resolved "https://registry.yarnpkg.com/@types/json-diff/-/json-diff-0.5.2.tgz#83689a504b3c7759f046d125d5521d6760ab9d0d"
integrity sha512-2oqXStJYYLDHCciNAClY277Ti3kXT+JLvPD7lLm/490i+B7g0GR6M4qiW+bd2V5vpB+yMKY8IelbsHMAYX1D0A==
"@types/json-schema@^7.0.3":
version "7.0.7"
resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.7.tgz#98a993516c859eb0d5c4c8f098317a9ea68db9ad"
@ -4852,6 +4857,13 @@ cli-boxes@^2.2.0:
resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.1.tgz#ddd5035d25094fce220e9cab40a45840a440318f"
integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==
cli-color@~0.1.6:
version "0.1.7"
resolved "https://registry.yarnpkg.com/cli-color/-/cli-color-0.1.7.tgz#adc3200fa471cc211b0da7f566b71e98b9d67347"
integrity sha1-rcMgD6RxzCEbDaf1ZrcemLnWc0c=
dependencies:
es5-ext "0.8.x"
cli-cursor@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307"
@ -5770,6 +5782,13 @@ diffie-hellman@^5.0.0:
miller-rabin "^4.0.0"
randombytes "^2.0.0"
difflib@~0.2.1:
version "0.2.4"
resolved "https://registry.yarnpkg.com/difflib/-/difflib-0.2.4.tgz#b5e30361a6db023176d562892db85940a718f47e"
integrity sha1-teMDYabbAjF21WKJLbhZQKcY9H4=
dependencies:
heap ">= 0.2.0"
dir-glob@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f"
@ -5836,6 +5855,13 @@ dotignore@~0.1.2:
dependencies:
minimatch "^3.0.4"
dreamopt@~0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/dreamopt/-/dreamopt-0.6.0.tgz#d813ccdac8d39d8ad526775514a13dda664d6b4b"
integrity sha1-2BPM2sjTnYrVJndVFKE92mZNa0s=
dependencies:
wordwrap ">=0.0.2"
dtrace-provider@~0.8:
version "0.8.8"
resolved "https://registry.yarnpkg.com/dtrace-provider/-/dtrace-provider-0.8.8.tgz#2996d5490c37e1347be263b423ed7b297fb0d97e"
@ -6035,6 +6061,11 @@ es-to-primitive@^1.2.1:
is-date-object "^1.0.1"
is-symbol "^1.0.2"
es5-ext@0.8.x:
version "0.8.2"
resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.8.2.tgz#aba8d9e1943a895ac96837a62a39b3f55ecd94ab"
integrity sha1-q6jZ4ZQ6iVrJaDemKjmz9V7NlKs=
es5-ext@^0.10.35, es5-ext@^0.10.50:
version "0.10.53"
resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.53.tgz#93c5a3acfdbef275220ad72644ad02ee18368de1"
@ -8056,7 +8087,7 @@ he@1.2.0:
resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f"
integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==
heap@0.2.6:
heap@0.2.6, "heap@>= 0.2.0":
version "0.2.6"
resolved "https://registry.yarnpkg.com/heap/-/heap-0.2.6.tgz#087e1f10b046932fc8594dd9e6d378afc9d1e5ac"
integrity sha1-CH4fELBGky/IWU3Z5tN4r8nR5aw=
@ -9078,6 +9109,15 @@ json-buffer@3.0.0:
resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898"
integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=
json-diff@^0.5.4:
version "0.5.4"
resolved "https://registry.yarnpkg.com/json-diff/-/json-diff-0.5.4.tgz#7bc8198c441756632aab66c7d9189d365a7a035a"
integrity sha512-q5Xmx9QXNOzOzIlMoYtLrLiu4Jl/Ce2bn0CNcv54PhyH89CI4GWlGVDye8ei2Ijt9R3U+vsWPsXpLUNob8bs8Q==
dependencies:
cli-color "~0.1.6"
difflib "~0.2.1"
dreamopt "~0.6.0"
json-parse-better-errors@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9"
@ -14797,7 +14837,7 @@ word-wrap@^1.2.3:
resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c"
integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==
wordwrap@^1.0.0:
wordwrap@>=0.0.2, wordwrap@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb"
integrity sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=