Mirror of https://github.com/cerc-io/watcher-ts (synced 2025-08-03 12:44:08 +00:00)
Add a CLI to compare entity query results from two GQL endpoints (#57)

* Add CLI to compare entities from two GQL endpoints
* Print out result diffs in compare-entity CLI
* Get the colorized result diff in compare-entity CLI
* Read query dir from config file or as an arg
* Make config file arg required
* Make queries in Example schema similar to that in graph-node
* Get non-colorized output on choosing raw-json diff option
* Make queries in eden-watcher similar to that in graph-node

Commit: d979e51723 (parent: 31b302c9b5)
eden-watcher account query:

@@ -1,5 +1,5 @@
-query account($id: String!, $blockHash: String!){
-  account(id: $id, blockHash: $blockHash){
+query account($id: String!, $blockHash: Bytes!){
+  account(id: $id, block: { hash: $blockHash }){
     id
     totalClaimed
     totalSlashed
@@ -7,23 +7,13 @@ query account($id: String!, $blockHash: String!){
       id
       timestamp
       index
-      account{
-        id
-        totalClaimed
-        totalSlashed
-        slashes{
-          id
-          timestamp
-          account{
-            id
-            totalClaimed
-            totalSlashed
-          }
-          slashed
-        }
-      }
       totalEarned
       claimed
     }
+    slashes{
+      id
+      timestamp
+      slashed
+    }
   }
 }
packages/eden-watcher/src/gql/queries/block.gql (new file, 20 lines):

query block($id: String!, $blockHash: Bytes!){
  block(id: $id, block: { hash: $blockHash }){
    id
    fromActiveProducer
    hash
    parentHash
    unclesHash
    author
    stateRoot
    transactionsRoot
    receiptsRoot
    number
    gasUsed
    gasLimit
    timestamp
    difficulty
    totalDifficulty
    size
  }
}
The remaining eden-watcher entity queries receive the same two mechanical changes: the `$blockHash` variable type goes from `String!` to `Bytes!`, and the root selection argument goes from `blockHash: $blockHash` to `block: { hash: $blockHash }`, for example in the claim query:

@@ -1,5 +1,5 @@
-query claim($id: String!, $blockHash: String!){
-  claim(id: $id, blockHash: $blockHash){
+query claim($id: String!, $blockHash: Bytes!){
+  claim(id: $id, block: { hash: $blockHash }){
     id
     timestamp
     index

The same change is applied to the distribution, distributor, epoch, network, producer, producerEpoch, producerRewardCollectorChange, producerSet, producerSetChange, rewardSchedule, rewardScheduleEntry, slash, slot, slotClaim and staker queries. In addition, the selection sets are trimmed or expanded to match the equivalent graph-node queries:

* claim and slash drop the claims subtree nested inside their account relation, keeping the account's own fields (id, totalClaimed, totalSlashed).
* distribution drops the distributor.currentDistribution subtree; distributor drops the currentDistribution.distributor subtree.
* epoch replaces the old startBlock/endBlock selection (cid, hash, number, timestamp, parentHash) with the full Block selection used in the new block.gql query (id, fromActiveProducer, hash, parentHash, unclesHash, author, stateRoot, transactionsRoot, receiptsRoot, number, gasUsed, gasLimit, timestamp, difficulty, totalDifficulty, size) and drops the epoch subtree inside producerRewards.
* producerEpoch and rewardSchedule drop the startBlock, endBlock and producerRewards subtrees from their nested epoch selections.
* network drops the claims subtree inside slot0, slot1 and slot2; slot drops the slot subtree nested inside claims; slotClaim drops the claims subtree nested inside slot.
* producer, producerRewardCollectorChange, producerSet, producerSetChange, rewardScheduleEntry and staker only get the variable and argument change.
eden-watcher Indexer:

@@ -547,10 +547,10 @@ export class Indexer implements IndexerInterface {
     return (ipfsAddr !== undefined && ipfsAddr !== null && ipfsAddr !== '');
   }

-  async getSubgraphEntity<Entity> (entity: new () => Entity, id: string, blockHash: string): Promise<any> {
+  async getSubgraphEntity<Entity> (entity: new () => Entity, id: string, blockHash?: string): Promise<any> {
     const relations = this._relationsMap.get(entity) || {};

-    const data = await this._graphWatcher.getEntity(entity, id, blockHash, relations);
+    const data = await this._graphWatcher.getEntity(entity, id, relations, blockHash);

     return data;
   }
eden-watcher resolvers:

@@ -6,6 +6,8 @@ import assert from 'assert';
 import BigInt from 'apollo-type-bigint';
 import debug from 'debug';

+import { BlockHeight } from '@vulcanize/util';
+
 import { Indexer } from './indexer';
 import { EventWatcher } from './events';

@@ -60,112 +62,112 @@ export const createResolvers = async (indexer: Indexer, eventWatcher: EventWatch

Every entity resolver switches from a `blockHash: string` argument to a `block: BlockHeight` argument and passes `block.hash` down to the indexer, for example:

     },

     Query: {
-      producer: async (_: any, { id, blockHash }: { id: string, blockHash: string }) => {
-        log('producer', id, blockHash);
-
-        return indexer.getSubgraphEntity(Producer, id, blockHash);
+      producer: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }) => {
+        log('producer', id, block);
+
+        return indexer.getSubgraphEntity(Producer, id, block.hash);
       },

The identical change is made to the producerSet, producerSetChange, producerRewardCollectorChange, rewardScheduleEntry, rewardSchedule, producerEpoch, block, epoch, slotClaim, slot, staker, network, distributor, distribution, claim, slash and account resolvers; the events resolver keeps its blockHash argument.
eden-watcher schema:

@@ -4,6 +4,11 @@ scalar Bytes

 scalar BigDecimal

+input Block_height {
+  hash: Bytes
+  number: Int
+}
+
 type Proof {
   data: String!
 }

@@ -218,24 +223,24 @@ type ResultIPLDBlock {

Every entity field on Query drops its required `blockHash: String!` argument in favour of an optional `block: Block_height` argument, giving:

 type Query {
   events(blockHash: String!, contractAddress: String!, name: String): [ResultEvent!]
   eventsInRange(fromBlockNumber: Int!, toBlockNumber: Int!): [ResultEvent!]
   producer(id: String!, block: Block_height): Producer!
   producerSet(id: String!, block: Block_height): ProducerSet!
   producerSetChange(id: String!, block: Block_height): ProducerSetChange!
   producerRewardCollectorChange(id: String!, block: Block_height): ProducerRewardCollectorChange!
   rewardScheduleEntry(id: String!, block: Block_height): RewardScheduleEntry!
   rewardSchedule(id: String!, block: Block_height): RewardSchedule!
   producerEpoch(id: String!, block: Block_height): ProducerEpoch!
   block(id: String!, block: Block_height): Block!
   epoch(id: String!, block: Block_height): Epoch!
   slotClaim(id: String!, block: Block_height): SlotClaim!
   slot(id: String!, block: Block_height): Slot!
   staker(id: String!, block: Block_height): Staker!
   network(id: String!, block: Block_height): Network!
   distributor(id: String!, block: Block_height): Distributor!
   distribution(id: String!, block: Block_height): Distribution!
   claim(id: String!, block: Block_height): Claim!
   slash(id: String!, block: Block_height): Slash!
   account(id: String!, block: Block_height): Account!
   getStateByCID(cid: String!): ResultIPLDBlock
   getState(blockHash: String!, contractAddress: String!, kind: String): ResultIPLDBlock
 }
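With the new `Block_height` input, clients pin an entity query to a block by passing `block: { hash: ... }`, or omit the argument to read the latest state. The following is a minimal TypeScript sketch, not part of the commit, using `@apollo/client` (added as a dependency in this commit) against a placeholder watcher endpoint with placeholder ids, and following the shape of the updated producer query:

```typescript
import { ApolloClient, HttpLink, InMemoryCache, gql } from '@apollo/client/core';

// Placeholder endpoint; substitute a running eden-watcher GQL server.
// HttpLink needs a fetch implementation (global fetch on Node 18+, or a polyfill).
const client = new ApolloClient({
  link: new HttpLink({ uri: 'http://localhost:3008/graphql' }),
  cache: new InMemoryCache()
});

const PRODUCER_QUERY = gql`
  query producer($id: String!, $blockHash: Bytes!) {
    producer(id: $id, block: { hash: $blockHash }) {
      id
      active
      rewardCollector
    }
  }
`;

const run = async (): Promise<void> => {
  const { data } = await client.query({
    query: PRODUCER_QUERY,
    variables: {
      id: '0x...', // placeholder producer address
      blockHash: '0x...' // placeholder block hash
    }
  });

  console.log(data.producer);
};

run().catch(console.error);
```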
graph-node README: a Run section is added after the existing build and test steps.

@@ -36,3 +36,45 @@
 6. Run `yarn build:example` to build the wasm files.

 7. Run `yarn test`.
+
+## Run
+
+* Compare query results from two different GQL endpoints:
+
+  * In a config file (sample: `environments/compare-cli-config.toml`):
+
+    * Specify the two GQL endpoints in the endpoints config.
+
+    * Specify the query directory in the queries config, or pass it as an arg to the CLI.
+
+    * Example:
+
+      ```
+      [endpoints]
+      gqlEndpoint1 = "http://localhost:8000/subgraphs/name/example1"
+      gqlEndpoint2 = "http://localhost:3008/graphql"
+
+      [queries]
+      queryDir = "../graph-test-watcher/src/gql/queries"
+      ```
+
+  * Fire a query and get the diff of the results from the two GQL endpoints:
+
+    ```bash
+    yarn compare-entity --config-file <config-file-path> --query-dir [query-dir] --query-name <query-name> --block-hash <block-hash> --entity-id <entity-id> --raw-json [true | false]
+    ```
+
+    * `config-file` (alias: `cf`): Configuration file path (toml) (required).
+    * `query-dir` (alias: `qf`): Path to the queries directory (default: taken from the config file).
+    * `query-name` (alias: `q`): Query to be fired (required).
+    * `block-hash` (alias: `b`): Block hash (required).
+    * `entity-id` (alias: `i`): Entity id (required).
+    * `raw-json` (alias: `j`): Whether to print out a raw diff object (default: `false`).
+
+    Example:
+
+    ```bash
+    yarn compare-entity --config-file environments/compare-cli-config.toml --query-name exampleEntity --block-hash 0xceed7ee9d3de97c99db12e42433cae9115bb311c516558539fb7114fa17d545b --entity-id 0x2886bae64814bd959aec4282f86f3a97bf1e16e4111b39fd7bdd592b516c66c6
+    ```
+
+  * The program will exit with code `1` if the query results are not equal.
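Because the CLI signals a mismatch through its exit code, it can be wired into a script or CI step. A small sketch (not part of the commit) using Node's child_process, with placeholder block hash and entity id values:

```typescript
import { execFileSync } from 'child_process';

try {
  // Runs the compare-entity script added in this commit; execFileSync throws on a non-zero exit code.
  execFileSync('yarn', [
    'compare-entity',
    '--config-file', 'environments/compare-cli-config.toml',
    '--query-name', 'exampleEntity',
    '--block-hash', '0x...', // placeholder block hash
    '--entity-id', '0x...' // placeholder entity id
  ], { stdio: 'inherit' });

  console.log('Entity query results match');
} catch (err) {
  // compare-entity prints the diff and exits with code 1 when the results differ.
  console.error('Entity query results differ between the two endpoints');
  process.exitCode = 1;
}
```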
packages/graph-node/environments/compare-cli-config.toml (new file, 6 lines):

[endpoints]
gqlEndpoint1 = "http://localhost:8000/subgraphs/name/example1"
gqlEndpoint2 = "http://localhost:3008/graphql"

[queries]
queryDir = "../graph-test-watcher/src/gql/queries"
graph-node package.json:

@@ -32,13 +32,25 @@
     "asbuild": "yarn asbuild:debug && yarn asbuild:release",
     "test": "yarn asbuild:debug && mocha src/**/*.test.ts",
     "build:example": "cd test/subgraph/example1 && yarn && yarn build",
-    "watch": "DEBUG=vulcanize:* nodemon --watch src src/watcher.ts"
+    "watch": "DEBUG=vulcanize:* nodemon --watch src src/watcher.ts",
+    "compare-entity": "DEBUG=vulcanize:* ts-node src/cli/compare/compare-entity.ts"
   },
   "dependencies": {
+    "@apollo/client": "^3.3.19",
+    "@types/json-diff": "^0.5.2",
     "@vulcanize/assemblyscript": "0.0.1",
+    "@vulcanize/ipld-eth-client": "^0.1.0",
     "@vulcanize/util": "^0.1.0",
+    "debug": "^4.3.1",
+    "decimal.js": "^10.3.1",
+    "fs-extra": "^10.0.0",
     "js-yaml": "^4.1.0",
+    "json-bigint": "^1.0.0",
+    "json-diff": "^0.5.4",
+    "lodash": "^4.17.21",
+    "reflect-metadata": "^0.1.13",
+    "toml": "^3.0.0",
     "typeorm": "^0.2.32",
-    "decimal.js": "^10.3.1"
+    "yargs": "^17.0.1"
   }
 }
packages/graph-node/src/cli/compare/client.ts (new file, 38 lines):

//
// Copyright 2021 Vulcanize, Inc.
//

import assert from 'assert';
import fs from 'fs';
import path from 'path';

import { gql } from '@apollo/client/core';
import { GraphQLClient, GraphQLConfig } from '@vulcanize/ipld-eth-client';

export class Client {
  _config: GraphQLConfig;
  _graphqlClient: GraphQLClient;
  _queryDir: string;

  constructor (config: GraphQLConfig, queryDir: string) {
    this._config = config;
    this._queryDir = path.resolve(process.cwd(), queryDir);

    const { gqlEndpoint } = config;
    assert(gqlEndpoint, 'Missing gql endpoint');

    this._graphqlClient = new GraphQLClient(config);
  }

  async getEntity ({ blockHash, queryName, id }: { blockHash: string, queryName: string, id: string }): Promise<any> {
    const entityQuery = fs.readFileSync(path.resolve(this._queryDir, `${queryName}.gql`), 'utf8');

    return this._graphqlClient.query(
      gql(entityQuery),
      {
        id,
        blockHash
      }
    );
  }
}
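The Client class can also be used programmatically. The sketch below (not part of the commit) mirrors what compare-entity.ts does, with placeholder endpoints, entity id and block hash, and an assumed relative import path:

```typescript
import { diffString } from 'json-diff';

import { Client } from './client'; // assumed relative path to the class above

const main = async (): Promise<void> => {
  // Placeholder endpoints and query directory; these mirror the sample config file.
  const queryDir = '../graph-test-watcher/src/gql/queries';
  const client1 = new Client({ gqlEndpoint: 'http://localhost:8000/subgraphs/name/example1' }, queryDir);
  const client2 = new Client({ gqlEndpoint: 'http://localhost:3008/graphql' }, queryDir);

  const params = {
    queryName: 'exampleEntity',
    id: '0x...', // placeholder entity id
    blockHash: '0x...' // placeholder block hash
  };

  const [result1, result2] = await Promise.all([
    client1.getEntity(params),
    client2.getEntity(params)
  ]);

  // A non-empty (colorized) diff means the two endpoints disagree for this entity at this block.
  const resultDiff = diffString(result1, result2);

  if (resultDiff) {
    console.log(resultDiff);
    process.exit(1);
  }
};

main().catch(console.error);
```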
packages/graph-node/src/cli/compare/compare-entity.ts (new file, 150 lines):

//
// Copyright 2021 Vulcanize, Inc.
//

import yargs from 'yargs';
import 'reflect-metadata';
import path from 'path';
import toml from 'toml';
import fs from 'fs-extra';
import assert from 'assert';
import util from 'util';
import { diffString, diff } from 'json-diff';

import { Client } from './client';

interface EndpointConfig {
  gqlEndpoint1: string;
  gqlEndpoint2: string;
}

interface QueryConfig {
  queryDir: string;
}

interface Config {
  endpoints: EndpointConfig;
  queries: QueryConfig;
}

const main = async (): Promise<void> => {
  const argv = await yargs.parserConfiguration({
    'parse-numbers': false
  }).options({
    configFile: {
      alias: 'cf',
      type: 'string',
      demandOption: true,
      describe: 'Configuration file path (toml)'
    },
    queryDir: {
      alias: 'qf',
      type: 'string',
      describe: 'Path to queries directory'
    },
    blockHash: {
      alias: 'b',
      type: 'string',
      demandOption: true,
      describe: 'Blockhash'
    },
    queryName: {
      alias: 'q',
      type: 'string',
      demandOption: true,
      describe: 'Query name'
    },
    entityId: {
      alias: 'i',
      type: 'string',
      demandOption: true,
      describe: 'Id of the entity to be queried'
    },
    rawJson: {
      alias: 'j',
      type: 'boolean',
      describe: 'Whether to print out raw diff object',
      default: false
    }
  }).argv;

  const config: Config = await getConfig(argv.configFile);

  const { client1, client2 } = await getClients(config, argv.queryDir);

  const queryName = argv.queryName;
  const id = argv.entityId;
  const blockHash = argv.blockHash;

  const result1 = await client1.getEntity({ blockHash, queryName, id });
  const result2 = await client2.getEntity({ blockHash, queryName, id });

  // Getting the diff of two result objects.
  let resultDiff;

  if (argv.rawJson) {
    resultDiff = diff(result1, result2);

    if (resultDiff) {
      // Use util.inspect to extend depth limit in the output.
      resultDiff = util.inspect(diff(result1, result2), false, null);
    }
  } else {
    resultDiff = diffString(result1, result2);
  }

  if (resultDiff) {
    console.log(resultDiff);
    process.exit(1);
  }
};

async function getConfig (configFile: string): Promise<Config> {
  const configFilePath = path.resolve(configFile);
  const fileExists = await fs.pathExists(configFilePath);
  if (!fileExists) {
    throw new Error(`Config file not found: ${configFilePath}`);
  }

  const config = toml.parse(await fs.readFile(configFilePath, 'utf8'));

  return config;
}

async function getClients (config: Config, queryDir?: string): Promise<{
  client1: Client,
  client2: Client
}> {
  assert(config.endpoints, 'Missing endpoints config');

  const gqlEndpoint1 = config.endpoints.gqlEndpoint1;
  const gqlEndpoint2 = config.endpoints.gqlEndpoint2;

  assert(gqlEndpoint1, 'Missing endpoint one');
  assert(gqlEndpoint2, 'Missing endpoint two');

  if (!queryDir) {
    assert(config.queries, 'Missing queries config');
    queryDir = config.queries.queryDir;
  }

  assert(queryDir, 'Query directory not provided');

  const client1 = new Client({
    gqlEndpoint: gqlEndpoint1
  }, queryDir);

  const client2 = new Client({
    gqlEndpoint: gqlEndpoint2
  }, queryDir);

  return {
    client1,
    client2
  };
}

main().catch(err => {
  console.log(err);
}).finally(() => {
  process.exit(0);
});
graph-node Database:

@@ -40,7 +40,8 @@ export class Database {
     return this._baseDatabase.close();
   }

-  async getEntity<Entity> (entity: (new () => Entity) | string, id: string, blockHash: string): Promise<Entity | undefined> {
+  async getEntity<Entity> (entity: (new () => Entity) | string, id: string, blockHash?: string): Promise<Entity | undefined> {
+    // TODO: Take block number as an optional argument
     const queryRunner = this._conn.createQueryRunner();

     try {
@@ -73,33 +74,36 @@ export class Database {
     }
   }

-  async getEntityWithRelations<Entity> (entity: (new () => Entity) | string, id: string, blockHash: string, relations: { [key: string]: any }): Promise<Entity | undefined> {
+  async getEntityWithRelations<Entity> (entity: (new () => Entity) | string, id: string, relations: { [key: string]: any }, blockHash?: string): Promise<Entity | undefined> {
     const queryRunner = this._conn.createQueryRunner();

     try {
       const repo = queryRunner.manager.getRepository(entity);

-      // Fetching blockHash for previous entity in frothy region.
-      const { blockHash: entityblockHash, blockNumber, id: frothyId } = await this._baseDatabase.getFrothyEntity(queryRunner, repo, { blockHash, id });
-
       let selectQueryBuilder = repo.createQueryBuilder('entity');

-      if (frothyId) {
-        // If entity found in frothy region.
-        selectQueryBuilder = selectQueryBuilder.where('entity.block_hash = :entityblockHash', { entityblockHash });
-      } else {
-        // If entity not in frothy region.
-        const canonicalBlockNumber = blockNumber + 1;
-
-        selectQueryBuilder = selectQueryBuilder.innerJoinAndSelect('block_progress', 'block', 'block.block_hash = entity.block_hash')
-          .where('block.is_pruned = false')
-          .andWhere('entity.block_number <= :canonicalBlockNumber', { canonicalBlockNumber })
-          .orderBy('entity.block_number', 'DESC')
-          .limit(1);
-      }
-
-      selectQueryBuilder = selectQueryBuilder.andWhere('entity.id = :id', { id });
+      selectQueryBuilder = selectQueryBuilder.where('entity.id = :id', { id })
+        .orderBy('entity.block_number', 'DESC')
+        .limit(1);
+
+      // Use blockHash if provided.
+      if (blockHash) {
+        // Fetching blockHash for previous entity in frothy region.
+        const { blockHash: entityblockHash, blockNumber, id: frothyId } = await this._baseDatabase.getFrothyEntity(queryRunner, repo, { blockHash, id });
+
+        if (frothyId) {
+          // If entity found in frothy region.
+          selectQueryBuilder = selectQueryBuilder.andWhere('entity.block_hash = :entityblockHash', { entityblockHash });
+        } else {
+          // If entity not in frothy region.
+          const canonicalBlockNumber = blockNumber + 1;
+
+          selectQueryBuilder = selectQueryBuilder.innerJoinAndSelect('block_progress', 'block', 'block.block_hash = entity.block_hash')
+            .andWhere('block.is_pruned = false')
+            .andWhere('entity.block_number <= :canonicalBlockNumber', { canonicalBlockNumber });
+        }
+      }

       // TODO: Implement query for nested relations.
       Object.entries(relations).forEach(([field, data], index) => {
         const { entity: relatedEntity, isArray } = data;
graph-node GraphWatcher:

@@ -177,9 +177,9 @@ export class GraphWatcher {
     this._indexer = indexer;
   }

-  async getEntity<Entity> (entity: new () => Entity, id: string, blockHash: string, relations: { [key: string]: any }): Promise<any> {
+  async getEntity<Entity> (entity: new () => Entity, id: string, relations: { [key: string]: any }, blockHash?: string): Promise<any> {
     // Get entity from the database.
-    const result = await this._database.getEntityWithRelations(entity, id, blockHash, relations) as any;
+    const result = await this._database.getEntityWithRelations(entity, id, relations, blockHash) as any;

     // Resolve any field name conflicts in the entity result.
     return resolveEntityFieldConflicts(result);
graph-test-watcher example-schema queries:

@@ -1,8 +1,21 @@
-query exampleEntity($id: String!, $blockHash: String!){
-  exampleEntity(id: $id, blockHash: $blockHash){
+query exampleEntity($id: String!, $blockHash: Bytes!){
+  exampleEntity(id: $id, block: { hash: $blockHash }){
     id
     count
-    param1
-    param2
+    paramString
+    paramInt
+    paramBoolean
+    paramBytes
+    paramEnum
+    paramBigDecimal
+    related {
+      id
+      paramBigInt
+      bigIntArray
+    }
+    manyRelated {
+      id
+      count
+    }
   }
 }

New manyRelatedEntity query (new file, 6 lines):

query manyRelatedEntity($id: String!, $blockHash: Bytes!){
  manyRelatedEntity(id: $id, block: { hash: $blockHash }){
    id
    count
  }
}

New relatedEntity query (new file, 7 lines):

query relatedEntity($id: String!, $blockHash: Bytes!){
  relatedEntity(id: $id, block: { hash: $blockHash }){
    id
    paramBigInt
    bigIntArray
  }
}
graph-test-watcher Indexer:

@@ -544,10 +544,10 @@ export class Indexer implements IndexerInterface {
     return (ipfsAddr !== undefined && ipfsAddr !== null && ipfsAddr !== '');
   }

-  async getSubgraphEntity<Entity> (entity: new () => Entity, id: string, blockHash: string): Promise<Entity | undefined> {
+  async getSubgraphEntity<Entity> (entity: new () => Entity, id: string, blockHash?: string): Promise<Entity | undefined> {
     const relations = this._relationsMap.get(entity) || {};

-    const data = await this._graphWatcher.getEntity(entity, id, blockHash, relations);
+    const data = await this._graphWatcher.getEntity(entity, id, relations, blockHash);

     return data;
   }
graph-test-watcher resolvers:

@@ -6,7 +6,7 @@ import assert from 'assert';
 import BigInt from 'apollo-type-bigint';
 import debug from 'debug';

-import { ValueResult } from '@vulcanize/util';
+import { ValueResult, BlockHeight } from '@vulcanize/util';

 import { Indexer } from './indexer';
 import { EventWatcher } from './events';
@@ -57,22 +57,22 @@ export const createResolvers = async (indexer: Indexer, eventWatcher: EventWatch
       return indexer._test(blockHash, contractAddress);
     },

-    exampleEntity: async (_: any, { id, blockHash }: { id: string, blockHash: string }): Promise<ExampleEntity | undefined> => {
-      log('exampleEntity', id, blockHash);
-
-      return indexer.getSubgraphEntity(ExampleEntity, id, blockHash);
+    relatedEntity: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }): Promise<RelatedEntity | undefined> => {
+      log('relatedEntity', id, block);
+
+      return indexer.getSubgraphEntity(RelatedEntity, id, block.hash);
     },

-    relatedEntity: async (_: any, { id, blockHash }: { id: string, blockHash: string }): Promise<RelatedEntity | undefined> => {
-      log('relatedEntity', id, blockHash);
-
-      return indexer.getSubgraphEntity(RelatedEntity, id, blockHash);
+    manyRelatedEntity: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }): Promise<ManyRelatedEntity | undefined> => {
+      log('relatedEntity', id, block);
+
+      return indexer.getSubgraphEntity(ManyRelatedEntity, id, block.hash);
     },

-    manyRelatedEntity: async (_: any, { id, blockHash }: { id: string, blockHash: string }): Promise<ManyRelatedEntity | undefined> => {
-      log('relatedEntity', id, blockHash);
-
-      return indexer.getSubgraphEntity(ManyRelatedEntity, id, blockHash);
+    exampleEntity: async (_: any, { id, block = {} }: { id: string, block: BlockHeight }): Promise<ExampleEntity | undefined> => {
+      log('exampleEntity', id, block);
+
+      return indexer.getSubgraphEntity(ExampleEntity, id, block.hash);
     },

     events: async (_: any, { blockHash, contractAddress, name }: { blockHash: string, contractAddress: string, name?: string }) => {
graph-test-watcher schema:

@@ -4,6 +4,11 @@ scalar Bytes

 scalar BigDecimal

+input Block_height {
+  hash: Bytes
+  number: Int
+}
+
 type Proof {
   data: String!
 }
@@ -72,9 +77,9 @@ type Query {
   eventsInRange(fromBlockNumber: Int!, toBlockNumber: Int!): [ResultEvent!]
   getMethod(blockHash: String!, contractAddress: String!): ResultString!
   _test(blockHash: String!, contractAddress: String!): ResultBigInt!
-  relatedEntity(id: String!, blockHash: String!): RelatedEntity!
-  exampleEntity(id: String!, blockHash: String!): ExampleEntity!
-  manyRelatedEntity(id: String!, blockHash: String!): ManyRelatedEntity!
+  relatedEntity(id: String!, block: Block_height): RelatedEntity!
+  exampleEntity(id: String!, block: Block_height): ExampleEntity!
+  manyRelatedEntity(id: String!, block: Block_height): ManyRelatedEntity!
   getStateByCID(cid: String!): ResultIPLDBlock
   getState(blockHash: String!, contractAddress: String!, kind: String): ResultIPLDBlock
 }
yarn.lock (44 lines changed): new resolution entries are added for the CLI's diff dependencies: @types/json-diff@0.5.2, json-diff@0.5.4 (depending on cli-color ~0.1.6, difflib ~0.2.1 and dreamopt ~0.6.0), cli-color@0.1.7 (depending on es5-ext 0.8.x), difflib@0.2.4 (depending on heap >= 0.2.0), dreamopt@0.6.0 (depending on wordwrap >= 0.0.2) and es5-ext@0.8.2. The existing heap@0.2.6 and wordwrap@1.0.0 entries are widened to also satisfy the "heap@>= 0.2.0" and "wordwrap@>=0.0.2" ranges.