Tracing job queue (#89)

* eth-client API to get transaction in block.

* Job queue to process tracing requests.

* Request traces from job queue runner.

* Max trace completion lag time config for downstream events.
This commit is contained in:
Ashwin Phatak 2021-06-25 16:35:47 +05:30 committed by GitHub
parent 27b732fef7
commit 07805b6ae9
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 330 additions and 82 deletions

View File

@ -0,0 +1,19 @@
# Address Watcher
## Setup
Enable the `pgcrypto` extension on the job queue database (pg-boss requires it — see https://github.com/timgit/pg-boss/blob/master/docs/usage.md#intro).
Example:
```
postgres@tesla:~$ psql -U postgres -h localhost job-queue
Password for user postgres:
psql (12.7 (Ubuntu 12.7-1.pgdg18.04+1))
SSL connection (protocol: TLSv1.3, cipher: TLS_AES_256_GCM_SHA384, bits: 256, compression: off)
Type "help" for help.
job-queue=# CREATE EXTENSION pgcrypto;
CREATE EXTENSION
job-queue=# exit
```

View File

@ -22,7 +22,7 @@
subscribersDir = "src/subscriber"
[upstream]
gqlEndpoint = "http://127.0.0.1:8083/graphql"
gqlEndpoint = "http://127.0.0.1:5000/graphql"
gqlSubscriptionEndpoint = "http://127.0.0.1:5000/graphql"
traceProviderEndpoint = "http://127.0.0.1:8545"
@ -30,3 +30,7 @@
name = "requests"
enabled = false
deleteOnStart = false
[jobQueue]
dbConnectionString = "postgres://postgres:postgres@localhost/job-queue"
maxCompletionLag = 300

View File

@ -5,6 +5,7 @@
"private": true,
"scripts": {
"server": "DEBUG=vulcanize:* nodemon src/server.ts -f environments/local.toml",
"job-runner": "DEBUG=vulcanize:* nodemon src/job-runner.ts -f environments/local.toml",
"test": "mocha -r ts-node/register src/**/*.spec.ts",
"lint": "eslint .",
"build": "tsc"
@ -39,6 +40,7 @@
"json-bigint": "^1.0.0",
"lodash": "^4.17.21",
"pg": "^8.6.0",
"pg-boss": "^6.1.0",
"reflect-metadata": "^0.1.13",
"toml": "^3.0.0",
"typeorm": "^0.2.32",

View File

@ -20,6 +20,10 @@ export interface Config {
traceProviderEndpoint: string;
cache: CacheConfig
}
jobQueue: {
dbConnectionString: string;
maxCompletionLag: number;
}
}
export const getConfig = async (configFile: string): Promise<Config> => {

View File

@ -1,7 +1,6 @@
import assert from 'assert';
import debug from 'debug';
import { ethers } from 'ethers';
import { PubSub } from 'apollo-server-express';
import { EthClient } from '@vulcanize/ipld-eth-client';
import { GetStorageAt } from '@vulcanize/solidity-mapper';
@ -14,32 +13,23 @@ import { Account } from './entity/Account';
const log = debug('vulcanize:indexer');
const AddressEvent = 'address_event';
export class Indexer {
_db: Database
_ethClient: EthClient
_pubsub: PubSub
_getStorageAt: GetStorageAt
_tracingClient: TracingClient
constructor (db: Database, ethClient: EthClient, pubsub: PubSub, tracingClient: TracingClient) {
constructor (db: Database, ethClient: EthClient, tracingClient: TracingClient) {
assert(db);
assert(ethClient);
assert(pubsub);
assert(tracingClient);
this._db = db;
this._ethClient = ethClient;
this._pubsub = pubsub;
this._getStorageAt = this._ethClient.getStorageAt.bind(this._ethClient);
this._tracingClient = tracingClient;
}
getAddressEventIterator (): AsyncIterator<any> {
return this._pubsub.asyncIterator([AddressEvent]);
}
async isWatchedAddress (address : string): Promise<boolean> {
assert(address);
@ -57,34 +47,6 @@ export class Indexer {
return this._db.getTrace(txHash);
}
async publishAddressEventToSubscribers (txHash: string): Promise<void> {
const traceObj = await this._db.getTrace(txHash);
if (!traceObj) {
return;
}
const { blockNumber, blockHash, trace } = traceObj;
for (let i = 0; i < traceObj.accounts.length; i++) {
const account = traceObj.accounts[i];
log(`pushing tx ${txHash} event to GQL subscribers for address ${account.address}`);
// Publishing the event here will result in pushing the payload to GQL subscribers for `onAddressEvent(address)`.
await this._pubsub.publish(AddressEvent, {
onAddressEvent: {
address: account.address,
txTrace: {
txHash,
blockHash,
blockNumber,
trace
}
}
});
}
}
async traceTxAndIndexAppearances (txHash: string): Promise<Trace> {
let entity = await this._db.getTrace(txHash);
if (entity) {
@ -105,7 +67,7 @@ export class Indexer {
entity = await this._db.getTrace(txHash);
assert(entity);
await this.indexAppearances(entity);
await this._indexAppearances(entity);
}
return entity;
@ -115,7 +77,7 @@ export class Indexer {
return this._db.getAppearances(address, fromBlockNumber, toBlockNumber);
}
async indexAppearances (trace: Trace): Promise<Trace> {
async _indexAppearances (trace: Trace): Promise<Trace> {
const traceObj = JSON.parse(trace.trace);
// TODO: Check if tx has failed?

View File

@ -0,0 +1,56 @@
import assert from 'assert';
import debug from 'debug';
import PgBoss from 'pg-boss';
// Settings for the pg-boss backed job queue.
interface Config {
  dbConnectionString: string
  // Max acceptable lag (seconds) between job creation and completion;
  // consumers (see tx-watcher) skip broadcasting results older than this.
  maxCompletionLag: number
}

// Handler invoked with the raw pg-boss job payload.
type JobCallback = (job: any) => Promise<void>;

const log = debug('vulcanize:job-queue');
/**
 * Thin wrapper over pg-boss providing a typed job queue backed by Postgres.
 * Requires the `pgcrypto` extension on the configured database.
 */
export class JobQueue {
  _config: Config;
  _boss: PgBoss;

  constructor (config: Config) {
    this._config = config;

    // onComplete: true makes pg-boss emit completion jobs, which the
    // onComplete() handlers registered below rely on.
    this._boss = new PgBoss({ connectionString: this._config.dbConnectionString, onComplete: true });
    this._boss.on('error', error => log(error));
  }

  // Max acceptable job-completion lag in seconds (from config).
  get maxCompletionLag (): number {
    return this._config.maxCompletionLag;
  }

  async start (): Promise<void> {
    await this._boss.start();
  }

  // Register a worker callback that processes jobs from the named queue.
  async subscribe (queue: string, callback: JobCallback): Promise<string> {
    return await this._boss.subscribe(queue, async (job: any) => {
      log(`Processing queue ${queue} job ${job.id}...`);
      await callback(job);
    });
  }

  // Register a handler that fires after a job in the named queue completes.
  async onComplete (queue: string, callback: JobCallback): Promise<string> {
    return await this._boss.onComplete(queue, async (job: any) => {
      log(`Job complete for queue ${queue} job ${job.id}...`);
      await callback(job);
    });
  }

  async markComplete (job: any): Promise<void> {
    // Fix: complete() returns a promise; previously it was fired and
    // forgotten, so failures were silently dropped and callers could
    // proceed before the job was actually marked complete.
    await this._boss.complete(job.id);
  }

  // Enqueue a job; pg-boss assigns and returns the job id (logged here).
  async pushJob (queue: string, job: any): Promise<void> {
    assert(this._boss);

    const jobId = await this._boss.publish(queue, job);
    log(`Created job in queue ${queue}: ${jobId}`);
  }
}

View File

@ -0,0 +1,73 @@
import assert from 'assert';
import 'reflect-metadata';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import debug from 'debug';
import { getCache } from '@vulcanize/cache';
import { EthClient } from '@vulcanize/ipld-eth-client';
import { TracingClient } from '@vulcanize/tracing-client';
import { Indexer } from './indexer';
import { Database } from './database';
import { getConfig } from './config';
import { JobQueue } from './job-queue';
import { QUEUE_TX_TRACING } from './tx-watcher';
// Fix: use a namespace specific to this process — 'vulcanize:server' was
// copied from server.ts and made job-runner logs indistinguishable from
// the server's when filtering with DEBUG=vulcanize:*.
const log = debug('vulcanize:job-runner');
/**
 * Job-runner entry point: wires config, DB, upstream clients and the job
 * queue, then processes tx-tracing jobs until the process exits.
 */
export const main = async (): Promise<any> => {
  // -f/--config-file points at the TOML configuration file.
  const argv = await yargs(hideBin(process.argv))
    .option('f', {
      alias: 'config-file',
      demandOption: true,
      describe: 'configuration file path (toml)',
      type: 'string'
    })
    .argv;

  const config = await getConfig(argv.f);

  assert(config.server, 'Missing server config');

  const { upstream, database: dbConfig, jobQueue: jobQueueConfig } = config;

  assert(dbConfig, 'Missing database config');

  // Watcher database — must be initialized before the indexer uses it.
  const db = new Database(dbConfig);
  await db.init();

  assert(upstream, 'Missing upstream config');
  const { gqlEndpoint, gqlSubscriptionEndpoint, traceProviderEndpoint, cache: cacheConfig } = upstream;
  assert(gqlEndpoint, 'Missing upstream gqlEndpoint');
  assert(gqlSubscriptionEndpoint, 'Missing upstream gqlSubscriptionEndpoint');
  assert(traceProviderEndpoint, 'Missing upstream traceProviderEndpoint');

  const cache = await getCache(cacheConfig);
  const ethClient = new EthClient({ gqlEndpoint, gqlSubscriptionEndpoint, cache });

  const tracingClient = new TracingClient(traceProviderEndpoint);

  const indexer = new Indexer(db, ethClient, tracingClient);

  assert(jobQueueConfig, 'Missing job queue config');
  const { dbConnectionString, maxCompletionLag } = jobQueueConfig;
  assert(dbConnectionString, 'Missing job queue db connection string');

  // Start pg-boss before subscribing so the queue connection is live.
  const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag });
  await jobQueue.start();

  // Worker loop: trace each requested tx and index address appearances,
  // then mark the job complete so onComplete subscribers (the server's
  // tx-watcher) are notified.
  await jobQueue.subscribe(QUEUE_TX_TRACING, async (job) => {
    const { data: { txHash } } = job;
    await indexer.traceTxAndIndexAppearances(txHash);
    await jobQueue.markComplete(job);
  });
};
main().then(() => {
  // NOTE(review): this fires after setup has already completed, so the
  // "Starting" message is slightly late — consider logging before main().
  log('Starting job runner...');
}).catch(err => {
  // NOTE(review): errors are logged but the process is left running with
  // no active subscription — confirm whether it should exit non-zero here.
  log(err);
});

View File

@ -1,9 +1,9 @@
import assert from 'assert';
import debug from 'debug';
import { withFilter } from 'apollo-server-express';
import { ethers } from 'ethers';
import { Indexer } from './indexer';
import { TxWatcher } from './tx-watcher';
const log = debug('vulcanize:resolver');
@ -18,14 +18,12 @@ interface AppearanceParams {
toBlockNumber: number
}
export const createResolvers = async (indexer: Indexer): Promise<any> => {
assert(indexer);
export const createResolvers = async (indexer: Indexer, txWatcher: TxWatcher): Promise<any> => {
return {
Subscription: {
onAddressEvent: {
subscribe: withFilter(
() => indexer.getAddressEventIterator(),
() => txWatcher.getAddressEventIterator(),
(payload: any, variables: any) => {
return payload.onAddressEvent.address === ethers.utils.getAddress(variables.address);
}

View File

@ -5,7 +5,6 @@ import { ApolloServer, PubSub } from 'apollo-server-express';
import yargs from 'yargs';
import { hideBin } from 'yargs/helpers';
import debug from 'debug';
import 'graphql-import-node';
import { createServer } from 'http';
import { getCache } from '@vulcanize/cache';
@ -19,6 +18,7 @@ import { Indexer } from './indexer';
import { Database } from './database';
import { getConfig } from './config';
import { TxWatcher } from './tx-watcher';
import { JobQueue } from './job-queue';
const log = debug('vulcanize:server');
@ -38,7 +38,7 @@ export const main = async (): Promise<any> => {
const { host, port } = config.server;
const { upstream, database: dbConfig } = config;
const { upstream, database: dbConfig, jobQueue: jobQueueConfig } = config;
assert(dbConfig, 'Missing database config');
@ -57,15 +57,24 @@ export const main = async (): Promise<any> => {
const tracingClient = new TracingClient(traceProviderEndpoint);
const indexer = new Indexer(db, ethClient, tracingClient);
assert(jobQueueConfig, 'Missing job queue config');
const { dbConnectionString, maxCompletionLag } = jobQueueConfig;
assert(dbConnectionString, 'Missing job queue db connection string');
assert(dbConnectionString, 'Missing job queue max completion lag time (seconds)');
const jobQueue = new JobQueue({ dbConnectionString, maxCompletionLag });
await jobQueue.start();
// Note: In-memory pubsub works fine for now, as each watcher is a single process anyway.
// Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries
const pubsub = new PubSub();
const indexer = new Indexer(db, ethClient, pubsub, tracingClient);
const txWatcher = new TxWatcher(ethClient, indexer);
const txWatcher = new TxWatcher(ethClient, indexer, pubsub, jobQueue);
await txWatcher.start();
const resolvers = await createResolvers(indexer);
const resolvers = await createResolvers(indexer, txWatcher);
const app: Application = express();
const server = new ApolloServer({

View File

@ -1,44 +1,93 @@
import assert from 'assert';
import debug from 'debug';
import _ from 'lodash';
import { PubSub } from 'apollo-server-express';
import { EthClient } from '@vulcanize/ipld-eth-client';
import { Indexer } from './indexer';
import { JobQueue } from './job-queue';
const log = debug('vulcanize:tx-watcher');
export const AddressEvent = 'address-event';
export const QUEUE_TX_TRACING = 'tx-tracing';
export class TxWatcher {
_ethClient: EthClient
_indexer: Indexer
_subscription: ZenObservable.Subscription | undefined
constructor (ethClient: EthClient, indexer: Indexer) {
assert(ethClient);
assert(indexer);
_pubsub: PubSub
_watchTxSubscription: ZenObservable.Subscription | undefined
_jobQueue: JobQueue
constructor (ethClient: EthClient, indexer: Indexer, pubsub: PubSub, jobQueue: JobQueue) {
this._ethClient = ethClient;
this._indexer = indexer;
this._pubsub = pubsub;
this._jobQueue = jobQueue;
}
getAddressEventIterator (): AsyncIterator<any> {
return this._pubsub.asyncIterator([AddressEvent]);
}
async start (): Promise<void> {
assert(!this._subscription, 'subscription already started');
assert(!this._watchTxSubscription, 'subscription already started');
log('Started watching upstream tx...');
this._subscription = await this._ethClient.watchTransactions(async (value) => {
this._jobQueue.onComplete(QUEUE_TX_TRACING, async (job) => {
const { data: { request, failed, state, createdOn } } = job;
const timeElapsedInSeconds = (Date.now() - Date.parse(createdOn)) / 1000;
if (!failed && state === 'completed') {
// Check for max acceptable lag time between tracing request and sending results to live subscribers.
if (timeElapsedInSeconds <= this._jobQueue.maxCompletionLag) {
return await this.publishAddressEventToSubscribers(request.data.txHash, timeElapsedInSeconds);
} else {
log(`tx ${request.data.txHash} is too old (${timeElapsedInSeconds}s), not broadcasting to live subscribers`);
}
}
});
this._watchTxSubscription = await this._ethClient.watchTransactions(async (value) => {
const { txHash, ethHeaderCidByHeaderId: { blockHash, blockNumber } } = _.get(value, 'data.listen.relatedNode');
log('watchTransaction', JSON.stringify({ txHash, blockHash, blockNumber }, null, 2));
await this._indexer.traceTxAndIndexAppearances(txHash);
await this._indexer.publishAddressEventToSubscribers(txHash);
this._jobQueue.pushJob(QUEUE_TX_TRACING, { txHash });
});
}
async publishAddressEventToSubscribers (txHash: string, timeElapsedInSeconds: number): Promise<void> {
const traceObj = await this._indexer.getTrace(txHash);
if (!traceObj) {
return;
}
const { blockNumber, blockHash, trace } = traceObj;
for (let i = 0; i < traceObj.accounts.length; i++) {
const account = traceObj.accounts[i];
log(`publishing trace for ${txHash} (${timeElapsedInSeconds}s elapsed) to GQL subscribers for address ${account.address}`);
// Publishing the event here will result in pushing the payload to GQL subscribers for `onAddressEvent(address)`.
await this._pubsub.publish(AddressEvent, {
onAddressEvent: {
address: account.address,
txTrace: {
txHash,
blockHash,
blockNumber,
trace
}
}
});
}
}
async stop (): Promise<void> {
if (this._subscription) {
if (this._watchTxSubscription) {
log('Stopped watching upstream tx');
this._subscription.unsubscribe();
this._watchTxSubscription.unsubscribe();
}
}
}

View File

@ -106,6 +106,12 @@ export class EthClient {
};
}
/**
 * Fetch a block's header CIDs plus the tx CIDs/hashes it contains from the
 * upstream GraphQL endpoint. Queried directly (not via the cache layer,
 * unlike getLogs).
 */
async getBlockWithTransactions (blockNumber: string): Promise<any> {
  const variables = { blockNumber };
  const response = await this._client.query({ query: ethQueries.getBlockWithTransactions, variables });
  return response.data;
}
async getLogs (vars: Vars): Promise<any> {
const result = await this._getCachedOrFetch('getLogs', vars);
const { getLogs: logs } = result;

View File

@ -24,6 +24,24 @@ query getLogs($blockHash: Bytes32!, $contract: Address!) {
}
`;
// Header CIDs for a given block number, with the tx CIDs/hashes nested
// under each header (consumed by EthClient.getBlockWithTransactions).
export const getBlockWithTransactions = gql`
query allEthHeaderCids($blockNumber: BigInt) {
allEthHeaderCids(condition: { blockNumber: $blockNumber }) {
nodes {
cid
blockNumber
blockHash
ethTransactionCidsByHeaderId {
nodes {
cid
txHash
}
}
}
}
}
`;
export const subscribeLogs = gql`
subscription SubscriptionReceipt {
listen(topic: "receipt_cids") {
@ -66,6 +84,7 @@ subscription SubscriptionHeader {
export default {
getStorageAt,
getLogs,
getBlockWithTransactions,
subscribeLogs,
subscribeTransactions
};

View File

@ -54,11 +54,13 @@
// step is invoked for every opcode that the VM executes.
step: function(log, db) {
var topOfStack = log.stack.peek(0).toString(16);
var result = this.isAddress(log, db, topOfStack);
if (log.stack.length()) {
var topOfStack = log.stack.peek(0).toString(16);
var result = this.isAddress(log, db, topOfStack);
if (result.isAddress) {
this.data[result.address] = result.confidence;
if (result.isAddress) {
this.data[result.address] = result.confidence;
}
}
},

View File

@ -240,19 +240,21 @@
this.callstack[left-1].calls.push(call);
}
var topOfStack = log.stack.peek(0).toString(16);
var result = this.isAddress(log, db, topOfStack);
if (result.isAddress) {
var call = this.callstack[this.callstack.length - 1];
if (!call.addresses) {
call.addresses = {};
}
if (log.stack.length()) {
var topOfStack = log.stack.peek(0).toString(16);
var result = this.isAddress(log, db, topOfStack);
if (result.isAddress) {
var call = this.callstack[this.callstack.length - 1];
if (!call.addresses) {
call.addresses = {};
}
if (!call.addresses[result.address]) {
call.addresses[result.address] = { confidence: result.confidence, opcodes: [] };
}
if (!call.addresses[result.address]) {
call.addresses[result.address] = { confidence: result.confidence, opcodes: [] };
}
call.addresses[result.address].opcodes.push(this.prevStepOp);
call.addresses[result.address].opcodes.push(this.prevStepOp);
}
}
this.prevStepOp = log.op.toString();

View File

@ -4832,6 +4832,14 @@ create-require@^1.1.0:
resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333"
integrity sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==
cron-parser@^3.3.0:
version "3.5.0"
resolved "https://registry.yarnpkg.com/cron-parser/-/cron-parser-3.5.0.tgz#b1a9da9514c0310aa7ef99c2f3f1d0f8c235257c"
integrity sha512-wyVZtbRs6qDfFd8ap457w3XVntdvqcwBGxBoTvJQH9KGVKL/fB+h2k3C8AqiVxvUQKN1Ps/Ns46CNViOpVDhfQ==
dependencies:
is-nan "^1.3.2"
luxon "^1.26.0"
cross-fetch@^2.1.0, cross-fetch@^2.1.1:
version "2.2.3"
resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-2.2.3.tgz#e8a0b3c54598136e037f8650f8e823ccdfac198e"
@ -5100,6 +5108,11 @@ defined@~1.0.0:
resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693"
integrity sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM=
delay@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/delay/-/delay-5.0.0.tgz#137045ef1b96e5071060dd5be60bf9334436bd1d"
integrity sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw==
delayed-stream@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
@ -7802,6 +7815,14 @@ is-lambda@^1.0.1:
resolved "https://registry.yarnpkg.com/is-lambda/-/is-lambda-1.0.1.tgz#3d9877899e6a53efc0160504cde15f82e6f061d5"
integrity sha1-PZh3iZ5qU+/AFgUEzeFfgubwYdU=
is-nan@^1.3.2:
version "1.3.2"
resolved "https://registry.yarnpkg.com/is-nan/-/is-nan-1.3.2.tgz#043a54adea31748b55b6cd4e09aadafa69bd9e1d"
integrity sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==
dependencies:
call-bind "^1.0.0"
define-properties "^1.1.3"
is-negative-zero@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.1.tgz#3de746c18dda2319241a53675908d8f766f11c24"
@ -8680,6 +8701,11 @@ lodash.clonedeep@^4.5.0:
resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef"
integrity sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=
lodash.debounce@^4.0.8:
version "4.0.8"
resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af"
integrity sha1-gteb/zCmfEAF/9XiUVMArZyk168=
lodash.ismatch@^4.4.0:
version "4.4.0"
resolved "https://registry.yarnpkg.com/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz#756cb5150ca3ba6f11085a78849645f188f85f37"
@ -8827,6 +8853,11 @@ ltgt@~2.1.1:
resolved "https://registry.yarnpkg.com/ltgt/-/ltgt-2.1.3.tgz#10851a06d9964b971178441c23c9e52698eece34"
integrity sha1-EIUaBtmWS5cReEQcI8nlJpjuzjQ=
luxon@^1.26.0:
version "1.27.0"
resolved "https://registry.yarnpkg.com/luxon/-/luxon-1.27.0.tgz#ae10c69113d85dab8f15f5e8390d0cbeddf4f00f"
integrity sha512-VKsFsPggTA0DvnxtJdiExAucKdAnwbCCNlMM5ENvHlxubqWd0xhZcdb4XgZ7QFNhaRhilXCFxHuoObP5BNA4PA==
make-dir@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5"
@ -10365,6 +10396,18 @@ performance-now@^2.1.0:
resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b"
integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=
pg-boss@^6.1.0:
version "6.1.0"
resolved "https://registry.yarnpkg.com/pg-boss/-/pg-boss-6.1.0.tgz#5f113ecfe29abb016504c00d2ced763a8550d705"
integrity sha512-U1wJj4J9tORQUveFSQC+5P2oNAgb6pj/YYr1isoCP+5oYXGrUYBB1PmF+8xCtNn7tEbacGMkTsDMTABbUCbdAA==
dependencies:
cron-parser "^3.3.0"
delay "^5.0.0"
lodash.debounce "^4.0.8"
p-map "^4.0.0"
pg "^8.5.1"
uuid "^8.3.2"
pg-connection-string@^2.5.0:
version "2.5.0"
resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34"
@ -10396,7 +10439,7 @@ pg-types@^2.1.0:
postgres-date "~1.0.4"
postgres-interval "^1.1.0"
pg@^8.6.0:
pg@^8.5.1, pg@^8.6.0:
version "8.6.0"
resolved "https://registry.yarnpkg.com/pg/-/pg-8.6.0.tgz#e222296b0b079b280cce106ea991703335487db2"
integrity sha512-qNS9u61lqljTDFvmk/N66EeGq3n6Ujzj0FFyNMGQr6XuEv4tgNTXvJQTfJdcvGit5p5/DWPu+wj920hAJFI+QQ==
@ -12859,7 +12902,7 @@ uuid@^3.1.0, uuid@^3.3.2:
resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==
uuid@^8.0.0:
uuid@^8.0.0, uuid@^8.3.2:
version "8.3.2"
resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==