Fix events queue job retry by resetting watcher in job-runner (#470)

* Reset watcher after events job retry in job-runner

* Push next historical job from job-runner instead of event-watcher
This commit is contained in:
Nabarun Gogoi 2023-11-14 18:43:17 +05:30 committed by GitHub
parent 6ce8d4746d
commit 7c4f9fb797
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 39 additions and 29 deletions

View File

@ -88,16 +88,17 @@ export class EventWatcher {
} }
async startBlockProcessing (): Promise<void> { async startBlockProcessing (): Promise<void> {
// Get latest block in chain and sync status from DB. // Wait for events job queue to be empty before starting historical or realtime processing
const [{ block: latestBlock }, syncStatus] = await Promise.all([ await this._jobQueue.waitForEmptyQueue(QUEUE_EVENT_PROCESSING);
// Get latest block in chain and sync status from DB
// Also get historical-processing queue size
const [{ block: latestBlock }, syncStatus, historicalProcessingQueueSize] = await Promise.all([
this._ethClient.getBlockByHash(), this._ethClient.getBlockByHash(),
this._indexer.getSyncStatus(), this._indexer.getSyncStatus(),
// Wait for events job queue to be empty before starting historical or realtime processing this._jobQueue.getQueueSize(QUEUE_HISTORICAL_PROCESSING, 'completed')
this._jobQueue.waitForEmptyQueue(QUEUE_EVENT_PROCESSING)
]); ]);
const historicalProcessingQueueSize = await this._jobQueue.getQueueSize(QUEUE_HISTORICAL_PROCESSING, 'completed');
// Stop if there are active or pending historical processing jobs // Stop if there are active or pending historical processing jobs
// Might be created on encountering template create in events processing // Might be created on encountering template create in events processing
if (historicalProcessingQueueSize > 0) { if (historicalProcessingQueueSize > 0) {
@ -144,7 +145,8 @@ export class EventWatcher {
{ {
blockNumber: startBlockNumber, blockNumber: startBlockNumber,
processingEndBlockNumber: this._historicalProcessingEndBlockNumber processingEndBlockNumber: this._historicalProcessingEndBlockNumber
} },
{ priority: 1 }
); );
} }
@ -241,22 +243,11 @@ export class EventWatcher {
if (nextBatchStartBlockNumber > this._historicalProcessingEndBlockNumber) { if (nextBatchStartBlockNumber > this._historicalProcessingEndBlockNumber) {
// Start next batch of historical processing or realtime processing // Start next batch of historical processing or realtime processing
this.startBlockProcessing(); this.startBlockProcessing();
return;
} }
// Push job for next batch of blocks
await this._jobQueue.pushJob(
QUEUE_HISTORICAL_PROCESSING,
{
blockNumber: nextBatchStartBlockNumber,
processingEndBlockNumber: this._historicalProcessingEndBlockNumber
}
);
} }
async eventProcessingCompleteHandler (job: PgBoss.JobWithMetadata<any>): Promise<void> { async eventProcessingCompleteHandler (job: PgBoss.JobWithMetadata<any>): Promise<void> {
const { id, retrycount, data: { request: { data }, failed, state, createdOn } } = job; const { id, data: { request: { data }, failed, state, createdOn, retryCount } } = job;
if (failed) { if (failed) {
log(`Job ${id} for queue ${QUEUE_EVENT_PROCESSING} failed`); log(`Job ${id} for queue ${QUEUE_EVENT_PROCESSING} failed`);
@ -274,9 +265,7 @@ export class EventWatcher {
assert(blockProgress); assert(blockProgress);
// Check if job was retried // Check if job was retried
if (retrycount > 0) { if (retryCount > 0) {
// Reset watcher to remove any data after this block
await this._indexer.resetWatcherToBlock(blockProgress.blockNumber);
// Start block processing (Try restarting historical processing or continue realtime processing) // Start block processing (Try restarting historical processing or continue realtime processing)
this.startBlockProcessing(); this.startBlockProcessing();
} }

View File

@ -20,7 +20,7 @@ type JobCompleteCallback = (job: PgBoss.Job | PgBoss.JobWithMetadata) => Promise
const DEFAULT_JOBS_PER_INTERVAL = 5; const DEFAULT_JOBS_PER_INTERVAL = 5;
// Interval time to check for events queue to be empty // Interval time to check for events queue to be empty
const EMPTY_QUEUE_CHECK_INTERVAL = 5000; const EMPTY_QUEUE_CHECK_INTERVAL = 1000;
const log = debug('vulcanize:job-queue'); const log = debug('vulcanize:job-queue');

View File

@ -170,8 +170,12 @@ export class JobRunner {
if (this._historicalProcessingCompletedUpto) { if (this._historicalProcessingCompletedUpto) {
// Check if historical processing start is for a previous block which happens in case of template create // Check if historical processing start is for a previous block which happens in case of template create
if (startBlock < this._historicalProcessingCompletedUpto) { if (startBlock < this._historicalProcessingCompletedUpto) {
// Delete any pending historical processing jobs await Promise.all([
await this.jobQueue.deleteJobs(QUEUE_HISTORICAL_PROCESSING); // Delete any pending historical processing jobs
this.jobQueue.deleteJobs(QUEUE_HISTORICAL_PROCESSING),
// Remove pending events queue jobs
this.jobQueue.deleteJobs(QUEUE_EVENT_PROCESSING)
]);
// Wait for events queue to be empty // Wait for events queue to be empty
log(`Waiting for events queue to be empty before resetting watcher to block ${startBlock - 1}`); log(`Waiting for events queue to be empty before resetting watcher to block ${startBlock - 1}`);
@ -242,6 +246,17 @@ export class JobRunner {
this._historicalProcessingCompletedUpto = endBlock; this._historicalProcessingCompletedUpto = endBlock;
if (endBlock < processingEndBlockNumber) {
// If endBlock is lesser than processingEndBlockNumber push new historical job
await this.jobQueue.pushJob(
QUEUE_HISTORICAL_PROCESSING,
{
blockNumber: endBlock + 1,
processingEndBlockNumber: processingEndBlockNumber
}
);
}
await this.jobQueue.markComplete( await this.jobQueue.markComplete(
job, job,
{ isComplete: true, endBlock } { isComplete: true, endBlock }
@ -679,9 +694,15 @@ export class JobRunner {
this._endBlockProcessTimer = lastBlockProcessDuration.startTimer(); this._endBlockProcessTimer = lastBlockProcessDuration.startTimer();
await this._indexer.updateSyncStatusProcessedBlock(block.blockHash, block.blockNumber); await this._indexer.updateSyncStatusProcessedBlock(block.blockHash, block.blockNumber);
// If this was a retry attempt, unset the indexing error flag in sync status
if (retryCount > 0) { if (retryCount > 0) {
await this._indexer.updateSyncStatusIndexingError(false); await Promise.all([
// If this was a retry attempt, unset the indexing error flag in sync status
this._indexer.updateSyncStatusIndexingError(false),
// Reset watcher after successful retry so that block processing starts after this block
this._indexer.resetWatcherToBlock(block.blockNumber)
]);
log(`Watcher reset to block ${block.blockNumber} after succesffully retrying events processing`);
} }
} catch (error) { } catch (error) {
log(`Error in processing events for block ${block.blockNumber} hash ${block.blockHash}`); log(`Error in processing events for block ${block.blockNumber} hash ${block.blockHash}`);
@ -691,8 +712,8 @@ export class JobRunner {
this._indexer.clearProcessedBlockData(block), this._indexer.clearProcessedBlockData(block),
// Delete all pending event processing jobs // Delete all pending event processing jobs
this.jobQueue.deleteJobs(QUEUE_EVENT_PROCESSING), this.jobQueue.deleteJobs(QUEUE_EVENT_PROCESSING),
// Delete all pending historical processing jobs // Delete all active and pending historical processing jobs
this.jobQueue.deleteJobs(QUEUE_HISTORICAL_PROCESSING, 'active'), this.jobQueue.deleteJobs(QUEUE_HISTORICAL_PROCESSING, 'completed'),
// Set the indexing error flag in sync status // Set the indexing error flag in sync status
this._indexer.updateSyncStatusIndexingError(true) this._indexer.updateSyncStatusIndexingError(true)
]); ]);