From d5ec3566732b2273beae7750807db27492e75676 Mon Sep 17 00:00:00 2001
From: Recoup Agent
Date: Sat, 7 Mar 2026 02:58:10 +0000
Subject: [PATCH 1/2] agent: @U0AJM7X8FBR make a tiny change to any bug you can find in the tasks rep

---
 src/polling/pollScraperResults.ts | 23 ++++++++++++++++++++++-
 1 file changed, 22 insertions(+), 1 deletion(-)

diff --git a/src/polling/pollScraperResults.ts b/src/polling/pollScraperResults.ts
index ed8af96..7f6e990 100644
--- a/src/polling/pollScraperResults.ts
+++ b/src/polling/pollScraperResults.ts
@@ -12,6 +12,8 @@ export type PollResult = ScrapeRun & {
   data?: unknown[];
 };
 
+const MAX_POLL_FAILURES = 5;
+
 /**
  * Polls each scraper run in parallel until all are completed (SUCCEEDED or FAILED).
  * Returns an array of results for each run.
@@ -23,6 +25,7 @@ export async function pollScraperResults(
   const pendingRuns = new Map(
     runs.map((run) => [run.runId, run])
   );
+  const failureCounts = new Map();
 
   while (pendingRuns.size > 0) {
     // Poll all pending runs in parallel
@@ -30,10 +33,28 @@ export async function pollScraperResults(
       const result = await getScraperResults(run.runId);
 
       if (!result) {
-        logger.warn("Failed to get scraper result", { runId: run.runId });
+        const failures = (failureCounts.get(run.runId) ?? 0) + 1;
+        failureCounts.set(run.runId, failures);
+        logger.warn("Failed to get scraper result", { runId: run.runId, consecutiveFailures: failures });
+
+        if (failures >= MAX_POLL_FAILURES) {
+          logger.error("Max poll failures reached, marking run as FAILED", { runId: run.runId });
+          return {
+            run,
+            pollResult: {
+              runId: run.runId,
+              datasetId: run.datasetId,
+              status: "FAILED",
+            },
+          };
+        }
+
         return null;
       }
 
+      // Reset failure count on successful poll
+      failureCounts.delete(run.runId);
+
       if (result.status === "SUCCEEDED") {
         const completedResult = result as {
           status: string;

From 3f8b03b37187bb83e1dbf4c15fb525a38d4e741e Mon Sep 17 00:00:00 2001
From: Recoup Agent
Date: Sat, 7 Mar 2026 03:03:23 +0000
Subject: [PATCH 2/2] refactor: DRY up poll failure handling with shared failedResult helper

---
 src/polling/pollScraperResults.ts | 37 +++++++++++++++-------------------
 1 file changed, 18 insertions(+), 19 deletions(-)

diff --git a/src/polling/pollScraperResults.ts b/src/polling/pollScraperResults.ts
index 7f6e990..570c90c 100644
--- a/src/polling/pollScraperResults.ts
+++ b/src/polling/pollScraperResults.ts
@@ -14,8 +14,16 @@ export type PollResult = ScrapeRun & {
 
 const MAX_POLL_FAILURES = 5;
 
+/**
+ * Builds a PollResult marking a run as failed.
+ */
+function failedResult(run: ScrapeRun): PollResult {
+  return { runId: run.runId, datasetId: run.datasetId, status: "FAILED" };
+}
+
 /**
  * Polls each scraper run in parallel until all are completed (SUCCEEDED or FAILED).
+ * Marks a run as FAILED after MAX_POLL_FAILURES consecutive poll errors.
  * Returns an array of results for each run.
  */
 export async function pollScraperResults(
@@ -35,18 +43,16 @@ export async function pollScraperResults(
       if (!result) {
         const failures = (failureCounts.get(run.runId) ?? 0) + 1;
         failureCounts.set(run.runId, failures);
-        logger.warn("Failed to get scraper result", { runId: run.runId, consecutiveFailures: failures });
+        logger.warn("Failed to get scraper result", {
+          runId: run.runId,
+          consecutiveFailures: failures,
+        });
 
         if (failures >= MAX_POLL_FAILURES) {
-          logger.error("Max poll failures reached, marking run as FAILED", { runId: run.runId });
-          return {
-            run,
-            pollResult: {
-              runId: run.runId,
-              datasetId: run.datasetId,
-              status: "FAILED",
-            },
-          };
+          logger.error("Max poll failures reached, marking run as FAILED", {
+            runId: run.runId,
+          });
+          return { run, pollResult: failedResult(run) };
         }
 
         return null;
@@ -68,17 +74,10 @@ export async function pollScraperResults(
             datasetId: completedResult.datasetId,
             status: completedResult.status,
             data: completedResult.data,
-          },
+          } as PollResult,
         };
       } else if (result.status === "FAILED") {
-        return {
-          run,
-          pollResult: {
-            runId: run.runId,
-            datasetId: result.datasetId,
-            status: result.status,
-          },
-        };
+        return { run, pollResult: failedResult(run) };
       }
 
       return null; // Still running