From 2849ce2f1369ae40fe2b0214ac8ec000676a9f05 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Gerg=C5=91=20M=C3=B3ricz?=
Date: Thu, 9 Jan 2025 18:48:47 +0100
Subject: [PATCH] fix(queue-worker): errored job logging

---
 apps/api/src/services/queue-worker.ts | 58 +++++++++++----------------
 1 file changed, 24 insertions(+), 34 deletions(-)

diff --git a/apps/api/src/services/queue-worker.ts b/apps/api/src/services/queue-worker.ts
index adefd143..051217c6 100644
--- a/apps/api/src/services/queue-worker.ts
+++ b/apps/api/src/services/queue-worker.ts
@@ -989,16 +989,30 @@ async function processJob(job: Job & { id: string }, token: string) {
         job.data.crawlerOptions !== null ? "crawl.page" : "batch_scrape.page",
       );
     }
-    // if (job.data.v1) {
-    //   callWebhook(
-    //     job.data.team_id,
-    //     job.id as string,
-    //     [],
-    //     job.data.webhook,
-    //     job.data.v1,
-    //     "crawl.failed"
-    //   );
-    // }
+
+    logger.debug("Logging job to DB...");
+    await logJob(
+      {
+        job_id: job.id as string,
+        success: false,
+        message:
+          typeof error === "string"
+            ? error
+            : (error.message ??
+              "Something went wrong... Contact help@mendable.ai"),
+        num_docs: 0,
+        docs: [],
+        time_taken: 0,
+        team_id: job.data.team_id,
+        mode: job.data.mode,
+        url: job.data.url,
+        crawlerOptions: job.data.crawlerOptions,
+        scrapeOptions: job.data.scrapeOptions,
+        origin: job.data.origin,
+        crawl_id: job.data.crawl_id,
+      },
+      true,
+    );
 
     if (job.data.crawl_id) {
       const sc = (await getCrawl(job.data.crawl_id)) as StoredCrawl;
@@ -1010,30 +1024,6 @@ async function processJob(job: Job & { id: string }, token: string) {
         normalizeURL(job.data.url, sc),
       );
 
-      logger.debug("Logging job to DB...");
-      await logJob(
-        {
-          job_id: job.id as string,
-          success: false,
-          message:
-            typeof error === "string"
-              ? error
-              : (error.message ??
-                "Something went wrong... Contact help@mendable.ai"),
-          num_docs: 0,
-          docs: [],
-          time_taken: 0,
-          team_id: job.data.team_id,
-          mode: job.data.mode,
-          url: job.data.url,
-          crawlerOptions: sc.crawlerOptions,
-          scrapeOptions: job.data.scrapeOptions,
-          origin: job.data.origin,
-          crawl_id: job.data.crawl_id,
-        },
-        true,
-      );
-
       await finishCrawlIfNeeded(job, sc);
 
       // await logJob({