From b5b612c35b70c42b5f05e23fed12e71f46412ca0 Mon Sep 17 00:00:00 2001
From: Gergő Móricz
Date: Thu, 15 May 2025 16:32:59 +0200
Subject: [PATCH] feat(api/extract/fire-0): error logging (#1556)

---
 .../extract/fire-0/extraction-service-f0.ts | 81 +++++++++++++++++++
 1 file changed, 81 insertions(+)

diff --git a/apps/api/src/lib/extract/fire-0/extraction-service-f0.ts b/apps/api/src/lib/extract/fire-0/extraction-service-f0.ts
index fcf133e5..72cc8e2e 100644
--- a/apps/api/src/lib/extract/fire-0/extraction-service-f0.ts
+++ b/apps/api/src/lib/extract/fire-0/extraction-service-f0.ts
@@ -106,6 +106,22 @@ import { getACUCTeam } from "../../../controllers/auth";
     logger.error("No search results found", {
       query: request.prompt,
     });
+    logJob({
+      job_id: extractId,
+      success: false,
+      message: "No search results found",
+      num_docs: 1,
+      docs: [],
+      time_taken: (new Date().getTime() - Date.now()) / 1000,
+      team_id: teamId,
+      mode: "extract",
+      url: request.urls?.join(", ") || "",
+      scrapeOptions: request,
+      origin: request.origin ?? "api",
+      num_tokens: 0,
+      tokens_billed: 0,
+      sources,
+    });
     return {
       success: false,
       error: "No search results found",
@@ -191,6 +207,22 @@ import { getACUCTeam } from "../../../controllers/auth";
     logger.error("0 links! Bailing.", {
       linkCount: links.length,
     });
+    logJob({
+      job_id: extractId,
+      success: false,
+      message: "No valid URLs found to scrape",
+      num_docs: 1,
+      docs: [],
+      time_taken: (new Date().getTime() - Date.now()) / 1000,
+      team_id: teamId,
+      mode: "extract",
+      url: request.urls?.join(", ") || "",
+      scrapeOptions: request,
+      origin: request.origin ?? "api",
+      num_tokens: 0,
+      tokens_billed: 0,
+      sources,
+    });
     return {
       success: false,
       error:
@@ -524,6 +556,22 @@ import { getACUCTeam } from "../../../controllers/auth";

     } catch (error) {
       logger.error(`Failed to transform array to object`, { error });
+      logJob({
+        job_id: extractId,
+        success: false,
+        message: "Failed to transform array to object",
+        num_docs: 1,
+        docs: [],
+        time_taken: (new Date().getTime() - Date.now()) / 1000,
+        team_id: teamId,
+        mode: "extract",
+        url: request.urls?.join(", ") || "",
+        scrapeOptions: request,
+        origin: request.origin ?? "api",
+        num_tokens: 0,
+        tokens_billed: 0,
+        sources,
+      });
       return {
         success: false,
         error:
@@ -602,6 +650,23 @@ import { getACUCTeam } from "../../../controllers/auth";

       logger.debug("Scrapes finished.", { docCount: validResults.length });
     } catch (error) {
+      logger.error("Failed to scrape documents", { error });
+      logJob({
+        job_id: extractId,
+        success: false,
+        message: "Failed to scrape documents",
+        num_docs: 1,
+        docs: [],
+        time_taken: (new Date().getTime() - Date.now()) / 1000,
+        team_id: teamId,
+        mode: "extract",
+        url: request.urls?.join(", ") || "",
+        scrapeOptions: request,
+        origin: request.origin ?? "api",
+        num_tokens: 0,
+        tokens_billed: 0,
+        sources,
+      });
       return {
         success: false,
         error: error.message,
@@ -614,6 +679,22 @@ import { getACUCTeam } from "../../../controllers/auth";
   if (docsMap.size == 0) {
     // All urls are invalid
     logger.error("All provided URLs are invalid!");
+    logJob({
+      job_id: extractId,
+      success: false,
+      message: "All provided URLs are invalid",
+      num_docs: 1,
+      docs: [],
+      time_taken: (new Date().getTime() - Date.now()) / 1000,
+      team_id: teamId,
+      mode: "extract",
+      url: request.urls?.join(", ") || "",
+      scrapeOptions: request,
+      origin: request.origin ?? "api",
+      num_tokens: 0,
+      tokens_billed: 0,
+      sources,
+    });
     return {
       success: false,
       error:
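
One thing worth flagging in every hunk above: `time_taken: (new Date().getTime() - Date.now()) / 1000` subtracts "now" from "now", so each of these failure logs reports a duration of roughly zero seconds. A minimal standalone sketch of the usual pattern, assuming the job records its start time in a variable; the `startTime` name and the demo scaffolding are illustrative, not part of this patch:

```typescript
// Illustrative sketch only; `startTime` is an assumed variable, not
// something this patch introduces.
const startTime = Date.now(); // captured once, when the job begins

async function simulateExtraction(): Promise<void> {
  // Simulate ~150 ms of scraping/extraction work.
  await new Promise((resolve) => setTimeout(resolve, 150));

  // Elapsed seconds since the job started -- presumably what the patch's
  // `(new Date().getTime() - Date.now()) / 1000` intended; as written,
  // that expression always evaluates to ~0.
  const timeTaken = (Date.now() - startTime) / 1000;
  console.log({ time_taken: timeTaken }); // ~0.15
}

simulateExtraction();
```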
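Separately, the patch pastes the same sixteen-line `logJob` failure block five times with only `message` changing. A hedged sketch of how it could be factored into one helper; the `logExtractFailure` name, the pared-down types, and the `logJob` stub are assumptions for illustration, not code from this file:

```typescript
// Hypothetical refactor sketch -- `logExtractFailure`, these pared-down
// types, and the `logJob` stub are assumptions, not code from this file.
type ExtractRequest = {
  urls?: string[];
  origin?: string;
};

interface FailureJobLog {
  job_id: string;
  success: false;
  message: string;
  num_docs: number;
  docs: unknown[];
  time_taken: number;
  team_id: string;
  mode: "extract";
  url: string;
  scrapeOptions: ExtractRequest;
  origin: string;
  num_tokens: number;
  tokens_billed: number;
  sources: unknown;
}

// Stand-in for the real logJob used by extraction-service-f0.ts.
declare function logJob(job: FailureJobLog): void;

// One helper replaces the five copied blocks; only `message` varies.
function logExtractFailure(
  message: string,
  extractId: string,
  teamId: string,
  request: ExtractRequest,
  sources: unknown,
  startTime: number,
): void {
  logJob({
    job_id: extractId,
    success: false,
    message,
    num_docs: 1, // kept as in the patch, though `docs` below is empty
    docs: [],
    time_taken: (Date.now() - startTime) / 1000,
    team_id: teamId,
    mode: "extract",
    url: request.urls?.join(", ") || "",
    scrapeOptions: request,
    origin: request.origin ?? "api",
    num_tokens: 0,
    tokens_billed: 0,
    sources,
  });
}

// Each failure path then collapses to a single call, e.g.:
// logExtractFailure("No search results found", extractId, teamId, request, sources, startTime);
```

Two smaller things a reviewer might also note: every block logs `num_docs: 1` next to an empty `docs` array, and `time_taken` has the always-zero issue described above; the diff alone does not say whether either is intentional.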