diff --git a/apps/api/src/controllers/auth.ts b/apps/api/src/controllers/auth.ts index 44796276..5b8a141b 100644 --- a/apps/api/src/controllers/auth.ts +++ b/apps/api/src/controllers/auth.ts @@ -16,6 +16,7 @@ import { redlock } from "../../src/services/redlock"; import { getValue } from "../../src/services/redis"; import { setValue } from "../../src/services/redis"; import { validate } from "uuid"; +import * as Sentry from "@sentry/node"; function normalizedApiIsUuid(potentialUuid: string): boolean { // Check if the string is a valid UUID @@ -35,6 +36,7 @@ function setTrace(team_id: string, api_key: string) { api_key, }); } catch (error) { + Sentry.captureException(error); Logger.error(`Error setting trace attributes: ${error.message}`); } } @@ -50,6 +52,7 @@ async function getKeyAndPriceId(normalizedApi: string): Promise<{ api_key: normalizedApi, }); if (error) { + Sentry.captureException(error); Logger.error(`RPC ERROR (get_key_and_price_id_2): ${error.message}`); return { success: false, @@ -60,6 +63,7 @@ async function getKeyAndPriceId(normalizedApi: string): Promise<{ } if (!data || data.length === 0) { Logger.warn(`Error fetching api key: ${error.message} or data is empty`); + Sentry.captureException(new Error("Empty data when fetching key and price ID")); // TODO: change this error code ? 
return { success: false, @@ -153,6 +157,7 @@ export async function supaAuthenticateUser( ); } } catch (error) { + Sentry.captureException(error); Logger.error(`Error with auth function: ${error}`); // const { // success, @@ -303,6 +308,9 @@ export async function supaAuthenticateUser( .eq("key", normalizedApi); if (error || !data || data.length === 0) { + if (error) { + Sentry.captureException(error); + } Logger.warn(`Error fetching api key: ${error.message} or data is empty`); return { success: false, diff --git a/apps/api/src/controllers/crawl-cancel.ts b/apps/api/src/controllers/crawl-cancel.ts index ed2c4166..1de9af60 100644 --- a/apps/api/src/controllers/crawl-cancel.ts +++ b/apps/api/src/controllers/crawl-cancel.ts @@ -4,6 +4,7 @@ import { RateLimiterMode } from "../../src/types"; import { supabase_service } from "../../src/services/supabase"; import { Logger } from "../../src/lib/logger"; import { getCrawl, saveCrawl } from "../../src/lib/crawl-redis"; +import * as Sentry from "@sentry/node"; export async function crawlCancelController(req: Request, res: Response) { try { @@ -50,6 +51,7 @@ export async function crawlCancelController(req: Request, res: Response) { status: "cancelled" }); } catch (error) { + Sentry.captureException(error); Logger.error(error); return res.status(500).json({ error: error.message }); } diff --git a/apps/api/src/controllers/crawl-status.ts b/apps/api/src/controllers/crawl-status.ts index 3488ce26..76147263 100644 --- a/apps/api/src/controllers/crawl-status.ts +++ b/apps/api/src/controllers/crawl-status.ts @@ -5,6 +5,7 @@ import { getScrapeQueue } from "../../src/services/queue-service"; import { Logger } from "../../src/lib/logger"; import { getCrawl, getCrawlJobs } from "../../src/lib/crawl-redis"; import { supabaseGetJobsById } from "../../src/lib/supabase-jobs"; +import * as Sentry from "@sentry/node"; export async function getJobs(ids: string[]) { const jobs = (await Promise.all(ids.map(x => 
getScrapeQueue().getJob(x)))).filter(x => x); @@ -63,6 +64,7 @@ export async function crawlStatusController(req: Request, res: Response) { partial_data: jobStatus === "completed" ? [] : data.filter(x => x !== null), }); } catch (error) { + Sentry.captureException(error); Logger.error(error); return res.status(500).json({ error: error.message }); } diff --git a/apps/api/src/controllers/crawl.ts b/apps/api/src/controllers/crawl.ts index cfac8f56..7bd9b373 100644 --- a/apps/api/src/controllers/crawl.ts +++ b/apps/api/src/controllers/crawl.ts @@ -26,6 +26,7 @@ import { import { getScrapeQueue } from "../../src/services/queue-service"; import { checkAndUpdateURL } from "../../src/lib/validateUrl"; import { getJobPriority } from "../../src/lib/job-priority"; +import * as Sentry from "@sentry/node"; export async function crawlController(req: Request, res: Response) { try { @@ -209,6 +210,7 @@ export async function crawlController(req: Request, res: Response) { res.json({ jobId: id }); } catch (error) { + Sentry.captureException(error); Logger.error(error); return res.status(500).json({ error: error.message }); } diff --git a/apps/api/src/controllers/crawlPreview.ts b/apps/api/src/controllers/crawlPreview.ts index 20eae731..3e43f07f 100644 --- a/apps/api/src/controllers/crawlPreview.ts +++ b/apps/api/src/controllers/crawlPreview.ts @@ -7,6 +7,7 @@ import { Logger } from "../../src/lib/logger"; import { addCrawlJob, crawlToCrawler, lockURL, saveCrawl, StoredCrawl } from "../../src/lib/crawl-redis"; import { addScrapeJob } from "../../src/services/queue-jobs"; import { checkAndUpdateURL } from "../../src/lib/validateUrl"; +import * as Sentry from "@sentry/node"; export async function crawlPreviewController(req: Request, res: Response) { try { @@ -130,6 +131,7 @@ export async function crawlPreviewController(req: Request, res: Response) { res.json({ jobId: id }); } catch (error) { + Sentry.captureException(error); Logger.error(error); return res.status(500).json({ error: 
error.message }); } diff --git a/apps/api/src/controllers/scrape.ts b/apps/api/src/controllers/scrape.ts index bb991060..f33aa058 100644 --- a/apps/api/src/controllers/scrape.ts +++ b/apps/api/src/controllers/scrape.ts @@ -13,6 +13,7 @@ import { scrapeQueueEvents } from '../services/queue-service'; import { v4 as uuidv4 } from "uuid"; import { Logger } from '../lib/logger'; import { getJobPriority } from '../lib/job-priority'; +import * as Sentry from "@sentry/node"; export async function scrapeHelper( jobId: string, @@ -189,6 +190,7 @@ export async function scrapeController(req: Request, res: Response) { return res.status(result.returnCode).json(result); } catch (error) { + Sentry.captureException(error); Logger.error(error); return res.status(500).json({ error: error.message }); } diff --git a/apps/api/src/controllers/search.ts b/apps/api/src/controllers/search.ts index 570f755f..a8bf3a69 100644 --- a/apps/api/src/controllers/search.ts +++ b/apps/api/src/controllers/search.ts @@ -11,6 +11,7 @@ import { v4 as uuidv4 } from "uuid"; import { Logger } from "../lib/logger"; import { getScrapeQueue, scrapeQueueEvents } from "../services/queue-service"; import { getJobPriority } from "../lib/job-priority"; +import * as Sentry from "@sentry/node"; export async function searchHelper( jobId: string, @@ -158,6 +159,7 @@ export async function searchController(req: Request, res: Response) { return res.status(402).json({ error: "Insufficient credits" }); } } catch (error) { + Sentry.captureException(error); Logger.error(error); return res.status(500).json({ error: "Internal server error" }); } @@ -189,6 +191,7 @@ export async function searchController(req: Request, res: Response) { }); return res.status(result.returnCode).json(result); } catch (error) { + Sentry.captureException(error); Logger.error(error); return res.status(500).json({ error: error.message }); } diff --git a/apps/api/src/controllers/status.ts b/apps/api/src/controllers/status.ts index e469060f..c3ca906f 
100644 --- a/apps/api/src/controllers/status.ts +++ b/apps/api/src/controllers/status.ts @@ -4,6 +4,7 @@ import { getCrawl, getCrawlJobs } from "../../src/lib/crawl-redis"; import { getScrapeQueue } from "../../src/services/queue-service"; import { supabaseGetJobById } from "../../src/lib/supabase-jobs"; import { getJobs } from "./crawl-status"; +import * as Sentry from "@sentry/node"; export async function crawlJobStatusPreviewController(req: Request, res: Response) { try { @@ -37,6 +38,7 @@ export async function crawlJobStatusPreviewController(req: Request, res: Respons partial_data: jobStatus === "completed" ? [] : data.filter(x => x !== null), }); } catch (error) { + Sentry.captureException(error); Logger.error(error); return res.status(500).json({ error: error.message }); }