From 0cdf41587e30b62589367eb317e573621d6b5e48 Mon Sep 17 00:00:00 2001
From: Gergo Moricz
Date: Thu, 22 Aug 2024 03:55:40 +0200
Subject: [PATCH] feat(sentry): add error handles to try-catch blocks

---
 apps/api/src/controllers/auth.ts         | 8 ++++++++
 apps/api/src/controllers/crawl-cancel.ts | 2 ++
 apps/api/src/controllers/crawl-status.ts | 2 ++
 apps/api/src/controllers/crawl.ts        | 2 ++
 apps/api/src/controllers/crawlPreview.ts | 2 ++
 apps/api/src/controllers/scrape.ts       | 2 ++
 apps/api/src/controllers/search.ts       | 3 +++
 apps/api/src/controllers/status.ts       | 2 ++
 8 files changed, 23 insertions(+)

diff --git a/apps/api/src/controllers/auth.ts b/apps/api/src/controllers/auth.ts
index 3b862c48..467d09fc 100644
--- a/apps/api/src/controllers/auth.ts
+++ b/apps/api/src/controllers/auth.ts
@@ -15,6 +15,7 @@ import { redlock } from "../../src/services/redlock";
 import { getValue } from "../../src/services/redis";
 import { setValue } from "../../src/services/redis";
 import { validate } from "uuid";
+import * as Sentry from "@sentry/node";
 
 function normalizedApiIsUuid(potentialUuid: string): boolean {
   // Check if the string is a valid UUID
@@ -34,6 +35,7 @@ function setTrace(team_id: string, api_key: string) {
       api_key,
     });
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(`Error setting trace attributes: ${error.message}`);
   }
 }
@@ -49,6 +51,7 @@ async function getKeyAndPriceId(normalizedApi: string): Promise<{
     api_key: normalizedApi,
   });
   if (error) {
+    Sentry.captureException(error);
     Logger.error(`RPC ERROR (get_key_and_price_id_2): ${error.message}`);
     return {
       success: false,
@@ -59,6 +62,7 @@ async function getKeyAndPriceId(normalizedApi: string): Promise<{
   }
   if (!data || data.length === 0) {
     Logger.warn(`Error fetching api key: ${error.message} or data is empty`);
+    Sentry.captureException(error);
     // TODO: change this error code ?
     return {
       success: false,
@@ -152,6 +156,7 @@ export async function supaAuthenticateUser(
       );
     }
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(`Error with auth function: ${error}`);
     // const {
     //   success,
@@ -302,6 +307,9 @@ export async function supaAuthenticateUser(
     .eq("key", normalizedApi);
 
   if (error || !data || data.length === 0) {
+    if (error) {
+      Sentry.captureException(error);
+    }
     Logger.warn(`Error fetching api key: ${error.message} or data is empty`);
     return {
       success: false,
diff --git a/apps/api/src/controllers/crawl-cancel.ts b/apps/api/src/controllers/crawl-cancel.ts
index ed2c4166..1de9af60 100644
--- a/apps/api/src/controllers/crawl-cancel.ts
+++ b/apps/api/src/controllers/crawl-cancel.ts
@@ -4,6 +4,7 @@ import { RateLimiterMode } from "../../src/types";
 import { supabase_service } from "../../src/services/supabase";
 import { Logger } from "../../src/lib/logger";
 import { getCrawl, saveCrawl } from "../../src/lib/crawl-redis";
+import * as Sentry from "@sentry/node";
 
 export async function crawlCancelController(req: Request, res: Response) {
   try {
@@ -50,6 +51,7 @@ export async function crawlCancelController(req: Request, res: Response) {
       status: "cancelled"
     });
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }
diff --git a/apps/api/src/controllers/crawl-status.ts b/apps/api/src/controllers/crawl-status.ts
index 3488ce26..76147263 100644
--- a/apps/api/src/controllers/crawl-status.ts
+++ b/apps/api/src/controllers/crawl-status.ts
@@ -5,6 +5,7 @@ import { getScrapeQueue } from "../../src/services/queue-service";
 import { Logger } from "../../src/lib/logger";
 import { getCrawl, getCrawlJobs } from "../../src/lib/crawl-redis";
 import { supabaseGetJobsById } from "../../src/lib/supabase-jobs";
+import * as Sentry from "@sentry/node";
 
 export async function getJobs(ids: string[]) {
   const jobs = (await Promise.all(ids.map(x => getScrapeQueue().getJob(x)))).filter(x => x);
@@ -63,6 +64,7 @@ export async function crawlStatusController(req: Request, res: Response) {
       partial_data: jobStatus === "completed" ? [] : data.filter(x => x !== null),
     });
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }
diff --git a/apps/api/src/controllers/crawl.ts b/apps/api/src/controllers/crawl.ts
index 1dfe758f..d40f2a9e 100644
--- a/apps/api/src/controllers/crawl.ts
+++ b/apps/api/src/controllers/crawl.ts
@@ -25,6 +25,7 @@ import {
 } from "../../src/lib/crawl-redis";
 import { getScrapeQueue } from "../../src/services/queue-service";
 import { checkAndUpdateURL } from "../../src/lib/validateUrl";
+import * as Sentry from "@sentry/node";
 
 export async function crawlController(req: Request, res: Response) {
   try {
@@ -194,6 +195,7 @@ export async function crawlController(req: Request, res: Response) {
 
     res.json({ jobId: id });
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }
diff --git a/apps/api/src/controllers/crawlPreview.ts b/apps/api/src/controllers/crawlPreview.ts
index cc10dc8e..59b54458 100644
--- a/apps/api/src/controllers/crawlPreview.ts
+++ b/apps/api/src/controllers/crawlPreview.ts
@@ -7,6 +7,7 @@ import { Logger } from "../../src/lib/logger";
 import { addCrawlJob, crawlToCrawler, lockURL, saveCrawl, StoredCrawl } from "../../src/lib/crawl-redis";
 import { addScrapeJob } from "../../src/services/queue-jobs";
 import { checkAndUpdateURL } from "../../src/lib/validateUrl";
+import * as Sentry from "@sentry/node";
 
 export async function crawlPreviewController(req: Request, res: Response) {
   try {
@@ -129,6 +130,7 @@ export async function crawlPreviewController(req: Request, res: Response) {
 
     res.json({ jobId: id });
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }
diff --git a/apps/api/src/controllers/scrape.ts b/apps/api/src/controllers/scrape.ts
index 273b4c56..b2d1db34 100644
--- a/apps/api/src/controllers/scrape.ts
+++ b/apps/api/src/controllers/scrape.ts
@@ -12,6 +12,7 @@ import { addScrapeJob } from '../services/queue-jobs';
 import { scrapeQueueEvents } from '../services/queue-service';
 import { v4 as uuidv4 } from "uuid";
 import { Logger } from '../lib/logger';
+import * as Sentry from "@sentry/node";
 
 export async function scrapeHelper(
   jobId: string,
@@ -186,6 +187,7 @@ export async function scrapeController(req: Request, res: Response) {
 
     return res.status(result.returnCode).json(result);
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }
diff --git a/apps/api/src/controllers/search.ts b/apps/api/src/controllers/search.ts
index 759c7805..21a8e390 100644
--- a/apps/api/src/controllers/search.ts
+++ b/apps/api/src/controllers/search.ts
@@ -10,6 +10,7 @@ import { isUrlBlocked } from "../scraper/WebScraper/utils/blocklist";
 import { v4 as uuidv4 } from "uuid";
 import { Logger } from "../lib/logger";
 import { getScrapeQueue, scrapeQueueEvents } from "../services/queue-service";
+import * as Sentry from "@sentry/node";
 
 export async function searchHelper(
   jobId: string,
@@ -154,6 +155,7 @@ export async function searchController(req: Request, res: Response) {
       return res.status(402).json({ error: "Insufficient credits" });
     }
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: "Internal server error" });
   }
@@ -184,6 +186,7 @@ export async function searchController(req: Request, res: Response) {
     });
     return res.status(result.returnCode).json(result);
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }
diff --git a/apps/api/src/controllers/status.ts b/apps/api/src/controllers/status.ts
index e469060f..c3ca906f 100644
--- a/apps/api/src/controllers/status.ts
+++ b/apps/api/src/controllers/status.ts
@@ -4,6 +4,7 @@ import { getCrawl, getCrawlJobs } from "../../src/lib/crawl-redis";
 import { getScrapeQueue } from "../../src/services/queue-service";
 import { supabaseGetJobById } from "../../src/lib/supabase-jobs";
 import { getJobs } from "./crawl-status";
+import * as Sentry from "@sentry/node";
 
 export async function crawlJobStatusPreviewController(req: Request, res: Response) {
   try {
@@ -37,6 +38,7 @@ export async function crawlJobStatusPreviewController(req: Request, res: Respons
       partial_data: jobStatus === "completed" ? [] : data.filter(x => x !== null),
     });
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }
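
Note: the Sentry.captureException calls added above assume the Sentry SDK is
initialized once at API startup; the patch itself only adds the capture calls,
so without an init they are effectively no-ops and errors only reach Logger. A
minimal initialization sketch follows; the module path and environment
variable names are illustrative assumptions, not part of this patch:

    // e.g. apps/api/src/services/sentry-init.ts -- hypothetical module, loaded once at startup
    import * as Sentry from "@sentry/node";

    // Only initialize when a DSN is configured; captureException() calls made
    // without an initialized client are silently dropped.
    if (process.env.SENTRY_DSN) {
      Sentry.init({
        dsn: process.env.SENTRY_DSN,
        environment: process.env.NODE_ENV,
      });
    }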