Mirror of https://git.mirrors.martin98.com/https://github.com/mendableai/firecrawl
Synced 2025-08-14 22:15:58 +08:00
Merge branch 'main' into nsc/job-priority

Commit: 8a778278a9
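Every hunk in this merge applies the same two-line change: the touched file gains an import of @sentry/node, and each catch block (or error branch) gains a Sentry.captureException(error) call alongside the existing Logger call, so failures are reported to Sentry in addition to being logged. A minimal sketch of the pattern, assuming an Express controller (the name someController is hypothetical; only the two marked lines correspond to this merge):

// Illustrative sketch, not a file in this commit.
import { Request, Response } from "express";
import * as Sentry from "@sentry/node"; // added by this merge
import { Logger } from "../lib/logger";

export async function someController(req: Request, res: Response) {
  try {
    // ... handler body ...
    return res.json({ ok: true });
  } catch (error) {
    Sentry.captureException(error); // added by this merge
    Logger.error(error);
    return res.status(500).json({ error: error.message });
  }
}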
@@ -16,6 +16,7 @@ import { redlock } from "../../src/services/redlock";
 import { getValue } from "../../src/services/redis";
 import { setValue } from "../../src/services/redis";
 import { validate } from "uuid";
+import * as Sentry from "@sentry/node";
 
 function normalizedApiIsUuid(potentialUuid: string): boolean {
   // Check if the string is a valid UUID
@@ -35,6 +36,7 @@ function setTrace(team_id: string, api_key: string) {
       api_key,
     });
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(`Error setting trace attributes: ${error.message}`);
   }
 }
@@ -50,6 +52,7 @@ async function getKeyAndPriceId(normalizedApi: string): Promise<{
     api_key: normalizedApi,
   });
   if (error) {
+    Sentry.captureException(error);
     Logger.error(`RPC ERROR (get_key_and_price_id_2): ${error.message}`);
     return {
       success: false,
@@ -60,6 +63,7 @@ async function getKeyAndPriceId(normalizedApi: string): Promise<{
   }
   if (!data || data.length === 0) {
     Logger.warn(`Error fetching api key: ${error.message} or data is empty`);
+    Sentry.captureException(error);
     // TODO: change this error code ?
     return {
       success: false,
@@ -153,6 +157,7 @@ export async function supaAuthenticateUser(
       );
     }
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(`Error with auth function: ${error}`);
     // const {
     //   success,
@@ -303,6 +308,9 @@ export async function supaAuthenticateUser(
     .eq("key", normalizedApi);
 
   if (error || !data || data.length === 0) {
+    if (error) {
+      Sentry.captureException(error);
+    }
     Logger.warn(`Error fetching api key: ${error.message} or data is empty`);
     return {
       success: false,
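Note on the hunk at @@ -60,6 +63,7 @@ above: Logger.warn interpolates error.message inside a branch that also fires when error is null and only data is empty, so the log statement itself can throw. A defensive variant, as an editorial sketch rather than what the commit does:

// Sketch: `error` may be null/undefined when only `data` is empty.
if (!data || data.length === 0) {
  Logger.warn(`Error fetching api key: ${error?.message ?? "none"} or data is empty`);
  if (error) {
    Sentry.captureException(error); // only report a real error object
  }
  // TODO: change this error code ?
  return { success: false }; // remaining fields of the result elided
}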
@@ -4,6 +4,7 @@ import { RateLimiterMode } from "../../src/types";
 import { supabase_service } from "../../src/services/supabase";
 import { Logger } from "../../src/lib/logger";
 import { getCrawl, saveCrawl } from "../../src/lib/crawl-redis";
+import * as Sentry from "@sentry/node";
 
 export async function crawlCancelController(req: Request, res: Response) {
   try {
@@ -50,6 +51,7 @@ export async function crawlCancelController(req: Request, res: Response) {
       status: "cancelled"
     });
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }
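These captureException calls only report events if the Sentry SDK was initialized once at process startup; that bootstrap is not part of this diff. With @sentry/node it typically looks like the following (the env var names are assumptions, not taken from this repo):

import * as Sentry from "@sentry/node";

// Run once, as early as possible in the process (assumed setup; not in this diff).
Sentry.init({
  dsn: process.env.SENTRY_DSN,       // assumed env var name
  environment: process.env.NODE_ENV, // optional: tag events by environment
});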
@@ -5,6 +5,7 @@ import { getScrapeQueue } from "../../src/services/queue-service";
 import { Logger } from "../../src/lib/logger";
 import { getCrawl, getCrawlJobs } from "../../src/lib/crawl-redis";
 import { supabaseGetJobsById } from "../../src/lib/supabase-jobs";
+import * as Sentry from "@sentry/node";
 
 export async function getJobs(ids: string[]) {
   const jobs = (await Promise.all(ids.map(x => getScrapeQueue().getJob(x)))).filter(x => x);
@@ -63,6 +64,7 @@ export async function crawlStatusController(req: Request, res: Response) {
       partial_data: jobStatus === "completed" ? [] : data.filter(x => x !== null),
     });
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }
@@ -26,6 +26,7 @@ import {
 import { getScrapeQueue } from "../../src/services/queue-service";
 import { checkAndUpdateURL } from "../../src/lib/validateUrl";
 import { getJobPriority } from "../../src/lib/job-priority";
+import * as Sentry from "@sentry/node";
 
 export async function crawlController(req: Request, res: Response) {
   try {
@@ -209,6 +210,7 @@ export async function crawlController(req: Request, res: Response) {
 
     res.json({ jobId: id });
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }
@@ -7,6 +7,7 @@ import { Logger } from "../../src/lib/logger";
 import { addCrawlJob, crawlToCrawler, lockURL, saveCrawl, StoredCrawl } from "../../src/lib/crawl-redis";
 import { addScrapeJob } from "../../src/services/queue-jobs";
 import { checkAndUpdateURL } from "../../src/lib/validateUrl";
+import * as Sentry from "@sentry/node";
 
 export async function crawlPreviewController(req: Request, res: Response) {
   try {
@@ -130,6 +131,7 @@ export async function crawlPreviewController(req: Request, res: Response) {
 
     res.json({ jobId: id });
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }
@@ -13,6 +13,7 @@ import { scrapeQueueEvents } from '../services/queue-service';
 import { v4 as uuidv4 } from "uuid";
 import { Logger } from '../lib/logger';
 import { getJobPriority } from '../lib/job-priority';
+import * as Sentry from "@sentry/node";
 
 export async function scrapeHelper(
   jobId: string,
@@ -189,6 +190,7 @@ export async function scrapeController(req: Request, res: Response) {
 
     return res.status(result.returnCode).json(result);
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }
@@ -11,6 +11,7 @@ import { v4 as uuidv4 } from "uuid";
 import { Logger } from "../lib/logger";
 import { getScrapeQueue, scrapeQueueEvents } from "../services/queue-service";
 import { getJobPriority } from "../lib/job-priority";
+import * as Sentry from "@sentry/node";
 
 export async function searchHelper(
   jobId: string,
@@ -158,6 +159,7 @@ export async function searchController(req: Request, res: Response) {
       return res.status(402).json({ error: "Insufficient credits" });
     }
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: "Internal server error" });
   }
@@ -189,6 +191,7 @@ export async function searchController(req: Request, res: Response) {
     });
     return res.status(result.returnCode).json(result);
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }
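searchController now carries the same bare captureException call in two separate catch blocks. If the two error paths ever need to be distinguished in Sentry, the SDK's withScope can attach tags per call site; a sketch with assumed tag names, not something this commit does:

// Sketch: tag the event so the two catch blocks are distinguishable in Sentry.
Sentry.withScope((scope) => {
  scope.setTag("controller", "searchController"); // assumed tag key/value
  Sentry.captureException(error);
});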
@@ -4,6 +4,7 @@ import { getCrawl, getCrawlJobs } from "../../src/lib/crawl-redis";
 import { getScrapeQueue } from "../../src/services/queue-service";
 import { supabaseGetJobById } from "../../src/lib/supabase-jobs";
 import { getJobs } from "./crawl-status";
+import * as Sentry from "@sentry/node";
 
 export async function crawlJobStatusPreviewController(req: Request, res: Response) {
   try {
@@ -37,6 +38,7 @@ export async function crawlJobStatusPreviewController(req: Request, res: Response) {
       partial_data: jobStatus === "completed" ? [] : data.filter(x => x !== null),
     });
   } catch (error) {
+    Sentry.captureException(error);
     Logger.error(error);
     return res.status(500).json({ error: error.message });
   }