Mirror of https://git.mirrors.martin98.com/https://github.com/mendableai/firecrawl (synced 2025-08-06 08:06:01 +08:00)
Update scrape-events.ts

commit 50d2426fc4
parent cc98f83fda
@@ -1,6 +1,7 @@
 import { Job, JobId } from "bull";
 import type { baseScrapers } from "../scraper/WebScraper/single_url";
 import { supabase_service as supabase } from "../services/supabase";
+import { Logger } from "./logger";
 
 export type ScrapeErrorEvent = {
   type: "error",
@@ -36,13 +37,18 @@ export class ScrapeEvents {
     if (jobId === "TEST") return null;
 
     if (process.env.USE_DB_AUTHENTICATION) {
-      const result = await supabase.from("scrape_events").insert({
-        job_id: jobId,
-        type: content.type,
-        content: content,
-        // created_at
-      }).select().single();
-      return (result.data as any).id;
+      try {
+        const result = await supabase.from("scrape_events").insert({
+          job_id: jobId,
+          type: content.type,
+          content: content,
+          // created_at
+        }).select().single();
+        return (result.data as any).id;
+      } catch (error) {
+        Logger.error(`Error inserting scrape event: ${error}`);
+        return null;
+      }
     }
 
     return null;
@@ -51,20 +57,28 @@ export class ScrapeEvents {
   static async updateScrapeResult(logId: number | null, result: ScrapeScrapeEvent["result"]) {
     if (logId === null) return;
 
-    const previousLog = (await supabase.from("scrape_events").select().eq("id", logId).single()).data as any;
-    await supabase.from("scrape_events").update({
-      content: {
-        ...previousLog.content,
-        result,
-      }
-    }).eq("id", logId);
+    try {
+      const previousLog = (await supabase.from("scrape_events").select().eq("id", logId).single()).data as any;
+      await supabase.from("scrape_events").update({
+        content: {
+          ...previousLog.content,
+          result,
+        }
+      }).eq("id", logId);
+    } catch (error) {
+      Logger.error(`Error updating scrape result: ${error}`);
+    }
   }
 
   static async logJobEvent(job: Job | JobId, event: ScrapeQueueEvent["event"]) {
-    await this.insert(((job as any).id ? (job as any).id : job) as string, {
-      type: "queue",
-      event,
-      worker: process.env.FLY_MACHINE_ID,
-    });
+    try {
+      await this.insert(((job as any).id ? (job as any).id : job) as string, {
+        type: "queue",
+        event,
+        worker: process.env.FLY_MACHINE_ID,
+      });
+    } catch (error) {
+      Logger.error(`Error logging job event: ${error}`);
+    }
   }
 }
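In net, this commit imports Logger and wraps each Supabase call in ScrapeEvents.insert, ScrapeEvents.updateScrapeResult, and ScrapeEvents.logJobEvent in a try/catch, logging failures with Logger.error (and returning null from insert) instead of letting a database error propagate to the caller. A minimal sketch of how a caller might rely on that behavior, assuming a relative import path of "./scrape-events"; the onJobFinished function and the "completed" event name are illustrative assumptions, not part of this commit:

import { Job } from "bull";
import { ScrapeEvents } from "./scrape-events";

// Hypothetical worker hook: record a queue event when a scrape job ends.
// After this commit, a failed Supabase insert is caught inside ScrapeEvents
// and logged via Logger.error, so this await resolves instead of rejecting
// and the job itself is never failed by a logging error.
async function onJobFinished(job: Job) {
  await ScrapeEvents.logJobEvent(job, "completed" as any); // event name is an assumption
}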