Update scrape-events.ts

Nicolas 2024-07-25 16:20:29 -04:00
parent cc98f83fda
commit 50d2426fc4


@@ -1,6 +1,7 @@
import { Job, JobId } from "bull";
import type { baseScrapers } from "../scraper/WebScraper/single_url";
import { supabase_service as supabase } from "../services/supabase";
import { Logger } from "./logger";

export type ScrapeErrorEvent = {
  type: "error",
@@ -36,6 +37,7 @@ export class ScrapeEvents {
    if (jobId === "TEST") return null;

    if (process.env.USE_DB_AUTHENTICATION) {
      try {
        const result = await supabase.from("scrape_events").insert({
          job_id: jobId,
          type: content.type,
@@ -43,6 +45,10 @@ export class ScrapeEvents {
          // created_at
        }).select().single();
        return (result.data as any).id;
      } catch (error) {
        Logger.error(`Error inserting scrape event: ${error}`);
        return null;
      }
    }

    return null;
@@ -51,6 +57,7 @@
  static async updateScrapeResult(logId: number | null, result: ScrapeScrapeEvent["result"]) {
    if (logId === null) return;

    try {
      const previousLog = (await supabase.from("scrape_events").select().eq("id", logId).single()).data as any;
      await supabase.from("scrape_events").update({
        content: {
@@ -58,13 +65,20 @@
          result,
        }
      }).eq("id", logId);
    } catch (error) {
      Logger.error(`Error updating scrape result: ${error}`);
    }
  }

  static async logJobEvent(job: Job | JobId, event: ScrapeQueueEvent["event"]) {
    try {
      await this.insert(((job as any).id ? (job as any).id : job) as string, {
        type: "queue",
        event,
        worker: process.env.FLY_MACHINE_ID,
      });
    } catch (error) {
      Logger.error(`Error logging job event: ${error}`);
    }
  }
}
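
The net effect of the change: each Supabase call in ScrapeEvents (insert, updateScrapeResult, logJobEvent) is wrapped in try/catch, and failures are reported through Logger.error and swallowed (returning null where a value is expected) rather than propagating into the scrape pipeline. Below is a minimal sketch of the same defensive-logging pattern; the names safeLogEvent and writeEvent are hypothetical and are not part of the actual API touched by this commit.

// Sketch only: event logging should never take down the job that triggered it.
// `writeEvent` stands in for a call such as supabase.from("scrape_events").insert(...).
async function safeLogEvent(
  writeEvent: () => Promise<{ id: number }>,
  logError: (msg: string) => void,
): Promise<number | null> {
  try {
    const row = await writeEvent();   // happy path: persist the event
    return row.id;                    // hand back the new row id for later updates
  } catch (error) {
    logError(`Error inserting scrape event: ${error}`); // mirrors Logger.error in the diff
    return null;                      // swallow the failure; callers treat null as "not logged"
  }
}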