looking good

This commit is contained in:
rafaelsideguide 2024-06-27 16:00:45 -03:00
parent c40da77be0
commit d66e1f7846
6 changed files with 37 additions and 22 deletions

View File

@@ -331,7 +331,7 @@ describe("E2E Tests for API Routes", () => {
expect(completedResponse.body.data[0].content).toContain("Mendable"); expect(completedResponse.body.data[0].content).toContain("Mendable");
expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200); expect(completedResponse.body.data[0].metadata.pageStatusCode).toBe(200);
expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined(); expect(completedResponse.body.data[0].metadata.pageError).toBeUndefined();
}, 60000); // 60 seconds }, 180000); // 180 seconds
it.concurrent("should return a successful response with a valid API key and valid excludes option", async () => { it.concurrent("should return a successful response with a valid API key and valid excludes option", async () => {
const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL) const crawlResponse: FirecrawlCrawlResponse = await request(TEST_URL)

View File

@@ -40,7 +40,7 @@ export async function crawlStatusController(req: Request, res: Response) {
current_url, current_url,
current_step, current_step,
total, total,
data, data: data ? data : null,
partial_data: jobStatus == 'completed' ? [] : partialDocs, partial_data: jobStatus == 'completed' ? [] : partialDocs,
}); });
} catch (error) { } catch (error) {

View File

@@ -1,5 +1,6 @@
import { Request, Response } from "express"; import { Request, Response } from "express";
import { getWebScraperQueue } from "../../src/services/queue-service"; import { getWebScraperQueue } from "../../src/services/queue-service";
import { supabaseGetJobById } from "../../src/lib/supabase-jobs";
export async function crawlJobStatusPreviewController(req: Request, res: Response) { export async function crawlJobStatusPreviewController(req: Request, res: Response) {
try { try {
@@ -9,15 +10,26 @@ export async function crawlJobStatusPreviewController(req: Request, res: Respons
} }
const { current, current_url, total, current_step, partialDocs } = await job.progress(); const { current, current_url, total, current_step, partialDocs } = await job.progress();
let data = job.returnvalue;
if (process.env.USE_DB_AUTHENTICATION) {
const supabaseData = await supabaseGetJobById(req.params.jobId);
if (supabaseData) {
data = supabaseData.docs;
}
}
const jobStatus = await job.getState();
res.json({ res.json({
status: await job.getState(), status: jobStatus,
// progress: job.progress(), // progress: job.progress(),
current: current, current,
current_url: current_url, current_url,
current_step: current_step, current_step,
total: total, total,
data: job.returnvalue, data: data ? data : null,
partial_data: partialDocs ?? [], partial_data: jobStatus == 'completed' ? [] : partialDocs,
}); });
} catch (error) { } catch (error) {
console.error(error); console.error(error);

View File

@@ -8,7 +8,6 @@ export const supabaseGetJobById = async (jobId: string) => {
.single(); .single();
if (error) { if (error) {
console.error('Error while fetching supabase for job:', jobId, 'error:', error);
return null; return null;
} }

View File

@@ -110,15 +110,20 @@ export async function runWebScraper({
} }
const saveJob = async (job: Job, result: any) => { const saveJob = async (job: Job, result: any) => {
try {
if (process.env.USE_DB_AUTHENTICATION) { if (process.env.USE_DB_AUTHENTICATION) {
const { data, error } = await supabase_service const { data, error } = await supabase_service
.from("firecrawl_jobs") .from("firecrawl_jobs")
.update({ docs: result }) .update({ docs: result })
.eq("job_id", job.id); .eq("job_id", job.id);
job.moveToCompleted(null); // returnvalue if (error) throw new Error(error.message);
await job.moveToCompleted(null);
} else { } else {
job.moveToCompleted(result); // returnvalue await job.moveToCompleted(result);
}
} catch (error) {
console.error("Failed to update job status:", error);
} }
} }

View File

@@ -6,10 +6,9 @@ import "dotenv/config";
export async function logJob(job: FirecrawlJob) { export async function logJob(job: FirecrawlJob) {
try { try {
// Only log jobs in production if (!process.env.USE_DB_AUTHENTICATION) {
// if (process.env.ENV !== "production") { return;
// return; }
// }
const { data, error } = await supabase_service const { data, error } = await supabase_service
.from("firecrawl_jobs") .from("firecrawl_jobs")