Mirror of https://git.mirrors.martin98.com/https://github.com/mendableai/firecrawl, synced 2025-08-13 04:29:00 +08:00
fix(queue-worker): errored job logging
parent 97bf54214f
commit 2849ce2f13
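In the errored-job path of processJob, logJob previously ran inside the if (job.data.crawl_id) block, so failed jobs that were not part of a crawl never reached the DB log. This change logs the job first, unconditionally, and only then performs the crawl bookkeeping (addCrawlJobDone, removal from the visited_unique set, finishCrawlIfNeeded). Since the stored crawl is no longer loaded before logJob, the log entry now reads crawlerOptions from job.data instead of sc. The long-commented-out callWebhook block is deleted along the way. A sketch of the resulting flow follows the diff below.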
@@ -989,26 +989,6 @@ async function processJob(job: Job & { id: string }, token: string) {
         job.data.crawlerOptions !== null ? "crawl.page" : "batch_scrape.page",
       );
     }
-    // if (job.data.v1) {
-    //   callWebhook(
-    //     job.data.team_id,
-    //     job.id as string,
-    //     [],
-    //     job.data.webhook,
-    //     job.data.v1,
-    //     "crawl.failed"
-    //   );
-    // }
-
-    if (job.data.crawl_id) {
-      const sc = (await getCrawl(job.data.crawl_id)) as StoredCrawl;
-
-      logger.debug("Declaring job as done...");
-      await addCrawlJobDone(job.data.crawl_id, job.id, false);
-      await redisConnection.srem(
-        "crawl:" + job.data.crawl_id + ":visited_unique",
-        normalizeURL(job.data.url, sc),
-      );
 
       logger.debug("Logging job to DB...");
       await logJob(
@@ -1026,7 +1006,7 @@ async function processJob(job: Job & { id: string }, token: string) {
         team_id: job.data.team_id,
         mode: job.data.mode,
         url: job.data.url,
-        crawlerOptions: sc.crawlerOptions,
+        crawlerOptions: job.data.crawlerOptions,
         scrapeOptions: job.data.scrapeOptions,
         origin: job.data.origin,
         crawl_id: job.data.crawl_id,
@@ -1034,6 +1014,16 @@ async function processJob(job: Job & { id: string }, token: string) {
         true,
       );
 
+    if (job.data.crawl_id) {
+      const sc = (await getCrawl(job.data.crawl_id)) as StoredCrawl;
+
+      logger.debug("Declaring job as done...");
+      await addCrawlJobDone(job.data.crawl_id, job.id, false);
+      await redisConnection.srem(
+        "crawl:" + job.data.crawl_id + ":visited_unique",
+        normalizeURL(job.data.url, sc),
+      );
+
       await finishCrawlIfNeeded(job, sc);
 
       // await logJob({
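For orientation, here is a minimal TypeScript sketch of the errored-job path as it stands after this commit. The ordering and the logJob fields are taken from the diff; everything else is assumed: the stubs for logger, redisConnection, logJob, getCrawl, addCrawlJobDone, normalizeURL, and finishCrawlIfNeeded are no-op stand-ins for firecrawl's real imports, the Job/StoredCrawl types are trimmed to the fields the diff touches, and onJobErrored is a hypothetical extraction of this branch of processJob.

// Hypothetical stand-ins for firecrawl's real imports; only the ordering
// below and the source of crawlerOptions are taken from the diff.
type StoredCrawl = { crawlerOptions: unknown };
type JobData = {
  team_id: string;
  mode: string;
  url: string;
  crawlerOptions: unknown;
  scrapeOptions: unknown;
  origin: string;
  crawl_id?: string;
};
type Job = { id: string; data: JobData };

const logger = { debug: (msg: string) => console.log(msg) };
const redisConnection = { srem: async (_key: string, _member: string) => 1 };
const logJob = async (_entry: Record<string, unknown>, _force?: boolean) => {};
const getCrawl = async (_id: string): Promise<StoredCrawl | null> => null;
const addCrawlJobDone = async (_id: string, _jobId: string, _success: boolean) => {};
const normalizeURL = (url: string, _sc: StoredCrawl) => url;
const finishCrawlIfNeeded = async (_job: Job, _sc: StoredCrawl) => {};

// Hypothetical extraction of the errored-job branch of processJob.
async function onJobErrored(job: Job): Promise<void> {
  // 1. Log the failed job first, for crawl and non-crawl jobs alike.
  //    crawlerOptions comes from the job payload: the stored crawl (sc)
  //    has not been fetched yet, so the old sc.crawlerOptions read is
  //    no longer possible at this point.
  logger.debug("Logging job to DB...");
  await logJob(
    {
      team_id: job.data.team_id,
      mode: job.data.mode,
      url: job.data.url,
      crawlerOptions: job.data.crawlerOptions,
      scrapeOptions: job.data.scrapeOptions,
      origin: job.data.origin,
      crawl_id: job.data.crawl_id,
    },
    true,
  );

  // 2. Only then, and only for crawl jobs: mark the job done (the false
  //    presumably flags the failure), remove its normalized URL from the
  //    crawl's visited_unique set, and let finishCrawlIfNeeded close out
  //    the crawl if nothing is left.
  if (job.data.crawl_id) {
    const sc = (await getCrawl(job.data.crawl_id)) as StoredCrawl;

    logger.debug("Declaring job as done...");
    await addCrawlJobDone(job.data.crawl_id, job.id, false);
    await redisConnection.srem(
      "crawl:" + job.data.crawl_id + ":visited_unique",
      normalizeURL(job.data.url, sc),
    );

    await finishCrawlIfNeeded(job, sc);
  }
}

The reordering is the whole fix: logging no longer depends on job.data.crawl_id, and the crawlerOptions change in the second hunk falls out of it.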