Mirror of https://git.mirrors.martin98.com/https://github.com/mendableai/firecrawl (synced 2025-08-14 05:15:56 +08:00)

Commit 4003d37fbc ("Nick:")
Parent: d872bf0c4c
@@ -1,19 +1,27 @@
-import { ExtractorOptions, PageOptions } from './../../lib/entities';
+import { ExtractorOptions, PageOptions } from "./../../lib/entities";
 import { Request, Response } from "express";
-import { billTeam, checkTeamCredits } from "../../services/billing/credit_billing";
+import {
+  billTeam,
+  checkTeamCredits,
+} from "../../services/billing/credit_billing";
 import { authenticateUser } from "../auth";
 import { PlanType, RateLimiterMode } from "../../types";
 import { logJob } from "../../services/logging/log_job";
 import { Document } from "../../lib/entities";
 import { isUrlBlocked } from "../../scraper/WebScraper/utils/blocklist"; // Import the isUrlBlocked function
-import { numTokensFromString } from '../../lib/LLM-extraction/helpers';
-import { defaultPageOptions, defaultExtractorOptions, defaultTimeout, defaultOrigin } from '../../lib/default-values';
-import { addScrapeJob, waitForJob } from '../../services/queue-jobs';
-import { getScrapeQueue } from '../../services/queue-service';
+import { numTokensFromString } from "../../lib/LLM-extraction/helpers";
+import {
+  defaultPageOptions,
+  defaultExtractorOptions,
+  defaultTimeout,
+  defaultOrigin,
+} from "../../lib/default-values";
+import { addScrapeJob, waitForJob } from "../../services/queue-jobs";
+import { getScrapeQueue } from "../../services/queue-service";
 import { v4 as uuidv4 } from "uuid";
-import { Logger } from '../../lib/logger';
+import { Logger } from "../../lib/logger";
 import * as Sentry from "@sentry/node";
-import { getJobPriority } from '../../lib/job-priority';
+import { getJobPriority } from "../../lib/job-priority";

 export async function scrapeHelper(
   jobId: string,
@@ -36,12 +44,18 @@ export async function scrapeHelper(
   }

   if (isUrlBlocked(url)) {
-    return { success: false, error: "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.", returnCode: 403 };
+    return {
+      success: false,
+      error:
+        "Firecrawl currently does not support social media scraping due to policy restrictions. We're actively working on building support for it.",
+      returnCode: 403,
+    };
   }

-  const jobPriority = await getJobPriority({plan, team_id, basePriority: 10})
+  const jobPriority = await getJobPriority({ plan, team_id, basePriority: 10 });

-  const job = await addScrapeJob({
+  const job = await addScrapeJob(
+    {
       url,
       mode: "single_urls",
       crawlerOptions,
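Note: this hunk is where the plan-aware priority comes in: `getJobPriority({ plan, team_id, basePriority: 10 })` from ../../lib/job-priority is awaited before the scrape job is enqueued. The diff only shows the call sites, so the following is a minimal sketch of what such a helper could look like, assuming BullMQ semantics (a lower priority number is served first); the plan names and offsets are illustrative, not the repository's actual implementation.

// Hypothetical sketch, not the code from this commit.
type PlanTypeSketch = "free" | "hobby" | "standard" | "scale"; // illustrative plan names

async function getJobPrioritySketch(args: {
  plan: PlanTypeSketch;
  team_id: string;
  basePriority: number;
}): Promise<number> {
  // Paid plans could be nudged ahead of the base priority and free plans behind it;
  // in BullMQ a lower number means the job is picked up sooner.
  const offsetByPlan: Record<PlanTypeSketch, number> = {
    scale: -5,
    standard: -2,
    hobby: 0,
    free: 5,
  };
  return args.basePriority + (offsetByPlan[args.plan] ?? 0);
}

Called as in the hunk above: `await getJobPrioritySketch({ plan, team_id, basePriority: 10 })`.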
@@ -49,11 +63,21 @@ export async function scrapeHelper(
       pageOptions,
       extractorOptions,
       origin: req.body.origin ?? defaultOrigin,
-  }, {}, jobId, jobPriority);
+    },
+    {},
+    jobId,
+    jobPriority
+  );

   let doc;

-  const err = await Sentry.startSpan({ name: "Wait for job to finish", op: "bullmq.wait", attributes: { job: jobId } }, async (span) => {
+  const err = await Sentry.startSpan(
+    {
+      name: "Wait for job to finish",
+      op: "bullmq.wait",
+      attributes: { job: jobId },
+    },
+    async (span) => {
       try {
         doc = (await waitForJob(job.id, timeout))[0];
       } catch (e) {
@@ -63,8 +87,15 @@ export async function scrapeHelper(
             success: false,
             error: "Request timed out",
             returnCode: 408,
-          }
+          };
-        } else if (typeof e === "string" && (e.includes("Error generating completions: ") || e.includes("Invalid schema for function") || e.includes("LLM extraction did not match the extraction schema you provided."))) {
+        } else if (
+          typeof e === "string" &&
+          (e.includes("Error generating completions: ") ||
+            e.includes("Invalid schema for function") ||
+            e.includes(
+              "LLM extraction did not match the extraction schema you provided."
+            ))
+        ) {
           return {
             success: false,
             error: e,
@@ -76,7 +107,8 @@ export async function scrapeHelper(
       }
       span.setAttribute("result", JSON.stringify(doc));
       return null;
-  });
+    }
+  );

   if (err !== null) {
     return err;
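Note: the reformatted `Sentry.startSpan` call makes the tracing pattern easier to follow: one span (`op: "bullmq.wait"`, with the job id as an attribute) covers the whole wait on the queue job, the result is recorded on the span, and the callback's return value doubles as the error channel that `if (err !== null)` checks right below. A self-contained sketch of that pattern, assuming the @sentry/node v8-style `startSpan(options, callback)` API; `doWork` is a placeholder for the job wait.

import * as Sentry from "@sentry/node";

// Sketch of the span-wrapping pattern above; doWork stands in for waitForJob.
async function tracedWait(jobId: string, doWork: () => Promise<string>) {
  const err = await Sentry.startSpan(
    { name: "Wait for job to finish", op: "bullmq.wait", attributes: { job: jobId } },
    async (span) => {
      try {
        const result = await doWork();
        span.setAttribute("result", JSON.stringify(result));
        return null; // success: nothing for the caller to report
      } catch (e) {
        return { success: false, error: String(e), returnCode: 500 }; // caller returns this directly
      }
    }
  );
  return err;
}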
@@ -86,14 +118,22 @@ export async function scrapeHelper(

   if (!doc) {
     console.error("!!! PANIC DOC IS", doc, job);
-    return { success: true, error: "No page found", returnCode: 200, data: doc };
+    return {
+      success: true,
+      error: "No page found",
+      returnCode: 200,
+      data: doc,
+    };
   }

   delete doc.index;
   delete doc.provider;

   // Remove rawHtml if pageOptions.rawHtml is false and extractorOptions.mode is llm-extraction-from-raw-html
-  if (!pageOptions.includeRawHtml && extractorOptions.mode == "llm-extraction-from-raw-html") {
+  if (
+    !pageOptions.includeRawHtml &&
+    extractorOptions.mode == "llm-extraction-from-raw-html"
+  ) {
     if (doc.rawHtml) {
       delete doc.rawHtml;
     }
@@ -127,13 +167,24 @@ export async function scrapeController(req: Request, res: Response) {

   const crawlerOptions = req.body.crawlerOptions ?? {};
   const pageOptions = { ...defaultPageOptions, ...req.body.pageOptions };
-  const extractorOptions = { ...defaultExtractorOptions, ...req.body.extractorOptions };
+  const extractorOptions = {
+    ...defaultExtractorOptions,
+    ...req.body.extractorOptions,
+  };
   const origin = req.body.origin ?? defaultOrigin;
   let timeout = req.body.timeout ?? defaultTimeout;

   if (extractorOptions.mode.includes("llm-extraction")) {
-    if (typeof extractorOptions.extractionSchema !== "object" || extractorOptions.extractionSchema === null) {
-      return res.status(400).json({ error: "extractorOptions.extractionSchema must be an object if llm-extraction mode is specified" });
+    if (
+      typeof extractorOptions.extractionSchema !== "object" ||
+      extractorOptions.extractionSchema === null
+    ) {
+      return res
+        .status(400)
+        .json({
+          error:
+            "extractorOptions.extractionSchema must be an object if llm-extraction mode is specified",
+        });
     }

     pageOptions.onlyMainContent = true;
@@ -142,7 +193,8 @@ export async function scrapeController(req: Request, res: Response) {

   // checkCredits
   try {
-    const { success: creditsCheckSuccess, message: creditsCheckMessage } = await checkTeamCredits(team_id, 1);
+    const { success: creditsCheckSuccess, message: creditsCheckMessage } =
+      await checkTeamCredits(team_id, 1);
     if (!creditsCheckSuccess) {
       earlyReturn = true;
       return res.status(402).json({ error: "Insufficient credits" });
@@ -150,7 +202,12 @@ export async function scrapeController(req: Request, res: Response) {
   } catch (error) {
     Logger.error(error);
     earlyReturn = true;
-    return res.status(500).json({ error: "Error checking team credits. Please contact hello@firecrawl.com for help." });
+    return res
+      .status(500)
+      .json({
+        error:
+          "Error checking team credits. Please contact hello@firecrawl.com for help.",
+      });
   }

   const jobId = uuidv4();
@@ -168,7 +225,10 @@ export async function scrapeController(req: Request, res: Response) {
   );
   const endTime = new Date().getTime();
   const timeTakenInSeconds = (endTime - startTime) / 1000;
-  const numTokens = (result.data && result.data.markdown) ? numTokensFromString(result.data.markdown, "gpt-3.5-turbo") : 0;
+  const numTokens =
+    result.data && result.data.markdown
+      ? numTokensFromString(result.data.markdown, "gpt-3.5-turbo")
+      : 0;

   if (result.success) {
     let creditsToBeBilled = 0; // billing for doc done on queue end
@@ -185,14 +245,12 @@ export async function scrapeController(req: Request, res: Response) {
       // Don't bill if we're early returning
       return;
     }
-    const billingResult = await billTeam(
-      team_id,
-      creditsToBeBilled
-    );
+    const billingResult = await billTeam(team_id, creditsToBeBilled);
     if (!billingResult.success) {
       return res.status(402).json({
         success: false,
-        error: "Failed to bill team. Insufficient credits or subscription not found.",
+        error:
+          "Failed to bill team. Insufficient credits or subscription not found.",
       });
     }
   }
@@ -214,12 +272,17 @@ export async function scrapeController(req: Request, res: Response) {
       num_tokens: numTokens,
     });

-
-
     return res.status(result.returnCode).json(result);
   } catch (error) {
     Sentry.captureException(error);
     Logger.error(error);
-    return res.status(500).json({ error: typeof error === "string" ? error : (error?.message ?? "Internal Server Error") });
+    return res
+      .status(500)
+      .json({
+        error:
+          typeof error === "string"
+            ? error
+            : error?.message ?? "Internal Server Error",
+      });
   }
 }
@@ -21,6 +21,7 @@ import { logCrawl } from "../../services/logging/crawl_log";
 import { getScrapeQueue } from "../../services/queue-service";
 import { addScrapeJob } from "../../services/queue-jobs";
 import { Logger } from "../../lib/logger";
+import { getJobPriority } from "../../lib/job-priority";

 export async function crawlController(
   req: RequestWithAuth<{}, CrawlResponse, CrawlRequest>,
@@ -66,6 +67,7 @@ export async function crawlController(
     pageOptions,
     team_id: req.auth.team_id,
     createdAt: Date.now(),
+    plan: req.auth.plan,
   };

   const crawler = crawlToCrawler(id, sc);
@@ -86,7 +88,14 @@ export async function crawlController(
       ? null
       : await crawler.tryGetSitemap();

-  if (sitemap !== null) {
+  if (sitemap !== null && sitemap.length > 0) {
+    let jobPriority = 20;
+    // If it is over 1000, we need to get the job priority,
+    // otherwise we can use the default priority of 20
+    if(sitemap.length > 1000){
+      // set base to 21
+      jobPriority = await getJobPriority({plan: req.auth.plan, team_id: req.auth.team_id, basePriority: 21})
+    }
     const jobs = sitemap.map((x) => {
       const url = x.url;
       const uuid = uuidv4();
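Note: the new branch in crawlController encodes a simple scheduling rule: sitemap-driven crawls keep the fixed BullMQ priority of 20, but once a sitemap exceeds 1000 URLs the priority is computed per plan from a worse base of 21, so very large crawls queue behind ordinary ones (in BullMQ a smaller number is served first). A standalone sketch of that decision, treating `getJobPriority` as an opaque async helper as imported above:

// Sketch of the priority rule introduced in this hunk; getJobPriority is assumed
// to behave like ../../lib/job-priority and return a BullMQ priority number.
async function priorityForSitemapCrawl(
  sitemapLength: number,
  plan: string,
  team_id: string,
  getJobPriority: (args: { plan: string; team_id: string; basePriority: number }) => Promise<number>
): Promise<number> {
  if (sitemapLength > 1000) {
    // Large crawls start from base 21, i.e. strictly behind the default of 20.
    return getJobPriority({ plan, team_id, basePriority: 21 });
  }
  return 20; // default priority for ordinary sitemap crawls
}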
@@ -39,6 +39,7 @@ export async function mapController(
     pageOptions: {},
     team_id: req.auth.team_id,
     createdAt: Date.now(),
+    plan: req.auth.plan,
   };

   const crawler = crawlToCrawler(id, sc);
@@ -6,6 +6,8 @@ import { v4 as uuidv4 } from 'uuid';
 import { numTokensFromString } from "../../lib/LLM-extraction/helpers";
 import { addScrapeJob, waitForJob } from "../../services/queue-jobs";
 import { logJob } from "../../services/logging/log_job";
+import { getJobPriority } from "../../lib/job-priority";
+import { PlanType } from "../../types";

 export async function scrapeController(req: RequestWithAuth<{}, ScrapeResponse, ScrapeRequest>, res: Response<ScrapeResponse>) {
   req.body = scrapeRequestSchema.parse(req.body);
@@ -17,6 +19,8 @@ export async function scrapeController(req: RequestWithAuth<{}, ScrapeResponse,
   const jobId = uuidv4();

   const startTime = new Date().getTime();
+  const jobPriority = await getJobPriority({plan: req.auth.plan as PlanType, team_id: req.auth.team_id, basePriority: 10})
+
   const job = await addScrapeJob({
     url: req.body.url,
     mode: "single_urls",
@@ -25,7 +29,7 @@ export async function scrapeController(req: RequestWithAuth<{}, ScrapeResponse,
     pageOptions,
     extractorOptions: {},
     origin: req.body.origin,
-  }, {}, jobId);
+  }, {}, jobId, jobPriority);

   let doc: any | undefined;
   try {
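Note: in the v1 scrape controller the computed `jobPriority` is now passed to `addScrapeJob` as its fourth argument, alongside the job id, presumably so the queue layer can forward it to BullMQ's per-job priority option. A hypothetical sketch of how such a helper could forward it; the queue name, connection settings, and function body are assumptions, not the actual queue-jobs implementation:

import { Queue } from "bullmq";

// Hypothetical forwarding of the priority argument to BullMQ (assumed details).
const scrapeQueue = new Queue("scrapeQueue", {
  connection: { host: "localhost", port: 6379 }, // placeholder Redis connection
});

async function addScrapeJobSketch(
  webScraperOptions: Record<string, unknown>,
  options: Record<string, unknown>,
  jobId: string,
  jobPriority: number = 10
) {
  return scrapeQueue.add("scrape", webScraperOptions, {
    ...options,
    jobId, // stable id so callers can wait on this specific job
    priority: jobPriority, // BullMQ: lower number = picked up sooner
  });
}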
@@ -3,6 +3,7 @@ import { z } from "zod";
 import { isUrlBlocked } from "../../scraper/WebScraper/utils/blocklist";
 import { PageOptions } from "../../lib/entities";
 import { protocolIncluded, checkUrl } from "../../lib/validateUrl";
+import { PlanType } from "../../types";

 export type Format =
   | "markdown"
@@ -229,7 +230,7 @@ export type CrawlStatusResponse =

 type AuthObject = {
   team_id: string;
-  plan: string;
+  plan: PlanType;
 };

 type Account = {
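Note: narrowing `plan` on `AuthObject` from `string` to `PlanType` turns a misspelled or unknown plan name into a compile-time error instead of a silent mismatch at the `getJobPriority` call sites. Illustration only: the union members below are placeholders, not the actual `PlanType` from ../../types.

// Placeholder union for illustration; the real PlanType lives in ../../types.
type PlanTypeExample = "free" | "hobby" | "standard" | "scale";

type AuthObjectExample = { team_id: string; plan: PlanTypeExample };

const ok: AuthObjectExample = { team_id: "t1", plan: "standard" };
// const bad: AuthObjectExample = { team_id: "t1", plan: "standrad" }; // error: not assignable to PlanTypeExample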