fix(queue-worker/billing): fix crawl overbilling

parent db89e365eb
commit ce2f6ff884
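Summary, as reconstructed from the hunks below: the per-document billing that previously ran inside runWebScraper is removed, and an equivalent block is added to the queue worker's processJob (with billTeam newly imported there). A crawl's documents therefore appear to be billed once per completed job rather than once per scraper invocation, which is presumably the overbilling the commit title refers to.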
@@ -168,26 +168,6 @@ export async function runWebScraper({
   }
 
   if (error === undefined && response?.success) {
-    if (is_scrape === false) {
-      let creditsToBeBilled = 1; // Assuming 1 credit per document
-      if (scrapeOptions.extract) {
-        creditsToBeBilled = 5;
-      }
-
-      // If the team is the background index team, return the response
-      if (team_id === process.env.BACKGROUND_INDEX_TEAM_ID!) {
-        return response;
-      }
-
-      billTeam(team_id, undefined, creditsToBeBilled, logger).catch((error) => {
-        logger.error(
-          `Failed to bill team ${team_id} for ${creditsToBeBilled} credits`,
-          { error },
-        );
-        // Optionally, you could notify an admin or add to a retry queue here
-      });
-    }
-
     return response;
   } else {
     if (response !== undefined) {
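Note: the block removed above is the old per-response billing inside runWebScraper. It also returned early for the BACKGROUND_INDEX_TEAM_ID team; the replacement hunk at the end keeps that exemption, but as a plain guard around billTeam rather than an early return. A standalone sketch of the relocated step follows the last hunk.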
@@ -59,6 +59,7 @@ import { performExtraction } from "../lib/extract/extraction-service";
 import { supabase_service } from "../services/supabase";
 import { normalizeUrl, normalizeUrlOnlyHostname } from "../lib/canonical-url";
 import { saveExtract, updateExtract } from "../lib/extract/extract-redis";
+import { billTeam } from "./billing/credit_billing";
 
 configDotenv();
 
@@ -754,7 +755,6 @@ async function processJob(job: Job & { id: string }, token: string) {
     ]);
 
     if (!pipeline.success) {
-      // TODO: let's Not do this
       throw pipeline.error;
     }
 
@@ -961,6 +961,23 @@ async function processJob(job: Job & { id: string }, token: string) {
       indexJob(job, doc);
     }
 
+    if (job.data.is_scrape !== true) {
+      let creditsToBeBilled = 1; // Assuming 1 credit per document
+      if (job.data.scrapeOptions.extract) {
+        creditsToBeBilled = 5;
+      }
+
+      if (job.data.team_id !== process.env.BACKGROUND_INDEX_TEAM_ID!) {
+        billTeam(job.data.team_id, undefined, creditsToBeBilled, logger).catch((error) => {
+          logger.error(
+            `Failed to bill team ${job.data.team_id} for ${creditsToBeBilled} credits`,
+            { error },
+          );
+          // Optionally, you could notify an admin or add to a retry queue here
+        });
+      }
+    }
+
     logger.info(`🐂 Job done ${job.id}`);
     return data;
   } catch (error) {
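For readers skimming the diff, here is the added billing step pulled out as a standalone sketch. It is not code from the repository: the billCompletedJob name and the Logger/BillTeamFn shapes are assumptions made so the snippet compiles on its own; only the guard conditions, the credit amounts, and the fire-and-forget billTeam(...).catch(...) pattern come from the hunk above.

// billCompletedJob: a minimal, self-contained sketch of the billing step the
// last hunk adds to processJob. The type shapes and the function name are
// assumptions for illustration; in the repo, billTeam comes from
// "./billing/credit_billing" and the logger from the worker's job context.
type Logger = { error: (message: string, meta?: Record<string, unknown>) => void };
type BillTeamFn = (
  teamId: string,
  subscriptionId: string | undefined,
  credits: number,
  logger?: Logger,
) => Promise<unknown>;

function billCompletedJob(
  jobData: { is_scrape?: boolean; team_id: string; scrapeOptions: { extract?: unknown } },
  billTeam: BillTeamFn,
  logger: Logger,
  backgroundIndexTeamId: string | undefined,
): void {
  // Mirrors the is_scrape guard in the hunk: single-scrape jobs are skipped here
  // (those requests are presumably billed elsewhere).
  if (jobData.is_scrape === true) return;

  // 1 credit per document, 5 when extraction was requested for it.
  const creditsToBeBilled = jobData.scrapeOptions.extract ? 5 : 1;

  // The internal background-index team is never billed.
  if (jobData.team_id === backgroundIndexTeamId) return;

  // Fire-and-forget: a billing failure is logged but must not fail the job.
  billTeam(jobData.team_id, undefined, creditsToBeBilled, logger).catch((error) => {
    logger.error(
      `Failed to bill team ${jobData.team_id} for ${creditsToBeBilled} credits`,
      { error },
    );
  });
}

Placed at the end of processJob, after indexing and just before the job returns its data, this step runs once per successfully processed job, which appears to be the property the commit relies on to stop overbilling crawls.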