Logging num tokens
This commit is contained in:
parent 769f08c10d
commit 3de4997f4d
@@ -197,51 +197,6 @@ export async function extractController(
       // Optionally, you could notify an admin or add to a retry queue here
     });
 
-  // if (!req.body.formats.includes("rawHtml")) {
-  //   if (doc && doc.rawHtml) {
-  //     delete doc.rawHtml;
-  //   }
-  // }
-
-
-
-
-
-  // billTeam(teamId, subId, 1).catch((error) => {
-  //   logger.error(
-  //     `Failed to bill team ${teamId} for 1 credit: ${error}`
-  //   );
-  // });
-
-  // const linksToReturn = links.slice(0, limit);
-
-  // logJob({
-  //   job_id: id,
-  //   success: links.length > 0,
-  //   message: "Extract completed",
-  //   num_docs: linksToReturn.length,
-  //   docs: linksToReturn,
-  //   time_taken: (new Date().getTime() - Date.now()) / 1000,
-  //   team_id: teamId,
-  //   mode: "extract",
-  //   url: urls[0],
-  //   crawlerOptions: {},
-  //   scrapeOptions: {},
-  //   origin: origin ?? "api",
-  //   num_tokens: 0,
-  // });
-
-  // return {
-
-  // };
-
-
-
-  // const response = {
-  //   success: true as const,
-  //   data: result.data,
-  //   scrape_id: result.scrape_id
-  // };
 
   console.log("completions.extract", completions.extract);
 
@@ -264,9 +219,9 @@ export async function extractController(
     url: req.body.urls.join(", "),
     scrapeOptions: req.body,
     origin: req.body.origin ?? "api",
-    num_tokens: 0, // TODO: fix
+    num_tokens: completions.numTokens ?? 0
   });
 
   return res.status(200).json({
     success: true,
     data: data,
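Taken together, the two extractController hunks replace the hard-coded num_tokens: 0 (previously marked "TODO: fix") with the count now reported by generateOpenAICompletions. The sketch below shows that data flow in isolation; the stub functions, type names, and field values are placeholders rather than the repository's actual code, and only the { extract, numTokens, warning } result shape and the num_tokens log field come from the diff.

// Self-contained sketch of the data flow this commit sets up.
// The stubs stand in for generateOpenAICompletions and logJob.
type CompletionResult = {
  extract: any;
  numTokens: number;
  warning: string | undefined;
};

// Stub standing in for generateOpenAICompletions (placeholder logic).
async function generateCompletionsStub(markdown: string): Promise<CompletionResult> {
  return {
    extract: { title: "example" },
    numTokens: Math.ceil(markdown.length / 4), // rough placeholder count
    warning: undefined,
  };
}

// Stub standing in for logJob: just prints the record it would persist.
async function logJobStub(record: Record<string, unknown>): Promise<void> {
  console.log("logJob", record);
}

async function main() {
  const completions = await generateCompletionsStub("# Some scraped markdown");
  console.log("completions.extract", completions.extract);

  await logJobStub({
    mode: "extract",
    origin: "api",
    // Previously hard-coded to 0; now taken from the completion result,
    // falling back to 0 if the count is absent.
    num_tokens: completions.numTokens ?? 0,
  });
}

main();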
@@ -58,7 +58,7 @@ function normalizeSchema(x: any): any {
   }
 }
 
-export async function generateOpenAICompletions(logger: Logger, options: ExtractOptions, markdown?: string, previousWarning?: string): Promise<{ extract: any, warning: string | undefined }> {
+export async function generateOpenAICompletions(logger: Logger, options: ExtractOptions, markdown?: string, previousWarning?: string): Promise<{ extract: any, numTokens: number, warning: string | undefined }> {
   let extract: any;
   let warning: string | undefined;
 
@@ -170,7 +170,7 @@ export async function generateOpenAICompletions(logger: Logger, options: Extract
   if (options.schema && options.schema.type === "array" && !schema?.required?.includes("items")) {
     extract = extract?.items;
   }
-  return { extract, warning };
+  return { extract, warning, numTokens };
 }
 
 export async function performLLMExtract(meta: Meta, document: Document): Promise<Document> {
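The two llmExtract hunks widen the return type of generateOpenAICompletions to include numTokens, but the lines that compute numTokens are not part of this diff. Below is a hedged sketch of one place such a count could come from, assuming it is read from the usage field of the OpenAI chat completion response; the repository may instead count tokens over the markdown input with a tokenizer. The function name, model name, and prompt handling are placeholders.

// Sketch only: illustrates one way a numTokens value could be produced
// and returned alongside the extract, matching the new
// { extract, numTokens, warning } shape from the diff.
import OpenAI from "openai";

const openai = new OpenAI(); // reads OPENAI_API_KEY from the environment

async function extractWithTokenCount(
  prompt: string,
): Promise<{ extract: any; numTokens: number; warning: string | undefined }> {
  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini", // placeholder model name
    messages: [
      { role: "system", content: "Extract the requested fields and respond with a JSON object." },
      { role: "user", content: prompt },
    ],
    response_format: { type: "json_object" },
  });

  // The chat completions API reports token usage on the response object.
  const numTokens = completion.usage?.prompt_tokens ?? 0;

  let extract: any;
  let warning: string | undefined;
  try {
    extract = JSON.parse(completion.choices[0].message.content ?? "{}");
  } catch {
    warning = "Model did not return valid JSON.";
  }

  return { extract, numTokens, warning };
}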