Nick: fixes

Nicolas 2025-02-19 15:21:52 -03:00
parent a60f3ff645
commit f5de803a9d
2 changed files with 34 additions and 2 deletions

@@ -9,6 +9,8 @@ import { z } from "zod";
 import { scrapeDocument } from "../extract/document-scraper";
 import { PlanType } from "../../types";
 import { getLlmsTextFromCache, saveLlmsTextToCache } from "./generate-llmstxt-supabase";
+import { billTeam } from "../../services/billing/credit_billing";
+import { logJob } from "../../services/logging/log_job";

 interface GenerateLLMsTextServiceOptions {
   generationId: string;
@@ -17,6 +19,7 @@ interface GenerateLLMsTextServiceOptions {
   url: string;
   maxUrls: number;
   showFullText: boolean;
+  subId?: string;
 }
@@ -27,8 +30,8 @@ const DescriptionSchema = z.object({
 export async function performGenerateLlmsTxt(options: GenerateLLMsTextServiceOptions) {
   const openai = new OpenAI();
-  const { generationId, teamId, plan, url, maxUrls, showFullText } = options;
+  const { generationId, teamId, plan, url, maxUrls, showFullText, subId } = options;
+  const startTime = Date.now();
   const logger = _logger.child({
     module: "generate-llmstxt",
     method: "performGenerateLlmsTxt",
@@ -152,6 +155,33 @@ export async function performGenerateLlmsTxt(options: GenerateLLMsTextServiceOptions) {
     showFullText: showFullText,
   });

+  // Log job with token usage and sources
+  await logJob({
+    job_id: generationId,
+    success: true,
+    message: "LLMs text generation completed",
+    num_docs: urls.length,
+    docs: [{ llmstxt: llmstxt, llmsfulltxt: llmsFulltxt }],
+    time_taken: (Date.now() - startTime) / 1000,
+    team_id: teamId,
+    mode: "llmstxt",
+    url: url,
+    scrapeOptions: options,
+    origin: "api",
+    num_tokens: 0,
+    tokens_billed: 0,
+    sources: {},
+  });
+
+  // Bill team for usage
+  billTeam(teamId, subId, urls.length, logger).catch(
+    (error) => {
+      logger.error(
+        `Failed to bill team ${teamId} for ${urls.length} urls`, { teamId, count: urls.length, error },
+      );
+    },
+  );
+
   return {
     success: true,
     data: {
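
A note on the billing call added above: it is deliberately not awaited, so a billing failure is logged but cannot fail the generation itself. Below is a minimal, self-contained sketch of that fire-and-forget pattern; the stubbed billTeam, the console-backed logger, and finishGeneration are stand-ins invented for illustration, not the repository's implementations.

// Sketch only: hypothetical stand-ins for the real billing service and logger.
type Logger = { error: (msg: string, meta?: Record<string, unknown>) => void };

async function billTeam(
  teamId: string,
  subId: string | undefined,
  credits: number,
  _logger: Logger,
): Promise<void> {
  // Imagine a network call to the billing backend here; it may reject.
  console.log(`would bill team ${teamId} (sub ${subId ?? "none"}) for ${credits} credits`);
}

const logger: Logger = { error: (msg, meta) => console.error(msg, meta) };

function finishGeneration(teamId: string, subId: string | undefined, urlCount: number) {
  // Fire-and-forget: no await, so the generation result does not depend on billing.
  billTeam(teamId, subId, urlCount, logger).catch((error) => {
    logger.error(`Failed to bill team ${teamId} for ${urlCount} urls`, {
      teamId,
      count: urlCount,
      error,
    });
  });

  // Return immediately, regardless of how the billing promise settles.
  return { success: true as const };
}

Because the promise is detached, any rejection has to be handled in the .catch above; without it, Node would surface an unhandled promise rejection.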

@@ -405,6 +405,7 @@ const processDeepResearchJobInternal = async (
     topic: job.data.request.topic,
     maxDepth: job.data.request.maxDepth,
     timeLimit: job.data.request.timeLimit,
+    subId: job.data.subId,
   });

   if(result.success) {
@@ -474,6 +475,7 @@ const processGenerateLlmsTxtJobInternal = async (
     url: job.data.request.url,
     maxUrls: job.data.request.maxUrls,
     showFullText: job.data.request.showFullText,
+    subId: job.data.subId,
   });

   if (result.success) {
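
For context, here is a rough end-to-end sketch of how a worker handler can thread the subscription id from the queued job's data into the service options, mirroring the one-line changes above; the job shape, the PlanType alias, and handleGenerateLlmsTxtJob are assumptions made up for illustration, not the actual queue-worker code.

// Hypothetical, simplified job payload; the real job data carries more fields.
type PlanType = string;

interface GenerateLlmsTxtJobData {
  generationId: string;
  teamId: string;
  plan: PlanType;
  subId?: string; // subscription id, now forwarded so the service can bill against it
  request: { url: string; maxUrls: number; showFullText: boolean };
}

type PerformGenerateLlmsTxt = (options: {
  generationId: string;
  teamId: string;
  plan: PlanType;
  url: string;
  maxUrls: number;
  showFullText: boolean;
  subId?: string;
}) => Promise<{ success: boolean }>;

async function handleGenerateLlmsTxtJob(
  data: GenerateLlmsTxtJobData,
  performGenerateLlmsTxt: PerformGenerateLlmsTxt,
) {
  const result = await performGenerateLlmsTxt({
    generationId: data.generationId,
    teamId: data.teamId,
    plan: data.plan,
    url: data.request.url,
    maxUrls: data.request.maxUrls,
    showFullText: data.request.showFullText,
    subId: data.subId, // the one-line change in this file: pass subId through
  });

  if (result.success) {
    // ...mark the generation as completed, as the existing worker code does
  }
  return result;
}

Since subId stays optional, jobs queued before this change still process; the service simply receives undefined and passes it on to billing.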