feat(tests/snips): add billing tests + misc billing fixes (FIR-1280) (#1283)

* feat(tests/snips): add billing tests + misc billing fixes

* add testing key

* asd
Gergő Móricz 2025-03-02 20:51:42 +01:00 committed by GitHub
parent 4f25f12a12
commit 9ad947884d
13 changed files with 502 additions and 235 deletions

View File

@@ -75,10 +75,14 @@ jobs:
run: npm start &
working-directory: ./apps/api
id: start_app
- name: Start workers
- name: Start worker
run: npm run workers &
working-directory: ./apps/api
id: start_workers
- name: Start index worker
run: npm run index-worker &
working-directory: ./apps/api
id: start_index_worker
- name: Run snippet tests
run: |
npm run test:snips

View File

@@ -1,56 +1,4 @@
import request from "supertest";
import { configDotenv } from "dotenv";
import { BatchScrapeRequestInput } from "../../controllers/v1/types";
configDotenv();
const TEST_URL = "http://127.0.0.1:3002";
async function batchScrapeStart(body: BatchScrapeRequestInput) {
return await request(TEST_URL)
.post("/v1/batch/scrape")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send(body);
}
async function batchScrapeStatus(id: string) {
return await request(TEST_URL)
.get("/v1/batch/scrape/" + encodeURIComponent(id))
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.send();
}
async function batchScrape(body: BatchScrapeRequestInput): ReturnType<typeof batchScrapeStatus> {
const bss = await batchScrapeStart(body);
expectBatchScrapeStartToSucceed(bss);
let x;
do {
x = await batchScrapeStatus(bss.body.id);
expect(x.statusCode).toBe(200);
expect(typeof x.body.status).toBe("string");
} while (x.body.status === "scraping");
expectBatchScrapeToSucceed(x);
return x;
}
function expectBatchScrapeStartToSucceed(response: Awaited<ReturnType<typeof batchScrape>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.id).toBe("string");
}
function expectBatchScrapeToSucceed(response: Awaited<ReturnType<typeof batchScrapeStatus>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.status).toBe("string");
expect(response.body.status).toBe("completed");
expect(response.body).toHaveProperty("data");
expect(Array.isArray(response.body.data)).toBe(true);
expect(response.body.data.length).toBeGreaterThan(0);
}
import { batchScrape } from "./lib";
describe("Batch scrape tests", () => {
it.concurrent("works", async () => {

View File

@@ -0,0 +1,185 @@
import { batchScrape, crawl, creditUsage, extract, map, scrape, search, tokenUsage } from "./lib";
const sleep = (ms: number) => new Promise(x => setTimeout(() => x(true), ms));
const sleepForBatchBilling = () => sleep(20000);
beforeAll(async () => {
// Wait for previous test runs to stop billing processing
await sleep(40000);
}, 50000);
describe("Billing tests", () => {
if (process.env.TEST_SUITE_SELF_HOSTED) {
it("dummy", () => {
expect(true).toBe(true);
});
} else {
it("bills scrape correctly", async () => {
const rc1 = (await creditUsage()).remaining_credits;
// Run all scrape operations in parallel with Promise.all
await Promise.all([
// scrape 1: regular fc.dev scrape (1 credit)
scrape({
url: "https://firecrawl.dev"
}),
// scrape 1.1: regular fc.dev scrape (1 credit)
scrape({
url: "https://firecrawl.dev"
}),
// scrape 2: fc.dev with json (5 credits)
scrape({
url: "https://firecrawl.dev",
formats: ["json"],
jsonOptions: {
schema: {
type: "object",
properties: {
is_open_source: { type: "boolean" },
},
required: ["is_open_source"],
},
},
})
]);
// sum: 7 credits
await sleepForBatchBilling();
const rc2 = (await creditUsage()).remaining_credits;
expect(rc1 - rc2).toBe(7);
}, 120000);
it("bills batch scrape correctly", async () => {
const rc1 = (await creditUsage()).remaining_credits;
// Run both scrape operations in parallel with Promise.all
const [scrape1, scrape2] = await Promise.all([
// scrape 1: regular batch scrape with failing domain (2 credits)
batchScrape({
urls: [
"https://firecrawl.dev",
"https://mendable.ai",
"https://thisdomaindoesnotexistandwillfail.fcr",
],
}),
// scrape 2: batch scrape with json (10 credits)
batchScrape({
urls: [
"https://firecrawl.dev",
"https://mendable.ai",
"https://thisdomaindoesnotexistandwillfail.fcr",
],
formats: ["json"],
jsonOptions: {
schema: {
type: "object",
properties: {
four_word_summary: { type: "string" },
},
required: ["four_word_summary"],
},
},
})
]);
// sum: 12 credits
await sleepForBatchBilling();
const rc2 = (await creditUsage()).remaining_credits;
expect(rc1 - rc2).toBe(12);
}, 300000);
it("bills crawl correctly", async () => {
const rc1 = (await creditUsage()).remaining_credits;
// Run both crawl operations in parallel with Promise.all
const [crawl1, crawl2] = await Promise.all([
// crawl 1: regular fc.dev crawl (x credits)
crawl({
url: "https://firecrawl.dev",
}),
// crawl 2: fc.dev crawl with json (5y credits)
crawl({
url: "https://firecrawl.dev",
scrapeOptions: {
formats: ["json"],
jsonOptions: {
schema: {
type: "object",
properties: {
four_word_summary: { type: "string" },
},
required: ["four_word_summary"],
},
},
}
})
]);
// sum: x+5y credits
await sleepForBatchBilling();
const rc2 = (await creditUsage()).remaining_credits;
expect(rc1 - rc2).toBe(crawl1.body.completed + crawl2.body.completed * 5);
}, 300000);
it("bills map correctly", async () => {
const rc1 = (await creditUsage()).remaining_credits;
await map({ url: "https://firecrawl.dev" });
await sleepForBatchBilling();
const rc2 = (await creditUsage()).remaining_credits;
expect(rc1 - rc2).toBe(1);
}, 60000);
it("bills search correctly", async () => {
const rc1 = (await creditUsage()).remaining_credits;
const results = await search({
query: "firecrawl"
});
await sleepForBatchBilling();
const rc2 = (await creditUsage()).remaining_credits;
expect(rc1 - rc2).toBe(results.length);
}, 60000);
it("bills extract correctly", async () => {
const rc1 = (await tokenUsage()).remaining_tokens;
await extract({
urls: ["https://firecrawl.dev"],
schema: {
"type": "object",
"properties": {
"is_open_source": {
"type": "boolean"
}
},
"required": [
"is_open_source"
]
},
origin: "api-sdk",
});
await sleepForBatchBilling();
const rc2 = (await tokenUsage()).remaining_tokens;
expect(rc1 - rc2).toBe(305);
}, 300000);
}
});
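Note: the crawl assertion above expects 1 credit per completed page on the plain crawl and 5 credits per completed page on the JSON-format crawl, which is why the expected delta is derived from the two completed counts rather than a fixed number. A minimal sketch of that arithmetic (the page counts below are illustrative, not taken from a real run):
// Illustrative counts; the real test reads crawl1.body.completed and crawl2.body.completed
const plainPagesCompleted = 4;  // crawl 1: 1 credit per completed page
const jsonPagesCompleted = 4;   // crawl 2: 5 credits per completed page (json format)
const expectedCreditDelta = plainPagesCompleted * 1 + jsonPagesCompleted * 5; // 24 in this example
// mirrors: expect(rc1 - rc2).toBe(crawl1.body.completed + crawl2.body.completed * 5);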

View File

@@ -1,56 +1,4 @@
import request from "supertest";
import { configDotenv } from "dotenv";
import { CrawlRequestInput } from "../../controllers/v1/types";
configDotenv();
const TEST_URL = "http://127.0.0.1:3002";
async function crawlStart(body: CrawlRequestInput) {
return await request(TEST_URL)
.post("/v1/crawl")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send(body);
}
async function crawlStatus(id: string) {
return await request(TEST_URL)
.get("/v1/crawl/" + encodeURIComponent(id))
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.send();
}
async function crawl(body: CrawlRequestInput): ReturnType<typeof crawlStatus> {
const cs = await crawlStart(body);
expectCrawlStartToSucceed(cs);
let x;
do {
x = await crawlStatus(cs.body.id);
expect(x.statusCode).toBe(200);
expect(typeof x.body.status).toBe("string");
} while (x.body.status === "scraping");
expectCrawlToSucceed(x);
return x;
}
function expectCrawlStartToSucceed(response: Awaited<ReturnType<typeof crawlStart>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.id).toBe("string");
}
function expectCrawlToSucceed(response: Awaited<ReturnType<typeof crawlStatus>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.status).toBe("string");
expect(response.body.status).toBe("completed");
expect(response.body).toHaveProperty("data");
expect(Array.isArray(response.body.data)).toBe(true);
expect(response.body.data.length).toBeGreaterThan(0);
}
import { crawl } from "./lib";
describe("Crawl tests", () => {
it.concurrent("works", async () => {

View File

@@ -1,54 +1,4 @@
import request from "supertest";
import { configDotenv } from "dotenv";
import { ExtractRequestInput, ExtractResponse } from "../../controllers/v1/types";
configDotenv();
const TEST_URL = "http://127.0.0.1:3002";
async function extractStart(body: ExtractRequestInput) {
return await request(TEST_URL)
.post("/v1/extract")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send(body);
}
async function extractStatus(id: string) {
return await request(TEST_URL)
.get("/v1/extract/" + encodeURIComponent(id))
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.send();
}
async function extract(body: ExtractRequestInput): Promise<ExtractResponse> {
const es = await extractStart(body);
expectExtractStartToSucceed(es);
let x;
do {
x = await extractStatus(es.body.id);
expect(x.statusCode).toBe(200);
expect(typeof x.body.status).toBe("string");
} while (x.body.status === "processing");
expectExtractToSucceed(x);
return x.body;
}
function expectExtractStartToSucceed(response: Awaited<ReturnType<typeof extractStart>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.id).toBe("string");
}
function expectExtractToSucceed(response: Awaited<ReturnType<typeof extractStatus>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.status).toBe("string");
expect(response.body.status).toBe("completed");
expect(response.body).toHaveProperty("data");
}
import { extract } from "./lib";
describe("Extract tests", () => {
if (!process.env.TEST_SUITE_SELF_HOSTED || process.env.OPENAI_API_KEY || process.env.OLLAMA_BASE_URL) {

View File

@@ -0,0 +1,250 @@
import { configDotenv } from "dotenv";
configDotenv();
import { ScrapeRequestInput, Document, ExtractRequestInput, ExtractResponse, CrawlRequestInput, MapRequestInput, BatchScrapeRequestInput, SearchRequestInput } from "../../controllers/v1/types";
import request from "supertest";
// =========================================
// Configuration
// =========================================
const TEST_URL = "http://127.0.0.1:3002";
// =========================================
// Scrape API
// =========================================
async function scrapeRaw(body: ScrapeRequestInput) {
return await request(TEST_URL)
.post("/v1/scrape")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send(body);
}
function expectScrapeToSucceed(response: Awaited<ReturnType<typeof scrapeRaw>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.data).toBe("object");
}
export async function scrape(body: ScrapeRequestInput): Promise<Document> {
const raw = await scrapeRaw(body);
expectScrapeToSucceed(raw);
return raw.body.data;
}
// =========================================
// Crawl API
// =========================================
async function crawlStart(body: CrawlRequestInput) {
return await request(TEST_URL)
.post("/v1/crawl")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send(body);
}
async function crawlStatus(id: string) {
return await request(TEST_URL)
.get("/v1/crawl/" + encodeURIComponent(id))
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.send();
}
function expectCrawlStartToSucceed(response: Awaited<ReturnType<typeof crawlStart>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.id).toBe("string");
}
function expectCrawlToSucceed(response: Awaited<ReturnType<typeof crawlStatus>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.status).toBe("string");
expect(response.body.status).toBe("completed");
expect(response.body).toHaveProperty("data");
expect(Array.isArray(response.body.data)).toBe(true);
expect(response.body.data.length).toBeGreaterThan(0);
}
export async function crawl(body: CrawlRequestInput): ReturnType<typeof crawlStatus> {
const cs = await crawlStart(body);
expectCrawlStartToSucceed(cs);
let x;
do {
x = await crawlStatus(cs.body.id);
expect(x.statusCode).toBe(200);
expect(typeof x.body.status).toBe("string");
} while (x.body.status === "scraping");
expectCrawlToSucceed(x);
return x;
}
// =========================================
// Batch Scrape API
// =========================================
async function batchScrapeStart(body: BatchScrapeRequestInput) {
return await request(TEST_URL)
.post("/v1/batch/scrape")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send(body);
}
async function batchScrapeStatus(id: string) {
return await request(TEST_URL)
.get("/v1/batch/scrape/" + encodeURIComponent(id))
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.send();
}
function expectBatchScrapeStartToSucceed(response: Awaited<ReturnType<typeof batchScrape>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.id).toBe("string");
}
function expectBatchScrapeToSucceed(response: Awaited<ReturnType<typeof batchScrapeStatus>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.status).toBe("string");
expect(response.body.status).toBe("completed");
expect(response.body).toHaveProperty("data");
expect(Array.isArray(response.body.data)).toBe(true);
expect(response.body.data.length).toBeGreaterThan(0);
}
export async function batchScrape(body: BatchScrapeRequestInput): ReturnType<typeof batchScrapeStatus> {
const bss = await batchScrapeStart(body);
expectBatchScrapeStartToSucceed(bss);
let x;
do {
x = await batchScrapeStatus(bss.body.id);
expect(x.statusCode).toBe(200);
expect(typeof x.body.status).toBe("string");
} while (x.body.status === "scraping");
expectBatchScrapeToSucceed(x);
return x;
}
// =========================================
// Map API
// =========================================
export async function map(body: MapRequestInput) {
return await request(TEST_URL)
.post("/v1/map")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send(body);
}
export function expectMapToSucceed(response: Awaited<ReturnType<typeof map>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(Array.isArray(response.body.links)).toBe(true);
expect(response.body.links.length).toBeGreaterThan(0);
}
// =========================================
// Extract API
// =========================================
async function extractStart(body: ExtractRequestInput) {
return await request(TEST_URL)
.post("/v1/extract")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send(body);
}
async function extractStatus(id: string) {
return await request(TEST_URL)
.get("/v1/extract/" + encodeURIComponent(id))
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.send();
}
function expectExtractStartToSucceed(response: Awaited<ReturnType<typeof extractStart>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.id).toBe("string");
}
function expectExtractToSucceed(response: Awaited<ReturnType<typeof extractStatus>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.status).toBe("string");
expect(response.body.status).toBe("completed");
expect(response.body).toHaveProperty("data");
}
export async function extract(body: ExtractRequestInput): Promise<ExtractResponse> {
const es = await extractStart(body);
expectExtractStartToSucceed(es);
let x;
do {
x = await extractStatus(es.body.id);
expect(x.statusCode).toBe(200);
expect(typeof x.body.status).toBe("string");
} while (x.body.status === "processing");
expectExtractToSucceed(x);
return x.body;
}
// =========================================
// Search API
// =========================================
async function searchRaw(body: SearchRequestInput) {
return await request(TEST_URL)
.post("/v1/search")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send(body);
}
function expectSearchToSucceed(response: Awaited<ReturnType<typeof searchRaw>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.data).toBe("object");
expect(Array.isArray(response.body.data)).toBe(true);
expect(response.body.data.length).toBeGreaterThan(0);
}
export async function search(body: SearchRequestInput): Promise<Document[]> {
const raw = await searchRaw(body);
expectSearchToSucceed(raw);
return raw.body.data;
}
// =========================================
// Billing API
// =========================================
export async function creditUsage(): Promise<{ remaining_credits: number }> {
return (await request(TEST_URL)
.get("/v1/team/credit-usage")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")).body.data;
}
export async function tokenUsage(): Promise<{ remaining_tokens: number }> {
return (await request(TEST_URL)
.get("/v1/team/token-usage")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")).body.data;
}
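Note: the billing suite above is built directly on these shared helpers. A rough sketch of a billing-style test using them (a sketch only, assuming a funded TEST_API_KEY and the same asynchronous billing delay the suite waits out):
import { scrape, creditUsage } from "./lib";

const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));

it("bills a single scrape", async () => {
  const before = (await creditUsage()).remaining_credits;
  await scrape({ url: "https://firecrawl.dev" }); // plain scrape: 1 credit
  await sleep(20000); // billing is processed in batches; the suite above waits similarly
  const after = (await creditUsage()).remaining_credits;
  expect(before - after).toBe(1);
}, 120000);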

View File

@@ -1,24 +1,4 @@
import request from "supertest";
import { configDotenv } from "dotenv";
import { MapRequestInput } from "../../controllers/v1/types";
configDotenv();
const TEST_URL = "http://127.0.0.1:3002";
async function map(body: MapRequestInput) {
return await request(TEST_URL)
.post("/v1/map")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send(body);
}
function expectMapToSucceed(response: Awaited<ReturnType<typeof map>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(Array.isArray(response.body.links)).toBe(true);
expect(response.body.links.length).toBeGreaterThan(0);
}
import { expectMapToSucceed, map } from "./lib";
describe("Map tests", () => {
it.concurrent("basic map succeeds", async () => {

View File

@@ -1,29 +1,4 @@
import request from "supertest";
import { configDotenv } from "dotenv";
import { Document, ScrapeRequestInput } from "../../controllers/v1/types";
configDotenv();
const TEST_URL = "http://127.0.0.1:3002";
async function scrapeRaw(body: ScrapeRequestInput) {
return await request(TEST_URL)
.post("/v1/scrape")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send(body);
}
function expectScrapeToSucceed(response: Awaited<ReturnType<typeof scrapeRaw>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.data).toBe("object");
}
async function scrape(body: ScrapeRequestInput): Promise<Document> {
const raw = await scrapeRaw(body);
expectScrapeToSucceed(raw);
return raw.body.data;
}
import { scrape } from "./lib";
describe("Scrape tests", () => {
it.concurrent("mocking works properly", async () => {

View File

@@ -1,31 +1,4 @@
import request from "supertest";
import { configDotenv } from "dotenv";
import { Document, SearchRequestInput } from "../../controllers/v1/types";
configDotenv();
const TEST_URL = "http://127.0.0.1:3002";
async function searchRaw(body: SearchRequestInput) {
return await request(TEST_URL)
.post("/v1/search")
.set("Authorization", `Bearer ${process.env.TEST_API_KEY}`)
.set("Content-Type", "application/json")
.send(body);
}
function expectScrapeToSucceed(response: Awaited<ReturnType<typeof searchRaw>>) {
expect(response.statusCode).toBe(200);
expect(response.body.success).toBe(true);
expect(typeof response.body.data).toBe("object");
expect(Array.isArray(response.body.data)).toBe(true);
expect(response.body.data.length).toBeGreaterThan(0);
}
async function search(body: SearchRequestInput): Promise<Document> {
const raw = await searchRaw(body);
expectScrapeToSucceed(raw);
return raw.body.data;
}
import { search } from "./lib";
describe("Search tests", () => {
it.concurrent("works", async () => {

View File

@@ -0,0 +1,46 @@
import { Request, Response } from "express";
import { RequestWithAuth } from "./types";
import { getACUC } from "../auth";
import { logger } from "../../lib/logger";
import { RateLimiterMode } from "../../types";
export async function tokenUsageController(
req: RequestWithAuth,
res: Response,
): Promise<void> {
try {
// If we already have the token usage info from auth, use it
if (req.acuc) {
res.json({
success: true,
data: {
remaining_tokens: req.acuc.remaining_credits,
},
});
return;
}
// Otherwise fetch fresh data
const chunk = await getACUC(req.auth.team_id, false, true, RateLimiterMode.Extract);
if (!chunk) {
res.status(404).json({
success: false,
error: "Could not find token usage information",
});
return;
}
res.json({
success: true,
data: {
remaining_tokens: chunk.remaining_credits,
},
});
} catch (error) {
logger.error("Error in token usage controller:", error);
res.status(500).json({
success: false,
error: "Internal server error while fetching token usage",
});
}
}
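Note: the response contract mirrors the existing credit-usage endpoint, just keyed on tokens. A sketch of the shape the tokenUsage() test helper above relies on (the numeric value is illustrative):
// Payload returned by GET /v1/team/token-usage
type TokenUsageResponse = {
  success: true;
  data: {
    remaining_tokens: number; // e.g. 305000
  };
};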

View File

@@ -33,6 +33,7 @@ import { generateLLMsTextController } from "../controllers/v1/generate-llmstxt";
import { generateLLMsTextStatusController } from "../controllers/v1/generate-llmstxt-status";
import { deepResearchController } from "../controllers/v1/deep-research";
import { deepResearchStatusController } from "../controllers/v1/deep-research-status";
import { tokenUsageController } from "../controllers/v1/token-usage";
function checkCreditsMiddleware(
minimum?: number,
@@ -293,3 +294,9 @@ v1Router.get(
authMiddleware(RateLimiterMode.CrawlStatus),
wrap(creditUsageController),
);
v1Router.get(
"/team/token-usage",
authMiddleware(RateLimiterMode.ExtractStatus),
wrap(tokenUsageController),
);

View File

@@ -1132,7 +1132,7 @@ async function processJob(job: Job & { id: string }, token: string) {
logger.debug(`Adding billing job to queue for team ${job.data.team_id}`, {
billingJobId,
credits: creditsToBeBilled,
is_extract: job.data.scrapeOptions.extract,
is_extract: false,
});
// Add directly to the billing queue - the billing worker will handle the rest
@@ -1142,7 +1142,7 @@ async function processJob(job: Job & { id: string }, token: string) {
team_id: job.data.team_id,
subscription_id: undefined,
credits: creditsToBeBilled,
is_extract: job.data.scrapeOptions.extract,
is_extract: false,
timestamp: new Date().toISOString(),
originating_job_id: job.id
},
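Note: the change above pins is_extract to false for billing jobs created on this processJob path. A sketch of the job data that ends up on the billing queue after the fix (team and job identifiers are hypothetical placeholders):
// Hypothetical example of the billing job data produced after this change
const billingJobData = {
  team_id: "team_123",                    // job.data.team_id (placeholder value)
  subscription_id: undefined,
  credits: 1,                             // creditsToBeBilled for a plain scrape
  is_extract: false,                      // now always false on this code path
  timestamp: new Date().toISOString(),
  originating_job_id: "job_abc",          // job.id (placeholder value)
};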

View File

@@ -237,6 +237,7 @@ const testSuiteTokens = [
"824abcd", // don't remove (s-ai)
"0966288",
"226556f",
"0a18c9e", // gh
];
const manual = ["69be9e74-7624-4990-b20d-08e0acc70cf6"];