diff --git a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts index d32d726c..2ac44131 100644 --- a/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_v1_withAuth/index.test.ts @@ -1,8 +1,6 @@ import request from "supertest"; import { configDotenv } from "dotenv"; -import { - ScrapeRequestInput, -} from "../../controllers/v1/types"; +import { ScrapeRequestInput } from "../../controllers/v1/types"; import { BLOCKLISTED_URL_MESSAGE } from "../../lib/strings"; configDotenv(); @@ -19,8 +17,7 @@ describe("E2E Tests for v1 API Routes", () => { describe("GET /is-production", () => { it.concurrent("should return the production status", async () => { - const response: any = - await request(TEST_URL).get("/is-production"); + const response: any = await request(TEST_URL).get("/is-production"); console.log( "process.env.USE_DB_AUTHENTICATION", @@ -274,12 +271,11 @@ describe("E2E Tests for v1 API Routes", () => { url: "https://www.scrapethissite.com/", onlyMainContent: false, // default is true }; - const responseWithoutRemoveTags: any = - await request(TEST_URL) - .post("/v1/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send(scrapeRequest); + const responseWithoutRemoveTags: any = await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequest); expect(responseWithoutRemoveTags.statusCode).toBe(200); expect(responseWithoutRemoveTags.body).toHaveProperty("data"); diff --git a/apps/api/src/__tests__/e2e_v1_withAuth_all_params/index.test.ts b/apps/api/src/__tests__/e2e_v1_withAuth_all_params/index.test.ts index dfc979a2..60a752bf 100644 --- a/apps/api/src/__tests__/e2e_v1_withAuth_all_params/index.test.ts +++ b/apps/api/src/__tests__/e2e_v1_withAuth_all_params/index.test.ts @@ -1,8 +1,6 @@ import request from "supertest"; import { configDotenv } from "dotenv"; -import { - ScrapeRequest, -} from "../../controllers/v1/types"; +import { ScrapeRequest } from "../../controllers/v1/types"; configDotenv(); const FIRECRAWL_API_URL = "http://127.0.0.1:3002"; @@ -12,9 +10,7 @@ describe("E2E Tests for v1 API Routes", () => { it.concurrent( "should return a successful response for a scrape with 403 page", async () => { - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -39,9 +35,7 @@ describe("E2E Tests for v1 API Routes", () => { url: E2E_TEST_SERVER_URL, } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -86,9 +80,7 @@ describe("E2E Tests for v1 API Routes", () => { formats: ["html"], } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -121,9 +113,7 @@ describe("E2E Tests for v1 API Routes", () => { formats: ["rawHtml"], } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await 
request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -159,9 +149,7 @@ describe("E2E Tests for v1 API Routes", () => { headers: { "e2e-header-test": "firecrawl" }, } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -188,9 +176,7 @@ describe("E2E Tests for v1 API Routes", () => { includeTags: ["#content-1"], } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -220,9 +206,7 @@ describe("E2E Tests for v1 API Routes", () => { excludeTags: ["#content-1"], } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -253,9 +237,7 @@ describe("E2E Tests for v1 API Routes", () => { onlyMainContent: false, } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -285,9 +267,7 @@ describe("E2E Tests for v1 API Routes", () => { timeout: 500, } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -312,9 +292,7 @@ describe("E2E Tests for v1 API Routes", () => { mobile: true, } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -335,9 +313,7 @@ describe("E2E Tests for v1 API Routes", () => { it.concurrent( "should handle 'parsePDF' parameter correctly", async () => { - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -357,9 +333,7 @@ describe("E2E Tests for v1 API Routes", () => { "h7uKu14adDL6yGfnGf2qycY5uq8kC3OKCWkPxm", ); - const responseNoParsePDF: any = await request( - FIRECRAWL_API_URL, - ) + const responseNoParsePDF: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -410,9 +384,7 @@ describe("E2E Tests for v1 API Routes", () => { timeout: 120000, } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -432,12 +404,13 @@ describe("E2E Tests for v1 API Routes", () => { timeout: 120000, } as ScrapeRequest; - const responseWithSkipTlsVerification: any = - await 
request(FIRECRAWL_API_URL) - .post("/v1/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send(scrapeRequestWithSkipTlsVerification); + const responseWithSkipTlsVerification: any = await request( + FIRECRAWL_API_URL, + ) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(scrapeRequestWithSkipTlsVerification); console.log("Error1b"); // console.log(responseWithSkipTlsVerification.body) @@ -461,9 +434,7 @@ describe("E2E Tests for v1 API Routes", () => { removeBase64Images: true, } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -493,9 +464,7 @@ describe("E2E Tests for v1 API Routes", () => { ], } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -526,9 +495,7 @@ describe("E2E Tests for v1 API Routes", () => { ], } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -569,9 +536,7 @@ describe("E2E Tests for v1 API Routes", () => { ], } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -619,9 +584,7 @@ describe("E2E Tests for v1 API Routes", () => { ], } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -657,9 +620,7 @@ describe("E2E Tests for v1 API Routes", () => { ], } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -692,9 +653,7 @@ describe("E2E Tests for v1 API Routes", () => { ], } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") @@ -731,9 +690,7 @@ describe("E2E Tests for v1 API Routes", () => { ], } as ScrapeRequest; - const response: any = await request( - FIRECRAWL_API_URL, - ) + const response: any = await request(FIRECRAWL_API_URL) .post("/v1/scrape") .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) .set("Content-Type", "application/json") diff --git a/apps/api/src/__tests__/e2e_withAuth/index.test.ts b/apps/api/src/__tests__/e2e_withAuth/index.test.ts index 0edd7999..cbac0f9d 100644 --- a/apps/api/src/__tests__/e2e_withAuth/index.test.ts +++ b/apps/api/src/__tests__/e2e_withAuth/index.test.ts @@ -23,8 +23,7 @@ describe("E2E Tests for v0 API Routes", () => { 
describe("POST /v0/scrape", () => { it.concurrent("should require authorization", async () => { - const response: any = - await request(TEST_URL).post("/v0/scrape"); + const response: any = await request(TEST_URL).post("/v0/scrape"); expect(response.statusCode).toBe(401); }); @@ -159,12 +158,11 @@ describe("E2E Tests for v0 API Routes", () => { it.concurrent( "should return a successful response with a valid API key with removeTags option", async () => { - const responseWithoutRemoveTags: any = - await request(TEST_URL) - .post("/v0/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send({ url: "https://www.scrapethissite.com/" }); + const responseWithoutRemoveTags: any = await request(TEST_URL) + .post("/v0/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send({ url: "https://www.scrapethissite.com/" }); expect(responseWithoutRemoveTags.statusCode).toBe(200); expect(responseWithoutRemoveTags.body).toHaveProperty("data"); expect(responseWithoutRemoveTags.body.data).toHaveProperty("content"); @@ -332,8 +330,7 @@ describe("E2E Tests for v0 API Routes", () => { describe("POST /v0/crawl", () => { it.concurrent("should require authorization", async () => { - const response: any = - await request(TEST_URL).post("/v0/crawl"); + const response: any = await request(TEST_URL).post("/v0/crawl"); expect(response.statusCode).toBe(401); }); @@ -461,9 +458,7 @@ describe("E2E Tests for v0 API Routes", () => { } await new Promise((resolve) => setTimeout(resolve, 1000)); // wait for data to be saved on the database - const completedResponse: any = await request( - TEST_URL, - ) + const completedResponse: any = await request(TEST_URL) .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); @@ -509,9 +504,7 @@ describe("E2E Tests for v0 API Routes", () => { await new Promise((resolve) => setTimeout(resolve, 1000)); // Wait for 1 second before checking again } } - const completedResponse: any = await request( - TEST_URL, - ) + const completedResponse: any = await request(TEST_URL) .get(`/v0/crawl/status/${crawlResponse.body.jobId}`) .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`); diff --git a/apps/api/src/__tests__/snips/scrape.test.ts b/apps/api/src/__tests__/snips/scrape.test.ts index c337f4f8..fd28c25d 100644 --- a/apps/api/src/__tests__/snips/scrape.test.ts +++ b/apps/api/src/__tests__/snips/scrape.test.ts @@ -6,31 +6,33 @@ configDotenv(); const TEST_URL = "http://127.0.0.1:3002"; async function scrape(body: ScrapeRequestInput) { - return await request(TEST_URL) - .post("/v1/scrape") - .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) - .set("Content-Type", "application/json") - .send(body); + return await request(TEST_URL) + .post("/v1/scrape") + .set("Authorization", `Bearer ${process.env.TEST_API_KEY}`) + .set("Content-Type", "application/json") + .send(body); } function expectScrapeToSucceed(response: Awaited>) { - expect(response.statusCode).toBe(200); - expect(response.body.success).toBe(true); - expect(typeof response.body.data).toBe("object"); + expect(response.statusCode).toBe(200); + expect(response.body.success).toBe(true); + expect(typeof response.body.data).toBe("object"); } describe("Scrape tests", () => { - it("mocking works properly", async () => { - // depends on falsified mock mocking-works-properly - // this test will fail if mock is bypassed with real data -- firecrawl.dev 
will never have - // that as its actual markdown output + it("mocking works properly", async () => { + // depends on falsified mock mocking-works-properly + // this test will fail if mock is bypassed with real data -- firecrawl.dev will never have + // that as its actual markdown output - const response = await scrape({ - url: "http://firecrawl.dev", - useMock: "mocking-works-properly", - }); - - expectScrapeToSucceed(response); - expect(response.body.data.markdown).toBe("this is fake data coming from the mocking system!"); + const response = await scrape({ + url: "http://firecrawl.dev", + useMock: "mocking-works-properly", }); -}); \ No newline at end of file + + expectScrapeToSucceed(response); + expect(response.body.data.markdown).toBe( + "this is fake data coming from the mocking system!", + ); + }); +}); diff --git a/apps/api/src/__tests__/snips/utils/collect-mocks.js b/apps/api/src/__tests__/snips/utils/collect-mocks.js index 3b879136..7ef47c7b 100644 --- a/apps/api/src/__tests__/snips/utils/collect-mocks.js +++ b/apps/api/src/__tests__/snips/utils/collect-mocks.js @@ -4,9 +4,11 @@ const fs = require("fs"); const mocksDirPath = path.join(__dirname, "../../../scraper/scrapeURL/mocks"); const files = fs.readdirSync(mocksDirPath); -const contents = files.map(x => JSON.parse(fs.readFileSync(path.join(mocksDirPath, x), "utf8"))); +const contents = files.map((x) => + JSON.parse(fs.readFileSync(path.join(mocksDirPath, x), "utf8")), +); fs.writeFileSync( - path.join(__dirname, "../mocks/" + process.argv[2] + ".json"), - JSON.stringify(contents, undefined, 4), -); \ No newline at end of file + path.join(__dirname, "../mocks/" + process.argv[2] + ".json"), + JSON.stringify(contents, undefined, 4), +); diff --git a/apps/api/src/controllers/auth.ts b/apps/api/src/controllers/auth.ts index 22c75232..d7148012 100644 --- a/apps/api/src/controllers/auth.ts +++ b/apps/api/src/controllers/auth.ts @@ -105,7 +105,6 @@ export async function getACUC( { get: true }, )); - if (!error) { break; } @@ -146,7 +145,7 @@ export async function clearACUC(api_key: string): Promise { modes.map(async (mode) => { const cacheKey = `acuc_${api_key}_${mode}`; await deleteKey(cacheKey); - }) + }), ); // Also clear the base cache key @@ -232,7 +231,6 @@ export async function supaAuthenticateUser( teamId = chunk.team_id; priceId = chunk.price_id; - plan = getPlanByPriceId(priceId); subscriptionData = { team_id: teamId, diff --git a/apps/api/src/controllers/v0/admin/check-fire-engine.ts b/apps/api/src/controllers/v0/admin/check-fire-engine.ts index 0559514e..0db6f10d 100644 --- a/apps/api/src/controllers/v0/admin/check-fire-engine.ts +++ b/apps/api/src/controllers/v0/admin/check-fire-engine.ts @@ -16,7 +16,7 @@ export async function checkFireEngine(req: Request, res: Response) { const timeout = setTimeout(() => controller.abort(), 30000); const urls = ["https://roastmywebsite.ai", "https://example.com"]; - let lastError : string | null = null; + let lastError: string | null = null; for (const url of urls) { try { @@ -62,7 +62,6 @@ export async function checkFireEngine(req: Request, res: Response) { success: false, error: "Internal server error - all retry attempts failed", }); - } catch (error) { logger.error(error); Sentry.captureException(error); diff --git a/apps/api/src/controllers/v0/crawl.ts b/apps/api/src/controllers/v0/crawl.ts index 5b19a0a8..6b15a219 100644 --- a/apps/api/src/controllers/v0/crawl.ts +++ b/apps/api/src/controllers/v0/crawl.ts @@ -227,7 +227,7 @@ export async function crawlController(req: Request, 
res: Response) { await addScrapeJob(job.data as any, {}, job.opts.jobId); } }); - + if (sitemap === 0) { await lockURL(id, sc, url); diff --git a/apps/api/src/controllers/v1/crawl-errors.ts b/apps/api/src/controllers/v1/crawl-errors.ts index b64d02fa..defdda01 100644 --- a/apps/api/src/controllers/v1/crawl-errors.ts +++ b/apps/api/src/controllers/v1/crawl-errors.ts @@ -1,6 +1,6 @@ import { Response } from "express"; import { - CrawlErrorsResponse, + CrawlErrorsResponse, CrawlStatusParams, CrawlStatusResponse, ErrorResponse, @@ -62,20 +62,23 @@ export async function crawlErrorsController( const failedJobIDs: string[] = []; for (const [id, status] of jobStatuses) { - if ( - status === "failed" - ) { + if (status === "failed") { failedJobIDs.push(id); } } res.status(200).json({ - errors: (await getJobs(failedJobIDs)).map(x => ({ - id: x.id, - timestamp: x.finishedOn !== undefined ? (new Date(x.finishedOn).toISOString()) : undefined, - url: x.data.url, - error: x.failedReason, + errors: (await getJobs(failedJobIDs)).map((x) => ({ + id: x.id, + timestamp: + x.finishedOn !== undefined + ? new Date(x.finishedOn).toISOString() + : undefined, + url: x.data.url, + error: x.failedReason, })), - robotsBlocked: await redisConnection.smembers("crawl:" + req.params.jobId + ":robots_blocked"), + robotsBlocked: await redisConnection.smembers( + "crawl:" + req.params.jobId + ":robots_blocked", + ), }); } diff --git a/apps/api/src/controllers/v1/crawl-status.ts b/apps/api/src/controllers/v1/crawl-status.ts index 48a4a177..bac70a34 100644 --- a/apps/api/src/controllers/v1/crawl-status.ts +++ b/apps/api/src/controllers/v1/crawl-status.ts @@ -116,7 +116,10 @@ export async function crawlStatusController( const status: Exclude["status"] = sc.cancelled ? "cancelled" - : validJobStatuses.every((x) => x[1] === "completed") && (sc.crawlerOptions ? await isCrawlKickoffFinished(req.params.jobId) : true) + : validJobStatuses.every((x) => x[1] === "completed") && + (sc.crawlerOptions + ? await isCrawlKickoffFinished(req.params.jobId) + : true) ? 
"completed" : "scraping"; diff --git a/apps/api/src/controllers/v1/map.ts b/apps/api/src/controllers/v1/map.ts index 3d6c5541..c34f0220 100644 --- a/apps/api/src/controllers/v1/map.ts +++ b/apps/api/src/controllers/v1/map.ts @@ -101,7 +101,7 @@ export async function getMapResults({ }, true, true, - 30000 + 30000, ); if (sitemap > 0) { links = links @@ -164,20 +164,24 @@ export async function getMapResults({ const twoDaysAgo = new Date(); twoDaysAgo.setDate(twoDaysAgo.getDate() - 2); - // If sitemap is not ignored and either we have few URLs (<100) or the data is stale (>2 days old), fetch fresh sitemap if ( - !ignoreSitemap && + !ignoreSitemap && (sitemapIndexResult.urls.length < 100 || - new Date(sitemapIndexResult.lastUpdated) < twoDaysAgo) + new Date(sitemapIndexResult.lastUpdated) < twoDaysAgo) ) { try { - await crawler.tryGetSitemap(urls => { - links.push(...urls); - }, true, false, 30000); + await crawler.tryGetSitemap( + (urls) => { + links.push(...urls); + }, + true, + false, + 30000, + ); } catch (e) { logger.warn("tryGetSitemap threw an error", { error: e }); - } + } } if (!cachedResult) { @@ -253,7 +257,7 @@ export async function getMapResults({ }, { priority: 10, - } + }, ); return { diff --git a/apps/api/src/controllers/v1/scrape.ts b/apps/api/src/controllers/v1/scrape.ts index f4bc45b5..1ea28995 100644 --- a/apps/api/src/controllers/v1/scrape.ts +++ b/apps/api/src/controllers/v1/scrape.ts @@ -33,7 +33,6 @@ export async function scrapeController( basePriority: 10, }); - await addScrapeJob( { url: req.body.url, @@ -97,7 +96,7 @@ export async function scrapeController( // Don't bill if we're early returning return; } - if (req.body.extract && req.body.formats.includes("extract") ) { + if (req.body.extract && req.body.formats.includes("extract")) { creditsToBeBilled = 5; } diff --git a/apps/api/src/controllers/v1/types.ts b/apps/api/src/controllers/v1/types.ts index bef88e93..13b14116 100644 --- a/apps/api/src/controllers/v1/types.ts +++ b/apps/api/src/controllers/v1/types.ts @@ -125,7 +125,7 @@ export const scrapeOptions = z "screenshot", "screenshot@fullPage", "extract", - "json" + "json", ]) .array() .optional() @@ -233,7 +233,7 @@ export const extractV1Options = z .strict(strictMessage) .transform((obj) => ({ ...obj, - allowExternalLinks: obj.allowExternalLinks || obj.enableWebSearch + allowExternalLinks: obj.allowExternalLinks || obj.enableWebSearch, })); export type ExtractV1Options = z.infer; @@ -268,11 +268,17 @@ export const scrapeRequestSchema = scrapeOptions ) .transform((obj) => { // Handle timeout - if ((obj.formats?.includes("extract") || obj.extract || obj.formats?.includes("json") || obj.jsonOptions) && !obj.timeout) { + if ( + (obj.formats?.includes("extract") || + obj.extract || + obj.formats?.includes("json") || + obj.jsonOptions) && + !obj.timeout + ) { obj = { ...obj, timeout: 60000 }; } - if(obj.formats?.includes("json")) { + if (obj.formats?.includes("json")) { obj.formats.push("extract"); } @@ -284,8 +290,8 @@ export const scrapeRequestSchema = scrapeOptions prompt: obj.jsonOptions.prompt, systemPrompt: obj.jsonOptions.systemPrompt, schema: obj.jsonOptions.schema, - mode: "llm" - } + mode: "llm", + }, }; } @@ -602,15 +608,14 @@ export type CrawlStatusResponse = data: Document[]; }; - export type CrawlErrorsResponse = | ErrorResponse | { errors: { - id: string, - timestamp?: string, - url: string, - error: string, + id: string; + timestamp?: string; + url: string; + error: string; }[]; robotsBlocked: string[]; }; @@ -888,7 +893,6 @@ export type 
SearchResponse = data: Document[]; }; - export type TokenUsage = { promptTokens: number; completionTokens: number; diff --git a/apps/api/src/index.ts b/apps/api/src/index.ts index b4eff93f..254e2d8c 100644 --- a/apps/api/src/index.ts +++ b/apps/api/src/index.ts @@ -4,7 +4,11 @@ import * as Sentry from "@sentry/node"; import express, { NextFunction, Request, Response } from "express"; import bodyParser from "body-parser"; import cors from "cors"; -import { getExtractQueue, getScrapeQueue, getIndexQueue } from "./services/queue-service"; +import { + getExtractQueue, + getScrapeQueue, + getIndexQueue, +} from "./services/queue-service"; import { v0Router } from "./routes/v0"; import os from "os"; import { logger } from "./lib/logger"; diff --git a/apps/api/src/lib/__tests__/deduplicate-obs-array.test.ts b/apps/api/src/lib/__tests__/deduplicate-obs-array.test.ts index bab91137..6446e6c5 100644 --- a/apps/api/src/lib/__tests__/deduplicate-obs-array.test.ts +++ b/apps/api/src/lib/__tests__/deduplicate-obs-array.test.ts @@ -3,101 +3,101 @@ import { deduplicateObjectsArray } from "../extract/helpers/deduplicate-objs-arr describe("deduplicateObjectsArray", () => { it("should deduplicate the array", async () => { const objArray = { - "lawyers": [ + lawyers: [ { - "name": "James D. Schull", - "email": null, - "title": "Personal Injury Attorney", + name: "James D. Schull", + email: null, + title: "Personal Injury Attorney", "phone-number": null, "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "James D. Schull", - "email": null, - "title": "Personal Injury Attorney", + name: "James D. Schull", + email: null, + title: "Personal Injury Attorney", "phone-number": null, "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "James D. Schull", - "email": null, - "title": "Personal Injury Attorney", + name: "James D. Schull", + email: null, + title: "Personal Injury Attorney", "phone-number": null, "practice-areas": [ { - "area": "Personal Injury" - } - ] - } - ] - } + area: "Personal Injury", + }, + ], + }, + ], + }; const expected = { - "lawyers": [ + lawyers: [ { - "name": "James D. Schull", - "email": null, - "title": "Personal Injury Attorney", + name: "James D. Schull", + email: null, + title: "Personal Injury Attorney", "phone-number": null, "practice-areas": [ { - "area": "Personal Injury" - } - ] - } - ] - } + area: "Personal Injury", + }, + ], + }, + ], + }; const result = await deduplicateObjectsArray(objArray); expect(result).toEqual(expected); - }) + }); it("should not deduplicate if not necessary", async () => { const objArray = { - "lawyers": [ + lawyers: [ { - "name": "James D. Schull", - "email": null, - "title": "Personal Injury Attorney", + name: "James D. 
Schull", + email: null, + title: "Personal Injury Attorney", "phone-number": null, "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "John Doe", - "email": null, - "title": "Personal Injury Attorney", + name: "John Doe", + email: null, + title: "Personal Injury Attorney", "phone-number": null, "practice-areas": [ { - "area": "Personal Injury" - } - ] - } - ] - } + area: "Personal Injury", + }, + ], + }, + ], + }; const result = await deduplicateObjectsArray(objArray); expect(result).toEqual(objArray); - }) + }); it("should handle an empty array", async () => { - const objArray = { "lawyers": [] }; + const objArray = { lawyers: [] }; - const expected = { "lawyers": [] }; + const expected = { lawyers: [] }; const result = await deduplicateObjectsArray(objArray); @@ -106,35 +106,35 @@ describe("deduplicateObjectsArray", () => { it("should handle objects with different properties", async () => { const objArray = { - "lawyers": [ + lawyers: [ { - "name": "James D. Schull", - "email": "james@example.com", - "title": "Personal Injury Attorney" + name: "James D. Schull", + email: "james@example.com", + title: "Personal Injury Attorney", }, { - "name": "James D. Schull", - "email": "james@example.com", - "title": "Personal Injury Attorney", - "phone-number": "123-456-7890" - } - ] + name: "James D. Schull", + email: "james@example.com", + title: "Personal Injury Attorney", + "phone-number": "123-456-7890", + }, + ], }; const expected = { - "lawyers": [ + lawyers: [ { - "name": "James D. Schull", - "email": "james@example.com", - "title": "Personal Injury Attorney" + name: "James D. Schull", + email: "james@example.com", + title: "Personal Injury Attorney", }, { - "name": "James D. Schull", - "email": "james@example.com", - "title": "Personal Injury Attorney", - "phone-number": "123-456-7890" - } - ] + name: "James D. Schull", + email: "james@example.com", + title: "Personal Injury Attorney", + "phone-number": "123-456-7890", + }, + ], }; const result = await deduplicateObjectsArray(objArray); @@ -144,33 +144,33 @@ describe("deduplicateObjectsArray", () => { it("should handle objects with same properties but different values", async () => { const objArray = { - "lawyers": [ + lawyers: [ { - "name": "James D. Schull", - "email": "james1@example.com", - "title": "Personal Injury Attorney" + name: "James D. Schull", + email: "james1@example.com", + title: "Personal Injury Attorney", }, { - "name": "James D. Schull", - "email": "james2@example.com", - "title": "Personal Injury Attorney" - } - ] + name: "James D. Schull", + email: "james2@example.com", + title: "Personal Injury Attorney", + }, + ], }; const expected = { - "lawyers": [ + lawyers: [ { - "name": "James D. Schull", - "email": "james1@example.com", - "title": "Personal Injury Attorney" + name: "James D. Schull", + email: "james1@example.com", + title: "Personal Injury Attorney", }, { - "name": "James D. Schull", - "email": "james2@example.com", - "title": "Personal Injury Attorney" - } - ] + name: "James D. Schull", + email: "james2@example.com", + title: "Personal Injury Attorney", + }, + ], }; const result = await deduplicateObjectsArray(objArray); @@ -180,47 +180,47 @@ describe("deduplicateObjectsArray", () => { it("should handle nested identical objects", async () => { const objArray = { - "lawyers": [ + lawyers: [ { - "name": "James D. Schull", - "email": null, - "title": "Personal Injury Attorney", + name: "James D. 
Schull", + email: null, + title: "Personal Injury Attorney", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "James D. Schull", - "email": null, - "title": "Personal Injury Attorney", + name: "James D. Schull", + email: null, + title: "Personal Injury Attorney", "practice-areas": [ { - "area": "Personal Injury" - } - ] - } - ] + area: "Personal Injury", + }, + ], + }, + ], }; const expected = { - "lawyers": [ + lawyers: [ { - "name": "James D. Schull", - "email": null, - "title": "Personal Injury Attorney", + name: "James D. Schull", + email: null, + title: "Personal Injury Attorney", "practice-areas": [ { - "area": "Personal Injury" - } - ] - } - ] + area: "Personal Injury", + }, + ], + }, + ], }; const result = await deduplicateObjectsArray(objArray); expect(result).toEqual(expected); }); -}) \ No newline at end of file +}); diff --git a/apps/api/src/lib/__tests__/merge-null-val-objs.test.ts b/apps/api/src/lib/__tests__/merge-null-val-objs.test.ts index 6f8e9d4b..62f81b91 100644 --- a/apps/api/src/lib/__tests__/merge-null-val-objs.test.ts +++ b/apps/api/src/lib/__tests__/merge-null-val-objs.test.ts @@ -3,292 +3,292 @@ import { mergeNullValObjs } from "../extract/helpers/merge-null-val-objs"; describe("mergeNullValObjs", () => { it("should merge the objects with null values", async () => { const objArray = { - "lawyers": [ + lawyers: [ { - "name": "Frank Giunta", - "email": null, - "title": "Personal Injury Attorney", + name: "Frank Giunta", + email: null, + title: "Personal Injury Attorney", "phone-number": "214.370.5200", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Frank Giunta", - "email": null, - "title": "Personal Injury Attorney", + name: "Frank Giunta", + email: null, + title: "Personal Injury Attorney", "phone-number": "214.370.5200", "practice-areas": [ { - "area": "Personal Injury" - } - ] - } - ] - } + area: "Personal Injury", + }, + ], + }, + ], + }; const expected = { - "lawyers": [ + lawyers: [ { - "name": "Frank Giunta", - "email": null, - "title": "Personal Injury Attorney", + name: "Frank Giunta", + email: null, + title: "Personal Injury Attorney", "phone-number": "214.370.5200", "practice-areas": [ { - "area": "Personal Injury" - } - ] - } - ] - } + area: "Personal Injury", + }, + ], + }, + ], + }; const result = mergeNullValObjs(objArray); expect(result).toEqual(expected); - }) + }); it("should handle empty object array", async () => { const objArray = { - "lawyers": [] - } + lawyers: [], + }; const expected = { - "lawyers": [] - } + lawyers: [], + }; const result = mergeNullValObjs(objArray); expect(result).toEqual(expected); - }) + }); it("should handle object array with no null values", async () => { const objArray = { - "lawyers": [ + lawyers: [ { - "name": "John Doe", - "email": "john.doe@example.com", - "title": "Attorney", + name: "John Doe", + email: "john.doe@example.com", + title: "Attorney", "phone-number": "123.456.7890", "practice-areas": [ { - "area": "Corporate Law" - } - ] - } - ] - } - - const expected = { - "lawyers": [ - { - "name": "John Doe", - "email": "john.doe@example.com", - "title": "Attorney", - "phone-number": "123.456.7890", - "practice-areas": [ - { - "area": "Corporate Law" - } - ] - } - ] - } - - const result = mergeNullValObjs(objArray); - - expect(result).toEqual(expected); - }) - - it("should merge objects with different null values", async () => { - const objArray = { - "lawyers": [ - { - "name": "Jane 
Smith", - "email": "null", - "title": "Attorney", - "description": null, - "phone-number": "987.654.3210", - "practice-areas": [ - { - "area": "Family Law" - } - ] + area: "Corporate Law", + }, + ], }, - { - "name": "Jane Smith", - "email": "jane.smith@example.com", - "title": null, - "description": "Jane Smith is an attorney specializing in Family Law.", - "phone-number": "987.654.3210", - "practice-areas": [ - { - "area": "Family Law" - } - ] - } - ] - } - - const expected = { - "lawyers": [ - { - "name": "Jane Smith", - "email": "jane.smith@example.com", - "title": "Attorney", - "description": "Jane Smith is an attorney specializing in Family Law.", - "phone-number": "987.654.3210", - "practice-areas": [ - { - "area": "Family Law" - } - ] - } - ] - } - - const result = mergeNullValObjs(objArray); - - expect(result).toEqual(expected); - }) - - it("should merge objects with different null values", async () => { - const objArray = { - "lawyers": [ - { - "name": "Frank Giunta", - "email": "frank.giunta@example.com", - "title": "Personal Injury Attorney", - "phone-number": "214.370.5200", - "practice-areas": [ - { - "area": "Personal Injury" - } - ] - }, - { - "name": "Frank Giunta", - "email": null, - "title": "Personal Injury Attorney", - "phone-number": "214.370.5200", - "practice-areas": [ - { - "area": "Personal Injury" - } - ] - }, - { - "name": "Dale R. Rose", - "email": null, - "title": "Personal Injury Attorney", - "phone-number": "972.562.0266", - "practice-areas": [ - { - "area": "Personal Injury" - } - ] - } - ] + ], }; const expected = { - "lawyers": [ + lawyers: [ { - "name": "Frank Giunta", - "email": "frank.giunta@example.com", - "title": "Personal Injury Attorney", - "phone-number": "214.370.5200", + name: "John Doe", + email: "john.doe@example.com", + title: "Attorney", + "phone-number": "123.456.7890", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Corporate Law", + }, + ], }, - { - "name": "Dale R. 
Rose", - "email": null, - "title": "Personal Injury Attorney", - "phone-number": "972.562.0266", - "practice-areas": [ - { - "area": "Personal Injury" - } - ] - } - ] + ], }; const result = mergeNullValObjs(objArray); expect(result).toEqual(expected); - }) + }); + + it("should merge objects with different null values", async () => { + const objArray = { + lawyers: [ + { + name: "Jane Smith", + email: "null", + title: "Attorney", + description: null, + "phone-number": "987.654.3210", + "practice-areas": [ + { + area: "Family Law", + }, + ], + }, + { + name: "Jane Smith", + email: "jane.smith@example.com", + title: null, + description: "Jane Smith is an attorney specializing in Family Law.", + "phone-number": "987.654.3210", + "practice-areas": [ + { + area: "Family Law", + }, + ], + }, + ], + }; + + const expected = { + lawyers: [ + { + name: "Jane Smith", + email: "jane.smith@example.com", + title: "Attorney", + description: "Jane Smith is an attorney specializing in Family Law.", + "phone-number": "987.654.3210", + "practice-areas": [ + { + area: "Family Law", + }, + ], + }, + ], + }; + + const result = mergeNullValObjs(objArray); + + expect(result).toEqual(expected); + }); + + it("should merge objects with different null values", async () => { + const objArray = { + lawyers: [ + { + name: "Frank Giunta", + email: "frank.giunta@example.com", + title: "Personal Injury Attorney", + "phone-number": "214.370.5200", + "practice-areas": [ + { + area: "Personal Injury", + }, + ], + }, + { + name: "Frank Giunta", + email: null, + title: "Personal Injury Attorney", + "phone-number": "214.370.5200", + "practice-areas": [ + { + area: "Personal Injury", + }, + ], + }, + { + name: "Dale R. Rose", + email: null, + title: "Personal Injury Attorney", + "phone-number": "972.562.0266", + "practice-areas": [ + { + area: "Personal Injury", + }, + ], + }, + ], + }; + + const expected = { + lawyers: [ + { + name: "Frank Giunta", + email: "frank.giunta@example.com", + title: "Personal Injury Attorney", + "phone-number": "214.370.5200", + "practice-areas": [ + { + area: "Personal Injury", + }, + ], + }, + { + name: "Dale R. Rose", + email: null, + title: "Personal Injury Attorney", + "phone-number": "972.562.0266", + "practice-areas": [ + { + area: "Personal Injury", + }, + ], + }, + ], + }; + + const result = mergeNullValObjs(objArray); + + expect(result).toEqual(expected); + }); it("should correctly merge and deduplicate objects", async () => { const objArray = { - "lawyers": [ + lawyers: [ { - "name": "Frank Giunta", - "email": null, - "title": "Personal Injury Attorney", + name: "Frank Giunta", + email: null, + title: "Personal Injury Attorney", "phone-number": "214.370.5200", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Frank Giunta", - "email": null, - "title": "Personal Injury Attorney", + name: "Frank Giunta", + email: null, + title: "Personal Injury Attorney", "phone-number": "214.370.5200", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Dale R. Rose", - "email": null, - "title": "Personal Injury Attorney", + name: "Dale R. 
Rose", + email: null, + title: "Personal Injury Attorney", "phone-number": "972.562.0266", "practice-areas": [ { - "area": "Personal Injury" - } - ] - } - ] + area: "Personal Injury", + }, + ], + }, + ], }; const expected = { - "lawyers": [ + lawyers: [ { - "name": "Frank Giunta", - "email": null, - "title": "Personal Injury Attorney", + name: "Frank Giunta", + email: null, + title: "Personal Injury Attorney", "phone-number": "214.370.5200", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Dale R. Rose", - "email": null, - "title": "Personal Injury Attorney", + name: "Dale R. Rose", + email: null, + title: "Personal Injury Attorney", "phone-number": "972.562.0266", "practice-areas": [ { - "area": "Personal Injury" - } - ] - } - ] + area: "Personal Injury", + }, + ], + }, + ], }; const result = mergeNullValObjs(objArray); @@ -298,177 +298,172 @@ describe("mergeNullValObjs", () => { it("should merge arrays of similar objects", async () => { const objArray = { - "lawyers": [ + lawyers: [ { - "name": "Allen Cox", - "email": null, - "title": "Personal Injury Lawyer", + name: "Allen Cox", + email: null, + title: "Personal Injury Lawyer", "phone-number": "972.606.9000", - "practice-areas": [ - { "area": "Personal Injury" } - ] + "practice-areas": [{ area: "Personal Injury" }], }, { - "name": "Allen Cox", - "email": "allen.cox@example.com", - "title": "Personal Injury Lawyer", + name: "Allen Cox", + email: "allen.cox@example.com", + title: "Personal Injury Lawyer", "phone-number": null, "practice-areas": [ - { "area": "Automobile accidents" }, - { "area": "Truck accidents" }, - { "area": "Amusement park injury" }, - { "area": "Bus accident" }, - { "area": "Industrial accidents" }, - { "area": "Product defects" }, - { "area": "Food poisoning" }, - { "area": "Workplace accidents" }, - { "area": "Wrongful death" }, - { "area": "Swimming pool accidents" }, - { "area": "Premises accidents" }, - { "area": "Aircraft accidents" }, - { "area": "Animal and dog bites" } - ] - } - ] - } + { area: "Automobile accidents" }, + { area: "Truck accidents" }, + { area: "Amusement park injury" }, + { area: "Bus accident" }, + { area: "Industrial accidents" }, + { area: "Product defects" }, + { area: "Food poisoning" }, + { area: "Workplace accidents" }, + { area: "Wrongful death" }, + { area: "Swimming pool accidents" }, + { area: "Premises accidents" }, + { area: "Aircraft accidents" }, + { area: "Animal and dog bites" }, + ], + }, + ], + }; const expected = { - "lawyers": [ + lawyers: [ { - "name": "Allen Cox", - "email": "allen.cox@example.com", - "title": "Personal Injury Lawyer", + name: "Allen Cox", + email: "allen.cox@example.com", + title: "Personal Injury Lawyer", "phone-number": "972.606.9000", "practice-areas": [ - { "area": "Personal Injury" }, - { "area": "Automobile accidents" }, - { "area": "Truck accidents" }, - { "area": "Amusement park injury" }, - { "area": "Bus accident" }, - { "area": "Industrial accidents" }, - { "area": "Product defects" }, - { "area": "Food poisoning" }, - { "area": "Workplace accidents" }, - { "area": "Wrongful death" }, - { "area": "Swimming pool accidents" }, - { "area": "Premises accidents" }, - { "area": "Aircraft accidents" }, - { "area": "Animal and dog bites" } - ] - } - ] - } + { area: "Personal Injury" }, + { area: "Automobile accidents" }, + { area: "Truck accidents" }, + { area: "Amusement park injury" }, + { area: "Bus accident" }, + { area: "Industrial accidents" }, + { area: "Product defects" }, + { 
area: "Food poisoning" }, + { area: "Workplace accidents" }, + { area: "Wrongful death" }, + { area: "Swimming pool accidents" }, + { area: "Premises accidents" }, + { area: "Aircraft accidents" }, + { area: "Animal and dog bites" }, + ], + }, + ], + }; const result = mergeNullValObjs(objArray); expect(result).toEqual(expected); - }) + }); it("should merge arrays of similar objects with different key names", async () => { const objArray = { - "attorneys": [ + attorneys: [ { - "fullName": "Allen Cox", - "contactEmail": null, - "position": "Personal Injury Lawyer", - "contactNumber": "972.606.9000", - "specializations": [ - { "field": "Personal Injury" } - ] + fullName: "Allen Cox", + contactEmail: null, + position: "Personal Injury Lawyer", + contactNumber: "972.606.9000", + specializations: [{ field: "Personal Injury" }], }, { - "fullName": "Allen Cox", - "contactEmail": "allen.cox@example.com", - "position": "Personal Injury Lawyer", - "contactNumber": null, - "specializations": [ - { "field": "Automobile accidents" }, - { "field": "Truck accidents" }, - { "field": "Amusement park injury" }, - { "field": "Bus accident" }, - { "field": "Industrial accidents" }, - { "field": "Product defects" }, - { "field": "Food poisoning" }, - { "field": "Workplace accidents" }, - { "field": "Wrongful death" }, - { "field": "Swimming pool accidents" }, - { "field": "Premises accidents" }, - { "field": "Aircraft accidents" }, - { "field": "Animal and dog bites" } - ] - } - ] - } + fullName: "Allen Cox", + contactEmail: "allen.cox@example.com", + position: "Personal Injury Lawyer", + contactNumber: null, + specializations: [ + { field: "Automobile accidents" }, + { field: "Truck accidents" }, + { field: "Amusement park injury" }, + { field: "Bus accident" }, + { field: "Industrial accidents" }, + { field: "Product defects" }, + { field: "Food poisoning" }, + { field: "Workplace accidents" }, + { field: "Wrongful death" }, + { field: "Swimming pool accidents" }, + { field: "Premises accidents" }, + { field: "Aircraft accidents" }, + { field: "Animal and dog bites" }, + ], + }, + ], + }; const expected = { - "attorneys": [ + attorneys: [ { - "fullName": "Allen Cox", - "contactEmail": "allen.cox@example.com", - "position": "Personal Injury Lawyer", - "contactNumber": "972.606.9000", - "specializations": [ - { "field": "Personal Injury" }, - { "field": "Automobile accidents" }, - { "field": "Truck accidents" }, - { "field": "Amusement park injury" }, - { "field": "Bus accident" }, - { "field": "Industrial accidents" }, - { "field": "Product defects" }, - { "field": "Food poisoning" }, - { "field": "Workplace accidents" }, - { "field": "Wrongful death" }, - { "field": "Swimming pool accidents" }, - { "field": "Premises accidents" }, - { "field": "Aircraft accidents" }, - { "field": "Animal and dog bites" } - ] - } - ] - } + fullName: "Allen Cox", + contactEmail: "allen.cox@example.com", + position: "Personal Injury Lawyer", + contactNumber: "972.606.9000", + specializations: [ + { field: "Personal Injury" }, + { field: "Automobile accidents" }, + { field: "Truck accidents" }, + { field: "Amusement park injury" }, + { field: "Bus accident" }, + { field: "Industrial accidents" }, + { field: "Product defects" }, + { field: "Food poisoning" }, + { field: "Workplace accidents" }, + { field: "Wrongful death" }, + { field: "Swimming pool accidents" }, + { field: "Premises accidents" }, + { field: "Aircraft accidents" }, + { field: "Animal and dog bites" }, + ], + }, + ], + }; const result = mergeNullValObjs(objArray); 
expect(result).toEqual(expected); - }) + }); - it ("should deal with not array values", async () => { + it("should deal with not array values", async () => { const objArray = { - "lawyers": { - "name": "not an array" + lawyers: { + name: "not an array", }, - "attorneys": { - "name": "not an array" - } - } + attorneys: { + name: "not an array", + }, + }; const expected = { - "lawyers": { - "name": "not an array" + lawyers: { + name: "not an array", }, - "attorneys": { - "name": "not an array" - } - } + attorneys: { + name: "not an array", + }, + }; // @ts-expect-error const result = mergeNullValObjs(objArray); expect(result).toEqual(expected); - }) + }); - it ("should deal with arrays of strings", async () => { - const objArray = { - "lawyers": ["res1", "res2", "res3"] - } + it("should deal with arrays of strings", async () => { + const objArray = { + lawyers: ["res1", "res2", "res3"], + }; - const expected = { - "lawyers": ["res1", "res2", "res3"] - } + const expected = { + lawyers: ["res1", "res2", "res3"], + }; - const result = mergeNullValObjs(objArray); + const result = mergeNullValObjs(objArray); - expect(result).toEqual(expected); - }) - -}) + expect(result).toEqual(expected); + }); +}); diff --git a/apps/api/src/lib/__tests__/mix-schemas.test.ts b/apps/api/src/lib/__tests__/mix-schemas.test.ts index b3fb1aa8..f62d5016 100644 --- a/apps/api/src/lib/__tests__/mix-schemas.test.ts +++ b/apps/api/src/lib/__tests__/mix-schemas.test.ts @@ -21,13 +21,13 @@ describe("mixSchemaObjects function", () => { city: { type: "string" }, state: { type: "string" }, country: { type: "string" }, - postal_code: { type: "string" } + postal_code: { type: "string" }, }, }, incorporation_date: { type: "string", format: "date" }, phone: { type: "string" }, - email: { type: "string", format: "email" } - } + email: { type: "string", format: "email" }, + }, }, owners: { type: "array", @@ -43,280 +43,284 @@ describe("mixSchemaObjects function", () => { city: { type: "string" }, state: { type: "string" }, country: { type: "string" }, - postal_code: { type: "string" } + postal_code: { type: "string" }, }, }, phone: { type: "string" }, - email: { type: "string", format: "email" } - } - } - } - } - } + email: { type: "string", format: "email" }, + }, + }, + }, + }, + }; const singleAnswerResult = { - "business": { - "name": "Revolut Ltd", - "registration_number": "08804411", - "tax_id": "", - "type": "Private limited company", - "industry": "Other information technology service activities", - "address": { - "street": "7 Westferry Circus", - "city": "London", - "state": "", - "country": "England", - "postal_code": "E14 4HD" + business: { + name: "Revolut Ltd", + registration_number: "08804411", + tax_id: "", + type: "Private limited company", + industry: "Other information technology service activities", + address: { + street: "7 Westferry Circus", + city: "London", + state: "", + country: "England", + postal_code: "E14 4HD", }, - "incorporation_date": "2013-12-06", - "phone": "", - "email": "" - } - } + incorporation_date: "2013-12-06", + phone: "", + email: "", + }, + }; const multiEntityResult = { - "owners": [ + owners: [ { - "full_name": "Thomas Bruce Hambrett", - "role": "Secretary", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Thomas Bruce Hambrett", + role: "Secretary", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 
4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "Caroline Louise Britton", - "role": "Director", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Caroline Louise Britton", + role: "Director", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "Martin James Gilbert", - "role": "Director", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Martin James Gilbert", + role: "Director", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "Michael Sidney Sherwood", - "role": "Director", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Michael Sidney Sherwood", + role: "Director", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "John Phimister Sievwright", - "role": "Director", - "ownership_percentage": "", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "John Phimister Sievwright", + role: "Director", + ownership_percentage: "", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "Nikolay Storonsky", - "role": "Director", - "ownership_percentage": "", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Nikolay Storonsky", + role: "Director", + ownership_percentage: "", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "Dan Teodosiu", - "role": "Director", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Dan Teodosiu", + role: "Director", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "Vladyslav Yatsenko", - "role": "Director", - "ownership_percentage": "", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Vladyslav Yatsenko", + role: "Director", + ownership_percentage: "", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" - } - ] - } + phone: "", + email: "", + }, + ], + }; - const finalResult = await 
mixSchemaObjects(originalSchema, singleAnswerResult, multiEntityResult) + const finalResult = await mixSchemaObjects( + originalSchema, + singleAnswerResult, + multiEntityResult, + ); expect(finalResult).toEqual({ - "business": { - "name": "Revolut Ltd", - "registration_number": "08804411", - "tax_id": "", - "type": "Private limited company", - "industry": "Other information technology service activities", - "address": { - "street": "7 Westferry Circus", - "city": "London", - "state": "", - "country": "England", - "postal_code": "E14 4HD" + business: { + name: "Revolut Ltd", + registration_number: "08804411", + tax_id: "", + type: "Private limited company", + industry: "Other information technology service activities", + address: { + street: "7 Westferry Circus", + city: "London", + state: "", + country: "England", + postal_code: "E14 4HD", }, - "incorporation_date": "2013-12-06", - "phone": "", - "email": "" + incorporation_date: "2013-12-06", + phone: "", + email: "", }, - "owners": [ + owners: [ { - "full_name": "Thomas Bruce Hambrett", - "role": "Secretary", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Thomas Bruce Hambrett", + role: "Secretary", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "Caroline Louise Britton", - "role": "Director", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Caroline Louise Britton", + role: "Director", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "Martin James Gilbert", - "role": "Director", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Martin James Gilbert", + role: "Director", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "Michael Sidney Sherwood", - "role": "Director", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Michael Sidney Sherwood", + role: "Director", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "John Phimister Sievwright", - "role": "Director", - "ownership_percentage": "", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "John Phimister Sievwright", + role: "Director", + ownership_percentage: "", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "Nikolay Storonsky", - "role": "Director", - "ownership_percentage": "", - "address": { - "street": "7 Westferry 
Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Nikolay Storonsky", + role: "Director", + ownership_percentage: "", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "Dan Teodosiu", - "role": "Director", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Dan Teodosiu", + role: "Director", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" + phone: "", + email: "", }, { - "full_name": "Vladyslav Yatsenko", - "role": "Director", - "ownership_percentage": "", - "address": { - "street": "7 Westferry Circus", - "city": "Canary Wharf", - "state": "London", - "country": "England", - "postal_code": "E14 4HD" + full_name: "Vladyslav Yatsenko", + role: "Director", + ownership_percentage: "", + address: { + street: "7 Westferry Circus", + city: "Canary Wharf", + state: "London", + country: "England", + postal_code: "E14 4HD", }, - "phone": "", - "email": "" - } - ] - }) - }) + phone: "", + email: "", + }, + ], + }); + }); it("should mix lawyers schema (id: 29)", async () => { const originalSchema = { @@ -336,559 +340,571 @@ describe("mixSchemaObjects function", () => { items: { type: "object", properties: { - area: { type: "string" } + area: { type: "string" }, }, }, - alias: "practice-areas" - } + alias: "practice-areas", + }, }, - } - } - } + }, + }, + }, }; const multiEntityResult = { - "lawyers": [ + lawyers: [ { - "name": "Phillip Galyen", - "email": "pgalyen@galyen.com", - "title": "President and CEO", + name: "Phillip Galyen", + email: "pgalyen@galyen.com", + title: "President and CEO", "phone-number": "(844) 698-0233", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "James Bridge", - "email": "jbridge@galyen.com", - "title": "COO & Firm Managing Attorney", + name: "James Bridge", + email: "jbridge@galyen.com", + title: "COO & Firm Managing Attorney", "phone-number": "(844) 698-0233", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Stephen C. Maxwell", - "email": "smaxwell@galyen.com", - "title": "Personal Injury Trial Attorney", + name: "Stephen C. Maxwell", + email: "smaxwell@galyen.com", + title: "Personal Injury Trial Attorney", "phone-number": "(844) 698-0233", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Scott Robelen", - "email": "srobelen@galyen.com", - "title": "Personal Injury Attorney", + name: "Scott Robelen", + email: "srobelen@galyen.com", + title: "Personal Injury Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Kern A. Lewis", - "email": "klewis@galyen.com", - "title": "Personal Injury Attorney", + name: "Kern A. 
Lewis", + email: "klewis@galyen.com", + title: "Personal Injury Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Steven Pierret", - "email": "spierret@galyen.com", - "title": "Personal Injury Attorney", + name: "Steven Pierret", + email: "spierret@galyen.com", + title: "Personal Injury Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Michael Galyen", - "email": "mgalyen@galyen.com", - "title": "Executive Vice President - Litigation Attorney", + name: "Michael Galyen", + email: "mgalyen@galyen.com", + title: "Executive Vice President - Litigation Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "H. John Gutierrez", - "email": "jgutierrez@galyen.com", - "title": "Personal Injury Lawyer", + name: "H. John Gutierrez", + email: "jgutierrez@galyen.com", + title: "Personal Injury Lawyer", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Daniel P. Sullivan", - "email": "dsullivan@galyen.com", - "title": "Personal Injury Attorney", + name: "Daniel P. Sullivan", + email: "dsullivan@galyen.com", + title: "Personal Injury Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Ana Lee", - "email": "alee@galyen.com", - "title": "Personal Injury Attorney", + name: "Ana Lee", + email: "alee@galyen.com", + title: "Personal Injury Attorney", "phone-number": "(844) 402-4530", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Michael Raymond Cramer", - "email": "mcramer@galyen.com", - "title": "Of Counsel", + name: "Michael Raymond Cramer", + email: "mcramer@galyen.com", + title: "Of Counsel", "phone-number": "(844) 698-0233", "practice-areas": [ { - "area": "Business Law" + area: "Business Law", }, { - "area": "Civil and Commercial Litigation" + area: "Civil and Commercial Litigation", }, { - "area": "Employment Law" + area: "Employment Law", }, { - "area": "Corporate Law" + area: "Corporate Law", }, { - "area": "Construction Law" + area: "Construction Law", }, { - "area": "Real Estate" + area: "Real Estate", }, { - "area": "Civil Defense" + area: "Civil Defense", }, { - "area": "Estate Planning" - } - ] + area: "Estate Planning", + }, + ], }, { - "name": "Benton Gann", - "email": "bgann@galyen.com", - "title": "Personal Injury Attorney", + name: "Benton Gann", + email: "bgann@galyen.com", + title: "Personal Injury Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Shane F. Langston", - "email": "slangston@galyen.com", - "title": "Personal Injury Litigation", + name: "Shane F. Langston", + email: "slangston@galyen.com", + title: "Personal Injury Litigation", "phone-number": "(844) 402-4530", "practice-areas": [ { - "area": "Personal Injury Litigation" - } - ] + area: "Personal Injury Litigation", + }, + ], }, { - "name": "Rebecca M. Langston", - "email": "rlangston@galyen.com", - "title": "Personal Injury Litigation", + name: "Rebecca M. 
Langston", + email: "rlangston@galyen.com", + title: "Personal Injury Litigation", "phone-number": "(844) 402-4530", "practice-areas": [ { - "area": "Personal Injury Litigation" - } - ] + area: "Personal Injury Litigation", + }, + ], }, { - "name": "David Klemm", - "email": "dklemm@galyen.com", - "title": "Personal Injury Trial Lawyer", + name: "David Klemm", + email: "dklemm@galyen.com", + title: "Personal Injury Trial Lawyer", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Personal Injury Trial Lawyer" - } - ] + area: "Personal Injury Trial Lawyer", + }, + ], }, { - "name": "Tyler D. Baker", - "email": "tbaker@galyen.com", - "title": "Personal Injury Attorney", + name: "Tyler D. Baker", + email: "tbaker@galyen.com", + title: "Personal Injury Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Personal Injury" - } - ] + area: "Personal Injury", + }, + ], }, { - "name": "Clint Lee", - "email": "clee@galyen.com", - "title": "Catastrophic Injury Attorney", + name: "Clint Lee", + email: "clee@galyen.com", + title: "Catastrophic Injury Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Catastrophic Injury" - } - ] + area: "Catastrophic Injury", + }, + ], }, { - "name": "R. Keith Spencer", - "email": "rkspencer@galyen.com", - "title": "Family Law Attorney", + name: "R. Keith Spencer", + email: "rkspencer@galyen.com", + title: "Family Law Attorney", "phone-number": "(844) 698-0233", "practice-areas": [ { - "area": "Family Law" - } - ] + area: "Family Law", + }, + ], }, { - "name": "Gene Leposki", - "email": "gleposki@galyen.com", - "title": "Family Law Attorney", + name: "Gene Leposki", + email: "gleposki@galyen.com", + title: "Family Law Attorney", "phone-number": "(844) 698-0233", "practice-areas": [ { - "area": "Family Law" - } - ] + area: "Family Law", + }, + ], }, { - "name": "Teresa Sanchez", - "email": "tsanchez@galyen.com", - "title": "Managing Attorney of the Family Law Department", + name: "Teresa Sanchez", + email: "tsanchez@galyen.com", + title: "Managing Attorney of the Family Law Department", "phone-number": "(844) 698-0233", "practice-areas": [ { - "area": "Family Law" - } - ] + area: "Family Law", + }, + ], }, { - "name": "Paul Kennedy", - "email": "pkennedy@galyen.com", - "title": "Family Law Attorney", + name: "Paul Kennedy", + email: "pkennedy@galyen.com", + title: "Family Law Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Family Law" - } - ] + area: "Family Law", + }, + ], }, { - "name": "Danielle Cortez-Harper", - "email": "dharper@galyen.com", - "title": "Family Law Attorney", + name: "Danielle Cortez-Harper", + email: "dharper@galyen.com", + title: "Family Law Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Family Law" - } - ] + area: "Family Law", + }, + ], }, { - "name": "Jane Mapes", - "email": "jmapes@galyen.com", - "title": "Family Law Attorney", + name: "Jane Mapes", + email: "jmapes@galyen.com", + title: "Family Law Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Family Law" - } - ] + area: "Family Law", + }, + ], }, { - "name": "Juliette Steffe", - "email": "jsteffe@galyen.com", - "title": "Family Law Attorney", + name: "Juliette Steffe", + email: "jsteffe@galyen.com", + title: "Family Law Attorney", "phone-number": "(817) 263-3000", "practice-areas": [ { - "area": "Family Law" - } - ] + area: "Family Law", + }, + ], }, { - "name": "Anna Nika", - "email": "anika@galyen.com", - "title": "Family Law 
Attorney", + name: "Anna Nika", + email: "anika@galyen.com", + title: "Family Law Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Family Law" - } - ] + area: "Family Law", + }, + ], }, { - "name": "Lori Shannon", - "email": "lshannon@galyen.com", - "title": "Family Law Attorney", + name: "Lori Shannon", + email: "lshannon@galyen.com", + title: "Family Law Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Family Law" - } - ] + area: "Family Law", + }, + ], }, { - "name": "Michael Livens", - "email": "mlivens@galyen.com", - "title": "Family Law Attorney", + name: "Michael Livens", + email: "mlivens@galyen.com", + title: "Family Law Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Family Law" - } - ] + area: "Family Law", + }, + ], }, { - "name": "Jennifer Scherf", - "email": "jscherf@galyen.com", - "title": "Family Law Attorney", + name: "Jennifer Scherf", + email: "jscherf@galyen.com", + title: "Family Law Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Family Law" - } - ] + area: "Family Law", + }, + ], }, { - "name": "Allen Griffin", - "email": "agriffin@galyen.com", - "title": "Family Law Attorney", + name: "Allen Griffin", + email: "agriffin@galyen.com", + title: "Family Law Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Family Law" - } - ] + area: "Family Law", + }, + ], }, { - "name": "Ian Croall", - "email": "icroall@galyen.com", - "title": "Vice President & Managing Attorney, Social Security Disability", + name: "Ian Croall", + email: "icroall@galyen.com", + title: + "Vice President & Managing Attorney, Social Security Disability", "phone-number": "(844) 698-0233", "practice-areas": [ { - "area": "Social Security Disability" - } - ] + area: "Social Security Disability", + }, + ], }, { - "name": "Kim C. Smith", - "email": "ksmith@galyen.com", - "title": "Managing Attorney, Workers’ Compensation", + name: "Kim C. Smith", + email: "ksmith@galyen.com", + title: "Managing Attorney, Workers’ Compensation", "phone-number": "(844) 698-0233", "practice-areas": [ { - "area": "Workers’ Compensation" - } - ] + area: "Workers’ Compensation", + }, + ], }, { - "name": "J. C. Bailey III", - "email": "jcbailey@galyen.com", - "title": "Estate Planning, Probate, Wills & Business Law", + name: "J. C. 
Bailey III", + email: "jcbailey@galyen.com", + title: "Estate Planning, Probate, Wills & Business Law", "phone-number": "(844) 698-0233", "practice-areas": [ { - "area": "Estate Planning" + area: "Estate Planning", }, { - "area": "Probate" + area: "Probate", }, { - "area": "Wills" + area: "Wills", }, { - "area": "Business Law" - } - ] + area: "Business Law", + }, + ], }, { - "name": "John Robinson", - "email": "jrobinson@galyen.com", - "title": "Criminal Law Attorney", + name: "John Robinson", + email: "jrobinson@galyen.com", + title: "Criminal Law Attorney", "phone-number": "(844) 698-0233", "practice-areas": [ { - "area": "Criminal Law" - } - ] + area: "Criminal Law", + }, + ], }, { - "name": "Michael Raymond Cramer", - "email": "mcramer@galyen.com", - "title": "Of Counsel", + name: "Michael Raymond Cramer", + email: "mcramer@galyen.com", + title: "Of Counsel", "phone-number": "(844) 698-0233", "practice-areas": [ { - "area": "Business Law" + area: "Business Law", }, { - "area": "Civil and Commercial Litigation" + area: "Civil and Commercial Litigation", }, { - "area": "Employment Law" + area: "Employment Law", }, { - "area": "Corporate Law" + area: "Corporate Law", }, { - "area": "Construction Law" + area: "Construction Law", }, { - "area": "Real Estate" + area: "Real Estate", }, { - "area": "Civil Defense" + area: "Civil Defense", }, { - "area": "Estate Planning" - } - ] + area: "Estate Planning", + }, + ], }, { - "name": "Paul F. Wieneskie", - "email": "pwieneskie@galyen.com", - "title": "Civil Appellate Attorney", + name: "Paul F. Wieneskie", + email: "pwieneskie@galyen.com", + title: "Civil Appellate Attorney", "phone-number": "(844) 698-0233", "practice-areas": [ { - "area": "Civil Appellate Law" - } - ] + area: "Civil Appellate Law", + }, + ], }, { - "name": "Claudia Cubias", - "email": "ccubias@galyen.com", - "title": "Immigration Attorney", + name: "Claudia Cubias", + email: "ccubias@galyen.com", + title: "Immigration Attorney", "phone-number": "(844) 402-2992", "practice-areas": [ { - "area": "Immigration Law" - } - ] + area: "Immigration Law", + }, + ], }, { - "name": "Katherine Hawkins", - "email": "khawkins@galyen.com", - "title": "Immigration Attorney", + name: "Katherine Hawkins", + email: "khawkins@galyen.com", + title: "Immigration Attorney", "phone-number": "", "practice-areas": [ { - "area": "Immigration Law" - } - ] - } - ] + area: "Immigration Law", + }, + ], + }, + ], }; - const singleAnswerResult = {} + const singleAnswerResult = {}; - const finalResult = await mixSchemaObjects(originalSchema, singleAnswerResult, multiEntityResult) + const finalResult = await mixSchemaObjects( + originalSchema, + singleAnswerResult, + multiEntityResult, + ); - expect(finalResult).toEqual(multiEntityResult) - }) + expect(finalResult).toEqual(multiEntityResult); + }); it("shoud spread (id: 26)", async () => { const res1 = { - "products": [ + products: [ { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. 
הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; const res2 = { - "products": [ + products: [ { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; const res3 = { - "products": [ + products: [ { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; const res4 = { - "products": [ + products: [ { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; const res5 = { - "products": [ + products: [ { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. 
מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; const res6 = { - "products": [ + products: [ { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; const res7 = { - "products": [ + products: [ { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; - const results = [res1, res2, res3, res4, res5, res6, res7] + const results = [res1, res2, res3, res4, res5, res6, res7]; const originalSchema = { type: "object", @@ -900,60 +916,71 @@ describe("mixSchemaObjects function", () => { properties: { name: { type: "string" }, price: { type: "string" }, - description: { type: "string" } - } - } - } - } - } + description: { type: "string" }, + }, + }, + }, + }, + }; - console.log(await transformArrayToObject(originalSchema, results)) + console.log(await transformArrayToObject(originalSchema, results)); - const singleAnswerResult = {} + const singleAnswerResult = {}; const multiEntityResult = { - "products": [ + products: [ { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. 
סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", }, { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", }, { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", }, { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", }, { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. 
הצבע בתמונה עשוי להיות שונה מהמציאות" + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", }, { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", }, { - "name": "סיר Neon", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. 
הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; - const finalResult = await mixSchemaObjects(originalSchema, singleAnswerResult, multiEntityResult) + const finalResult = await mixSchemaObjects( + originalSchema, + singleAnswerResult, + multiEntityResult, + ); - expect(finalResult).toEqual(multiEntityResult) - }) + expect(finalResult).toEqual(multiEntityResult); + }); it("should spread (id: 29)", async () => { const originalSchema = { @@ -969,28 +996,31 @@ describe("mixSchemaObjects function", () => { offers_cmmc: { type: "boolean" }, has_soc_2_cert: { type: "boolean" }, offers_office365: { type: "boolean" }, - offers_endpoint_security: { type: "boolean" } - } - } + offers_endpoint_security: { type: "boolean" }, + }, + }; const singleAnswerResult = { - "is_active": true, - "is_partner": true, - "is_msp": true, - "is_auditor": false, - "is_vciso": false, - "offers_soc_2": true, - "offers_iso_27001": false, - "offers_cmmc": false, - "has_soc_2_cert": false, - "offers_office365": true, - "offers_endpoint_security": false - } - const multiEntityResult = {} + is_active: true, + is_partner: true, + is_msp: true, + is_auditor: false, + is_vciso: false, + offers_soc_2: true, + offers_iso_27001: false, + offers_cmmc: false, + has_soc_2_cert: false, + offers_office365: true, + offers_endpoint_security: false, + }; + const multiEntityResult = {}; - const finalResult = await mixSchemaObjects(originalSchema, singleAnswerResult, multiEntityResult) + const finalResult = await mixSchemaObjects( + originalSchema, + singleAnswerResult, + multiEntityResult, + ); - expect(finalResult).toEqual(singleAnswerResult) - }) - -}) \ No newline at end of file + expect(finalResult).toEqual(singleAnswerResult); + }); +}); diff --git a/apps/api/src/lib/__tests__/spread-schema-objects.test.ts b/apps/api/src/lib/__tests__/spread-schema-objects.test.ts index e59028cf..7cfcdd03 100644 --- a/apps/api/src/lib/__tests__/spread-schema-objects.test.ts +++ b/apps/api/src/lib/__tests__/spread-schema-objects.test.ts @@ -2,7 +2,7 @@ import { spreadSchemas } from "../extract/helpers/spread-schemas"; describe("spreadSchemas", () => { it("should spread kyb schema (id: 1)", async () => { - const keys = ["owners"] + const keys = ["owners"]; const schema = { type: "object", properties: { @@ -21,13 +21,13 @@ describe("spreadSchemas", () => { city: { type: "string" }, state: { type: "string" }, country: { type: "string" }, - postal_code: { type: "string" } + postal_code: { type: "string" }, }, }, incorporation_date: { type: "string", format: "date" }, phone: { type: "string" }, - email: { type: "string", format: "email" } - } + email: { type: "string", format: "email" }, + }, }, owners: { type: "array", @@ -43,18 +43,21 @@ describe("spreadSchemas", () => { city: { type: "string" }, state: { type: "string" }, country: { type: "string" }, - postal_code: { type: "string" } + postal_code: { type: "string" }, }, }, phone: { type: "string" }, - email: { type: "string", format: "email" } - } - } - } - } - } + email: { type: "string", format: "email" }, + }, + }, + }, + }, + }; - const { singleAnswerSchema, multiEntitySchema } = await spreadSchemas(schema, keys) + const { singleAnswerSchema, multiEntitySchema } = await spreadSchemas( + schema, + keys, + ); expect(singleAnswerSchema).toEqual({ type: "object", @@ -74,16 +77,16 @@ describe("spreadSchemas", () => { city: { type: "string" }, state: { type: "string" }, country: { type: "string" }, - postal_code: { type: "string" } - } + postal_code: { type: "string" }, + }, }, incorporation_date: { 
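// ---------------------------------------------------------------------------
// Illustrative sketch (assumed, not the implementation under test): the
// mixSchemaObjects tests above only pin down observable behaviour, so this is
// one minimal way such a helper could work — recombine the single-answer and
// multi-entity partial results into a single object shaped by the original
// schema. Every name and type below is an assumption inferred from the tests.
// ---------------------------------------------------------------------------
type SchemaSketch = {
  type?: string;
  properties?: Record<string, SchemaSketch>;
  [key: string]: unknown;
};

async function mixSchemaObjectsSketch(
  originalSchema: SchemaSketch,
  singleAnswerResult: Record<string, unknown>,
  multiEntityResult: Record<string, unknown>,
): Promise<Record<string, unknown>> {
  const mixed: Record<string, unknown> = {};
  // Walk the top-level properties of the original schema and take each value
  // from whichever partial result produced it. In the tests above the two
  // partial results carry disjoint keys, so the order of preference here is
  // not actually exercised.
  for (const key of Object.keys(originalSchema.properties ?? {})) {
    if (key in multiEntityResult) {
      mixed[key] = multiEntityResult[key];
    } else if (key in singleAnswerResult) {
      mixed[key] = singleAnswerResult[key];
    }
  }
  return mixed;
}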
type: "string", format: "date" }, phone: { type: "string" }, - email: { type: "string", format: "email" } - } + email: { type: "string", format: "email" }, + }, }, }, - }) + }); expect(multiEntitySchema).toEqual({ type: "object", @@ -102,20 +105,20 @@ describe("spreadSchemas", () => { city: { type: "string" }, state: { type: "string" }, country: { type: "string" }, - postal_code: { type: "string" } - } + postal_code: { type: "string" }, + }, }, phone: { type: "string" }, - email: { type: "string", format: "email" } - } - } - } - } - }) - }) + email: { type: "string", format: "email" }, + }, + }, + }, + }, + }); + }); it("should spread lawyers schema (id: 9)", async () => { - const keys = ["lawyers"] + const keys = ["lawyers"]; const schema = { type: "object", properties: { @@ -133,22 +136,25 @@ describe("spreadSchemas", () => { items: { type: "object", properties: { - area: { type: "string" } + area: { type: "string" }, }, }, - alias: "practice-areas" - } + alias: "practice-areas", + }, }, - } - } - } + }, + }, + }, }; - const { singleAnswerSchema, multiEntitySchema } = await spreadSchemas(schema, keys) + const { singleAnswerSchema, multiEntitySchema } = await spreadSchemas( + schema, + keys, + ); - expect(singleAnswerSchema).toEqual({}) - expect(multiEntitySchema).toEqual(schema) - }) + expect(singleAnswerSchema).toEqual({}); + expect(multiEntitySchema).toEqual(schema); + }); it("shoud spread (id: 26)", async () => { const schema = { @@ -161,19 +167,22 @@ describe("spreadSchemas", () => { properties: { name: { type: "string" }, price: { type: "string" }, - description: { type: "string" } - } - } - } - } - } + description: { type: "string" }, + }, + }, + }, + }, + }; - const keys = ["products"] - const { singleAnswerSchema, multiEntitySchema } = await spreadSchemas(schema, keys) + const keys = ["products"]; + const { singleAnswerSchema, multiEntitySchema } = await spreadSchemas( + schema, + keys, + ); - expect(singleAnswerSchema).toEqual({}) - expect(multiEntitySchema).toEqual(schema) - }) + expect(singleAnswerSchema).toEqual({}); + expect(multiEntitySchema).toEqual(schema); + }); it("shoud spread categories and products", async () => { const schema = { @@ -182,8 +191,8 @@ describe("spreadSchemas", () => { categories: { type: "array", items: { - type: "string" - } + type: "string", + }, }, products: { type: "array", @@ -192,19 +201,22 @@ describe("spreadSchemas", () => { properties: { name: { type: "string" }, price: { type: "string" }, - description: { type: "string" } - } - } - } - } - } + description: { type: "string" }, + }, + }, + }, + }, + }; - const keys = ["products", "categories"] - const { singleAnswerSchema, multiEntitySchema } = await spreadSchemas(schema, keys) + const keys = ["products", "categories"]; + const { singleAnswerSchema, multiEntitySchema } = await spreadSchemas( + schema, + keys, + ); - expect(singleAnswerSchema).toEqual({}) - expect(multiEntitySchema).toEqual(schema) - }) + expect(singleAnswerSchema).toEqual({}); + expect(multiEntitySchema).toEqual(schema); + }); it("should spread (id: 29)", async () => { const schema = { @@ -220,50 +232,55 @@ describe("spreadSchemas", () => { offers_cmmc: { type: "boolean" }, has_soc_2_cert: { type: "boolean" }, offers_office365: { type: "boolean" }, - offers_endpoint_security: { type: "boolean" } - } - } + offers_endpoint_security: { type: "boolean" }, + }, + }; - const keys = [] - const { singleAnswerSchema, multiEntitySchema } = await spreadSchemas(schema, keys) + const keys = []; + const { singleAnswerSchema, 
multiEntitySchema } = await spreadSchemas( + schema, + keys, + ); - expect(singleAnswerSchema).toEqual(schema) - expect(multiEntitySchema).toEqual({}) - }) + expect(singleAnswerSchema).toEqual(schema); + expect(multiEntitySchema).toEqual({}); + }); it("should spread kyb schema (id: 29)", async () => { - const schema = { - "type": "object", - "properties": { - "lawyers": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name": { "type": "string" }, - "email": { "type": ["string", "null"] }, - "phone-number": { "type": "string" }, + type: "object", + properties: { + lawyers: { + type: "array", + items: { + type: "object", + properties: { + name: { type: "string" }, + email: { type: ["string", "null"] }, + "phone-number": { type: "string" }, "practice-areas": { - "type": "array", - "items": { - "type": "object", - "properties": { - "area": { "type": "string" } - } - } + type: "array", + items: { + type: "object", + properties: { + area: { type: "string" }, + }, + }, }, - "title": { "type": ["string", "null"] } + title: { type: ["string", "null"] }, }, - } - } - } - } + }, + }, + }, + }; - const keys = ["lawyers"] - const { singleAnswerSchema, multiEntitySchema } = await spreadSchemas(schema, keys) + const keys = ["lawyers"]; + const { singleAnswerSchema, multiEntitySchema } = await spreadSchemas( + schema, + keys, + ); - expect(singleAnswerSchema).toEqual({}) - expect(multiEntitySchema).toEqual(schema) - }) -}) \ No newline at end of file + expect(singleAnswerSchema).toEqual({}); + expect(multiEntitySchema).toEqual(schema); + }); +}); diff --git a/apps/api/src/lib/__tests__/transform-array-to-obj.test.ts b/apps/api/src/lib/__tests__/transform-array-to-obj.test.ts index a7e231cf..7f3591c6 100644 --- a/apps/api/src/lib/__tests__/transform-array-to-obj.test.ts +++ b/apps/api/src/lib/__tests__/transform-array-to-obj.test.ts @@ -10,665 +10,641 @@ const originalSchema = { properties: { name: { type: "string" }, price: { type: "string" }, - description: { type: "string" } - } - } - } - } -} + description: { type: "string" }, + }, + }, + }, + }, +}; describe("transformArrayToObject function", () => { it("shoud transform array to object (id: 26)", async () => { const res1 = { - "products": [ + products: [ { - "name": "סיר Neon 1", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon 1", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; const res2 = { - "products": [ + products: [ { - "name": "סיר Neon 2", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. 
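// ---------------------------------------------------------------------------
// Illustrative sketch (assumed, not the implementation under test): the
// spreadSchemas expectations above can be satisfied by splitting the schema's
// top-level properties into a "multi entity" part (the listed keys) and a
// "single answer" part (everything else), with an empty side collapsing to
// {}. All names below are assumptions inferred from the tests.
// ---------------------------------------------------------------------------
type SpreadSchemaSketch = {
  type?: string;
  properties?: Record<string, unknown>;
  [key: string]: unknown;
};

async function spreadSchemasSketch(
  schema: SpreadSchemaSketch,
  keys: string[],
): Promise<{
  singleAnswerSchema: SpreadSchemaSketch;
  multiEntitySchema: SpreadSchemaSketch;
}> {
  const single: Record<string, unknown> = {};
  const multi: Record<string, unknown> = {};
  // Route each top-level property to the multi-entity side if its key was
  // requested, otherwise to the single-answer side.
  for (const [name, prop] of Object.entries(schema.properties ?? {})) {
    (keys.includes(name) ? multi : single)[name] = prop;
  }
  // A side with no properties collapses to {}, which is what the tests above
  // assert for e.g. keys = ["lawyers"] or keys = [].
  const wrap = (props: Record<string, unknown>): SpreadSchemaSketch =>
    Object.keys(props).length > 0 ? { ...schema, properties: props } : {};
  return { singleAnswerSchema: wrap(single), multiEntitySchema: wrap(multi) };
}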
סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon 2", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; const res3 = { - "products": [ + products: [ { - "name": "סיר Neon 3", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon 3", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; const res4 = { - "products": [ + products: [ { - "name": "סיר Neon 4", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon 4", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; const res5 = { - "products": [ + products: [ { - "name": "סיר Neon 5", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon 5", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. 
הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; const res6 = { - "products": [ + products: [ { - "name": "סיר Neon 6", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon 6", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; const res7 = { - "products": [ + products: [ { - "name": "סיר Neon 7", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon 7", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; - const results = [res1, res2, res3, res4, res5, res6, res7] + const results = [res1, res2, res3, res4, res5, res6, res7]; const multiEntityResult = { - "products": [ + products: [ { - "name": "סיר Neon 1", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" + name: "סיר Neon 1", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", }, { - "name": "סיר Neon 2", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. 
הצבע בתמונה עשוי להיות שונה מהמציאות" + name: "סיר Neon 2", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", }, { - "name": "סיר Neon 3", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" + name: "סיר Neon 3", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", }, { - "name": "סיר Neon 4", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" + name: "סיר Neon 4", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", }, { - "name": "סיר Neon 5", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" + name: "סיר Neon 5", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", }, { - "name": "סיר Neon 6", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. 
הצבע בתמונה עשוי להיות שונה מהמציאות" + name: "סיר Neon 6", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", }, { - "name": "סיר Neon 7", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon 7", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; - expect(await transformArrayToObject(originalSchema, results)).toEqual(multiEntityResult) - }) + expect(await transformArrayToObject(originalSchema, results)).toEqual( + multiEntityResult, + ); + }); it("should transform array to object (id: 27)", async () => { const res1 = { - "products": [ + products: [ { - "name": "סיר Neon 1", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon 1", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; - const res3 = { "products": [] } - const res4 = { "products": null } + const res3 = { products: [] }; + const res4 = { products: null }; - const results = [res1, res3, res4] + const results = [res1, res3, res4]; const multiEntityResult = { - "products": [ + products: [ { - "name": "סיר Neon 1", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon 1", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. 
מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; - expect(await transformArrayToObject(originalSchema, results)).toEqual(multiEntityResult) - }) + expect(await transformArrayToObject(originalSchema, results)).toEqual( + multiEntityResult, + ); + }); it("should transform array to object (id: 27)", async () => { const res1 = { - "products": [ + products: [ { - "name": "סיר Neon 1", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon 1", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; - const res3 = { "products": [] } - const res4 = { "products": [{ - "name": "סיר Neon 4", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - }] } + const res3 = { products: [] }; + const res4 = { + products: [ + { + name: "סיר Neon 4", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; - const results = [res1, res3, res4] + const results = [res1, res3, res4]; const multiEntityResult = { - "products": [ + products: [ { - "name": "סיר Neon 1", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" + name: "סיר Neon 1", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. 
הצבע בתמונה עשוי להיות שונה מהמציאות", }, { - "name": "סיר Neon 4", - "price": "99.90 ₪", - "description": "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות" - } - ] - } + name: "סיר Neon 4", + price: "99.90 ₪", + description: + "סיר מסדרת Neon גוף הכלי עשוי אלומיניום להולכת חום מהירה ואחידה ולחיסכון בזמן ואנרגיה סיר בציפוי נון סטיק למניעת הדבקות המזון, לשימוש מופחת בשמן ולניקוי קל ונוח. מתאים לכל סוגי הכיריים, מתאים לאינדוקציה מתאים לשטיפה במדיח. מתאים לשימוש כסיר אורז, סיר פסטה, סיר מרק, סיר למגוון תבשילים. סיר 28 ס”מ | 7.1 ליטר התמונה להמחשה בלבד. הצבע בתמונה עשוי להיות שונה מהמציאות", + }, + ], + }; - expect(await transformArrayToObject(originalSchema, results)).toEqual(multiEntityResult) - }) + expect(await transformArrayToObject(originalSchema, results)).toEqual( + multiEntityResult, + ); + }); it("more complex schema", async () => { const originalSchema = { type: "object", properties: { - "ecommerce": { + ecommerce: { type: "object", properties: { - "name": { type: "string" }, - "products": { + name: { type: "string" }, + products: { type: "array", items: { type: "object", properties: { - "name": { type: "string" }, - "price": { type: "string" }, - "description": { type: "string" }, - "categories": { + name: { type: "string" }, + price: { type: "string" }, + description: { type: "string" }, + categories: { type: "array", items: { - type: "string" - } - } - } - } - } - } - } - } - } + type: "string", + }, + }, + }, + }, + }, + }, + }, + }, + }; const res1 = { - "ecommerce": { - "name": '1', - "products": [ + ecommerce: { + name: "1", + products: [ { - "name": "סיר Neon 1", - "price": "99.90 ₪", - "description": "", - "categories": [ "סירים", "something", "else" ] - } - ] - } - } + name: "סיר Neon 1", + price: "99.90 ₪", + description: "", + categories: ["סירים", "something", "else"], + }, + ], + }, + }; const res2 = { - "ecommerce": { - "name": 'keep the first', - "products": [ + ecommerce: { + name: "keep the first", + products: [ { - "name": "סיר Neon 2", - "price": "99.90 ₪", - "description": "", - "categories": [ "סירים", "ajkshda", "something", "else" ] - } - ] - } - } + name: "סיר Neon 2", + price: "99.90 ₪", + description: "", + categories: ["סירים", "ajkshda", "something", "else"], + }, + ], + }, + }; - const res3 = { "ecommerce": { "products": [] } } - const res4 = { "ecommerce": { "products": null } } + const res3 = { ecommerce: { products: [] } }; + const res4 = { ecommerce: { products: null } }; - const results = [res1, res2, res3, res4] + const results = [res1, res2, res3, res4]; const multiEntityResult = { - "ecommerce": { - "name": '1', - "products": [ + ecommerce: { + name: "1", + products: [ { - "name": "סיר Neon 1", - "price": "99.90 ₪", - "description": "", - "categories": [ "סירים", "something", "else" ] + name: "סיר Neon 1", + price: "99.90 ₪", + description: "", + categories: ["סירים", "something", "else"], }, { - "name": "סיר Neon 2", - "price": "99.90 ₪", - "description": "", - "categories": [ "סירים", "ajkshda", "something", "else" ] - } - ] - } - } + name: "סיר Neon 2", + price: "99.90 ₪", + description: "", + categories: ["סירים", "ajkshda", "something", "else"], + }, + ], + }, + }; - console.log(await transformArrayToObject(originalSchema, 
results)) + console.log(await transformArrayToObject(originalSchema, results)); - expect(await transformArrayToObject(originalSchema, results)).toEqual(multiEntityResult) - }) + expect(await transformArrayToObject(originalSchema, results)).toEqual( + multiEntityResult, + ); + }); it("even more complex schema", async () => { const moreComplexSchema = { type: "object", properties: { - "name": { type: "string" }, - "description": { type: "string" }, - "products": { + name: { type: "string" }, + description: { type: "string" }, + products: { type: "array", items: { type: "object", properties: { - "name": { type: "string" }, - "price": { type: "string" }, - "description": { type: "string" } - } - } + name: { type: "string" }, + price: { type: "string" }, + description: { type: "string" }, + }, + }, }, categories: { type: "array", items: { - type: "string" - } - } - } - } + type: "string", + }, + }, + }, + }; const res1 = { - "name": '1', - "description": "description", - "products": [ + name: "1", + description: "description", + products: [ { - "name": "Neon 1", - "price": "99.90 ₪", - "description": "neon 1 product" - } - ], - "categories": [ "something", "else" ] - } - - const res4 = { "products": [] } + name: "Neon 1", + price: "99.90 ₪", + description: "neon 1 product", + }, + ], + categories: ["something", "else"], + }; + + const res4 = { products: [] }; const res2 = { - "name": 'keep first', - "description": "description", - "products": [ + name: "keep first", + description: "description", + products: [ { - "name": "Neon 2", - "price": "99.90 ₪", - "description": "neon 2 product" - } - ], - "categories": ["something" ] - } + name: "Neon 2", + price: "99.90 ₪", + description: "neon 2 product", + }, + ], + categories: ["something"], + }; const res3 = { - "name": 'keep the first', - "products": [ + name: "keep the first", + products: [ { - "name": "Neon 3", - "price": "555.90 ₪", - "description": "neon 3 product" - } + name: "Neon 3", + price: "555.90 ₪", + description: "neon 3 product", + }, ], - "categories": [ "hey", "something", "other one" ] - } + categories: ["hey", "something", "other one"], + }; - const res5 = { "products": null } + const res5 = { products: null }; - const results = [res1, res2, res3] + const results = [res1, res2, res3]; const multiEntityResult = { - "name": '1', - "description": "description", - "products": [ + name: "1", + description: "description", + products: [ { - "name": "Neon 1", - "price": "99.90 ₪", - "description": "neon 1 product" + name: "Neon 1", + price: "99.90 ₪", + description: "neon 1 product", }, { - "name": "Neon 2", - "price": "99.90 ₪", - "description": "neon 2 product" + name: "Neon 2", + price: "99.90 ₪", + description: "neon 2 product", }, { - "name": "Neon 3", - "price": "555.90 ₪", - "description": "neon 3 product" - } + name: "Neon 3", + price: "555.90 ₪", + description: "neon 3 product", + }, ], - "categories": [ "something", "else", "hey", "other one" ] - } + categories: ["something", "else", "hey", "other one"], + }; - console.log(multiEntityResult, await transformArrayToObject(moreComplexSchema, results)) + console.log( + multiEntityResult, + await transformArrayToObject(moreComplexSchema, results), + ); - expect(await transformArrayToObject(moreComplexSchema, results)).toEqual(multiEntityResult) - }) + expect(await transformArrayToObject(moreComplexSchema, results)).toEqual( + multiEntityResult, + ); + }); it("should transform array to object (id: 7)", async () => { const originalSchema = { - "type": "object", - "properties": { - 
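// ---------------------------------------------------------------------------
// Illustrative sketch (assumed, not the implementation under test): one way
// to satisfy the transformArrayToObject expectations in this file — fold the
// per-result objects into a single object shaped by the schema, concatenating
// arrays of objects, de-duplicating arrays of primitives, recursing into
// nested objects, and keeping the first non-empty scalar. Names are assumed.
// ---------------------------------------------------------------------------
type MergeSchemaSketch = {
  type?: string;
  properties?: Record<string, MergeSchemaSketch>;
  items?: MergeSchemaSketch;
  [key: string]: unknown;
};

async function transformArrayToObjectSketch(
  schema: MergeSchemaSketch,
  results: Record<string, unknown>[],
): Promise<Record<string, unknown>> {
  const merged: Record<string, unknown> = {};
  for (const [name, prop] of Object.entries(schema.properties ?? {})) {
    if (prop.type === "array") {
      // Concatenate every array across results; null and [] contribute nothing.
      const values: unknown[] = [];
      for (const result of results) {
        const value = result?.[name];
        if (Array.isArray(value)) values.push(...value);
      }
      // Arrays of objects (e.g. products) stay as-is; arrays of primitives
      // (e.g. categories) are de-duplicated, preserving first-seen order.
      merged[name] =
        prop.items?.type === "object" ? values : Array.from(new Set(values));
    } else if (prop.type === "object") {
      // Recurse with the slice of each result that sits under this key.
      merged[name] = await transformArrayToObjectSketch(
        prop,
        results.map((r) => (r?.[name] as Record<string, unknown>) ?? {}),
      );
    } else {
      // Scalars: keep the first defined, non-empty value ("keep the first").
      merged[name] = results
        .map((r) => r?.[name])
        .find((v) => v !== undefined && v !== null && v !== "");
    }
  }
  return merged;
}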
"property_details": { - "properties": { - "title": { - "title": "Title", - "type": "string" - }, - "location": { - "title": "Location", - "type": "string" - }, - "property_type": { - "title": "Property Type", - "type": "string" - }, - "size": { - "title": "Size", - "type": "string" - }, - "rooms": { - "title": "Rooms", - "type": "string" - }, - "floor": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Floor" - }, - "furnished": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Furnished" - }, - "energy_rating": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Energy Rating" - } + type: "object", + properties: { + property_details: { + properties: { + title: { + title: "Title", + type: "string", + }, + location: { + title: "Location", + type: "string", + }, + property_type: { + title: "Property Type", + type: "string", + }, + size: { + title: "Size", + type: "string", + }, + rooms: { + title: "Rooms", + type: "string", + }, + floor: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Floor", + }, + furnished: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Furnished", + }, + energy_rating: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Energy Rating", }, - "required": [ - "title", - "location", - "property_type", - "size", - "rooms", - "floor", - "furnished", - "energy_rating" - ], - "title": "PropertyDetails", - "type": "object" }, - "features": { - "properties": { - "pets_allowed": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Pets Allowed" - }, - "senior_friendly": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Senior Friendly" - }, - "balcony": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Balcony" - }, - "dishwasher": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Dishwasher" - }, - "parking": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Parking" - }, - "electric_charging": { - "anyOf": [ - { "type": "string"}, - { "type": "null" } - ], - "title": "Electric Charging" - }, - "elevator": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Elevator" - }, - "washer_dryer": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Washer Dryer" - } + required: [ + "title", + "location", + "property_type", + "size", + "rooms", + "floor", + "furnished", + "energy_rating", + ], + title: "PropertyDetails", + type: "object", + }, + features: { + properties: { + pets_allowed: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Pets Allowed", + }, + senior_friendly: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Senior Friendly", + }, + balcony: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Balcony", + }, + dishwasher: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Dishwasher", + }, + parking: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Parking", + }, + electric_charging: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Electric Charging", + }, + elevator: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Elevator", + }, + washer_dryer: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Washer Dryer", }, - "required": [ - "pets_allowed", - "senior_friendly", - "balcony", - "dishwasher", - "parking", - "electric_charging", - "elevator", - "washer_dryer" - ], - "title": 
"FeaturesAmenities", - "type": "object" }, - "rental_details": { - "properties": { - "monthly_net_rent": { - "title": "Monthly Net Rent", - "type": "string" - }, - "utilities": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Utilities" - }, - "move_in_price": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Move In Price" - }, - "deposit": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Deposit" - }, - "prepaid_rent": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Prepaid Rent" - }, - "rental_period": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Rental Period" - }, - "available_from": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Available From" - }, - "listing_id": { - "title": "Listing Id", - "type": "string" - } + required: [ + "pets_allowed", + "senior_friendly", + "balcony", + "dishwasher", + "parking", + "electric_charging", + "elevator", + "washer_dryer", + ], + title: "FeaturesAmenities", + type: "object", + }, + rental_details: { + properties: { + monthly_net_rent: { + title: "Monthly Net Rent", + type: "string", + }, + utilities: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Utilities", + }, + move_in_price: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Move In Price", + }, + deposit: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Deposit", + }, + prepaid_rent: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Prepaid Rent", + }, + rental_period: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Rental Period", + }, + available_from: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Available From", + }, + listing_id: { + title: "Listing Id", + type: "string", }, - "required": [ - "monthly_net_rent", - "utilities", - "move_in_price", - "deposit", - "prepaid_rent", - "rental_period", - "available_from", - "listing_id" - ], - "title": "RentalDetails", - "type": "object" }, - "landlord_status": { - "properties": { - "boligportal_approved": { - "anyOf": [ - { "type": "boolean" }, - { "type": "null" } - ], - "title": "Boligportal Approved" - }, - "number_of_ads": { - "anyOf": [ - { "type": "integer" }, - { "type": "null" } - ], - "title": "Number Of Ads" - }, - "last_active": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Last Active" - }, - "profile_created": { - "anyOf": [ - { "type": "string" }, - { "type": "null" } - ], - "title": "Profile Created" - } + required: [ + "monthly_net_rent", + "utilities", + "move_in_price", + "deposit", + "prepaid_rent", + "rental_period", + "available_from", + "listing_id", + ], + title: "RentalDetails", + type: "object", + }, + landlord_status: { + properties: { + boligportal_approved: { + anyOf: [{ type: "boolean" }, { type: "null" }], + title: "Boligportal Approved", }, - "required": [ - "boligportal_approved", - "number_of_ads", - "last_active", - "profile_created" - ], - "title": "LandlordStatus", - "type": "object" - } - } - } + number_of_ads: { + anyOf: [{ type: "integer" }, { type: "null" }], + title: "Number Of Ads", + }, + last_active: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Last Active", + }, + profile_created: { + anyOf: [{ type: "string" }, { type: "null" }], + title: "Profile Created", + }, + }, + required: [ + "boligportal_approved", + "number_of_ads", + "last_active", + "profile_created", + ], + title: "LandlordStatus", + type: 
"object", + }, + }, + }; - const results = [ - { - "property_details": { - "title": "3 room apartment on 70 m²", - "location": "Odense", - "property_type": "Apartment", - "size": "70 m²", - "rooms": "3", - "floor": null, - "furnished": null, - "energy_rating": null - }, - "features": { - "pets_allowed": null, - "senior_friendly": null, - "balcony": null, - "dishwasher": null, - "parking": null, - "electric_charging": null, - "elevator": null, - "washer_dryer": null - }, - "rental_details": { - "monthly_net_rent": "7,000 kr.", - "utilities": null, - "move_in_price": null, - "deposit": null, - "prepaid_rent": null, - "rental_period": null, - "available_from": null, - "listing_id": "4937446" - }, - "landlord_status": { - "boligportal_approved": null, - "number_of_ads": null, - "last_active": null, - "profile_created": null - } - } - ] + const results = [ + { + property_details: { + title: "3 room apartment on 70 m²", + location: "Odense", + property_type: "Apartment", + size: "70 m²", + rooms: "3", + floor: null, + furnished: null, + energy_rating: null, + }, + features: { + pets_allowed: null, + senior_friendly: null, + balcony: null, + dishwasher: null, + parking: null, + electric_charging: null, + elevator: null, + washer_dryer: null, + }, + rental_details: { + monthly_net_rent: "7,000 kr.", + utilities: null, + move_in_price: null, + deposit: null, + prepaid_rent: null, + rental_period: null, + available_from: null, + listing_id: "4937446", + }, + landlord_status: { + boligportal_approved: null, + number_of_ads: null, + last_active: null, + profile_created: null, + }, + }, + ]; - expect(await transformArrayToObject(originalSchema, results)).toEqual(results[0]) - }) -}) + expect(await transformArrayToObject(originalSchema, results)).toEqual( + results[0], + ); + }); +}); diff --git a/apps/api/src/lib/cache.ts b/apps/api/src/lib/cache.ts index 611bc043..c8cde476 100644 --- a/apps/api/src/lib/cache.ts +++ b/apps/api/src/lib/cache.ts @@ -42,7 +42,10 @@ export async function saveEntryToCache(key: string, entry: CacheEntry) { if (!cacheRedis) return; if (!entry.html || entry.html.length < 100) { - logger.warn("Skipping cache save for short HTML", { key, htmlLength: entry.html?.length }); + logger.warn("Skipping cache save for short HTML", { + key, + htmlLength: entry.html?.length, + }); return; } diff --git a/apps/api/src/lib/crawl-redis.ts b/apps/api/src/lib/crawl-redis.ts index 0e7e643a..553934d7 100644 --- a/apps/api/src/lib/crawl-redis.ts +++ b/apps/api/src/lib/crawl-redis.ts @@ -127,13 +127,15 @@ export async function getDoneJobsOrdered( export async function isCrawlFinished(id: string) { return ( (await redisConnection.scard("crawl:" + id + ":jobs_done")) === - (await redisConnection.scard("crawl:" + id + ":jobs")) - && (await redisConnection.get("crawl:" + id + ":kickoff:finish")) !== null + (await redisConnection.scard("crawl:" + id + ":jobs")) && + (await redisConnection.get("crawl:" + id + ":kickoff:finish")) !== null ); } export async function isCrawlKickoffFinished(id: string) { - return await redisConnection.get("crawl:" + id + ":kickoff:finish") !== null + return ( + (await redisConnection.get("crawl:" + id + ":kickoff:finish")) !== null + ); } export async function isCrawlFinishedLocked(id: string) { @@ -141,7 +143,12 @@ export async function isCrawlFinishedLocked(id: string) { } export async function finishCrawlKickoff(id: string) { - await redisConnection.set("crawl:" + id + ":kickoff:finish", "yes", "EX", 24 * 60 * 60); + await redisConnection.set( + "crawl:" + id + 
":kickoff:finish", + "yes", + "EX", + 24 * 60 * 60, + ); } export async function finishCrawl(id: string) { @@ -161,9 +168,10 @@ export async function finishCrawl(id: string) { module: "crawl-redis", method: "finishCrawl", crawlId: id, - jobs_done: (await redisConnection.scard("crawl:" + id + ":jobs_done")), - jobs: (await redisConnection.scard("crawl:" + id + ":jobs")), - kickoff_finished: (await redisConnection.get("crawl:" + id + ":kickoff:finish")) !== null, + jobs_done: await redisConnection.scard("crawl:" + id + ":jobs_done"), + jobs: await redisConnection.scard("crawl:" + id + ":jobs"), + kickoff_finished: + (await redisConnection.get("crawl:" + id + ":kickoff:finish")) !== null, }); } } diff --git a/apps/api/src/lib/extract/archive/crawling-index.ts b/apps/api/src/lib/extract/archive/crawling-index.ts index a8480c5f..19fa9495 100644 --- a/apps/api/src/lib/extract/archive/crawling-index.ts +++ b/apps/api/src/lib/extract/archive/crawling-index.ts @@ -1,81 +1,81 @@ // const id = crypto.randomUUID(); - // const sc: StoredCrawl = { - // originUrl: request.urls[0].replace("/*",""), - // crawlerOptions: toLegacyCrawlerOptions({ - // maxDepth: 15, - // limit: 5000, - // includePaths: [], - // excludePaths: [], - // ignoreSitemap: false, - // allowExternalLinks: false, - // allowBackwardLinks: true, - // allowSubdomains: false, - // ignoreRobotsTxt: false, - // deduplicateSimilarURLs: false, - // ignoreQueryParameters: false - // }), - // scrapeOptions: { - // formats: ["markdown"], - // onlyMainContent: true, - // waitFor: 0, - // mobile: false, - // removeBase64Images: true, - // fastMode: false, - // parsePDF: true, - // skipTlsVerification: false, - // }, - // internalOptions: { - // disableSmartWaitCache: true, - // isBackgroundIndex: true - // }, - // team_id: process.env.BACKGROUND_INDEX_TEAM_ID!, - // createdAt: Date.now(), - // plan: "hobby", // make it a low concurrency - // }; +// const sc: StoredCrawl = { +// originUrl: request.urls[0].replace("/*",""), +// crawlerOptions: toLegacyCrawlerOptions({ +// maxDepth: 15, +// limit: 5000, +// includePaths: [], +// excludePaths: [], +// ignoreSitemap: false, +// allowExternalLinks: false, +// allowBackwardLinks: true, +// allowSubdomains: false, +// ignoreRobotsTxt: false, +// deduplicateSimilarURLs: false, +// ignoreQueryParameters: false +// }), +// scrapeOptions: { +// formats: ["markdown"], +// onlyMainContent: true, +// waitFor: 0, +// mobile: false, +// removeBase64Images: true, +// fastMode: false, +// parsePDF: true, +// skipTlsVerification: false, +// }, +// internalOptions: { +// disableSmartWaitCache: true, +// isBackgroundIndex: true +// }, +// team_id: process.env.BACKGROUND_INDEX_TEAM_ID!, +// createdAt: Date.now(), +// plan: "hobby", // make it a low concurrency +// }; - // // Save the crawl configuration - // await saveCrawl(id, sc); +// // Save the crawl configuration +// await saveCrawl(id, sc); - // // Then kick off the job - // await _addScrapeJobToBullMQ({ - // url: request.urls[0].replace("/*",""), - // mode: "kickoff" as const, - // team_id: process.env.BACKGROUND_INDEX_TEAM_ID!, - // plan: "hobby", // make it a low concurrency - // crawlerOptions: sc.crawlerOptions, - // scrapeOptions: sc.scrapeOptions, - // internalOptions: sc.internalOptions, - // origin: "index", - // crawl_id: id, - // webhook: null, - // v1: true, - // }, {}, crypto.randomUUID(), 50); +// // Then kick off the job +// await _addScrapeJobToBullMQ({ +// url: request.urls[0].replace("/*",""), +// mode: "kickoff" as const, +// team_id: 
process.env.BACKGROUND_INDEX_TEAM_ID!, +// plan: "hobby", // make it a low concurrency +// crawlerOptions: sc.crawlerOptions, +// scrapeOptions: sc.scrapeOptions, +// internalOptions: sc.internalOptions, +// origin: "index", +// crawl_id: id, +// webhook: null, +// v1: true, +// }, {}, crypto.randomUUID(), 50); - // we restructure and make all of the arrays we need to fill into objects, - // adding them to a single object so the llm can fill them one at a time - // TODO: make this work for more complex schemas where arrays are not first level +// we restructure and make all of the arrays we need to fill into objects, +// adding them to a single object so the llm can fill them one at a time +// TODO: make this work for more complex schemas where arrays are not first level - // let schemasForLLM: {} = {}; - // for (const key in largeArraysSchema) { - // const originalSchema = structuredClone(largeArraysSchema[key].items); - // console.log( - // "key", - // key, - // "\noriginalSchema", - // JSON.stringify(largeArraysSchema[key], null, 2), - // ); - // let clonedObj = { - // type: "object", - // properties: { - // informationFilled: { - // type: "boolean", - // }, - // data: { - // type: "object", - // properties: originalSchema.properties, - // }, - // }, - // }; - // schemasForLLM[key] = clonedObj; - // } \ No newline at end of file +// let schemasForLLM: {} = {}; +// for (const key in largeArraysSchema) { +// const originalSchema = structuredClone(largeArraysSchema[key].items); +// console.log( +// "key", +// key, +// "\noriginalSchema", +// JSON.stringify(largeArraysSchema[key], null, 2), +// ); +// let clonedObj = { +// type: "object", +// properties: { +// informationFilled: { +// type: "boolean", +// }, +// data: { +// type: "object", +// properties: originalSchema.properties, +// }, +// }, +// }; +// schemasForLLM[key] = clonedObj; +// } diff --git a/apps/api/src/lib/extract/extract-redis.ts b/apps/api/src/lib/extract/extract-redis.ts index 4ec326ce..02842d8c 100644 --- a/apps/api/src/lib/extract/extract-redis.ts +++ b/apps/api/src/lib/extract/extract-redis.ts @@ -59,11 +59,11 @@ export async function updateExtract( // Limit links in steps to 500 if (extract.steps) { - extract.steps = extract.steps.map(step => { + extract.steps = extract.steps.map((step) => { if (step.discoveredLinks && step.discoveredLinks.length > 500) { return { ...step, - discoveredLinks: step.discoveredLinks.slice(0, 500) + discoveredLinks: step.discoveredLinks.slice(0, 500), }; } return step; diff --git a/apps/api/src/lib/extract/extraction-service.ts b/apps/api/src/lib/extract/extraction-service.ts index 70834621..c9ab015a 100644 --- a/apps/api/src/lib/extract/extraction-service.ts +++ b/apps/api/src/lib/extract/extraction-service.ts @@ -32,7 +32,11 @@ import { ExtractStep, updateExtract } from "./extract-redis"; import { deduplicateObjectsArray } from "./helpers/deduplicate-objs-array"; import { mergeNullValObjs } from "./helpers/merge-null-val-objs"; import { CUSTOM_U_TEAMS, extractConfig } from "./config"; -import { calculateFinalResultCost, estimateCost, estimateTotalCost } from "./usage/llm-cost"; +import { + calculateFinalResultCost, + estimateCost, + estimateTotalCost, +} from "./usage/llm-cost"; import { numTokensFromString } from "../LLM-extraction/helpers"; interface ExtractServiceOptions { @@ -147,7 +151,13 @@ Schema: ${schemaString}\nPrompt: ${prompt}\nRelevant URLs: ${urls}`, totalTokens: result.usage?.total_tokens ?? 
0, model: model, }; - return { isMultiEntity, multiEntityKeys, reasoning, keyIndicators, tokenUsage }; + return { + isMultiEntity, + multiEntityKeys, + reasoning, + keyIndicators, + tokenUsage, + }; } type completions = { @@ -187,7 +197,7 @@ export async function performExtraction( method: "performExtraction", extractId, }); - + // Token tracking let tokenUsage: TokenUsage[] = []; @@ -246,7 +256,7 @@ export async function performExtraction( "No valid URLs found to scrape. Try adjusting your search criteria or including more URLs.", extractId, urlTrace: urlTraces, - totalUrlsScraped: 0 + totalUrlsScraped: 0, }; } @@ -277,8 +287,13 @@ export async function performExtraction( // 1. the first one is a completion that will extract the array of items // 2. the second one is multiple completions that will extract the items from the array let startAnalyze = Date.now(); - const { isMultiEntity, multiEntityKeys, reasoning, keyIndicators, tokenUsage: schemaAnalysisTokenUsage } = - await analyzeSchemaAndPrompt(links, reqSchema, request.prompt ?? ""); + const { + isMultiEntity, + multiEntityKeys, + reasoning, + keyIndicators, + tokenUsage: schemaAnalysisTokenUsage, + } = await analyzeSchemaAndPrompt(links, reqSchema, request.prompt ?? ""); // Track schema analysis tokens tokenUsage.push(schemaAnalysisTokenUsage); @@ -540,7 +555,7 @@ export async function performExtraction( "An unexpected error occurred. Please contact help@firecrawl.com for help.", extractId, urlTrace: urlTraces, - totalUrlsScraped + totalUrlsScraped, }; } } @@ -592,17 +607,18 @@ export async function performExtraction( } } - const validResults = results.filter((doc): doc is Document => doc !== null); + const validResults = results.filter( + (doc): doc is Document => doc !== null, + ); singleAnswerDocs.push(...validResults); totalUrlsScraped += validResults.length; - } catch (error) { return { success: false, error: error.message, extractId, urlTrace: urlTraces, - totalUrlsScraped + totalUrlsScraped, }; } @@ -614,7 +630,7 @@ export async function performExtraction( "All provided URLs are invalid. Please check your input and try again.", extractId, urlTrace: request.urlTrace ? urlTraces : undefined, - totalUrlsScraped: 0 + totalUrlsScraped: 0, }; } @@ -679,12 +695,12 @@ export async function performExtraction( : singleAnswerResult || multiEntityResult; // Tokenize final result to get token count - let finalResultTokens = 0; - if (finalResult) { - const finalResultStr = JSON.stringify(finalResult); - finalResultTokens = numTokensFromString(finalResultStr, "gpt-4o"); + // let finalResultTokens = 0; + // if (finalResult) { + // const finalResultStr = JSON.stringify(finalResult); + // finalResultTokens = numTokensFromString(finalResultStr, "gpt-4o"); - } + // } // // Deduplicate and validate final result against schema // if (reqSchema && finalResult && finalResult.length <= extractConfig.DEDUPLICATION.MAX_TOKENS) { // const schemaValidation = await generateOpenAICompletions( @@ -695,7 +711,7 @@ export async function performExtraction( // 1. Remove any duplicate entries in the data extracted by merging that into a single object according to the provided shcema // 2. Ensure all data matches the provided schema // 3. Keep only the highest quality and most complete entries when duplicates are found. - + // Do not change anything else. If data is null keep it null. 
If the schema is not provided, return the data as is.`, // prompt: `Please validate and merge the duplicate entries in this data according to the schema provided:\n @@ -732,12 +748,10 @@ export async function performExtraction( const llmUsage = estimateTotalCost(tokenUsage); let tokensToBill = calculateFinalResultCost(finalResult); - if (CUSTOM_U_TEAMS.includes(teamId)) { tokensToBill = 1; } - // Bill team for usage billTeam(teamId, subId, tokensToBill, logger, true).catch((error) => { logger.error( @@ -745,7 +759,6 @@ export async function performExtraction( ); }); - // Log job with token usage logJob({ job_id: extractId, @@ -779,6 +792,6 @@ export async function performExtraction( warning: undefined, // TODO FIX urlTrace: request.urlTrace ? urlTraces : undefined, llmUsage, - totalUrlsScraped + totalUrlsScraped, }; } diff --git a/apps/api/src/lib/extract/helpers/deduplicate-objs-array.ts b/apps/api/src/lib/extract/helpers/deduplicate-objs-array.ts index 248c5917..1bfd2dc6 100644 --- a/apps/api/src/lib/extract/helpers/deduplicate-objs-array.ts +++ b/apps/api/src/lib/extract/helpers/deduplicate-objs-array.ts @@ -1,10 +1,12 @@ -export function deduplicateObjectsArray(objArray: { [key: string]: any[] }): { [key: string]: any[] } { +export function deduplicateObjectsArray(objArray: { [key: string]: any[] }): { + [key: string]: any[]; +} { const deduplicatedObjArray: { [key: string]: any[] } = {}; for (const key in objArray) { if (Array.isArray(objArray[key])) { const seen = new Set(); - deduplicatedObjArray[key] = objArray[key].filter(item => { + deduplicatedObjArray[key] = objArray[key].filter((item) => { // Create a unique identifier for each item based on its properties const identifier = JSON.stringify(item); @@ -24,4 +26,4 @@ export function deduplicateObjectsArray(objArray: { [key: string]: any[] }): { [ } return deduplicatedObjArray; -} \ No newline at end of file +} diff --git a/apps/api/src/lib/extract/helpers/dereference-schema.ts b/apps/api/src/lib/extract/helpers/dereference-schema.ts index 60a4325f..66f8fa50 100644 --- a/apps/api/src/lib/extract/helpers/dereference-schema.ts +++ b/apps/api/src/lib/extract/helpers/dereference-schema.ts @@ -7,4 +7,4 @@ export async function dereferenceSchema(schema: any): Promise { console.error("Failed to dereference schema:", error); throw error; } -} \ No newline at end of file +} diff --git a/apps/api/src/lib/extract/helpers/dump-to-file.ts b/apps/api/src/lib/extract/helpers/dump-to-file.ts index bc8a5f07..f52b924c 100644 --- a/apps/api/src/lib/extract/helpers/dump-to-file.ts +++ b/apps/api/src/lib/extract/helpers/dump-to-file.ts @@ -1,5 +1,5 @@ -import * as fs from 'fs'; -import * as path from 'path'; +import * as fs from "fs"; +import * as path from "path"; /** * Helper function to dump data to a file for debugging/logging purposes @@ -10,17 +10,19 @@ import * as path from 'path'; export function dumpToFile( filename: string, data: T[], - formatter?: (item: T, index: number) => string + formatter?: (item: T, index: number) => string, ) { const filePath = path.join(__dirname, filename); - + let fileContent: string; if (formatter) { - fileContent = data.map((item, index) => formatter(item, index)).join('\n'); + fileContent = data.map((item, index) => formatter(item, index)).join("\n"); } else { - fileContent = data.map((item, index) => `${index + 1}. ${JSON.stringify(item)}`).join('\n'); + fileContent = data + .map((item, index) => `${index + 1}. 
${JSON.stringify(item)}`) + .join("\n"); } - fs.writeFileSync(filePath, fileContent, 'utf8'); + fs.writeFileSync(filePath, fileContent, "utf8"); console.log(`Dumped data to ${filename}`); } diff --git a/apps/api/src/lib/extract/helpers/merge-null-val-objs.ts b/apps/api/src/lib/extract/helpers/merge-null-val-objs.ts index ddbb2a44..2044097b 100644 --- a/apps/api/src/lib/extract/helpers/merge-null-val-objs.ts +++ b/apps/api/src/lib/extract/helpers/merge-null-val-objs.ts @@ -1,4 +1,4 @@ -import { deduplicateObjectsArray } from './deduplicate-objs-array'; +import { deduplicateObjectsArray } from "./deduplicate-objs-array"; /** * Convert "null" strings to actual null values for easier comparison. @@ -25,16 +25,16 @@ function areMergeable(obj1: any, obj2: any): boolean { const allKeys = new Set([...Object.keys(obj1), ...Object.keys(obj2)]); let matchingNonNullValues = 0; let nonNullComparisons = 0; - + for (const key of allKeys) { const val1 = obj1[key]; const val2 = obj2[key]; - + // Skip array comparisons - they'll be merged separately if (Array.isArray(val1) || Array.isArray(val2)) { continue; } - + // If both values exist and are not null if (val1 !== null && val2 !== null) { nonNullComparisons++; @@ -43,7 +43,7 @@ function areMergeable(obj1: any, obj2: any): boolean { } } } - + // Objects are mergeable if they have at least one matching non-null value // and all their non-null values match when both objects have them return nonNullComparisons > 0 && matchingNonNullValues === nonNullComparisons; @@ -56,7 +56,10 @@ function mergeArrays(arr1: any[], arr2: any[]): any[] { const combined = [...arr1, ...arr2]; return combined.filter((item, index) => { const stringified = JSON.stringify(item); - return combined.findIndex(other => JSON.stringify(other) === stringified) === index; + return ( + combined.findIndex((other) => JSON.stringify(other) === stringified) === + index + ); }); } @@ -78,9 +81,9 @@ function mergeObjects(obj1: any, obj2: any): any { // If only obj2's value is an array, use it result[key] = [...obj2[key]]; } - } else if (typeof obj2[key] === 'object') { + } else if (typeof obj2[key] === "object") { // If both are objects (but not arrays), merge them - if (typeof result[key] === 'object' && !Array.isArray(result[key])) { + if (typeof result[key] === "object" && !Array.isArray(result[key])) { result[key] = mergeObjects(result[key], obj2[key]); } else { result[key] = { ...obj2[key] }; @@ -101,13 +104,17 @@ function mergeObjects(obj1: any, obj2: any): any { * null-equivalent fields, filling in null fields with the corresponding * non-null fields from the other object. 
*/ -export function mergeNullValObjs(objArray: { [key: string]: any[] }): { [key: string]: any[] } { +export function mergeNullValObjs(objArray: { [key: string]: any[] }): { + [key: string]: any[]; +} { const result: { [key: string]: any[] } = {}; for (const key in objArray) { if (Array.isArray(objArray[key])) { // If array contains only primitive values, return as is - if (objArray[key].every(item => typeof item !== 'object' || item === null)) { + if ( + objArray[key].every((item) => typeof item !== "object" || item === null) + ) { result[key] = [...objArray[key]]; continue; } @@ -117,7 +124,7 @@ export function mergeNullValObjs(objArray: { [key: string]: any[] }): { [key: st for (const item of items) { let merged = false; - + for (let i = 0; i < mergedItems.length; i++) { if (areMergeable(mergedItems[i], item)) { mergedItems[i] = mergeObjects(mergedItems[i], item); @@ -125,7 +132,7 @@ export function mergeNullValObjs(objArray: { [key: string]: any[] }): { [key: st break; } } - + if (!merged) { mergedItems.push({ ...item }); } @@ -134,10 +141,13 @@ export function mergeNullValObjs(objArray: { [key: string]: any[] }): { [key: st // Final deduplication pass result[key] = deduplicateObjectsArray({ [key]: mergedItems })[key]; } else { - console.warn(`Expected an array at objArray[${key}], but found:`, objArray[key]); + console.warn( + `Expected an array at objArray[${key}], but found:`, + objArray[key], + ); return objArray; } } return result; -} \ No newline at end of file +} diff --git a/apps/api/src/lib/extract/helpers/mix-schema-objs.ts b/apps/api/src/lib/extract/helpers/mix-schema-objs.ts index ee274b42..5cc111ee 100644 --- a/apps/api/src/lib/extract/helpers/mix-schema-objs.ts +++ b/apps/api/src/lib/extract/helpers/mix-schema-objs.ts @@ -1,7 +1,7 @@ export async function mixSchemaObjects( finalSchema: any, singleAnswerResult: any, - multiEntityResult: any + multiEntityResult: any, ) { const finalResult: any = {}; @@ -9,14 +9,20 @@ export async function mixSchemaObjects( function mergeResults(schema: any, singleResult: any, multiResult: any) { const result: any = {}; for (const key in schema.properties) { - if (schema.properties[key].type === 'object' && schema.properties[key].properties) { + if ( + schema.properties[key].type === "object" && + schema.properties[key].properties + ) { // If the property is an object, recursively merge its properties result[key] = mergeResults( schema.properties[key], singleResult[key] || {}, - multiResult[key] || {} + multiResult[key] || {}, ); - } else if (schema.properties[key].type === 'array' && Array.isArray(multiResult[key])) { + } else if ( + schema.properties[key].type === "array" && + Array.isArray(multiResult[key]) + ) { // If the property is an array, flatten the arrays from multiResult result[key] = multiResult[key].flat(); } else if (singleResult.hasOwnProperty(key)) { @@ -29,7 +35,10 @@ export async function mixSchemaObjects( } // Merge the properties from the final schema - Object.assign(finalResult, mergeResults(finalSchema, singleAnswerResult, multiEntityResult)); + Object.assign( + finalResult, + mergeResults(finalSchema, singleAnswerResult, multiEntityResult), + ); return finalResult; -} \ No newline at end of file +} diff --git a/apps/api/src/lib/extract/helpers/spread-schemas.ts b/apps/api/src/lib/extract/helpers/spread-schemas.ts index 721ff218..8ba9f428 100644 --- a/apps/api/src/lib/extract/helpers/spread-schemas.ts +++ b/apps/api/src/lib/extract/helpers/spread-schemas.ts @@ -1,4 +1,7 @@ -export async function spreadSchemas(schema: 
any, keys: string[]): Promise<{ +export async function spreadSchemas( + schema: any, + keys: string[], +): Promise<{ singleAnswerSchema: any; multiEntitySchema: any; }> { @@ -32,7 +35,7 @@ export async function spreadSchemas(schema: any, keys: string[]): Promise<{ if (Object.keys(singleAnswerSchema.properties).length === 0) { singleAnswerSchema = {}; } - + if (Object.keys(multiEntitySchema.properties).length === 0) { multiEntitySchema = {}; } @@ -41,4 +44,4 @@ export async function spreadSchemas(schema: any, keys: string[]): Promise<{ singleAnswerSchema, multiEntitySchema, }; -} \ No newline at end of file +} diff --git a/apps/api/src/lib/extract/helpers/transform-array-to-obj.ts b/apps/api/src/lib/extract/helpers/transform-array-to-obj.ts index f01ef3bf..6c65e234 100644 --- a/apps/api/src/lib/extract/helpers/transform-array-to-obj.ts +++ b/apps/api/src/lib/extract/helpers/transform-array-to-obj.ts @@ -1,21 +1,21 @@ -import isEqual from 'lodash/isEqual'; +import isEqual from "lodash/isEqual"; export function transformArrayToObject( originalSchema: any, - arrayData: any[] + arrayData: any[], ): any { if (Object.keys(originalSchema).length == 0) { return {}; } - + const transformedResult: any = {}; // Function to find the array key in a nested schema function findArrayKey(schema: any): string | null { for (const key in schema.properties) { - if (schema.properties[key].type === 'array') { + if (schema.properties[key].type === "array") { return key; - } else if (schema.properties[key].type === 'object') { + } else if (schema.properties[key].type === "object") { const nestedKey = findArrayKey(schema.properties[key]); if (nestedKey) { return `${key}.${nestedKey}`; @@ -31,7 +31,10 @@ export function transformArrayToObject( for (const key in item) { if (!acc[key]) { acc[key] = item[key]; - } else if (typeof acc[key] === 'object' && typeof item[key] === 'object') { + } else if ( + typeof acc[key] === "object" && + typeof item[key] === "object" + ) { acc[key] = { ...acc[key], ...item[key] }; } } @@ -39,13 +42,16 @@ export function transformArrayToObject( }, {}); } - const arrayKeyParts = arrayKeyPath.split('.'); + const arrayKeyParts = arrayKeyPath.split("."); const arrayKey = arrayKeyParts.pop(); if (!arrayKey) { throw new Error("Array key not found in schema"); } - const parentSchema = arrayKeyParts.reduce((schema, key) => schema.properties[key], originalSchema); + const parentSchema = arrayKeyParts.reduce( + (schema, key) => schema.properties[key], + originalSchema, + ); const itemSchema = parentSchema.properties[arrayKey].items; if (!itemSchema) { throw new Error("Item schema not found for array key"); @@ -53,7 +59,7 @@ export function transformArrayToObject( // Initialize the array in the transformed result let currentLevel = transformedResult; - arrayKeyParts.forEach(part => { + arrayKeyParts.forEach((part) => { if (!currentLevel[part]) { currentLevel[part] = {}; } @@ -63,20 +69,23 @@ export function transformArrayToObject( // Helper function to check if an object is already in the array function isDuplicateObject(array: any[], obj: any): boolean { - return array.some(existingItem => isEqual(existingItem, obj)); + return array.some((existingItem) => isEqual(existingItem, obj)); } // Helper function to validate if an object follows the schema function isValidObject(obj: any, schema: any): boolean { - return Object.keys(schema.properties).every(key => { - return obj.hasOwnProperty(key) && typeof obj[key] === schema.properties[key].type; + return Object.keys(schema.properties).every((key) => { + 
return ( + obj.hasOwnProperty(key) && + typeof obj[key] === schema.properties[key].type + ); }); } // Iterate over each item in the arrayData - arrayData.forEach(item => { + arrayData.forEach((item) => { let currentItem = item; - arrayKeyParts.forEach(part => { + arrayKeyParts.forEach((part) => { if (currentItem[part]) { currentItem = currentItem[part]; } @@ -84,43 +93,63 @@ export function transformArrayToObject( // Copy non-array properties from the parent object for (const key in parentSchema.properties) { - if (key !== arrayKey && currentItem.hasOwnProperty(key) && !currentLevel.hasOwnProperty(key)) { + if ( + key !== arrayKey && + currentItem.hasOwnProperty(key) && + !currentLevel.hasOwnProperty(key) + ) { currentLevel[key] = currentItem[key]; } } - // Ensure that the currentItem[arrayKey] is an array before mapping - if (Array.isArray(currentItem[arrayKey])) { - currentItem[arrayKey].forEach((subItem: any) => { - if (typeof subItem === 'object' && subItem !== null && isValidObject(subItem, itemSchema)) { - // For arrays of objects, add only unique objects - const transformedItem: any = {}; - let hasValidData = false; + // Ensure that the currentItem[arrayKey] is an array before mapping + if (Array.isArray(currentItem[arrayKey])) { + currentItem[arrayKey].forEach((subItem: any) => { + if ( + typeof subItem === "object" && + subItem !== null && + isValidObject(subItem, itemSchema) + ) { + // For arrays of objects, add only unique objects + const transformedItem: any = {}; + let hasValidData = false; - for (const key in itemSchema.properties) { - if (subItem.hasOwnProperty(key) && subItem[key] !== undefined) { - transformedItem[key] = subItem[key]; - hasValidData = true; + for (const key in itemSchema.properties) { + if (subItem.hasOwnProperty(key) && subItem[key] !== undefined) { + transformedItem[key] = subItem[key]; + hasValidData = true; + } + } + + if ( + hasValidData && + !isDuplicateObject(currentLevel[arrayKey], transformedItem) + ) { + currentLevel[arrayKey].push(transformedItem); } } - - if (hasValidData && !isDuplicateObject(currentLevel[arrayKey], transformedItem)) { - currentLevel[arrayKey].push(transformedItem); - } - } - }); - } else { - console.warn(`Expected an array at ${arrayKey}, but found:`, currentItem[arrayKey]); - } + }); + } else { + console.warn( + `Expected an array at ${arrayKey}, but found:`, + currentItem[arrayKey], + ); + } // Handle merging of array properties for (const key in parentSchema.properties) { - if (parentSchema.properties[key].type === 'array' && Array.isArray(currentItem[key])) { + if ( + parentSchema.properties[key].type === "array" && + Array.isArray(currentItem[key]) + ) { if (!currentLevel[key]) { currentLevel[key] = []; } currentItem[key].forEach((value: any) => { - if (!currentLevel[key].includes(value) && !isDuplicateObject(currentLevel[arrayKey], value)) { + if ( + !currentLevel[key].includes(value) && + !isDuplicateObject(currentLevel[arrayKey], value) + ) { currentLevel[key].push(value); } }); @@ -129,4 +158,4 @@ export function transformArrayToObject( }); return transformedResult; -} \ No newline at end of file +} diff --git a/apps/api/src/lib/extract/index/pinecone.ts b/apps/api/src/lib/extract/index/pinecone.ts index 24da2d59..df4c7686 100644 --- a/apps/api/src/lib/extract/index/pinecone.ts +++ b/apps/api/src/lib/extract/index/pinecone.ts @@ -91,7 +91,8 @@ export async function indexPage({ url: normalizedUrl, originUrl: normalizeUrl(originUrl), title: document.metadata.title ?? document.metadata.ogTitle ?? 
"", - description: document.metadata.description ?? document.metadata.ogDescription ?? "", + description: + document.metadata.description ?? document.metadata.ogDescription ?? "", crawlId, teamId, markdown: trimmedMarkdown, @@ -126,7 +127,7 @@ export async function indexPage({ export async function searchSimilarPages( query: string, originUrl?: string, - limit: number = 1000 + limit: number = 1000, ): Promise { try { const index = pinecone.index(INDEX_NAME); diff --git a/apps/api/src/lib/extract/reranker.ts b/apps/api/src/lib/extract/reranker.ts index 078edffd..dc23d4cb 100644 --- a/apps/api/src/lib/extract/reranker.ts +++ b/apps/api/src/lib/extract/reranker.ts @@ -59,7 +59,7 @@ export async function rerankLinks( const linksAndScores = await performRanking( mappedLinksRerank, mappedLinks.map((l) => l.url), - searchQuery + searchQuery, ); // First try with high threshold @@ -109,8 +109,11 @@ export async function rerankLinks( } }); - const rankedLinks = filteredLinks.slice(0, extractConfig.RERANKING.MAX_RANKING_LIMIT_FOR_RELEVANCE); - + const rankedLinks = filteredLinks.slice( + 0, + extractConfig.RERANKING.MAX_RANKING_LIMIT_FOR_RELEVANCE, + ); + // Mark URLs that will be used in completion rankedLinks.forEach((link) => { const trace = urlTraces.find((t) => t.url === link.url); @@ -120,13 +123,15 @@ export async function rerankLinks( }); // Mark URLs that were dropped due to ranking limit - filteredLinks.slice(extractConfig.RERANKING.MAX_RANKING_LIMIT_FOR_RELEVANCE).forEach(link => { - const trace = urlTraces.find(t => t.url === link.url); - if (trace) { - trace.warning = "Excluded due to ranking limit"; - trace.usedInCompletion = false; - } - }); + filteredLinks + .slice(extractConfig.RERANKING.MAX_RANKING_LIMIT_FOR_RELEVANCE) + .forEach((link) => { + const trace = urlTraces.find((t) => t.url === link.url); + if (trace) { + trace.warning = "Excluded due to ranking limit"; + trace.usedInCompletion = false; + } + }); // console.log("Reranked links: ", rankedLinks.length); @@ -155,7 +160,7 @@ function filterAndProcessLinks( export type RerankerResult = { mapDocument: MapDocument[]; tokensUsed: number; -} +}; export async function rerankLinksWithLLM( mappedLinks: MapDocument[], @@ -167,7 +172,7 @@ export async function rerankLinksWithLLM( const TIMEOUT_MS = 20000; const MAX_RETRIES = 2; let totalTokensUsed = 0; - + // Split mappedLinks into chunks of 200 for (let i = 0; i < mappedLinks.length; i += chunkSize) { chunks.push(mappedLinks.slice(i, i + chunkSize)); @@ -184,23 +189,25 @@ export async function rerankLinksWithLLM( type: "object", properties: { url: { type: "string" }, - relevanceScore: { type: "number" } + relevanceScore: { type: "number" }, }, - required: ["url", "relevanceScore"] - } - } + required: ["url", "relevanceScore"], + }, + }, }, - required: ["relevantLinks"] + required: ["relevantLinks"], }; - const results = await Promise.all( chunks.map(async (chunk, chunkIndex) => { // console.log(`Processing chunk ${chunkIndex + 1}/${chunks.length} with ${chunk.length} links`); - - const linksContent = chunk.map(link => - `URL: ${link.url}${link.title ? `\nTitle: ${link.title}` : ''}${link.description ? `\nDescription: ${link.description}` : ''}` - ).join("\n\n"); + + const linksContent = chunk + .map( + (link) => + `URL: ${link.url}${link.title ? `\nTitle: ${link.title}` : ""}${link.description ? 
`\nDescription: ${link.description}` : ""}`, + ) + .join("\n\n"); for (let retry = 0; retry <= MAX_RETRIES; retry++) { try { @@ -208,22 +215,28 @@ export async function rerankLinksWithLLM( setTimeout(() => resolve(null), TIMEOUT_MS); }); - const completionPromise = generateOpenAICompletions( - logger.child({ method: "rerankLinksWithLLM", chunk: chunkIndex + 1, retry }), + logger.child({ + method: "rerankLinksWithLLM", + chunk: chunkIndex + 1, + retry, + }), { mode: "llm", systemPrompt: buildRerankerSystemPrompt(), prompt: buildRerankerUserPrompt(searchQuery), - schema: schema + schema: schema, }, linksContent, undefined, - true + true, ); - const completion = await Promise.race([completionPromise, timeoutPromise]); - + const completion = await Promise.race([ + completionPromise, + timeoutPromise, + ]); + if (!completion) { // console.log(`Chunk ${chunkIndex + 1}: Timeout on attempt ${retry + 1}`); continue; @@ -237,9 +250,11 @@ export async function rerankLinksWithLLM( totalTokensUsed += completion.numTokens || 0; // console.log(`Chunk ${chunkIndex + 1}: Found ${completion.extract.relevantLinks.length} relevant links`); return completion.extract.relevantLinks; - } catch (error) { - console.warn(`Error processing chunk ${chunkIndex + 1} attempt ${retry + 1}:`, error); + console.warn( + `Error processing chunk ${chunkIndex + 1} attempt ${retry + 1}:`, + error, + ); if (retry === MAX_RETRIES) { // console.log(`Chunk ${chunkIndex + 1}: Max retries reached, returning empty array`); return []; @@ -247,18 +262,20 @@ export async function rerankLinksWithLLM( } } return []; - }) + }), ); // console.log(`Processed ${results.length} chunks`); // Flatten results and sort by relevance score - const flattenedResults = results.flat().sort((a, b) => b.relevanceScore - a.relevanceScore); + const flattenedResults = results + .flat() + .sort((a, b) => b.relevanceScore - a.relevanceScore); // console.log(`Total relevant links found: ${flattenedResults.length}`); // Map back to MapDocument format, keeping only relevant links const relevantLinks = flattenedResults - .map(result => mappedLinks.find(link => link.url === result.url)) + .map((result) => mappedLinks.find((link) => link.url === result.url)) .filter((link): link is MapDocument => link !== undefined); // console.log(`Returning ${relevantLinks.length} relevant links`); diff --git a/apps/api/src/lib/extract/url-processor.ts b/apps/api/src/lib/extract/url-processor.ts index 5a34c456..97d21467 100644 --- a/apps/api/src/lib/extract/url-processor.ts +++ b/apps/api/src/lib/extract/url-processor.ts @@ -184,8 +184,6 @@ export async function processUrl( // (link, index) => `${index + 1}. 
URL: ${link.url}, Title: ${link.title}, Description: ${link.description}` // ); - - const rerankerResult = await rerankLinksWithLLM( mappedLinks, rephrasedPrompt, diff --git a/apps/api/src/lib/extract/usage/llm-cost.ts b/apps/api/src/lib/extract/usage/llm-cost.ts index 73904161..fdf232a1 100644 --- a/apps/api/src/lib/extract/usage/llm-cost.ts +++ b/apps/api/src/lib/extract/usage/llm-cost.ts @@ -12,7 +12,9 @@ const tokenPerCharacter = 4; const baseTokenCost = 300; export function calculateFinalResultCost(data: any): number { - return Math.floor((JSON.stringify(data).length / tokenPerCharacter) + baseTokenCost); + return Math.floor( + JSON.stringify(data).length / tokenPerCharacter + baseTokenCost, + ); } export function estimateTotalCost(tokenUsage: TokenUsage[]): number { diff --git a/apps/api/src/lib/extract/usage/model-prices.ts b/apps/api/src/lib/extract/usage/model-prices.ts index d24baeca..2a28c4fe 100644 --- a/apps/api/src/lib/extract/usage/model-prices.ts +++ b/apps/api/src/lib/extract/usage/model-prices.ts @@ -3,8138 +3,8257 @@ // 291 kb export const modelPrices = { - "gpt-4": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-05, - "output_cost_per_token": 6e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4o": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 2.5e-06, - "output_cost_per_token": 1e-05, - "input_cost_per_token_batches": 1.25e-06, - "output_cost_per_token_batches": 5e-06, - "cache_read_input_token_cost": 1.25e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4o-audio-preview": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 2.5e-06, - "input_cost_per_audio_token": 0.0001, - "output_cost_per_token": 1e-05, - "output_cost_per_audio_token": 0.0002, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_system_messages": true - }, - "gpt-4o-audio-preview-2024-10-01": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 2.5e-06, - "input_cost_per_audio_token": 0.0001, - "output_cost_per_token": 1e-05, - "output_cost_per_audio_token": 0.0002, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_system_messages": true - }, - "gpt-4o-mini": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "input_cost_per_token_batches": 7.5e-08, - "output_cost_per_token_batches": 3e-07, - "cache_read_input_token_cost": 7.5e-08, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true - 
}, - "gpt-4o-mini-2024-07-18": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "input_cost_per_token_batches": 7.5e-08, - "output_cost_per_token_batches": 3e-07, - "cache_read_input_token_cost": 7.5e-08, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "o1-mini": { - "max_tokens": 65536, - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.2e-05, - "cache_read_input_token_cost": 1.5e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_vision": true, - "supports_prompt_caching": true - }, - "o1-mini-2024-09-12": { - "max_tokens": 65536, - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.2e-05, - "cache_read_input_token_cost": 1.5e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_vision": true, - "supports_prompt_caching": true - }, - "o1-preview": { - "max_tokens": 32768, - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 6e-05, - "cache_read_input_token_cost": 7.5e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_vision": true, - "supports_prompt_caching": true - }, - "o1-preview-2024-09-12": { - "max_tokens": 32768, - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 6e-05, - "cache_read_input_token_cost": 7.5e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_vision": true, - "supports_prompt_caching": true - }, - "chatgpt-4o-latest": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4o-2024-05-13": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-06, - "output_cost_per_token": 1.5e-05, - "input_cost_per_token_batches": 2.5e-06, - "output_cost_per_token_batches": 7.5e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4o-2024-08-06": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 2.5e-06, - "output_cost_per_token": 1e-05, - "input_cost_per_token_batches": 1.25e-06, - "output_cost_per_token_batches": 5e-06, - "cache_read_input_token_cost": 1.25e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4-turbo-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-05, - 
"output_cost_per_token": 3e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4-0314": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-05, - "output_cost_per_token": 6e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4-0613": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-05, - "output_cost_per_token": 6e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4-32k": { - "max_tokens": 4096, - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "input_cost_per_token": 6e-05, - "output_cost_per_token": 0.00012, - "litellm_provider": "openai", - "mode": "chat", - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4-32k-0314": { - "max_tokens": 4096, - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "input_cost_per_token": 6e-05, - "output_cost_per_token": 0.00012, - "litellm_provider": "openai", - "mode": "chat", - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4-32k-0613": { - "max_tokens": 4096, - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "input_cost_per_token": 6e-05, - "output_cost_per_token": 0.00012, - "litellm_provider": "openai", - "mode": "chat", - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4-turbo": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 3e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4-turbo-2024-04-09": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 3e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4-1106-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 3e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4-0125-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 3e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4-vision-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 3e-05, - "litellm_provider": "openai", - "mode": "chat", - 
"supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-4-1106-vision-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 3e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-3.5-turbo": { - "max_tokens": 4097, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-3.5-turbo-0301": { - "max_tokens": 4097, - "max_input_tokens": 4097, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-3.5-turbo-0613": { - "max_tokens": 4097, - "max_input_tokens": 4097, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-3.5-turbo-1106": { - "max_tokens": 16385, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-3.5-turbo-0125": { - "max_tokens": 16385, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 1.5e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-3.5-turbo-16k": { - "max_tokens": 16385, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 4e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "gpt-3.5-turbo-16k-0613": { - "max_tokens": 16385, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 4e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "ft:gpt-3.5-turbo": { - "max_tokens": 4096, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 6e-06, - "input_cost_per_token_batches": 1.5e-06, - "output_cost_per_token_batches": 3e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_system_messages": true - }, - "ft:gpt-3.5-turbo-0125": { - "max_tokens": 4096, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 6e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_system_messages": true - }, - "ft:gpt-3.5-turbo-1106": { - "max_tokens": 4096, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 6e-06, 
- "litellm_provider": "openai", - "mode": "chat", - "supports_system_messages": true - }, - "ft:gpt-3.5-turbo-0613": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 6e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_system_messages": true - }, - "ft:gpt-4-0613": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-05, - "output_cost_per_token": 6e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "source": "OpenAI needs to add pricing for this ft model, will be updated when added by OpenAI. Defaulting to base model pricing", - "supports_system_messages": true - }, - "ft:gpt-4o-2024-08-06": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 3.75e-06, - "output_cost_per_token": 1.5e-05, - "input_cost_per_token_batches": 1.875e-06, - "output_cost_per_token_batches": 7.5e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true, - "supports_system_messages": true - }, - "ft:gpt-4o-mini-2024-07-18": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 1.2e-06, - "input_cost_per_token_batches": 1.5e-07, - "output_cost_per_token_batches": 6e-07, - "cache_read_input_token_cost": 1.5e-07, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "ft:davinci-002": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 2e-06, - "input_cost_per_token_batches": 1e-06, - "output_cost_per_token_batches": 1e-06, - "litellm_provider": "text-completion-openai", - "mode": "completion" - }, - "ft:babbage-002": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "input_cost_per_token": 4e-07, - "output_cost_per_token": 4e-07, - "input_cost_per_token_batches": 2e-07, - "output_cost_per_token_batches": 2e-07, - "litellm_provider": "text-completion-openai", - "mode": "completion" - }, - "text-embedding-3-large": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "output_vector_size": 3072, - "input_cost_per_token": 1.3e-07, - "output_cost_per_token": 0.0, - "input_cost_per_token_batches": 6.5e-08, - "output_cost_per_token_batches": 0.0, - "litellm_provider": "openai", - "mode": "embedding" - }, - "text-embedding-3-small": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "output_vector_size": 1536, - "input_cost_per_token": 2e-08, - "output_cost_per_token": 0.0, - "input_cost_per_token_batches": 1e-08, - "output_cost_per_token_batches": 0.0, - "litellm_provider": "openai", - "mode": "embedding" - }, - "text-embedding-ada-002": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "output_vector_size": 1536, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "openai", - "mode": "embedding" - }, - "text-embedding-ada-002-v2": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - 
"input_cost_per_token_batches": 5e-08, - "output_cost_per_token_batches": 0.0, - "litellm_provider": "openai", - "mode": "embedding" - }, - "text-moderation-stable": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 0, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "openai", - "mode": "moderations" - }, - "text-moderation-007": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 0, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "openai", - "mode": "moderations" - }, - "text-moderation-latest": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 0, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "openai", - "mode": "moderations" - }, - "256-x-256/dall-e-2": { - "mode": "image_generation", - "input_cost_per_pixel": 2.4414e-07, - "output_cost_per_pixel": 0.0, - "litellm_provider": "openai" - }, - "512-x-512/dall-e-2": { - "mode": "image_generation", - "input_cost_per_pixel": 6.86e-08, - "output_cost_per_pixel": 0.0, - "litellm_provider": "openai" - }, - "1024-x-1024/dall-e-2": { - "mode": "image_generation", - "input_cost_per_pixel": 1.9e-08, - "output_cost_per_pixel": 0.0, - "litellm_provider": "openai" - }, - "hd/1024-x-1792/dall-e-3": { - "mode": "image_generation", - "input_cost_per_pixel": 6.539e-08, - "output_cost_per_pixel": 0.0, - "litellm_provider": "openai" - }, - "hd/1792-x-1024/dall-e-3": { - "mode": "image_generation", - "input_cost_per_pixel": 6.539e-08, - "output_cost_per_pixel": 0.0, - "litellm_provider": "openai" - }, - "hd/1024-x-1024/dall-e-3": { - "mode": "image_generation", - "input_cost_per_pixel": 7.629e-08, - "output_cost_per_pixel": 0.0, - "litellm_provider": "openai" - }, - "standard/1024-x-1792/dall-e-3": { - "mode": "image_generation", - "input_cost_per_pixel": 4.359e-08, - "output_cost_per_pixel": 0.0, - "litellm_provider": "openai" - }, - "standard/1792-x-1024/dall-e-3": { - "mode": "image_generation", - "input_cost_per_pixel": 4.359e-08, - "output_cost_per_pixel": 0.0, - "litellm_provider": "openai" - }, - "standard/1024-x-1024/dall-e-3": { - "mode": "image_generation", - "input_cost_per_pixel": 3.81469e-08, - "output_cost_per_pixel": 0.0, - "litellm_provider": "openai" - }, - "whisper-1": { - "mode": "audio_transcription", - "input_cost_per_second": 0, - "output_cost_per_second": 0.0001, - "litellm_provider": "openai" - }, - "tts-1": { - "mode": "audio_speech", - "input_cost_per_character": 1.5e-05, - "litellm_provider": "openai" - }, - "tts-1-hd": { - "mode": "audio_speech", - "input_cost_per_character": 3e-05, - "litellm_provider": "openai" - }, - "azure/tts-1": { - "mode": "audio_speech", - "input_cost_per_character": 1.5e-05, - "litellm_provider": "azure" - }, - "azure/tts-1-hd": { - "mode": "audio_speech", - "input_cost_per_character": 3e-05, - "litellm_provider": "azure" - }, - "azure/whisper-1": { - "mode": "audio_transcription", - "input_cost_per_second": 0, - "output_cost_per_second": 0.0001, - "litellm_provider": "azure" - }, - "azure/o1-mini": { - "max_tokens": 65536, - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.2e-05, - "cache_read_input_token_cost": 1.5e-06, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false, - "supports_prompt_caching": true - }, - "azure/o1-mini-2024-09-12": { - 
"max_tokens": 65536, - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.2e-05, - "cache_read_input_token_cost": 1.5e-06, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false, - "supports_prompt_caching": true - }, - "azure/o1-preview": { - "max_tokens": 32768, - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 6e-05, - "cache_read_input_token_cost": 7.5e-06, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false, - "supports_prompt_caching": true - }, - "azure/o1-preview-2024-09-12": { - "max_tokens": 32768, - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 6e-05, - "cache_read_input_token_cost": 7.5e-06, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false, - "supports_prompt_caching": true - }, - "azure/gpt-4o": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-06, - "output_cost_per_token": 1.5e-05, - "cache_read_input_token_cost": 1.25e-06, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true, - "supports_prompt_caching": true - }, - "azure/gpt-4o-2024-08-06": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 2.75e-06, - "output_cost_per_token": 1.1e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true, - "supports_prompt_caching": true - }, - "azure/gpt-4o-2024-05-13": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true, - "supports_prompt_caching": true - }, - "azure/global-standard/gpt-4o-2024-08-06": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 2.5e-06, - "output_cost_per_token": 1e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true, - "supports_prompt_caching": true - }, - "azure/global-standard/gpt-4o-mini": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true - }, - "azure/gpt-4o-mini": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 1.65e-07, - "output_cost_per_token": 6.6e-07, - "cache_read_input_token_cost": 7.5e-08, - "litellm_provider": "azure", - "mode": "chat", - 
"supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true, - "supports_prompt_caching": true - }, - "azure/gpt-4-turbo-2024-04-09": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 3e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true - }, - "azure/gpt-4-0125-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 3e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true - }, - "azure/gpt-4-1106-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 3e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true - }, - "azure/gpt-4-0613": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-05, - "output_cost_per_token": 6e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true - }, - "azure/gpt-4-32k-0613": { - "max_tokens": 4096, - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "input_cost_per_token": 6e-05, - "output_cost_per_token": 0.00012, - "litellm_provider": "azure", - "mode": "chat" - }, - "azure/gpt-4-32k": { - "max_tokens": 4096, - "max_input_tokens": 32768, - "max_output_tokens": 4096, - "input_cost_per_token": 6e-05, - "output_cost_per_token": 0.00012, - "litellm_provider": "azure", - "mode": "chat" - }, - "azure/gpt-4": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-05, - "output_cost_per_token": 6e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true - }, - "azure/gpt-4-turbo": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 3e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true - }, - "azure/gpt-4-turbo-vision-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 3e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_vision": true - }, - "azure/gpt-35-turbo-16k-0613": { - "max_tokens": 4096, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 4e-06, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true - }, - "azure/gpt-35-turbo-1106": { - "max_tokens": 4096, - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true - }, - "azure/gpt-35-turbo-0613": { - "max_tokens": 4097, - "max_input_tokens": 4097, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": 
true, - "supports_parallel_function_calling": true - }, - "azure/gpt-35-turbo-0301": { - "max_tokens": 4097, - "max_input_tokens": 4097, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-06, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true - }, - "azure/gpt-35-turbo-0125": { - "max_tokens": 4096, - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 1.5e-06, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true - }, - "azure/gpt-35-turbo-16k": { - "max_tokens": 4096, - "max_input_tokens": 16385, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 4e-06, - "litellm_provider": "azure", - "mode": "chat" - }, - "azure/gpt-35-turbo": { - "max_tokens": 4096, - "max_input_tokens": 4097, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 1.5e-06, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true - }, - "azure/gpt-3.5-turbo-instruct-0914": { - "max_tokens": 4097, - "max_input_tokens": 4097, - "input_cost_per_token": 1.5e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "azure_text", - "mode": "completion" - }, - "azure/gpt-35-turbo-instruct": { - "max_tokens": 4097, - "max_input_tokens": 4097, - "input_cost_per_token": 1.5e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "azure_text", - "mode": "completion" - }, - "azure/gpt-35-turbo-instruct-0914": { - "max_tokens": 4097, - "max_input_tokens": 4097, - "input_cost_per_token": 1.5e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "azure_text", - "mode": "completion" - }, - "azure/mistral-large-latest": { - "max_tokens": 32000, - "max_input_tokens": 32000, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true - }, - "azure/mistral-large-2402": { - "max_tokens": 32000, - "max_input_tokens": 32000, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true - }, - "azure/command-r-plus": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true - }, - "azure/ada": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "azure", - "mode": "embedding" - }, - "azure/text-embedding-ada-002": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "azure", - "mode": "embedding" - }, - "azure/text-embedding-3-large": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "input_cost_per_token": 1.3e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "azure", - "mode": "embedding" - }, - "azure/text-embedding-3-small": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "input_cost_per_token": 2e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "azure", - "mode": "embedding" - }, - "azure/standard/1024-x-1024/dall-e-3": { - "input_cost_per_pixel": 3.81469e-08, - "output_cost_per_token": 0.0, - "litellm_provider": 
"azure", - "mode": "image_generation" - }, - "azure/hd/1024-x-1024/dall-e-3": { - "input_cost_per_pixel": 7.629e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "azure", - "mode": "image_generation" - }, - "azure/standard/1024-x-1792/dall-e-3": { - "input_cost_per_pixel": 4.359e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "azure", - "mode": "image_generation" - }, - "azure/standard/1792-x-1024/dall-e-3": { - "input_cost_per_pixel": 4.359e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "azure", - "mode": "image_generation" - }, - "azure/hd/1024-x-1792/dall-e-3": { - "input_cost_per_pixel": 6.539e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "azure", - "mode": "image_generation" - }, - "azure/hd/1792-x-1024/dall-e-3": { - "input_cost_per_pixel": 6.539e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "azure", - "mode": "image_generation" - }, - "azure/standard/1024-x-1024/dall-e-2": { - "input_cost_per_pixel": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "azure", - "mode": "image_generation" - }, - "azure_ai/jamba-instruct": { - "max_tokens": 4096, - "max_input_tokens": 70000, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 7e-07, - "litellm_provider": "azure_ai", - "mode": "chat" - }, - "azure_ai/mistral-large": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 4e-06, - "output_cost_per_token": 1.2e-05, - "litellm_provider": "azure_ai", - "mode": "chat", - "supports_function_calling": true - }, - "azure_ai/mistral-small": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 3e-06, - "litellm_provider": "azure_ai", - "supports_function_calling": true, - "mode": "chat" - }, - "azure_ai/Meta-Llama-3-70B-Instruct": { - "max_tokens": 2048, - "max_input_tokens": 8192, - "max_output_tokens": 2048, - "input_cost_per_token": 1.1e-06, - "output_cost_per_token": 3.7e-07, - "litellm_provider": "azure_ai", - "mode": "chat" - }, - "azure_ai/Meta-Llama-3.1-8B-Instruct": { - "max_tokens": 2048, - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 6.1e-07, - "litellm_provider": "azure_ai", - "mode": "chat", - "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-8b-instruct-offer?tab=PlansAndPrice" - }, - "azure_ai/Meta-Llama-3.1-70B-Instruct": { - "max_tokens": 2048, - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "input_cost_per_token": 2.68e-06, - "output_cost_per_token": 3.54e-06, - "litellm_provider": "azure_ai", - "mode": "chat", - "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-70b-instruct-offer?tab=PlansAndPrice" - }, - "azure_ai/Meta-Llama-3.1-405B-Instruct": { - "max_tokens": 2048, - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "input_cost_per_token": 5.33e-06, - "output_cost_per_token": 1.6e-05, - "litellm_provider": "azure_ai", - "mode": "chat", - "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-405b-instruct-offer?tab=PlansAndPrice" - }, - "azure_ai/cohere-rerank-v3-multilingual": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - "input_cost_per_token": 0.0, - "input_cost_per_query": 0.002, - "output_cost_per_token": 0.0, - "litellm_provider": 
"azure_ai", - "mode": "rerank" - }, - "azure_ai/cohere-rerank-v3-english": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - "input_cost_per_token": 0.0, - "input_cost_per_query": 0.002, - "output_cost_per_token": 0.0, - "litellm_provider": "azure_ai", - "mode": "rerank" - }, - "azure_ai/Cohere-embed-v3-english": { - "max_tokens": 512, - "max_input_tokens": 512, - "output_vector_size": 1024, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "azure_ai", - "mode": "embedding", - "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/cohere.cohere-embed-v3-english-offer?tab=PlansAndPrice" - }, - "azure_ai/Cohere-embed-v3-multilingual": { - "max_tokens": 512, - "max_input_tokens": 512, - "output_vector_size": 1024, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "azure_ai", - "mode": "embedding", - "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/cohere.cohere-embed-v3-english-offer?tab=PlansAndPrice" - }, - "babbage-002": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "input_cost_per_token": 4e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "text-completion-openai", - "mode": "completion" - }, - "davinci-002": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "text-completion-openai", - "mode": "completion" - }, - "gpt-3.5-turbo-instruct": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "text-completion-openai", - "mode": "completion" - }, - "gpt-3.5-turbo-instruct-0914": { - "max_tokens": 4097, - "max_input_tokens": 8192, - "max_output_tokens": 4097, - "input_cost_per_token": 1.5e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "text-completion-openai", - "mode": "completion" - }, - "claude-instant-1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 1.63e-06, - "output_cost_per_token": 5.51e-06, - "litellm_provider": "anthropic", - "mode": "chat" - }, - "mistral/mistral-tiny": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 2.5e-07, - "litellm_provider": "mistral", - "mode": "chat", - "supports_assistant_prefill": true - }, - "mistral/mistral-small": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 3e-06, - "litellm_provider": "mistral", - "supports_function_calling": true, - "mode": "chat", - "supports_assistant_prefill": true - }, - "mistral/mistral-small-latest": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 3e-06, - "litellm_provider": "mistral", - "supports_function_calling": true, - "mode": "chat", - "supports_assistant_prefill": true - }, - "mistral/mistral-medium": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 2.7e-06, - "output_cost_per_token": 8.1e-06, - "litellm_provider": "mistral", - "mode": "chat", - "supports_assistant_prefill": true - }, - "mistral/mistral-medium-latest": { - "max_tokens": 8191, - 
"max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 2.7e-06, - "output_cost_per_token": 8.1e-06, - "litellm_provider": "mistral", - "mode": "chat", - "supports_assistant_prefill": true - }, - "mistral/mistral-medium-2312": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 2.7e-06, - "output_cost_per_token": 8.1e-06, - "litellm_provider": "mistral", - "mode": "chat", - "supports_assistant_prefill": true - }, - "mistral/mistral-large-latest": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 6e-06, - "litellm_provider": "mistral", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true - }, - "mistral/mistral-large-2402": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 4e-06, - "output_cost_per_token": 1.2e-05, - "litellm_provider": "mistral", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true - }, - "mistral/mistral-large-2407": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 9e-06, - "litellm_provider": "mistral", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true - }, - "mistral/pixtral-12b-2409": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 1.5e-07, - "litellm_provider": "mistral", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true, - "supports_vision": true - }, - "mistral/open-mistral-7b": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 2.5e-07, - "litellm_provider": "mistral", - "mode": "chat", - "supports_assistant_prefill": true - }, - "mistral/open-mixtral-8x7b": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 7e-07, - "output_cost_per_token": 7e-07, - "litellm_provider": "mistral", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true - }, - "mistral/open-mixtral-8x22b": { - "max_tokens": 8191, - "max_input_tokens": 64000, - "max_output_tokens": 8191, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 6e-06, - "litellm_provider": "mistral", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true - }, - "mistral/codestral-latest": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 3e-06, - "litellm_provider": "mistral", - "mode": "chat", - "supports_assistant_prefill": true - }, - "mistral/codestral-2405": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 3e-06, - "litellm_provider": "mistral", - "mode": "chat", - "supports_assistant_prefill": true - }, - "mistral/open-mistral-nemo": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 3e-07, - "litellm_provider": "mistral", - "mode": "chat", - "source": "https://mistral.ai/technology/", - "supports_assistant_prefill": true - }, - 
"mistral/open-mistral-nemo-2407": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 3e-07, - "litellm_provider": "mistral", - "mode": "chat", - "source": "https://mistral.ai/technology/", - "supports_assistant_prefill": true - }, - "mistral/open-codestral-mamba": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 2.5e-07, - "litellm_provider": "mistral", - "mode": "chat", - "source": "https://mistral.ai/technology/", - "supports_assistant_prefill": true - }, - "mistral/codestral-mamba-latest": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 2.5e-07, - "litellm_provider": "mistral", - "mode": "chat", - "source": "https://mistral.ai/technology/", - "supports_assistant_prefill": true - }, - "mistral/mistral-embed": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "input_cost_per_token": 1e-07, - "litellm_provider": "mistral", - "mode": "embedding" - }, - "deepseek-chat": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.4e-07, - "input_cost_per_token_cache_hit": 1.4e-08, - "output_cost_per_token": 2.8e-07, - "litellm_provider": "deepseek", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true, - "supports_tool_choice": true, - "supports_prompt_caching": true - }, - "codestral/codestral-latest": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "codestral", - "mode": "chat", - "source": "https://docs.mistral.ai/capabilities/code_generation/", - "supports_assistant_prefill": true - }, - "codestral/codestral-2405": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "codestral", - "mode": "chat", - "source": "https://docs.mistral.ai/capabilities/code_generation/", - "supports_assistant_prefill": true - }, - "text-completion-codestral/codestral-latest": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "text-completion-codestral", - "mode": "completion", - "source": "https://docs.mistral.ai/capabilities/code_generation/" - }, - "text-completion-codestral/codestral-2405": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "text-completion-codestral", - "mode": "completion", - "source": "https://docs.mistral.ai/capabilities/code_generation/" - }, - "deepseek-coder": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.4e-07, - "input_cost_per_token_cache_hit": 1.4e-08, - "output_cost_per_token": 2.8e-07, - "litellm_provider": "deepseek", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true, - "supports_tool_choice": true, - "supports_prompt_caching": true - }, - "groq/llama2-70b-4096": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 7e-07, - "output_cost_per_token": 8e-07, - "litellm_provider": 
"groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/llama3-8b-8192": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 5e-08, - "output_cost_per_token": 8e-08, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/llama3-70b-8192": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 5.9e-07, - "output_cost_per_token": 7.9e-07, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/llama-3.1-8b-instant": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 5e-08, - "output_cost_per_token": 8e-08, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/llama-3.1-70b-versatile": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 5.9e-07, - "output_cost_per_token": 7.9e-07, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/llama-3.1-405b-reasoning": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 5.9e-07, - "output_cost_per_token": 7.9e-07, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/mixtral-8x7b-32768": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 2.4e-07, - "output_cost_per_token": 2.4e-07, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/gemma-7b-it": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 7e-08, - "output_cost_per_token": 7e-08, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/gemma2-9b-it": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/llama3-groq-70b-8192-tool-use-preview": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 8.9e-07, - "output_cost_per_token": 8.9e-07, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/llama3-groq-8b-8192-tool-use-preview": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 1.9e-07, - "output_cost_per_token": 1.9e-07, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "cerebras/llama3.1-8b": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "cerebras", - "mode": "chat", - "supports_function_calling": true - }, - "cerebras/llama3.1-70b": { - "max_tokens": 128000, - "max_input_tokens": 128000, - 
"max_output_tokens": 128000, - "input_cost_per_token": 6e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "cerebras", - "mode": "chat", - "supports_function_calling": true - }, - "friendliai/mixtral-8x7b-instruct-v0-1": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 4e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "friendliai", - "mode": "chat", - "supports_function_calling": true - }, - "friendliai/meta-llama-3-8b-instruct": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "friendliai", - "mode": "chat", - "supports_function_calling": true - }, - "friendliai/meta-llama-3-70b-instruct": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 8e-07, - "output_cost_per_token": 8e-07, - "litellm_provider": "friendliai", - "mode": "chat", - "supports_function_calling": true - }, - "claude-instant-1.2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 1.63e-07, - "output_cost_per_token": 5.51e-07, - "litellm_provider": "anthropic", - "mode": "chat" - }, - "claude-2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "anthropic", - "mode": "chat" - }, - "claude-2.1": { - "max_tokens": 8191, - "max_input_tokens": 200000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "anthropic", - "mode": "chat" - }, - "claude-3-haiku-20240307": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 1.25e-06, - "cache_creation_input_token_cost": 3e-07, - "cache_read_input_token_cost": 3e-08, - "litellm_provider": "anthropic", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 264, - "supports_assistant_prefill": true, - "supports_prompt_caching": true, - "supports_response_schema": true - }, - "claude-3-haiku-latest": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 1.25e-06, - "cache_creation_input_token_cost": 3e-07, - "cache_read_input_token_cost": 3e-08, - "litellm_provider": "anthropic", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 264, - "supports_assistant_prefill": true, - "supports_prompt_caching": true - }, - "claude-3-opus-20240229": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 7.5e-05, - "cache_creation_input_token_cost": 1.875e-05, - "cache_read_input_token_cost": 1.5e-06, - "litellm_provider": "anthropic", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 395, - "supports_assistant_prefill": true, - "supports_prompt_caching": true, - "supports_response_schema": true - }, - "claude-3-opus-latest": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 7.5e-05, - "cache_creation_input_token_cost": 1.875e-05, - 
"cache_read_input_token_cost": 1.5e-06, - "litellm_provider": "anthropic", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 395, - "supports_assistant_prefill": true, - "supports_prompt_caching": true - }, - "claude-3-sonnet-20240229": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "anthropic", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159, - "supports_assistant_prefill": true, - "supports_prompt_caching": true, - "supports_response_schema": true - }, - "claude-3-5-sonnet-20240620": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "litellm_provider": "anthropic", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159, - "supports_assistant_prefill": true, - "supports_prompt_caching": true, - "supports_response_schema": true - }, - "claude-3-5-sonnet-20241022": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "litellm_provider": "anthropic", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159, - "supports_assistant_prefill": true, - "supports_pdf_input": true, - "supports_prompt_caching": true, - "supports_response_schema": true - }, - "claude-3-5-sonnet-latest": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "litellm_provider": "anthropic", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159, - "supports_assistant_prefill": true, - "supports_prompt_caching": true - }, - "text-bison": { - "max_tokens": 2048, - "max_input_tokens": 8192, - "max_output_tokens": 2048, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-bison@001": { - "max_tokens": 1024, - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-bison@002": { - "max_tokens": 1024, - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-bison32k": { - "max_tokens": 1024, - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - 
"output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-bison32k@002": { - "max_tokens": 1024, - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-unicorn": { - "max_tokens": 1024, - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 2.8e-05, - "litellm_provider": "vertex_ai-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-unicorn@001": { - "max_tokens": 1024, - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 2.8e-05, - "litellm_provider": "vertex_ai-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "chat-bison": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-chat-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "chat-bison@001": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-chat-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "chat-bison@002": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-chat-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "chat-bison-32k": { - "max_tokens": 8192, - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-chat-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "chat-bison-32k@002": { - "max_tokens": 8192, - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-chat-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-bison": { - "max_tokens": 1024, - 
"max_input_tokens": 6144, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-code-text-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-bison@001": { - "max_tokens": 1024, - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-code-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-bison@002": { - "max_tokens": 1024, - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-code-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-bison32k": { - "max_tokens": 1024, - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-code-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-bison-32k@002": { - "max_tokens": 1024, - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-code-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-gecko@001": { - "max_tokens": 64, - "max_input_tokens": 2048, - "max_output_tokens": 64, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-gecko@002": { - "max_tokens": 64, - "max_input_tokens": 2048, - "max_output_tokens": 64, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-gecko": { - "max_tokens": 64, - "max_input_tokens": 2048, - "max_output_tokens": 64, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "code-gecko-latest": { - "max_tokens": 64, - "max_input_tokens": 2048, - "max_output_tokens": 64, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "litellm_provider": "vertex_ai-code-text-models", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "codechat-bison@latest": { - "max_tokens": 
1024, - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-code-chat-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "codechat-bison": { - "max_tokens": 1024, - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-code-chat-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "codechat-bison@001": { - "max_tokens": 1024, - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-code-chat-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "codechat-bison@002": { - "max_tokens": 1024, - "max_input_tokens": 6144, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-code-chat-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "codechat-bison-32k": { - "max_tokens": 8192, - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-code-chat-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "codechat-bison-32k@002": { - "max_tokens": 8192, - "max_input_tokens": 32000, - "max_output_tokens": 8192, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "input_cost_per_character": 2.5e-07, - "output_cost_per_character": 5e-07, - "litellm_provider": "vertex_ai-code-chat-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-pro": { - "max_tokens": 8192, - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "input_cost_per_image": 0.0025, - "input_cost_per_video_per_second": 0.002, - "input_cost_per_token": 5e-07, - "input_cost_per_character": 1.25e-07, - "output_cost_per_token": 1.5e-06, - "output_cost_per_character": 3.75e-07, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_function_calling": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "gemini-1.0-pro": { - "max_tokens": 8192, - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "input_cost_per_image": 0.0025, - "input_cost_per_video_per_second": 0.002, - "input_cost_per_token": 5e-07, - "input_cost_per_character": 1.25e-07, - "output_cost_per_token": 1.5e-06, - "output_cost_per_character": 3.75e-07, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_function_calling": true, - "source": 
"https://cloud.google.com/vertex-ai/generative-ai/pricing#google_models" - }, - "gemini-1.0-pro-001": { - "max_tokens": 8192, - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "input_cost_per_image": 0.0025, - "input_cost_per_video_per_second": 0.002, - "input_cost_per_token": 5e-07, - "input_cost_per_character": 1.25e-07, - "output_cost_per_token": 1.5e-06, - "output_cost_per_character": 3.75e-07, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_function_calling": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.0-ultra": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 2048, - "input_cost_per_image": 0.0025, - "input_cost_per_video_per_second": 0.002, - "input_cost_per_token": 5e-07, - "input_cost_per_character": 1.25e-07, - "output_cost_per_token": 1.5e-06, - "output_cost_per_character": 3.75e-07, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_function_calling": true, - "source": "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.0-ultra-001": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 2048, - "input_cost_per_image": 0.0025, - "input_cost_per_video_per_second": 0.002, - "input_cost_per_token": 5e-07, - "input_cost_per_character": 1.25e-07, - "output_cost_per_token": 1.5e-06, - "output_cost_per_character": 3.75e-07, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_function_calling": true, - "source": "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. 
Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.0-pro-002": { - "max_tokens": 8192, - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "input_cost_per_image": 0.0025, - "input_cost_per_video_per_second": 0.002, - "input_cost_per_token": 5e-07, - "input_cost_per_character": 1.25e-07, - "output_cost_per_token": 1.5e-06, - "output_cost_per_character": 3.75e-07, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_function_calling": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.5-pro": { - "max_tokens": 8192, - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "input_cost_per_image": 0.00032875, - "input_cost_per_audio_per_second": 3.125e-05, - "input_cost_per_video_per_second": 0.00032875, - "input_cost_per_token": 1.25e-06, - "input_cost_per_character": 3.125e-07, - "input_cost_per_image_above_128k_tokens": 0.0006575, - "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, - "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, - "input_cost_per_token_above_128k_tokens": 2.5e-06, - "input_cost_per_character_above_128k_tokens": 6.25e-07, - "output_cost_per_token": 5e-06, - "output_cost_per_character": 1.25e-06, - "output_cost_per_token_above_128k_tokens": 1e-05, - "output_cost_per_character_above_128k_tokens": 2.5e-06, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_vision": true, - "supports_pdf_input": true, - "supports_system_messages": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.5-pro-002": { - "max_tokens": 8192, - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "input_cost_per_image": 0.00032875, - "input_cost_per_audio_per_second": 3.125e-05, - "input_cost_per_video_per_second": 0.00032875, - "input_cost_per_token": 1.25e-06, - "input_cost_per_character": 3.125e-07, - "input_cost_per_image_above_128k_tokens": 0.0006575, - "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, - "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, - "input_cost_per_token_above_128k_tokens": 2.5e-06, - "input_cost_per_character_above_128k_tokens": 6.25e-07, - "output_cost_per_token": 5e-06, - "output_cost_per_character": 1.25e-06, - "output_cost_per_token_above_128k_tokens": 1e-05, - "output_cost_per_character_above_128k_tokens": 2.5e-06, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_vision": true, - "supports_system_messages": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-pro" - }, - "gemini-1.5-pro-001": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_image": 0.00032875, - "input_cost_per_audio_per_second": 3.125e-05, - "input_cost_per_video_per_second": 0.00032875, - "input_cost_per_token": 1.25e-06, - "input_cost_per_character": 3.125e-07, - "input_cost_per_image_above_128k_tokens": 0.0006575, - "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, - "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, - "input_cost_per_token_above_128k_tokens": 2.5e-06, - 
"input_cost_per_character_above_128k_tokens": 6.25e-07, - "output_cost_per_token": 5e-06, - "output_cost_per_character": 1.25e-06, - "output_cost_per_token_above_128k_tokens": 1e-05, - "output_cost_per_character_above_128k_tokens": 2.5e-06, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_vision": true, - "supports_system_messages": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.5-pro-preview-0514": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_image": 0.00032875, - "input_cost_per_audio_per_second": 3.125e-05, - "input_cost_per_video_per_second": 0.00032875, - "input_cost_per_token": 7.8125e-08, - "input_cost_per_character": 3.125e-07, - "input_cost_per_image_above_128k_tokens": 0.0006575, - "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, - "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, - "input_cost_per_token_above_128k_tokens": 1.5625e-07, - "input_cost_per_character_above_128k_tokens": 6.25e-07, - "output_cost_per_token": 3.125e-07, - "output_cost_per_character": 1.25e-06, - "output_cost_per_token_above_128k_tokens": 6.25e-07, - "output_cost_per_character_above_128k_tokens": 2.5e-06, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.5-pro-preview-0215": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_image": 0.00032875, - "input_cost_per_audio_per_second": 3.125e-05, - "input_cost_per_video_per_second": 0.00032875, - "input_cost_per_token": 7.8125e-08, - "input_cost_per_character": 3.125e-07, - "input_cost_per_image_above_128k_tokens": 0.0006575, - "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, - "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, - "input_cost_per_token_above_128k_tokens": 1.5625e-07, - "input_cost_per_character_above_128k_tokens": 6.25e-07, - "output_cost_per_token": 3.125e-07, - "output_cost_per_character": 1.25e-06, - "output_cost_per_token_above_128k_tokens": 6.25e-07, - "output_cost_per_character_above_128k_tokens": 2.5e-06, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.5-pro-preview-0409": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_image": 0.00032875, - "input_cost_per_audio_per_second": 3.125e-05, - "input_cost_per_video_per_second": 0.00032875, - "input_cost_per_token": 7.8125e-08, - "input_cost_per_character": 3.125e-07, - "input_cost_per_image_above_128k_tokens": 0.0006575, - "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, - "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, - "input_cost_per_token_above_128k_tokens": 1.5625e-07, - "input_cost_per_character_above_128k_tokens": 6.25e-07, - "output_cost_per_token": 3.125e-07, - 
"output_cost_per_character": 1.25e-06, - "output_cost_per_token_above_128k_tokens": 6.25e-07, - "output_cost_per_character_above_128k_tokens": 2.5e-06, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": true, - "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.5-flash": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_image": 2e-05, - "input_cost_per_video_per_second": 2e-05, - "input_cost_per_audio_per_second": 2e-06, - "input_cost_per_token": 7.5e-08, - "input_cost_per_character": 1.875e-08, - "input_cost_per_token_above_128k_tokens": 1e-06, - "input_cost_per_character_above_128k_tokens": 2.5e-07, - "input_cost_per_image_above_128k_tokens": 4e-05, - "input_cost_per_video_per_second_above_128k_tokens": 4e-05, - "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, - "output_cost_per_token": 3e-07, - "output_cost_per_character": 7.5e-08, - "output_cost_per_token_above_128k_tokens": 6e-07, - "output_cost_per_character_above_128k_tokens": 1.5e-07, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.5-flash-exp-0827": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_image": 2e-05, - "input_cost_per_video_per_second": 2e-05, - "input_cost_per_audio_per_second": 2e-06, - "input_cost_per_token": 4.688e-09, - "input_cost_per_character": 1.875e-08, - "input_cost_per_token_above_128k_tokens": 1e-06, - "input_cost_per_character_above_128k_tokens": 2.5e-07, - "input_cost_per_image_above_128k_tokens": 4e-05, - "input_cost_per_video_per_second_above_128k_tokens": 4e-05, - "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, - "output_cost_per_token": 4.6875e-09, - "output_cost_per_character": 1.875e-08, - "output_cost_per_token_above_128k_tokens": 9.375e-09, - "output_cost_per_character_above_128k_tokens": 3.75e-08, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.5-flash-002": { - "max_tokens": 8192, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_image": 2e-05, - "input_cost_per_video_per_second": 2e-05, - "input_cost_per_audio_per_second": 2e-06, - "input_cost_per_token": 7.5e-08, - "input_cost_per_character": 1.875e-08, - "input_cost_per_token_above_128k_tokens": 1e-06, - "input_cost_per_character_above_128k_tokens": 2.5e-07, 
- "input_cost_per_image_above_128k_tokens": 4e-05, - "input_cost_per_video_per_second_above_128k_tokens": 4e-05, - "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, - "output_cost_per_token": 3e-07, - "output_cost_per_character": 7.5e-08, - "output_cost_per_token_above_128k_tokens": 6e-07, - "output_cost_per_character_above_128k_tokens": 1.5e-07, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-flash" - }, - "gemini-1.5-flash-001": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_image": 2e-05, - "input_cost_per_video_per_second": 2e-05, - "input_cost_per_audio_per_second": 2e-06, - "input_cost_per_token": 7.5e-08, - "input_cost_per_character": 1.875e-08, - "input_cost_per_token_above_128k_tokens": 1e-06, - "input_cost_per_character_above_128k_tokens": 2.5e-07, - "input_cost_per_image_above_128k_tokens": 4e-05, - "input_cost_per_video_per_second_above_128k_tokens": 4e-05, - "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, - "output_cost_per_token": 3e-07, - "output_cost_per_character": 7.5e-08, - "output_cost_per_token_above_128k_tokens": 6e-07, - "output_cost_per_character_above_128k_tokens": 1.5e-07, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.5-flash-preview-0514": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_image": 2e-05, - "input_cost_per_video_per_second": 2e-05, - "input_cost_per_audio_per_second": 2e-06, - "input_cost_per_token": 7.5e-08, - "input_cost_per_character": 1.875e-08, - "input_cost_per_token_above_128k_tokens": 1e-06, - "input_cost_per_character_above_128k_tokens": 2.5e-07, - "input_cost_per_image_above_128k_tokens": 4e-05, - "input_cost_per_video_per_second_above_128k_tokens": 4e-05, - "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, - "output_cost_per_token": 4.6875e-09, - "output_cost_per_character": 1.875e-08, - "output_cost_per_token_above_128k_tokens": 9.375e-09, - "output_cost_per_character_above_128k_tokens": 3.75e-08, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-pro-experimental": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_token": 0, - "output_cost_per_token": 0, - "input_cost_per_character": 0, - "output_cost_per_character": 0, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_function_calling": false, - "supports_tool_choice": true, - 
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/gemini-experimental" - }, - "gemini-flash-experimental": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_token": 0, - "output_cost_per_token": 0, - "input_cost_per_character": 0, - "output_cost_per_character": 0, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_function_calling": false, - "supports_tool_choice": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/gemini-experimental" - }, - "gemini-pro-vision": { - "max_tokens": 2048, - "max_input_tokens": 16384, - "max_output_tokens": 2048, - "max_images_per_prompt": 16, - "max_videos_per_prompt": 1, - "max_video_length": 2, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "vertex_ai-vision-models", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.0-pro-vision": { - "max_tokens": 2048, - "max_input_tokens": 16384, - "max_output_tokens": 2048, - "max_images_per_prompt": 16, - "max_videos_per_prompt": 1, - "max_video_length": 2, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "vertex_ai-vision-models", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini-1.0-pro-vision-001": { - "max_tokens": 2048, - "max_input_tokens": 16384, - "max_output_tokens": 2048, - "max_images_per_prompt": 16, - "max_videos_per_prompt": 1, - "max_video_length": 2, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "vertex_ai-vision-models", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "medlm-medium": { - "max_tokens": 8192, - "max_input_tokens": 32768, - "max_output_tokens": 8192, - "input_cost_per_character": 5e-07, - "output_cost_per_character": 1e-06, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "medlm-large": { - "max_tokens": 1024, - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "input_cost_per_character": 5e-06, - "output_cost_per_character": 1.5e-05, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "vertex_ai/claude-3-sonnet@20240229": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_assistant_prefill": true - }, - "vertex_ai/claude-3-5-sonnet@20240620": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_assistant_prefill": true - }, - 
"vertex_ai/claude-3-5-sonnet-v2@20241022": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_assistant_prefill": true - }, - "vertex_ai/claude-3-haiku@20240307": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 1.25e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_assistant_prefill": true - }, - "vertex_ai/claude-3-opus@20240229": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 7.5e-05, - "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_assistant_prefill": true - }, - "vertex_ai/meta/llama3-405b-instruct-maas": { - "max_tokens": 32000, - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "vertex_ai-llama_models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models" - }, - "vertex_ai/meta/llama3-70b-instruct-maas": { - "max_tokens": 32000, - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "vertex_ai-llama_models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models" - }, - "vertex_ai/meta/llama3-8b-instruct-maas": { - "max_tokens": 32000, - "max_input_tokens": 32000, - "max_output_tokens": 32000, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "vertex_ai-llama_models", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models" - }, - "vertex_ai/meta/llama-3.2-90b-vision-instruct-maas": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "vertex_ai-llama_models", - "mode": "chat", - "supports_system_messages": true, - "supports_vision": true, - "source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas" - }, - "vertex_ai/mistral-large@latest": { - "max_tokens": 8191, - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 6e-06, - "litellm_provider": "vertex_ai-mistral_models", - "mode": "chat", - "supports_function_calling": true - }, - "vertex_ai/mistral-large@2407": { - "max_tokens": 8191, - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 6e-06, - "litellm_provider": "vertex_ai-mistral_models", - "mode": "chat", - "supports_function_calling": true - }, - "vertex_ai/mistral-nemo@latest": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 1.5e-07, - "litellm_provider": "vertex_ai-mistral_models", - "mode": "chat", - "supports_function_calling": true - }, - "vertex_ai/jamba-1.5-mini@001": { - 
"max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "vertex_ai-ai21_models", - "mode": "chat" - }, - "vertex_ai/jamba-1.5-large@001": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 8e-06, - "litellm_provider": "vertex_ai-ai21_models", - "mode": "chat" - }, - "vertex_ai/jamba-1.5": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "vertex_ai-ai21_models", - "mode": "chat" - }, - "vertex_ai/jamba-1.5-mini": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "vertex_ai-ai21_models", - "mode": "chat" - }, - "vertex_ai/jamba-1.5-large": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 8e-06, - "litellm_provider": "vertex_ai-ai21_models", - "mode": "chat" - }, - "vertex_ai/mistral-nemo@2407": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 3e-06, - "litellm_provider": "vertex_ai-mistral_models", - "mode": "chat", - "supports_function_calling": true - }, - "vertex_ai/codestral@latest": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "vertex_ai-mistral_models", - "mode": "chat", - "supports_function_calling": true - }, - "vertex_ai/codestral@2405": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "vertex_ai-mistral_models", - "mode": "chat", - "supports_function_calling": true - }, - "vertex_ai/imagegeneration@006": { - "output_cost_per_image": 0.02, - "litellm_provider": "vertex_ai-image-models", - "mode": "image_generation", - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "vertex_ai/imagen-3.0-generate-001": { - "output_cost_per_image": 0.04, - "litellm_provider": "vertex_ai-image-models", - "mode": "image_generation", - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "vertex_ai/imagen-3.0-fast-generate-001": { - "output_cost_per_image": 0.02, - "litellm_provider": "vertex_ai-image-models", - "mode": "image_generation", - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "text-embedding-004": { - "max_tokens": 2048, - "max_input_tokens": 2048, - "output_vector_size": 768, - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0, - "litellm_provider": "vertex_ai-embedding-models", - "mode": "embedding", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models" - }, - "text-multilingual-embedding-002": { - "max_tokens": 2048, - "max_input_tokens": 2048, - "output_vector_size": 768, - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0, - "litellm_provider": "vertex_ai-embedding-models", - "mode": "embedding", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models" - }, - 
"textembedding-gecko": { - "max_tokens": 3072, - "max_input_tokens": 3072, - "output_vector_size": 768, - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0, - "litellm_provider": "vertex_ai-embedding-models", - "mode": "embedding", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "textembedding-gecko-multilingual": { - "max_tokens": 3072, - "max_input_tokens": 3072, - "output_vector_size": 768, - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0, - "litellm_provider": "vertex_ai-embedding-models", - "mode": "embedding", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "textembedding-gecko-multilingual@001": { - "max_tokens": 3072, - "max_input_tokens": 3072, - "output_vector_size": 768, - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0, - "litellm_provider": "vertex_ai-embedding-models", - "mode": "embedding", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "textembedding-gecko@001": { - "max_tokens": 3072, - "max_input_tokens": 3072, - "output_vector_size": 768, - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0, - "litellm_provider": "vertex_ai-embedding-models", - "mode": "embedding", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "textembedding-gecko@003": { - "max_tokens": 3072, - "max_input_tokens": 3072, - "output_vector_size": 768, - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0, - "litellm_provider": "vertex_ai-embedding-models", - "mode": "embedding", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "text-embedding-preview-0409": { - "max_tokens": 3072, - "max_input_tokens": 3072, - "output_vector_size": 768, - "input_cost_per_token": 6.25e-09, - "input_cost_per_token_batch_requests": 5e-09, - "output_cost_per_token": 0, - "litellm_provider": "vertex_ai-embedding-models", - "mode": "embedding", - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" - }, - "text-multilingual-embedding-preview-0409": { - "max_tokens": 3072, - "max_input_tokens": 3072, - "output_vector_size": 768, - "input_cost_per_token": 6.25e-09, - "output_cost_per_token": 0, - "litellm_provider": "vertex_ai-embedding-models", - "mode": "embedding", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "palm/chat-bison": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "litellm_provider": "palm", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "palm/chat-bison-001": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "litellm_provider": "palm", - "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "palm/text-bison": { - "max_tokens": 1024, - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, 
- "litellm_provider": "palm", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "palm/text-bison-001": { - "max_tokens": 1024, - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "litellm_provider": "palm", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "palm/text-bison-safety-off": { - "max_tokens": 1024, - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "litellm_provider": "palm", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "palm/text-bison-safety-recitation-off": { - "max_tokens": 1024, - "max_input_tokens": 8192, - "max_output_tokens": 1024, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 1.25e-07, - "litellm_provider": "palm", - "mode": "completion", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini/gemini-1.5-flash-002": { - "max_tokens": 8192, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "cache_read_input_token_cost": 1.875e-08, - "cache_creation_input_token_cost": 1e-06, - "input_cost_per_token": 7.5e-08, - "input_cost_per_token_above_128k_tokens": 1.5e-07, - "output_cost_per_token": 3e-07, - "output_cost_per_token_above_128k_tokens": 6e-07, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "supports_prompt_caching": true, - "tpm": 4000000, - "rpm": 2000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-1.5-flash-001": { - "max_tokens": 8192, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "cache_read_input_token_cost": 1.875e-08, - "cache_creation_input_token_cost": 1e-06, - "input_cost_per_token": 7.5e-08, - "input_cost_per_token_above_128k_tokens": 1.5e-07, - "output_cost_per_token": 3e-07, - "output_cost_per_token_above_128k_tokens": 6e-07, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "supports_prompt_caching": true, - "tpm": 4000000, - "rpm": 2000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-1.5-flash": { - "max_tokens": 8192, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_token": 7.5e-08, - "input_cost_per_token_above_128k_tokens": 1.5e-07, - "output_cost_per_token": 3e-07, - "output_cost_per_token_above_128k_tokens": 6e-07, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": 
true, - "tpm": 4000000, - "rpm": 2000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-1.5-flash-latest": { - "max_tokens": 8192, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_token": 7.5e-08, - "input_cost_per_token_above_128k_tokens": 1.5e-07, - "output_cost_per_token": 3e-07, - "output_cost_per_token_above_128k_tokens": 6e-07, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "supports_prompt_caching": true, - "tpm": 4000000, - "rpm": 2000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-1.5-flash-8b-exp-0924": { - "max_tokens": 8192, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "supports_prompt_caching": true, - "tpm": 4000000, - "rpm": 4000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-1.5-flash-exp-0827": { - "max_tokens": 8192, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "tpm": 4000000, - "rpm": 2000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-1.5-flash-8b-exp-0827": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "tpm": 4000000, - "rpm": 4000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-pro": { - "max_tokens": 8192, - "max_input_tokens": 32760, - "max_output_tokens": 8192, - "input_cost_per_token": 3.5e-07, - "input_cost_per_token_above_128k_tokens": 7e-07, - "output_cost_per_token": 1.05e-06, - "output_cost_per_token_above_128k_tokens": 2.1e-06, - "litellm_provider": "gemini", - "mode": "chat", - "supports_function_calling": true, - "rpd": 30000, - "tpm": 120000, - "rpm": 360, - "source": "https://ai.google.dev/gemini-api/docs/models/gemini" - }, - 
"gemini/gemini-1.5-pro": { - "max_tokens": 8192, - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "input_cost_per_token": 3.5e-06, - "input_cost_per_token_above_128k_tokens": 7e-06, - "output_cost_per_token": 1.05e-05, - "output_cost_per_token_above_128k_tokens": 2.1e-05, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_tool_choice": true, - "supports_response_schema": true, - "tpm": 4000000, - "rpm": 1000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-1.5-pro-002": { - "max_tokens": 8192, - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "input_cost_per_token": 3.5e-06, - "input_cost_per_token_above_128k_tokens": 7e-06, - "output_cost_per_token": 1.05e-05, - "output_cost_per_token_above_128k_tokens": 2.1e-05, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_tool_choice": true, - "supports_response_schema": true, - "supports_prompt_caching": true, - "tpm": 4000000, - "rpm": 1000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-1.5-pro-001": { - "max_tokens": 8192, - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "input_cost_per_token": 3.5e-06, - "input_cost_per_token_above_128k_tokens": 7e-06, - "output_cost_per_token": 1.05e-05, - "output_cost_per_token_above_128k_tokens": 2.1e-05, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_tool_choice": true, - "supports_response_schema": true, - "supports_prompt_caching": true, - "tpm": 4000000, - "rpm": 1000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-1.5-pro-exp-0801": { - "max_tokens": 8192, - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "input_cost_per_token": 3.5e-06, - "input_cost_per_token_above_128k_tokens": 7e-06, - "output_cost_per_token": 1.05e-05, - "output_cost_per_token_above_128k_tokens": 2.1e-05, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_tool_choice": true, - "supports_response_schema": true, - "tpm": 4000000, - "rpm": 1000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-1.5-pro-exp-0827": { - "max_tokens": 8192, - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_tool_choice": true, - "supports_response_schema": true, - "tpm": 4000000, - "rpm": 1000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-1.5-pro-latest": { - "max_tokens": 8192, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "input_cost_per_token": 3.5e-06, - "input_cost_per_token_above_128k_tokens": 7e-06, - "output_cost_per_token": 1.05e-06, - "output_cost_per_token_above_128k_tokens": 2.1e-05, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_tool_choice": true, - "supports_response_schema": true, - "tpm": 4000000, - "rpm": 
1000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-pro-vision": { - "max_tokens": 2048, - "max_input_tokens": 30720, - "max_output_tokens": 2048, - "input_cost_per_token": 3.5e-07, - "input_cost_per_token_above_128k_tokens": 7e-07, - "output_cost_per_token": 1.05e-06, - "output_cost_per_token_above_128k_tokens": 2.1e-06, - "litellm_provider": "gemini", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "rpd": 30000, - "tpm": 120000, - "rpm": 360, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini/gemini-gemma-2-27b-it": { - "max_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3.5e-07, - "output_cost_per_token": 1.05e-06, - "litellm_provider": "gemini", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "gemini/gemini-gemma-2-9b-it": { - "max_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3.5e-07, - "output_cost_per_token": 1.05e-06, - "litellm_provider": "gemini", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" - }, - "command-r": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "cohere_chat", - "mode": "chat", - "supports_function_calling": true - }, - "command-r-08-2024": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "cohere_chat", - "mode": "chat", - "supports_function_calling": true - }, - "command-light": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "cohere_chat", - "mode": "chat" - }, - "command-r-plus": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 2.5e-06, - "output_cost_per_token": 1e-05, - "litellm_provider": "cohere_chat", - "mode": "chat", - "supports_function_calling": true - }, - "command-r-plus-08-2024": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 2.5e-06, - "output_cost_per_token": 1e-05, - "litellm_provider": "cohere_chat", - "mode": "chat", - "supports_function_calling": true - }, - "command-nightly": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "cohere", - "mode": "completion" - }, - "command": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "cohere", - "mode": "completion" - }, - "rerank-english-v3.0": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - "input_cost_per_token": 0.0, - "input_cost_per_query": 0.002, - "output_cost_per_token": 0.0, - "litellm_provider": "cohere", - "mode": "rerank" - }, - "rerank-multilingual-v3.0": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - 
"input_cost_per_token": 0.0, - "input_cost_per_query": 0.002, - "output_cost_per_token": 0.0, - "litellm_provider": "cohere", - "mode": "rerank" - }, - "rerank-english-v2.0": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - "input_cost_per_token": 0.0, - "input_cost_per_query": 0.002, - "output_cost_per_token": 0.0, - "litellm_provider": "cohere", - "mode": "rerank" - }, - "rerank-multilingual-v2.0": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - "input_cost_per_token": 0.0, - "input_cost_per_query": 0.002, - "output_cost_per_token": 0.0, - "litellm_provider": "cohere", - "mode": "rerank" - }, - "embed-english-v3.0": { - "max_tokens": 1024, - "max_input_tokens": 1024, - "input_cost_per_token": 1e-07, - "input_cost_per_image": 0.0001, - "output_cost_per_token": 0.0, - "litellm_provider": "cohere", - "mode": "embedding", - "supports_image_input": true, - "supports_embedding_image_input": true, - "metadata": { - "notes": "'supports_image_input' is a deprecated field. Use 'supports_embedding_image_input' instead." - } - }, - "embed-english-light-v3.0": { - "max_tokens": 1024, - "max_input_tokens": 1024, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "cohere", - "mode": "embedding" - }, - "embed-multilingual-v3.0": { - "max_tokens": 1024, - "max_input_tokens": 1024, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "cohere", - "mode": "embedding" - }, - "embed-english-v2.0": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "cohere", - "mode": "embedding" - }, - "embed-english-light-v2.0": { - "max_tokens": 1024, - "max_input_tokens": 1024, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "cohere", - "mode": "embedding" - }, - "embed-multilingual-v2.0": { - "max_tokens": 768, - "max_input_tokens": 768, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "cohere", - "mode": "embedding" - }, - "replicate/meta/llama-2-13b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "replicate", - "mode": "chat" - }, - "replicate/meta/llama-2-13b-chat": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "replicate", - "mode": "chat" - }, - "replicate/meta/llama-2-70b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 6.5e-07, - "output_cost_per_token": 2.75e-06, - "litellm_provider": "replicate", - "mode": "chat" - }, - "replicate/meta/llama-2-70b-chat": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 6.5e-07, - "output_cost_per_token": 2.75e-06, - "litellm_provider": "replicate", - "mode": "chat" - }, - "replicate/meta/llama-2-7b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-08, - "output_cost_per_token": 2.5e-07, - "litellm_provider": "replicate", - "mode": "chat" - }, - "replicate/meta/llama-2-7b-chat": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-08, - 
"output_cost_per_token": 2.5e-07, - "litellm_provider": "replicate", - "mode": "chat" - }, - "replicate/meta/llama-3-70b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 6.5e-07, - "output_cost_per_token": 2.75e-06, - "litellm_provider": "replicate", - "mode": "chat" - }, - "replicate/meta/llama-3-70b-instruct": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 6.5e-07, - "output_cost_per_token": 2.75e-06, - "litellm_provider": "replicate", - "mode": "chat" - }, - "replicate/meta/llama-3-8b": { - "max_tokens": 8086, - "max_input_tokens": 8086, - "max_output_tokens": 8086, - "input_cost_per_token": 5e-08, - "output_cost_per_token": 2.5e-07, - "litellm_provider": "replicate", - "mode": "chat" - }, - "replicate/meta/llama-3-8b-instruct": { - "max_tokens": 8086, - "max_input_tokens": 8086, - "max_output_tokens": 8086, - "input_cost_per_token": 5e-08, - "output_cost_per_token": 2.5e-07, - "litellm_provider": "replicate", - "mode": "chat" - }, - "replicate/mistralai/mistral-7b-v0.1": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-08, - "output_cost_per_token": 2.5e-07, - "litellm_provider": "replicate", - "mode": "chat" - }, - "replicate/mistralai/mistral-7b-instruct-v0.2": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-08, - "output_cost_per_token": 2.5e-07, - "litellm_provider": "replicate", - "mode": "chat" - }, - "replicate/mistralai/mixtral-8x7b-instruct-v0.1": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 1e-06, - "litellm_provider": "replicate", - "mode": "chat" - }, - "openrouter/deepseek/deepseek-coder": { - "max_tokens": 8192, - "max_input_tokens": 66000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.4e-07, - "output_cost_per_token": 2.8e-07, - "litellm_provider": "openrouter", - "supports_prompt_caching": true, - "mode": "chat" - }, - "openrouter/microsoft/wizardlm-2-8x22b:nitro": { - "max_tokens": 65536, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 1e-06, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/google/gemini-pro-1.5": { - "max_tokens": 8192, - "max_input_tokens": 1000000, - "max_output_tokens": 8192, - "input_cost_per_token": 2.5e-06, - "output_cost_per_token": 7.5e-06, - "input_cost_per_image": 0.00265, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "openrouter/mistralai/mixtral-8x22b-instruct": { - "max_tokens": 65536, - "input_cost_per_token": 6.5e-07, - "output_cost_per_token": 6.5e-07, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/cohere/command-r-plus": { - "max_tokens": 128000, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/databricks/dbrx-instruct": { - "max_tokens": 32768, - "input_cost_per_token": 6e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/anthropic/claude-3-haiku": { - "max_tokens": 200000, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 1.25e-06, - "input_cost_per_image": 0.0004, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - 
"openrouter/anthropic/claude-3-haiku-20240307": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 1.25e-06, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 264 - }, - "anthropic/claude-3-5-sonnet-20241022": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "litellm_provider": "anthropic", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159, - "supports_assistant_prefill": true, - "supports_prompt_caching": true - }, - "anthropic/claude-3-5-sonnet-latest": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "cache_creation_input_token_cost": 3.75e-06, - "cache_read_input_token_cost": 3e-07, - "litellm_provider": "anthropic", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159, - "supports_assistant_prefill": true, - "supports_prompt_caching": true - }, - "openrouter/anthropic/claude-3.5-sonnet": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159, - "supports_assistant_prefill": true - }, - "openrouter/anthropic/claude-3.5-sonnet:beta": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 159 - }, - "openrouter/anthropic/claude-3-sonnet": { - "max_tokens": 200000, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "input_cost_per_image": 0.0048, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "openrouter/mistralai/mistral-large": { - "max_tokens": 32000, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/cognitivecomputations/dolphin-mixtral-8x7b": { - "max_tokens": 32769, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/google/gemini-pro-vision": { - "max_tokens": 45875, - "input_cost_per_token": 1.25e-07, - "output_cost_per_token": 3.75e-07, - "input_cost_per_image": 0.0025, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "openrouter/fireworks/firellava-13b": { - "max_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/meta-llama/llama-3-8b-instruct:free": { - "max_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "openrouter", - "mode": "chat" - }, - 
"openrouter/meta-llama/llama-3-8b-instruct:extended": { - "max_tokens": 16384, - "input_cost_per_token": 2.25e-07, - "output_cost_per_token": 2.25e-06, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/meta-llama/llama-3-70b-instruct:nitro": { - "max_tokens": 8192, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/meta-llama/llama-3-70b-instruct": { - "max_tokens": 8192, - "input_cost_per_token": 5.9e-07, - "output_cost_per_token": 7.9e-07, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/openai/o1-mini": { - "max_tokens": 65536, - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.2e-05, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false - }, - "openrouter/openai/o1-mini-2024-09-12": { - "max_tokens": 65536, - "max_input_tokens": 128000, - "max_output_tokens": 65536, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.2e-05, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false - }, - "openrouter/openai/o1-preview": { - "max_tokens": 32768, - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 6e-05, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false - }, - "openrouter/openai/o1-preview-2024-09-12": { - "max_tokens": 32768, - "max_input_tokens": 128000, - "max_output_tokens": 32768, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 6e-05, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": false - }, - "openrouter/openai/gpt-4o": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true - }, - "openrouter/openai/gpt-4o-2024-05-13": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true - }, - "openrouter/openai/gpt-4-vision-preview": { - "max_tokens": 130000, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 3e-05, - "input_cost_per_image": 0.01445, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "openrouter/openai/gpt-3.5-turbo": { - "max_tokens": 4095, - "input_cost_per_token": 1.5e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/openai/gpt-3.5-turbo-16k": { - "max_tokens": 16383, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 4e-06, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/openai/gpt-4": { - "max_tokens": 8192, - "input_cost_per_token": 3e-05, - "output_cost_per_token": 
6e-05, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/anthropic/claude-instant-v1": { - "max_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 1.63e-06, - "output_cost_per_token": 5.51e-06, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/anthropic/claude-2": { - "max_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 1.102e-05, - "output_cost_per_token": 3.268e-05, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/anthropic/claude-3-opus": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 7.5e-05, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "tool_use_system_prompt_tokens": 395 - }, - "openrouter/google/palm-2-chat-bison": { - "max_tokens": 25804, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/google/palm-2-codechat-bison": { - "max_tokens": 20070, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/meta-llama/llama-2-13b-chat": { - "max_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/meta-llama/llama-2-70b-chat": { - "max_tokens": 4096, - "input_cost_per_token": 1.5e-06, - "output_cost_per_token": 1.5e-06, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/meta-llama/codellama-34b-instruct": { - "max_tokens": 8192, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/nousresearch/nous-hermes-llama2-13b": { - "max_tokens": 4096, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/mancer/weaver": { - "max_tokens": 8000, - "input_cost_per_token": 5.625e-06, - "output_cost_per_token": 5.625e-06, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/gryphe/mythomax-l2-13b": { - "max_tokens": 8192, - "input_cost_per_token": 1.875e-06, - "output_cost_per_token": 1.875e-06, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/jondurbin/airoboros-l2-70b-2.1": { - "max_tokens": 4096, - "input_cost_per_token": 1.3875e-05, - "output_cost_per_token": 1.3875e-05, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/undi95/remm-slerp-l2-13b": { - "max_tokens": 6144, - "input_cost_per_token": 1.875e-06, - "output_cost_per_token": 1.875e-06, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/pygmalionai/mythalion-13b": { - "max_tokens": 4096, - "input_cost_per_token": 1.875e-06, - "output_cost_per_token": 1.875e-06, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/mistralai/mistral-7b-instruct": { - "max_tokens": 8192, - "input_cost_per_token": 1.3e-07, - "output_cost_per_token": 1.3e-07, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "openrouter/mistralai/mistral-7b-instruct:free": { - "max_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "j2-ultra": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 
1.5e-05, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "ai21", - "mode": "completion" - }, - "jamba-1.5-mini@001": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "ai21", - "mode": "chat" - }, - "jamba-1.5-large@001": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 8e-06, - "litellm_provider": "ai21", - "mode": "chat" - }, - "jamba-1.5": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "ai21", - "mode": "chat" - }, - "jamba-1.5-mini": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "ai21", - "mode": "chat" - }, - "jamba-1.5-large": { - "max_tokens": 256000, - "max_input_tokens": 256000, - "max_output_tokens": 256000, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 8e-06, - "litellm_provider": "ai21", - "mode": "chat" - }, - "j2-mid": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 1e-05, - "output_cost_per_token": 1e-05, - "litellm_provider": "ai21", - "mode": "completion" - }, - "j2-light": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 3e-06, - "litellm_provider": "ai21", - "mode": "completion" - }, - "dolphin": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "nlp_cloud", - "mode": "completion" - }, - "chatdolphin": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "nlp_cloud", - "mode": "chat" - }, - "luminous-base": { - "max_tokens": 2048, - "input_cost_per_token": 3e-05, - "output_cost_per_token": 3.3e-05, - "litellm_provider": "aleph_alpha", - "mode": "completion" - }, - "luminous-base-control": { - "max_tokens": 2048, - "input_cost_per_token": 3.75e-05, - "output_cost_per_token": 4.125e-05, - "litellm_provider": "aleph_alpha", - "mode": "chat" - }, - "luminous-extended": { - "max_tokens": 2048, - "input_cost_per_token": 4.5e-05, - "output_cost_per_token": 4.95e-05, - "litellm_provider": "aleph_alpha", - "mode": "completion" - }, - "luminous-extended-control": { - "max_tokens": 2048, - "input_cost_per_token": 5.625e-05, - "output_cost_per_token": 6.1875e-05, - "litellm_provider": "aleph_alpha", - "mode": "chat" - }, - "luminous-supreme": { - "max_tokens": 2048, - "input_cost_per_token": 0.000175, - "output_cost_per_token": 0.0001925, - "litellm_provider": "aleph_alpha", - "mode": "completion" - }, - "luminous-supreme-control": { - "max_tokens": 2048, - "input_cost_per_token": 0.00021875, - "output_cost_per_token": 0.000240625, - "litellm_provider": "aleph_alpha", - "mode": "chat" - }, - "ai21.j2-mid-v1": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "max_output_tokens": 8191, - "input_cost_per_token": 1.25e-05, - "output_cost_per_token": 1.25e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "ai21.j2-ultra-v1": { - "max_tokens": 8191, - "max_input_tokens": 8191, - 
"max_output_tokens": 8191, - "input_cost_per_token": 1.88e-05, - "output_cost_per_token": 1.88e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "ai21.jamba-instruct-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 70000, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 7e-07, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_system_messages": true - }, - "amazon.titan-text-lite-v1": { - "max_tokens": 4000, - "max_input_tokens": 42000, - "max_output_tokens": 4000, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 4e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "amazon.titan-text-express-v1": { - "max_tokens": 8000, - "max_input_tokens": 42000, - "max_output_tokens": 8000, - "input_cost_per_token": 1.3e-06, - "output_cost_per_token": 1.7e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "amazon.titan-text-premier-v1:0": { - "max_tokens": 32000, - "max_input_tokens": 42000, - "max_output_tokens": 32000, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 1.5e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "amazon.titan-embed-text-v1": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "output_vector_size": 1536, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "bedrock", - "mode": "embedding" - }, - "amazon.titan-embed-text-v2:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "output_vector_size": 1024, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "bedrock", - "mode": "embedding" - }, - "mistral.mistral-7b-instruct-v0:2": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "mistral.mixtral-8x7b-instruct-v0:1": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 4.5e-07, - "output_cost_per_token": 7e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "mistral.mistral-large-2402-v1:0": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true - }, - "mistral.mistral-large-2407-v1:0": { - "max_tokens": 8191, - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 9e-06, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true - }, - "mistral.mistral-small-2402-v1:0": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 3e-06, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true - }, - "bedrock/us-west-2/mistral.mixtral-8x7b-instruct-v0:1": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 4.5e-07, - "output_cost_per_token": 7e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/mistral.mixtral-8x7b-instruct-v0:1": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 4.5e-07, - "output_cost_per_token": 7e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-west-3/mistral.mixtral-8x7b-instruct-v0:1": { - 
"max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 5.9e-07, - "output_cost_per_token": 9.1e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/mistral.mistral-7b-instruct-v0:2": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/mistral.mistral-7b-instruct-v0:2": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-west-3/mistral.mistral-7b-instruct-v0:2": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2.6e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/mistral.mistral-large-2402-v1:0": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/mistral.mistral-large-2402-v1:0": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true - }, - "bedrock/eu-west-3/mistral.mistral-large-2402-v1:0": { - "max_tokens": 8191, - "max_input_tokens": 32000, - "max_output_tokens": 8191, - "input_cost_per_token": 1.04e-05, - "output_cost_per_token": 3.12e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true - }, - "anthropic.claude-3-sonnet-20240229-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "anthropic.claude-3-5-sonnet-20240620-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "anthropic.claude-3-5-sonnet-20241022-v2:0": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_assistant_prefill": true, - "supports_prompt_caching": true - }, - "anthropic.claude-3-5-sonnet-latest-v2:0": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "anthropic.claude-3-haiku-20240307-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 1.25e-06, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "anthropic.claude-3-opus-20240229-v1:0": { 
- "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 7.5e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "us.anthropic.claude-3-sonnet-20240229-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "us.anthropic.claude-3-5-sonnet-20240620-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "us.anthropic.claude-3-5-sonnet-20241022-v2:0": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_assistant_prefill": true - }, - "us.anthropic.claude-3-haiku-20240307-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 1.25e-06, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "us.anthropic.claude-3-opus-20240229-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 7.5e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "eu.anthropic.claude-3-sonnet-20240229-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "eu.anthropic.claude-3-5-sonnet-20240620-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "eu.anthropic.claude-3-5-sonnet-20241022-v2:0": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_assistant_prefill": true - }, - "eu.anthropic.claude-3-haiku-20240307-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 1.25e-06, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "eu.anthropic.claude-3-opus-20240229-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 7.5e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - 
"anthropic.claude-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/anthropic.claude-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/anthropic.claude-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-northeast-1/anthropic.claude-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.0455, - "output_cost_per_second": 0.0455, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.02527, - "output_cost_per_second": 0.02527, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-central-1/anthropic.claude-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.0415, - "output_cost_per_second": 0.0415, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.02305, - "output_cost_per_second": 0.02305, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/1-month-commitment/anthropic.claude-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.0175, - "output_cost_per_second": 0.0175, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/6-month-commitment/anthropic.claude-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.00972, - "output_cost_per_second": 0.00972, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/1-month-commitment/anthropic.claude-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.0175, - "output_cost_per_second": 0.0175, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/6-month-commitment/anthropic.claude-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.00972, - "output_cost_per_second": 0.00972, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "anthropic.claude-v2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - 
"input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/anthropic.claude-v2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/anthropic.claude-v2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-northeast-1/anthropic.claude-v2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.0455, - "output_cost_per_second": 0.0455, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.02527, - "output_cost_per_second": 0.02527, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-central-1/anthropic.claude-v2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.0415, - "output_cost_per_second": 0.0415, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.02305, - "output_cost_per_second": 0.02305, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/1-month-commitment/anthropic.claude-v2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.0175, - "output_cost_per_second": 0.0175, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/6-month-commitment/anthropic.claude-v2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.00972, - "output_cost_per_second": 0.00972, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/1-month-commitment/anthropic.claude-v2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.0175, - "output_cost_per_second": 0.0175, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/6-month-commitment/anthropic.claude-v2": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.00972, - "output_cost_per_second": 0.00972, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "anthropic.claude-v2:1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - 
}, - "bedrock/us-east-1/anthropic.claude-v2:1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/anthropic.claude-v2:1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-northeast-1/anthropic.claude-v2:1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v2:1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.0455, - "output_cost_per_second": 0.0455, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v2:1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.02527, - "output_cost_per_second": 0.02527, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-central-1/anthropic.claude-v2:1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-06, - "output_cost_per_token": 2.4e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v2:1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.0415, - "output_cost_per_second": 0.0415, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v2:1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.02305, - "output_cost_per_second": 0.02305, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/1-month-commitment/anthropic.claude-v2:1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.0175, - "output_cost_per_second": 0.0175, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/6-month-commitment/anthropic.claude-v2:1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.00972, - "output_cost_per_second": 0.00972, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/1-month-commitment/anthropic.claude-v2:1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.0175, - "output_cost_per_second": 0.0175, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/6-month-commitment/anthropic.claude-v2:1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.00972, - "output_cost_per_second": 0.00972, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "anthropic.claude-instant-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 1.63e-06, - "output_cost_per_token": 5.51e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/anthropic.claude-instant-v1": { - "max_tokens": 8191, - 
"max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-07, - "output_cost_per_token": 2.4e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/1-month-commitment/anthropic.claude-instant-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.011, - "output_cost_per_second": 0.011, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/6-month-commitment/anthropic.claude-instant-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.00611, - "output_cost_per_second": 0.00611, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/1-month-commitment/anthropic.claude-instant-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.011, - "output_cost_per_second": 0.011, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/6-month-commitment/anthropic.claude-instant-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.00611, - "output_cost_per_second": 0.00611, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-2/anthropic.claude-instant-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 8e-07, - "output_cost_per_token": 2.4e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-northeast-1/anthropic.claude-instant-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 2.23e-06, - "output_cost_per_token": 7.55e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-instant-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.01475, - "output_cost_per_second": 0.01475, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-instant-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.008194, - "output_cost_per_second": 0.008194, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-central-1/anthropic.claude-instant-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_token": 2.48e-06, - "output_cost_per_token": 8.38e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-central-1/1-month-commitment/anthropic.claude-instant-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.01635, - "output_cost_per_second": 0.01635, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-central-1/6-month-commitment/anthropic.claude-instant-v1": { - "max_tokens": 8191, - "max_input_tokens": 100000, - "max_output_tokens": 8191, - "input_cost_per_second": 0.009083, - "output_cost_per_second": 0.009083, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "cohere.command-text-v14": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/*/1-month-commitment/cohere.command-text-v14": { - "max_tokens": 4096, - 
"max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_second": 0.011, - "output_cost_per_second": 0.011, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/*/6-month-commitment/cohere.command-text-v14": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_second": 0.0066027, - "output_cost_per_second": 0.0066027, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "cohere.command-light-text-v14": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/*/1-month-commitment/cohere.command-light-text-v14": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_second": 0.001902, - "output_cost_per_second": 0.001902, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/*/6-month-commitment/cohere.command-light-text-v14": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_second": 0.0011416, - "output_cost_per_second": 0.0011416, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "cohere.command-r-plus-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "cohere.command-r-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 1.5e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "cohere.embed-english-v3": { - "max_tokens": 512, - "max_input_tokens": 512, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "bedrock", - "mode": "embedding" - }, - "cohere.embed-multilingual-v3": { - "max_tokens": 512, - "max_input_tokens": 512, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "bedrock", - "mode": "embedding" - }, - "meta.llama2-13b-chat-v1": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 7.5e-07, - "output_cost_per_token": 1e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "meta.llama2-70b-chat-v1": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1.95e-06, - "output_cost_per_token": 2.56e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "meta.llama3-8b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/meta.llama3-8b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-1/meta.llama3-8b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-south-1/meta.llama3-8b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3.6e-07, - "output_cost_per_token": 7.2e-07, - 
"litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ca-central-1/meta.llama3-8b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3.5e-07, - "output_cost_per_token": 6.9e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-west-1/meta.llama3-8b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3.2e-07, - "output_cost_per_token": 6.5e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-west-2/meta.llama3-8b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3.9e-07, - "output_cost_per_token": 7.8e-07, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/sa-east-1/meta.llama3-8b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 5e-07, - "output_cost_per_token": 1.01e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "meta.llama3-70b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2.65e-06, - "output_cost_per_token": 3.5e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-east-1/meta.llama3-70b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2.65e-06, - "output_cost_per_token": 3.5e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/us-west-1/meta.llama3-70b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2.65e-06, - "output_cost_per_token": 3.5e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ap-south-1/meta.llama3-70b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3.18e-06, - "output_cost_per_token": 4.2e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/ca-central-1/meta.llama3-70b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3.05e-06, - "output_cost_per_token": 4.03e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-west-1/meta.llama3-70b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 2.86e-06, - "output_cost_per_token": 3.78e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/eu-west-2/meta.llama3-70b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 3.45e-06, - "output_cost_per_token": 4.55e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "bedrock/sa-east-1/meta.llama3-70b-instruct-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 4.45e-06, - "output_cost_per_token": 5.88e-06, - "litellm_provider": "bedrock", - "mode": "chat" - }, - "meta.llama3-1-8b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "input_cost_per_token": 2.2e-07, - "output_cost_per_token": 2.2e-07, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "meta.llama3-1-70b-instruct-v1:0": { - "max_tokens": 128000, - 
"max_input_tokens": 128000, - "max_output_tokens": 2048, - "input_cost_per_token": 9.9e-07, - "output_cost_per_token": 9.9e-07, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "meta.llama3-1-405b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 5.32e-06, - "output_cost_per_token": 1.6e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "meta.llama3-2-1b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.meta.llama3-2-1b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "eu.meta.llama3-2-1b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.3e-07, - "output_cost_per_token": 1.3e-07, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "meta.llama3-2-3b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 1.5e-07, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.meta.llama3-2-3b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 1.5e-07, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "eu.meta.llama3-2-3b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.9e-07, - "output_cost_per_token": 1.9e-07, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "meta.llama3-2-11b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 3.5e-07, - "output_cost_per_token": 3.5e-07, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.meta.llama3-2-11b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 3.5e-07, - "output_cost_per_token": 3.5e-07, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "meta.llama3-2-90b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.meta.llama3-2-90b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 
4096, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 2e-06, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "512-x-512/50-steps/stability.stable-diffusion-xl-v0": { - "max_tokens": 77, - "max_input_tokens": 77, - "output_cost_per_image": 0.018, - "litellm_provider": "bedrock", - "mode": "image_generation" - }, - "512-x-512/max-steps/stability.stable-diffusion-xl-v0": { - "max_tokens": 77, - "max_input_tokens": 77, - "output_cost_per_image": 0.036, - "litellm_provider": "bedrock", - "mode": "image_generation" - }, - "max-x-max/50-steps/stability.stable-diffusion-xl-v0": { - "max_tokens": 77, - "max_input_tokens": 77, - "output_cost_per_image": 0.036, - "litellm_provider": "bedrock", - "mode": "image_generation" - }, - "max-x-max/max-steps/stability.stable-diffusion-xl-v0": { - "max_tokens": 77, - "max_input_tokens": 77, - "output_cost_per_image": 0.072, - "litellm_provider": "bedrock", - "mode": "image_generation" - }, - "1024-x-1024/50-steps/stability.stable-diffusion-xl-v1": { - "max_tokens": 77, - "max_input_tokens": 77, - "output_cost_per_image": 0.04, - "litellm_provider": "bedrock", - "mode": "image_generation" - }, - "1024-x-1024/max-steps/stability.stable-diffusion-xl-v1": { - "max_tokens": 77, - "max_input_tokens": 77, - "output_cost_per_image": 0.08, - "litellm_provider": "bedrock", - "mode": "image_generation" - }, - "sagemaker/meta-textgeneration-llama-2-7b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "sagemaker", - "mode": "completion" - }, - "sagemaker/meta-textgeneration-llama-2-7b-f": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "sagemaker", - "mode": "chat" - }, - "sagemaker/meta-textgeneration-llama-2-13b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "sagemaker", - "mode": "completion" - }, - "sagemaker/meta-textgeneration-llama-2-13b-f": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "sagemaker", - "mode": "chat" - }, - "sagemaker/meta-textgeneration-llama-2-70b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "sagemaker", - "mode": "completion" - }, - "sagemaker/meta-textgeneration-llama-2-70b-b-f": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "sagemaker", - "mode": "chat" - }, - "together-ai-up-to-4b": { - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "together_ai", - "mode": "chat" - }, - "together-ai-4.1b-8b": { - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "together_ai", - "mode": "chat" - }, - "together-ai-8.1b-21b": { - "max_tokens": 1000, - "input_cost_per_token": 3e-07, - "output_cost_per_token": 3e-07, - "litellm_provider": "together_ai", - "mode": "chat" - }, - "together-ai-21.1b-41b": { - "input_cost_per_token": 8e-07, - "output_cost_per_token": 8e-07, - "litellm_provider": 
"together_ai", - "mode": "chat" - }, - "together-ai-41.1b-80b": { - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "together_ai", - "mode": "chat" - }, - "together-ai-81.1b-110b": { - "input_cost_per_token": 1.8e-06, - "output_cost_per_token": 1.8e-06, - "litellm_provider": "together_ai", - "mode": "chat" - }, - "together-ai-embedding-up-to-150m": { - "input_cost_per_token": 8e-09, - "output_cost_per_token": 0.0, - "litellm_provider": "together_ai", - "mode": "embedding" - }, - "together-ai-embedding-151m-to-350m": { - "input_cost_per_token": 1.6e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "together_ai", - "mode": "embedding" - }, - "together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1": { - "input_cost_per_token": 6e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "together_ai", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "mode": "chat" - }, - "together_ai/mistralai/Mistral-7B-Instruct-v0.1": { - "litellm_provider": "together_ai", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "mode": "chat" - }, - "together_ai/togethercomputer/CodeLlama-34b-Instruct": { - "litellm_provider": "together_ai", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "mode": "chat" - }, - "ollama/codegemma": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "completion" - }, - "ollama/codegeex4": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat", - "supports_function_calling": false - }, - "ollama/deepseek-coder-v2-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat", - "supports_function_calling": true - }, - "ollama/deepseek-coder-v2-base": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "completion", - "supports_function_calling": true - }, - "ollama/deepseek-coder-v2-lite-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat", - "supports_function_calling": true - }, - "ollama/deepseek-coder-v2-lite-base": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "completion", - "supports_function_calling": true - }, - "ollama/internlm2_5-20b-chat": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat", - "supports_function_calling": true - }, - "ollama/llama2": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat" - }, - "ollama/llama2:7b": { - "max_tokens": 
4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat" - }, - "ollama/llama2:13b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat" - }, - "ollama/llama2:70b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat" - }, - "ollama/llama2-uncensored": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "completion" - }, - "ollama/llama3": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat" - }, - "ollama/llama3:8b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat" - }, - "ollama/llama3:70b": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat" - }, - "ollama/llama3.1": { - "max_tokens": 32768, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat", - "supports_function_calling": true - }, - "ollama/mistral-large-instruct-2407": { - "max_tokens": 65536, - "max_input_tokens": 65536, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat" - }, - "ollama/mistral": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "completion" - }, - "ollama/mistral-7B-Instruct-v0.1": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat" - }, - "ollama/mistral-7B-Instruct-v0.2": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat" - }, - "ollama/mixtral-8x7B-Instruct-v0.1": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat" - }, - "ollama/mixtral-8x22B-Instruct-v0.1": { - "max_tokens": 65536, - "max_input_tokens": 65536, - "max_output_tokens": 65536, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "chat" - }, - "ollama/codellama": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "completion" - }, - "ollama/orca-mini": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - 
"output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "completion" - }, - "ollama/vicuna": { - "max_tokens": 2048, - "max_input_tokens": 2048, - "max_output_tokens": 2048, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "ollama", - "mode": "completion" - }, - "deepinfra/lizpreciatior/lzlv_70b_fp16_hf": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 7e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/Gryphe/MythoMax-L2-13b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2.2e-07, - "output_cost_per_token": 2.2e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/mistralai/Mistral-7B-Instruct-v0.1": { - "max_tokens": 8191, - "max_input_tokens": 32768, - "max_output_tokens": 8191, - "input_cost_per_token": 1.3e-07, - "output_cost_per_token": 1.3e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/meta-llama/Llama-2-70b-chat-hf": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 7e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/cognitivecomputations/dolphin-2.6-mixtral-8x7b": { - "max_tokens": 8191, - "max_input_tokens": 32768, - "max_output_tokens": 8191, - "input_cost_per_token": 2.7e-07, - "output_cost_per_token": 2.7e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/codellama/CodeLlama-34b-Instruct-hf": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 6e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/deepinfra/mixtral": { - "max_tokens": 4096, - "max_input_tokens": 32000, - "max_output_tokens": 4096, - "input_cost_per_token": 2.7e-07, - "output_cost_per_token": 2.7e-07, - "litellm_provider": "deepinfra", - "mode": "completion" - }, - "deepinfra/Phind/Phind-CodeLlama-34B-v2": { - "max_tokens": 4096, - "max_input_tokens": 16384, - "max_output_tokens": 4096, - "input_cost_per_token": 6e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/mistralai/Mixtral-8x7B-Instruct-v0.1": { - "max_tokens": 8191, - "max_input_tokens": 32768, - "max_output_tokens": 8191, - "input_cost_per_token": 2.7e-07, - "output_cost_per_token": 2.7e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/deepinfra/airoboros-70b": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 7e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/01-ai/Yi-34B-Chat": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 6e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/01-ai/Yi-6B-200K": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.3e-07, - "output_cost_per_token": 1.3e-07, - "litellm_provider": "deepinfra", - "mode": "completion" - }, - "deepinfra/jondurbin/airoboros-l2-70b-gpt4-1.4.1": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 7e-07, - "output_cost_per_token": 9e-07, - 
"litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/meta-llama/Llama-2-13b-chat-hf": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2.2e-07, - "output_cost_per_token": 2.2e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/amazon/MistralLite": { - "max_tokens": 8191, - "max_input_tokens": 32768, - "max_output_tokens": 8191, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/meta-llama/Llama-2-7b-chat-hf": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1.3e-07, - "output_cost_per_token": 1.3e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/meta-llama/Meta-Llama-3-8B-Instruct": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "max_output_tokens": 4096, - "input_cost_per_token": 8e-08, - "output_cost_per_token": 8e-08, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/meta-llama/Meta-Llama-3-70B-Instruct": { - "max_tokens": 8191, - "max_input_tokens": 8191, - "max_output_tokens": 4096, - "input_cost_per_token": 5.9e-07, - "output_cost_per_token": 7.9e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "deepinfra/01-ai/Yi-34B-200K": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 6e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "deepinfra", - "mode": "completion" - }, - "deepinfra/openchat/openchat_3.5": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1.3e-07, - "output_cost_per_token": 1.3e-07, - "litellm_provider": "deepinfra", - "mode": "chat" - }, - "perplexity/codellama-34b-instruct": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 3.5e-07, - "output_cost_per_token": 1.4e-06, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/codellama-70b-instruct": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 7e-07, - "output_cost_per_token": 2.8e-06, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/llama-3.1-70b-instruct": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 1e-06, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/llama-3.1-8b-instruct": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/llama-3.1-sonar-huge-128k-online": { - "max_tokens": 127072, - "max_input_tokens": 127072, - "max_output_tokens": 127072, - "input_cost_per_token": 5e-06, - "output_cost_per_token": 5e-06, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/llama-3.1-sonar-large-128k-online": { - "max_tokens": 127072, - "max_input_tokens": 127072, - "max_output_tokens": 127072, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 1e-06, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/llama-3.1-sonar-large-128k-chat": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 1e-06, - 
"litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/llama-3.1-sonar-small-128k-chat": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/llama-3.1-sonar-small-128k-online": { - "max_tokens": 127072, - "max_input_tokens": 127072, - "max_output_tokens": 127072, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/pplx-7b-chat": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 7e-08, - "output_cost_per_token": 2.8e-07, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/pplx-70b-chat": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 7e-07, - "output_cost_per_token": 2.8e-06, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/pplx-7b-online": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 2.8e-07, - "input_cost_per_request": 0.005, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/pplx-70b-online": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 0.0, - "output_cost_per_token": 2.8e-06, - "input_cost_per_request": 0.005, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/llama-2-70b-chat": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 7e-07, - "output_cost_per_token": 2.8e-06, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/mistral-7b-instruct": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 7e-08, - "output_cost_per_token": 2.8e-07, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/mixtral-8x7b-instruct": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 7e-08, - "output_cost_per_token": 2.8e-07, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/sonar-small-chat": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 7e-08, - "output_cost_per_token": 2.8e-07, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/sonar-small-online": { - "max_tokens": 12000, - "max_input_tokens": 12000, - "max_output_tokens": 12000, - "input_cost_per_token": 0, - "output_cost_per_token": 2.8e-07, - "input_cost_per_request": 0.005, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/sonar-medium-chat": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 6e-07, - "output_cost_per_token": 1.8e-06, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "perplexity/sonar-medium-online": { - "max_tokens": 12000, - "max_input_tokens": 12000, - "max_output_tokens": 12000, - "input_cost_per_token": 0, - "output_cost_per_token": 1.8e-06, - "input_cost_per_request": 0.005, - "litellm_provider": "perplexity", - "mode": "chat" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p2-1b-instruct": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 
1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p2-3b-instruct": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/accounts/fireworks/models/llama-v3p2-11b-vision-instruct": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" - }, - "accounts/fireworks/models/llama-v3p2-90b-vision-instruct": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/accounts/fireworks/models/firefunction-v2": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/accounts/fireworks/models/mixtral-8x22b-instruct-hf": { - "max_tokens": 65536, - "max_input_tokens": 65536, - "max_output_tokens": 65536, - "input_cost_per_token": 1.2e-06, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/accounts/fireworks/models/qwen2-72b-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/accounts/fireworks/models/yi-large": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 3e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-instruct": { - "max_tokens": 65536, - "max_input_tokens": 65536, - "max_output_tokens": 8192, - "input_cost_per_token": 1.2e-06, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/nomic-ai/nomic-embed-text-v1.5": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "input_cost_per_token": 8e-09, 
- "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai-embedding-models", - "mode": "embedding", - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/nomic-ai/nomic-embed-text-v1": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "input_cost_per_token": 8e-09, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai-embedding-models", - "mode": "embedding", - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/WhereIsAI/UAE-Large-V1": { - "max_tokens": 512, - "max_input_tokens": 512, - "input_cost_per_token": 1.6e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai-embedding-models", - "mode": "embedding", - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/thenlper/gte-large": { - "max_tokens": 512, - "max_input_tokens": 512, - "input_cost_per_token": 1.6e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai-embedding-models", - "mode": "embedding", - "source": "https://fireworks.ai/pricing" - }, - "fireworks_ai/thenlper/gte-base": { - "max_tokens": 512, - "max_input_tokens": 512, - "input_cost_per_token": 8e-09, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai-embedding-models", - "mode": "embedding", - "source": "https://fireworks.ai/pricing" - }, - "fireworks-ai-up-to-16b": { - "input_cost_per_token": 2e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "fireworks_ai" - }, - "fireworks-ai-16.1b-to-80b": { - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai" - }, - "fireworks-ai-moe-up-to-56b": { - "input_cost_per_token": 5e-07, - "output_cost_per_token": 5e-07, - "litellm_provider": "fireworks_ai" - }, - "fireworks-ai-56b-to-176b": { - "input_cost_per_token": 1.2e-06, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "fireworks_ai" - }, - "fireworks-ai-default": { - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai" - }, - "fireworks-ai-embedding-up-to-150m": { - "input_cost_per_token": 8e-09, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai-embedding-models" - }, - "fireworks-ai-embedding-150m-to-350m": { - "input_cost_per_token": 1.6e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "fireworks_ai-embedding-models" - }, - "anyscale/mistralai/Mistral-7B-Instruct-v0.1": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 1.5e-07, - "litellm_provider": "anyscale", - "mode": "chat", - "supports_function_calling": true, - "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mistral-7B-Instruct-v0.1" - }, - "anyscale/mistralai/Mixtral-8x7B-Instruct-v0.1": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 1.5e-07, - "litellm_provider": "anyscale", - "mode": "chat", - "supports_function_calling": true, - "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x7B-Instruct-v0.1" - }, - "anyscale/mistralai/Mixtral-8x22B-Instruct-v0.1": { - "max_tokens": 65536, - "max_input_tokens": 65536, - "max_output_tokens": 65536, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "anyscale", - "mode": "chat", - "supports_function_calling": true, - "source": 
"https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x22B-Instruct-v0.1" - }, - "anyscale/HuggingFaceH4/zephyr-7b-beta": { - "max_tokens": 16384, - "max_input_tokens": 16384, - "max_output_tokens": 16384, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 1.5e-07, - "litellm_provider": "anyscale", - "mode": "chat" - }, - "anyscale/google/gemma-7b-it": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 1.5e-07, - "litellm_provider": "anyscale", - "mode": "chat", - "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/google-gemma-7b-it" - }, - "anyscale/meta-llama/Llama-2-7b-chat-hf": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 1.5e-07, - "litellm_provider": "anyscale", - "mode": "chat" - }, - "anyscale/meta-llama/Llama-2-13b-chat-hf": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 2.5e-07, - "litellm_provider": "anyscale", - "mode": "chat" - }, - "anyscale/meta-llama/Llama-2-70b-chat-hf": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 1e-06, - "litellm_provider": "anyscale", - "mode": "chat" - }, - "anyscale/codellama/CodeLlama-34b-Instruct-hf": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 1e-06, - "litellm_provider": "anyscale", - "mode": "chat" - }, - "anyscale/codellama/CodeLlama-70b-Instruct-hf": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 1e-06, - "litellm_provider": "anyscale", - "mode": "chat", - "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/codellama-CodeLlama-70b-Instruct-hf" - }, - "anyscale/meta-llama/Meta-Llama-3-8B-Instruct": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 1.5e-07, - "litellm_provider": "anyscale", - "mode": "chat", - "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-8B-Instruct" - }, - "anyscale/meta-llama/Meta-Llama-3-70B-Instruct": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 1e-06, - "litellm_provider": "anyscale", - "mode": "chat", - "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-70B-Instruct" - }, - "cloudflare/@cf/meta/llama-2-7b-chat-fp16": { - "max_tokens": 3072, - "max_input_tokens": 3072, - "max_output_tokens": 3072, - "input_cost_per_token": 1.923e-06, - "output_cost_per_token": 1.923e-06, - "litellm_provider": "cloudflare", - "mode": "chat" - }, - "cloudflare/@cf/meta/llama-2-7b-chat-int8": { - "max_tokens": 2048, - "max_input_tokens": 2048, - "max_output_tokens": 2048, - "input_cost_per_token": 1.923e-06, - "output_cost_per_token": 1.923e-06, - "litellm_provider": "cloudflare", - "mode": "chat" - }, - "cloudflare/@cf/mistral/mistral-7b-instruct-v0.1": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - 
"input_cost_per_token": 1.923e-06, - "output_cost_per_token": 1.923e-06, - "litellm_provider": "cloudflare", - "mode": "chat" - }, - "cloudflare/@hf/thebloke/codellama-7b-instruct-awq": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1.923e-06, - "output_cost_per_token": 1.923e-06, - "litellm_provider": "cloudflare", - "mode": "chat" - }, - "voyage/voyage-01": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "embedding" - }, - "voyage/voyage-lite-01": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "embedding" - }, - "voyage/voyage-large-2": { - "max_tokens": 16000, - "max_input_tokens": 16000, - "input_cost_per_token": 1.2e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "embedding" - }, - "voyage/voyage-law-2": { - "max_tokens": 16000, - "max_input_tokens": 16000, - "input_cost_per_token": 1.2e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "embedding" - }, - "voyage/voyage-code-2": { - "max_tokens": 16000, - "max_input_tokens": 16000, - "input_cost_per_token": 1.2e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "embedding" - }, - "voyage/voyage-2": { - "max_tokens": 4000, - "max_input_tokens": 4000, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "embedding" - }, - "voyage/voyage-lite-02-instruct": { - "max_tokens": 4000, - "max_input_tokens": 4000, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "embedding" - }, - "voyage/voyage-finance-2": { - "max_tokens": 32000, - "max_input_tokens": 32000, - "input_cost_per_token": 1.2e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "embedding" - }, - "databricks/databricks-meta-llama-3-1-405b-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 5e-06, - "input_dbu_cost_per_token": 7.1429e-05, - "output_cost_per_token": 1.500002e-05, - "output_db_cost_per_token": 0.000214286, - "litellm_provider": "databricks", - "mode": "chat", - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - } - }, - "databricks/databricks-meta-llama-3-1-70b-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 1.00002e-06, - "input_dbu_cost_per_token": 1.4286e-05, - "output_cost_per_token": 2.99999e-06, - "output_dbu_cost_per_token": 4.2857e-05, - "litellm_provider": "databricks", - "mode": "chat", - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
- } - }, - "databricks/databricks-dbrx-instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 7.4998e-07, - "input_dbu_cost_per_token": 1.0714e-05, - "output_cost_per_token": 2.24901e-06, - "output_dbu_cost_per_token": 3.2143e-05, - "litellm_provider": "databricks", - "mode": "chat", - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - } - }, - "databricks/databricks-meta-llama-3-70b-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 1.00002e-06, - "input_dbu_cost_per_token": 1.4286e-05, - "output_cost_per_token": 2.99999e-06, - "output_dbu_cost_per_token": 4.2857e-05, - "litellm_provider": "databricks", - "mode": "chat", - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - } - }, - "databricks/databricks-llama-2-70b-chat": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 5.0001e-07, - "input_dbu_cost_per_token": 7.143e-06, - "output_cost_per_token": 1.5e-06, - "output_dbu_cost_per_token": 2.1429e-05, - "litellm_provider": "databricks", - "mode": "chat", - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - } - }, - "databricks/databricks-mixtral-8x7b-instruct": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 5.0001e-07, - "input_dbu_cost_per_token": 7.143e-06, - "output_cost_per_token": 9.9902e-07, - "output_dbu_cost_per_token": 1.4286e-05, - "litellm_provider": "databricks", - "mode": "chat", - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - } - }, - "databricks/databricks-mpt-30b-instruct": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 9.9902e-07, - "input_dbu_cost_per_token": 1.4286e-05, - "output_cost_per_token": 9.9902e-07, - "output_dbu_cost_per_token": 1.4286e-05, - "litellm_provider": "databricks", - "mode": "chat", - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
- } - }, - "databricks/databricks-mpt-7b-instruct": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 5.0001e-07, - "input_dbu_cost_per_token": 7.143e-06, - "output_cost_per_token": 0.0, - "output_dbu_cost_per_token": 0.0, - "litellm_provider": "databricks", - "mode": "chat", - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - } - }, - "databricks/databricks-bge-large-en": { - "max_tokens": 512, - "max_input_tokens": 512, - "output_vector_size": 1024, - "input_cost_per_token": 1.0003e-07, - "input_dbu_cost_per_token": 1.429e-06, - "output_cost_per_token": 0.0, - "output_dbu_cost_per_token": 0.0, - "litellm_provider": "databricks", - "mode": "embedding", - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - } - }, - "databricks/databricks-gte-large-en": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "output_vector_size": 1024, - "input_cost_per_token": 1.2999e-07, - "input_dbu_cost_per_token": 1.857e-06, - "output_cost_per_token": 0.0, - "output_dbu_cost_per_token": 0.0, - "litellm_provider": "databricks", - "mode": "embedding", - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." - } - }, - "azure/gpt-4o-mini-2024-07-18": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 1.65e-07, - "output_cost_per_token": 6.6e-07, - "cache_read_input_token_cost": 7.5e-08, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true, - "supports_prompt_caching": true - }, - "amazon.titan-embed-image-v1": { - "max_tokens": 128, - "max_input_tokens": 128, - "output_vector_size": 1024, - "input_cost_per_token": 8e-07, - "input_cost_per_image": 6e-05, - "output_cost_per_token": 0.0, - "litellm_provider": "bedrock", - "supports_image_input": true, - "supports_embedding_image_input": true, - "mode": "embedding", - "source": "https://us-east-1.console.aws.amazon.com/bedrock/home?region=us-east-1#/providers?model=amazon.titan-image-generator-v1", - "metadata": { - "notes": "'supports_image_input' is a deprecated field. Use 'supports_embedding_image_input' instead." 
- } - }, - "azure_ai/mistral-large-2407": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 6e-06, - "litellm_provider": "azure_ai", - "supports_function_calling": true, - "mode": "chat", - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-ai-large-2407-offer?tab=Overview" - }, - "azure_ai/ministral-3b": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 4e-08, - "output_cost_per_token": 4e-08, - "litellm_provider": "azure_ai", - "supports_function_calling": true, - "mode": "chat", - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.ministral-3b-2410-offer?tab=Overview" - }, - "azure_ai/Llama-3.2-11B-Vision-Instruct": { - "max_tokens": 2048, - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "input_cost_per_token": 3.7e-07, - "output_cost_per_token": 3.7e-07, - "litellm_provider": "azure_ai", - "supports_function_calling": true, - "supports_vision": true, - "mode": "chat", - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-11b-vision-instruct-offer?tab=Overview" - }, - "azure_ai/Llama-3.2-90B-Vision-Instruct": { - "max_tokens": 2048, - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "input_cost_per_token": 2.04e-06, - "output_cost_per_token": 2.04e-06, - "litellm_provider": "azure_ai", - "supports_function_calling": true, - "supports_vision": true, - "mode": "chat", - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-90b-vision-instruct-offer?tab=Overview" - }, - "azure_ai/Phi-3.5-mini-instruct": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.3e-07, - "output_cost_per_token": 5.2e-07, - "litellm_provider": "azure_ai", - "mode": "chat", - "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" - }, - "azure_ai/Phi-3.5-vision-instruct": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.3e-07, - "output_cost_per_token": 5.2e-07, - "litellm_provider": "azure_ai", - "mode": "chat", - "supports_vision": true, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" - }, - "azure_ai/Phi-3.5-MoE-instruct": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.6e-07, - "output_cost_per_token": 6.4e-07, - "litellm_provider": "azure_ai", - "mode": "chat", - "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" - }, - "azure_ai/Phi-3-mini-4k-instruct": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1.3e-07, - "output_cost_per_token": 5.2e-07, - "litellm_provider": "azure_ai", - "mode": "chat", - "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" - }, - "azure_ai/Phi-3-mini-128k-instruct": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.3e-07, - "output_cost_per_token": 5.2e-07, - "litellm_provider": "azure_ai", - "mode": "chat", - "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" - }, - "azure_ai/Phi-3-small-8k-instruct": { - "max_tokens": 4096, - "max_input_tokens": 8192, - 
"max_output_tokens": 4096, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "azure_ai", - "mode": "chat", - "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" - }, - "azure_ai/Phi-3-small-128k-instruct": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "azure_ai", - "mode": "chat", - "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" - }, - "azure_ai/Phi-3-medium-4k-instruct": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 1.7e-07, - "output_cost_per_token": 6.8e-07, - "litellm_provider": "azure_ai", - "mode": "chat", - "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" - }, - "azure_ai/Phi-3-medium-128k-instruct": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.7e-07, - "output_cost_per_token": 6.8e-07, - "litellm_provider": "azure_ai", - "mode": "chat", - "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" - }, - "xai/grok-beta": { - "max_tokens": 131072, - "max_input_tokens": 131072, - "max_output_tokens": 131072, - "input_cost_per_token": 5e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "xai", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true - }, - "claude-3-5-haiku-20241022": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 5e-06, - "cache_creation_input_token_cost": 1.25e-06, - "cache_read_input_token_cost": 1e-07, - "litellm_provider": "anthropic", - "mode": "chat", - "supports_function_calling": true, - "tool_use_system_prompt_tokens": 264, - "supports_assistant_prefill": true, - "supports_prompt_caching": true, - "supports_response_schema": true - }, - "vertex_ai/claude-3-5-haiku@20241022": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 5e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true - }, - "openrouter/anthropic/claude-3-5-haiku": { - "max_tokens": 200000, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 5e-06, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true - }, - "openrouter/anthropic/claude-3-5-haiku-20241022": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 5e-06, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "tool_use_system_prompt_tokens": 264 - }, - "anthropic.claude-3-5-haiku-20241022-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 5e-06, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_assistant_prefill": true, - "supports_function_calling": true, - "supports_prompt_caching": true - }, - "us.anthropic.claude-3-5-haiku-20241022-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 1e-06, - 
"output_cost_per_token": 5e-06, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_assistant_prefill": true, - "supports_function_calling": true - }, - "eu.anthropic.claude-3-5-haiku-20241022-v1:0": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 5e-06, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true - }, - "stability.sd3-large-v1:0": { - "max_tokens": 77, - "max_input_tokens": 77, - "output_cost_per_image": 0.08, - "litellm_provider": "bedrock", - "mode": "image_generation" - }, - "gpt-4o-2024-11-20": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 2.5e-06, - "output_cost_per_token": 1e-05, - "input_cost_per_token_batches": 1.25e-06, - "output_cost_per_token_batches": 5e-06, - "cache_read_input_token_cost": 1.25e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "ft:gpt-4o-2024-11-20": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 3.75e-06, - "cache_creation_input_token_cost": 1.875e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true - }, - "azure/gpt-4o-2024-11-20": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 2.75e-06, - "output_cost_per_token": 1.1e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true - }, - "azure/global-standard/gpt-4o-2024-11-20": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 2.5e-06, - "output_cost_per_token": 1e-05, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "supports_vision": true - }, - "groq/llama-3.2-1b-preview": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 4e-08, - "output_cost_per_token": 4e-08, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/llama-3.2-3b-preview": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 6e-08, - "output_cost_per_token": 6e-08, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/llama-3.2-11b-text-preview": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 1.8e-07, - "output_cost_per_token": 1.8e-07, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/llama-3.2-11b-vision-preview": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - 
"input_cost_per_token": 1.8e-07, - "output_cost_per_token": 1.8e-07, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_vision": true - }, - "groq/llama-3.2-90b-text-preview": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true - }, - "groq/llama-3.2-90b-vision-preview": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "groq", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true, - "supports_vision": true - }, - "vertex_ai/claude-3-sonnet": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_assistant_prefill": true - }, - "vertex_ai/claude-3-5-sonnet": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_assistant_prefill": true - }, - "vertex_ai/claude-3-5-sonnet-v2": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 3e-06, - "output_cost_per_token": 1.5e-05, - "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_assistant_prefill": true - }, - "vertex_ai/claude-3-haiku": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 2.5e-07, - "output_cost_per_token": 1.25e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_assistant_prefill": true - }, - "vertex_ai/claude-3-5-haiku": { - "max_tokens": 8192, - "max_input_tokens": 200000, - "max_output_tokens": 8192, - "input_cost_per_token": 1e-06, - "output_cost_per_token": 5e-06, - "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true - }, - "vertex_ai/claude-3-opus": { - "max_tokens": 4096, - "max_input_tokens": 200000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 7.5e-05, - "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_assistant_prefill": true - }, - "gemini/gemini-exp-1114": { - "max_tokens": 8192, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - 
"supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "tpm": 4000000, - "rpm": 1000, - "source": "https://ai.google.dev/pricing", - "metadata": { - "notes": "Rate limits not documented for gemini-exp-1114. Assuming same as gemini-1.5-pro." - } - }, - "openrouter/qwen/qwen-2.5-coder-32b-instruct": { - "max_tokens": 33792, - "max_input_tokens": 33792, - "max_output_tokens": 33792, - "input_cost_per_token": 1.8e-07, - "output_cost_per_token": 1.8e-07, - "litellm_provider": "openrouter", - "mode": "chat" - }, - "us.meta.llama3-1-8b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "input_cost_per_token": 2.2e-07, - "output_cost_per_token": 2.2e-07, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.meta.llama3-1-70b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "input_cost_per_token": 9.9e-07, - "output_cost_per_token": 9.9e-07, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "us.meta.llama3-1-405b-instruct-v1:0": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 5.32e-06, - "output_cost_per_token": 1.6e-05, - "litellm_provider": "bedrock", - "mode": "chat", - "supports_function_calling": true, - "supports_tool_choice": false - }, - "stability.stable-image-ultra-v1:0": { - "max_tokens": 77, - "max_input_tokens": 77, - "output_cost_per_image": 0.14, - "litellm_provider": "bedrock", - "mode": "image_generation" - }, - "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat", - "supports_function_calling": true, - "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" - }, - "omni-moderation-latest": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 0, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "openai", - "mode": "moderation" - }, - "omni-moderation-latest-intents": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 0, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "openai", - "mode": "moderation" - }, - "omni-moderation-2024-09-26": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 0, - "input_cost_per_token": 0.0, - "output_cost_per_token": 0.0, - "litellm_provider": "openai", - "mode": "moderation" - }, - "gpt-4o-audio-preview-2024-12-17": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 2.5e-06, - "input_cost_per_audio_token": 4e-05, - "output_cost_per_token": 1e-05, - "output_cost_per_audio_token": 8e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_system_messages": true - }, - "gpt-4o-mini-audio-preview-2024-12-17": { - "max_tokens": 16384, - "max_input_tokens": 128000, - "max_output_tokens": 16384, - "input_cost_per_token": 1.5e-07, - "input_cost_per_audio_token": 1e-05, - "output_cost_per_token": 6e-07, - 
"output_cost_per_audio_token": 2e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_system_messages": true - }, - "o1": { - "max_tokens": 100000, - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 6e-05, - "cache_read_input_token_cost": 7.5e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_response_schema": true - }, - "o1-2024-12-17": { - "max_tokens": 100000, - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 6e-05, - "cache_read_input_token_cost": 7.5e-06, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_response_schema": true - }, - "gpt-4o-realtime-preview-2024-10-01": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-06, - "input_cost_per_audio_token": 0.0001, - "cache_read_input_token_cost": 2.5e-06, - "cache_creation_input_audio_token_cost": 2e-05, - "output_cost_per_token": 2e-05, - "output_cost_per_audio_token": 0.0002, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_system_messages": true - }, - "gpt-4o-realtime-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-06, - "input_cost_per_audio_token": 4e-05, - "cache_read_input_token_cost": 2.5e-06, - "output_cost_per_token": 2e-05, - "output_cost_per_audio_token": 8e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_system_messages": true - }, - "gpt-4o-realtime-preview-2024-12-17": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 5e-06, - "input_cost_per_audio_token": 4e-05, - "cache_read_input_token_cost": 2.5e-06, - "output_cost_per_token": 2e-05, - "output_cost_per_audio_token": 8e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_system_messages": true - }, - "gpt-4o-mini-realtime-preview": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 6e-07, - "input_cost_per_audio_token": 1e-05, - "cache_read_input_token_cost": 3e-07, - "cache_creation_input_audio_token_cost": 3e-07, - "output_cost_per_token": 2.4e-06, - "output_cost_per_audio_token": 2e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_system_messages": true - }, - 
"gpt-4o-mini-realtime-preview-2024-12-17": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 6e-07, - "input_cost_per_audio_token": 1e-05, - "cache_read_input_token_cost": 3e-07, - "cache_creation_input_audio_token_cost": 3e-07, - "output_cost_per_token": 2.4e-06, - "output_cost_per_audio_token": 2e-05, - "litellm_provider": "openai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_audio_input": true, - "supports_audio_output": true, - "supports_system_messages": true - }, - "azure/o1": { - "max_tokens": 100000, - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 6e-05, - "cache_read_input_token_cost": 7.5e-06, - "litellm_provider": "azure", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true, - "supports_prompt_caching": true - }, - "azure_ai/Llama-3.3-70B-Instruct": { - "max_tokens": 2048, - "max_input_tokens": 128000, - "max_output_tokens": 2048, - "input_cost_per_token": 7.1e-07, - "output_cost_per_token": 7.1e-07, - "litellm_provider": "azure_ai", - "supports_function_calling": true, - "mode": "chat", - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.llama-3-3-70b-instruct-offer?tab=Overview" - }, - "mistral/mistral-large-2411": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 6e-06, - "litellm_provider": "mistral", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true - }, - "mistral/pixtral-large-latest": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 6e-06, - "litellm_provider": "mistral", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true, - "supports_vision": true - }, - "mistral/pixtral-large-2411": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 6e-06, - "litellm_provider": "mistral", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true, - "supports_vision": true - }, - "deepseek/deepseek-chat": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.4e-07, - "input_cost_per_token_cache_hit": 1.4e-08, - "cache_read_input_token_cost": 1.4e-08, - "cache_creation_input_token_cost": 0.0, - "output_cost_per_token": 2.8e-07, - "litellm_provider": "deepseek", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true, - "supports_tool_choice": true, - "supports_prompt_caching": true - }, - "deepseek/deepseek-coder": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.4e-07, - "input_cost_per_token_cache_hit": 1.4e-08, - "output_cost_per_token": 2.8e-07, - "litellm_provider": "deepseek", - "mode": "chat", - "supports_function_calling": true, - "supports_assistant_prefill": true, - "supports_tool_choice": true, - "supports_prompt_caching": true - }, - "groq/llama-3.3-70b-versatile": { - "max_tokens": 8192, - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "input_cost_per_token": 5.9e-07, - "output_cost_per_token": 7.9e-07, - 
"litellm_provider": "groq", - "mode": "chat" - }, - "groq/llama-3.3-70b-specdec": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 5.9e-07, - "output_cost_per_token": 9.9e-07, - "litellm_provider": "groq", - "mode": "chat" - }, - "friendliai/meta-llama-3.1-8b-instruct": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 1e-07, - "litellm_provider": "friendliai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_response_schema": true - }, - "friendliai/meta-llama-3.1-70b-instruct": { - "max_tokens": 8192, - "max_input_tokens": 8192, - "max_output_tokens": 8192, - "input_cost_per_token": 6e-07, - "output_cost_per_token": 6e-07, - "litellm_provider": "friendliai", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_system_messages": true, - "supports_response_schema": true - }, - "gemini-2.0-flash-exp": { - "max_tokens": 8192, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_image": 0, - "input_cost_per_video_per_second": 0, - "input_cost_per_audio_per_second": 0, - "input_cost_per_token": 0, - "input_cost_per_character": 0, - "input_cost_per_token_above_128k_tokens": 0, - "input_cost_per_character_above_128k_tokens": 0, - "input_cost_per_image_above_128k_tokens": 0, - "input_cost_per_video_per_second_above_128k_tokens": 0, - "input_cost_per_audio_per_second_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_character": 0, - "output_cost_per_token_above_128k_tokens": 0, - "output_cost_per_character_above_128k_tokens": 0, - "litellm_provider": "vertex_ai-language-models", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "supports_audio_output": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash" - }, - "gemini/gemini-2.0-flash-exp": { - "max_tokens": 8192, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_image": 0, - "input_cost_per_video_per_second": 0, - "input_cost_per_audio_per_second": 0, - "input_cost_per_token": 0, - "input_cost_per_character": 0, - "input_cost_per_token_above_128k_tokens": 0, - "input_cost_per_character_above_128k_tokens": 0, - "input_cost_per_image_above_128k_tokens": 0, - "input_cost_per_video_per_second_above_128k_tokens": 0, - "input_cost_per_audio_per_second_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_character": 0, - "output_cost_per_token_above_128k_tokens": 0, - "output_cost_per_character_above_128k_tokens": 0, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "supports_audio_output": true, - "tpm": 4000000, - "rpm": 10, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash" 
- }, - "vertex_ai/mistral-large@2411-001": { - "max_tokens": 8191, - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 6e-06, - "litellm_provider": "vertex_ai-mistral_models", - "mode": "chat", - "supports_function_calling": true - }, - "vertex_ai/mistral-large-2411": { - "max_tokens": 8191, - "max_input_tokens": 128000, - "max_output_tokens": 8191, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 6e-06, - "litellm_provider": "vertex_ai-mistral_models", - "mode": "chat", - "supports_function_calling": true - }, - "text-embedding-005": { - "max_tokens": 2048, - "max_input_tokens": 2048, - "output_vector_size": 768, - "input_cost_per_character": 2.5e-08, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 0, - "litellm_provider": "vertex_ai-embedding-models", - "mode": "embedding", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models" - }, - "gemini/gemini-1.5-flash-8b": { - "max_tokens": 8192, - "max_input_tokens": 1048576, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "supports_prompt_caching": true, - "tpm": 4000000, - "rpm": 4000, - "source": "https://ai.google.dev/pricing" - }, - "gemini/gemini-exp-1206": { - "max_tokens": 8192, - "max_input_tokens": 2097152, - "max_output_tokens": 8192, - "max_images_per_prompt": 3000, - "max_videos_per_prompt": 10, - "max_video_length": 1, - "max_audio_length_hours": 8.4, - "max_audio_per_prompt": 1, - "max_pdf_size_mb": 30, - "input_cost_per_token": 0, - "input_cost_per_token_above_128k_tokens": 0, - "output_cost_per_token": 0, - "output_cost_per_token_above_128k_tokens": 0, - "litellm_provider": "gemini", - "mode": "chat", - "supports_system_messages": true, - "supports_function_calling": true, - "supports_vision": true, - "supports_response_schema": true, - "tpm": 4000000, - "rpm": 1000, - "source": "https://ai.google.dev/pricing", - "metadata": { - "notes": "Rate limits not documented for gemini-exp-1206. Assuming same as gemini-1.5-pro." 
- } - }, - "command-r7b-12-2024": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.5e-07, - "output_cost_per_token": 3.75e-08, - "litellm_provider": "cohere_chat", - "mode": "chat", - "supports_function_calling": true, - "source": "https://docs.cohere.com/v2/docs/command-r7b" - }, - "rerank-v3.5": { - "max_tokens": 4096, - "max_input_tokens": 4096, - "max_output_tokens": 4096, - "max_query_tokens": 2048, - "input_cost_per_token": 0.0, - "input_cost_per_query": 0.002, - "output_cost_per_token": 0.0, - "litellm_provider": "cohere", - "mode": "rerank" - }, - "openrouter/deepseek/deepseek-chat": { - "max_tokens": 8192, - "max_input_tokens": 66000, - "max_output_tokens": 4096, - "input_cost_per_token": 1.4e-07, - "output_cost_per_token": 2.8e-07, - "litellm_provider": "openrouter", - "supports_prompt_caching": true, - "mode": "chat" - }, - "openrouter/openai/o1": { - "max_tokens": 100000, - "max_input_tokens": 200000, - "max_output_tokens": 100000, - "input_cost_per_token": 1.5e-05, - "output_cost_per_token": 6e-05, - "cache_read_input_token_cost": 7.5e-06, - "litellm_provider": "openrouter", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_vision": true, - "supports_prompt_caching": true, - "supports_system_messages": true, - "supports_response_schema": true - }, - "amazon.nova-micro-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 300000, - "max_output_tokens": 4096, - "input_cost_per_token": 3.5e-08, - "output_cost_per_token": 1.4e-07, - "litellm_provider": "bedrock_converse", - "mode": "chat", - "supports_function_calling": true, - "supports_prompt_caching": true - }, - "amazon.nova-lite-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 6e-08, - "output_cost_per_token": 2.4e-07, - "litellm_provider": "bedrock_converse", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_pdf_input": true, - "supports_prompt_caching": true - }, - "amazon.nova-pro-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 300000, - "max_output_tokens": 4096, - "input_cost_per_token": 8e-07, - "output_cost_per_token": 3.2e-06, - "litellm_provider": "bedrock_converse", - "mode": "chat", - "supports_function_calling": true, - "supports_vision": true, - "supports_pdf_input": true, - "supports_prompt_caching": true - }, - "meta.llama3-3-70b-instruct-v1:0": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 7.2e-07, - "output_cost_per_token": 7.2e-07, - "litellm_provider": "bedrock_converse", - "mode": "chat" - }, - "together_ai/meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": { - "input_cost_per_token": 1.8e-07, - "output_cost_per_token": 1.8e-07, - "litellm_provider": "together_ai", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "mode": "chat" - }, - "together_ai/meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": { - "input_cost_per_token": 8.8e-07, - "output_cost_per_token": 8.8e-07, - "litellm_provider": "together_ai", - "supports_function_calling": true, - "supports_parallel_function_calling": true, - "supports_response_schema": true, - "mode": "chat" - }, - "together_ai/meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": { - "input_cost_per_token": 3.5e-06, - "output_cost_per_token": 3.5e-06, - "litellm_provider": "together_ai", - "supports_function_calling": 
true, - "supports_parallel_function_calling": true, - "mode": "chat" - }, - "deepinfra/meta-llama/Meta-Llama-3.1-405B-Instruct": { - "max_tokens": 32768, - "max_input_tokens": 32768, - "max_output_tokens": 32768, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "deepinfra", - "mode": "chat", - "supports_function_calling": true, - "supports_parallel_function_calling": true - }, - "fireworks_ai/accounts/fireworks/models/deepseek-v3": { - "max_tokens": 8192, - "max_input_tokens": 128000, - "max_output_tokens": 8192, - "input_cost_per_token": 9e-07, - "output_cost_per_token": 9e-07, - "litellm_provider": "fireworks_ai", - "mode": "chat", - "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" - }, - "voyage/voyage-3-large": { - "max_tokens": 32000, - "max_input_tokens": 32000, - "input_cost_per_token": 1.8e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "embedding" - }, - "voyage/voyage-3": { - "max_tokens": 32000, - "max_input_tokens": 32000, - "input_cost_per_token": 6e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "embedding" - }, - "voyage/voyage-3-lite": { - "max_tokens": 32000, - "max_input_tokens": 32000, - "input_cost_per_token": 2e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "embedding" - }, - "voyage/voyage-code-3": { - "max_tokens": 32000, - "max_input_tokens": 32000, - "input_cost_per_token": 1.8e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "embedding" - }, - "voyage/voyage-multimodal-3": { - "max_tokens": 32000, - "max_input_tokens": 32000, - "input_cost_per_token": 1.2e-07, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "embedding" - }, - "voyage/rerank-2": { - "max_tokens": 16000, - "max_input_tokens": 16000, - "max_output_tokens": 16000, - "max_query_tokens": 16000, - "input_cost_per_token": 5e-08, - "input_cost_per_query": 5e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "rerank" - }, - "voyage/rerank-2-lite": { - "max_tokens": 8000, - "max_input_tokens": 8000, - "max_output_tokens": 8000, - "max_query_tokens": 8000, - "input_cost_per_token": 2e-08, - "input_cost_per_query": 2e-08, - "output_cost_per_token": 0.0, - "litellm_provider": "voyage", - "mode": "rerank" - }, - "databricks/meta-llama-3.3-70b-instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 1.00002e-06, - "input_dbu_cost_per_token": 1.4286e-05, - "output_cost_per_token": 2.99999e-06, - "output_dbu_cost_per_token": 4.2857e-05, - "litellm_provider": "databricks", - "mode": "chat", - "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": { - "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
- } - }, - "sambanova/Meta-Llama-3.1-8B-Instruct": { - "max_tokens": 16000, - "max_input_tokens": 16000, - "max_output_tokens": 16000, - "input_cost_per_token": 1e-07, - "output_cost_per_token": 2e-07, - "litellm_provider": "sambanova", - "supports_function_calling": true, - "mode": "chat" - }, - "sambanova/Meta-Llama-3.1-70B-Instruct": { - "max_tokens": 128000, - "max_input_tokens": 128000, - "max_output_tokens": 128000, - "input_cost_per_token": 6e-07, - "output_cost_per_token": 1.2e-06, - "litellm_provider": "sambanova", - "supports_function_calling": true, - "mode": "chat" - }, - "sambanova/Meta-Llama-3.1-405B-Instruct": { - "max_tokens": 16000, - "max_input_tokens": 16000, - "max_output_tokens": 16000, - "input_cost_per_token": 5e-06, - "output_cost_per_token": 1e-05, - "litellm_provider": "sambanova", - "supports_function_calling": true, - "mode": "chat" - }, - "sambanova/Meta-Llama-3.2-1B-Instruct": { - "max_tokens": 16000, - "max_input_tokens": 16000, - "max_output_tokens": 16000, - "input_cost_per_token": 4e-07, - "output_cost_per_token": 8e-07, - "litellm_provider": "sambanova", - "supports_function_calling": true, - "mode": "chat" - }, - "sambanova/Meta-Llama-3.2-3B-Instruct": { - "max_tokens": 4000, - "max_input_tokens": 4000, - "max_output_tokens": 4000, - "input_cost_per_token": 8e-07, - "output_cost_per_token": 1.6e-06, - "litellm_provider": "sambanova", - "supports_function_calling": true, - "mode": "chat" - }, - "sambanova/Qwen2.5-Coder-32B-Instruct": { - "max_tokens": 8000, - "max_input_tokens": 8000, - "max_output_tokens": 8000, - "input_cost_per_token": 1.5e-06, - "output_cost_per_token": 3e-06, - "litellm_provider": "sambanova", - "supports_function_calling": true, - "mode": "chat" - }, - "sambanova/Qwen2.5-72B-Instruct": { - "max_tokens": 8000, - "max_input_tokens": 8000, - "max_output_tokens": 8000, - "input_cost_per_token": 2e-06, - "output_cost_per_token": 4e-06, - "litellm_provider": "sambanova", - "supports_function_calling": true, - "mode": "chat" - } -} \ No newline at end of file + "gpt-4": { + max_tokens: 4096, + max_input_tokens: 8192, + max_output_tokens: 4096, + input_cost_per_token: 3e-5, + output_cost_per_token: 6e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4o": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 2.5e-6, + output_cost_per_token: 1e-5, + input_cost_per_token_batches: 1.25e-6, + output_cost_per_token_batches: 5e-6, + cache_read_input_token_cost: 1.25e-6, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4o-audio-preview": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 2.5e-6, + input_cost_per_audio_token: 0.0001, + output_cost_per_token: 1e-5, + output_cost_per_audio_token: 0.0002, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_audio_input: true, + supports_audio_output: true, + supports_system_messages: true, + }, + "gpt-4o-audio-preview-2024-10-01": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 2.5e-6, + input_cost_per_audio_token: 0.0001, + 
output_cost_per_token: 1e-5, + output_cost_per_audio_token: 0.0002, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_audio_input: true, + supports_audio_output: true, + supports_system_messages: true, + }, + "gpt-4o-mini": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 6e-7, + input_cost_per_token_batches: 7.5e-8, + output_cost_per_token_batches: 3e-7, + cache_read_input_token_cost: 7.5e-8, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4o-mini-2024-07-18": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 6e-7, + input_cost_per_token_batches: 7.5e-8, + output_cost_per_token_batches: 3e-7, + cache_read_input_token_cost: 7.5e-8, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "o1-mini": { + max_tokens: 65536, + max_input_tokens: 128000, + max_output_tokens: 65536, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.2e-5, + cache_read_input_token_cost: 1.5e-6, + litellm_provider: "openai", + mode: "chat", + supports_vision: true, + supports_prompt_caching: true, + }, + "o1-mini-2024-09-12": { + max_tokens: 65536, + max_input_tokens: 128000, + max_output_tokens: 65536, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.2e-5, + cache_read_input_token_cost: 1.5e-6, + litellm_provider: "openai", + mode: "chat", + supports_vision: true, + supports_prompt_caching: true, + }, + "o1-preview": { + max_tokens: 32768, + max_input_tokens: 128000, + max_output_tokens: 32768, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 6e-5, + cache_read_input_token_cost: 7.5e-6, + litellm_provider: "openai", + mode: "chat", + supports_vision: true, + supports_prompt_caching: true, + }, + "o1-preview-2024-09-12": { + max_tokens: 32768, + max_input_tokens: 128000, + max_output_tokens: 32768, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 6e-5, + cache_read_input_token_cost: 7.5e-6, + litellm_provider: "openai", + mode: "chat", + supports_vision: true, + supports_prompt_caching: true, + }, + "chatgpt-4o-latest": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 5e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4o-2024-05-13": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 5e-6, + output_cost_per_token: 1.5e-5, + input_cost_per_token_batches: 2.5e-6, + output_cost_per_token_batches: 7.5e-6, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4o-2024-08-06": { + max_tokens: 16384, + max_input_tokens: 128000, + 
max_output_tokens: 16384, + input_cost_per_token: 2.5e-6, + output_cost_per_token: 1e-5, + input_cost_per_token_batches: 1.25e-6, + output_cost_per_token_batches: 5e-6, + cache_read_input_token_cost: 1.25e-6, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4-turbo-preview": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-5, + output_cost_per_token: 3e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4-0314": { + max_tokens: 4096, + max_input_tokens: 8192, + max_output_tokens: 4096, + input_cost_per_token: 3e-5, + output_cost_per_token: 6e-5, + litellm_provider: "openai", + mode: "chat", + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4-0613": { + max_tokens: 4096, + max_input_tokens: 8192, + max_output_tokens: 4096, + input_cost_per_token: 3e-5, + output_cost_per_token: 6e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4-32k": { + max_tokens: 4096, + max_input_tokens: 32768, + max_output_tokens: 4096, + input_cost_per_token: 6e-5, + output_cost_per_token: 0.00012, + litellm_provider: "openai", + mode: "chat", + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4-32k-0314": { + max_tokens: 4096, + max_input_tokens: 32768, + max_output_tokens: 4096, + input_cost_per_token: 6e-5, + output_cost_per_token: 0.00012, + litellm_provider: "openai", + mode: "chat", + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4-32k-0613": { + max_tokens: 4096, + max_input_tokens: 32768, + max_output_tokens: 4096, + input_cost_per_token: 6e-5, + output_cost_per_token: 0.00012, + litellm_provider: "openai", + mode: "chat", + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4-turbo": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-5, + output_cost_per_token: 3e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4-turbo-2024-04-09": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-5, + output_cost_per_token: 3e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4-1106-preview": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-5, + output_cost_per_token: 3e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4-0125-preview": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-5, + output_cost_per_token: 3e-5, + litellm_provider: "openai", + mode: 
"chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4-vision-preview": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-5, + output_cost_per_token: 3e-5, + litellm_provider: "openai", + mode: "chat", + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-4-1106-vision-preview": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-5, + output_cost_per_token: 3e-5, + litellm_provider: "openai", + mode: "chat", + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-3.5-turbo": { + max_tokens: 4097, + max_input_tokens: 16385, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-6, + output_cost_per_token: 2e-6, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-3.5-turbo-0301": { + max_tokens: 4097, + max_input_tokens: 4097, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-6, + output_cost_per_token: 2e-6, + litellm_provider: "openai", + mode: "chat", + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-3.5-turbo-0613": { + max_tokens: 4097, + max_input_tokens: 4097, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-6, + output_cost_per_token: 2e-6, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-3.5-turbo-1106": { + max_tokens: 16385, + max_input_tokens: 16385, + max_output_tokens: 4096, + input_cost_per_token: 1e-6, + output_cost_per_token: 2e-6, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-3.5-turbo-0125": { + max_tokens: 16385, + max_input_tokens: 16385, + max_output_tokens: 4096, + input_cost_per_token: 5e-7, + output_cost_per_token: 1.5e-6, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-3.5-turbo-16k": { + max_tokens: 16385, + max_input_tokens: 16385, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 4e-6, + litellm_provider: "openai", + mode: "chat", + supports_prompt_caching: true, + supports_system_messages: true, + }, + "gpt-3.5-turbo-16k-0613": { + max_tokens: 16385, + max_input_tokens: 16385, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 4e-6, + litellm_provider: "openai", + mode: "chat", + supports_prompt_caching: true, + supports_system_messages: true, + }, + "ft:gpt-3.5-turbo": { + max_tokens: 4096, + max_input_tokens: 16385, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 6e-6, + input_cost_per_token_batches: 1.5e-6, + output_cost_per_token_batches: 3e-6, + litellm_provider: "openai", + mode: "chat", + supports_system_messages: true, + }, + "ft:gpt-3.5-turbo-0125": { + max_tokens: 4096, + max_input_tokens: 16385, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 6e-6, + litellm_provider: "openai", + mode: "chat", + supports_system_messages: true, + }, + 
"ft:gpt-3.5-turbo-1106": { + max_tokens: 4096, + max_input_tokens: 16385, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 6e-6, + litellm_provider: "openai", + mode: "chat", + supports_system_messages: true, + }, + "ft:gpt-3.5-turbo-0613": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 6e-6, + litellm_provider: "openai", + mode: "chat", + supports_system_messages: true, + }, + "ft:gpt-4-0613": { + max_tokens: 4096, + max_input_tokens: 8192, + max_output_tokens: 4096, + input_cost_per_token: 3e-5, + output_cost_per_token: 6e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + source: + "OpenAI needs to add pricing for this ft model, will be updated when added by OpenAI. Defaulting to base model pricing", + supports_system_messages: true, + }, + "ft:gpt-4o-2024-08-06": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 3.75e-6, + output_cost_per_token: 1.5e-5, + input_cost_per_token_batches: 1.875e-6, + output_cost_per_token_batches: 7.5e-6, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + supports_system_messages: true, + }, + "ft:gpt-4o-mini-2024-07-18": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 3e-7, + output_cost_per_token: 1.2e-6, + input_cost_per_token_batches: 1.5e-7, + output_cost_per_token_batches: 6e-7, + cache_read_input_token_cost: 1.5e-7, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "ft:davinci-002": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 4096, + input_cost_per_token: 2e-6, + output_cost_per_token: 2e-6, + input_cost_per_token_batches: 1e-6, + output_cost_per_token_batches: 1e-6, + litellm_provider: "text-completion-openai", + mode: "completion", + }, + "ft:babbage-002": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 4096, + input_cost_per_token: 4e-7, + output_cost_per_token: 4e-7, + input_cost_per_token_batches: 2e-7, + output_cost_per_token_batches: 2e-7, + litellm_provider: "text-completion-openai", + mode: "completion", + }, + "text-embedding-3-large": { + max_tokens: 8191, + max_input_tokens: 8191, + output_vector_size: 3072, + input_cost_per_token: 1.3e-7, + output_cost_per_token: 0.0, + input_cost_per_token_batches: 6.5e-8, + output_cost_per_token_batches: 0.0, + litellm_provider: "openai", + mode: "embedding", + }, + "text-embedding-3-small": { + max_tokens: 8191, + max_input_tokens: 8191, + output_vector_size: 1536, + input_cost_per_token: 2e-8, + output_cost_per_token: 0.0, + input_cost_per_token_batches: 1e-8, + output_cost_per_token_batches: 0.0, + litellm_provider: "openai", + mode: "embedding", + }, + "text-embedding-ada-002": { + max_tokens: 8191, + max_input_tokens: 8191, + output_vector_size: 1536, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "openai", + mode: "embedding", + }, + "text-embedding-ada-002-v2": { + max_tokens: 8191, + max_input_tokens: 8191, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + input_cost_per_token_batches: 5e-8, + 
output_cost_per_token_batches: 0.0, + litellm_provider: "openai", + mode: "embedding", + }, + "text-moderation-stable": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 0, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "openai", + mode: "moderations", + }, + "text-moderation-007": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 0, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "openai", + mode: "moderations", + }, + "text-moderation-latest": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 0, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "openai", + mode: "moderations", + }, + "256-x-256/dall-e-2": { + mode: "image_generation", + input_cost_per_pixel: 2.4414e-7, + output_cost_per_pixel: 0.0, + litellm_provider: "openai", + }, + "512-x-512/dall-e-2": { + mode: "image_generation", + input_cost_per_pixel: 6.86e-8, + output_cost_per_pixel: 0.0, + litellm_provider: "openai", + }, + "1024-x-1024/dall-e-2": { + mode: "image_generation", + input_cost_per_pixel: 1.9e-8, + output_cost_per_pixel: 0.0, + litellm_provider: "openai", + }, + "hd/1024-x-1792/dall-e-3": { + mode: "image_generation", + input_cost_per_pixel: 6.539e-8, + output_cost_per_pixel: 0.0, + litellm_provider: "openai", + }, + "hd/1792-x-1024/dall-e-3": { + mode: "image_generation", + input_cost_per_pixel: 6.539e-8, + output_cost_per_pixel: 0.0, + litellm_provider: "openai", + }, + "hd/1024-x-1024/dall-e-3": { + mode: "image_generation", + input_cost_per_pixel: 7.629e-8, + output_cost_per_pixel: 0.0, + litellm_provider: "openai", + }, + "standard/1024-x-1792/dall-e-3": { + mode: "image_generation", + input_cost_per_pixel: 4.359e-8, + output_cost_per_pixel: 0.0, + litellm_provider: "openai", + }, + "standard/1792-x-1024/dall-e-3": { + mode: "image_generation", + input_cost_per_pixel: 4.359e-8, + output_cost_per_pixel: 0.0, + litellm_provider: "openai", + }, + "standard/1024-x-1024/dall-e-3": { + mode: "image_generation", + input_cost_per_pixel: 3.81469e-8, + output_cost_per_pixel: 0.0, + litellm_provider: "openai", + }, + "whisper-1": { + mode: "audio_transcription", + input_cost_per_second: 0, + output_cost_per_second: 0.0001, + litellm_provider: "openai", + }, + "tts-1": { + mode: "audio_speech", + input_cost_per_character: 1.5e-5, + litellm_provider: "openai", + }, + "tts-1-hd": { + mode: "audio_speech", + input_cost_per_character: 3e-5, + litellm_provider: "openai", + }, + "azure/tts-1": { + mode: "audio_speech", + input_cost_per_character: 1.5e-5, + litellm_provider: "azure", + }, + "azure/tts-1-hd": { + mode: "audio_speech", + input_cost_per_character: 3e-5, + litellm_provider: "azure", + }, + "azure/whisper-1": { + mode: "audio_transcription", + input_cost_per_second: 0, + output_cost_per_second: 0.0001, + litellm_provider: "azure", + }, + "azure/o1-mini": { + max_tokens: 65536, + max_input_tokens: 128000, + max_output_tokens: 65536, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.2e-5, + cache_read_input_token_cost: 1.5e-6, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: false, + supports_prompt_caching: true, + }, + "azure/o1-mini-2024-09-12": { + max_tokens: 65536, + max_input_tokens: 128000, + max_output_tokens: 65536, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.2e-5, + cache_read_input_token_cost: 1.5e-6, + litellm_provider: "azure", + mode: 
"chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: false, + supports_prompt_caching: true, + }, + "azure/o1-preview": { + max_tokens: 32768, + max_input_tokens: 128000, + max_output_tokens: 32768, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 6e-5, + cache_read_input_token_cost: 7.5e-6, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: false, + supports_prompt_caching: true, + }, + "azure/o1-preview-2024-09-12": { + max_tokens: 32768, + max_input_tokens: 128000, + max_output_tokens: 32768, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 6e-5, + cache_read_input_token_cost: 7.5e-6, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: false, + supports_prompt_caching: true, + }, + "azure/gpt-4o": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 5e-6, + output_cost_per_token: 1.5e-5, + cache_read_input_token_cost: 1.25e-6, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: true, + supports_prompt_caching: true, + }, + "azure/gpt-4o-2024-08-06": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 2.75e-6, + output_cost_per_token: 1.1e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + supports_prompt_caching: true, + }, + "azure/gpt-4o-2024-05-13": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 5e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: true, + supports_prompt_caching: true, + }, + "azure/global-standard/gpt-4o-2024-08-06": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 2.5e-6, + output_cost_per_token: 1e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + supports_prompt_caching: true, + }, + "azure/global-standard/gpt-4o-mini": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 6e-7, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + }, + "azure/gpt-4o-mini": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 1.65e-7, + output_cost_per_token: 6.6e-7, + cache_read_input_token_cost: 7.5e-8, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + supports_prompt_caching: true, + }, + "azure/gpt-4-turbo-2024-04-09": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-5, + output_cost_per_token: 3e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + 
supports_parallel_function_calling: true, + supports_vision: true, + }, + "azure/gpt-4-0125-preview": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-5, + output_cost_per_token: 3e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + }, + "azure/gpt-4-1106-preview": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-5, + output_cost_per_token: 3e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + }, + "azure/gpt-4-0613": { + max_tokens: 4096, + max_input_tokens: 8192, + max_output_tokens: 4096, + input_cost_per_token: 3e-5, + output_cost_per_token: 6e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + }, + "azure/gpt-4-32k-0613": { + max_tokens: 4096, + max_input_tokens: 32768, + max_output_tokens: 4096, + input_cost_per_token: 6e-5, + output_cost_per_token: 0.00012, + litellm_provider: "azure", + mode: "chat", + }, + "azure/gpt-4-32k": { + max_tokens: 4096, + max_input_tokens: 32768, + max_output_tokens: 4096, + input_cost_per_token: 6e-5, + output_cost_per_token: 0.00012, + litellm_provider: "azure", + mode: "chat", + }, + "azure/gpt-4": { + max_tokens: 4096, + max_input_tokens: 8192, + max_output_tokens: 4096, + input_cost_per_token: 3e-5, + output_cost_per_token: 6e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + }, + "azure/gpt-4-turbo": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-5, + output_cost_per_token: 3e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + }, + "azure/gpt-4-turbo-vision-preview": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-5, + output_cost_per_token: 3e-5, + litellm_provider: "azure", + mode: "chat", + supports_vision: true, + }, + "azure/gpt-35-turbo-16k-0613": { + max_tokens: 4096, + max_input_tokens: 16385, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 4e-6, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + }, + "azure/gpt-35-turbo-1106": { + max_tokens: 4096, + max_input_tokens: 16384, + max_output_tokens: 4096, + input_cost_per_token: 1e-6, + output_cost_per_token: 2e-6, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + }, + "azure/gpt-35-turbo-0613": { + max_tokens: 4097, + max_input_tokens: 4097, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-6, + output_cost_per_token: 2e-6, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + }, + "azure/gpt-35-turbo-0301": { + max_tokens: 4097, + max_input_tokens: 4097, + max_output_tokens: 4096, + input_cost_per_token: 2e-7, + output_cost_per_token: 2e-6, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + }, + "azure/gpt-35-turbo-0125": { + max_tokens: 4096, + max_input_tokens: 16384, + max_output_tokens: 4096, + input_cost_per_token: 5e-7, + output_cost_per_token: 1.5e-6, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + }, + 
"azure/gpt-35-turbo-16k": { + max_tokens: 4096, + max_input_tokens: 16385, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 4e-6, + litellm_provider: "azure", + mode: "chat", + }, + "azure/gpt-35-turbo": { + max_tokens: 4096, + max_input_tokens: 4097, + max_output_tokens: 4096, + input_cost_per_token: 5e-7, + output_cost_per_token: 1.5e-6, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + }, + "azure/gpt-3.5-turbo-instruct-0914": { + max_tokens: 4097, + max_input_tokens: 4097, + input_cost_per_token: 1.5e-6, + output_cost_per_token: 2e-6, + litellm_provider: "azure_text", + mode: "completion", + }, + "azure/gpt-35-turbo-instruct": { + max_tokens: 4097, + max_input_tokens: 4097, + input_cost_per_token: 1.5e-6, + output_cost_per_token: 2e-6, + litellm_provider: "azure_text", + mode: "completion", + }, + "azure/gpt-35-turbo-instruct-0914": { + max_tokens: 4097, + max_input_tokens: 4097, + input_cost_per_token: 1.5e-6, + output_cost_per_token: 2e-6, + litellm_provider: "azure_text", + mode: "completion", + }, + "azure/mistral-large-latest": { + max_tokens: 32000, + max_input_tokens: 32000, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + }, + "azure/mistral-large-2402": { + max_tokens: 32000, + max_input_tokens: 32000, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + }, + "azure/command-r-plus": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + }, + "azure/ada": { + max_tokens: 8191, + max_input_tokens: 8191, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "azure", + mode: "embedding", + }, + "azure/text-embedding-ada-002": { + max_tokens: 8191, + max_input_tokens: 8191, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "azure", + mode: "embedding", + }, + "azure/text-embedding-3-large": { + max_tokens: 8191, + max_input_tokens: 8191, + input_cost_per_token: 1.3e-7, + output_cost_per_token: 0.0, + litellm_provider: "azure", + mode: "embedding", + }, + "azure/text-embedding-3-small": { + max_tokens: 8191, + max_input_tokens: 8191, + input_cost_per_token: 2e-8, + output_cost_per_token: 0.0, + litellm_provider: "azure", + mode: "embedding", + }, + "azure/standard/1024-x-1024/dall-e-3": { + input_cost_per_pixel: 3.81469e-8, + output_cost_per_token: 0.0, + litellm_provider: "azure", + mode: "image_generation", + }, + "azure/hd/1024-x-1024/dall-e-3": { + input_cost_per_pixel: 7.629e-8, + output_cost_per_token: 0.0, + litellm_provider: "azure", + mode: "image_generation", + }, + "azure/standard/1024-x-1792/dall-e-3": { + input_cost_per_pixel: 4.359e-8, + output_cost_per_token: 0.0, + litellm_provider: "azure", + mode: "image_generation", + }, + "azure/standard/1792-x-1024/dall-e-3": { + input_cost_per_pixel: 4.359e-8, + output_cost_per_token: 0.0, + litellm_provider: "azure", + mode: "image_generation", + }, + "azure/hd/1024-x-1792/dall-e-3": { + input_cost_per_pixel: 6.539e-8, + output_cost_per_token: 0.0, + litellm_provider: "azure", + mode: "image_generation", + }, + "azure/hd/1792-x-1024/dall-e-3": { + input_cost_per_pixel: 6.539e-8, + output_cost_per_token: 0.0, + litellm_provider: "azure", + mode: 
"image_generation", + }, + "azure/standard/1024-x-1024/dall-e-2": { + input_cost_per_pixel: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "azure", + mode: "image_generation", + }, + "azure_ai/jamba-instruct": { + max_tokens: 4096, + max_input_tokens: 70000, + max_output_tokens: 4096, + input_cost_per_token: 5e-7, + output_cost_per_token: 7e-7, + litellm_provider: "azure_ai", + mode: "chat", + }, + "azure_ai/mistral-large": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 4e-6, + output_cost_per_token: 1.2e-5, + litellm_provider: "azure_ai", + mode: "chat", + supports_function_calling: true, + }, + "azure_ai/mistral-small": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 1e-6, + output_cost_per_token: 3e-6, + litellm_provider: "azure_ai", + supports_function_calling: true, + mode: "chat", + }, + "azure_ai/Meta-Llama-3-70B-Instruct": { + max_tokens: 2048, + max_input_tokens: 8192, + max_output_tokens: 2048, + input_cost_per_token: 1.1e-6, + output_cost_per_token: 3.7e-7, + litellm_provider: "azure_ai", + mode: "chat", + }, + "azure_ai/Meta-Llama-3.1-8B-Instruct": { + max_tokens: 2048, + max_input_tokens: 128000, + max_output_tokens: 2048, + input_cost_per_token: 3e-7, + output_cost_per_token: 6.1e-7, + litellm_provider: "azure_ai", + mode: "chat", + source: + "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-8b-instruct-offer?tab=PlansAndPrice", + }, + "azure_ai/Meta-Llama-3.1-70B-Instruct": { + max_tokens: 2048, + max_input_tokens: 128000, + max_output_tokens: 2048, + input_cost_per_token: 2.68e-6, + output_cost_per_token: 3.54e-6, + litellm_provider: "azure_ai", + mode: "chat", + source: + "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-70b-instruct-offer?tab=PlansAndPrice", + }, + "azure_ai/Meta-Llama-3.1-405B-Instruct": { + max_tokens: 2048, + max_input_tokens: 128000, + max_output_tokens: 2048, + input_cost_per_token: 5.33e-6, + output_cost_per_token: 1.6e-5, + litellm_provider: "azure_ai", + mode: "chat", + source: + "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-405b-instruct-offer?tab=PlansAndPrice", + }, + "azure_ai/cohere-rerank-v3-multilingual": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + max_query_tokens: 2048, + input_cost_per_token: 0.0, + input_cost_per_query: 0.002, + output_cost_per_token: 0.0, + litellm_provider: "azure_ai", + mode: "rerank", + }, + "azure_ai/cohere-rerank-v3-english": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + max_query_tokens: 2048, + input_cost_per_token: 0.0, + input_cost_per_query: 0.002, + output_cost_per_token: 0.0, + litellm_provider: "azure_ai", + mode: "rerank", + }, + "azure_ai/Cohere-embed-v3-english": { + max_tokens: 512, + max_input_tokens: 512, + output_vector_size: 1024, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "azure_ai", + mode: "embedding", + source: + "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/cohere.cohere-embed-v3-english-offer?tab=PlansAndPrice", + }, + "azure_ai/Cohere-embed-v3-multilingual": { + max_tokens: 512, + max_input_tokens: 512, + output_vector_size: 1024, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "azure_ai", + mode: "embedding", + source: + 
"https://azuremarketplace.microsoft.com/en-us/marketplace/apps/cohere.cohere-embed-v3-english-offer?tab=PlansAndPrice", + }, + "babbage-002": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 4096, + input_cost_per_token: 4e-7, + output_cost_per_token: 4e-7, + litellm_provider: "text-completion-openai", + mode: "completion", + }, + "davinci-002": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 4096, + input_cost_per_token: 2e-6, + output_cost_per_token: 2e-6, + litellm_provider: "text-completion-openai", + mode: "completion", + }, + "gpt-3.5-turbo-instruct": { + max_tokens: 4096, + max_input_tokens: 8192, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-6, + output_cost_per_token: 2e-6, + litellm_provider: "text-completion-openai", + mode: "completion", + }, + "gpt-3.5-turbo-instruct-0914": { + max_tokens: 4097, + max_input_tokens: 8192, + max_output_tokens: 4097, + input_cost_per_token: 1.5e-6, + output_cost_per_token: 2e-6, + litellm_provider: "text-completion-openai", + mode: "completion", + }, + "claude-instant-1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 1.63e-6, + output_cost_per_token: 5.51e-6, + litellm_provider: "anthropic", + mode: "chat", + }, + "mistral/mistral-tiny": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 2.5e-7, + litellm_provider: "mistral", + mode: "chat", + supports_assistant_prefill: true, + }, + "mistral/mistral-small": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 1e-6, + output_cost_per_token: 3e-6, + litellm_provider: "mistral", + supports_function_calling: true, + mode: "chat", + supports_assistant_prefill: true, + }, + "mistral/mistral-small-latest": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 1e-6, + output_cost_per_token: 3e-6, + litellm_provider: "mistral", + supports_function_calling: true, + mode: "chat", + supports_assistant_prefill: true, + }, + "mistral/mistral-medium": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 2.7e-6, + output_cost_per_token: 8.1e-6, + litellm_provider: "mistral", + mode: "chat", + supports_assistant_prefill: true, + }, + "mistral/mistral-medium-latest": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 2.7e-6, + output_cost_per_token: 8.1e-6, + litellm_provider: "mistral", + mode: "chat", + supports_assistant_prefill: true, + }, + "mistral/mistral-medium-2312": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 2.7e-6, + output_cost_per_token: 8.1e-6, + litellm_provider: "mistral", + mode: "chat", + supports_assistant_prefill: true, + }, + "mistral/mistral-large-latest": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 2e-6, + output_cost_per_token: 6e-6, + litellm_provider: "mistral", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + }, + "mistral/mistral-large-2402": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 4e-6, + output_cost_per_token: 1.2e-5, + litellm_provider: "mistral", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + }, + "mistral/mistral-large-2407": { + max_tokens: 128000, + 
max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 3e-6, + output_cost_per_token: 9e-6, + litellm_provider: "mistral", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + }, + "mistral/pixtral-12b-2409": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 1.5e-7, + litellm_provider: "mistral", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + supports_vision: true, + }, + "mistral/open-mistral-7b": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 2.5e-7, + litellm_provider: "mistral", + mode: "chat", + supports_assistant_prefill: true, + }, + "mistral/open-mixtral-8x7b": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 7e-7, + output_cost_per_token: 7e-7, + litellm_provider: "mistral", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + }, + "mistral/open-mixtral-8x22b": { + max_tokens: 8191, + max_input_tokens: 64000, + max_output_tokens: 8191, + input_cost_per_token: 2e-6, + output_cost_per_token: 6e-6, + litellm_provider: "mistral", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + }, + "mistral/codestral-latest": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 1e-6, + output_cost_per_token: 3e-6, + litellm_provider: "mistral", + mode: "chat", + supports_assistant_prefill: true, + }, + "mistral/codestral-2405": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 1e-6, + output_cost_per_token: 3e-6, + litellm_provider: "mistral", + mode: "chat", + supports_assistant_prefill: true, + }, + "mistral/open-mistral-nemo": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 3e-7, + output_cost_per_token: 3e-7, + litellm_provider: "mistral", + mode: "chat", + source: "https://mistral.ai/technology/", + supports_assistant_prefill: true, + }, + "mistral/open-mistral-nemo-2407": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 3e-7, + output_cost_per_token: 3e-7, + litellm_provider: "mistral", + mode: "chat", + source: "https://mistral.ai/technology/", + supports_assistant_prefill: true, + }, + "mistral/open-codestral-mamba": { + max_tokens: 256000, + max_input_tokens: 256000, + max_output_tokens: 256000, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 2.5e-7, + litellm_provider: "mistral", + mode: "chat", + source: "https://mistral.ai/technology/", + supports_assistant_prefill: true, + }, + "mistral/codestral-mamba-latest": { + max_tokens: 256000, + max_input_tokens: 256000, + max_output_tokens: 256000, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 2.5e-7, + litellm_provider: "mistral", + mode: "chat", + source: "https://mistral.ai/technology/", + supports_assistant_prefill: true, + }, + "mistral/mistral-embed": { + max_tokens: 8192, + max_input_tokens: 8192, + input_cost_per_token: 1e-7, + litellm_provider: "mistral", + mode: "embedding", + }, + "deepseek-chat": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.4e-7, + input_cost_per_token_cache_hit: 1.4e-8, + output_cost_per_token: 2.8e-7, + litellm_provider: "deepseek", + 
mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + supports_tool_choice: true, + supports_prompt_caching: true, + }, + "codestral/codestral-latest": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "codestral", + mode: "chat", + source: "https://docs.mistral.ai/capabilities/code_generation/", + supports_assistant_prefill: true, + }, + "codestral/codestral-2405": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "codestral", + mode: "chat", + source: "https://docs.mistral.ai/capabilities/code_generation/", + supports_assistant_prefill: true, + }, + "text-completion-codestral/codestral-latest": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "text-completion-codestral", + mode: "completion", + source: "https://docs.mistral.ai/capabilities/code_generation/", + }, + "text-completion-codestral/codestral-2405": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "text-completion-codestral", + mode: "completion", + source: "https://docs.mistral.ai/capabilities/code_generation/", + }, + "deepseek-coder": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.4e-7, + input_cost_per_token_cache_hit: 1.4e-8, + output_cost_per_token: 2.8e-7, + litellm_provider: "deepseek", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + supports_tool_choice: true, + supports_prompt_caching: true, + }, + "groq/llama2-70b-4096": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 7e-7, + output_cost_per_token: 8e-7, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "groq/llama3-8b-8192": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 5e-8, + output_cost_per_token: 8e-8, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "groq/llama3-70b-8192": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 5.9e-7, + output_cost_per_token: 7.9e-7, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "groq/llama-3.1-8b-instant": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 5e-8, + output_cost_per_token: 8e-8, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "groq/llama-3.1-70b-versatile": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 5.9e-7, + output_cost_per_token: 7.9e-7, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "groq/llama-3.1-405b-reasoning": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 5.9e-7, + output_cost_per_token: 7.9e-7, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + 
"groq/mixtral-8x7b-32768": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 32768, + input_cost_per_token: 2.4e-7, + output_cost_per_token: 2.4e-7, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "groq/gemma-7b-it": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 7e-8, + output_cost_per_token: 7e-8, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "groq/gemma2-9b-it": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 2e-7, + output_cost_per_token: 2e-7, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "groq/llama3-groq-70b-8192-tool-use-preview": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 8.9e-7, + output_cost_per_token: 8.9e-7, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "groq/llama3-groq-8b-8192-tool-use-preview": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 1.9e-7, + output_cost_per_token: 1.9e-7, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "cerebras/llama3.1-8b": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 1e-7, + output_cost_per_token: 1e-7, + litellm_provider: "cerebras", + mode: "chat", + supports_function_calling: true, + }, + "cerebras/llama3.1-70b": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 6e-7, + output_cost_per_token: 6e-7, + litellm_provider: "cerebras", + mode: "chat", + supports_function_calling: true, + }, + "friendliai/mixtral-8x7b-instruct-v0-1": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 32768, + input_cost_per_token: 4e-7, + output_cost_per_token: 4e-7, + litellm_provider: "friendliai", + mode: "chat", + supports_function_calling: true, + }, + "friendliai/meta-llama-3-8b-instruct": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 1e-7, + output_cost_per_token: 1e-7, + litellm_provider: "friendliai", + mode: "chat", + supports_function_calling: true, + }, + "friendliai/meta-llama-3-70b-instruct": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 8e-7, + output_cost_per_token: 8e-7, + litellm_provider: "friendliai", + mode: "chat", + supports_function_calling: true, + }, + "claude-instant-1.2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 1.63e-7, + output_cost_per_token: 5.51e-7, + litellm_provider: "anthropic", + mode: "chat", + }, + "claude-2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "anthropic", + mode: "chat", + }, + "claude-2.1": { + max_tokens: 8191, + max_input_tokens: 200000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "anthropic", + mode: "chat", + }, + "claude-3-haiku-20240307": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 2.5e-7, + 
output_cost_per_token: 1.25e-6, + cache_creation_input_token_cost: 3e-7, + cache_read_input_token_cost: 3e-8, + litellm_provider: "anthropic", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 264, + supports_assistant_prefill: true, + supports_prompt_caching: true, + supports_response_schema: true, + }, + "claude-3-haiku-latest": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 1.25e-6, + cache_creation_input_token_cost: 3e-7, + cache_read_input_token_cost: 3e-8, + litellm_provider: "anthropic", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 264, + supports_assistant_prefill: true, + supports_prompt_caching: true, + }, + "claude-3-opus-20240229": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 7.5e-5, + cache_creation_input_token_cost: 1.875e-5, + cache_read_input_token_cost: 1.5e-6, + litellm_provider: "anthropic", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 395, + supports_assistant_prefill: true, + supports_prompt_caching: true, + supports_response_schema: true, + }, + "claude-3-opus-latest": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 7.5e-5, + cache_creation_input_token_cost: 1.875e-5, + cache_read_input_token_cost: 1.5e-6, + litellm_provider: "anthropic", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 395, + supports_assistant_prefill: true, + supports_prompt_caching: true, + }, + "claude-3-sonnet-20240229": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "anthropic", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 159, + supports_assistant_prefill: true, + supports_prompt_caching: true, + supports_response_schema: true, + }, + "claude-3-5-sonnet-20240620": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + cache_creation_input_token_cost: 3.75e-6, + cache_read_input_token_cost: 3e-7, + litellm_provider: "anthropic", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 159, + supports_assistant_prefill: true, + supports_prompt_caching: true, + supports_response_schema: true, + }, + "claude-3-5-sonnet-20241022": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + cache_creation_input_token_cost: 3.75e-6, + cache_read_input_token_cost: 3e-7, + litellm_provider: "anthropic", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 159, + supports_assistant_prefill: true, + supports_pdf_input: true, + supports_prompt_caching: true, + supports_response_schema: true, + }, + "claude-3-5-sonnet-latest": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + cache_creation_input_token_cost: 3.75e-6, + cache_read_input_token_cost: 3e-7, + 
litellm_provider: "anthropic", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 159, + supports_assistant_prefill: true, + supports_prompt_caching: true, + }, + "text-bison": { + max_tokens: 2048, + max_input_tokens: 8192, + max_output_tokens: 2048, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "text-bison@001": { + max_tokens: 1024, + max_input_tokens: 8192, + max_output_tokens: 1024, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "text-bison@002": { + max_tokens: 1024, + max_input_tokens: 8192, + max_output_tokens: 1024, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "text-bison32k": { + max_tokens: 1024, + max_input_tokens: 8192, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "text-bison32k@002": { + max_tokens: 1024, + max_input_tokens: 8192, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "text-unicorn": { + max_tokens: 1024, + max_input_tokens: 8192, + max_output_tokens: 1024, + input_cost_per_token: 1e-5, + output_cost_per_token: 2.8e-5, + litellm_provider: "vertex_ai-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "text-unicorn@001": { + max_tokens: 1024, + max_input_tokens: 8192, + max_output_tokens: 1024, + input_cost_per_token: 1e-5, + output_cost_per_token: 2.8e-5, + litellm_provider: "vertex_ai-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "chat-bison": { + max_tokens: 4096, + max_input_tokens: 8192, + max_output_tokens: 4096, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-chat-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "chat-bison@001": { + max_tokens: 4096, + max_input_tokens: 8192, + max_output_tokens: 4096, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-chat-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "chat-bison@002": { + max_tokens: 4096, + 
max_input_tokens: 8192, + max_output_tokens: 4096, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-chat-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "chat-bison-32k": { + max_tokens: 8192, + max_input_tokens: 32000, + max_output_tokens: 8192, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-chat-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "chat-bison-32k@002": { + max_tokens: 8192, + max_input_tokens: 32000, + max_output_tokens: 8192, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-chat-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "code-bison": { + max_tokens: 1024, + max_input_tokens: 6144, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-code-text-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "code-bison@001": { + max_tokens: 1024, + max_input_tokens: 6144, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-code-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "code-bison@002": { + max_tokens: 1024, + max_input_tokens: 6144, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-code-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "code-bison32k": { + max_tokens: 1024, + max_input_tokens: 6144, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-code-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "code-bison-32k@002": { + max_tokens: 1024, + max_input_tokens: 6144, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-code-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "code-gecko@001": { + max_tokens: 64, + max_input_tokens: 2048, + max_output_tokens: 64, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + litellm_provider: "vertex_ai-code-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + 
"code-gecko@002": { + max_tokens: 64, + max_input_tokens: 2048, + max_output_tokens: 64, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + litellm_provider: "vertex_ai-code-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "code-gecko": { + max_tokens: 64, + max_input_tokens: 2048, + max_output_tokens: 64, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + litellm_provider: "vertex_ai-code-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "code-gecko-latest": { + max_tokens: 64, + max_input_tokens: 2048, + max_output_tokens: 64, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + litellm_provider: "vertex_ai-code-text-models", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "codechat-bison@latest": { + max_tokens: 1024, + max_input_tokens: 6144, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-code-chat-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "codechat-bison": { + max_tokens: 1024, + max_input_tokens: 6144, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-code-chat-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "codechat-bison@001": { + max_tokens: 1024, + max_input_tokens: 6144, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-code-chat-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "codechat-bison@002": { + max_tokens: 1024, + max_input_tokens: 6144, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-code-chat-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "codechat-bison-32k": { + max_tokens: 8192, + max_input_tokens: 32000, + max_output_tokens: 8192, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-code-chat-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "codechat-bison-32k@002": { + max_tokens: 8192, + max_input_tokens: 32000, + max_output_tokens: 8192, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + input_cost_per_character: 2.5e-7, + output_cost_per_character: 5e-7, + litellm_provider: "vertex_ai-code-chat-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-pro": { + max_tokens: 8192, + max_input_tokens: 32760, + max_output_tokens: 8192, + 
input_cost_per_image: 0.0025, + input_cost_per_video_per_second: 0.002, + input_cost_per_token: 5e-7, + input_cost_per_character: 1.25e-7, + output_cost_per_token: 1.5e-6, + output_cost_per_character: 3.75e-7, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_function_calling: true, + source: "https://cloud.google.com/vertex-ai/generative-ai/pricing", + }, + "gemini-1.0-pro": { + max_tokens: 8192, + max_input_tokens: 32760, + max_output_tokens: 8192, + input_cost_per_image: 0.0025, + input_cost_per_video_per_second: 0.002, + input_cost_per_token: 5e-7, + input_cost_per_character: 1.25e-7, + output_cost_per_token: 1.5e-6, + output_cost_per_character: 3.75e-7, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_function_calling: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/pricing#google_models", + }, + "gemini-1.0-pro-001": { + max_tokens: 8192, + max_input_tokens: 32760, + max_output_tokens: 8192, + input_cost_per_image: 0.0025, + input_cost_per_video_per_second: 0.002, + input_cost_per_token: 5e-7, + input_cost_per_character: 1.25e-7, + output_cost_per_token: 1.5e-6, + output_cost_per_character: 3.75e-7, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_function_calling: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.0-ultra": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 2048, + input_cost_per_image: 0.0025, + input_cost_per_video_per_second: 0.002, + input_cost_per_token: 5e-7, + input_cost_per_character: 1.25e-7, + output_cost_per_token: 1.5e-6, + output_cost_per_character: 3.75e-7, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_function_calling: true, + source: + "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.0-ultra-001": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 2048, + input_cost_per_image: 0.0025, + input_cost_per_video_per_second: 0.002, + input_cost_per_token: 5e-7, + input_cost_per_character: 1.25e-7, + output_cost_per_token: 1.5e-6, + output_cost_per_character: 3.75e-7, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_function_calling: true, + source: + "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. 
Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.0-pro-002": { + max_tokens: 8192, + max_input_tokens: 32760, + max_output_tokens: 8192, + input_cost_per_image: 0.0025, + input_cost_per_video_per_second: 0.002, + input_cost_per_token: 5e-7, + input_cost_per_character: 1.25e-7, + output_cost_per_token: 1.5e-6, + output_cost_per_character: 3.75e-7, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_function_calling: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.5-pro": { + max_tokens: 8192, + max_input_tokens: 2097152, + max_output_tokens: 8192, + input_cost_per_image: 0.00032875, + input_cost_per_audio_per_second: 3.125e-5, + input_cost_per_video_per_second: 0.00032875, + input_cost_per_token: 1.25e-6, + input_cost_per_character: 3.125e-7, + input_cost_per_image_above_128k_tokens: 0.0006575, + input_cost_per_video_per_second_above_128k_tokens: 0.0006575, + input_cost_per_audio_per_second_above_128k_tokens: 6.25e-5, + input_cost_per_token_above_128k_tokens: 2.5e-6, + input_cost_per_character_above_128k_tokens: 6.25e-7, + output_cost_per_token: 5e-6, + output_cost_per_character: 1.25e-6, + output_cost_per_token_above_128k_tokens: 1e-5, + output_cost_per_character_above_128k_tokens: 2.5e-6, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_vision: true, + supports_pdf_input: true, + supports_system_messages: true, + supports_function_calling: true, + supports_tool_choice: true, + supports_response_schema: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.5-pro-002": { + max_tokens: 8192, + max_input_tokens: 2097152, + max_output_tokens: 8192, + input_cost_per_image: 0.00032875, + input_cost_per_audio_per_second: 3.125e-5, + input_cost_per_video_per_second: 0.00032875, + input_cost_per_token: 1.25e-6, + input_cost_per_character: 3.125e-7, + input_cost_per_image_above_128k_tokens: 0.0006575, + input_cost_per_video_per_second_above_128k_tokens: 0.0006575, + input_cost_per_audio_per_second_above_128k_tokens: 6.25e-5, + input_cost_per_token_above_128k_tokens: 2.5e-6, + input_cost_per_character_above_128k_tokens: 6.25e-7, + output_cost_per_token: 5e-6, + output_cost_per_character: 1.25e-6, + output_cost_per_token_above_128k_tokens: 1e-5, + output_cost_per_character_above_128k_tokens: 2.5e-6, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_vision: true, + supports_system_messages: true, + supports_function_calling: true, + supports_tool_choice: true, + supports_response_schema: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-pro", + }, + "gemini-1.5-pro-001": { + max_tokens: 8192, + max_input_tokens: 1000000, + max_output_tokens: 8192, + input_cost_per_image: 0.00032875, + input_cost_per_audio_per_second: 3.125e-5, + input_cost_per_video_per_second: 0.00032875, + input_cost_per_token: 1.25e-6, + input_cost_per_character: 3.125e-7, + input_cost_per_image_above_128k_tokens: 0.0006575, + input_cost_per_video_per_second_above_128k_tokens: 0.0006575, + input_cost_per_audio_per_second_above_128k_tokens: 6.25e-5, + input_cost_per_token_above_128k_tokens: 2.5e-6, + input_cost_per_character_above_128k_tokens: 6.25e-7, + output_cost_per_token: 5e-6, + output_cost_per_character: 1.25e-6, + output_cost_per_token_above_128k_tokens: 1e-5, + 
output_cost_per_character_above_128k_tokens: 2.5e-6, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_vision: true, + supports_system_messages: true, + supports_function_calling: true, + supports_tool_choice: true, + supports_response_schema: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.5-pro-preview-0514": { + max_tokens: 8192, + max_input_tokens: 1000000, + max_output_tokens: 8192, + input_cost_per_image: 0.00032875, + input_cost_per_audio_per_second: 3.125e-5, + input_cost_per_video_per_second: 0.00032875, + input_cost_per_token: 7.8125e-8, + input_cost_per_character: 3.125e-7, + input_cost_per_image_above_128k_tokens: 0.0006575, + input_cost_per_video_per_second_above_128k_tokens: 0.0006575, + input_cost_per_audio_per_second_above_128k_tokens: 6.25e-5, + input_cost_per_token_above_128k_tokens: 1.5625e-7, + input_cost_per_character_above_128k_tokens: 6.25e-7, + output_cost_per_token: 3.125e-7, + output_cost_per_character: 1.25e-6, + output_cost_per_token_above_128k_tokens: 6.25e-7, + output_cost_per_character_above_128k_tokens: 2.5e-6, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_tool_choice: true, + supports_response_schema: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.5-pro-preview-0215": { + max_tokens: 8192, + max_input_tokens: 1000000, + max_output_tokens: 8192, + input_cost_per_image: 0.00032875, + input_cost_per_audio_per_second: 3.125e-5, + input_cost_per_video_per_second: 0.00032875, + input_cost_per_token: 7.8125e-8, + input_cost_per_character: 3.125e-7, + input_cost_per_image_above_128k_tokens: 0.0006575, + input_cost_per_video_per_second_above_128k_tokens: 0.0006575, + input_cost_per_audio_per_second_above_128k_tokens: 6.25e-5, + input_cost_per_token_above_128k_tokens: 1.5625e-7, + input_cost_per_character_above_128k_tokens: 6.25e-7, + output_cost_per_token: 3.125e-7, + output_cost_per_character: 1.25e-6, + output_cost_per_token_above_128k_tokens: 6.25e-7, + output_cost_per_character_above_128k_tokens: 2.5e-6, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_tool_choice: true, + supports_response_schema: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.5-pro-preview-0409": { + max_tokens: 8192, + max_input_tokens: 1000000, + max_output_tokens: 8192, + input_cost_per_image: 0.00032875, + input_cost_per_audio_per_second: 3.125e-5, + input_cost_per_video_per_second: 0.00032875, + input_cost_per_token: 7.8125e-8, + input_cost_per_character: 3.125e-7, + input_cost_per_image_above_128k_tokens: 0.0006575, + input_cost_per_video_per_second_above_128k_tokens: 0.0006575, + input_cost_per_audio_per_second_above_128k_tokens: 6.25e-5, + input_cost_per_token_above_128k_tokens: 1.5625e-7, + input_cost_per_character_above_128k_tokens: 6.25e-7, + output_cost_per_token: 3.125e-7, + output_cost_per_character: 1.25e-6, + output_cost_per_token_above_128k_tokens: 6.25e-7, + output_cost_per_character_above_128k_tokens: 2.5e-6, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: true, + supports_response_schema: true, + source: + 
"https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.5-flash": { + max_tokens: 8192, + max_input_tokens: 1000000, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_image: 2e-5, + input_cost_per_video_per_second: 2e-5, + input_cost_per_audio_per_second: 2e-6, + input_cost_per_token: 7.5e-8, + input_cost_per_character: 1.875e-8, + input_cost_per_token_above_128k_tokens: 1e-6, + input_cost_per_character_above_128k_tokens: 2.5e-7, + input_cost_per_image_above_128k_tokens: 4e-5, + input_cost_per_video_per_second_above_128k_tokens: 4e-5, + input_cost_per_audio_per_second_above_128k_tokens: 4e-6, + output_cost_per_token: 3e-7, + output_cost_per_character: 7.5e-8, + output_cost_per_token_above_128k_tokens: 6e-7, + output_cost_per_character_above_128k_tokens: 1.5e-7, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.5-flash-exp-0827": { + max_tokens: 8192, + max_input_tokens: 1000000, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_image: 2e-5, + input_cost_per_video_per_second: 2e-5, + input_cost_per_audio_per_second: 2e-6, + input_cost_per_token: 4.688e-9, + input_cost_per_character: 1.875e-8, + input_cost_per_token_above_128k_tokens: 1e-6, + input_cost_per_character_above_128k_tokens: 2.5e-7, + input_cost_per_image_above_128k_tokens: 4e-5, + input_cost_per_video_per_second_above_128k_tokens: 4e-5, + input_cost_per_audio_per_second_above_128k_tokens: 4e-6, + output_cost_per_token: 4.6875e-9, + output_cost_per_character: 1.875e-8, + output_cost_per_token_above_128k_tokens: 9.375e-9, + output_cost_per_character_above_128k_tokens: 3.75e-8, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.5-flash-002": { + max_tokens: 8192, + max_input_tokens: 1048576, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_image: 2e-5, + input_cost_per_video_per_second: 2e-5, + input_cost_per_audio_per_second: 2e-6, + input_cost_per_token: 7.5e-8, + input_cost_per_character: 1.875e-8, + input_cost_per_token_above_128k_tokens: 1e-6, + input_cost_per_character_above_128k_tokens: 2.5e-7, + input_cost_per_image_above_128k_tokens: 4e-5, + input_cost_per_video_per_second_above_128k_tokens: 4e-5, + input_cost_per_audio_per_second_above_128k_tokens: 4e-6, + output_cost_per_token: 3e-7, + output_cost_per_character: 7.5e-8, + output_cost_per_token_above_128k_tokens: 6e-7, + output_cost_per_character_above_128k_tokens: 1.5e-7, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + 
supports_response_schema: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-flash", + }, + "gemini-1.5-flash-001": { + max_tokens: 8192, + max_input_tokens: 1000000, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_image: 2e-5, + input_cost_per_video_per_second: 2e-5, + input_cost_per_audio_per_second: 2e-6, + input_cost_per_token: 7.5e-8, + input_cost_per_character: 1.875e-8, + input_cost_per_token_above_128k_tokens: 1e-6, + input_cost_per_character_above_128k_tokens: 2.5e-7, + input_cost_per_image_above_128k_tokens: 4e-5, + input_cost_per_video_per_second_above_128k_tokens: 4e-5, + input_cost_per_audio_per_second_above_128k_tokens: 4e-6, + output_cost_per_token: 3e-7, + output_cost_per_character: 7.5e-8, + output_cost_per_token_above_128k_tokens: 6e-7, + output_cost_per_character_above_128k_tokens: 1.5e-7, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.5-flash-preview-0514": { + max_tokens: 8192, + max_input_tokens: 1000000, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_image: 2e-5, + input_cost_per_video_per_second: 2e-5, + input_cost_per_audio_per_second: 2e-6, + input_cost_per_token: 7.5e-8, + input_cost_per_character: 1.875e-8, + input_cost_per_token_above_128k_tokens: 1e-6, + input_cost_per_character_above_128k_tokens: 2.5e-7, + input_cost_per_image_above_128k_tokens: 4e-5, + input_cost_per_video_per_second_above_128k_tokens: 4e-5, + input_cost_per_audio_per_second_above_128k_tokens: 4e-6, + output_cost_per_token: 4.6875e-9, + output_cost_per_character: 1.875e-8, + output_cost_per_token_above_128k_tokens: 9.375e-9, + output_cost_per_character_above_128k_tokens: 3.75e-8, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-pro-experimental": { + max_tokens: 8192, + max_input_tokens: 1000000, + max_output_tokens: 8192, + input_cost_per_token: 0, + output_cost_per_token: 0, + input_cost_per_character: 0, + output_cost_per_character: 0, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_function_calling: false, + supports_tool_choice: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/gemini-experimental", + }, + "gemini-flash-experimental": { + max_tokens: 8192, + max_input_tokens: 1000000, + max_output_tokens: 8192, + input_cost_per_token: 0, + output_cost_per_token: 0, + input_cost_per_character: 0, + output_cost_per_character: 0, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_function_calling: false, + supports_tool_choice: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/gemini-experimental", + }, + "gemini-pro-vision": { + max_tokens: 2048, + max_input_tokens: 16384, + max_output_tokens: 2048, + max_images_per_prompt: 16, + 
max_videos_per_prompt: 1, + max_video_length: 2, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 5e-7, + litellm_provider: "vertex_ai-vision-models", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.0-pro-vision": { + max_tokens: 2048, + max_input_tokens: 16384, + max_output_tokens: 2048, + max_images_per_prompt: 16, + max_videos_per_prompt: 1, + max_video_length: 2, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 5e-7, + litellm_provider: "vertex_ai-vision-models", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini-1.0-pro-vision-001": { + max_tokens: 2048, + max_input_tokens: 16384, + max_output_tokens: 2048, + max_images_per_prompt: 16, + max_videos_per_prompt: 1, + max_video_length: 2, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 5e-7, + litellm_provider: "vertex_ai-vision-models", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "medlm-medium": { + max_tokens: 8192, + max_input_tokens: 32768, + max_output_tokens: 8192, + input_cost_per_character: 5e-7, + output_cost_per_character: 1e-6, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "medlm-large": { + max_tokens: 1024, + max_input_tokens: 8192, + max_output_tokens: 1024, + input_cost_per_character: 5e-6, + output_cost_per_character: 1.5e-5, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "vertex_ai/claude-3-sonnet@20240229": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "vertex_ai-anthropic_models", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_assistant_prefill: true, + }, + "vertex_ai/claude-3-5-sonnet@20240620": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "vertex_ai-anthropic_models", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_assistant_prefill: true, + }, + "vertex_ai/claude-3-5-sonnet-v2@20241022": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "vertex_ai-anthropic_models", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_assistant_prefill: true, + }, + "vertex_ai/claude-3-haiku@20240307": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 1.25e-6, + litellm_provider: "vertex_ai-anthropic_models", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_assistant_prefill: true, + }, + "vertex_ai/claude-3-opus@20240229": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 7.5e-5, + litellm_provider: 
"vertex_ai-anthropic_models", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_assistant_prefill: true, + }, + "vertex_ai/meta/llama3-405b-instruct-maas": { + max_tokens: 32000, + max_input_tokens: 32000, + max_output_tokens: 32000, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "vertex_ai-llama_models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + }, + "vertex_ai/meta/llama3-70b-instruct-maas": { + max_tokens: 32000, + max_input_tokens: 32000, + max_output_tokens: 32000, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "vertex_ai-llama_models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + }, + "vertex_ai/meta/llama3-8b-instruct-maas": { + max_tokens: 32000, + max_input_tokens: 32000, + max_output_tokens: 32000, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "vertex_ai-llama_models", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + }, + "vertex_ai/meta/llama-3.2-90b-vision-instruct-maas": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 2048, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "vertex_ai-llama_models", + mode: "chat", + supports_system_messages: true, + supports_vision: true, + source: + "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas", + }, + "vertex_ai/mistral-large@latest": { + max_tokens: 8191, + max_input_tokens: 128000, + max_output_tokens: 8191, + input_cost_per_token: 2e-6, + output_cost_per_token: 6e-6, + litellm_provider: "vertex_ai-mistral_models", + mode: "chat", + supports_function_calling: true, + }, + "vertex_ai/mistral-large@2407": { + max_tokens: 8191, + max_input_tokens: 128000, + max_output_tokens: 8191, + input_cost_per_token: 2e-6, + output_cost_per_token: 6e-6, + litellm_provider: "vertex_ai-mistral_models", + mode: "chat", + supports_function_calling: true, + }, + "vertex_ai/mistral-nemo@latest": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 1.5e-7, + litellm_provider: "vertex_ai-mistral_models", + mode: "chat", + supports_function_calling: true, + }, + "vertex_ai/jamba-1.5-mini@001": { + max_tokens: 256000, + max_input_tokens: 256000, + max_output_tokens: 256000, + input_cost_per_token: 2e-7, + output_cost_per_token: 4e-7, + litellm_provider: "vertex_ai-ai21_models", + mode: "chat", + }, + "vertex_ai/jamba-1.5-large@001": { + max_tokens: 256000, + max_input_tokens: 256000, + max_output_tokens: 256000, + input_cost_per_token: 2e-6, + output_cost_per_token: 8e-6, + litellm_provider: "vertex_ai-ai21_models", + mode: "chat", + }, + "vertex_ai/jamba-1.5": { + max_tokens: 256000, + max_input_tokens: 256000, + max_output_tokens: 256000, + input_cost_per_token: 2e-7, + output_cost_per_token: 4e-7, + litellm_provider: "vertex_ai-ai21_models", + mode: "chat", + }, + "vertex_ai/jamba-1.5-mini": { + max_tokens: 256000, + max_input_tokens: 256000, + max_output_tokens: 256000, + input_cost_per_token: 2e-7, + output_cost_per_token: 4e-7, + litellm_provider: "vertex_ai-ai21_models", + mode: "chat", + }, + "vertex_ai/jamba-1.5-large": { + max_tokens: 256000, + max_input_tokens: 256000, + max_output_tokens: 256000, + input_cost_per_token: 2e-6, + 
output_cost_per_token: 8e-6, + litellm_provider: "vertex_ai-ai21_models", + mode: "chat", + }, + "vertex_ai/mistral-nemo@2407": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 3e-6, + output_cost_per_token: 3e-6, + litellm_provider: "vertex_ai-mistral_models", + mode: "chat", + supports_function_calling: true, + }, + "vertex_ai/codestral@latest": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 2e-7, + output_cost_per_token: 6e-7, + litellm_provider: "vertex_ai-mistral_models", + mode: "chat", + supports_function_calling: true, + }, + "vertex_ai/codestral@2405": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 2e-7, + output_cost_per_token: 6e-7, + litellm_provider: "vertex_ai-mistral_models", + mode: "chat", + supports_function_calling: true, + }, + "vertex_ai/imagegeneration@006": { + output_cost_per_image: 0.02, + litellm_provider: "vertex_ai-image-models", + mode: "image_generation", + source: "https://cloud.google.com/vertex-ai/generative-ai/pricing", + }, + "vertex_ai/imagen-3.0-generate-001": { + output_cost_per_image: 0.04, + litellm_provider: "vertex_ai-image-models", + mode: "image_generation", + source: "https://cloud.google.com/vertex-ai/generative-ai/pricing", + }, + "vertex_ai/imagen-3.0-fast-generate-001": { + output_cost_per_image: 0.02, + litellm_provider: "vertex_ai-image-models", + mode: "image_generation", + source: "https://cloud.google.com/vertex-ai/generative-ai/pricing", + }, + "text-embedding-004": { + max_tokens: 2048, + max_input_tokens: 2048, + output_vector_size: 768, + input_cost_per_character: 2.5e-8, + input_cost_per_token: 1e-7, + output_cost_per_token: 0, + litellm_provider: "vertex_ai-embedding-models", + mode: "embedding", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models", + }, + "text-multilingual-embedding-002": { + max_tokens: 2048, + max_input_tokens: 2048, + output_vector_size: 768, + input_cost_per_character: 2.5e-8, + input_cost_per_token: 1e-7, + output_cost_per_token: 0, + litellm_provider: "vertex_ai-embedding-models", + mode: "embedding", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models", + }, + "textembedding-gecko": { + max_tokens: 3072, + max_input_tokens: 3072, + output_vector_size: 768, + input_cost_per_character: 2.5e-8, + input_cost_per_token: 1e-7, + output_cost_per_token: 0, + litellm_provider: "vertex_ai-embedding-models", + mode: "embedding", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "textembedding-gecko-multilingual": { + max_tokens: 3072, + max_input_tokens: 3072, + output_vector_size: 768, + input_cost_per_character: 2.5e-8, + input_cost_per_token: 1e-7, + output_cost_per_token: 0, + litellm_provider: "vertex_ai-embedding-models", + mode: "embedding", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "textembedding-gecko-multilingual@001": { + max_tokens: 3072, + max_input_tokens: 3072, + output_vector_size: 768, + input_cost_per_character: 2.5e-8, + input_cost_per_token: 1e-7, + output_cost_per_token: 0, + litellm_provider: "vertex_ai-embedding-models", + mode: "embedding", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "textembedding-gecko@001": { + max_tokens: 3072, + max_input_tokens: 3072, + output_vector_size: 768, + 
input_cost_per_character: 2.5e-8, + input_cost_per_token: 1e-7, + output_cost_per_token: 0, + litellm_provider: "vertex_ai-embedding-models", + mode: "embedding", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "textembedding-gecko@003": { + max_tokens: 3072, + max_input_tokens: 3072, + output_vector_size: 768, + input_cost_per_character: 2.5e-8, + input_cost_per_token: 1e-7, + output_cost_per_token: 0, + litellm_provider: "vertex_ai-embedding-models", + mode: "embedding", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "text-embedding-preview-0409": { + max_tokens: 3072, + max_input_tokens: 3072, + output_vector_size: 768, + input_cost_per_token: 6.25e-9, + input_cost_per_token_batch_requests: 5e-9, + output_cost_per_token: 0, + litellm_provider: "vertex_ai-embedding-models", + mode: "embedding", + source: "https://cloud.google.com/vertex-ai/generative-ai/pricing", + }, + "text-multilingual-embedding-preview-0409": { + max_tokens: 3072, + max_input_tokens: 3072, + output_vector_size: 768, + input_cost_per_token: 6.25e-9, + output_cost_per_token: 0, + litellm_provider: "vertex_ai-embedding-models", + mode: "embedding", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "palm/chat-bison": { + max_tokens: 4096, + max_input_tokens: 8192, + max_output_tokens: 4096, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + litellm_provider: "palm", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "palm/chat-bison-001": { + max_tokens: 4096, + max_input_tokens: 8192, + max_output_tokens: 4096, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + litellm_provider: "palm", + mode: "chat", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "palm/text-bison": { + max_tokens: 1024, + max_input_tokens: 8192, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + litellm_provider: "palm", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "palm/text-bison-001": { + max_tokens: 1024, + max_input_tokens: 8192, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + litellm_provider: "palm", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "palm/text-bison-safety-off": { + max_tokens: 1024, + max_input_tokens: 8192, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + litellm_provider: "palm", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "palm/text-bison-safety-recitation-off": { + max_tokens: 1024, + max_input_tokens: 8192, + max_output_tokens: 1024, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 1.25e-7, + litellm_provider: "palm", + mode: "completion", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini/gemini-1.5-flash-002": { + max_tokens: 8192, + max_input_tokens: 1048576, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + 
max_pdf_size_mb: 30, + cache_read_input_token_cost: 1.875e-8, + cache_creation_input_token_cost: 1e-6, + input_cost_per_token: 7.5e-8, + input_cost_per_token_above_128k_tokens: 1.5e-7, + output_cost_per_token: 3e-7, + output_cost_per_token_above_128k_tokens: 6e-7, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + supports_prompt_caching: true, + tpm: 4000000, + rpm: 2000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-1.5-flash-001": { + max_tokens: 8192, + max_input_tokens: 1048576, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + cache_read_input_token_cost: 1.875e-8, + cache_creation_input_token_cost: 1e-6, + input_cost_per_token: 7.5e-8, + input_cost_per_token_above_128k_tokens: 1.5e-7, + output_cost_per_token: 3e-7, + output_cost_per_token_above_128k_tokens: 6e-7, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + supports_prompt_caching: true, + tpm: 4000000, + rpm: 2000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-1.5-flash": { + max_tokens: 8192, + max_input_tokens: 1048576, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_token: 7.5e-8, + input_cost_per_token_above_128k_tokens: 1.5e-7, + output_cost_per_token: 3e-7, + output_cost_per_token_above_128k_tokens: 6e-7, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + tpm: 4000000, + rpm: 2000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-1.5-flash-latest": { + max_tokens: 8192, + max_input_tokens: 1048576, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_token: 7.5e-8, + input_cost_per_token_above_128k_tokens: 1.5e-7, + output_cost_per_token: 3e-7, + output_cost_per_token_above_128k_tokens: 6e-7, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + supports_prompt_caching: true, + tpm: 4000000, + rpm: 2000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-1.5-flash-8b-exp-0924": { + max_tokens: 8192, + max_input_tokens: 1048576, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_token: 0, + input_cost_per_token_above_128k_tokens: 0, + output_cost_per_token: 0, + output_cost_per_token_above_128k_tokens: 0, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + supports_prompt_caching: true, + tpm: 4000000, + rpm: 4000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-1.5-flash-exp-0827": { + max_tokens: 8192, + max_input_tokens: 
1048576, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_token: 0, + input_cost_per_token_above_128k_tokens: 0, + output_cost_per_token: 0, + output_cost_per_token_above_128k_tokens: 0, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + tpm: 4000000, + rpm: 2000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-1.5-flash-8b-exp-0827": { + max_tokens: 8192, + max_input_tokens: 1000000, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_token: 0, + input_cost_per_token_above_128k_tokens: 0, + output_cost_per_token: 0, + output_cost_per_token_above_128k_tokens: 0, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + tpm: 4000000, + rpm: 4000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-pro": { + max_tokens: 8192, + max_input_tokens: 32760, + max_output_tokens: 8192, + input_cost_per_token: 3.5e-7, + input_cost_per_token_above_128k_tokens: 7e-7, + output_cost_per_token: 1.05e-6, + output_cost_per_token_above_128k_tokens: 2.1e-6, + litellm_provider: "gemini", + mode: "chat", + supports_function_calling: true, + rpd: 30000, + tpm: 120000, + rpm: 360, + source: "https://ai.google.dev/gemini-api/docs/models/gemini", + }, + "gemini/gemini-1.5-pro": { + max_tokens: 8192, + max_input_tokens: 2097152, + max_output_tokens: 8192, + input_cost_per_token: 3.5e-6, + input_cost_per_token_above_128k_tokens: 7e-6, + output_cost_per_token: 1.05e-5, + output_cost_per_token_above_128k_tokens: 2.1e-5, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_tool_choice: true, + supports_response_schema: true, + tpm: 4000000, + rpm: 1000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-1.5-pro-002": { + max_tokens: 8192, + max_input_tokens: 2097152, + max_output_tokens: 8192, + input_cost_per_token: 3.5e-6, + input_cost_per_token_above_128k_tokens: 7e-6, + output_cost_per_token: 1.05e-5, + output_cost_per_token_above_128k_tokens: 2.1e-5, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_tool_choice: true, + supports_response_schema: true, + supports_prompt_caching: true, + tpm: 4000000, + rpm: 1000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-1.5-pro-001": { + max_tokens: 8192, + max_input_tokens: 2097152, + max_output_tokens: 8192, + input_cost_per_token: 3.5e-6, + input_cost_per_token_above_128k_tokens: 7e-6, + output_cost_per_token: 1.05e-5, + output_cost_per_token_above_128k_tokens: 2.1e-5, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_tool_choice: true, + supports_response_schema: true, + supports_prompt_caching: true, + tpm: 4000000, + rpm: 1000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-1.5-pro-exp-0801": { + max_tokens: 8192, + max_input_tokens: 2097152, + 
max_output_tokens: 8192, + input_cost_per_token: 3.5e-6, + input_cost_per_token_above_128k_tokens: 7e-6, + output_cost_per_token: 1.05e-5, + output_cost_per_token_above_128k_tokens: 2.1e-5, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_tool_choice: true, + supports_response_schema: true, + tpm: 4000000, + rpm: 1000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-1.5-pro-exp-0827": { + max_tokens: 8192, + max_input_tokens: 2097152, + max_output_tokens: 8192, + input_cost_per_token: 0, + input_cost_per_token_above_128k_tokens: 0, + output_cost_per_token: 0, + output_cost_per_token_above_128k_tokens: 0, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_tool_choice: true, + supports_response_schema: true, + tpm: 4000000, + rpm: 1000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-1.5-pro-latest": { + max_tokens: 8192, + max_input_tokens: 1048576, + max_output_tokens: 8192, + input_cost_per_token: 3.5e-6, + input_cost_per_token_above_128k_tokens: 7e-6, + output_cost_per_token: 1.05e-6, + output_cost_per_token_above_128k_tokens: 2.1e-5, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_tool_choice: true, + supports_response_schema: true, + tpm: 4000000, + rpm: 1000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-pro-vision": { + max_tokens: 2048, + max_input_tokens: 30720, + max_output_tokens: 2048, + input_cost_per_token: 3.5e-7, + input_cost_per_token_above_128k_tokens: 7e-7, + output_cost_per_token: 1.05e-6, + output_cost_per_token_above_128k_tokens: 2.1e-6, + litellm_provider: "gemini", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + rpd: 30000, + tpm: 120000, + rpm: 360, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini/gemini-gemma-2-27b-it": { + max_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 3.5e-7, + output_cost_per_token: 1.05e-6, + litellm_provider: "gemini", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "gemini/gemini-gemma-2-9b-it": { + max_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 3.5e-7, + output_cost_per_token: 1.05e-6, + litellm_provider: "gemini", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + }, + "command-r": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 6e-7, + litellm_provider: "cohere_chat", + mode: "chat", + supports_function_calling: true, + }, + "command-r-08-2024": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 6e-7, + litellm_provider: "cohere_chat", + mode: "chat", + supports_function_calling: true, + }, + "command-light": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 3e-7, + output_cost_per_token: 6e-7, + litellm_provider: "cohere_chat", + mode: "chat", + }, + "command-r-plus": { + 
max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 2.5e-6, + output_cost_per_token: 1e-5, + litellm_provider: "cohere_chat", + mode: "chat", + supports_function_calling: true, + }, + "command-r-plus-08-2024": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 2.5e-6, + output_cost_per_token: 1e-5, + litellm_provider: "cohere_chat", + mode: "chat", + supports_function_calling: true, + }, + "command-nightly": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1e-6, + output_cost_per_token: 2e-6, + litellm_provider: "cohere", + mode: "completion", + }, + command: { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1e-6, + output_cost_per_token: 2e-6, + litellm_provider: "cohere", + mode: "completion", + }, + "rerank-english-v3.0": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + max_query_tokens: 2048, + input_cost_per_token: 0.0, + input_cost_per_query: 0.002, + output_cost_per_token: 0.0, + litellm_provider: "cohere", + mode: "rerank", + }, + "rerank-multilingual-v3.0": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + max_query_tokens: 2048, + input_cost_per_token: 0.0, + input_cost_per_query: 0.002, + output_cost_per_token: 0.0, + litellm_provider: "cohere", + mode: "rerank", + }, + "rerank-english-v2.0": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + max_query_tokens: 2048, + input_cost_per_token: 0.0, + input_cost_per_query: 0.002, + output_cost_per_token: 0.0, + litellm_provider: "cohere", + mode: "rerank", + }, + "rerank-multilingual-v2.0": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + max_query_tokens: 2048, + input_cost_per_token: 0.0, + input_cost_per_query: 0.002, + output_cost_per_token: 0.0, + litellm_provider: "cohere", + mode: "rerank", + }, + "embed-english-v3.0": { + max_tokens: 1024, + max_input_tokens: 1024, + input_cost_per_token: 1e-7, + input_cost_per_image: 0.0001, + output_cost_per_token: 0.0, + litellm_provider: "cohere", + mode: "embedding", + supports_image_input: true, + supports_embedding_image_input: true, + metadata: { + notes: + "'supports_image_input' is a deprecated field. 
Use 'supports_embedding_image_input' instead.", + }, + }, + "embed-english-light-v3.0": { + max_tokens: 1024, + max_input_tokens: 1024, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "cohere", + mode: "embedding", + }, + "embed-multilingual-v3.0": { + max_tokens: 1024, + max_input_tokens: 1024, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "cohere", + mode: "embedding", + }, + "embed-english-v2.0": { + max_tokens: 4096, + max_input_tokens: 4096, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "cohere", + mode: "embedding", + }, + "embed-english-light-v2.0": { + max_tokens: 1024, + max_input_tokens: 1024, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "cohere", + mode: "embedding", + }, + "embed-multilingual-v2.0": { + max_tokens: 768, + max_input_tokens: 768, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "cohere", + mode: "embedding", + }, + "replicate/meta/llama-2-13b": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1e-7, + output_cost_per_token: 5e-7, + litellm_provider: "replicate", + mode: "chat", + }, + "replicate/meta/llama-2-13b-chat": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1e-7, + output_cost_per_token: 5e-7, + litellm_provider: "replicate", + mode: "chat", + }, + "replicate/meta/llama-2-70b": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 6.5e-7, + output_cost_per_token: 2.75e-6, + litellm_provider: "replicate", + mode: "chat", + }, + "replicate/meta/llama-2-70b-chat": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 6.5e-7, + output_cost_per_token: 2.75e-6, + litellm_provider: "replicate", + mode: "chat", + }, + "replicate/meta/llama-2-7b": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 5e-8, + output_cost_per_token: 2.5e-7, + litellm_provider: "replicate", + mode: "chat", + }, + "replicate/meta/llama-2-7b-chat": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 5e-8, + output_cost_per_token: 2.5e-7, + litellm_provider: "replicate", + mode: "chat", + }, + "replicate/meta/llama-3-70b": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 6.5e-7, + output_cost_per_token: 2.75e-6, + litellm_provider: "replicate", + mode: "chat", + }, + "replicate/meta/llama-3-70b-instruct": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 6.5e-7, + output_cost_per_token: 2.75e-6, + litellm_provider: "replicate", + mode: "chat", + }, + "replicate/meta/llama-3-8b": { + max_tokens: 8086, + max_input_tokens: 8086, + max_output_tokens: 8086, + input_cost_per_token: 5e-8, + output_cost_per_token: 2.5e-7, + litellm_provider: "replicate", + mode: "chat", + }, + "replicate/meta/llama-3-8b-instruct": { + max_tokens: 8086, + max_input_tokens: 8086, + max_output_tokens: 8086, + input_cost_per_token: 5e-8, + output_cost_per_token: 2.5e-7, + litellm_provider: "replicate", + mode: "chat", + }, + "replicate/mistralai/mistral-7b-v0.1": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 5e-8, + output_cost_per_token: 2.5e-7, + litellm_provider: "replicate", + mode: "chat", + }, + 
"replicate/mistralai/mistral-7b-instruct-v0.2": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 5e-8, + output_cost_per_token: 2.5e-7, + litellm_provider: "replicate", + mode: "chat", + }, + "replicate/mistralai/mixtral-8x7b-instruct-v0.1": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 3e-7, + output_cost_per_token: 1e-6, + litellm_provider: "replicate", + mode: "chat", + }, + "openrouter/deepseek/deepseek-coder": { + max_tokens: 8192, + max_input_tokens: 66000, + max_output_tokens: 4096, + input_cost_per_token: 1.4e-7, + output_cost_per_token: 2.8e-7, + litellm_provider: "openrouter", + supports_prompt_caching: true, + mode: "chat", + }, + "openrouter/microsoft/wizardlm-2-8x22b:nitro": { + max_tokens: 65536, + input_cost_per_token: 1e-6, + output_cost_per_token: 1e-6, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/google/gemini-pro-1.5": { + max_tokens: 8192, + max_input_tokens: 1000000, + max_output_tokens: 8192, + input_cost_per_token: 2.5e-6, + output_cost_per_token: 7.5e-6, + input_cost_per_image: 0.00265, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "openrouter/mistralai/mixtral-8x22b-instruct": { + max_tokens: 65536, + input_cost_per_token: 6.5e-7, + output_cost_per_token: 6.5e-7, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/cohere/command-r-plus": { + max_tokens: 128000, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/databricks/dbrx-instruct": { + max_tokens: 32768, + input_cost_per_token: 6e-7, + output_cost_per_token: 6e-7, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/anthropic/claude-3-haiku": { + max_tokens: 200000, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 1.25e-6, + input_cost_per_image: 0.0004, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "openrouter/anthropic/claude-3-haiku-20240307": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 1.25e-6, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 264, + }, + "anthropic/claude-3-5-sonnet-20241022": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + cache_creation_input_token_cost: 3.75e-6, + cache_read_input_token_cost: 3e-7, + litellm_provider: "anthropic", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 159, + supports_assistant_prefill: true, + supports_prompt_caching: true, + }, + "anthropic/claude-3-5-sonnet-latest": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + cache_creation_input_token_cost: 3.75e-6, + cache_read_input_token_cost: 3e-7, + litellm_provider: "anthropic", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 159, + supports_assistant_prefill: true, + supports_prompt_caching: true, + }, + "openrouter/anthropic/claude-3.5-sonnet": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 
3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 159, + supports_assistant_prefill: true, + }, + "openrouter/anthropic/claude-3.5-sonnet:beta": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 159, + }, + "openrouter/anthropic/claude-3-sonnet": { + max_tokens: 200000, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + input_cost_per_image: 0.0048, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "openrouter/mistralai/mistral-large": { + max_tokens: 32000, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/cognitivecomputations/dolphin-mixtral-8x7b": { + max_tokens: 32769, + input_cost_per_token: 5e-7, + output_cost_per_token: 5e-7, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/google/gemini-pro-vision": { + max_tokens: 45875, + input_cost_per_token: 1.25e-7, + output_cost_per_token: 3.75e-7, + input_cost_per_image: 0.0025, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "openrouter/fireworks/firellava-13b": { + max_tokens: 4096, + input_cost_per_token: 2e-7, + output_cost_per_token: 2e-7, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/meta-llama/llama-3-8b-instruct:free": { + max_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/meta-llama/llama-3-8b-instruct:extended": { + max_tokens: 16384, + input_cost_per_token: 2.25e-7, + output_cost_per_token: 2.25e-6, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/meta-llama/llama-3-70b-instruct:nitro": { + max_tokens: 8192, + input_cost_per_token: 9e-7, + output_cost_per_token: 9e-7, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/meta-llama/llama-3-70b-instruct": { + max_tokens: 8192, + input_cost_per_token: 5.9e-7, + output_cost_per_token: 7.9e-7, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/openai/o1-mini": { + max_tokens: 65536, + max_input_tokens: 128000, + max_output_tokens: 65536, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.2e-5, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: false, + }, + "openrouter/openai/o1-mini-2024-09-12": { + max_tokens: 65536, + max_input_tokens: 128000, + max_output_tokens: 65536, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.2e-5, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: false, + }, + "openrouter/openai/o1-preview": { + max_tokens: 32768, + max_input_tokens: 128000, + max_output_tokens: 32768, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 6e-5, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: false, + }, + "openrouter/openai/o1-preview-2024-09-12": { + max_tokens: 32768, + max_input_tokens: 
128000, + max_output_tokens: 32768, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 6e-5, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: false, + }, + "openrouter/openai/gpt-4o": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 5e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: true, + }, + "openrouter/openai/gpt-4o-2024-05-13": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 5e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: true, + }, + "openrouter/openai/gpt-4-vision-preview": { + max_tokens: 130000, + input_cost_per_token: 1e-5, + output_cost_per_token: 3e-5, + input_cost_per_image: 0.01445, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "openrouter/openai/gpt-3.5-turbo": { + max_tokens: 4095, + input_cost_per_token: 1.5e-6, + output_cost_per_token: 2e-6, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/openai/gpt-3.5-turbo-16k": { + max_tokens: 16383, + input_cost_per_token: 3e-6, + output_cost_per_token: 4e-6, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/openai/gpt-4": { + max_tokens: 8192, + input_cost_per_token: 3e-5, + output_cost_per_token: 6e-5, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/anthropic/claude-instant-v1": { + max_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 1.63e-6, + output_cost_per_token: 5.51e-6, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/anthropic/claude-2": { + max_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 1.102e-5, + output_cost_per_token: 3.268e-5, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/anthropic/claude-3-opus": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 7.5e-5, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + tool_use_system_prompt_tokens: 395, + }, + "openrouter/google/palm-2-chat-bison": { + max_tokens: 25804, + input_cost_per_token: 5e-7, + output_cost_per_token: 5e-7, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/google/palm-2-codechat-bison": { + max_tokens: 20070, + input_cost_per_token: 5e-7, + output_cost_per_token: 5e-7, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/meta-llama/llama-2-13b-chat": { + max_tokens: 4096, + input_cost_per_token: 2e-7, + output_cost_per_token: 2e-7, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/meta-llama/llama-2-70b-chat": { + max_tokens: 4096, + input_cost_per_token: 1.5e-6, + output_cost_per_token: 1.5e-6, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/meta-llama/codellama-34b-instruct": { + max_tokens: 8192, + input_cost_per_token: 5e-7, + output_cost_per_token: 5e-7, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/nousresearch/nous-hermes-llama2-13b": { + max_tokens: 4096, + input_cost_per_token: 2e-7, + output_cost_per_token: 2e-7, + 
litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/mancer/weaver": { + max_tokens: 8000, + input_cost_per_token: 5.625e-6, + output_cost_per_token: 5.625e-6, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/gryphe/mythomax-l2-13b": { + max_tokens: 8192, + input_cost_per_token: 1.875e-6, + output_cost_per_token: 1.875e-6, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/jondurbin/airoboros-l2-70b-2.1": { + max_tokens: 4096, + input_cost_per_token: 1.3875e-5, + output_cost_per_token: 1.3875e-5, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/undi95/remm-slerp-l2-13b": { + max_tokens: 6144, + input_cost_per_token: 1.875e-6, + output_cost_per_token: 1.875e-6, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/pygmalionai/mythalion-13b": { + max_tokens: 4096, + input_cost_per_token: 1.875e-6, + output_cost_per_token: 1.875e-6, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/mistralai/mistral-7b-instruct": { + max_tokens: 8192, + input_cost_per_token: 1.3e-7, + output_cost_per_token: 1.3e-7, + litellm_provider: "openrouter", + mode: "chat", + }, + "openrouter/mistralai/mistral-7b-instruct:free": { + max_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "openrouter", + mode: "chat", + }, + "j2-ultra": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 1.5e-5, + litellm_provider: "ai21", + mode: "completion", + }, + "jamba-1.5-mini@001": { + max_tokens: 256000, + max_input_tokens: 256000, + max_output_tokens: 256000, + input_cost_per_token: 2e-7, + output_cost_per_token: 4e-7, + litellm_provider: "ai21", + mode: "chat", + }, + "jamba-1.5-large@001": { + max_tokens: 256000, + max_input_tokens: 256000, + max_output_tokens: 256000, + input_cost_per_token: 2e-6, + output_cost_per_token: 8e-6, + litellm_provider: "ai21", + mode: "chat", + }, + "jamba-1.5": { + max_tokens: 256000, + max_input_tokens: 256000, + max_output_tokens: 256000, + input_cost_per_token: 2e-7, + output_cost_per_token: 4e-7, + litellm_provider: "ai21", + mode: "chat", + }, + "jamba-1.5-mini": { + max_tokens: 256000, + max_input_tokens: 256000, + max_output_tokens: 256000, + input_cost_per_token: 2e-7, + output_cost_per_token: 4e-7, + litellm_provider: "ai21", + mode: "chat", + }, + "jamba-1.5-large": { + max_tokens: 256000, + max_input_tokens: 256000, + max_output_tokens: 256000, + input_cost_per_token: 2e-6, + output_cost_per_token: 8e-6, + litellm_provider: "ai21", + mode: "chat", + }, + "j2-mid": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 1e-5, + output_cost_per_token: 1e-5, + litellm_provider: "ai21", + mode: "completion", + }, + "j2-light": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 3e-6, + litellm_provider: "ai21", + mode: "completion", + }, + dolphin: { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 16384, + input_cost_per_token: 5e-7, + output_cost_per_token: 5e-7, + litellm_provider: "nlp_cloud", + mode: "completion", + }, + chatdolphin: { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 16384, + input_cost_per_token: 5e-7, + output_cost_per_token: 5e-7, + litellm_provider: "nlp_cloud", + mode: "chat", + }, + "luminous-base": { + max_tokens: 2048, + input_cost_per_token: 3e-5, + output_cost_per_token: 
3.3e-5, + litellm_provider: "aleph_alpha", + mode: "completion", + }, + "luminous-base-control": { + max_tokens: 2048, + input_cost_per_token: 3.75e-5, + output_cost_per_token: 4.125e-5, + litellm_provider: "aleph_alpha", + mode: "chat", + }, + "luminous-extended": { + max_tokens: 2048, + input_cost_per_token: 4.5e-5, + output_cost_per_token: 4.95e-5, + litellm_provider: "aleph_alpha", + mode: "completion", + }, + "luminous-extended-control": { + max_tokens: 2048, + input_cost_per_token: 5.625e-5, + output_cost_per_token: 6.1875e-5, + litellm_provider: "aleph_alpha", + mode: "chat", + }, + "luminous-supreme": { + max_tokens: 2048, + input_cost_per_token: 0.000175, + output_cost_per_token: 0.0001925, + litellm_provider: "aleph_alpha", + mode: "completion", + }, + "luminous-supreme-control": { + max_tokens: 2048, + input_cost_per_token: 0.00021875, + output_cost_per_token: 0.000240625, + litellm_provider: "aleph_alpha", + mode: "chat", + }, + "ai21.j2-mid-v1": { + max_tokens: 8191, + max_input_tokens: 8191, + max_output_tokens: 8191, + input_cost_per_token: 1.25e-5, + output_cost_per_token: 1.25e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "ai21.j2-ultra-v1": { + max_tokens: 8191, + max_input_tokens: 8191, + max_output_tokens: 8191, + input_cost_per_token: 1.88e-5, + output_cost_per_token: 1.88e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "ai21.jamba-instruct-v1:0": { + max_tokens: 4096, + max_input_tokens: 70000, + max_output_tokens: 4096, + input_cost_per_token: 5e-7, + output_cost_per_token: 7e-7, + litellm_provider: "bedrock", + mode: "chat", + supports_system_messages: true, + }, + "amazon.titan-text-lite-v1": { + max_tokens: 4000, + max_input_tokens: 42000, + max_output_tokens: 4000, + input_cost_per_token: 3e-7, + output_cost_per_token: 4e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "amazon.titan-text-express-v1": { + max_tokens: 8000, + max_input_tokens: 42000, + max_output_tokens: 8000, + input_cost_per_token: 1.3e-6, + output_cost_per_token: 1.7e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "amazon.titan-text-premier-v1:0": { + max_tokens: 32000, + max_input_tokens: 42000, + max_output_tokens: 32000, + input_cost_per_token: 5e-7, + output_cost_per_token: 1.5e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "amazon.titan-embed-text-v1": { + max_tokens: 8192, + max_input_tokens: 8192, + output_vector_size: 1536, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "bedrock", + mode: "embedding", + }, + "amazon.titan-embed-text-v2:0": { + max_tokens: 8192, + max_input_tokens: 8192, + output_vector_size: 1024, + input_cost_per_token: 2e-7, + output_cost_per_token: 0.0, + litellm_provider: "bedrock", + mode: "embedding", + }, + "mistral.mistral-7b-instruct-v0:2": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 2e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "mistral.mixtral-8x7b-instruct-v0:1": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 4.5e-7, + output_cost_per_token: 7e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "mistral.mistral-large-2402-v1:0": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + }, + "mistral.mistral-large-2407-v1:0": { + max_tokens: 8191, + 
max_input_tokens: 128000, + max_output_tokens: 8191, + input_cost_per_token: 3e-6, + output_cost_per_token: 9e-6, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + }, + "mistral.mistral-small-2402-v1:0": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 1e-6, + output_cost_per_token: 3e-6, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + }, + "bedrock/us-west-2/mistral.mixtral-8x7b-instruct-v0:1": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 4.5e-7, + output_cost_per_token: 7e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/mistral.mixtral-8x7b-instruct-v0:1": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 4.5e-7, + output_cost_per_token: 7e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-west-3/mistral.mixtral-8x7b-instruct-v0:1": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 5.9e-7, + output_cost_per_token: 9.1e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/mistral.mistral-7b-instruct-v0:2": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 2e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/mistral.mistral-7b-instruct-v0:2": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 2e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-west-3/mistral.mistral-7b-instruct-v0:2": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 2e-7, + output_cost_per_token: 2.6e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/mistral.mistral-large-2402-v1:0": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/mistral.mistral-large-2402-v1:0": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + }, + "bedrock/eu-west-3/mistral.mistral-large-2402-v1:0": { + max_tokens: 8191, + max_input_tokens: 32000, + max_output_tokens: 8191, + input_cost_per_token: 1.04e-5, + output_cost_per_token: 3.12e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + }, + "anthropic.claude-3-sonnet-20240229-v1:0": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "anthropic.claude-3-5-sonnet-20240620-v1:0": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "anthropic.claude-3-5-sonnet-20241022-v2:0": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + 
litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_assistant_prefill: true, + supports_prompt_caching: true, + }, + "anthropic.claude-3-5-sonnet-latest-v2:0": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "anthropic.claude-3-haiku-20240307-v1:0": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 1.25e-6, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "anthropic.claude-3-opus-20240229-v1:0": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 7.5e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "us.anthropic.claude-3-sonnet-20240229-v1:0": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "us.anthropic.claude-3-5-sonnet-20240620-v1:0": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "us.anthropic.claude-3-5-sonnet-20241022-v2:0": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_assistant_prefill: true, + }, + "us.anthropic.claude-3-haiku-20240307-v1:0": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 1.25e-6, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "us.anthropic.claude-3-opus-20240229-v1:0": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 7.5e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "eu.anthropic.claude-3-sonnet-20240229-v1:0": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "eu.anthropic.claude-3-5-sonnet-20240620-v1:0": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "eu.anthropic.claude-3-5-sonnet-20241022-v2:0": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_assistant_prefill: true, + }, 
+ "eu.anthropic.claude-3-haiku-20240307-v1:0": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 1.25e-6, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "eu.anthropic.claude-3-opus-20240229-v1:0": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 7.5e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "anthropic.claude-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/anthropic.claude-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/anthropic.claude-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-northeast-1/anthropic.claude-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.0455, + output_cost_per_second: 0.0455, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.02527, + output_cost_per_second: 0.02527, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-central-1/anthropic.claude-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.0415, + output_cost_per_second: 0.0415, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.02305, + output_cost_per_second: 0.02305, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/1-month-commitment/anthropic.claude-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.0175, + output_cost_per_second: 0.0175, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/6-month-commitment/anthropic.claude-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.00972, + output_cost_per_second: 0.00972, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/1-month-commitment/anthropic.claude-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.0175, + output_cost_per_second: 0.0175, + 
litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/6-month-commitment/anthropic.claude-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.00972, + output_cost_per_second: 0.00972, + litellm_provider: "bedrock", + mode: "chat", + }, + "anthropic.claude-v2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/anthropic.claude-v2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/anthropic.claude-v2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-northeast-1/anthropic.claude-v2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.0455, + output_cost_per_second: 0.0455, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.02527, + output_cost_per_second: 0.02527, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-central-1/anthropic.claude-v2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.0415, + output_cost_per_second: 0.0415, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.02305, + output_cost_per_second: 0.02305, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/1-month-commitment/anthropic.claude-v2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.0175, + output_cost_per_second: 0.0175, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/6-month-commitment/anthropic.claude-v2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.00972, + output_cost_per_second: 0.00972, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/1-month-commitment/anthropic.claude-v2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.0175, + output_cost_per_second: 0.0175, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/6-month-commitment/anthropic.claude-v2": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.00972, + output_cost_per_second: 0.00972, + litellm_provider: "bedrock", + mode: 
"chat", + }, + "anthropic.claude-v2:1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/anthropic.claude-v2:1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/anthropic.claude-v2:1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-northeast-1/anthropic.claude-v2:1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v2:1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.0455, + output_cost_per_second: 0.0455, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v2:1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.02527, + output_cost_per_second: 0.02527, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-central-1/anthropic.claude-v2:1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-6, + output_cost_per_token: 2.4e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v2:1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.0415, + output_cost_per_second: 0.0415, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v2:1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.02305, + output_cost_per_second: 0.02305, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/1-month-commitment/anthropic.claude-v2:1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.0175, + output_cost_per_second: 0.0175, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/6-month-commitment/anthropic.claude-v2:1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.00972, + output_cost_per_second: 0.00972, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/1-month-commitment/anthropic.claude-v2:1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.0175, + output_cost_per_second: 0.0175, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/6-month-commitment/anthropic.claude-v2:1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.00972, + output_cost_per_second: 0.00972, + litellm_provider: "bedrock", + mode: "chat", + }, + "anthropic.claude-instant-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 1.63e-6, + output_cost_per_token: 5.51e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + 
"bedrock/us-east-1/anthropic.claude-instant-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-7, + output_cost_per_token: 2.4e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/1-month-commitment/anthropic.claude-instant-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.011, + output_cost_per_second: 0.011, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/6-month-commitment/anthropic.claude-instant-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.00611, + output_cost_per_second: 0.00611, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/1-month-commitment/anthropic.claude-instant-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.011, + output_cost_per_second: 0.011, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/6-month-commitment/anthropic.claude-instant-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.00611, + output_cost_per_second: 0.00611, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-2/anthropic.claude-instant-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 8e-7, + output_cost_per_token: 2.4e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-northeast-1/anthropic.claude-instant-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 2.23e-6, + output_cost_per_token: 7.55e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-instant-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.01475, + output_cost_per_second: 0.01475, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-instant-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.008194, + output_cost_per_second: 0.008194, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-central-1/anthropic.claude-instant-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_token: 2.48e-6, + output_cost_per_token: 8.38e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-central-1/1-month-commitment/anthropic.claude-instant-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.01635, + output_cost_per_second: 0.01635, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-central-1/6-month-commitment/anthropic.claude-instant-v1": { + max_tokens: 8191, + max_input_tokens: 100000, + max_output_tokens: 8191, + input_cost_per_second: 0.009083, + output_cost_per_second: 0.009083, + litellm_provider: "bedrock", + mode: "chat", + }, + "cohere.command-text-v14": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-6, + output_cost_per_token: 2e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/*/1-month-commitment/cohere.command-text-v14": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_second: 0.011, + output_cost_per_second: 
0.011, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/*/6-month-commitment/cohere.command-text-v14": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_second: 0.0066027, + output_cost_per_second: 0.0066027, + litellm_provider: "bedrock", + mode: "chat", + }, + "cohere.command-light-text-v14": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 3e-7, + output_cost_per_token: 6e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/*/1-month-commitment/cohere.command-light-text-v14": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_second: 0.001902, + output_cost_per_second: 0.001902, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/*/6-month-commitment/cohere.command-light-text-v14": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_second: 0.0011416, + output_cost_per_second: 0.0011416, + litellm_provider: "bedrock", + mode: "chat", + }, + "cohere.command-r-plus-v1:0": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "bedrock", + mode: "chat", + }, + "cohere.command-r-v1:0": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 5e-7, + output_cost_per_token: 1.5e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "cohere.embed-english-v3": { + max_tokens: 512, + max_input_tokens: 512, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "bedrock", + mode: "embedding", + }, + "cohere.embed-multilingual-v3": { + max_tokens: 512, + max_input_tokens: 512, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "bedrock", + mode: "embedding", + }, + "meta.llama2-13b-chat-v1": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 7.5e-7, + output_cost_per_token: 1e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "meta.llama2-70b-chat-v1": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1.95e-6, + output_cost_per_token: 2.56e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "meta.llama3-8b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 3e-7, + output_cost_per_token: 6e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/meta.llama3-8b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 3e-7, + output_cost_per_token: 6e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-1/meta.llama3-8b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 3e-7, + output_cost_per_token: 6e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-south-1/meta.llama3-8b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 3.6e-7, + output_cost_per_token: 7.2e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ca-central-1/meta.llama3-8b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 3.5e-7, + output_cost_per_token: 6.9e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + 
"bedrock/eu-west-1/meta.llama3-8b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 3.2e-7, + output_cost_per_token: 6.5e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-west-2/meta.llama3-8b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 3.9e-7, + output_cost_per_token: 7.8e-7, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/sa-east-1/meta.llama3-8b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 5e-7, + output_cost_per_token: 1.01e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "meta.llama3-70b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 2.65e-6, + output_cost_per_token: 3.5e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-east-1/meta.llama3-70b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 2.65e-6, + output_cost_per_token: 3.5e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/us-west-1/meta.llama3-70b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 2.65e-6, + output_cost_per_token: 3.5e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ap-south-1/meta.llama3-70b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 3.18e-6, + output_cost_per_token: 4.2e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/ca-central-1/meta.llama3-70b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 3.05e-6, + output_cost_per_token: 4.03e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-west-1/meta.llama3-70b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 2.86e-6, + output_cost_per_token: 3.78e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/eu-west-2/meta.llama3-70b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 3.45e-6, + output_cost_per_token: 4.55e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "bedrock/sa-east-1/meta.llama3-70b-instruct-v1:0": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 4.45e-6, + output_cost_per_token: 5.88e-6, + litellm_provider: "bedrock", + mode: "chat", + }, + "meta.llama3-1-8b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 2048, + input_cost_per_token: 2.2e-7, + output_cost_per_token: 2.2e-7, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "meta.llama3-1-70b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 2048, + input_cost_per_token: 9.9e-7, + output_cost_per_token: 9.9e-7, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "meta.llama3-1-405b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 5.32e-6, + output_cost_per_token: 1.6e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + 
supports_tool_choice: false, + }, + "meta.llama3-2-1b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-7, + output_cost_per_token: 1e-7, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "us.meta.llama3-2-1b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1e-7, + output_cost_per_token: 1e-7, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "eu.meta.llama3-2-1b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.3e-7, + output_cost_per_token: 1.3e-7, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "meta.llama3-2-3b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 1.5e-7, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "us.meta.llama3-2-3b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 1.5e-7, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "eu.meta.llama3-2-3b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.9e-7, + output_cost_per_token: 1.9e-7, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "meta.llama3-2-11b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 3.5e-7, + output_cost_per_token: 3.5e-7, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "us.meta.llama3-2-11b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 3.5e-7, + output_cost_per_token: 3.5e-7, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "meta.llama3-2-90b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 2e-6, + output_cost_per_token: 2e-6, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "us.meta.llama3-2-90b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 2e-6, + output_cost_per_token: 2e-6, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "512-x-512/50-steps/stability.stable-diffusion-xl-v0": { + max_tokens: 77, + max_input_tokens: 77, + output_cost_per_image: 0.018, + litellm_provider: "bedrock", + mode: "image_generation", + }, + "512-x-512/max-steps/stability.stable-diffusion-xl-v0": { + max_tokens: 77, + max_input_tokens: 77, + output_cost_per_image: 0.036, + litellm_provider: "bedrock", + mode: "image_generation", + }, + "max-x-max/50-steps/stability.stable-diffusion-xl-v0": { + max_tokens: 77, + max_input_tokens: 77, + output_cost_per_image: 0.036, + 
litellm_provider: "bedrock", + mode: "image_generation", + }, + "max-x-max/max-steps/stability.stable-diffusion-xl-v0": { + max_tokens: 77, + max_input_tokens: 77, + output_cost_per_image: 0.072, + litellm_provider: "bedrock", + mode: "image_generation", + }, + "1024-x-1024/50-steps/stability.stable-diffusion-xl-v1": { + max_tokens: 77, + max_input_tokens: 77, + output_cost_per_image: 0.04, + litellm_provider: "bedrock", + mode: "image_generation", + }, + "1024-x-1024/max-steps/stability.stable-diffusion-xl-v1": { + max_tokens: 77, + max_input_tokens: 77, + output_cost_per_image: 0.08, + litellm_provider: "bedrock", + mode: "image_generation", + }, + "sagemaker/meta-textgeneration-llama-2-7b": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "sagemaker", + mode: "completion", + }, + "sagemaker/meta-textgeneration-llama-2-7b-f": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "sagemaker", + mode: "chat", + }, + "sagemaker/meta-textgeneration-llama-2-13b": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "sagemaker", + mode: "completion", + }, + "sagemaker/meta-textgeneration-llama-2-13b-f": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "sagemaker", + mode: "chat", + }, + "sagemaker/meta-textgeneration-llama-2-70b": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "sagemaker", + mode: "completion", + }, + "sagemaker/meta-textgeneration-llama-2-70b-b-f": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "sagemaker", + mode: "chat", + }, + "together-ai-up-to-4b": { + input_cost_per_token: 1e-7, + output_cost_per_token: 1e-7, + litellm_provider: "together_ai", + mode: "chat", + }, + "together-ai-4.1b-8b": { + input_cost_per_token: 2e-7, + output_cost_per_token: 2e-7, + litellm_provider: "together_ai", + mode: "chat", + }, + "together-ai-8.1b-21b": { + max_tokens: 1000, + input_cost_per_token: 3e-7, + output_cost_per_token: 3e-7, + litellm_provider: "together_ai", + mode: "chat", + }, + "together-ai-21.1b-41b": { + input_cost_per_token: 8e-7, + output_cost_per_token: 8e-7, + litellm_provider: "together_ai", + mode: "chat", + }, + "together-ai-41.1b-80b": { + input_cost_per_token: 9e-7, + output_cost_per_token: 9e-7, + litellm_provider: "together_ai", + mode: "chat", + }, + "together-ai-81.1b-110b": { + input_cost_per_token: 1.8e-6, + output_cost_per_token: 1.8e-6, + litellm_provider: "together_ai", + mode: "chat", + }, + "together-ai-embedding-up-to-150m": { + input_cost_per_token: 8e-9, + output_cost_per_token: 0.0, + litellm_provider: "together_ai", + mode: "embedding", + }, + "together-ai-embedding-151m-to-350m": { + input_cost_per_token: 1.6e-8, + output_cost_per_token: 0.0, + litellm_provider: "together_ai", + mode: "embedding", + }, + "together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1": { + input_cost_per_token: 6e-7, + output_cost_per_token: 6e-7, + litellm_provider: "together_ai", + supports_function_calling: true, + supports_parallel_function_calling: true, + 
supports_response_schema: true, + mode: "chat", + }, + "together_ai/mistralai/Mistral-7B-Instruct-v0.1": { + litellm_provider: "together_ai", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + mode: "chat", + }, + "together_ai/togethercomputer/CodeLlama-34b-Instruct": { + litellm_provider: "together_ai", + supports_function_calling: true, + supports_parallel_function_calling: true, + mode: "chat", + }, + "ollama/codegemma": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "completion", + }, + "ollama/codegeex4": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + supports_function_calling: false, + }, + "ollama/deepseek-coder-v2-instruct": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + supports_function_calling: true, + }, + "ollama/deepseek-coder-v2-base": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "completion", + supports_function_calling: true, + }, + "ollama/deepseek-coder-v2-lite-instruct": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + supports_function_calling: true, + }, + "ollama/deepseek-coder-v2-lite-base": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "completion", + supports_function_calling: true, + }, + "ollama/internlm2_5-20b-chat": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + supports_function_calling: true, + }, + "ollama/llama2": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + }, + "ollama/llama2:7b": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + }, + "ollama/llama2:13b": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + }, + "ollama/llama2:70b": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + }, + "ollama/llama2-uncensored": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "completion", + }, + "ollama/llama3": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + }, + "ollama/llama3:8b": { + max_tokens: 8192, + max_input_tokens: 
8192, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + }, + "ollama/llama3:70b": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + }, + "ollama/llama3.1": { + max_tokens: 32768, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + supports_function_calling: true, + }, + "ollama/mistral-large-instruct-2407": { + max_tokens: 65536, + max_input_tokens: 65536, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + }, + "ollama/mistral": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "completion", + }, + "ollama/mistral-7B-Instruct-v0.1": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + }, + "ollama/mistral-7B-Instruct-v0.2": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 32768, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + }, + "ollama/mixtral-8x7B-Instruct-v0.1": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 32768, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + }, + "ollama/mixtral-8x22B-Instruct-v0.1": { + max_tokens: 65536, + max_input_tokens: 65536, + max_output_tokens: 65536, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "chat", + }, + "ollama/codellama": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "completion", + }, + "ollama/orca-mini": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "completion", + }, + "ollama/vicuna": { + max_tokens: 2048, + max_input_tokens: 2048, + max_output_tokens: 2048, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "ollama", + mode: "completion", + }, + "deepinfra/lizpreciatior/lzlv_70b_fp16_hf": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 7e-7, + output_cost_per_token: 9e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/Gryphe/MythoMax-L2-13b": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 2.2e-7, + output_cost_per_token: 2.2e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/mistralai/Mistral-7B-Instruct-v0.1": { + max_tokens: 8191, + max_input_tokens: 32768, + max_output_tokens: 8191, + input_cost_per_token: 1.3e-7, + output_cost_per_token: 1.3e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/meta-llama/Llama-2-70b-chat-hf": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 7e-7, + output_cost_per_token: 9e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + 
"deepinfra/cognitivecomputations/dolphin-2.6-mixtral-8x7b": { + max_tokens: 8191, + max_input_tokens: 32768, + max_output_tokens: 8191, + input_cost_per_token: 2.7e-7, + output_cost_per_token: 2.7e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/codellama/CodeLlama-34b-Instruct-hf": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 6e-7, + output_cost_per_token: 6e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/deepinfra/mixtral": { + max_tokens: 4096, + max_input_tokens: 32000, + max_output_tokens: 4096, + input_cost_per_token: 2.7e-7, + output_cost_per_token: 2.7e-7, + litellm_provider: "deepinfra", + mode: "completion", + }, + "deepinfra/Phind/Phind-CodeLlama-34B-v2": { + max_tokens: 4096, + max_input_tokens: 16384, + max_output_tokens: 4096, + input_cost_per_token: 6e-7, + output_cost_per_token: 6e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/mistralai/Mixtral-8x7B-Instruct-v0.1": { + max_tokens: 8191, + max_input_tokens: 32768, + max_output_tokens: 8191, + input_cost_per_token: 2.7e-7, + output_cost_per_token: 2.7e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/deepinfra/airoboros-70b": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 7e-7, + output_cost_per_token: 9e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/01-ai/Yi-34B-Chat": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 6e-7, + output_cost_per_token: 6e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/01-ai/Yi-6B-200K": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 1.3e-7, + output_cost_per_token: 1.3e-7, + litellm_provider: "deepinfra", + mode: "completion", + }, + "deepinfra/jondurbin/airoboros-l2-70b-gpt4-1.4.1": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 7e-7, + output_cost_per_token: 9e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/meta-llama/Llama-2-13b-chat-hf": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 2.2e-7, + output_cost_per_token: 2.2e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/amazon/MistralLite": { + max_tokens: 8191, + max_input_tokens: 32768, + max_output_tokens: 8191, + input_cost_per_token: 2e-7, + output_cost_per_token: 2e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/meta-llama/Llama-2-7b-chat-hf": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1.3e-7, + output_cost_per_token: 1.3e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/meta-llama/Meta-Llama-3-8B-Instruct": { + max_tokens: 8191, + max_input_tokens: 8191, + max_output_tokens: 4096, + input_cost_per_token: 8e-8, + output_cost_per_token: 8e-8, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/meta-llama/Meta-Llama-3-70B-Instruct": { + max_tokens: 8191, + max_input_tokens: 8191, + max_output_tokens: 4096, + input_cost_per_token: 5.9e-7, + output_cost_per_token: 7.9e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "deepinfra/01-ai/Yi-34B-200K": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 6e-7, + output_cost_per_token: 6e-7, + litellm_provider: "deepinfra", + mode: 
"completion", + }, + "deepinfra/openchat/openchat_3.5": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1.3e-7, + output_cost_per_token: 1.3e-7, + litellm_provider: "deepinfra", + mode: "chat", + }, + "perplexity/codellama-34b-instruct": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 16384, + input_cost_per_token: 3.5e-7, + output_cost_per_token: 1.4e-6, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/codellama-70b-instruct": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 16384, + input_cost_per_token: 7e-7, + output_cost_per_token: 2.8e-6, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/llama-3.1-70b-instruct": { + max_tokens: 131072, + max_input_tokens: 131072, + max_output_tokens: 131072, + input_cost_per_token: 1e-6, + output_cost_per_token: 1e-6, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/llama-3.1-8b-instruct": { + max_tokens: 131072, + max_input_tokens: 131072, + max_output_tokens: 131072, + input_cost_per_token: 2e-7, + output_cost_per_token: 2e-7, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/llama-3.1-sonar-huge-128k-online": { + max_tokens: 127072, + max_input_tokens: 127072, + max_output_tokens: 127072, + input_cost_per_token: 5e-6, + output_cost_per_token: 5e-6, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/llama-3.1-sonar-large-128k-online": { + max_tokens: 127072, + max_input_tokens: 127072, + max_output_tokens: 127072, + input_cost_per_token: 1e-6, + output_cost_per_token: 1e-6, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/llama-3.1-sonar-large-128k-chat": { + max_tokens: 131072, + max_input_tokens: 131072, + max_output_tokens: 131072, + input_cost_per_token: 1e-6, + output_cost_per_token: 1e-6, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/llama-3.1-sonar-small-128k-chat": { + max_tokens: 131072, + max_input_tokens: 131072, + max_output_tokens: 131072, + input_cost_per_token: 2e-7, + output_cost_per_token: 2e-7, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/llama-3.1-sonar-small-128k-online": { + max_tokens: 127072, + max_input_tokens: 127072, + max_output_tokens: 127072, + input_cost_per_token: 2e-7, + output_cost_per_token: 2e-7, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/pplx-7b-chat": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 7e-8, + output_cost_per_token: 2.8e-7, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/pplx-70b-chat": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 7e-7, + output_cost_per_token: 2.8e-6, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/pplx-7b-online": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 2.8e-7, + input_cost_per_request: 0.005, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/pplx-70b-online": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 0.0, + output_cost_per_token: 2.8e-6, + input_cost_per_request: 0.005, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/llama-2-70b-chat": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 7e-7, + output_cost_per_token: 2.8e-6, + 
litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/mistral-7b-instruct": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 7e-8, + output_cost_per_token: 2.8e-7, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/mixtral-8x7b-instruct": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 7e-8, + output_cost_per_token: 2.8e-7, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/sonar-small-chat": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 16384, + input_cost_per_token: 7e-8, + output_cost_per_token: 2.8e-7, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/sonar-small-online": { + max_tokens: 12000, + max_input_tokens: 12000, + max_output_tokens: 12000, + input_cost_per_token: 0, + output_cost_per_token: 2.8e-7, + input_cost_per_request: 0.005, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/sonar-medium-chat": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 16384, + input_cost_per_token: 6e-7, + output_cost_per_token: 1.8e-6, + litellm_provider: "perplexity", + mode: "chat", + }, + "perplexity/sonar-medium-online": { + max_tokens: 12000, + max_input_tokens: 12000, + max_output_tokens: 12000, + input_cost_per_token: 0, + output_cost_per_token: 1.8e-6, + input_cost_per_request: 0.005, + litellm_provider: "perplexity", + mode: "chat", + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p2-1b-instruct": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 16384, + input_cost_per_token: 1e-7, + output_cost_per_token: 1e-7, + litellm_provider: "fireworks_ai", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + source: "https://fireworks.ai/pricing", + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p2-3b-instruct": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 16384, + input_cost_per_token: 1e-7, + output_cost_per_token: 1e-7, + litellm_provider: "fireworks_ai", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + source: "https://fireworks.ai/pricing", + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p2-11b-vision-instruct": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 16384, + input_cost_per_token: 2e-7, + output_cost_per_token: 2e-7, + litellm_provider: "fireworks_ai", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + source: "https://fireworks.ai/pricing", + }, + "accounts/fireworks/models/llama-v3p2-90b-vision-instruct": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 16384, + input_cost_per_token: 9e-7, + output_cost_per_token: 9e-7, + litellm_provider: "fireworks_ai", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + source: "https://fireworks.ai/pricing", + }, + "fireworks_ai/accounts/fireworks/models/firefunction-v2": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 9e-7, + output_cost_per_token: 9e-7, + litellm_provider: "fireworks_ai", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + source: "https://fireworks.ai/pricing", + }, + "fireworks_ai/accounts/fireworks/models/mixtral-8x22b-instruct-hf": { + max_tokens: 65536, + max_input_tokens: 65536, + max_output_tokens: 
65536, + input_cost_per_token: 1.2e-6, + output_cost_per_token: 1.2e-6, + litellm_provider: "fireworks_ai", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + source: "https://fireworks.ai/pricing", + }, + "fireworks_ai/accounts/fireworks/models/qwen2-72b-instruct": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 32768, + input_cost_per_token: 9e-7, + output_cost_per_token: 9e-7, + litellm_provider: "fireworks_ai", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + source: "https://fireworks.ai/pricing", + }, + "fireworks_ai/accounts/fireworks/models/yi-large": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 32768, + input_cost_per_token: 3e-6, + output_cost_per_token: 3e-6, + litellm_provider: "fireworks_ai", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + source: "https://fireworks.ai/pricing", + }, + "fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-instruct": { + max_tokens: 65536, + max_input_tokens: 65536, + max_output_tokens: 8192, + input_cost_per_token: 1.2e-6, + output_cost_per_token: 1.2e-6, + litellm_provider: "fireworks_ai", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + source: "https://fireworks.ai/pricing", + }, + "fireworks_ai/nomic-ai/nomic-embed-text-v1.5": { + max_tokens: 8192, + max_input_tokens: 8192, + input_cost_per_token: 8e-9, + output_cost_per_token: 0.0, + litellm_provider: "fireworks_ai-embedding-models", + mode: "embedding", + source: "https://fireworks.ai/pricing", + }, + "fireworks_ai/nomic-ai/nomic-embed-text-v1": { + max_tokens: 8192, + max_input_tokens: 8192, + input_cost_per_token: 8e-9, + output_cost_per_token: 0.0, + litellm_provider: "fireworks_ai-embedding-models", + mode: "embedding", + source: "https://fireworks.ai/pricing", + }, + "fireworks_ai/WhereIsAI/UAE-Large-V1": { + max_tokens: 512, + max_input_tokens: 512, + input_cost_per_token: 1.6e-8, + output_cost_per_token: 0.0, + litellm_provider: "fireworks_ai-embedding-models", + mode: "embedding", + source: "https://fireworks.ai/pricing", + }, + "fireworks_ai/thenlper/gte-large": { + max_tokens: 512, + max_input_tokens: 512, + input_cost_per_token: 1.6e-8, + output_cost_per_token: 0.0, + litellm_provider: "fireworks_ai-embedding-models", + mode: "embedding", + source: "https://fireworks.ai/pricing", + }, + "fireworks_ai/thenlper/gte-base": { + max_tokens: 512, + max_input_tokens: 512, + input_cost_per_token: 8e-9, + output_cost_per_token: 0.0, + litellm_provider: "fireworks_ai-embedding-models", + mode: "embedding", + source: "https://fireworks.ai/pricing", + }, + "fireworks-ai-up-to-16b": { + input_cost_per_token: 2e-7, + output_cost_per_token: 2e-7, + litellm_provider: "fireworks_ai", + }, + "fireworks-ai-16.1b-to-80b": { + input_cost_per_token: 9e-7, + output_cost_per_token: 9e-7, + litellm_provider: "fireworks_ai", + }, + "fireworks-ai-moe-up-to-56b": { + input_cost_per_token: 5e-7, + output_cost_per_token: 5e-7, + litellm_provider: "fireworks_ai", + }, + "fireworks-ai-56b-to-176b": { + input_cost_per_token: 1.2e-6, + output_cost_per_token: 1.2e-6, + litellm_provider: "fireworks_ai", + }, + "fireworks-ai-default": { + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "fireworks_ai", + }, + "fireworks-ai-embedding-up-to-150m": { + input_cost_per_token: 8e-9, + output_cost_per_token: 0.0, + litellm_provider: "fireworks_ai-embedding-models", + }, + 
"fireworks-ai-embedding-150m-to-350m": { + input_cost_per_token: 1.6e-8, + output_cost_per_token: 0.0, + litellm_provider: "fireworks_ai-embedding-models", + }, + "anyscale/mistralai/Mistral-7B-Instruct-v0.1": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 16384, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 1.5e-7, + litellm_provider: "anyscale", + mode: "chat", + supports_function_calling: true, + source: + "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mistral-7B-Instruct-v0.1", + }, + "anyscale/mistralai/Mixtral-8x7B-Instruct-v0.1": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 16384, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 1.5e-7, + litellm_provider: "anyscale", + mode: "chat", + supports_function_calling: true, + source: + "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x7B-Instruct-v0.1", + }, + "anyscale/mistralai/Mixtral-8x22B-Instruct-v0.1": { + max_tokens: 65536, + max_input_tokens: 65536, + max_output_tokens: 65536, + input_cost_per_token: 9e-7, + output_cost_per_token: 9e-7, + litellm_provider: "anyscale", + mode: "chat", + supports_function_calling: true, + source: + "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x22B-Instruct-v0.1", + }, + "anyscale/HuggingFaceH4/zephyr-7b-beta": { + max_tokens: 16384, + max_input_tokens: 16384, + max_output_tokens: 16384, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 1.5e-7, + litellm_provider: "anyscale", + mode: "chat", + }, + "anyscale/google/gemma-7b-it": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 1.5e-7, + litellm_provider: "anyscale", + mode: "chat", + source: + "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/google-gemma-7b-it", + }, + "anyscale/meta-llama/Llama-2-7b-chat-hf": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 1.5e-7, + litellm_provider: "anyscale", + mode: "chat", + }, + "anyscale/meta-llama/Llama-2-13b-chat-hf": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 2.5e-7, + litellm_provider: "anyscale", + mode: "chat", + }, + "anyscale/meta-llama/Llama-2-70b-chat-hf": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1e-6, + output_cost_per_token: 1e-6, + litellm_provider: "anyscale", + mode: "chat", + }, + "anyscale/codellama/CodeLlama-34b-Instruct-hf": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1e-6, + output_cost_per_token: 1e-6, + litellm_provider: "anyscale", + mode: "chat", + }, + "anyscale/codellama/CodeLlama-70b-Instruct-hf": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1e-6, + output_cost_per_token: 1e-6, + litellm_provider: "anyscale", + mode: "chat", + source: + "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/codellama-CodeLlama-70b-Instruct-hf", + }, + "anyscale/meta-llama/Meta-Llama-3-8B-Instruct": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 1.5e-7, + litellm_provider: "anyscale", + mode: "chat", + source: + 
"https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-8B-Instruct", + }, + "anyscale/meta-llama/Meta-Llama-3-70B-Instruct": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 1e-6, + output_cost_per_token: 1e-6, + litellm_provider: "anyscale", + mode: "chat", + source: + "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-70B-Instruct", + }, + "cloudflare/@cf/meta/llama-2-7b-chat-fp16": { + max_tokens: 3072, + max_input_tokens: 3072, + max_output_tokens: 3072, + input_cost_per_token: 1.923e-6, + output_cost_per_token: 1.923e-6, + litellm_provider: "cloudflare", + mode: "chat", + }, + "cloudflare/@cf/meta/llama-2-7b-chat-int8": { + max_tokens: 2048, + max_input_tokens: 2048, + max_output_tokens: 2048, + input_cost_per_token: 1.923e-6, + output_cost_per_token: 1.923e-6, + litellm_provider: "cloudflare", + mode: "chat", + }, + "cloudflare/@cf/mistral/mistral-7b-instruct-v0.1": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 1.923e-6, + output_cost_per_token: 1.923e-6, + litellm_provider: "cloudflare", + mode: "chat", + }, + "cloudflare/@hf/thebloke/codellama-7b-instruct-awq": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1.923e-6, + output_cost_per_token: 1.923e-6, + litellm_provider: "cloudflare", + mode: "chat", + }, + "voyage/voyage-01": { + max_tokens: 4096, + max_input_tokens: 4096, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "embedding", + }, + "voyage/voyage-lite-01": { + max_tokens: 4096, + max_input_tokens: 4096, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "embedding", + }, + "voyage/voyage-large-2": { + max_tokens: 16000, + max_input_tokens: 16000, + input_cost_per_token: 1.2e-7, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "embedding", + }, + "voyage/voyage-law-2": { + max_tokens: 16000, + max_input_tokens: 16000, + input_cost_per_token: 1.2e-7, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "embedding", + }, + "voyage/voyage-code-2": { + max_tokens: 16000, + max_input_tokens: 16000, + input_cost_per_token: 1.2e-7, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "embedding", + }, + "voyage/voyage-2": { + max_tokens: 4000, + max_input_tokens: 4000, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "embedding", + }, + "voyage/voyage-lite-02-instruct": { + max_tokens: 4000, + max_input_tokens: 4000, + input_cost_per_token: 1e-7, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "embedding", + }, + "voyage/voyage-finance-2": { + max_tokens: 32000, + max_input_tokens: 32000, + input_cost_per_token: 1.2e-7, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "embedding", + }, + "databricks/databricks-meta-llama-3-1-405b-instruct": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 5e-6, + input_dbu_cost_per_token: 7.1429e-5, + output_cost_per_token: 1.500002e-5, + output_db_cost_per_token: 0.000214286, + litellm_provider: "databricks", + mode: "chat", + source: + "https://www.databricks.com/product/pricing/foundation-model-serving", + metadata: { + notes: + "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B 
conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation.", + }, + }, + "databricks/databricks-meta-llama-3-1-70b-instruct": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 1.00002e-6, + input_dbu_cost_per_token: 1.4286e-5, + output_cost_per_token: 2.99999e-6, + output_dbu_cost_per_token: 4.2857e-5, + litellm_provider: "databricks", + mode: "chat", + source: + "https://www.databricks.com/product/pricing/foundation-model-serving", + metadata: { + notes: + "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation.", + }, + }, + "databricks/databricks-dbrx-instruct": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 32768, + input_cost_per_token: 7.4998e-7, + input_dbu_cost_per_token: 1.0714e-5, + output_cost_per_token: 2.24901e-6, + output_dbu_cost_per_token: 3.2143e-5, + litellm_provider: "databricks", + mode: "chat", + source: + "https://www.databricks.com/product/pricing/foundation-model-serving", + metadata: { + notes: + "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation.", + }, + }, + "databricks/databricks-meta-llama-3-70b-instruct": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 1.00002e-6, + input_dbu_cost_per_token: 1.4286e-5, + output_cost_per_token: 2.99999e-6, + output_dbu_cost_per_token: 4.2857e-5, + litellm_provider: "databricks", + mode: "chat", + source: + "https://www.databricks.com/product/pricing/foundation-model-serving", + metadata: { + notes: + "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation.", + }, + }, + "databricks/databricks-llama-2-70b-chat": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 5.0001e-7, + input_dbu_cost_per_token: 7.143e-6, + output_cost_per_token: 1.5e-6, + output_dbu_cost_per_token: 2.1429e-5, + litellm_provider: "databricks", + mode: "chat", + source: + "https://www.databricks.com/product/pricing/foundation-model-serving", + metadata: { + notes: + "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation.", + }, + }, + "databricks/databricks-mixtral-8x7b-instruct": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 5.0001e-7, + input_dbu_cost_per_token: 7.143e-6, + output_cost_per_token: 9.9902e-7, + output_dbu_cost_per_token: 1.4286e-5, + litellm_provider: "databricks", + mode: "chat", + source: + "https://www.databricks.com/product/pricing/foundation-model-serving", + metadata: { + notes: + "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. 
Number provided for reference, '*_dbu_cost_per_token' used in actual calculation.", + }, + }, + "databricks/databricks-mpt-30b-instruct": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 9.9902e-7, + input_dbu_cost_per_token: 1.4286e-5, + output_cost_per_token: 9.9902e-7, + output_dbu_cost_per_token: 1.4286e-5, + litellm_provider: "databricks", + mode: "chat", + source: + "https://www.databricks.com/product/pricing/foundation-model-serving", + metadata: { + notes: + "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation.", + }, + }, + "databricks/databricks-mpt-7b-instruct": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 5.0001e-7, + input_dbu_cost_per_token: 7.143e-6, + output_cost_per_token: 0.0, + output_dbu_cost_per_token: 0.0, + litellm_provider: "databricks", + mode: "chat", + source: + "https://www.databricks.com/product/pricing/foundation-model-serving", + metadata: { + notes: + "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation.", + }, + }, + "databricks/databricks-bge-large-en": { + max_tokens: 512, + max_input_tokens: 512, + output_vector_size: 1024, + input_cost_per_token: 1.0003e-7, + input_dbu_cost_per_token: 1.429e-6, + output_cost_per_token: 0.0, + output_dbu_cost_per_token: 0.0, + litellm_provider: "databricks", + mode: "embedding", + source: + "https://www.databricks.com/product/pricing/foundation-model-serving", + metadata: { + notes: + "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation.", + }, + }, + "databricks/databricks-gte-large-en": { + max_tokens: 8192, + max_input_tokens: 8192, + output_vector_size: 1024, + input_cost_per_token: 1.2999e-7, + input_dbu_cost_per_token: 1.857e-6, + output_cost_per_token: 0.0, + output_dbu_cost_per_token: 0.0, + litellm_provider: "databricks", + mode: "embedding", + source: + "https://www.databricks.com/product/pricing/foundation-model-serving", + metadata: { + notes: + "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation.", + }, + }, + "azure/gpt-4o-mini-2024-07-18": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 1.65e-7, + output_cost_per_token: 6.6e-7, + cache_read_input_token_cost: 7.5e-8, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + supports_prompt_caching: true, + }, + "amazon.titan-embed-image-v1": { + max_tokens: 128, + max_input_tokens: 128, + output_vector_size: 1024, + input_cost_per_token: 8e-7, + input_cost_per_image: 6e-5, + output_cost_per_token: 0.0, + litellm_provider: "bedrock", + supports_image_input: true, + supports_embedding_image_input: true, + mode: "embedding", + source: + "https://us-east-1.console.aws.amazon.com/bedrock/home?region=us-east-1#/providers?model=amazon.titan-image-generator-v1", + metadata: { + notes: + "'supports_image_input' is a deprecated field. 
Use 'supports_embedding_image_input' instead.", + }, + }, + "azure_ai/mistral-large-2407": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 2e-6, + output_cost_per_token: 6e-6, + litellm_provider: "azure_ai", + supports_function_calling: true, + mode: "chat", + source: + "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-ai-large-2407-offer?tab=Overview", + }, + "azure_ai/ministral-3b": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 4e-8, + output_cost_per_token: 4e-8, + litellm_provider: "azure_ai", + supports_function_calling: true, + mode: "chat", + source: + "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.ministral-3b-2410-offer?tab=Overview", + }, + "azure_ai/Llama-3.2-11B-Vision-Instruct": { + max_tokens: 2048, + max_input_tokens: 128000, + max_output_tokens: 2048, + input_cost_per_token: 3.7e-7, + output_cost_per_token: 3.7e-7, + litellm_provider: "azure_ai", + supports_function_calling: true, + supports_vision: true, + mode: "chat", + source: + "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-11b-vision-instruct-offer?tab=Overview", + }, + "azure_ai/Llama-3.2-90B-Vision-Instruct": { + max_tokens: 2048, + max_input_tokens: 128000, + max_output_tokens: 2048, + input_cost_per_token: 2.04e-6, + output_cost_per_token: 2.04e-6, + litellm_provider: "azure_ai", + supports_function_calling: true, + supports_vision: true, + mode: "chat", + source: + "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-90b-vision-instruct-offer?tab=Overview", + }, + "azure_ai/Phi-3.5-mini-instruct": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.3e-7, + output_cost_per_token: 5.2e-7, + litellm_provider: "azure_ai", + mode: "chat", + supports_vision: false, + source: "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + }, + "azure_ai/Phi-3.5-vision-instruct": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.3e-7, + output_cost_per_token: 5.2e-7, + litellm_provider: "azure_ai", + mode: "chat", + supports_vision: true, + source: "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + }, + "azure_ai/Phi-3.5-MoE-instruct": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.6e-7, + output_cost_per_token: 6.4e-7, + litellm_provider: "azure_ai", + mode: "chat", + supports_vision: false, + source: "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + }, + "azure_ai/Phi-3-mini-4k-instruct": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1.3e-7, + output_cost_per_token: 5.2e-7, + litellm_provider: "azure_ai", + mode: "chat", + supports_vision: false, + source: "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + }, + "azure_ai/Phi-3-mini-128k-instruct": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.3e-7, + output_cost_per_token: 5.2e-7, + litellm_provider: "azure_ai", + mode: "chat", + supports_vision: false, + source: "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + }, + "azure_ai/Phi-3-small-8k-instruct": { + max_tokens: 4096, + max_input_tokens: 8192, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 6e-7, + litellm_provider: "azure_ai", + mode: "chat", + 
supports_vision: false, + source: "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + }, + "azure_ai/Phi-3-small-128k-instruct": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 6e-7, + litellm_provider: "azure_ai", + mode: "chat", + supports_vision: false, + source: "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + }, + "azure_ai/Phi-3-medium-4k-instruct": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 1.7e-7, + output_cost_per_token: 6.8e-7, + litellm_provider: "azure_ai", + mode: "chat", + supports_vision: false, + source: "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + }, + "azure_ai/Phi-3-medium-128k-instruct": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.7e-7, + output_cost_per_token: 6.8e-7, + litellm_provider: "azure_ai", + mode: "chat", + supports_vision: false, + source: "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + }, + "xai/grok-beta": { + max_tokens: 131072, + max_input_tokens: 131072, + max_output_tokens: 131072, + input_cost_per_token: 5e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "xai", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + }, + "claude-3-5-haiku-20241022": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 1e-6, + output_cost_per_token: 5e-6, + cache_creation_input_token_cost: 1.25e-6, + cache_read_input_token_cost: 1e-7, + litellm_provider: "anthropic", + mode: "chat", + supports_function_calling: true, + tool_use_system_prompt_tokens: 264, + supports_assistant_prefill: true, + supports_prompt_caching: true, + supports_response_schema: true, + }, + "vertex_ai/claude-3-5-haiku@20241022": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 1e-6, + output_cost_per_token: 5e-6, + litellm_provider: "vertex_ai-anthropic_models", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + }, + "openrouter/anthropic/claude-3-5-haiku": { + max_tokens: 200000, + input_cost_per_token: 1e-6, + output_cost_per_token: 5e-6, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + }, + "openrouter/anthropic/claude-3-5-haiku-20241022": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 1e-6, + output_cost_per_token: 5e-6, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + tool_use_system_prompt_tokens: 264, + }, + "anthropic.claude-3-5-haiku-20241022-v1:0": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 1e-6, + output_cost_per_token: 5e-6, + litellm_provider: "bedrock", + mode: "chat", + supports_assistant_prefill: true, + supports_function_calling: true, + supports_prompt_caching: true, + }, + "us.anthropic.claude-3-5-haiku-20241022-v1:0": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 1e-6, + output_cost_per_token: 5e-6, + litellm_provider: "bedrock", + mode: "chat", + supports_assistant_prefill: true, + supports_function_calling: true, + }, + "eu.anthropic.claude-3-5-haiku-20241022-v1:0": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 1e-6, + output_cost_per_token: 5e-6, + litellm_provider: 
"bedrock", + mode: "chat", + supports_function_calling: true, + }, + "stability.sd3-large-v1:0": { + max_tokens: 77, + max_input_tokens: 77, + output_cost_per_image: 0.08, + litellm_provider: "bedrock", + mode: "image_generation", + }, + "gpt-4o-2024-11-20": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 2.5e-6, + output_cost_per_token: 1e-5, + input_cost_per_token_batches: 1.25e-6, + output_cost_per_token_batches: 5e-6, + cache_read_input_token_cost: 1.25e-6, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "ft:gpt-4o-2024-11-20": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 3.75e-6, + cache_creation_input_token_cost: 1.875e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + }, + "azure/gpt-4o-2024-11-20": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 2.75e-6, + output_cost_per_token: 1.1e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + }, + "azure/global-standard/gpt-4o-2024-11-20": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 2.5e-6, + output_cost_per_token: 1e-5, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + supports_vision: true, + }, + "groq/llama-3.2-1b-preview": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 4e-8, + output_cost_per_token: 4e-8, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "groq/llama-3.2-3b-preview": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 6e-8, + output_cost_per_token: 6e-8, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "groq/llama-3.2-11b-text-preview": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 1.8e-7, + output_cost_per_token: 1.8e-7, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "groq/llama-3.2-11b-vision-preview": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 1.8e-7, + output_cost_per_token: 1.8e-7, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + supports_vision: true, + }, + "groq/llama-3.2-90b-text-preview": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 9e-7, + output_cost_per_token: 9e-7, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + }, + "groq/llama-3.2-90b-vision-preview": { + max_tokens: 8192, + max_input_tokens: 8192, + 
max_output_tokens: 8192, + input_cost_per_token: 9e-7, + output_cost_per_token: 9e-7, + litellm_provider: "groq", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + supports_vision: true, + }, + "vertex_ai/claude-3-sonnet": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "vertex_ai-anthropic_models", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_assistant_prefill: true, + }, + "vertex_ai/claude-3-5-sonnet": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "vertex_ai-anthropic_models", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_assistant_prefill: true, + }, + "vertex_ai/claude-3-5-sonnet-v2": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 3e-6, + output_cost_per_token: 1.5e-5, + litellm_provider: "vertex_ai-anthropic_models", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_assistant_prefill: true, + }, + "vertex_ai/claude-3-haiku": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 2.5e-7, + output_cost_per_token: 1.25e-6, + litellm_provider: "vertex_ai-anthropic_models", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_assistant_prefill: true, + }, + "vertex_ai/claude-3-5-haiku": { + max_tokens: 8192, + max_input_tokens: 200000, + max_output_tokens: 8192, + input_cost_per_token: 1e-6, + output_cost_per_token: 5e-6, + litellm_provider: "vertex_ai-anthropic_models", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + }, + "vertex_ai/claude-3-opus": { + max_tokens: 4096, + max_input_tokens: 200000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 7.5e-5, + litellm_provider: "vertex_ai-anthropic_models", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_assistant_prefill: true, + }, + "gemini/gemini-exp-1114": { + max_tokens: 8192, + max_input_tokens: 1048576, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_token: 0, + input_cost_per_token_above_128k_tokens: 0, + output_cost_per_token: 0, + output_cost_per_token_above_128k_tokens: 0, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + tpm: 4000000, + rpm: 1000, + source: "https://ai.google.dev/pricing", + metadata: { + notes: + "Rate limits not documented for gemini-exp-1114. 
Assuming same as gemini-1.5-pro.", + }, + }, + "openrouter/qwen/qwen-2.5-coder-32b-instruct": { + max_tokens: 33792, + max_input_tokens: 33792, + max_output_tokens: 33792, + input_cost_per_token: 1.8e-7, + output_cost_per_token: 1.8e-7, + litellm_provider: "openrouter", + mode: "chat", + }, + "us.meta.llama3-1-8b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 2048, + input_cost_per_token: 2.2e-7, + output_cost_per_token: 2.2e-7, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "us.meta.llama3-1-70b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 2048, + input_cost_per_token: 9.9e-7, + output_cost_per_token: 9.9e-7, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "us.meta.llama3-1-405b-instruct-v1:0": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 5.32e-6, + output_cost_per_token: 1.6e-5, + litellm_provider: "bedrock", + mode: "chat", + supports_function_calling: true, + supports_tool_choice: false, + }, + "stability.stable-image-ultra-v1:0": { + max_tokens: 77, + max_input_tokens: 77, + output_cost_per_image: 0.14, + litellm_provider: "bedrock", + mode: "image_generation", + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + input_cost_per_token: 9e-7, + output_cost_per_token: 9e-7, + litellm_provider: "fireworks_ai", + mode: "chat", + supports_function_calling: true, + supports_response_schema: true, + source: "https://fireworks.ai/pricing", + }, + "omni-moderation-latest": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 0, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "openai", + mode: "moderation", + }, + "omni-moderation-latest-intents": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 0, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "openai", + mode: "moderation", + }, + "omni-moderation-2024-09-26": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 0, + input_cost_per_token: 0.0, + output_cost_per_token: 0.0, + litellm_provider: "openai", + mode: "moderation", + }, + "gpt-4o-audio-preview-2024-12-17": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 2.5e-6, + input_cost_per_audio_token: 4e-5, + output_cost_per_token: 1e-5, + output_cost_per_audio_token: 8e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_audio_input: true, + supports_audio_output: true, + supports_system_messages: true, + }, + "gpt-4o-mini-audio-preview-2024-12-17": { + max_tokens: 16384, + max_input_tokens: 128000, + max_output_tokens: 16384, + input_cost_per_token: 1.5e-7, + input_cost_per_audio_token: 1e-5, + output_cost_per_token: 6e-7, + output_cost_per_audio_token: 2e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_audio_input: true, + supports_audio_output: true, + supports_system_messages: true, + }, + o1: { + max_tokens: 100000, + max_input_tokens: 200000, + max_output_tokens: 100000, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 6e-5, + 
cache_read_input_token_cost: 7.5e-6, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + supports_response_schema: true, + }, + "o1-2024-12-17": { + max_tokens: 100000, + max_input_tokens: 200000, + max_output_tokens: 100000, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 6e-5, + cache_read_input_token_cost: 7.5e-6, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + supports_response_schema: true, + }, + "gpt-4o-realtime-preview-2024-10-01": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 5e-6, + input_cost_per_audio_token: 0.0001, + cache_read_input_token_cost: 2.5e-6, + cache_creation_input_audio_token_cost: 2e-5, + output_cost_per_token: 2e-5, + output_cost_per_audio_token: 0.0002, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_audio_input: true, + supports_audio_output: true, + supports_system_messages: true, + }, + "gpt-4o-realtime-preview": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 5e-6, + input_cost_per_audio_token: 4e-5, + cache_read_input_token_cost: 2.5e-6, + output_cost_per_token: 2e-5, + output_cost_per_audio_token: 8e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_audio_input: true, + supports_audio_output: true, + supports_system_messages: true, + }, + "gpt-4o-realtime-preview-2024-12-17": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 5e-6, + input_cost_per_audio_token: 4e-5, + cache_read_input_token_cost: 2.5e-6, + output_cost_per_token: 2e-5, + output_cost_per_audio_token: 8e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_audio_input: true, + supports_audio_output: true, + supports_system_messages: true, + }, + "gpt-4o-mini-realtime-preview": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 6e-7, + input_cost_per_audio_token: 1e-5, + cache_read_input_token_cost: 3e-7, + cache_creation_input_audio_token_cost: 3e-7, + output_cost_per_token: 2.4e-6, + output_cost_per_audio_token: 2e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_audio_input: true, + supports_audio_output: true, + supports_system_messages: true, + }, + "gpt-4o-mini-realtime-preview-2024-12-17": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 6e-7, + input_cost_per_audio_token: 1e-5, + cache_read_input_token_cost: 3e-7, + cache_creation_input_audio_token_cost: 3e-7, + output_cost_per_token: 2.4e-6, + output_cost_per_audio_token: 2e-5, + litellm_provider: "openai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_audio_input: true, + supports_audio_output: true, + supports_system_messages: true, + }, + "azure/o1": { + max_tokens: 100000, + max_input_tokens: 200000, + max_output_tokens: 100000, 
+ input_cost_per_token: 1.5e-5, + output_cost_per_token: 6e-5, + cache_read_input_token_cost: 7.5e-6, + litellm_provider: "azure", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: true, + supports_prompt_caching: true, + }, + "azure_ai/Llama-3.3-70B-Instruct": { + max_tokens: 2048, + max_input_tokens: 128000, + max_output_tokens: 2048, + input_cost_per_token: 7.1e-7, + output_cost_per_token: 7.1e-7, + litellm_provider: "azure_ai", + supports_function_calling: true, + mode: "chat", + source: + "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.llama-3-3-70b-instruct-offer?tab=Overview", + }, + "mistral/mistral-large-2411": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 2e-6, + output_cost_per_token: 6e-6, + litellm_provider: "mistral", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + }, + "mistral/pixtral-large-latest": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 2e-6, + output_cost_per_token: 6e-6, + litellm_provider: "mistral", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + supports_vision: true, + }, + "mistral/pixtral-large-2411": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 2e-6, + output_cost_per_token: 6e-6, + litellm_provider: "mistral", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + supports_vision: true, + }, + "deepseek/deepseek-chat": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.4e-7, + input_cost_per_token_cache_hit: 1.4e-8, + cache_read_input_token_cost: 1.4e-8, + cache_creation_input_token_cost: 0.0, + output_cost_per_token: 2.8e-7, + litellm_provider: "deepseek", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + supports_tool_choice: true, + supports_prompt_caching: true, + }, + "deepseek/deepseek-coder": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.4e-7, + input_cost_per_token_cache_hit: 1.4e-8, + output_cost_per_token: 2.8e-7, + litellm_provider: "deepseek", + mode: "chat", + supports_function_calling: true, + supports_assistant_prefill: true, + supports_tool_choice: true, + supports_prompt_caching: true, + }, + "groq/llama-3.3-70b-versatile": { + max_tokens: 8192, + max_input_tokens: 128000, + max_output_tokens: 8192, + input_cost_per_token: 5.9e-7, + output_cost_per_token: 7.9e-7, + litellm_provider: "groq", + mode: "chat", + }, + "groq/llama-3.3-70b-specdec": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 5.9e-7, + output_cost_per_token: 9.9e-7, + litellm_provider: "groq", + mode: "chat", + }, + "friendliai/meta-llama-3.1-8b-instruct": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 1e-7, + output_cost_per_token: 1e-7, + litellm_provider: "friendliai", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_system_messages: true, + supports_response_schema: true, + }, + "friendliai/meta-llama-3.1-70b-instruct": { + max_tokens: 8192, + max_input_tokens: 8192, + max_output_tokens: 8192, + input_cost_per_token: 6e-7, + output_cost_per_token: 6e-7, + litellm_provider: "friendliai", + mode: 
"chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_system_messages: true, + supports_response_schema: true, + }, + "gemini-2.0-flash-exp": { + max_tokens: 8192, + max_input_tokens: 1048576, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_image: 0, + input_cost_per_video_per_second: 0, + input_cost_per_audio_per_second: 0, + input_cost_per_token: 0, + input_cost_per_character: 0, + input_cost_per_token_above_128k_tokens: 0, + input_cost_per_character_above_128k_tokens: 0, + input_cost_per_image_above_128k_tokens: 0, + input_cost_per_video_per_second_above_128k_tokens: 0, + input_cost_per_audio_per_second_above_128k_tokens: 0, + output_cost_per_token: 0, + output_cost_per_character: 0, + output_cost_per_token_above_128k_tokens: 0, + output_cost_per_character_above_128k_tokens: 0, + litellm_provider: "vertex_ai-language-models", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + supports_audio_output: true, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + }, + "gemini/gemini-2.0-flash-exp": { + max_tokens: 8192, + max_input_tokens: 1048576, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_image: 0, + input_cost_per_video_per_second: 0, + input_cost_per_audio_per_second: 0, + input_cost_per_token: 0, + input_cost_per_character: 0, + input_cost_per_token_above_128k_tokens: 0, + input_cost_per_character_above_128k_tokens: 0, + input_cost_per_image_above_128k_tokens: 0, + input_cost_per_video_per_second_above_128k_tokens: 0, + input_cost_per_audio_per_second_above_128k_tokens: 0, + output_cost_per_token: 0, + output_cost_per_character: 0, + output_cost_per_token_above_128k_tokens: 0, + output_cost_per_character_above_128k_tokens: 0, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + supports_audio_output: true, + tpm: 4000000, + rpm: 10, + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + }, + "vertex_ai/mistral-large@2411-001": { + max_tokens: 8191, + max_input_tokens: 128000, + max_output_tokens: 8191, + input_cost_per_token: 2e-6, + output_cost_per_token: 6e-6, + litellm_provider: "vertex_ai-mistral_models", + mode: "chat", + supports_function_calling: true, + }, + "vertex_ai/mistral-large-2411": { + max_tokens: 8191, + max_input_tokens: 128000, + max_output_tokens: 8191, + input_cost_per_token: 2e-6, + output_cost_per_token: 6e-6, + litellm_provider: "vertex_ai-mistral_models", + mode: "chat", + supports_function_calling: true, + }, + "text-embedding-005": { + max_tokens: 2048, + max_input_tokens: 2048, + output_vector_size: 768, + input_cost_per_character: 2.5e-8, + input_cost_per_token: 1e-7, + output_cost_per_token: 0, + litellm_provider: "vertex_ai-embedding-models", + mode: "embedding", + source: + "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models", + }, + "gemini/gemini-1.5-flash-8b": { + max_tokens: 8192, + max_input_tokens: 1048576, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + 
max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_token: 0, + input_cost_per_token_above_128k_tokens: 0, + output_cost_per_token: 0, + output_cost_per_token_above_128k_tokens: 0, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + supports_prompt_caching: true, + tpm: 4000000, + rpm: 4000, + source: "https://ai.google.dev/pricing", + }, + "gemini/gemini-exp-1206": { + max_tokens: 8192, + max_input_tokens: 2097152, + max_output_tokens: 8192, + max_images_per_prompt: 3000, + max_videos_per_prompt: 10, + max_video_length: 1, + max_audio_length_hours: 8.4, + max_audio_per_prompt: 1, + max_pdf_size_mb: 30, + input_cost_per_token: 0, + input_cost_per_token_above_128k_tokens: 0, + output_cost_per_token: 0, + output_cost_per_token_above_128k_tokens: 0, + litellm_provider: "gemini", + mode: "chat", + supports_system_messages: true, + supports_function_calling: true, + supports_vision: true, + supports_response_schema: true, + tpm: 4000000, + rpm: 1000, + source: "https://ai.google.dev/pricing", + metadata: { + notes: + "Rate limits not documented for gemini-exp-1206. Assuming same as gemini-1.5-pro.", + }, + }, + "command-r7b-12-2024": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 1.5e-7, + output_cost_per_token: 3.75e-8, + litellm_provider: "cohere_chat", + mode: "chat", + supports_function_calling: true, + source: "https://docs.cohere.com/v2/docs/command-r7b", + }, + "rerank-v3.5": { + max_tokens: 4096, + max_input_tokens: 4096, + max_output_tokens: 4096, + max_query_tokens: 2048, + input_cost_per_token: 0.0, + input_cost_per_query: 0.002, + output_cost_per_token: 0.0, + litellm_provider: "cohere", + mode: "rerank", + }, + "openrouter/deepseek/deepseek-chat": { + max_tokens: 8192, + max_input_tokens: 66000, + max_output_tokens: 4096, + input_cost_per_token: 1.4e-7, + output_cost_per_token: 2.8e-7, + litellm_provider: "openrouter", + supports_prompt_caching: true, + mode: "chat", + }, + "openrouter/openai/o1": { + max_tokens: 100000, + max_input_tokens: 200000, + max_output_tokens: 100000, + input_cost_per_token: 1.5e-5, + output_cost_per_token: 6e-5, + cache_read_input_token_cost: 7.5e-6, + litellm_provider: "openrouter", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_vision: true, + supports_prompt_caching: true, + supports_system_messages: true, + supports_response_schema: true, + }, + "amazon.nova-micro-v1:0": { + max_tokens: 4096, + max_input_tokens: 300000, + max_output_tokens: 4096, + input_cost_per_token: 3.5e-8, + output_cost_per_token: 1.4e-7, + litellm_provider: "bedrock_converse", + mode: "chat", + supports_function_calling: true, + supports_prompt_caching: true, + }, + "amazon.nova-lite-v1:0": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 6e-8, + output_cost_per_token: 2.4e-7, + litellm_provider: "bedrock_converse", + mode: "chat", + supports_function_calling: true, + supports_vision: true, + supports_pdf_input: true, + supports_prompt_caching: true, + }, + "amazon.nova-pro-v1:0": { + max_tokens: 4096, + max_input_tokens: 300000, + max_output_tokens: 4096, + input_cost_per_token: 8e-7, + output_cost_per_token: 3.2e-6, + litellm_provider: "bedrock_converse", + mode: "chat", + supports_function_calling: 
true, + supports_vision: true, + supports_pdf_input: true, + supports_prompt_caching: true, + }, + "meta.llama3-3-70b-instruct-v1:0": { + max_tokens: 4096, + max_input_tokens: 128000, + max_output_tokens: 4096, + input_cost_per_token: 7.2e-7, + output_cost_per_token: 7.2e-7, + litellm_provider: "bedrock_converse", + mode: "chat", + }, + "together_ai/meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": { + input_cost_per_token: 1.8e-7, + output_cost_per_token: 1.8e-7, + litellm_provider: "together_ai", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + mode: "chat", + }, + "together_ai/meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": { + input_cost_per_token: 8.8e-7, + output_cost_per_token: 8.8e-7, + litellm_provider: "together_ai", + supports_function_calling: true, + supports_parallel_function_calling: true, + supports_response_schema: true, + mode: "chat", + }, + "together_ai/meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": { + input_cost_per_token: 3.5e-6, + output_cost_per_token: 3.5e-6, + litellm_provider: "together_ai", + supports_function_calling: true, + supports_parallel_function_calling: true, + mode: "chat", + }, + "deepinfra/meta-llama/Meta-Llama-3.1-405B-Instruct": { + max_tokens: 32768, + max_input_tokens: 32768, + max_output_tokens: 32768, + input_cost_per_token: 9e-7, + output_cost_per_token: 9e-7, + litellm_provider: "deepinfra", + mode: "chat", + supports_function_calling: true, + supports_parallel_function_calling: true, + }, + "fireworks_ai/accounts/fireworks/models/deepseek-v3": { + max_tokens: 8192, + max_input_tokens: 128000, + max_output_tokens: 8192, + input_cost_per_token: 9e-7, + output_cost_per_token: 9e-7, + litellm_provider: "fireworks_ai", + mode: "chat", + supports_response_schema: true, + source: "https://fireworks.ai/pricing", + }, + "voyage/voyage-3-large": { + max_tokens: 32000, + max_input_tokens: 32000, + input_cost_per_token: 1.8e-7, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "embedding", + }, + "voyage/voyage-3": { + max_tokens: 32000, + max_input_tokens: 32000, + input_cost_per_token: 6e-8, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "embedding", + }, + "voyage/voyage-3-lite": { + max_tokens: 32000, + max_input_tokens: 32000, + input_cost_per_token: 2e-8, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "embedding", + }, + "voyage/voyage-code-3": { + max_tokens: 32000, + max_input_tokens: 32000, + input_cost_per_token: 1.8e-7, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "embedding", + }, + "voyage/voyage-multimodal-3": { + max_tokens: 32000, + max_input_tokens: 32000, + input_cost_per_token: 1.2e-7, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "embedding", + }, + "voyage/rerank-2": { + max_tokens: 16000, + max_input_tokens: 16000, + max_output_tokens: 16000, + max_query_tokens: 16000, + input_cost_per_token: 5e-8, + input_cost_per_query: 5e-8, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "rerank", + }, + "voyage/rerank-2-lite": { + max_tokens: 8000, + max_input_tokens: 8000, + max_output_tokens: 8000, + max_query_tokens: 8000, + input_cost_per_token: 2e-8, + input_cost_per_query: 2e-8, + output_cost_per_token: 0.0, + litellm_provider: "voyage", + mode: "rerank", + }, + "databricks/meta-llama-3.3-70b-instruct": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 1.00002e-6, + input_dbu_cost_per_token: 
1.4286e-5, + output_cost_per_token: 2.99999e-6, + output_dbu_cost_per_token: 4.2857e-5, + litellm_provider: "databricks", + mode: "chat", + source: + "https://www.databricks.com/product/pricing/foundation-model-serving", + metadata: { + notes: + "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation.", + }, + }, + "sambanova/Meta-Llama-3.1-8B-Instruct": { + max_tokens: 16000, + max_input_tokens: 16000, + max_output_tokens: 16000, + input_cost_per_token: 1e-7, + output_cost_per_token: 2e-7, + litellm_provider: "sambanova", + supports_function_calling: true, + mode: "chat", + }, + "sambanova/Meta-Llama-3.1-70B-Instruct": { + max_tokens: 128000, + max_input_tokens: 128000, + max_output_tokens: 128000, + input_cost_per_token: 6e-7, + output_cost_per_token: 1.2e-6, + litellm_provider: "sambanova", + supports_function_calling: true, + mode: "chat", + }, + "sambanova/Meta-Llama-3.1-405B-Instruct": { + max_tokens: 16000, + max_input_tokens: 16000, + max_output_tokens: 16000, + input_cost_per_token: 5e-6, + output_cost_per_token: 1e-5, + litellm_provider: "sambanova", + supports_function_calling: true, + mode: "chat", + }, + "sambanova/Meta-Llama-3.2-1B-Instruct": { + max_tokens: 16000, + max_input_tokens: 16000, + max_output_tokens: 16000, + input_cost_per_token: 4e-7, + output_cost_per_token: 8e-7, + litellm_provider: "sambanova", + supports_function_calling: true, + mode: "chat", + }, + "sambanova/Meta-Llama-3.2-3B-Instruct": { + max_tokens: 4000, + max_input_tokens: 4000, + max_output_tokens: 4000, + input_cost_per_token: 8e-7, + output_cost_per_token: 1.6e-6, + litellm_provider: "sambanova", + supports_function_calling: true, + mode: "chat", + }, + "sambanova/Qwen2.5-Coder-32B-Instruct": { + max_tokens: 8000, + max_input_tokens: 8000, + max_output_tokens: 8000, + input_cost_per_token: 1.5e-6, + output_cost_per_token: 3e-6, + litellm_provider: "sambanova", + supports_function_calling: true, + mode: "chat", + }, + "sambanova/Qwen2.5-72B-Instruct": { + max_tokens: 8000, + max_input_tokens: 8000, + max_output_tokens: 8000, + input_cost_per_token: 2e-6, + output_cost_per_token: 4e-6, + litellm_provider: "sambanova", + supports_function_calling: true, + mode: "chat", + }, +}; diff --git a/apps/api/src/lib/ranker.ts b/apps/api/src/lib/ranker.ts index 865f30d9..16e49764 100644 --- a/apps/api/src/lib/ranker.ts +++ b/apps/api/src/lib/ranker.ts @@ -55,9 +55,9 @@ async function performRanking( // Generate embeddings for each link and calculate similarity in parallel const linksAndScores = await Promise.all( - linksWithContext.map((linkWithContext, index) => + linksWithContext.map((linkWithContext, index) => getEmbedding(linkWithContext) - .then(linkEmbedding => { + .then((linkEmbedding) => { const score = cosineSimilarity(queryEmbedding, linkEmbedding); return { link: links[index], @@ -71,8 +71,8 @@ async function performRanking( linkWithContext, score: 0, originalIndex: index, - })) - ) + })), + ), ); // Sort links based on similarity scores while preserving original order for equal scores diff --git a/apps/api/src/scraper/WebScraper/crawler.ts b/apps/api/src/scraper/WebScraper/crawler.ts index 2003e448..442a4f5e 100644 --- a/apps/api/src/scraper/WebScraper/crawler.ts +++ b/apps/api/src/scraper/WebScraper/crawler.ts @@ -252,20 +252,19 @@ export class WebCrawler { }; const timeoutPromise = new Promise((_, reject) => { - setTimeout(() => reject(new Error('Sitemap fetch timeout')), 
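The model cost map added above is a plain lookup table keyed by model name, with per-token prices in dollars (for example, 1.4e-7 is $0.00000014 per token) and an optional per-request surcharge for the Perplexity online models. Below is a minimal sketch of how such a map could be consumed to estimate the cost of a single request; ModelPrice and estimateCost are hypothetical names used for illustration and are not part of this PR.

type ModelPrice = {
  max_input_tokens?: number;
  input_cost_per_token?: number;
  output_cost_per_token?: number;
  input_cost_per_request?: number;
  mode?: string;
};

function estimateCost(
  prices: Record<string, ModelPrice>,
  model: string,
  inputTokens: number,
  outputTokens: number,
): number | null {
  const entry = prices[model];
  if (!entry) return null; // unknown model: let the caller decide how to handle it
  const inputCost = (entry.input_cost_per_token ?? 0) * inputTokens;
  const outputCost = (entry.output_cost_per_token ?? 0) * outputTokens;
  const perRequest = entry.input_cost_per_request ?? 0; // e.g. the Perplexity "online" models
  return inputCost + outputCost + perRequest;
}

// Worked example using the "deepseek/deepseek-chat" entry from the map above:
// estimateCost(map, "deepseek/deepseek-chat", 1200, 300)
//   = 1.4e-7 * 1200 + 2.8e-7 * 300 ≈ $0.000252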
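The performRanking hunk above is a formatting-only change: each link's context is embedded in parallel, scored against the query embedding with cosineSimilarity, and a failed embedding falls back to a score of 0 so the link keeps its original position. For reference, a standard cosine similarity consistent with that call shape looks like the sketch below; the repository's own implementation in ranker.ts remains authoritative.

// Standard cosine similarity over two embedding vectors of equal length.
function cosineSimilarity(a: number[], b: number[]): number {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  if (normA === 0 || normB === 0) return 0; // degenerate embedding: treat as unrelated
  return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}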
timeout); + setTimeout(() => reject(new Error("Sitemap fetch timeout")), timeout); }); try { - let count = await Promise.race([ + let count = (await Promise.race([ Promise.all([ - this.tryFetchSitemapLinks( - this.initialUrl, - _urlsHandler, - ), - ...this.robots.getSitemaps().map(x => this.tryFetchSitemapLinks(x, _urlsHandler)), - ]).then(results => results.reduce((a,x) => a+x, 0)), - timeoutPromise - ]) as number; + this.tryFetchSitemapLinks(this.initialUrl, _urlsHandler), + ...this.robots + .getSitemaps() + .map((x) => this.tryFetchSitemapLinks(x, _urlsHandler)), + ]).then((results) => results.reduce((a, x) => a + x, 0)), + timeoutPromise, + ])) as number; if (count > 0) { if ( @@ -281,14 +280,14 @@ export class WebCrawler { return count; } catch (error) { - if (error.message === 'Sitemap fetch timeout') { - this.logger.warn('Sitemap fetch timed out', { + if (error.message === "Sitemap fetch timeout") { + this.logger.warn("Sitemap fetch timed out", { method: "tryGetSitemap", timeout, }); return 0; } - this.logger.error('Error fetching sitemap', { + this.logger.error("Error fetching sitemap", { method: "tryGetSitemap", error, }); @@ -328,9 +327,16 @@ export class WebCrawler { !this.matchesExcludes(path) && !this.isRobotsAllowed(fullUrl, this.ignoreRobotsTxt) ) { - (async() => { - await redisConnection.sadd("crawl:" + this.jobId + ":robots_blocked", fullUrl); - await redisConnection.expire("crawl:" + this.jobId + ":robots_blocked", 24 * 60 * 60, "NX"); + (async () => { + await redisConnection.sadd( + "crawl:" + this.jobId + ":robots_blocked", + fullUrl, + ); + await redisConnection.expire( + "crawl:" + this.jobId + ":robots_blocked", + 24 * 60 * 60, + "NX", + ); })(); } } else { diff --git a/apps/api/src/scraper/WebScraper/sitemap-index.ts b/apps/api/src/scraper/WebScraper/sitemap-index.ts index 75d2532c..162f6953 100644 --- a/apps/api/src/scraper/WebScraper/sitemap-index.ts +++ b/apps/api/src/scraper/WebScraper/sitemap-index.ts @@ -1,5 +1,8 @@ import { logger } from "../../lib/logger"; -import { normalizeUrl, normalizeUrlOnlyHostname } from "../../lib/canonical-url"; +import { + normalizeUrl, + normalizeUrlOnlyHostname, +} from "../../lib/canonical-url"; import { supabase_service } from "../../services/supabase"; /** @@ -28,13 +31,19 @@ async function querySitemapIndexFunction(url: string) { return { urls: [], lastUpdated: new Date(0) }; } - const allUrls = [...new Set(data.map((entry) => entry.urls).flat().map(url => normalizeUrl(url)))]; + const allUrls = [ + ...new Set( + data + .map((entry) => entry.urls) + .flat() + .map((url) => normalizeUrl(url)), + ), + ]; return { urls: allUrls, lastUpdated: data[0].updated_at }; - } catch (error) { - logger.error("(sitemap-index) Error querying the index", { + logger.error("(sitemap-index) Error querying the index", { error, - attempt + attempt, }); if (attempt === 3) { @@ -46,4 +55,7 @@ async function querySitemapIndexFunction(url: string) { return { urls: [], lastUpdated: new Date(0) }; } -export const querySitemapIndex = withAuth(querySitemapIndexFunction, { urls: [], lastUpdated: new Date(0) }); +export const querySitemapIndex = withAuth(querySitemapIndexFunction, { + urls: [], + lastUpdated: new Date(0), +}); diff --git a/apps/api/src/scraper/WebScraper/sitemap.ts b/apps/api/src/scraper/WebScraper/sitemap.ts index f5978fa9..2ae52ef8 100644 --- a/apps/api/src/scraper/WebScraper/sitemap.ts +++ b/apps/api/src/scraper/WebScraper/sitemap.ts @@ -24,55 +24,79 @@ export async function getLinksFromSitemap( try { if (mode === "fire-engine" && 
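The crawler.ts hunk above races the sitemap fetches against a rejecting timer, treats the dedicated "Sitemap fetch timeout" error as a soft failure (warn and return 0 links), and records robots-blocked URLs in Redis with a 24-hour expiry. A minimal sketch of that timeout pattern, assuming a generic withTimeout helper that is not part of this PR:

async function withTimeout<T>(work: Promise<T>, ms: number, fallback: T): Promise<T> {
  // Reject after `ms` with the same sentinel message the crawler checks for.
  const timeoutPromise = new Promise<never>((_, reject) => {
    setTimeout(() => reject(new Error("Sitemap fetch timeout")), ms);
  });
  try {
    return await Promise.race([work, timeoutPromise]);
  } catch (error) {
    if (error instanceof Error && error.message === "Sitemap fetch timeout") {
      return fallback; // mirrors tryGetSitemap returning 0 links on timeout
    }
    throw error; // any other failure is still surfaced to the caller
  }
}

As in the crawler code, the timer is not cleared when the work wins the race; the late rejection is absorbed by the already-settled Promise.race.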
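The sitemap-index.ts hunk reshapes the dedup step without changing it: rows from the index are flattened, each URL is normalized via normalizeUrl, and a Set removes duplicates before spreading back into an array. Illustrated below with a stand-in normalizer; the real helper lives in lib/canonical-url.

// Stand-in for the real normalizeUrl from lib/canonical-url (here: just trim trailing slashes).
const normalizeUrl = (url: string): string => url.replace(/\/+$/, "");

const rows = [
  { urls: ["https://example.com/a/", "https://example.com/b"] },
  { urls: ["https://example.com/a", "https://example.com/b/"] },
];

const allUrls = [
  ...new Set(
    rows
      .map((entry) => entry.urls)
      .flat()
      .map((url) => normalizeUrl(url)),
  ),
];
// -> ["https://example.com/a", "https://example.com/b"]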
useFireEngine) { const fetchResponse = await scrapeURL( - "sitemap", + "sitemap", sitemapUrl, scrapeOptions.parse({ formats: ["rawHtml"] }), { forceEngine: "fetch" }, ); - if (fetchResponse.success && (fetchResponse.document.metadata.statusCode >= 200 && fetchResponse.document.metadata.statusCode < 300)) { + if ( + fetchResponse.success && + fetchResponse.document.metadata.statusCode >= 200 && + fetchResponse.document.metadata.statusCode < 300 + ) { content = fetchResponse.document.rawHtml!; } else { logger.debug( "Failed to scrape sitemap via fetch, falling back to TLSClient...", - { error: fetchResponse.success ? fetchResponse.document : fetchResponse.error }, + { + error: fetchResponse.success + ? fetchResponse.document + : fetchResponse.error, + }, ); const tlsResponse = await scrapeURL( - "sitemap", + "sitemap", sitemapUrl, scrapeOptions.parse({ formats: ["rawHtml"] }), { forceEngine: "fire-engine;tlsclient", v0DisableJsDom: true }, ); - if (tlsResponse.success && (tlsResponse.document.metadata.statusCode >= 200 && tlsResponse.document.metadata.statusCode < 300)) { + if ( + tlsResponse.success && + tlsResponse.document.metadata.statusCode >= 200 && + tlsResponse.document.metadata.statusCode < 300 + ) { content = tlsResponse.document.rawHtml!; } else { - logger.error(`Request failed for ${sitemapUrl}, ran out of engines!`, { - method: "getLinksFromSitemap", - mode, - sitemapUrl, - error: tlsResponse.success ? tlsResponse.document : tlsResponse.error, - }); + logger.error( + `Request failed for ${sitemapUrl}, ran out of engines!`, + { + method: "getLinksFromSitemap", + mode, + sitemapUrl, + error: tlsResponse.success + ? tlsResponse.document + : tlsResponse.error, + }, + ); return 0; } } } else { const fetchResponse = await scrapeURL( - "sitemap", + "sitemap", sitemapUrl, scrapeOptions.parse({ formats: ["rawHtml"] }), { forceEngine: "fetch" }, ); - if (fetchResponse.success && (fetchResponse.document.metadata.statusCode >= 200 && fetchResponse.document.metadata.statusCode < 300)) { + if ( + fetchResponse.success && + fetchResponse.document.metadata.statusCode >= 200 && + fetchResponse.document.metadata.statusCode < 300 + ) { content = fetchResponse.document.rawHtml!; } else { - logger.error(`Request failed for ${sitemapUrl}, ran out of engines!`, { - method: "getLinksFromSitemap", - mode, - sitemapUrl, - }); + logger.error( + `Request failed for ${sitemapUrl}, ran out of engines!`, + { + method: "getLinksFromSitemap", + mode, + sitemapUrl, + }, + ); return 0; } } @@ -165,13 +189,20 @@ export const fetchSitemapData = async ( const sitemapUrl = url.endsWith("/sitemap.xml") ? 
url : `${url}/sitemap.xml`; try { const fetchResponse = await scrapeURL( - "sitemap", + "sitemap", sitemapUrl, - scrapeOptions.parse({ formats: ["rawHtml"], timeout: timeout || axiosTimeout }), + scrapeOptions.parse({ + formats: ["rawHtml"], + timeout: timeout || axiosTimeout, + }), { forceEngine: "fetch" }, ); - if (fetchResponse.success && (fetchResponse.document.metadata.statusCode >= 200 && fetchResponse.document.metadata.statusCode < 300)) { + if ( + fetchResponse.success && + fetchResponse.document.metadata.statusCode >= 200 && + fetchResponse.document.metadata.statusCode < 300 + ) { const xml = fetchResponse.document.rawHtml!; const parsedXml = await parseStringPromise(xml); diff --git a/apps/api/src/scraper/scrapeURL/engines/cache/index.ts b/apps/api/src/scraper/scrapeURL/engines/cache/index.ts index 31075f92..ec1db99c 100644 --- a/apps/api/src/scraper/scrapeURL/engines/cache/index.ts +++ b/apps/api/src/scraper/scrapeURL/engines/cache/index.ts @@ -17,7 +17,6 @@ export async function scrapeCache(meta: Meta): Promise { throw new EngineError("Cache hit but HTML is too short to be useful"); } - // Set fromCache flag to indicate this document was retrieved from cache meta.internalOptions.fromCache = true; diff --git a/apps/api/src/scraper/scrapeURL/engines/fire-engine/checkStatus.ts b/apps/api/src/scraper/scrapeURL/engines/fire-engine/checkStatus.ts index b3af6103..58fc5b3e 100644 --- a/apps/api/src/scraper/scrapeURL/engines/fire-engine/checkStatus.ts +++ b/apps/api/src/scraper/scrapeURL/engines/fire-engine/checkStatus.ts @@ -3,7 +3,12 @@ import * as Sentry from "@sentry/node"; import { z } from "zod"; import { robustFetch } from "../../lib/fetch"; -import { ActionError, EngineError, SiteError, UnsupportedFileError } from "../../error"; +import { + ActionError, + EngineError, + SiteError, + UnsupportedFileError, +} from "../../error"; import { MockState } from "../../lib/mock"; const successSchema = z.object({ diff --git a/apps/api/src/scraper/scrapeURL/engines/fire-engine/delete.ts b/apps/api/src/scraper/scrapeURL/engines/fire-engine/delete.ts index 1d4464d9..d20df42e 100644 --- a/apps/api/src/scraper/scrapeURL/engines/fire-engine/delete.ts +++ b/apps/api/src/scraper/scrapeURL/engines/fire-engine/delete.ts @@ -4,7 +4,11 @@ import * as Sentry from "@sentry/node"; import { robustFetch } from "../../lib/fetch"; import { MockState } from "../../lib/mock"; -export async function fireEngineDelete(logger: Logger, jobId: string, mock: MockState | null) { +export async function fireEngineDelete( + logger: Logger, + jobId: string, + mock: MockState | null, +) { const fireEngineURL = process.env.FIRE_ENGINE_BETA_URL!; await Sentry.startSpan( diff --git a/apps/api/src/scraper/scrapeURL/index.ts b/apps/api/src/scraper/scrapeURL/index.ts index 3df5020d..a657c4c4 100644 --- a/apps/api/src/scraper/scrapeURL/index.ts +++ b/apps/api/src/scraper/scrapeURL/index.ts @@ -143,7 +143,10 @@ async function buildMetaObject( logger, logs, featureFlags: buildFeatureFlags(url, options, internalOptions), - mock: options.useMock !== undefined ? await loadMock(options.useMock, _logger) : null, + mock: + options.useMock !== undefined + ? 
await loadMock(options.useMock, _logger) + : null, }; } diff --git a/apps/api/src/scraper/scrapeURL/lib/fetch.ts b/apps/api/src/scraper/scrapeURL/lib/fetch.ts index 56b91687..73b8f9be 100644 --- a/apps/api/src/scraper/scrapeURL/lib/fetch.ts +++ b/apps/api/src/scraper/scrapeURL/lib/fetch.ts @@ -34,7 +34,7 @@ export async function robustFetch< requestId = crypto.randomUUID(), tryCount = 1, tryCooldown, - mock + mock, }: RobustFetchParams): Promise { const params = { url, @@ -51,8 +51,8 @@ export async function robustFetch< let response: { status: number; - headers: Headers, - body: string, + headers: Headers; + body: string; }; if (mock === null) { @@ -123,25 +123,33 @@ export async function robustFetch< return null as Output; } - const makeRequestTypeId = (request: typeof mock["requests"][number]["options"]) => { + const makeRequestTypeId = ( + request: (typeof mock)["requests"][number]["options"], + ) => { let out = request.url + ";" + request.method; - if (process.env.FIRE_ENGINE_BETA_URL && url.startsWith(process.env.FIRE_ENGINE_BETA_URL) && request.method === "POST") { + if ( + process.env.FIRE_ENGINE_BETA_URL && + url.startsWith(process.env.FIRE_ENGINE_BETA_URL) && + request.method === "POST" + ) { out += "f-e;" + request.body?.engine + ";" + request.body?.url; } return out; - } + }; const thisId = makeRequestTypeId(params); - const matchingMocks = mock.requests.filter(x => makeRequestTypeId(x.options) === thisId).sort((a,b) => a.time - b.time); + const matchingMocks = mock.requests + .filter((x) => makeRequestTypeId(x.options) === thisId) + .sort((a, b) => a.time - b.time); const nextI = mock.tracker[thisId] ?? 0; mock.tracker[thisId] = nextI + 1; - + if (!matchingMocks[nextI]) { throw new Error("Failed to mock request -- no mock targets found."); } response = { - ...(matchingMocks[nextI].result), + ...matchingMocks[nextI].result, headers: new Headers(matchingMocks[nextI].result.headers), }; } @@ -180,12 +188,15 @@ export async function robustFetch< } if (mock === null) { - await saveMock({ - ...params, - logger: undefined, - schema: undefined, - headers: undefined, - }, response); + await saveMock( + { + ...params, + logger: undefined, + schema: undefined, + headers: undefined, + }, + response, + ); } let data: Output; diff --git a/apps/api/src/scraper/scrapeURL/lib/mock.ts b/apps/api/src/scraper/scrapeURL/lib/mock.ts index e96c8142..e57256d6 100644 --- a/apps/api/src/scraper/scrapeURL/lib/mock.ts +++ b/apps/api/src/scraper/scrapeURL/lib/mock.ts @@ -6,55 +6,70 @@ const saveMocksDirPath = path.join(__dirname, "../mocks/").replace("dist/", ""); const loadMocksDirPath = path.join(__dirname, "../../../__tests__/snips/mocks"); export async function saveMock(options: unknown, result: unknown) { - if (process.env.FIRECRAWL_SAVE_MOCKS !== "true") return; + if (process.env.FIRECRAWL_SAVE_MOCKS !== "true") return; - await fs.mkdir(saveMocksDirPath, { recursive: true }); + await fs.mkdir(saveMocksDirPath, { recursive: true }); - const fileName = Date.now() + "-" + crypto.randomUUID() + ".json"; - const filePath = path.join(saveMocksDirPath, fileName); - console.log(filePath); + const fileName = Date.now() + "-" + crypto.randomUUID() + ".json"; + const filePath = path.join(saveMocksDirPath, fileName); + console.log(filePath); - await fs.writeFile(filePath, JSON.stringify({ + await fs.writeFile( + filePath, + JSON.stringify( + { time: Date.now(), options, result, - }, undefined, 4)); + }, + undefined, + 4, + ), + ); } export type MockState = { - requests: { - time: number, - options: { - url: 
string, - method: string, - body?: any, - ignoreResponse: boolean, - ignoreFailure: boolean, - tryCount: number, - tryCooldown?: number, - }, - result: any, - }[], - tracker: Record, -} + requests: { + time: number; + options: { + url: string; + method: string; + body?: any; + ignoreResponse: boolean; + ignoreFailure: boolean; + tryCount: number; + tryCooldown?: number; + }; + result: any; + }[]; + tracker: Record; +}; -export async function loadMock(name: string, logger: Logger = _logger): Promise { - try { - const mockPath = path.join(loadMocksDirPath, name + ".json"); +export async function loadMock( + name: string, + logger: Logger = _logger, +): Promise { + try { + const mockPath = path.join(loadMocksDirPath, name + ".json"); - const relative = path.relative(loadMocksDirPath, mockPath); - if (!relative || relative.startsWith("..") || path.isAbsolute(relative)) { - // directory moving - return null; - } - - const load = JSON.parse(await fs.readFile(mockPath, "utf8")); - return { - requests: load, - tracker: {}, - }; - } catch (error) { - logger.warn("Failed to load mock file!", { name, module: "scrapeURL:mock", method: "loadMock", error }); - return null; + const relative = path.relative(loadMocksDirPath, mockPath); + if (!relative || relative.startsWith("..") || path.isAbsolute(relative)) { + // directory moving + return null; } + + const load = JSON.parse(await fs.readFile(mockPath, "utf8")); + return { + requests: load, + tracker: {}, + }; + } catch (error) { + logger.warn("Failed to load mock file!", { + name, + module: "scrapeURL:mock", + method: "loadMock", + error, + }); + return null; + } } diff --git a/apps/api/src/scraper/scrapeURL/lib/removeUnwantedElements.ts b/apps/api/src/scraper/scrapeURL/lib/removeUnwantedElements.ts index 57015557..4a3d2ae4 100644 --- a/apps/api/src/scraper/scrapeURL/lib/removeUnwantedElements.ts +++ b/apps/api/src/scraper/scrapeURL/lib/removeUnwantedElements.ts @@ -119,16 +119,16 @@ export const htmlTransform = ( // always return biggest image soup("img[srcset]").each((_, el) => { - const sizes = el.attribs.srcset.split(",").map(x => { + const sizes = el.attribs.srcset.split(",").map((x) => { const tok = x.trim().split(" "); return { url: tok[0], size: parseInt((tok[1] ?? "1x").slice(0, -1), 10), - isX: (tok[1] ?? "").endsWith("x") + isX: (tok[1] ?? "").endsWith("x"), }; }); - if (sizes.every(x => x.isX) && el.attribs.src) { + if (sizes.every((x) => x.isX) && el.attribs.src) { sizes.push({ url: el.attribs.src, size: 1, @@ -136,7 +136,7 @@ export const htmlTransform = ( }); } - sizes.sort((a,b) => b.size - a.size); + sizes.sort((a, b) => b.size - a.size); el.attribs.src = sizes[0]?.url; }); diff --git a/apps/api/src/scraper/scrapeURL/transformers/index.ts b/apps/api/src/scraper/scrapeURL/transformers/index.ts index 54bf0d46..cf29a562 100644 --- a/apps/api/src/scraper/scrapeURL/transformers/index.ts +++ b/apps/api/src/scraper/scrapeURL/transformers/index.ts @@ -41,7 +41,11 @@ export function deriveHTMLFromRawHTML( ); } - document.html = htmlTransform(document.rawHtml, document.metadata.url ?? document.metadata.sourceURL ?? meta.url, meta.options); + document.html = htmlTransform( + document.rawHtml, + document.metadata.url ?? document.metadata.sourceURL ?? 
meta.url, + meta.options, + ); return document; } diff --git a/apps/api/src/scraper/scrapeURL/transformers/llmExtract.ts b/apps/api/src/scraper/scrapeURL/transformers/llmExtract.ts index 31046892..5e3cded5 100644 --- a/apps/api/src/scraper/scrapeURL/transformers/llmExtract.ts +++ b/apps/api/src/scraper/scrapeURL/transformers/llmExtract.ts @@ -1,7 +1,11 @@ import OpenAI from "openai"; import { encoding_for_model } from "@dqbd/tiktoken"; import { TiktokenModel } from "@dqbd/tiktoken"; -import { Document, ExtractOptions, TokenUsage } from "../../../controllers/v1/types"; +import { + Document, + ExtractOptions, + TokenUsage, +} from "../../../controllers/v1/types"; import { Logger } from "winston"; import { EngineResultsTracker, Meta } from ".."; import { logger } from "../../../lib/logger"; @@ -72,14 +76,20 @@ export async function generateOpenAICompletions( markdown?: string, previousWarning?: string, isExtractEndpoint?: boolean, - model: TiktokenModel = (process.env.MODEL_NAME as TiktokenModel) ?? "gpt-4o-mini", -): Promise<{ extract: any; numTokens: number; warning: string | undefined; totalUsage: TokenUsage, model: string }> { + model: TiktokenModel = (process.env.MODEL_NAME as TiktokenModel) ?? + "gpt-4o-mini", +): Promise<{ + extract: any; + numTokens: number; + warning: string | undefined; + totalUsage: TokenUsage; + model: string; +}> { let extract: any; let warning: string | undefined; const openai = new OpenAI(); - if (markdown === undefined) { throw new Error("document.markdown is undefined -- this is unexpected"); } @@ -208,8 +218,8 @@ export async function generateOpenAICompletions( } } - const promptTokens = (jsonCompletion.usage?.prompt_tokens ?? 0); - const completionTokens = (jsonCompletion.usage?.completion_tokens ?? 0); + const promptTokens = jsonCompletion.usage?.prompt_tokens ?? 0; + const completionTokens = jsonCompletion.usage?.completion_tokens ?? 
0; // If the users actually wants the items object, they can specify it as 'required' in the schema // otherwise, we just return the items array @@ -222,7 +232,17 @@ export async function generateOpenAICompletions( } // num tokens (just user prompt tokenized) | deprecated // totalTokens = promptTokens + completionTokens - return { extract, warning, numTokens, totalUsage: { promptTokens, completionTokens, totalTokens: promptTokens + completionTokens }, model }; + return { + extract, + warning, + numTokens, + totalUsage: { + promptTokens, + completionTokens, + totalTokens: promptTokens + completionTokens, + }, + model, + }; } export async function performLLMExtract( @@ -238,7 +258,7 @@ export async function performLLMExtract( document.markdown, document.warning, ); - + if (meta.options.formats.includes("json")) { document.json = extract; } else { diff --git a/apps/api/src/services/billing/auto_charge.ts b/apps/api/src/services/billing/auto_charge.ts index 7a8a96ca..7bae2a65 100644 --- a/apps/api/src/services/billing/auto_charge.ts +++ b/apps/api/src/services/billing/auto_charge.ts @@ -32,7 +32,7 @@ export async function autoCharge( const resource = `auto-recharge:${chunk.team_id}`; const cooldownKey = `auto-recharge-cooldown:${chunk.team_id}`; - if(chunk.team_id === "285bb597-6eaf-4b96-801c-51461fc3c543"){ + if (chunk.team_id === "285bb597-6eaf-4b96-801c-51461fc3c543") { return { success: false, message: "Auto-recharge failed", diff --git a/apps/api/src/services/indexing/crawl-maps-index.ts b/apps/api/src/services/indexing/crawl-maps-index.ts index 35e377ea..957f3eff 100644 --- a/apps/api/src/services/indexing/crawl-maps-index.ts +++ b/apps/api/src/services/indexing/crawl-maps-index.ts @@ -107,15 +107,15 @@ async function processBatch() { // Keep most recent entry and mark others for deletion const [mostRecent, ...duplicates] = existingForOrigin; if (duplicates.length > 0) { - duplicatesToDelete.push(...duplicates.map(d => d.id)); + duplicatesToDelete.push(...duplicates.map((d) => d.id)); } // Merge and deduplicate URLs const mergedUrls = [ ...new Set([ ...mostRecent.urls, - ...op.standardizedUrls.map(url => normalizeUrl(url)) - ]) + ...op.standardizedUrls.map((url) => normalizeUrl(url)), + ]), ]; updates.push({ @@ -127,7 +127,9 @@ async function processBatch() { }); } else { // Prepare insert with deduplicated URLs - const deduplicatedUrls = [...new Set(op.standardizedUrls.map(url => normalizeUrl(url)))]; + const deduplicatedUrls = [ + ...new Set(op.standardizedUrls.map((url) => normalizeUrl(url))), + ]; inserts.push({ origin_url: op.originUrl, urls: deduplicatedUrls, @@ -140,8 +142,10 @@ async function processBatch() { // Delete duplicate entries if (duplicatesToDelete.length > 0) { - logger.info(`🗑️ Deleting ${duplicatesToDelete.length} duplicate crawl maps in batches of 100`); - + logger.info( + `🗑️ Deleting ${duplicatesToDelete.length} duplicate crawl maps in batches of 100`, + ); + // Delete in batches of 100 for (let i = 0; i < duplicatesToDelete.length; i += 100) { const batch = duplicatesToDelete.slice(i, i + 100); @@ -151,11 +155,14 @@ async function processBatch() { .in("id", batch); if (deleteError) { - logger.error(`Failed to delete batch ${i/100 + 1} of duplicate crawl maps`, { - error: deleteError, - batchSize: batch.length, - startIndex: i - }); + logger.error( + `Failed to delete batch ${i / 100 + 1} of duplicate crawl maps`, + { + error: deleteError, + batchSize: batch.length, + startIndex: i, + }, + ); } } } @@ -165,7 +172,7 @@ async function processBatch() { 
logger.info(`🔄 Updating ${updates.length} existing crawl maps`, { origins: updates.map((u) => u.origin_url), }); - + // Process updates one at a time to avoid conflicts for (const update of updates) { const { error: updateError } = await supabase_service @@ -175,7 +182,7 @@ async function processBatch() { if (updateError) { logger.error("Failed to update crawl map", { error: updateError, - origin: update.origin_url + origin: update.origin_url, }); } } diff --git a/apps/api/src/services/indexing/index-worker.ts b/apps/api/src/services/indexing/index-worker.ts index 0d964477..af9e456d 100644 --- a/apps/api/src/services/indexing/index-worker.ts +++ b/apps/api/src/services/indexing/index-worker.ts @@ -3,18 +3,27 @@ import "../sentry"; import * as Sentry from "@sentry/node"; import { Job, Queue, Worker } from "bullmq"; import { logger as _logger, logger } from "../../lib/logger"; -import { redisConnection, indexQueueName, getIndexQueue } from "../queue-service"; +import { + redisConnection, + indexQueueName, + getIndexQueue, +} from "../queue-service"; import { saveCrawlMap } from "./crawl-maps-index"; import systemMonitor from "../system-monitor"; import { v4 as uuidv4 } from "uuid"; const workerLockDuration = Number(process.env.WORKER_LOCK_DURATION) || 60000; -const workerStalledCheckInterval = Number(process.env.WORKER_STALLED_CHECK_INTERVAL) || 30000; -const jobLockExtendInterval = Number(process.env.JOB_LOCK_EXTEND_INTERVAL) || 15000; -const jobLockExtensionTime = Number(process.env.JOB_LOCK_EXTENSION_TIME) || 60000; +const workerStalledCheckInterval = + Number(process.env.WORKER_STALLED_CHECK_INTERVAL) || 30000; +const jobLockExtendInterval = + Number(process.env.JOB_LOCK_EXTEND_INTERVAL) || 15000; +const jobLockExtensionTime = + Number(process.env.JOB_LOCK_EXTENSION_TIME) || 60000; -const cantAcceptConnectionInterval = Number(process.env.CANT_ACCEPT_CONNECTION_INTERVAL) || 2000; -const connectionMonitorInterval = Number(process.env.CONNECTION_MONITOR_INTERVAL) || 10; +const cantAcceptConnectionInterval = + Number(process.env.CANT_ACCEPT_CONNECTION_INTERVAL) || 2000; +const connectionMonitorInterval = + Number(process.env.CONNECTION_MONITOR_INTERVAL) || 10; const gotJobInterval = Number(process.env.CONNECTION_MONITOR_INTERVAL) || 20; const runningJobs: Set = new Set(); @@ -88,7 +97,7 @@ const workerFun = async (queue: Queue) => { const token = uuidv4(); const canAcceptConnection = await monitor.acceptConnection(); - + if (!canAcceptConnection) { logger.info("Cant accept connection"); cantAcceptConnectionCount++; @@ -100,7 +109,9 @@ const workerFun = async (queue: Queue) => { }); } - await new Promise(resolve => setTimeout(resolve, cantAcceptConnectionInterval)); + await new Promise((resolve) => + setTimeout(resolve, cantAcceptConnectionInterval), + ); continue; } else { cantAcceptConnectionCount = 0; @@ -141,15 +152,17 @@ const workerFun = async (queue: Queue) => { runningJobs.delete(job.id); } - await new Promise(resolve => setTimeout(resolve, gotJobInterval)); + await new Promise((resolve) => setTimeout(resolve, gotJobInterval)); } else { - await new Promise(resolve => setTimeout(resolve, connectionMonitorInterval)); + await new Promise((resolve) => + setTimeout(resolve, connectionMonitorInterval), + ); } } logger.info("Worker loop ended. Waiting for running jobs to finish..."); while (runningJobs.size > 0) { - await new Promise(resolve => setTimeout(resolve, 500)); + await new Promise((resolve) => setTimeout(resolve, 500)); } logger.info("All jobs finished. 
Worker exiting!"); process.exit(0); @@ -158,4 +171,4 @@ const workerFun = async (queue: Queue) => { // Start the worker (async () => { await workerFun(getIndexQueue()); -})(); \ No newline at end of file +})(); diff --git a/apps/api/src/services/queue-worker.ts b/apps/api/src/services/queue-worker.ts index 30aab3bc..960ea11e 100644 --- a/apps/api/src/services/queue-worker.ts +++ b/apps/api/src/services/queue-worker.ts @@ -93,7 +93,9 @@ const runningJobs: Set = new Set(); async function finishCrawlIfNeeded(job: Job & { id: string }, sc: StoredCrawl) { if (await finishCrawl(job.data.crawl_id)) { (async () => { - const originUrl = sc.originUrl ? normalizeUrlOnlyHostname(sc.originUrl) : undefined; + const originUrl = sc.originUrl + ? normalizeUrlOnlyHostname(sc.originUrl) + : undefined; // Get all visited unique URLs from Redis const visitedUrls = await redisConnection.smembers( "crawl:" + job.data.crawl_id + ":visited_unique", @@ -113,7 +115,7 @@ async function finishCrawlIfNeeded(job: Job & { id: string }, sc: StoredCrawl) { }, { priority: 10, - } + }, ); } })(); @@ -315,11 +317,14 @@ const processExtractJobInternal = async ( return result; } else { // throw new Error(result.error || "Unknown error during extraction"); - + await job.moveToCompleted(result, token, false); await updateExtract(job.data.extractId, { status: "failed", - error: result.error ?? "Unknown error, please contact help@firecrawl.com. Extract id: " + job.data.extractId, + error: + result.error ?? + "Unknown error, please contact help@firecrawl.com. Extract id: " + + job.data.extractId, }); return result; @@ -348,7 +353,14 @@ const processExtractJobInternal = async ( "Unknown error, please contact help@firecrawl.com. Extract id: " + job.data.extractId, }); - return { success: false, error: error.error ?? error ?? "Unknown error, please contact help@firecrawl.com. Extract id: " + job.data.extractId }; + return { + success: false, + error: + error.error ?? + error ?? + "Unknown error, please contact help@firecrawl.com. Extract id: " + + job.data.extractId, + }; // throw error; } finally { clearInterval(extendLockInterval); @@ -949,13 +961,15 @@ async function processJob(job: Job & { id: string }, token: string) { } if (job.data.team_id !== process.env.BACKGROUND_INDEX_TEAM_ID!) { - billTeam(job.data.team_id, undefined, creditsToBeBilled, logger).catch((error) => { - logger.error( - `Failed to bill team ${job.data.team_id} for ${creditsToBeBilled} credits`, - { error }, - ); - // Optionally, you could notify an admin or add to a retry queue here - }); + billTeam(job.data.team_id, undefined, creditsToBeBilled, logger).catch( + (error) => { + logger.error( + `Failed to bill team ${job.data.team_id} for ${creditsToBeBilled} credits`, + { error }, + ); + // Optionally, you could notify an admin or add to a retry queue here + }, + ); } } @@ -974,11 +988,12 @@ async function processJob(job: Job & { id: string }, token: string) { await finishCrawlIfNeeded(job, sc); } - + const isEarlyTimeout = error instanceof Error && error.message === "timeout"; const isCancelled = - error instanceof Error && error.message === "Parent crawl/batch scrape was cancelled"; + error instanceof Error && + error.message === "Parent crawl/batch scrape was cancelled"; if (isEarlyTimeout) { logger.error(`🐂 Job timed out ${job.id}`);