diff --git a/apps/api/src/controllers/auth.ts b/apps/api/src/controllers/auth.ts
index 6bda3039..99ff71c7 100644
--- a/apps/api/src/controllers/auth.ts
+++ b/apps/api/src/controllers/auth.ts
@@ -77,8 +77,9 @@ export async function getACUC(
   api_key: string,
   cacheOnly = false,
   useCache = true,
+  mode?: RateLimiterMode,
 ): Promise {
-  const cacheKeyACUC = `acuc_${api_key}`;
+  const cacheKeyACUC = `acuc_${api_key}_${mode}`;
 
   if (useCache) {
     const cachedACUC = await getValue(cacheKeyACUC);
@@ -93,9 +94,13 @@ export async function getACUC(
     let retries = 0;
     const maxRetries = 5;
 
+    let rpcName =
+      mode === RateLimiterMode.Extract || mode === RateLimiterMode.ExtractStatus
+        ? "auth_credit_usage_chunk_extract"
+        : "auth_credit_usage_chunk_test_22_credit_pack_n_extract";
     while (retries < maxRetries) {
       ({ data, error } = await supabase_service.rpc(
-        "auth_credit_usage_chunk_test_21_credit_pack",
+        rpcName,
         { input_key: api_key },
         { get: true },
       ));
@@ -127,8 +132,6 @@ export async function getACUC(
       setCachedACUC(api_key, chunk);
     }
 
-    // console.log(chunk);
-
     return chunk;
   } else {
     return null;
@@ -203,7 +206,7 @@ export async function supaAuthenticateUser(
     };
   }
 
-  chunk = await getACUC(normalizedApi);
+  chunk = await getACUC(normalizedApi, false, true, mode);
 
   if (chunk === null) {
     return {
@@ -258,6 +261,9 @@ export async function supaAuthenticateUser(
           subscriptionData.plan,
         );
         break;
+      case RateLimiterMode.ExtractStatus:
+        rateLimiter = getRateLimiter(RateLimiterMode.ExtractStatus, token);
+        break;
       case RateLimiterMode.CrawlStatus:
         rateLimiter = getRateLimiter(RateLimiterMode.CrawlStatus, token);
         break;
diff --git a/apps/api/src/controllers/v1/extract-status.ts b/apps/api/src/controllers/v1/extract-status.ts
index 6d81e400..3e166e5f 100644
--- a/apps/api/src/controllers/v1/extract-status.ts
+++ b/apps/api/src/controllers/v1/extract-status.ts
@@ -37,5 +37,6 @@ export async function extractStatusController(
     error: extract?.error ?? undefined,
     expiresAt: (await getExtractExpiry(req.params.jobId)).toISOString(),
     steps: extract.showSteps ? extract.steps : undefined,
+    llmUsage: extract.showLLMUsage ? extract.llmUsage : undefined,
   });
 }
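// Usage sketch, not part of the patch: exercising the new llmUsage plumbing end to end,
// assuming the usual v1 routes (POST /v1/extract, GET /v1/extract/:jobId) sit in front of
// the controllers above. The base URL, API key, and response handling are placeholders;
// the __experimental_llmUsage flag itself is added to extractV1Options further down.
async function extractWithLlmUsage(): Promise<void> {
  const base = "https://api.firecrawl.dev/v1";
  const headers = {
    "Content-Type": "application/json",
    Authorization: "Bearer fc-YOUR-API-KEY",
  };

  // Kick off an extract job with LLM usage reporting enabled.
  const { id } = await fetch(`${base}/extract`, {
    method: "POST",
    headers,
    body: JSON.stringify({
      urls: ["https://example.com"],
      prompt: "Extract the company name and pricing plans.",
      __experimental_llmUsage: true, // stored as showLLMUsage on the job
    }),
  }).then((res) => res.json());

  // Poll the status endpoint; llmUsage is only present because the flag was set.
  const status = await fetch(`${base}/extract/${id}`, { headers }).then((res) => res.json());
  console.log(status.status, status.llmUsage); // e.g. "completed" 0.0171
}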
diff --git a/apps/api/src/controllers/v1/extract.ts b/apps/api/src/controllers/v1/extract.ts
index 83d74ad8..48431dae 100644
--- a/apps/api/src/controllers/v1/extract.ts
+++ b/apps/api/src/controllers/v1/extract.ts
@@ -71,6 +71,7 @@ export async function extractController(
     createdAt: Date.now(),
     status: "processing",
     showSteps: req.body.__experimental_streamSteps,
+    showLLMUsage: req.body.__experimental_llmUsage,
   });
 
   if (Sentry.isInitialized()) {
diff --git a/apps/api/src/controllers/v1/types.ts b/apps/api/src/controllers/v1/types.ts
index 5573fbaa..c4465988 100644
--- a/apps/api/src/controllers/v1/types.ts
+++ b/apps/api/src/controllers/v1/types.ts
@@ -226,6 +226,7 @@ export const extractV1Options = z
     origin: z.string().optional().default("api"),
     urlTrace: z.boolean().default(false),
     __experimental_streamSteps: z.boolean().default(false),
+    __experimental_llmUsage: z.boolean().default(false),
     timeout: z.number().int().positive().finite().safe().default(60000),
   })
   .strict(strictMessage);
@@ -881,3 +882,12 @@ export type SearchResponse =
       warning?: string;
       data: Document[];
     };
+
+
+export type TokenUsage = {
+  promptTokens: number;
+  completionTokens: number;
+  totalTokens: number;
+  step?: string;
+  model?: string;
+};
diff --git a/apps/api/src/lib/extract/extract-redis.ts b/apps/api/src/lib/extract/extract-redis.ts
index 25c3ce22..4ec326ce 100644
--- a/apps/api/src/lib/extract/extract-redis.ts
+++ b/apps/api/src/lib/extract/extract-redis.ts
@@ -30,6 +30,8 @@ export type StoredExtract = {
   error?: any;
   showSteps?: boolean;
   steps?: ExtractedStep[];
+  showLLMUsage?: boolean;
+  llmUsage?: number;
 };
 
 export async function saveExtract(id: string, extract: StoredExtract) {
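// Illustrative sketch, not part of the patch: how a TokenUsage entry (the type added to
// v1/types.ts above) can be derived from an OpenAI-style `usage` object, mirroring what
// analyzeSchemaAndPrompt does below. The local type aliases are assumptions used to keep
// the snippet self-contained.
type TokenUsage = {
  promptTokens: number;
  completionTokens: number;
  totalTokens: number;
  step?: string;
  model?: string;
};

type OpenAIUsage = {
  prompt_tokens?: number;
  completion_tokens?: number;
  total_tokens?: number;
};

function toTokenUsage(usage: OpenAIUsage | undefined, model: string, step?: string): TokenUsage {
  return {
    promptTokens: usage?.prompt_tokens ?? 0,
    completionTokens: usage?.completion_tokens ?? 0,
    totalTokens: usage?.total_tokens ?? 0,
    model,
    step,
  };
}

// e.g. toTokenUsage(result.usage, "gpt-4o", "schema-analysis") -> pushed onto the job's tokenUsage array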
diff --git a/apps/api/src/lib/extract/extraction-service.ts b/apps/api/src/lib/extract/extraction-service.ts
index 420ad8fd..1f0a1924 100644
--- a/apps/api/src/lib/extract/extraction-service.ts
+++ b/apps/api/src/lib/extract/extraction-service.ts
@@ -1,6 +1,7 @@
 import {
   Document,
   ExtractRequest,
+  TokenUsage,
   toLegacyCrawlerOptions,
   URLTrace,
 } from "../../controllers/v1/types";
@@ -31,6 +32,7 @@ import { ExtractStep, updateExtract } from "./extract-redis";
 import { deduplicateObjectsArray } from "./helpers/deduplicate-objs-array";
 import { mergeNullValObjs } from "./helpers/merge-null-val-objs";
 import { CUSTOM_U_TEAMS } from "./config";
+import { calculateFinalResultCost, estimateCost, estimateTotalCost } from "./usage/llm-cost";
 
 interface ExtractServiceOptions {
   request: ExtractRequest;
@@ -46,6 +48,9 @@ interface ExtractResult {
   warning?: string;
   urlTrace?: URLTrace[];
   error?: string;
+  tokenUsageBreakdown?: TokenUsage[];
+  llmUsage?: number;
+  totalUrlsScraped?: number;
 }
 
 async function analyzeSchemaAndPrompt(
@@ -57,6 +62,7 @@ async function analyzeSchemaAndPrompt(
   multiEntityKeys: string[];
   reasoning?: string;
   keyIndicators?: string[];
+  tokenUsage: TokenUsage;
 }> {
   if (!schema) {
     schema = await generateSchemaFromPrompt(prompt);
   }
@@ -71,8 +77,10 @@ async function analyzeSchemaAndPrompt(
     keyIndicators: z.array(z.string()),
   });
 
+  const model = "gpt-4o";
+
   const result = await openai.beta.chat.completions.parse({
-    model: "gpt-4o",
+    model: model,
     messages: [
       {
         role: "system",
@@ -131,12 +139,20 @@ Schema: ${schemaString}\nPrompt: ${prompt}\nRelevant URLs: ${urls}`,
   const { isMultiEntity, multiEntityKeys, reasoning, keyIndicators } =
     checkSchema.parse(result.choices[0].message.parsed);
-  return { isMultiEntity, multiEntityKeys, reasoning, keyIndicators };
+
+  const tokenUsage: TokenUsage = {
+    promptTokens: result.usage?.prompt_tokens ?? 0,
+    completionTokens: result.usage?.completion_tokens ?? 0,
+    totalTokens: result.usage?.total_tokens ?? 0,
+    model: model,
+  };
+
+  return { isMultiEntity, multiEntityKeys, reasoning, keyIndicators, tokenUsage };
 }
 
 type completions = {
   extract: Record;
   numTokens: number;
+  totalUsage: TokenUsage;
   warning?: string;
 };
@@ -163,6 +179,11 @@ export async function performExtraction(
   let multiEntityCompletions: completions[] = [];
   let multiEntityResult: any = {};
   let singleAnswerResult: any = {};
+  let totalUrlsScraped = 0;
+
+
+  // Token tracking
+  let tokenUsage: TokenUsage[] = [];
 
   await updateExtract(extractId, {
     status: "processing",
@@ -219,6 +240,7 @@ export async function performExtraction(
         "No valid URLs found to scrape. Try adjusting your search criteria or including more URLs.",
       extractId,
       urlTrace: urlTraces,
+      totalUrlsScraped: 0
     };
   }
 
@@ -249,9 +271,12 @@ export async function performExtraction(
   // 1. the first one is a completion that will extract the array of items
   // 2. the second one is multiple completions that will extract the items from the array
   let startAnalyze = Date.now();
-  const { isMultiEntity, multiEntityKeys, reasoning, keyIndicators } =
+  const { isMultiEntity, multiEntityKeys, reasoning, keyIndicators, tokenUsage: schemaAnalysisTokenUsage } =
     await analyzeSchemaAndPrompt(links, reqSchema, request.prompt ?? "");
 
+  // Track schema analysis tokens
+  tokenUsage.push(schemaAnalysisTokenUsage);
+
   // console.log("\nIs Multi Entity:", isMultiEntity);
   // console.log("\nMulti Entity Keys:", multiEntityKeys);
   // console.log("\nReasoning:", reasoning);
@@ -312,6 +337,8 @@ export async function performExtraction(
       (doc): doc is Document => doc !== null,
     );
 
+    totalUrlsScraped += multyEntityDocs.length;
+
     let endScrape = Date.now();
 
     await updateExtract(extractId, {
@@ -376,6 +403,8 @@ export async function performExtraction(
             true,
           );
 
+          tokenUsage.push(shouldExtractCheck.totalUsage);
+
           if (!shouldExtractCheck.extract["extract"]) {
             console.log(
               `Skipping extraction for ${doc.metadata.url} as content is irrelevant`,
@@ -438,6 +467,11 @@ export async function performExtraction(
             timeoutPromise,
           ])) as Awaited>;
 
+          // Track multi-entity extraction tokens
+          if (multiEntityCompletion) {
+            tokenUsage.push(multiEntityCompletion.totalUsage);
+          }
+
           // console.log(multiEntityCompletion.extract)
           // if (!multiEntityCompletion.extract?.is_content_relevant) {
           // console.log(`Skipping extraction for ${doc.metadata.url} as content is not relevant`);
@@ -500,6 +534,7 @@ export async function performExtraction(
           "An unexpected error occurred. Please contact help@firecrawl.com for help.",
         extractId,
         urlTrace: urlTraces,
+        totalUrlsScraped
       };
     }
   }
@@ -551,15 +586,17 @@ export async function performExtraction(
         }
       }
 
-      singleAnswerDocs.push(
-        ...results.filter((doc): doc is Document => doc !== null),
-      );
+      const validResults = results.filter((doc): doc is Document => doc !== null);
+      singleAnswerDocs.push(...validResults);
+      totalUrlsScraped += validResults.length;
+
     } catch (error) {
       return {
        success: false,
        error: error.message,
        extractId,
        urlTrace: urlTraces,
+       totalUrlsScraped
      };
    }
 
@@ -571,6 +608,7 @@ export async function performExtraction(
         "All provided URLs are invalid. Please check your input and try again.",
       extractId,
       urlTrace: request.urlTrace ? urlTraces : undefined,
+      totalUrlsScraped: 0
     };
   }
 
@@ -603,6 +641,11 @@ export async function performExtraction(
       true,
     );
 
+    // Track single answer extraction tokens
+    if (singleAnswerCompletions) {
+      tokenUsage.push(singleAnswerCompletions.totalUsage);
+    }
+
     singleAnswerResult = singleAnswerCompletions.extract;
 
     // Update token usage in traces
@@ -629,19 +672,24 @@ export async function performExtraction(
     ? await mixSchemaObjects(reqSchema, singleAnswerResult, multiEntityResult)
     : singleAnswerResult || multiEntityResult;
 
-  let linksBilled = links.length * 5;
+
+  const totalTokensUsed = tokenUsage.reduce((a, b) => a + b.totalTokens, 0);
+  const llmUsage = estimateTotalCost(tokenUsage);
+  let tokensToBill = calculateFinalResultCost(finalResult);
+
   if (CUSTOM_U_TEAMS.includes(teamId)) {
-    linksBilled = 1;
+    tokensToBill = 1;
   }
 
   // Bill team for usage
-  billTeam(teamId, subId, linksBilled).catch((error) => {
+  billTeam(teamId, subId, tokensToBill, logger, true).catch((error) => {
     logger.error(
-      `Failed to bill team ${teamId} for ${linksBilled} credits: ${error}`,
+      `Failed to bill team ${teamId} for ${tokensToBill} tokens: ${error}`,
     );
   });
 
-  // Log job
+
+  // Log job with token usage
   logJob({
     job_id: extractId,
     success: true,
@@ -654,10 +702,12 @@ export async function performExtraction(
     url: request.urls.join(", "),
     scrapeOptions: request,
     origin: request.origin ?? "api",
-    num_tokens: 0, // completions?.numTokens ?? 0,
+    num_tokens: totalTokensUsed,
+    tokens_billed: tokensToBill,
   }).then(() => {
     updateExtract(extractId, {
       status: "completed",
+      llmUsage,
     }).catch((error) => {
       logger.error(
         `Failed to update extract ${extractId} status to completed: ${error}`,
@@ -671,5 +721,7 @@ export async function performExtraction(
     extractId,
     warning: undefined, // TODO FIX
     urlTrace: request.urlTrace ? urlTraces : undefined,
+    llmUsage,
+    totalUrlsScraped
   };
 }
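// Worked example, not part of the patch: how the three figures produced at the end of
// performExtraction relate, using the gpt-4o prices from usage/model-prices.ts further
// down (input 2.5e-06, output 1e-05 per token) and the helpers from usage/llm-cost.ts.
// The token counts and result size are hypothetical.
const exampleUsage = [
  { promptTokens: 1_000, completionTokens: 200, totalTokens: 1_200, model: "gpt-4o" }, // schema analysis
  { promptTokens: 3_000, completionTokens: 500, totalTokens: 3_500, model: "gpt-4o" }, // extraction call
];

// num_tokens logged on the job: 1,200 + 3,500 = 4,700
const numTokens = exampleUsage.reduce((a, b) => a + b.totalTokens, 0);

// llmUsage stored on the extract (what estimateTotalCost computes):
// (1,000 * 2.5e-06 + 200 * 1e-05) + (3,000 * 2.5e-06 + 500 * 1e-05) = 0.0045 + 0.0125 = 0.017 USD
const exampleLlmUsage = exampleUsage.reduce(
  (sum, u) => sum + u.promptTokens * 2.5e-6 + u.completionTokens * 1e-5,
  0,
);

// tokens_billed comes from the size of the final result, not from model spend:
// calculateFinalResultCost = floor(serialized length / 4) + 300, so a 2,000-character
// result is billed as 800 tokens (unless the team is in CUSTOM_U_TEAMS, which bills 1).
const exampleTokensBilled = Math.floor(2_000 / 4) + 300;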
diff --git a/apps/api/src/lib/extract/reranker.ts b/apps/api/src/lib/extract/reranker.ts
index b61ebba9..6ee51e67 100644
--- a/apps/api/src/lib/extract/reranker.ts
+++ b/apps/api/src/lib/extract/reranker.ts
@@ -150,16 +150,21 @@ function filterAndProcessLinks(
   );
 }
 
+export type RerankerResult = {
+  mapDocument: MapDocument[];
+  tokensUsed: number;
+}
 export async function rerankLinksWithLLM(
   mappedLinks: MapDocument[],
   searchQuery: string,
   urlTraces: URLTrace[],
-): Promise {
+): Promise {
   const chunkSize = 100;
   const chunks: MapDocument[][] = [];
   const TIMEOUT_MS = 20000;
   const MAX_RETRIES = 2;
+  let totalTokensUsed = 0;
 
   // Split mappedLinks into chunks of 200
   for (let i = 0; i < mappedLinks.length; i += chunkSize) {
@@ -225,6 +230,7 @@ export async function rerankLinksWithLLM(
         return [];
       }
 
+      totalTokensUsed += completion.numTokens || 0;
+
       // console.log(`Chunk ${chunkIndex + 1}: Found ${completion.extract.relevantLinks.length} relevant links`);
       return completion.extract.relevantLinks;
@@ -252,5 +258,8 @@ export async function rerankLinksWithLLM(
     .filter((link): link is MapDocument => link !== undefined);
 
   // console.log(`Returning ${relevantLinks.length} relevant links`);
-  return relevantLinks;
-}
\ No newline at end of file
+  return {
+    mapDocument: relevantLinks,
+    tokensUsed: totalTokensUsed,
+  };
+}
diff --git a/apps/api/src/lib/extract/url-processor.ts b/apps/api/src/lib/extract/url-processor.ts
index eb5f0278..30c18038 100644
--- a/apps/api/src/lib/extract/url-processor.ts
+++ b/apps/api/src/lib/extract/url-processor.ts
@@ -199,15 +199,19 @@ export async function processUrl(
   //   (link, index) => `${index + 1}. URL: ${link.url}, Title: ${link.title}, Description: ${link.description}`
   // );
 
-  mappedLinks = await rerankLinksWithLLM(mappedLinks, searchQuery, urlTraces);
+  const rerankerResult = await rerankLinksWithLLM(mappedLinks, searchQuery, urlTraces);
+  mappedLinks = rerankerResult.mapDocument;
+  let tokensUsed = rerankerResult.tokensUsed;
 
   // 2nd Pass, useful for when the first pass returns too many links
   if (mappedLinks.length > 100) {
-    mappedLinks = await rerankLinksWithLLM(
+    const rerankerResult = await rerankLinksWithLLM(
       mappedLinks,
       searchQuery,
       urlTraces,
     );
+    mappedLinks = rerankerResult.mapDocument;
+    tokensUsed += rerankerResult.tokensUsed;
   }
 
   // dumpToFile(
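// Illustrative sketch, not part of the patch: the shape of the two-pass rerank flow in
// url-processor.ts above, with rerankLinksWithLLM abstracted behind a callback so the
// snippet stands alone. MapDocument is reduced to the fields the example needs.
type MapDocument = { url: string; title?: string; description?: string };
type RerankerResult = { mapDocument: MapDocument[]; tokensUsed: number };

async function rerankUpToTwice(
  links: MapDocument[],
  rerank: (links: MapDocument[]) => Promise<RerankerResult>,
): Promise<RerankerResult> {
  // First pass always runs and seeds the token counter.
  const first = await rerank(links);
  let mapDocument = first.mapDocument;
  let tokensUsed = first.tokensUsed;

  // Second pass only when the first pass still returned too many links,
  // accumulating its token usage on top of the first pass.
  if (mapDocument.length > 100) {
    const second = await rerank(mapDocument);
    mapDocument = second.mapDocument;
    tokensUsed += second.tokensUsed;
  }

  return { mapDocument, tokensUsed };
}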
diff --git a/apps/api/src/lib/extract/usage/llm-cost.ts b/apps/api/src/lib/extract/usage/llm-cost.ts
new file mode 100644
index 00000000..73904161
--- /dev/null
+++ b/apps/api/src/lib/extract/usage/llm-cost.ts
@@ -0,0 +1,59 @@
+import { TokenUsage } from "../../../controllers/v1/types";
+import { logger } from "../../../lib/logger";
+import { modelPrices } from "./model-prices";
+
+interface ModelPricing {
+  input_cost_per_token?: number;
+  output_cost_per_token?: number;
+  input_cost_per_request?: number;
+  mode: string;
+}
+const tokenPerCharacter = 4;
+const baseTokenCost = 300;
+
+export function calculateFinalResultCost(data: any): number {
+  return Math.floor((JSON.stringify(data).length / tokenPerCharacter) + baseTokenCost);
+}
+
+export function estimateTotalCost(tokenUsage: TokenUsage[]): number {
+  return tokenUsage.reduce((total, usage) => {
+    return total + estimateCost(usage);
+  }, 0);
+}
+
+export function estimateCost(tokenUsage: TokenUsage): number {
+  let totalCost = 0;
+  try {
+    let model = tokenUsage.model ?? process.env.MODEL_NAME ?? "gpt-4o-mini";
+    const pricing = modelPrices[model] as ModelPricing;
+
+    if (!pricing) {
+      logger.error(`No pricing information found for model: ${model}`);
+      return 0;
+    }
+
+    if (pricing.mode !== "chat") {
+      logger.error(`Model ${model} is not a chat model`);
+      return 0;
+    }
+
+    // Add per-request cost if applicable (Only Perplexity supports this)
+    if (pricing.input_cost_per_request) {
+      totalCost += pricing.input_cost_per_request;
+    }
+
+    // Add token-based costs
+    if (pricing.input_cost_per_token) {
+      totalCost += tokenUsage.promptTokens * pricing.input_cost_per_token;
+    }
+
+    if (pricing.output_cost_per_token) {
+      totalCost += tokenUsage.completionTokens * pricing.output_cost_per_token;
+    }
+
+    return Number(totalCost.toFixed(7));
+  } catch (error) {
+    logger.error(`Error estimating cost: ${error}`);
+    return totalCost;
+  }
+}
diff --git a/apps/api/src/lib/extract/usage/model-prices.ts b/apps/api/src/lib/extract/usage/model-prices.ts
new file mode 100644
index 00000000..d24baeca
--- /dev/null
+++ b/apps/api/src/lib/extract/usage/model-prices.ts
@@ -0,0 +1,8140 @@
+// Last updated: 2025-01-15
+// Thanks to AgentOps - https://github.com/AgentOps-AI/tokencost
+// 291 kb
+
+export const modelPrices = {
+  "gpt-4": {
+    "max_tokens": 4096,
+    "max_input_tokens": 8192,
+    "max_output_tokens": 4096,
+    "input_cost_per_token": 3e-05,
+    "output_cost_per_token": 6e-05,
+    "litellm_provider": "openai",
+    "mode": "chat",
+    "supports_function_calling": true,
+    "supports_prompt_caching": true,
+    "supports_system_messages": true
+  },
+  "gpt-4o": {
+    "max_tokens": 16384,
+    "max_input_tokens": 128000,
+    "max_output_tokens": 16384,
+    "input_cost_per_token": 2.5e-06,
+    "output_cost_per_token": 1e-05,
+    "input_cost_per_token_batches": 1.25e-06,
+    "output_cost_per_token_batches": 5e-06,
+
"cache_read_input_token_cost": 1.25e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4o-audio-preview": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 2.5e-06, + "input_cost_per_audio_token": 0.0001, + "output_cost_per_token": 1e-05, + "output_cost_per_audio_token": 0.0002, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_system_messages": true + }, + "gpt-4o-audio-preview-2024-10-01": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 2.5e-06, + "input_cost_per_audio_token": 0.0001, + "output_cost_per_token": 1e-05, + "output_cost_per_audio_token": 0.0002, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_system_messages": true + }, + "gpt-4o-mini": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 6e-07, + "input_cost_per_token_batches": 7.5e-08, + "output_cost_per_token_batches": 3e-07, + "cache_read_input_token_cost": 7.5e-08, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4o-mini-2024-07-18": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 6e-07, + "input_cost_per_token_batches": 7.5e-08, + "output_cost_per_token_batches": 3e-07, + "cache_read_input_token_cost": 7.5e-08, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "o1-mini": { + "max_tokens": 65536, + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.2e-05, + "cache_read_input_token_cost": 1.5e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_vision": true, + "supports_prompt_caching": true + }, + "o1-mini-2024-09-12": { + "max_tokens": 65536, + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.2e-05, + "cache_read_input_token_cost": 1.5e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_vision": true, + "supports_prompt_caching": true + }, + "o1-preview": { + "max_tokens": 32768, + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 6e-05, + "cache_read_input_token_cost": 7.5e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_vision": true, + "supports_prompt_caching": true + }, + "o1-preview-2024-09-12": { + "max_tokens": 32768, + "max_input_tokens": 128000, + 
"max_output_tokens": 32768, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 6e-05, + "cache_read_input_token_cost": 7.5e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_vision": true, + "supports_prompt_caching": true + }, + "chatgpt-4o-latest": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4o-2024-05-13": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-06, + "output_cost_per_token": 1.5e-05, + "input_cost_per_token_batches": 2.5e-06, + "output_cost_per_token_batches": 7.5e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4o-2024-08-06": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 2.5e-06, + "output_cost_per_token": 1e-05, + "input_cost_per_token_batches": 1.25e-06, + "output_cost_per_token_batches": 5e-06, + "cache_read_input_token_cost": 1.25e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4-turbo-preview": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 3e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4-0314": { + "max_tokens": 4096, + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-05, + "output_cost_per_token": 6e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4-0613": { + "max_tokens": 4096, + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-05, + "output_cost_per_token": 6e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4-32k": { + "max_tokens": 4096, + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "input_cost_per_token": 6e-05, + "output_cost_per_token": 0.00012, + "litellm_provider": "openai", + "mode": "chat", + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4-32k-0314": { + "max_tokens": 4096, + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "input_cost_per_token": 6e-05, + "output_cost_per_token": 0.00012, + "litellm_provider": "openai", + "mode": "chat", + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4-32k-0613": { + "max_tokens": 4096, + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "input_cost_per_token": 6e-05, + "output_cost_per_token": 0.00012, + "litellm_provider": "openai", + "mode": "chat", + 
"supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4-turbo": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 3e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4-turbo-2024-04-09": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 3e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4-1106-preview": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 3e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4-0125-preview": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 3e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4-vision-preview": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 3e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-4-1106-vision-preview": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 3e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-3.5-turbo": { + "max_tokens": 4097, + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-3.5-turbo-0301": { + "max_tokens": 4097, + "max_input_tokens": 4097, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-3.5-turbo-0613": { + "max_tokens": 4097, + "max_input_tokens": 4097, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-3.5-turbo-1106": { + "max_tokens": 16385, + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + 
"supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-3.5-turbo-0125": { + "max_tokens": 16385, + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 1.5e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-3.5-turbo-16k": { + "max_tokens": 16385, + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 4e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "gpt-3.5-turbo-16k-0613": { + "max_tokens": 16385, + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 4e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "ft:gpt-3.5-turbo": { + "max_tokens": 4096, + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 6e-06, + "input_cost_per_token_batches": 1.5e-06, + "output_cost_per_token_batches": 3e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_system_messages": true + }, + "ft:gpt-3.5-turbo-0125": { + "max_tokens": 4096, + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 6e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_system_messages": true + }, + "ft:gpt-3.5-turbo-1106": { + "max_tokens": 4096, + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 6e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_system_messages": true + }, + "ft:gpt-3.5-turbo-0613": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 6e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_system_messages": true + }, + "ft:gpt-4-0613": { + "max_tokens": 4096, + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-05, + "output_cost_per_token": 6e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "source": "OpenAI needs to add pricing for this ft model, will be updated when added by OpenAI. 
Defaulting to base model pricing", + "supports_system_messages": true + }, + "ft:gpt-4o-2024-08-06": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 3.75e-06, + "output_cost_per_token": 1.5e-05, + "input_cost_per_token_batches": 1.875e-06, + "output_cost_per_token_batches": 7.5e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_system_messages": true + }, + "ft:gpt-4o-mini-2024-07-18": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 1.2e-06, + "input_cost_per_token_batches": 1.5e-07, + "output_cost_per_token_batches": 6e-07, + "cache_read_input_token_cost": 1.5e-07, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "ft:davinci-002": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 2e-06, + "input_cost_per_token_batches": 1e-06, + "output_cost_per_token_batches": 1e-06, + "litellm_provider": "text-completion-openai", + "mode": "completion" + }, + "ft:babbage-002": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "input_cost_per_token": 4e-07, + "output_cost_per_token": 4e-07, + "input_cost_per_token_batches": 2e-07, + "output_cost_per_token_batches": 2e-07, + "litellm_provider": "text-completion-openai", + "mode": "completion" + }, + "text-embedding-3-large": { + "max_tokens": 8191, + "max_input_tokens": 8191, + "output_vector_size": 3072, + "input_cost_per_token": 1.3e-07, + "output_cost_per_token": 0.0, + "input_cost_per_token_batches": 6.5e-08, + "output_cost_per_token_batches": 0.0, + "litellm_provider": "openai", + "mode": "embedding" + }, + "text-embedding-3-small": { + "max_tokens": 8191, + "max_input_tokens": 8191, + "output_vector_size": 1536, + "input_cost_per_token": 2e-08, + "output_cost_per_token": 0.0, + "input_cost_per_token_batches": 1e-08, + "output_cost_per_token_batches": 0.0, + "litellm_provider": "openai", + "mode": "embedding" + }, + "text-embedding-ada-002": { + "max_tokens": 8191, + "max_input_tokens": 8191, + "output_vector_size": 1536, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "openai", + "mode": "embedding" + }, + "text-embedding-ada-002-v2": { + "max_tokens": 8191, + "max_input_tokens": 8191, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "input_cost_per_token_batches": 5e-08, + "output_cost_per_token_batches": 0.0, + "litellm_provider": "openai", + "mode": "embedding" + }, + "text-moderation-stable": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 0, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "openai", + "mode": "moderations" + }, + "text-moderation-007": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 0, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "openai", + "mode": "moderations" + }, + "text-moderation-latest": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 0, + 
"input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "openai", + "mode": "moderations" + }, + "256-x-256/dall-e-2": { + "mode": "image_generation", + "input_cost_per_pixel": 2.4414e-07, + "output_cost_per_pixel": 0.0, + "litellm_provider": "openai" + }, + "512-x-512/dall-e-2": { + "mode": "image_generation", + "input_cost_per_pixel": 6.86e-08, + "output_cost_per_pixel": 0.0, + "litellm_provider": "openai" + }, + "1024-x-1024/dall-e-2": { + "mode": "image_generation", + "input_cost_per_pixel": 1.9e-08, + "output_cost_per_pixel": 0.0, + "litellm_provider": "openai" + }, + "hd/1024-x-1792/dall-e-3": { + "mode": "image_generation", + "input_cost_per_pixel": 6.539e-08, + "output_cost_per_pixel": 0.0, + "litellm_provider": "openai" + }, + "hd/1792-x-1024/dall-e-3": { + "mode": "image_generation", + "input_cost_per_pixel": 6.539e-08, + "output_cost_per_pixel": 0.0, + "litellm_provider": "openai" + }, + "hd/1024-x-1024/dall-e-3": { + "mode": "image_generation", + "input_cost_per_pixel": 7.629e-08, + "output_cost_per_pixel": 0.0, + "litellm_provider": "openai" + }, + "standard/1024-x-1792/dall-e-3": { + "mode": "image_generation", + "input_cost_per_pixel": 4.359e-08, + "output_cost_per_pixel": 0.0, + "litellm_provider": "openai" + }, + "standard/1792-x-1024/dall-e-3": { + "mode": "image_generation", + "input_cost_per_pixel": 4.359e-08, + "output_cost_per_pixel": 0.0, + "litellm_provider": "openai" + }, + "standard/1024-x-1024/dall-e-3": { + "mode": "image_generation", + "input_cost_per_pixel": 3.81469e-08, + "output_cost_per_pixel": 0.0, + "litellm_provider": "openai" + }, + "whisper-1": { + "mode": "audio_transcription", + "input_cost_per_second": 0, + "output_cost_per_second": 0.0001, + "litellm_provider": "openai" + }, + "tts-1": { + "mode": "audio_speech", + "input_cost_per_character": 1.5e-05, + "litellm_provider": "openai" + }, + "tts-1-hd": { + "mode": "audio_speech", + "input_cost_per_character": 3e-05, + "litellm_provider": "openai" + }, + "azure/tts-1": { + "mode": "audio_speech", + "input_cost_per_character": 1.5e-05, + "litellm_provider": "azure" + }, + "azure/tts-1-hd": { + "mode": "audio_speech", + "input_cost_per_character": 3e-05, + "litellm_provider": "azure" + }, + "azure/whisper-1": { + "mode": "audio_transcription", + "input_cost_per_second": 0, + "output_cost_per_second": 0.0001, + "litellm_provider": "azure" + }, + "azure/o1-mini": { + "max_tokens": 65536, + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.2e-05, + "cache_read_input_token_cost": 1.5e-06, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false, + "supports_prompt_caching": true + }, + "azure/o1-mini-2024-09-12": { + "max_tokens": 65536, + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.2e-05, + "cache_read_input_token_cost": 1.5e-06, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false, + "supports_prompt_caching": true + }, + "azure/o1-preview": { + "max_tokens": 32768, + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 6e-05, + "cache_read_input_token_cost": 7.5e-06, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + 
"supports_parallel_function_calling": true, + "supports_vision": false, + "supports_prompt_caching": true + }, + "azure/o1-preview-2024-09-12": { + "max_tokens": 32768, + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 6e-05, + "cache_read_input_token_cost": 7.5e-06, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false, + "supports_prompt_caching": true + }, + "azure/gpt-4o": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-06, + "output_cost_per_token": 1.5e-05, + "cache_read_input_token_cost": 1.25e-06, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true, + "supports_prompt_caching": true + }, + "azure/gpt-4o-2024-08-06": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 2.75e-06, + "output_cost_per_token": 1.1e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_prompt_caching": true + }, + "azure/gpt-4o-2024-05-13": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true, + "supports_prompt_caching": true + }, + "azure/global-standard/gpt-4o-2024-08-06": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 2.5e-06, + "output_cost_per_token": 1e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_prompt_caching": true + }, + "azure/global-standard/gpt-4o-mini": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true + }, + "azure/gpt-4o-mini": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 1.65e-07, + "output_cost_per_token": 6.6e-07, + "cache_read_input_token_cost": 7.5e-08, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_prompt_caching": true + }, + "azure/gpt-4-turbo-2024-04-09": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 3e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "azure/gpt-4-0125-preview": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 
3e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true + }, + "azure/gpt-4-1106-preview": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 3e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true + }, + "azure/gpt-4-0613": { + "max_tokens": 4096, + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-05, + "output_cost_per_token": 6e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true + }, + "azure/gpt-4-32k-0613": { + "max_tokens": 4096, + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "input_cost_per_token": 6e-05, + "output_cost_per_token": 0.00012, + "litellm_provider": "azure", + "mode": "chat" + }, + "azure/gpt-4-32k": { + "max_tokens": 4096, + "max_input_tokens": 32768, + "max_output_tokens": 4096, + "input_cost_per_token": 6e-05, + "output_cost_per_token": 0.00012, + "litellm_provider": "azure", + "mode": "chat" + }, + "azure/gpt-4": { + "max_tokens": 4096, + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-05, + "output_cost_per_token": 6e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true + }, + "azure/gpt-4-turbo": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 3e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true + }, + "azure/gpt-4-turbo-vision-preview": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 3e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_vision": true + }, + "azure/gpt-35-turbo-16k-0613": { + "max_tokens": 4096, + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 4e-06, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true + }, + "azure/gpt-35-turbo-1106": { + "max_tokens": 4096, + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true + }, + "azure/gpt-35-turbo-0613": { + "max_tokens": 4097, + "max_input_tokens": 4097, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true + }, + "azure/gpt-35-turbo-0301": { + "max_tokens": 4097, + "max_input_tokens": 4097, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 2e-06, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true + }, + "azure/gpt-35-turbo-0125": { + "max_tokens": 4096, + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 1.5e-06, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true + }, + 
"azure/gpt-35-turbo-16k": { + "max_tokens": 4096, + "max_input_tokens": 16385, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 4e-06, + "litellm_provider": "azure", + "mode": "chat" + }, + "azure/gpt-35-turbo": { + "max_tokens": 4096, + "max_input_tokens": 4097, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 1.5e-06, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true + }, + "azure/gpt-3.5-turbo-instruct-0914": { + "max_tokens": 4097, + "max_input_tokens": 4097, + "input_cost_per_token": 1.5e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "azure_text", + "mode": "completion" + }, + "azure/gpt-35-turbo-instruct": { + "max_tokens": 4097, + "max_input_tokens": 4097, + "input_cost_per_token": 1.5e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "azure_text", + "mode": "completion" + }, + "azure/gpt-35-turbo-instruct-0914": { + "max_tokens": 4097, + "max_input_tokens": 4097, + "input_cost_per_token": 1.5e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "azure_text", + "mode": "completion" + }, + "azure/mistral-large-latest": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true + }, + "azure/mistral-large-2402": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true + }, + "azure/command-r-plus": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true + }, + "azure/ada": { + "max_tokens": 8191, + "max_input_tokens": 8191, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "azure", + "mode": "embedding" + }, + "azure/text-embedding-ada-002": { + "max_tokens": 8191, + "max_input_tokens": 8191, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "azure", + "mode": "embedding" + }, + "azure/text-embedding-3-large": { + "max_tokens": 8191, + "max_input_tokens": 8191, + "input_cost_per_token": 1.3e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "azure", + "mode": "embedding" + }, + "azure/text-embedding-3-small": { + "max_tokens": 8191, + "max_input_tokens": 8191, + "input_cost_per_token": 2e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "azure", + "mode": "embedding" + }, + "azure/standard/1024-x-1024/dall-e-3": { + "input_cost_per_pixel": 3.81469e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "azure", + "mode": "image_generation" + }, + "azure/hd/1024-x-1024/dall-e-3": { + "input_cost_per_pixel": 7.629e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "azure", + "mode": "image_generation" + }, + "azure/standard/1024-x-1792/dall-e-3": { + "input_cost_per_pixel": 4.359e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "azure", + "mode": "image_generation" + }, + "azure/standard/1792-x-1024/dall-e-3": { + "input_cost_per_pixel": 4.359e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "azure", + "mode": "image_generation" + }, + "azure/hd/1024-x-1792/dall-e-3": { + "input_cost_per_pixel": 6.539e-08, + "output_cost_per_token": 0.0, + 
"litellm_provider": "azure", + "mode": "image_generation" + }, + "azure/hd/1792-x-1024/dall-e-3": { + "input_cost_per_pixel": 6.539e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "azure", + "mode": "image_generation" + }, + "azure/standard/1024-x-1024/dall-e-2": { + "input_cost_per_pixel": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "azure", + "mode": "image_generation" + }, + "azure_ai/jamba-instruct": { + "max_tokens": 4096, + "max_input_tokens": 70000, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 7e-07, + "litellm_provider": "azure_ai", + "mode": "chat" + }, + "azure_ai/mistral-large": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 4e-06, + "output_cost_per_token": 1.2e-05, + "litellm_provider": "azure_ai", + "mode": "chat", + "supports_function_calling": true + }, + "azure_ai/mistral-small": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 3e-06, + "litellm_provider": "azure_ai", + "supports_function_calling": true, + "mode": "chat" + }, + "azure_ai/Meta-Llama-3-70B-Instruct": { + "max_tokens": 2048, + "max_input_tokens": 8192, + "max_output_tokens": 2048, + "input_cost_per_token": 1.1e-06, + "output_cost_per_token": 3.7e-07, + "litellm_provider": "azure_ai", + "mode": "chat" + }, + "azure_ai/Meta-Llama-3.1-8B-Instruct": { + "max_tokens": 2048, + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 6.1e-07, + "litellm_provider": "azure_ai", + "mode": "chat", + "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-8b-instruct-offer?tab=PlansAndPrice" + }, + "azure_ai/Meta-Llama-3.1-70B-Instruct": { + "max_tokens": 2048, + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "input_cost_per_token": 2.68e-06, + "output_cost_per_token": 3.54e-06, + "litellm_provider": "azure_ai", + "mode": "chat", + "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-70b-instruct-offer?tab=PlansAndPrice" + }, + "azure_ai/Meta-Llama-3.1-405B-Instruct": { + "max_tokens": 2048, + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "input_cost_per_token": 5.33e-06, + "output_cost_per_token": 1.6e-05, + "litellm_provider": "azure_ai", + "mode": "chat", + "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-405b-instruct-offer?tab=PlansAndPrice" + }, + "azure_ai/cohere-rerank-v3-multilingual": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_query_tokens": 2048, + "input_cost_per_token": 0.0, + "input_cost_per_query": 0.002, + "output_cost_per_token": 0.0, + "litellm_provider": "azure_ai", + "mode": "rerank" + }, + "azure_ai/cohere-rerank-v3-english": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_query_tokens": 2048, + "input_cost_per_token": 0.0, + "input_cost_per_query": 0.002, + "output_cost_per_token": 0.0, + "litellm_provider": "azure_ai", + "mode": "rerank" + }, + "azure_ai/Cohere-embed-v3-english": { + "max_tokens": 512, + "max_input_tokens": 512, + "output_vector_size": 1024, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "azure_ai", + "mode": "embedding", + "source": 
"https://azuremarketplace.microsoft.com/en-us/marketplace/apps/cohere.cohere-embed-v3-english-offer?tab=PlansAndPrice" + }, + "azure_ai/Cohere-embed-v3-multilingual": { + "max_tokens": 512, + "max_input_tokens": 512, + "output_vector_size": 1024, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "azure_ai", + "mode": "embedding", + "source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/cohere.cohere-embed-v3-english-offer?tab=PlansAndPrice" + }, + "babbage-002": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "input_cost_per_token": 4e-07, + "output_cost_per_token": 4e-07, + "litellm_provider": "text-completion-openai", + "mode": "completion" + }, + "davinci-002": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "text-completion-openai", + "mode": "completion" + }, + "gpt-3.5-turbo-instruct": { + "max_tokens": 4096, + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "text-completion-openai", + "mode": "completion" + }, + "gpt-3.5-turbo-instruct-0914": { + "max_tokens": 4097, + "max_input_tokens": 8192, + "max_output_tokens": 4097, + "input_cost_per_token": 1.5e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "text-completion-openai", + "mode": "completion" + }, + "claude-instant-1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 1.63e-06, + "output_cost_per_token": 5.51e-06, + "litellm_provider": "anthropic", + "mode": "chat" + }, + "mistral/mistral-tiny": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 2.5e-07, + "litellm_provider": "mistral", + "mode": "chat", + "supports_assistant_prefill": true + }, + "mistral/mistral-small": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 3e-06, + "litellm_provider": "mistral", + "supports_function_calling": true, + "mode": "chat", + "supports_assistant_prefill": true + }, + "mistral/mistral-small-latest": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 3e-06, + "litellm_provider": "mistral", + "supports_function_calling": true, + "mode": "chat", + "supports_assistant_prefill": true + }, + "mistral/mistral-medium": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 2.7e-06, + "output_cost_per_token": 8.1e-06, + "litellm_provider": "mistral", + "mode": "chat", + "supports_assistant_prefill": true + }, + "mistral/mistral-medium-latest": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 2.7e-06, + "output_cost_per_token": 8.1e-06, + "litellm_provider": "mistral", + "mode": "chat", + "supports_assistant_prefill": true + }, + "mistral/mistral-medium-2312": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 2.7e-06, + "output_cost_per_token": 8.1e-06, + "litellm_provider": "mistral", + "mode": "chat", + "supports_assistant_prefill": true + }, + "mistral/mistral-large-latest": { + "max_tokens": 128000, + "max_input_tokens": 128000, 
+ "max_output_tokens": 128000, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 6e-06, + "litellm_provider": "mistral", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true + }, + "mistral/mistral-large-2402": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 4e-06, + "output_cost_per_token": 1.2e-05, + "litellm_provider": "mistral", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true + }, + "mistral/mistral-large-2407": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 9e-06, + "litellm_provider": "mistral", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true + }, + "mistral/pixtral-12b-2409": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 1.5e-07, + "litellm_provider": "mistral", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true, + "supports_vision": true + }, + "mistral/open-mistral-7b": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 2.5e-07, + "litellm_provider": "mistral", + "mode": "chat", + "supports_assistant_prefill": true + }, + "mistral/open-mixtral-8x7b": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 7e-07, + "output_cost_per_token": 7e-07, + "litellm_provider": "mistral", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true + }, + "mistral/open-mixtral-8x22b": { + "max_tokens": 8191, + "max_input_tokens": 64000, + "max_output_tokens": 8191, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 6e-06, + "litellm_provider": "mistral", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true + }, + "mistral/codestral-latest": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 3e-06, + "litellm_provider": "mistral", + "mode": "chat", + "supports_assistant_prefill": true + }, + "mistral/codestral-2405": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 3e-06, + "litellm_provider": "mistral", + "mode": "chat", + "supports_assistant_prefill": true + }, + "mistral/open-mistral-nemo": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 3e-07, + "litellm_provider": "mistral", + "mode": "chat", + "source": "https://mistral.ai/technology/", + "supports_assistant_prefill": true + }, + "mistral/open-mistral-nemo-2407": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 3e-07, + "litellm_provider": "mistral", + "mode": "chat", + "source": "https://mistral.ai/technology/", + "supports_assistant_prefill": true + }, + "mistral/open-codestral-mamba": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 2.5e-07, + "litellm_provider": "mistral", + "mode": "chat", + "source": 
"https://mistral.ai/technology/", + "supports_assistant_prefill": true + }, + "mistral/codestral-mamba-latest": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 2.5e-07, + "litellm_provider": "mistral", + "mode": "chat", + "source": "https://mistral.ai/technology/", + "supports_assistant_prefill": true + }, + "mistral/mistral-embed": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "input_cost_per_token": 1e-07, + "litellm_provider": "mistral", + "mode": "embedding" + }, + "deepseek-chat": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.4e-07, + "input_cost_per_token_cache_hit": 1.4e-08, + "output_cost_per_token": 2.8e-07, + "litellm_provider": "deepseek", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true, + "supports_tool_choice": true, + "supports_prompt_caching": true + }, + "codestral/codestral-latest": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "codestral", + "mode": "chat", + "source": "https://docs.mistral.ai/capabilities/code_generation/", + "supports_assistant_prefill": true + }, + "codestral/codestral-2405": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "codestral", + "mode": "chat", + "source": "https://docs.mistral.ai/capabilities/code_generation/", + "supports_assistant_prefill": true + }, + "text-completion-codestral/codestral-latest": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "text-completion-codestral", + "mode": "completion", + "source": "https://docs.mistral.ai/capabilities/code_generation/" + }, + "text-completion-codestral/codestral-2405": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "text-completion-codestral", + "mode": "completion", + "source": "https://docs.mistral.ai/capabilities/code_generation/" + }, + "deepseek-coder": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.4e-07, + "input_cost_per_token_cache_hit": 1.4e-08, + "output_cost_per_token": 2.8e-07, + "litellm_provider": "deepseek", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true, + "supports_tool_choice": true, + "supports_prompt_caching": true + }, + "groq/llama2-70b-4096": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 7e-07, + "output_cost_per_token": 8e-07, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/llama3-8b-8192": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 5e-08, + "output_cost_per_token": 8e-08, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/llama3-70b-8192": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 5.9e-07, + "output_cost_per_token": 7.9e-07, + 
"litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/llama-3.1-8b-instant": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 5e-08, + "output_cost_per_token": 8e-08, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/llama-3.1-70b-versatile": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 5.9e-07, + "output_cost_per_token": 7.9e-07, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/llama-3.1-405b-reasoning": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 5.9e-07, + "output_cost_per_token": 7.9e-07, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/mixtral-8x7b-32768": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 2.4e-07, + "output_cost_per_token": 2.4e-07, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/gemma-7b-it": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 7e-08, + "output_cost_per_token": 7e-08, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/gemma2-9b-it": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/llama3-groq-70b-8192-tool-use-preview": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 8.9e-07, + "output_cost_per_token": 8.9e-07, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/llama3-groq-8b-8192-tool-use-preview": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1.9e-07, + "output_cost_per_token": 1.9e-07, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "cerebras/llama3.1-8b": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 1e-07, + "litellm_provider": "cerebras", + "mode": "chat", + "supports_function_calling": true + }, + "cerebras/llama3.1-70b": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 6e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "cerebras", + "mode": "chat", + "supports_function_calling": true + }, + "friendliai/mixtral-8x7b-instruct-v0-1": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 4e-07, + "output_cost_per_token": 4e-07, + "litellm_provider": "friendliai", + "mode": "chat", + "supports_function_calling": true + }, + "friendliai/meta-llama-3-8b-instruct": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 
8192, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 1e-07, + "litellm_provider": "friendliai", + "mode": "chat", + "supports_function_calling": true + }, + "friendliai/meta-llama-3-70b-instruct": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 8e-07, + "output_cost_per_token": 8e-07, + "litellm_provider": "friendliai", + "mode": "chat", + "supports_function_calling": true + }, + "claude-instant-1.2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 1.63e-07, + "output_cost_per_token": 5.51e-07, + "litellm_provider": "anthropic", + "mode": "chat" + }, + "claude-2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "anthropic", + "mode": "chat" + }, + "claude-2.1": { + "max_tokens": 8191, + "max_input_tokens": 200000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "anthropic", + "mode": "chat" + }, + "claude-3-haiku-20240307": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 1.25e-06, + "cache_creation_input_token_cost": 3e-07, + "cache_read_input_token_cost": 3e-08, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 264, + "supports_assistant_prefill": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, + "claude-3-haiku-latest": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 1.25e-06, + "cache_creation_input_token_cost": 3e-07, + "cache_read_input_token_cost": 3e-08, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 264, + "supports_assistant_prefill": true, + "supports_prompt_caching": true + }, + "claude-3-opus-20240229": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 7.5e-05, + "cache_creation_input_token_cost": 1.875e-05, + "cache_read_input_token_cost": 1.5e-06, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 395, + "supports_assistant_prefill": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, + "claude-3-opus-latest": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 7.5e-05, + "cache_creation_input_token_cost": 1.875e-05, + "cache_read_input_token_cost": 1.5e-06, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 395, + "supports_assistant_prefill": true, + "supports_prompt_caching": true + }, + "claude-3-sonnet-20240229": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + 
"tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, + "claude-3-5-sonnet-20240620": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "cache_creation_input_token_cost": 3.75e-06, + "cache_read_input_token_cost": 3e-07, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, + "claude-3-5-sonnet-20241022": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "cache_creation_input_token_cost": 3.75e-06, + "cache_read_input_token_cost": 3e-07, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, + "claude-3-5-sonnet-latest": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "cache_creation_input_token_cost": 3.75e-06, + "cache_read_input_token_cost": 3e-07, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true, + "supports_prompt_caching": true + }, + "text-bison": { + "max_tokens": 2048, + "max_input_tokens": 8192, + "max_output_tokens": 2048, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "text-bison@001": { + "max_tokens": 1024, + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "text-bison@002": { + "max_tokens": 1024, + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "text-bison32k": { + "max_tokens": 1024, + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "text-bison32k@002": { + "max_tokens": 1024, + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-text-models", + 
"mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "text-unicorn": { + "max_tokens": 1024, + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 2.8e-05, + "litellm_provider": "vertex_ai-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "text-unicorn@001": { + "max_tokens": 1024, + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 2.8e-05, + "litellm_provider": "vertex_ai-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "chat-bison": { + "max_tokens": 4096, + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-chat-models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "chat-bison@001": { + "max_tokens": 4096, + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-chat-models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "chat-bison@002": { + "max_tokens": 4096, + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-chat-models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "chat-bison-32k": { + "max_tokens": 8192, + "max_input_tokens": 32000, + "max_output_tokens": 8192, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-chat-models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "chat-bison-32k@002": { + "max_tokens": 8192, + "max_input_tokens": 32000, + "max_output_tokens": 8192, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-chat-models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "code-bison": { + "max_tokens": 1024, + "max_input_tokens": 6144, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-code-text-models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "code-bison@001": { + "max_tokens": 1024, + "max_input_tokens": 6144, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + 
"output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-code-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "code-bison@002": { + "max_tokens": 1024, + "max_input_tokens": 6144, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-code-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "code-bison32k": { + "max_tokens": 1024, + "max_input_tokens": 6144, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-code-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "code-bison-32k@002": { + "max_tokens": 1024, + "max_input_tokens": 6144, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-code-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "code-gecko@001": { + "max_tokens": 64, + "max_input_tokens": 2048, + "max_output_tokens": 64, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "litellm_provider": "vertex_ai-code-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "code-gecko@002": { + "max_tokens": 64, + "max_input_tokens": 2048, + "max_output_tokens": 64, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "litellm_provider": "vertex_ai-code-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "code-gecko": { + "max_tokens": 64, + "max_input_tokens": 2048, + "max_output_tokens": 64, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "litellm_provider": "vertex_ai-code-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "code-gecko-latest": { + "max_tokens": 64, + "max_input_tokens": 2048, + "max_output_tokens": 64, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "litellm_provider": "vertex_ai-code-text-models", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "codechat-bison@latest": { + "max_tokens": 1024, + "max_input_tokens": 6144, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-code-chat-models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "codechat-bison": { + "max_tokens": 1024, + "max_input_tokens": 6144, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 
2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-code-chat-models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "codechat-bison@001": { + "max_tokens": 1024, + "max_input_tokens": 6144, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-code-chat-models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "codechat-bison@002": { + "max_tokens": 1024, + "max_input_tokens": 6144, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-code-chat-models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "codechat-bison-32k": { + "max_tokens": 8192, + "max_input_tokens": 32000, + "max_output_tokens": 8192, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-code-chat-models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "codechat-bison-32k@002": { + "max_tokens": 8192, + "max_input_tokens": 32000, + "max_output_tokens": 8192, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "input_cost_per_character": 2.5e-07, + "output_cost_per_character": 5e-07, + "litellm_provider": "vertex_ai-code-chat-models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-pro": { + "max_tokens": 8192, + "max_input_tokens": 32760, + "max_output_tokens": 8192, + "input_cost_per_image": 0.0025, + "input_cost_per_video_per_second": 0.002, + "input_cost_per_token": 5e-07, + "input_cost_per_character": 1.25e-07, + "output_cost_per_token": 1.5e-06, + "output_cost_per_character": 3.75e-07, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_function_calling": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" + }, + "gemini-1.0-pro": { + "max_tokens": 8192, + "max_input_tokens": 32760, + "max_output_tokens": 8192, + "input_cost_per_image": 0.0025, + "input_cost_per_video_per_second": 0.002, + "input_cost_per_token": 5e-07, + "input_cost_per_character": 1.25e-07, + "output_cost_per_token": 1.5e-06, + "output_cost_per_character": 3.75e-07, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_function_calling": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#google_models" + }, + "gemini-1.0-pro-001": { + "max_tokens": 8192, + "max_input_tokens": 32760, + "max_output_tokens": 8192, + "input_cost_per_image": 0.0025, + "input_cost_per_video_per_second": 0.002, + "input_cost_per_token": 5e-07, + "input_cost_per_character": 1.25e-07, + "output_cost_per_token": 1.5e-06, + "output_cost_per_character": 3.75e-07, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_function_calling": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.0-ultra": { + 
"max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 2048, + "input_cost_per_image": 0.0025, + "input_cost_per_video_per_second": 0.002, + "input_cost_per_token": 5e-07, + "input_cost_per_character": 1.25e-07, + "output_cost_per_token": 1.5e-06, + "output_cost_per_character": 3.75e-07, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_function_calling": true, + "source": "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.0-ultra-001": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 2048, + "input_cost_per_image": 0.0025, + "input_cost_per_video_per_second": 0.002, + "input_cost_per_token": 5e-07, + "input_cost_per_character": 1.25e-07, + "output_cost_per_token": 1.5e-06, + "output_cost_per_character": 3.75e-07, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_function_calling": true, + "source": "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.0-pro-002": { + "max_tokens": 8192, + "max_input_tokens": 32760, + "max_output_tokens": 8192, + "input_cost_per_image": 0.0025, + "input_cost_per_video_per_second": 0.002, + "input_cost_per_token": 5e-07, + "input_cost_per_character": 1.25e-07, + "output_cost_per_token": 1.5e-06, + "output_cost_per_character": 3.75e-07, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_function_calling": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.5-pro": { + "max_tokens": 8192, + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "input_cost_per_image": 0.00032875, + "input_cost_per_audio_per_second": 3.125e-05, + "input_cost_per_video_per_second": 0.00032875, + "input_cost_per_token": 1.25e-06, + "input_cost_per_character": 3.125e-07, + "input_cost_per_image_above_128k_tokens": 0.0006575, + "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, + "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, + "input_cost_per_token_above_128k_tokens": 2.5e-06, + "input_cost_per_character_above_128k_tokens": 6.25e-07, + "output_cost_per_token": 5e-06, + "output_cost_per_character": 1.25e-06, + "output_cost_per_token_above_128k_tokens": 1e-05, + "output_cost_per_character_above_128k_tokens": 2.5e-06, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_vision": true, + "supports_pdf_input": true, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_response_schema": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.5-pro-002": { + "max_tokens": 8192, + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "input_cost_per_image": 0.00032875, + "input_cost_per_audio_per_second": 3.125e-05, + "input_cost_per_video_per_second": 0.00032875, + "input_cost_per_token": 1.25e-06, + "input_cost_per_character": 3.125e-07, + "input_cost_per_image_above_128k_tokens": 0.0006575, + "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, + "input_cost_per_audio_per_second_above_128k_tokens": 
6.25e-05, + "input_cost_per_token_above_128k_tokens": 2.5e-06, + "input_cost_per_character_above_128k_tokens": 6.25e-07, + "output_cost_per_token": 5e-06, + "output_cost_per_character": 1.25e-06, + "output_cost_per_token_above_128k_tokens": 1e-05, + "output_cost_per_character_above_128k_tokens": 2.5e-06, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_vision": true, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_response_schema": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-pro" + }, + "gemini-1.5-pro-001": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "input_cost_per_image": 0.00032875, + "input_cost_per_audio_per_second": 3.125e-05, + "input_cost_per_video_per_second": 0.00032875, + "input_cost_per_token": 1.25e-06, + "input_cost_per_character": 3.125e-07, + "input_cost_per_image_above_128k_tokens": 0.0006575, + "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, + "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, + "input_cost_per_token_above_128k_tokens": 2.5e-06, + "input_cost_per_character_above_128k_tokens": 6.25e-07, + "output_cost_per_token": 5e-06, + "output_cost_per_character": 1.25e-06, + "output_cost_per_token_above_128k_tokens": 1e-05, + "output_cost_per_character_above_128k_tokens": 2.5e-06, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_vision": true, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_response_schema": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.5-pro-preview-0514": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "input_cost_per_image": 0.00032875, + "input_cost_per_audio_per_second": 3.125e-05, + "input_cost_per_video_per_second": 0.00032875, + "input_cost_per_token": 7.8125e-08, + "input_cost_per_character": 3.125e-07, + "input_cost_per_image_above_128k_tokens": 0.0006575, + "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, + "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, + "input_cost_per_token_above_128k_tokens": 1.5625e-07, + "input_cost_per_character_above_128k_tokens": 6.25e-07, + "output_cost_per_token": 3.125e-07, + "output_cost_per_character": 1.25e-06, + "output_cost_per_token_above_128k_tokens": 6.25e-07, + "output_cost_per_character_above_128k_tokens": 2.5e-06, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_response_schema": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.5-pro-preview-0215": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "input_cost_per_image": 0.00032875, + "input_cost_per_audio_per_second": 3.125e-05, + "input_cost_per_video_per_second": 0.00032875, + "input_cost_per_token": 7.8125e-08, + "input_cost_per_character": 3.125e-07, + "input_cost_per_image_above_128k_tokens": 0.0006575, + "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, + "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, + "input_cost_per_token_above_128k_tokens": 1.5625e-07, + "input_cost_per_character_above_128k_tokens": 6.25e-07, 
+ "output_cost_per_token": 3.125e-07, + "output_cost_per_character": 1.25e-06, + "output_cost_per_token_above_128k_tokens": 6.25e-07, + "output_cost_per_character_above_128k_tokens": 2.5e-06, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_response_schema": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.5-pro-preview-0409": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "input_cost_per_image": 0.00032875, + "input_cost_per_audio_per_second": 3.125e-05, + "input_cost_per_video_per_second": 0.00032875, + "input_cost_per_token": 7.8125e-08, + "input_cost_per_character": 3.125e-07, + "input_cost_per_image_above_128k_tokens": 0.0006575, + "input_cost_per_video_per_second_above_128k_tokens": 0.0006575, + "input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05, + "input_cost_per_token_above_128k_tokens": 1.5625e-07, + "input_cost_per_character_above_128k_tokens": 6.25e-07, + "output_cost_per_token": 3.125e-07, + "output_cost_per_character": 1.25e-06, + "output_cost_per_token_above_128k_tokens": 6.25e-07, + "output_cost_per_character_above_128k_tokens": 2.5e-06, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true, + "supports_response_schema": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.5-flash": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_image": 2e-05, + "input_cost_per_video_per_second": 2e-05, + "input_cost_per_audio_per_second": 2e-06, + "input_cost_per_token": 7.5e-08, + "input_cost_per_character": 1.875e-08, + "input_cost_per_token_above_128k_tokens": 1e-06, + "input_cost_per_character_above_128k_tokens": 2.5e-07, + "input_cost_per_image_above_128k_tokens": 4e-05, + "input_cost_per_video_per_second_above_128k_tokens": 4e-05, + "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, + "output_cost_per_token": 3e-07, + "output_cost_per_character": 7.5e-08, + "output_cost_per_token_above_128k_tokens": 6e-07, + "output_cost_per_character_above_128k_tokens": 1.5e-07, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.5-flash-exp-0827": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_image": 2e-05, + "input_cost_per_video_per_second": 2e-05, + "input_cost_per_audio_per_second": 2e-06, + "input_cost_per_token": 4.688e-09, + "input_cost_per_character": 1.875e-08, + "input_cost_per_token_above_128k_tokens": 1e-06, + "input_cost_per_character_above_128k_tokens": 2.5e-07, + "input_cost_per_image_above_128k_tokens": 4e-05, + 
"input_cost_per_video_per_second_above_128k_tokens": 4e-05, + "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, + "output_cost_per_token": 4.6875e-09, + "output_cost_per_character": 1.875e-08, + "output_cost_per_token_above_128k_tokens": 9.375e-09, + "output_cost_per_character_above_128k_tokens": 3.75e-08, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.5-flash-002": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_image": 2e-05, + "input_cost_per_video_per_second": 2e-05, + "input_cost_per_audio_per_second": 2e-06, + "input_cost_per_token": 7.5e-08, + "input_cost_per_character": 1.875e-08, + "input_cost_per_token_above_128k_tokens": 1e-06, + "input_cost_per_character_above_128k_tokens": 2.5e-07, + "input_cost_per_image_above_128k_tokens": 4e-05, + "input_cost_per_video_per_second_above_128k_tokens": 4e-05, + "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, + "output_cost_per_token": 3e-07, + "output_cost_per_character": 7.5e-08, + "output_cost_per_token_above_128k_tokens": 6e-07, + "output_cost_per_character_above_128k_tokens": 1.5e-07, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-flash" + }, + "gemini-1.5-flash-001": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_image": 2e-05, + "input_cost_per_video_per_second": 2e-05, + "input_cost_per_audio_per_second": 2e-06, + "input_cost_per_token": 7.5e-08, + "input_cost_per_character": 1.875e-08, + "input_cost_per_token_above_128k_tokens": 1e-06, + "input_cost_per_character_above_128k_tokens": 2.5e-07, + "input_cost_per_image_above_128k_tokens": 4e-05, + "input_cost_per_video_per_second_above_128k_tokens": 4e-05, + "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, + "output_cost_per_token": 3e-07, + "output_cost_per_character": 7.5e-08, + "output_cost_per_token_above_128k_tokens": 6e-07, + "output_cost_per_character_above_128k_tokens": 1.5e-07, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.5-flash-preview-0514": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_image": 2e-05, + "input_cost_per_video_per_second": 2e-05, + "input_cost_per_audio_per_second": 
2e-06, + "input_cost_per_token": 7.5e-08, + "input_cost_per_character": 1.875e-08, + "input_cost_per_token_above_128k_tokens": 1e-06, + "input_cost_per_character_above_128k_tokens": 2.5e-07, + "input_cost_per_image_above_128k_tokens": 4e-05, + "input_cost_per_video_per_second_above_128k_tokens": 4e-05, + "input_cost_per_audio_per_second_above_128k_tokens": 4e-06, + "output_cost_per_token": 4.6875e-09, + "output_cost_per_character": 1.875e-08, + "output_cost_per_token_above_128k_tokens": 9.375e-09, + "output_cost_per_character_above_128k_tokens": 3.75e-08, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-pro-experimental": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "input_cost_per_token": 0, + "output_cost_per_token": 0, + "input_cost_per_character": 0, + "output_cost_per_character": 0, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_function_calling": false, + "supports_tool_choice": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/gemini-experimental" + }, + "gemini-flash-experimental": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "input_cost_per_token": 0, + "output_cost_per_token": 0, + "input_cost_per_character": 0, + "output_cost_per_character": 0, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_function_calling": false, + "supports_tool_choice": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/gemini-experimental" + }, + "gemini-pro-vision": { + "max_tokens": 2048, + "max_input_tokens": 16384, + "max_output_tokens": 2048, + "max_images_per_prompt": 16, + "max_videos_per_prompt": 1, + "max_video_length": 2, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 5e-07, + "litellm_provider": "vertex_ai-vision-models", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.0-pro-vision": { + "max_tokens": 2048, + "max_input_tokens": 16384, + "max_output_tokens": 2048, + "max_images_per_prompt": 16, + "max_videos_per_prompt": 1, + "max_video_length": 2, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 5e-07, + "litellm_provider": "vertex_ai-vision-models", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.0-pro-vision-001": { + "max_tokens": 2048, + "max_input_tokens": 16384, + "max_output_tokens": 2048, + "max_images_per_prompt": 16, + "max_videos_per_prompt": 1, + "max_video_length": 2, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 5e-07, + "litellm_provider": "vertex_ai-vision-models", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "medlm-medium": { + "max_tokens": 8192, + "max_input_tokens": 32768, + "max_output_tokens": 8192, + "input_cost_per_character": 5e-07, + "output_cost_per_character": 1e-06, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "source": 
"https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "medlm-large": { + "max_tokens": 1024, + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "input_cost_per_character": 5e-06, + "output_cost_per_character": 1.5e-05, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "vertex_ai/claude-3-sonnet@20240229": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true + }, + "vertex_ai/claude-3-5-sonnet@20240620": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true + }, + "vertex_ai/claude-3-5-sonnet-v2@20241022": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true + }, + "vertex_ai/claude-3-haiku@20240307": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 1.25e-06, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true + }, + "vertex_ai/claude-3-opus@20240229": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 7.5e-05, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true + }, + "vertex_ai/meta/llama3-405b-instruct-maas": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "vertex_ai-llama_models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models" + }, + "vertex_ai/meta/llama3-70b-instruct-maas": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "vertex_ai-llama_models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models" + }, + "vertex_ai/meta/llama3-8b-instruct-maas": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "vertex_ai-llama_models", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models" + }, + "vertex_ai/meta/llama-3.2-90b-vision-instruct-maas": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + 
"litellm_provider": "vertex_ai-llama_models", + "mode": "chat", + "supports_system_messages": true, + "supports_vision": true, + "source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas" + }, + "vertex_ai/mistral-large@latest": { + "max_tokens": 8191, + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 6e-06, + "litellm_provider": "vertex_ai-mistral_models", + "mode": "chat", + "supports_function_calling": true + }, + "vertex_ai/mistral-large@2407": { + "max_tokens": 8191, + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 6e-06, + "litellm_provider": "vertex_ai-mistral_models", + "mode": "chat", + "supports_function_calling": true + }, + "vertex_ai/mistral-nemo@latest": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 1.5e-07, + "litellm_provider": "vertex_ai-mistral_models", + "mode": "chat", + "supports_function_calling": true + }, + "vertex_ai/jamba-1.5-mini@001": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 4e-07, + "litellm_provider": "vertex_ai-ai21_models", + "mode": "chat" + }, + "vertex_ai/jamba-1.5-large@001": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 8e-06, + "litellm_provider": "vertex_ai-ai21_models", + "mode": "chat" + }, + "vertex_ai/jamba-1.5": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 4e-07, + "litellm_provider": "vertex_ai-ai21_models", + "mode": "chat" + }, + "vertex_ai/jamba-1.5-mini": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 4e-07, + "litellm_provider": "vertex_ai-ai21_models", + "mode": "chat" + }, + "vertex_ai/jamba-1.5-large": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 8e-06, + "litellm_provider": "vertex_ai-ai21_models", + "mode": "chat" + }, + "vertex_ai/mistral-nemo@2407": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 3e-06, + "litellm_provider": "vertex_ai-mistral_models", + "mode": "chat", + "supports_function_calling": true + }, + "vertex_ai/codestral@latest": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "vertex_ai-mistral_models", + "mode": "chat", + "supports_function_calling": true + }, + "vertex_ai/codestral@2405": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "vertex_ai-mistral_models", + "mode": "chat", + "supports_function_calling": true + }, + "vertex_ai/imagegeneration@006": { + "output_cost_per_image": 0.02, + "litellm_provider": "vertex_ai-image-models", + "mode": "image_generation", + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" + }, + 
"vertex_ai/imagen-3.0-generate-001": { + "output_cost_per_image": 0.04, + "litellm_provider": "vertex_ai-image-models", + "mode": "image_generation", + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" + }, + "vertex_ai/imagen-3.0-fast-generate-001": { + "output_cost_per_image": 0.02, + "litellm_provider": "vertex_ai-image-models", + "mode": "image_generation", + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" + }, + "text-embedding-004": { + "max_tokens": 2048, + "max_input_tokens": 2048, + "output_vector_size": 768, + "input_cost_per_character": 2.5e-08, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0, + "litellm_provider": "vertex_ai-embedding-models", + "mode": "embedding", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models" + }, + "text-multilingual-embedding-002": { + "max_tokens": 2048, + "max_input_tokens": 2048, + "output_vector_size": 768, + "input_cost_per_character": 2.5e-08, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0, + "litellm_provider": "vertex_ai-embedding-models", + "mode": "embedding", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models" + }, + "textembedding-gecko": { + "max_tokens": 3072, + "max_input_tokens": 3072, + "output_vector_size": 768, + "input_cost_per_character": 2.5e-08, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0, + "litellm_provider": "vertex_ai-embedding-models", + "mode": "embedding", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "textembedding-gecko-multilingual": { + "max_tokens": 3072, + "max_input_tokens": 3072, + "output_vector_size": 768, + "input_cost_per_character": 2.5e-08, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0, + "litellm_provider": "vertex_ai-embedding-models", + "mode": "embedding", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "textembedding-gecko-multilingual@001": { + "max_tokens": 3072, + "max_input_tokens": 3072, + "output_vector_size": 768, + "input_cost_per_character": 2.5e-08, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0, + "litellm_provider": "vertex_ai-embedding-models", + "mode": "embedding", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "textembedding-gecko@001": { + "max_tokens": 3072, + "max_input_tokens": 3072, + "output_vector_size": 768, + "input_cost_per_character": 2.5e-08, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0, + "litellm_provider": "vertex_ai-embedding-models", + "mode": "embedding", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "textembedding-gecko@003": { + "max_tokens": 3072, + "max_input_tokens": 3072, + "output_vector_size": 768, + "input_cost_per_character": 2.5e-08, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0, + "litellm_provider": "vertex_ai-embedding-models", + "mode": "embedding", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "text-embedding-preview-0409": { + "max_tokens": 3072, + "max_input_tokens": 3072, + "output_vector_size": 768, + "input_cost_per_token": 6.25e-09, + "input_cost_per_token_batch_requests": 5e-09, + "output_cost_per_token": 0, + "litellm_provider": "vertex_ai-embedding-models", + "mode": "embedding", + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" 
+ }, + "text-multilingual-embedding-preview-0409": { + "max_tokens": 3072, + "max_input_tokens": 3072, + "output_vector_size": 768, + "input_cost_per_token": 6.25e-09, + "output_cost_per_token": 0, + "litellm_provider": "vertex_ai-embedding-models", + "mode": "embedding", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "palm/chat-bison": { + "max_tokens": 4096, + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "litellm_provider": "palm", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "palm/chat-bison-001": { + "max_tokens": 4096, + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "litellm_provider": "palm", + "mode": "chat", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "palm/text-bison": { + "max_tokens": 1024, + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "litellm_provider": "palm", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "palm/text-bison-001": { + "max_tokens": 1024, + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "litellm_provider": "palm", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "palm/text-bison-safety-off": { + "max_tokens": 1024, + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "litellm_provider": "palm", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "palm/text-bison-safety-recitation-off": { + "max_tokens": 1024, + "max_input_tokens": 8192, + "max_output_tokens": 1024, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 1.25e-07, + "litellm_provider": "palm", + "mode": "completion", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini/gemini-1.5-flash-002": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "cache_read_input_token_cost": 1.875e-08, + "cache_creation_input_token_cost": 1e-06, + "input_cost_per_token": 7.5e-08, + "input_cost_per_token_above_128k_tokens": 1.5e-07, + "output_cost_per_token": 3e-07, + "output_cost_per_token_above_128k_tokens": 6e-07, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_prompt_caching": true, + "tpm": 4000000, + "rpm": 2000, + "source": "https://ai.google.dev/pricing" + }, + "gemini/gemini-1.5-flash-001": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, 
+ "cache_read_input_token_cost": 1.875e-08, + "cache_creation_input_token_cost": 1e-06, + "input_cost_per_token": 7.5e-08, + "input_cost_per_token_above_128k_tokens": 1.5e-07, + "output_cost_per_token": 3e-07, + "output_cost_per_token_above_128k_tokens": 6e-07, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_prompt_caching": true, + "tpm": 4000000, + "rpm": 2000, + "source": "https://ai.google.dev/pricing" + }, + "gemini/gemini-1.5-flash": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_token": 7.5e-08, + "input_cost_per_token_above_128k_tokens": 1.5e-07, + "output_cost_per_token": 3e-07, + "output_cost_per_token_above_128k_tokens": 6e-07, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "tpm": 4000000, + "rpm": 2000, + "source": "https://ai.google.dev/pricing" + }, + "gemini/gemini-1.5-flash-latest": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_token": 7.5e-08, + "input_cost_per_token_above_128k_tokens": 1.5e-07, + "output_cost_per_token": 3e-07, + "output_cost_per_token_above_128k_tokens": 6e-07, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_prompt_caching": true, + "tpm": 4000000, + "rpm": 2000, + "source": "https://ai.google.dev/pricing" + }, + "gemini/gemini-1.5-flash-8b-exp-0924": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_prompt_caching": true, + "tpm": 4000000, + "rpm": 4000, + "source": "https://ai.google.dev/pricing" + }, + "gemini/gemini-1.5-flash-exp-0827": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "tpm": 4000000, + "rpm": 2000, + "source": "https://ai.google.dev/pricing" + }, + 
"gemini/gemini-1.5-flash-8b-exp-0827": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "tpm": 4000000, + "rpm": 4000, + "source": "https://ai.google.dev/pricing" + }, + "gemini/gemini-pro": { + "max_tokens": 8192, + "max_input_tokens": 32760, + "max_output_tokens": 8192, + "input_cost_per_token": 3.5e-07, + "input_cost_per_token_above_128k_tokens": 7e-07, + "output_cost_per_token": 1.05e-06, + "output_cost_per_token_above_128k_tokens": 2.1e-06, + "litellm_provider": "gemini", + "mode": "chat", + "supports_function_calling": true, + "rpd": 30000, + "tpm": 120000, + "rpm": 360, + "source": "https://ai.google.dev/gemini-api/docs/models/gemini" + }, + "gemini/gemini-1.5-pro": { + "max_tokens": 8192, + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "input_cost_per_token": 3.5e-06, + "input_cost_per_token_above_128k_tokens": 7e-06, + "output_cost_per_token": 1.05e-05, + "output_cost_per_token_above_128k_tokens": 2.1e-05, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_tool_choice": true, + "supports_response_schema": true, + "tpm": 4000000, + "rpm": 1000, + "source": "https://ai.google.dev/pricing" + }, + "gemini/gemini-1.5-pro-002": { + "max_tokens": 8192, + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "input_cost_per_token": 3.5e-06, + "input_cost_per_token_above_128k_tokens": 7e-06, + "output_cost_per_token": 1.05e-05, + "output_cost_per_token_above_128k_tokens": 2.1e-05, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_tool_choice": true, + "supports_response_schema": true, + "supports_prompt_caching": true, + "tpm": 4000000, + "rpm": 1000, + "source": "https://ai.google.dev/pricing" + }, + "gemini/gemini-1.5-pro-001": { + "max_tokens": 8192, + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "input_cost_per_token": 3.5e-06, + "input_cost_per_token_above_128k_tokens": 7e-06, + "output_cost_per_token": 1.05e-05, + "output_cost_per_token_above_128k_tokens": 2.1e-05, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_tool_choice": true, + "supports_response_schema": true, + "supports_prompt_caching": true, + "tpm": 4000000, + "rpm": 1000, + "source": "https://ai.google.dev/pricing" + }, + "gemini/gemini-1.5-pro-exp-0801": { + "max_tokens": 8192, + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "input_cost_per_token": 3.5e-06, + "input_cost_per_token_above_128k_tokens": 7e-06, + "output_cost_per_token": 1.05e-05, + "output_cost_per_token_above_128k_tokens": 2.1e-05, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_tool_choice": true, + 
"supports_response_schema": true, + "tpm": 4000000, + "rpm": 1000, + "source": "https://ai.google.dev/pricing" + }, + "gemini/gemini-1.5-pro-exp-0827": { + "max_tokens": 8192, + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_tool_choice": true, + "supports_response_schema": true, + "tpm": 4000000, + "rpm": 1000, + "source": "https://ai.google.dev/pricing" + }, + "gemini/gemini-1.5-pro-latest": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "input_cost_per_token": 3.5e-06, + "input_cost_per_token_above_128k_tokens": 7e-06, + "output_cost_per_token": 1.05e-06, + "output_cost_per_token_above_128k_tokens": 2.1e-05, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_tool_choice": true, + "supports_response_schema": true, + "tpm": 4000000, + "rpm": 1000, + "source": "https://ai.google.dev/pricing" + }, + "gemini/gemini-pro-vision": { + "max_tokens": 2048, + "max_input_tokens": 30720, + "max_output_tokens": 2048, + "input_cost_per_token": 3.5e-07, + "input_cost_per_token_above_128k_tokens": 7e-07, + "output_cost_per_token": 1.05e-06, + "output_cost_per_token_above_128k_tokens": 2.1e-06, + "litellm_provider": "gemini", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "rpd": 30000, + "tpm": 120000, + "rpm": 360, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini/gemini-gemma-2-27b-it": { + "max_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3.5e-07, + "output_cost_per_token": 1.05e-06, + "litellm_provider": "gemini", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini/gemini-gemma-2-9b-it": { + "max_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3.5e-07, + "output_cost_per_token": 1.05e-06, + "litellm_provider": "gemini", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "command-r": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "cohere_chat", + "mode": "chat", + "supports_function_calling": true + }, + "command-r-08-2024": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "cohere_chat", + "mode": "chat", + "supports_function_calling": true + }, + "command-light": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "cohere_chat", + "mode": "chat" + }, + "command-r-plus": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 2.5e-06, + "output_cost_per_token": 1e-05, + "litellm_provider": 
"cohere_chat", + "mode": "chat", + "supports_function_calling": true + }, + "command-r-plus-08-2024": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 2.5e-06, + "output_cost_per_token": 1e-05, + "litellm_provider": "cohere_chat", + "mode": "chat", + "supports_function_calling": true + }, + "command-nightly": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "cohere", + "mode": "completion" + }, + "command": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "cohere", + "mode": "completion" + }, + "rerank-english-v3.0": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_query_tokens": 2048, + "input_cost_per_token": 0.0, + "input_cost_per_query": 0.002, + "output_cost_per_token": 0.0, + "litellm_provider": "cohere", + "mode": "rerank" + }, + "rerank-multilingual-v3.0": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_query_tokens": 2048, + "input_cost_per_token": 0.0, + "input_cost_per_query": 0.002, + "output_cost_per_token": 0.0, + "litellm_provider": "cohere", + "mode": "rerank" + }, + "rerank-english-v2.0": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_query_tokens": 2048, + "input_cost_per_token": 0.0, + "input_cost_per_query": 0.002, + "output_cost_per_token": 0.0, + "litellm_provider": "cohere", + "mode": "rerank" + }, + "rerank-multilingual-v2.0": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_query_tokens": 2048, + "input_cost_per_token": 0.0, + "input_cost_per_query": 0.002, + "output_cost_per_token": 0.0, + "litellm_provider": "cohere", + "mode": "rerank" + }, + "embed-english-v3.0": { + "max_tokens": 1024, + "max_input_tokens": 1024, + "input_cost_per_token": 1e-07, + "input_cost_per_image": 0.0001, + "output_cost_per_token": 0.0, + "litellm_provider": "cohere", + "mode": "embedding", + "supports_image_input": true, + "supports_embedding_image_input": true, + "metadata": { + "notes": "'supports_image_input' is a deprecated field. Use 'supports_embedding_image_input' instead." 
+ } + }, + "embed-english-light-v3.0": { + "max_tokens": 1024, + "max_input_tokens": 1024, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "cohere", + "mode": "embedding" + }, + "embed-multilingual-v3.0": { + "max_tokens": 1024, + "max_input_tokens": 1024, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "cohere", + "mode": "embedding" + }, + "embed-english-v2.0": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "cohere", + "mode": "embedding" + }, + "embed-english-light-v2.0": { + "max_tokens": 1024, + "max_input_tokens": 1024, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "cohere", + "mode": "embedding" + }, + "embed-multilingual-v2.0": { + "max_tokens": 768, + "max_input_tokens": 768, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "cohere", + "mode": "embedding" + }, + "replicate/meta/llama-2-13b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 5e-07, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-2-13b-chat": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 5e-07, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-2-70b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 6.5e-07, + "output_cost_per_token": 2.75e-06, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-2-70b-chat": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 6.5e-07, + "output_cost_per_token": 2.75e-06, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-2-7b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-08, + "output_cost_per_token": 2.5e-07, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-2-7b-chat": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-08, + "output_cost_per_token": 2.5e-07, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-3-70b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 6.5e-07, + "output_cost_per_token": 2.75e-06, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-3-70b-instruct": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 6.5e-07, + "output_cost_per_token": 2.75e-06, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-3-8b": { + "max_tokens": 8086, + "max_input_tokens": 8086, + "max_output_tokens": 8086, + "input_cost_per_token": 5e-08, + "output_cost_per_token": 2.5e-07, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-3-8b-instruct": { + "max_tokens": 8086, + "max_input_tokens": 8086, + "max_output_tokens": 8086, + "input_cost_per_token": 5e-08, + "output_cost_per_token": 2.5e-07, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/mistralai/mistral-7b-v0.1": { + "max_tokens": 4096, + "max_input_tokens": 4096, + 
"max_output_tokens": 4096, + "input_cost_per_token": 5e-08, + "output_cost_per_token": 2.5e-07, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/mistralai/mistral-7b-instruct-v0.2": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-08, + "output_cost_per_token": 2.5e-07, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/mistralai/mixtral-8x7b-instruct-v0.1": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 1e-06, + "litellm_provider": "replicate", + "mode": "chat" + }, + "openrouter/deepseek/deepseek-coder": { + "max_tokens": 8192, + "max_input_tokens": 66000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.4e-07, + "output_cost_per_token": 2.8e-07, + "litellm_provider": "openrouter", + "supports_prompt_caching": true, + "mode": "chat" + }, + "openrouter/microsoft/wizardlm-2-8x22b:nitro": { + "max_tokens": 65536, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 1e-06, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/google/gemini-pro-1.5": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "input_cost_per_token": 2.5e-06, + "output_cost_per_token": 7.5e-06, + "input_cost_per_image": 0.00265, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "openrouter/mistralai/mixtral-8x22b-instruct": { + "max_tokens": 65536, + "input_cost_per_token": 6.5e-07, + "output_cost_per_token": 6.5e-07, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/cohere/command-r-plus": { + "max_tokens": 128000, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/databricks/dbrx-instruct": { + "max_tokens": 32768, + "input_cost_per_token": 6e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/anthropic/claude-3-haiku": { + "max_tokens": 200000, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 1.25e-06, + "input_cost_per_image": 0.0004, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "openrouter/anthropic/claude-3-haiku-20240307": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 1.25e-06, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 264 + }, + "anthropic/claude-3-5-sonnet-20241022": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "cache_creation_input_token_cost": 3.75e-06, + "cache_read_input_token_cost": 3e-07, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true, + "supports_prompt_caching": true + }, + "anthropic/claude-3-5-sonnet-latest": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "cache_creation_input_token_cost": 3.75e-06, + "cache_read_input_token_cost": 3e-07, + 
"litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true, + "supports_prompt_caching": true + }, + "openrouter/anthropic/claude-3.5-sonnet": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true + }, + "openrouter/anthropic/claude-3.5-sonnet:beta": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 + }, + "openrouter/anthropic/claude-3-sonnet": { + "max_tokens": 200000, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "input_cost_per_image": 0.0048, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "openrouter/mistralai/mistral-large": { + "max_tokens": 32000, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/cognitivecomputations/dolphin-mixtral-8x7b": { + "max_tokens": 32769, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 5e-07, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/google/gemini-pro-vision": { + "max_tokens": 45875, + "input_cost_per_token": 1.25e-07, + "output_cost_per_token": 3.75e-07, + "input_cost_per_image": 0.0025, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "openrouter/fireworks/firellava-13b": { + "max_tokens": 4096, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/meta-llama/llama-3-8b-instruct:free": { + "max_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/meta-llama/llama-3-8b-instruct:extended": { + "max_tokens": 16384, + "input_cost_per_token": 2.25e-07, + "output_cost_per_token": 2.25e-06, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/meta-llama/llama-3-70b-instruct:nitro": { + "max_tokens": 8192, + "input_cost_per_token": 9e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/meta-llama/llama-3-70b-instruct": { + "max_tokens": 8192, + "input_cost_per_token": 5.9e-07, + "output_cost_per_token": 7.9e-07, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/openai/o1-mini": { + "max_tokens": 65536, + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.2e-05, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false + }, + "openrouter/openai/o1-mini-2024-09-12": { + "max_tokens": 65536, + "max_input_tokens": 128000, + "max_output_tokens": 65536, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.2e-05, + "litellm_provider": 
"openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false + }, + "openrouter/openai/o1-preview": { + "max_tokens": 32768, + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 6e-05, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false + }, + "openrouter/openai/o1-preview-2024-09-12": { + "max_tokens": 32768, + "max_input_tokens": 128000, + "max_output_tokens": 32768, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 6e-05, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": false + }, + "openrouter/openai/gpt-4o": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "openrouter/openai/gpt-4o-2024-05-13": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "openrouter/openai/gpt-4-vision-preview": { + "max_tokens": 130000, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 3e-05, + "input_cost_per_image": 0.01445, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "openrouter/openai/gpt-3.5-turbo": { + "max_tokens": 4095, + "input_cost_per_token": 1.5e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/openai/gpt-3.5-turbo-16k": { + "max_tokens": 16383, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 4e-06, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/openai/gpt-4": { + "max_tokens": 8192, + "input_cost_per_token": 3e-05, + "output_cost_per_token": 6e-05, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/anthropic/claude-instant-v1": { + "max_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 1.63e-06, + "output_cost_per_token": 5.51e-06, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/anthropic/claude-2": { + "max_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 1.102e-05, + "output_cost_per_token": 3.268e-05, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/anthropic/claude-3-opus": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 7.5e-05, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 395 + }, + "openrouter/google/palm-2-chat-bison": { + "max_tokens": 25804, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 5e-07, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/google/palm-2-codechat-bison": { + "max_tokens": 20070, + "input_cost_per_token": 5e-07, + 
"output_cost_per_token": 5e-07, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/meta-llama/llama-2-13b-chat": { + "max_tokens": 4096, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/meta-llama/llama-2-70b-chat": { + "max_tokens": 4096, + "input_cost_per_token": 1.5e-06, + "output_cost_per_token": 1.5e-06, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/meta-llama/codellama-34b-instruct": { + "max_tokens": 8192, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 5e-07, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/nousresearch/nous-hermes-llama2-13b": { + "max_tokens": 4096, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/mancer/weaver": { + "max_tokens": 8000, + "input_cost_per_token": 5.625e-06, + "output_cost_per_token": 5.625e-06, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/gryphe/mythomax-l2-13b": { + "max_tokens": 8192, + "input_cost_per_token": 1.875e-06, + "output_cost_per_token": 1.875e-06, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/jondurbin/airoboros-l2-70b-2.1": { + "max_tokens": 4096, + "input_cost_per_token": 1.3875e-05, + "output_cost_per_token": 1.3875e-05, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/undi95/remm-slerp-l2-13b": { + "max_tokens": 6144, + "input_cost_per_token": 1.875e-06, + "output_cost_per_token": 1.875e-06, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/pygmalionai/mythalion-13b": { + "max_tokens": 4096, + "input_cost_per_token": 1.875e-06, + "output_cost_per_token": 1.875e-06, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/mistralai/mistral-7b-instruct": { + "max_tokens": 8192, + "input_cost_per_token": 1.3e-07, + "output_cost_per_token": 1.3e-07, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/mistralai/mistral-7b-instruct:free": { + "max_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "j2-ultra": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "ai21", + "mode": "completion" + }, + "jamba-1.5-mini@001": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 4e-07, + "litellm_provider": "ai21", + "mode": "chat" + }, + "jamba-1.5-large@001": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 8e-06, + "litellm_provider": "ai21", + "mode": "chat" + }, + "jamba-1.5": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 4e-07, + "litellm_provider": "ai21", + "mode": "chat" + }, + "jamba-1.5-mini": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 4e-07, + "litellm_provider": "ai21", + "mode": "chat" + }, + "jamba-1.5-large": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 2e-06, + 
"output_cost_per_token": 8e-06, + "litellm_provider": "ai21", + "mode": "chat" + }, + "j2-mid": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-05, + "output_cost_per_token": 1e-05, + "litellm_provider": "ai21", + "mode": "completion" + }, + "j2-light": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 3e-06, + "litellm_provider": "ai21", + "mode": "completion" + }, + "dolphin": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 5e-07, + "litellm_provider": "nlp_cloud", + "mode": "completion" + }, + "chatdolphin": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 5e-07, + "litellm_provider": "nlp_cloud", + "mode": "chat" + }, + "luminous-base": { + "max_tokens": 2048, + "input_cost_per_token": 3e-05, + "output_cost_per_token": 3.3e-05, + "litellm_provider": "aleph_alpha", + "mode": "completion" + }, + "luminous-base-control": { + "max_tokens": 2048, + "input_cost_per_token": 3.75e-05, + "output_cost_per_token": 4.125e-05, + "litellm_provider": "aleph_alpha", + "mode": "chat" + }, + "luminous-extended": { + "max_tokens": 2048, + "input_cost_per_token": 4.5e-05, + "output_cost_per_token": 4.95e-05, + "litellm_provider": "aleph_alpha", + "mode": "completion" + }, + "luminous-extended-control": { + "max_tokens": 2048, + "input_cost_per_token": 5.625e-05, + "output_cost_per_token": 6.1875e-05, + "litellm_provider": "aleph_alpha", + "mode": "chat" + }, + "luminous-supreme": { + "max_tokens": 2048, + "input_cost_per_token": 0.000175, + "output_cost_per_token": 0.0001925, + "litellm_provider": "aleph_alpha", + "mode": "completion" + }, + "luminous-supreme-control": { + "max_tokens": 2048, + "input_cost_per_token": 0.00021875, + "output_cost_per_token": 0.000240625, + "litellm_provider": "aleph_alpha", + "mode": "chat" + }, + "ai21.j2-mid-v1": { + "max_tokens": 8191, + "max_input_tokens": 8191, + "max_output_tokens": 8191, + "input_cost_per_token": 1.25e-05, + "output_cost_per_token": 1.25e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "ai21.j2-ultra-v1": { + "max_tokens": 8191, + "max_input_tokens": 8191, + "max_output_tokens": 8191, + "input_cost_per_token": 1.88e-05, + "output_cost_per_token": 1.88e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "ai21.jamba-instruct-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 70000, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 7e-07, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_system_messages": true + }, + "amazon.titan-text-lite-v1": { + "max_tokens": 4000, + "max_input_tokens": 42000, + "max_output_tokens": 4000, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 4e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "amazon.titan-text-express-v1": { + "max_tokens": 8000, + "max_input_tokens": 42000, + "max_output_tokens": 8000, + "input_cost_per_token": 1.3e-06, + "output_cost_per_token": 1.7e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "amazon.titan-text-premier-v1:0": { + "max_tokens": 32000, + "max_input_tokens": 42000, + "max_output_tokens": 32000, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 1.5e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + 
"amazon.titan-embed-text-v1": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "output_vector_size": 1536, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "bedrock", + "mode": "embedding" + }, + "amazon.titan-embed-text-v2:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "output_vector_size": 1024, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "bedrock", + "mode": "embedding" + }, + "mistral.mistral-7b-instruct-v0:2": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "mistral.mixtral-8x7b-instruct-v0:1": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 4.5e-07, + "output_cost_per_token": 7e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "mistral.mistral-large-2402-v1:0": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true + }, + "mistral.mistral-large-2407-v1:0": { + "max_tokens": 8191, + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 9e-06, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true + }, + "mistral.mistral-small-2402-v1:0": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 3e-06, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true + }, + "bedrock/us-west-2/mistral.mixtral-8x7b-instruct-v0:1": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 4.5e-07, + "output_cost_per_token": 7e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/mistral.mixtral-8x7b-instruct-v0:1": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 4.5e-07, + "output_cost_per_token": 7e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-west-3/mistral.mixtral-8x7b-instruct-v0:1": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 5.9e-07, + "output_cost_per_token": 9.1e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/mistral.mistral-7b-instruct-v0:2": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/mistral.mistral-7b-instruct-v0:2": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-west-3/mistral.mistral-7b-instruct-v0:2": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 2.6e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/mistral.mistral-large-2402-v1:0": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, 
+ "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/mistral.mistral-large-2402-v1:0": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true + }, + "bedrock/eu-west-3/mistral.mistral-large-2402-v1:0": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 1.04e-05, + "output_cost_per_token": 3.12e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true + }, + "anthropic.claude-3-sonnet-20240229-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "anthropic.claude-3-5-sonnet-20240620-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "anthropic.claude-3-5-sonnet-20241022-v2:0": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true, + "supports_prompt_caching": true + }, + "anthropic.claude-3-5-sonnet-latest-v2:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "anthropic.claude-3-haiku-20240307-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 1.25e-06, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "anthropic.claude-3-opus-20240229-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 7.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "us.anthropic.claude-3-sonnet-20240229-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "us.anthropic.claude-3-5-sonnet-20240620-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "us.anthropic.claude-3-5-sonnet-20241022-v2:0": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "bedrock", + "mode": 
"chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true + }, + "us.anthropic.claude-3-haiku-20240307-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 1.25e-06, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "us.anthropic.claude-3-opus-20240229-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 7.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "eu.anthropic.claude-3-sonnet-20240229-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "eu.anthropic.claude-3-5-sonnet-20240620-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "eu.anthropic.claude-3-5-sonnet-20241022-v2:0": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true + }, + "eu.anthropic.claude-3-haiku-20240307-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 1.25e-06, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "eu.anthropic.claude-3-opus-20240229-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 7.5e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "anthropic.claude-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/anthropic.claude-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/anthropic.claude-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ap-northeast-1/anthropic.claude-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + 
"input_cost_per_second": 0.0455, + "output_cost_per_second": 0.0455, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.02527, + "output_cost_per_second": 0.02527, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-central-1/anthropic.claude-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.0415, + "output_cost_per_second": 0.0415, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.02305, + "output_cost_per_second": 0.02305, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/1-month-commitment/anthropic.claude-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.0175, + "output_cost_per_second": 0.0175, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/6-month-commitment/anthropic.claude-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.00972, + "output_cost_per_second": 0.00972, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/1-month-commitment/anthropic.claude-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.0175, + "output_cost_per_second": 0.0175, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/6-month-commitment/anthropic.claude-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.00972, + "output_cost_per_second": 0.00972, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "anthropic.claude-v2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/anthropic.claude-v2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/anthropic.claude-v2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ap-northeast-1/anthropic.claude-v2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.0455, + "output_cost_per_second": 0.0455, + "litellm_provider": "bedrock", + "mode": "chat" + 
}, + "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.02527, + "output_cost_per_second": 0.02527, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-central-1/anthropic.claude-v2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.0415, + "output_cost_per_second": 0.0415, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.02305, + "output_cost_per_second": 0.02305, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/1-month-commitment/anthropic.claude-v2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.0175, + "output_cost_per_second": 0.0175, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/6-month-commitment/anthropic.claude-v2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.00972, + "output_cost_per_second": 0.00972, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/1-month-commitment/anthropic.claude-v2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.0175, + "output_cost_per_second": 0.0175, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/6-month-commitment/anthropic.claude-v2": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.00972, + "output_cost_per_second": 0.00972, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "anthropic.claude-v2:1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/anthropic.claude-v2:1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/anthropic.claude-v2:1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ap-northeast-1/anthropic.claude-v2:1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v2:1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.0455, + "output_cost_per_second": 0.0455, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v2:1": { + "max_tokens": 8191, + 
"max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.02527, + "output_cost_per_second": 0.02527, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-central-1/anthropic.claude-v2:1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-06, + "output_cost_per_token": 2.4e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v2:1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.0415, + "output_cost_per_second": 0.0415, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v2:1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.02305, + "output_cost_per_second": 0.02305, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/1-month-commitment/anthropic.claude-v2:1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.0175, + "output_cost_per_second": 0.0175, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/6-month-commitment/anthropic.claude-v2:1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.00972, + "output_cost_per_second": 0.00972, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/1-month-commitment/anthropic.claude-v2:1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.0175, + "output_cost_per_second": 0.0175, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/6-month-commitment/anthropic.claude-v2:1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.00972, + "output_cost_per_second": 0.00972, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "anthropic.claude-instant-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 1.63e-06, + "output_cost_per_token": 5.51e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/anthropic.claude-instant-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-07, + "output_cost_per_token": 2.4e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/1-month-commitment/anthropic.claude-instant-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.011, + "output_cost_per_second": 0.011, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/6-month-commitment/anthropic.claude-instant-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.00611, + "output_cost_per_second": 0.00611, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/1-month-commitment/anthropic.claude-instant-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.011, + "output_cost_per_second": 0.011, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/6-month-commitment/anthropic.claude-instant-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + 
"max_output_tokens": 8191, + "input_cost_per_second": 0.00611, + "output_cost_per_second": 0.00611, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-2/anthropic.claude-instant-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 8e-07, + "output_cost_per_token": 2.4e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ap-northeast-1/anthropic.claude-instant-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 2.23e-06, + "output_cost_per_token": 7.55e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-instant-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.01475, + "output_cost_per_second": 0.01475, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-instant-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.008194, + "output_cost_per_second": 0.008194, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-central-1/anthropic.claude-instant-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_token": 2.48e-06, + "output_cost_per_token": 8.38e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-central-1/1-month-commitment/anthropic.claude-instant-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.01635, + "output_cost_per_second": 0.01635, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-central-1/6-month-commitment/anthropic.claude-instant-v1": { + "max_tokens": 8191, + "max_input_tokens": 100000, + "max_output_tokens": 8191, + "input_cost_per_second": 0.009083, + "output_cost_per_second": 0.009083, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "cohere.command-text-v14": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/*/1-month-commitment/cohere.command-text-v14": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_second": 0.011, + "output_cost_per_second": 0.011, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/*/6-month-commitment/cohere.command-text-v14": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_second": 0.0066027, + "output_cost_per_second": 0.0066027, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "cohere.command-light-text-v14": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/*/1-month-commitment/cohere.command-light-text-v14": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_second": 0.001902, + "output_cost_per_second": 0.001902, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/*/6-month-commitment/cohere.command-light-text-v14": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_second": 0.0011416, + 
"output_cost_per_second": 0.0011416, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "cohere.command-r-plus-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "cohere.command-r-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 1.5e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "cohere.embed-english-v3": { + "max_tokens": 512, + "max_input_tokens": 512, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "bedrock", + "mode": "embedding" + }, + "cohere.embed-multilingual-v3": { + "max_tokens": 512, + "max_input_tokens": 512, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "bedrock", + "mode": "embedding" + }, + "meta.llama2-13b-chat-v1": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 7.5e-07, + "output_cost_per_token": 1e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "meta.llama2-70b-chat-v1": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1.95e-06, + "output_cost_per_token": 2.56e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "meta.llama3-8b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/meta.llama3-8b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-1/meta.llama3-8b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ap-south-1/meta.llama3-8b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3.6e-07, + "output_cost_per_token": 7.2e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ca-central-1/meta.llama3-8b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3.5e-07, + "output_cost_per_token": 6.9e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-west-1/meta.llama3-8b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3.2e-07, + "output_cost_per_token": 6.5e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-west-2/meta.llama3-8b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3.9e-07, + "output_cost_per_token": 7.8e-07, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/sa-east-1/meta.llama3-8b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 5e-07, + "output_cost_per_token": 1.01e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "meta.llama3-70b-instruct-v1:0": { + "max_tokens": 8192, 
+ "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2.65e-06, + "output_cost_per_token": 3.5e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-east-1/meta.llama3-70b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2.65e-06, + "output_cost_per_token": 3.5e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/us-west-1/meta.llama3-70b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2.65e-06, + "output_cost_per_token": 3.5e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ap-south-1/meta.llama3-70b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3.18e-06, + "output_cost_per_token": 4.2e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/ca-central-1/meta.llama3-70b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3.05e-06, + "output_cost_per_token": 4.03e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-west-1/meta.llama3-70b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 2.86e-06, + "output_cost_per_token": 3.78e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/eu-west-2/meta.llama3-70b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 3.45e-06, + "output_cost_per_token": 4.55e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "bedrock/sa-east-1/meta.llama3-70b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 4.45e-06, + "output_cost_per_token": 5.88e-06, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "meta.llama3-1-8b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "input_cost_per_token": 2.2e-07, + "output_cost_per_token": 2.2e-07, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "meta.llama3-1-70b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "input_cost_per_token": 9.9e-07, + "output_cost_per_token": 9.9e-07, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "meta.llama3-1-405b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 5.32e-06, + "output_cost_per_token": 1.6e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "meta.llama3-2-1b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 1e-07, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "us.meta.llama3-2-1b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 1e-07, + "litellm_provider": "bedrock", + "mode": "chat", + 
"supports_function_calling": true, + "supports_tool_choice": false + }, + "eu.meta.llama3-2-1b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.3e-07, + "output_cost_per_token": 1.3e-07, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "meta.llama3-2-3b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 1.5e-07, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "us.meta.llama3-2-3b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 1.5e-07, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "eu.meta.llama3-2-3b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.9e-07, + "output_cost_per_token": 1.9e-07, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "meta.llama3-2-11b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 3.5e-07, + "output_cost_per_token": 3.5e-07, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "us.meta.llama3-2-11b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 3.5e-07, + "output_cost_per_token": 3.5e-07, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "meta.llama3-2-90b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "us.meta.llama3-2-90b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 2e-06, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "512-x-512/50-steps/stability.stable-diffusion-xl-v0": { + "max_tokens": 77, + "max_input_tokens": 77, + "output_cost_per_image": 0.018, + "litellm_provider": "bedrock", + "mode": "image_generation" + }, + "512-x-512/max-steps/stability.stable-diffusion-xl-v0": { + "max_tokens": 77, + "max_input_tokens": 77, + "output_cost_per_image": 0.036, + "litellm_provider": "bedrock", + "mode": "image_generation" + }, + "max-x-max/50-steps/stability.stable-diffusion-xl-v0": { + "max_tokens": 77, + "max_input_tokens": 77, + "output_cost_per_image": 0.036, + "litellm_provider": "bedrock", + "mode": "image_generation" + }, + "max-x-max/max-steps/stability.stable-diffusion-xl-v0": { + "max_tokens": 77, + "max_input_tokens": 77, + "output_cost_per_image": 0.072, + "litellm_provider": "bedrock", + "mode": "image_generation" + }, + "1024-x-1024/50-steps/stability.stable-diffusion-xl-v1": { + "max_tokens": 77, + "max_input_tokens": 77, + 
"output_cost_per_image": 0.04, + "litellm_provider": "bedrock", + "mode": "image_generation" + }, + "1024-x-1024/max-steps/stability.stable-diffusion-xl-v1": { + "max_tokens": 77, + "max_input_tokens": 77, + "output_cost_per_image": 0.08, + "litellm_provider": "bedrock", + "mode": "image_generation" + }, + "sagemaker/meta-textgeneration-llama-2-7b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "sagemaker", + "mode": "completion" + }, + "sagemaker/meta-textgeneration-llama-2-7b-f": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "sagemaker", + "mode": "chat" + }, + "sagemaker/meta-textgeneration-llama-2-13b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "sagemaker", + "mode": "completion" + }, + "sagemaker/meta-textgeneration-llama-2-13b-f": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "sagemaker", + "mode": "chat" + }, + "sagemaker/meta-textgeneration-llama-2-70b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "sagemaker", + "mode": "completion" + }, + "sagemaker/meta-textgeneration-llama-2-70b-b-f": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "sagemaker", + "mode": "chat" + }, + "together-ai-up-to-4b": { + "input_cost_per_token": 1e-07, + "output_cost_per_token": 1e-07, + "litellm_provider": "together_ai", + "mode": "chat" + }, + "together-ai-4.1b-8b": { + "input_cost_per_token": 2e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "together_ai", + "mode": "chat" + }, + "together-ai-8.1b-21b": { + "max_tokens": 1000, + "input_cost_per_token": 3e-07, + "output_cost_per_token": 3e-07, + "litellm_provider": "together_ai", + "mode": "chat" + }, + "together-ai-21.1b-41b": { + "input_cost_per_token": 8e-07, + "output_cost_per_token": 8e-07, + "litellm_provider": "together_ai", + "mode": "chat" + }, + "together-ai-41.1b-80b": { + "input_cost_per_token": 9e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "together_ai", + "mode": "chat" + }, + "together-ai-81.1b-110b": { + "input_cost_per_token": 1.8e-06, + "output_cost_per_token": 1.8e-06, + "litellm_provider": "together_ai", + "mode": "chat" + }, + "together-ai-embedding-up-to-150m": { + "input_cost_per_token": 8e-09, + "output_cost_per_token": 0.0, + "litellm_provider": "together_ai", + "mode": "embedding" + }, + "together-ai-embedding-151m-to-350m": { + "input_cost_per_token": 1.6e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "together_ai", + "mode": "embedding" + }, + "together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1": { + "input_cost_per_token": 6e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "together_ai", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "mode": "chat" + }, + "together_ai/mistralai/Mistral-7B-Instruct-v0.1": { + "litellm_provider": "together_ai", + "supports_function_calling": true, + 
"supports_parallel_function_calling": true, + "supports_response_schema": true, + "mode": "chat" + }, + "together_ai/togethercomputer/CodeLlama-34b-Instruct": { + "litellm_provider": "together_ai", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "mode": "chat" + }, + "ollama/codegemma": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "completion" + }, + "ollama/codegeex4": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat", + "supports_function_calling": false + }, + "ollama/deepseek-coder-v2-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat", + "supports_function_calling": true + }, + "ollama/deepseek-coder-v2-base": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "completion", + "supports_function_calling": true + }, + "ollama/deepseek-coder-v2-lite-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat", + "supports_function_calling": true + }, + "ollama/deepseek-coder-v2-lite-base": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "completion", + "supports_function_calling": true + }, + "ollama/internlm2_5-20b-chat": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat", + "supports_function_calling": true + }, + "ollama/llama2": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/llama2:7b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/llama2:13b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/llama2:70b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/llama2-uncensored": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "completion" + }, + "ollama/llama3": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/llama3:8b": { + "max_tokens": 8192, + 
"max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/llama3:70b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/llama3.1": { + "max_tokens": 32768, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat", + "supports_function_calling": true + }, + "ollama/mistral-large-instruct-2407": { + "max_tokens": 65536, + "max_input_tokens": 65536, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/mistral": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "completion" + }, + "ollama/mistral-7B-Instruct-v0.1": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/mistral-7B-Instruct-v0.2": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/mixtral-8x7B-Instruct-v0.1": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/mixtral-8x22B-Instruct-v0.1": { + "max_tokens": 65536, + "max_input_tokens": 65536, + "max_output_tokens": 65536, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/codellama": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "completion" + }, + "ollama/orca-mini": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "completion" + }, + "ollama/vicuna": { + "max_tokens": 2048, + "max_input_tokens": 2048, + "max_output_tokens": 2048, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "completion" + }, + "deepinfra/lizpreciatior/lzlv_70b_fp16_hf": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 7e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/Gryphe/MythoMax-L2-13b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2.2e-07, + "output_cost_per_token": 2.2e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/mistralai/Mistral-7B-Instruct-v0.1": { + "max_tokens": 8191, + "max_input_tokens": 32768, + "max_output_tokens": 8191, + "input_cost_per_token": 1.3e-07, + "output_cost_per_token": 1.3e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/meta-llama/Llama-2-70b-chat-hf": { + 
"max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 7e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/cognitivecomputations/dolphin-2.6-mixtral-8x7b": { + "max_tokens": 8191, + "max_input_tokens": 32768, + "max_output_tokens": 8191, + "input_cost_per_token": 2.7e-07, + "output_cost_per_token": 2.7e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/codellama/CodeLlama-34b-Instruct-hf": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 6e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/deepinfra/mixtral": { + "max_tokens": 4096, + "max_input_tokens": 32000, + "max_output_tokens": 4096, + "input_cost_per_token": 2.7e-07, + "output_cost_per_token": 2.7e-07, + "litellm_provider": "deepinfra", + "mode": "completion" + }, + "deepinfra/Phind/Phind-CodeLlama-34B-v2": { + "max_tokens": 4096, + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "input_cost_per_token": 6e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/mistralai/Mixtral-8x7B-Instruct-v0.1": { + "max_tokens": 8191, + "max_input_tokens": 32768, + "max_output_tokens": 8191, + "input_cost_per_token": 2.7e-07, + "output_cost_per_token": 2.7e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/deepinfra/airoboros-70b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 7e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/01-ai/Yi-34B-Chat": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 6e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/01-ai/Yi-6B-200K": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.3e-07, + "output_cost_per_token": 1.3e-07, + "litellm_provider": "deepinfra", + "mode": "completion" + }, + "deepinfra/jondurbin/airoboros-l2-70b-gpt4-1.4.1": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 7e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/meta-llama/Llama-2-13b-chat-hf": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2.2e-07, + "output_cost_per_token": 2.2e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/amazon/MistralLite": { + "max_tokens": 8191, + "max_input_tokens": 32768, + "max_output_tokens": 8191, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/meta-llama/Llama-2-7b-chat-hf": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1.3e-07, + "output_cost_per_token": 1.3e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/meta-llama/Meta-Llama-3-8B-Instruct": { + "max_tokens": 8191, + "max_input_tokens": 8191, + "max_output_tokens": 4096, + "input_cost_per_token": 8e-08, + "output_cost_per_token": 8e-08, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/meta-llama/Meta-Llama-3-70B-Instruct": { + "max_tokens": 
8191, + "max_input_tokens": 8191, + "max_output_tokens": 4096, + "input_cost_per_token": 5.9e-07, + "output_cost_per_token": 7.9e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "deepinfra/01-ai/Yi-34B-200K": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 6e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "deepinfra", + "mode": "completion" + }, + "deepinfra/openchat/openchat_3.5": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1.3e-07, + "output_cost_per_token": 1.3e-07, + "litellm_provider": "deepinfra", + "mode": "chat" + }, + "perplexity/codellama-34b-instruct": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 3.5e-07, + "output_cost_per_token": 1.4e-06, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/codellama-70b-instruct": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 7e-07, + "output_cost_per_token": 2.8e-06, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/llama-3.1-70b-instruct": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 1e-06, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/llama-3.1-8b-instruct": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/llama-3.1-sonar-huge-128k-online": { + "max_tokens": 127072, + "max_input_tokens": 127072, + "max_output_tokens": 127072, + "input_cost_per_token": 5e-06, + "output_cost_per_token": 5e-06, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/llama-3.1-sonar-large-128k-online": { + "max_tokens": 127072, + "max_input_tokens": 127072, + "max_output_tokens": 127072, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 1e-06, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/llama-3.1-sonar-large-128k-chat": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 1e-06, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/llama-3.1-sonar-small-128k-chat": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/llama-3.1-sonar-small-128k-online": { + "max_tokens": 127072, + "max_input_tokens": 127072, + "max_output_tokens": 127072, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/pplx-7b-chat": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 7e-08, + "output_cost_per_token": 2.8e-07, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/pplx-70b-chat": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 7e-07, + "output_cost_per_token": 2.8e-06, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/pplx-7b-online": { + "max_tokens": 4096, + "max_input_tokens": 4096, + 
"max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 2.8e-07, + "input_cost_per_request": 0.005, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/pplx-70b-online": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0, + "output_cost_per_token": 2.8e-06, + "input_cost_per_request": 0.005, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/llama-2-70b-chat": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 7e-07, + "output_cost_per_token": 2.8e-06, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/mistral-7b-instruct": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 7e-08, + "output_cost_per_token": 2.8e-07, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/mixtral-8x7b-instruct": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 7e-08, + "output_cost_per_token": 2.8e-07, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/sonar-small-chat": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 7e-08, + "output_cost_per_token": 2.8e-07, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/sonar-small-online": { + "max_tokens": 12000, + "max_input_tokens": 12000, + "max_output_tokens": 12000, + "input_cost_per_token": 0, + "output_cost_per_token": 2.8e-07, + "input_cost_per_request": 0.005, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/sonar-medium-chat": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 6e-07, + "output_cost_per_token": 1.8e-06, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/sonar-medium-online": { + "max_tokens": 12000, + "max_input_tokens": 12000, + "max_output_tokens": 12000, + "input_cost_per_token": 0, + "output_cost_per_token": 1.8e-06, + "input_cost_per_request": 0.005, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p2-1b-instruct": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 1e-07, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "source": "https://fireworks.ai/pricing" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p2-3b-instruct": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 1e-07, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "source": "https://fireworks.ai/pricing" + }, + "fireworks_ai/accounts/fireworks/models/llama-v3p2-11b-vision-instruct": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 2e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "source": "https://fireworks.ai/pricing" + }, + "accounts/fireworks/models/llama-v3p2-90b-vision-instruct": { + "max_tokens": 16384, + 
"max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 9e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "source": "https://fireworks.ai/pricing" + }, + "fireworks_ai/accounts/fireworks/models/firefunction-v2": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 9e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "source": "https://fireworks.ai/pricing" + }, + "fireworks_ai/accounts/fireworks/models/mixtral-8x22b-instruct-hf": { + "max_tokens": 65536, + "max_input_tokens": 65536, + "max_output_tokens": 65536, + "input_cost_per_token": 1.2e-06, + "output_cost_per_token": 1.2e-06, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "source": "https://fireworks.ai/pricing" + }, + "fireworks_ai/accounts/fireworks/models/qwen2-72b-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 9e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "source": "https://fireworks.ai/pricing" + }, + "fireworks_ai/accounts/fireworks/models/yi-large": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 3e-06, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "source": "https://fireworks.ai/pricing" + }, + "fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-instruct": { + "max_tokens": 65536, + "max_input_tokens": 65536, + "max_output_tokens": 8192, + "input_cost_per_token": 1.2e-06, + "output_cost_per_token": 1.2e-06, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "source": "https://fireworks.ai/pricing" + }, + "fireworks_ai/nomic-ai/nomic-embed-text-v1.5": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "input_cost_per_token": 8e-09, + "output_cost_per_token": 0.0, + "litellm_provider": "fireworks_ai-embedding-models", + "mode": "embedding", + "source": "https://fireworks.ai/pricing" + }, + "fireworks_ai/nomic-ai/nomic-embed-text-v1": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "input_cost_per_token": 8e-09, + "output_cost_per_token": 0.0, + "litellm_provider": "fireworks_ai-embedding-models", + "mode": "embedding", + "source": "https://fireworks.ai/pricing" + }, + "fireworks_ai/WhereIsAI/UAE-Large-V1": { + "max_tokens": 512, + "max_input_tokens": 512, + "input_cost_per_token": 1.6e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "fireworks_ai-embedding-models", + "mode": "embedding", + "source": "https://fireworks.ai/pricing" + }, + "fireworks_ai/thenlper/gte-large": { + "max_tokens": 512, + "max_input_tokens": 512, + "input_cost_per_token": 1.6e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "fireworks_ai-embedding-models", + "mode": "embedding", + "source": "https://fireworks.ai/pricing" + }, + "fireworks_ai/thenlper/gte-base": { + "max_tokens": 512, + "max_input_tokens": 512, + "input_cost_per_token": 8e-09, 
+ "output_cost_per_token": 0.0, + "litellm_provider": "fireworks_ai-embedding-models", + "mode": "embedding", + "source": "https://fireworks.ai/pricing" + }, + "fireworks-ai-up-to-16b": { + "input_cost_per_token": 2e-07, + "output_cost_per_token": 2e-07, + "litellm_provider": "fireworks_ai" + }, + "fireworks-ai-16.1b-to-80b": { + "input_cost_per_token": 9e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "fireworks_ai" + }, + "fireworks-ai-moe-up-to-56b": { + "input_cost_per_token": 5e-07, + "output_cost_per_token": 5e-07, + "litellm_provider": "fireworks_ai" + }, + "fireworks-ai-56b-to-176b": { + "input_cost_per_token": 1.2e-06, + "output_cost_per_token": 1.2e-06, + "litellm_provider": "fireworks_ai" + }, + "fireworks-ai-default": { + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "fireworks_ai" + }, + "fireworks-ai-embedding-up-to-150m": { + "input_cost_per_token": 8e-09, + "output_cost_per_token": 0.0, + "litellm_provider": "fireworks_ai-embedding-models" + }, + "fireworks-ai-embedding-150m-to-350m": { + "input_cost_per_token": 1.6e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "fireworks_ai-embedding-models" + }, + "anyscale/mistralai/Mistral-7B-Instruct-v0.1": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 1.5e-07, + "litellm_provider": "anyscale", + "mode": "chat", + "supports_function_calling": true, + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mistral-7B-Instruct-v0.1" + }, + "anyscale/mistralai/Mixtral-8x7B-Instruct-v0.1": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 1.5e-07, + "litellm_provider": "anyscale", + "mode": "chat", + "supports_function_calling": true, + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x7B-Instruct-v0.1" + }, + "anyscale/mistralai/Mixtral-8x22B-Instruct-v0.1": { + "max_tokens": 65536, + "max_input_tokens": 65536, + "max_output_tokens": 65536, + "input_cost_per_token": 9e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "anyscale", + "mode": "chat", + "supports_function_calling": true, + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x22B-Instruct-v0.1" + }, + "anyscale/HuggingFaceH4/zephyr-7b-beta": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 16384, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 1.5e-07, + "litellm_provider": "anyscale", + "mode": "chat" + }, + "anyscale/google/gemma-7b-it": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 1.5e-07, + "litellm_provider": "anyscale", + "mode": "chat", + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/google-gemma-7b-it" + }, + "anyscale/meta-llama/Llama-2-7b-chat-hf": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 1.5e-07, + "litellm_provider": "anyscale", + "mode": "chat" + }, + "anyscale/meta-llama/Llama-2-13b-chat-hf": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 2.5e-07, + "litellm_provider": 
"anyscale", + "mode": "chat" + }, + "anyscale/meta-llama/Llama-2-70b-chat-hf": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 1e-06, + "litellm_provider": "anyscale", + "mode": "chat" + }, + "anyscale/codellama/CodeLlama-34b-Instruct-hf": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 1e-06, + "litellm_provider": "anyscale", + "mode": "chat" + }, + "anyscale/codellama/CodeLlama-70b-Instruct-hf": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 1e-06, + "litellm_provider": "anyscale", + "mode": "chat", + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/codellama-CodeLlama-70b-Instruct-hf" + }, + "anyscale/meta-llama/Meta-Llama-3-8B-Instruct": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 1.5e-07, + "litellm_provider": "anyscale", + "mode": "chat", + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-8B-Instruct" + }, + "anyscale/meta-llama/Meta-Llama-3-70B-Instruct": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 1e-06, + "litellm_provider": "anyscale", + "mode": "chat", + "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-70B-Instruct" + }, + "cloudflare/@cf/meta/llama-2-7b-chat-fp16": { + "max_tokens": 3072, + "max_input_tokens": 3072, + "max_output_tokens": 3072, + "input_cost_per_token": 1.923e-06, + "output_cost_per_token": 1.923e-06, + "litellm_provider": "cloudflare", + "mode": "chat" + }, + "cloudflare/@cf/meta/llama-2-7b-chat-int8": { + "max_tokens": 2048, + "max_input_tokens": 2048, + "max_output_tokens": 2048, + "input_cost_per_token": 1.923e-06, + "output_cost_per_token": 1.923e-06, + "litellm_provider": "cloudflare", + "mode": "chat" + }, + "cloudflare/@cf/mistral/mistral-7b-instruct-v0.1": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1.923e-06, + "output_cost_per_token": 1.923e-06, + "litellm_provider": "cloudflare", + "mode": "chat" + }, + "cloudflare/@hf/thebloke/codellama-7b-instruct-awq": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1.923e-06, + "output_cost_per_token": 1.923e-06, + "litellm_provider": "cloudflare", + "mode": "chat" + }, + "voyage/voyage-01": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "embedding" + }, + "voyage/voyage-lite-01": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "embedding" + }, + "voyage/voyage-large-2": { + "max_tokens": 16000, + "max_input_tokens": 16000, + "input_cost_per_token": 1.2e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "embedding" + }, + "voyage/voyage-law-2": { + "max_tokens": 16000, + "max_input_tokens": 16000, + "input_cost_per_token": 1.2e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "embedding" + 
}, + "voyage/voyage-code-2": { + "max_tokens": 16000, + "max_input_tokens": 16000, + "input_cost_per_token": 1.2e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "embedding" + }, + "voyage/voyage-2": { + "max_tokens": 4000, + "max_input_tokens": 4000, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "embedding" + }, + "voyage/voyage-lite-02-instruct": { + "max_tokens": 4000, + "max_input_tokens": 4000, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "embedding" + }, + "voyage/voyage-finance-2": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "input_cost_per_token": 1.2e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "embedding" + }, + "databricks/databricks-meta-llama-3-1-405b-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 5e-06, + "input_dbu_cost_per_token": 7.1429e-05, + "output_cost_per_token": 1.500002e-05, + "output_db_cost_per_token": 0.000214286, + "litellm_provider": "databricks", + "mode": "chat", + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + } + }, + "databricks/databricks-meta-llama-3-1-70b-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 1.00002e-06, + "input_dbu_cost_per_token": 1.4286e-05, + "output_cost_per_token": 2.99999e-06, + "output_dbu_cost_per_token": 4.2857e-05, + "litellm_provider": "databricks", + "mode": "chat", + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + } + }, + "databricks/databricks-dbrx-instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 7.4998e-07, + "input_dbu_cost_per_token": 1.0714e-05, + "output_cost_per_token": 2.24901e-06, + "output_dbu_cost_per_token": 3.2143e-05, + "litellm_provider": "databricks", + "mode": "chat", + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + } + }, + "databricks/databricks-meta-llama-3-70b-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 1.00002e-06, + "input_dbu_cost_per_token": 1.4286e-05, + "output_cost_per_token": 2.99999e-06, + "output_dbu_cost_per_token": 4.2857e-05, + "litellm_provider": "databricks", + "mode": "chat", + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
+ } + }, + "databricks/databricks-llama-2-70b-chat": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 5.0001e-07, + "input_dbu_cost_per_token": 7.143e-06, + "output_cost_per_token": 1.5e-06, + "output_dbu_cost_per_token": 2.1429e-05, + "litellm_provider": "databricks", + "mode": "chat", + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + } + }, + "databricks/databricks-mixtral-8x7b-instruct": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 5.0001e-07, + "input_dbu_cost_per_token": 7.143e-06, + "output_cost_per_token": 9.9902e-07, + "output_dbu_cost_per_token": 1.4286e-05, + "litellm_provider": "databricks", + "mode": "chat", + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + } + }, + "databricks/databricks-mpt-30b-instruct": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 9.9902e-07, + "input_dbu_cost_per_token": 1.4286e-05, + "output_cost_per_token": 9.9902e-07, + "output_dbu_cost_per_token": 1.4286e-05, + "litellm_provider": "databricks", + "mode": "chat", + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + } + }, + "databricks/databricks-mpt-7b-instruct": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 5.0001e-07, + "input_dbu_cost_per_token": 7.143e-06, + "output_cost_per_token": 0.0, + "output_dbu_cost_per_token": 0.0, + "litellm_provider": "databricks", + "mode": "chat", + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + } + }, + "databricks/databricks-bge-large-en": { + "max_tokens": 512, + "max_input_tokens": 512, + "output_vector_size": 1024, + "input_cost_per_token": 1.0003e-07, + "input_dbu_cost_per_token": 1.429e-06, + "output_cost_per_token": 0.0, + "output_dbu_cost_per_token": 0.0, + "litellm_provider": "databricks", + "mode": "embedding", + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
+ } + }, + "databricks/databricks-gte-large-en": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "output_vector_size": 1024, + "input_cost_per_token": 1.2999e-07, + "input_dbu_cost_per_token": 1.857e-06, + "output_cost_per_token": 0.0, + "output_dbu_cost_per_token": 0.0, + "litellm_provider": "databricks", + "mode": "embedding", + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." + } + }, + "azure/gpt-4o-mini-2024-07-18": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 1.65e-07, + "output_cost_per_token": 6.6e-07, + "cache_read_input_token_cost": 7.5e-08, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_prompt_caching": true + }, + "amazon.titan-embed-image-v1": { + "max_tokens": 128, + "max_input_tokens": 128, + "output_vector_size": 1024, + "input_cost_per_token": 8e-07, + "input_cost_per_image": 6e-05, + "output_cost_per_token": 0.0, + "litellm_provider": "bedrock", + "supports_image_input": true, + "supports_embedding_image_input": true, + "mode": "embedding", + "source": "https://us-east-1.console.aws.amazon.com/bedrock/home?region=us-east-1#/providers?model=amazon.titan-image-generator-v1", + "metadata": { + "notes": "'supports_image_input' is a deprecated field. Use 'supports_embedding_image_input' instead." + } + }, + "azure_ai/mistral-large-2407": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 6e-06, + "litellm_provider": "azure_ai", + "supports_function_calling": true, + "mode": "chat", + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-ai-large-2407-offer?tab=Overview" + }, + "azure_ai/ministral-3b": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 4e-08, + "output_cost_per_token": 4e-08, + "litellm_provider": "azure_ai", + "supports_function_calling": true, + "mode": "chat", + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.ministral-3b-2410-offer?tab=Overview" + }, + "azure_ai/Llama-3.2-11B-Vision-Instruct": { + "max_tokens": 2048, + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "input_cost_per_token": 3.7e-07, + "output_cost_per_token": 3.7e-07, + "litellm_provider": "azure_ai", + "supports_function_calling": true, + "supports_vision": true, + "mode": "chat", + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-11b-vision-instruct-offer?tab=Overview" + }, + "azure_ai/Llama-3.2-90B-Vision-Instruct": { + "max_tokens": 2048, + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "input_cost_per_token": 2.04e-06, + "output_cost_per_token": 2.04e-06, + "litellm_provider": "azure_ai", + "supports_function_calling": true, + "supports_vision": true, + "mode": "chat", + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-90b-vision-instruct-offer?tab=Overview" + }, + "azure_ai/Phi-3.5-mini-instruct": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + 
"input_cost_per_token": 1.3e-07, + "output_cost_per_token": 5.2e-07, + "litellm_provider": "azure_ai", + "mode": "chat", + "supports_vision": false, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + }, + "azure_ai/Phi-3.5-vision-instruct": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.3e-07, + "output_cost_per_token": 5.2e-07, + "litellm_provider": "azure_ai", + "mode": "chat", + "supports_vision": true, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + }, + "azure_ai/Phi-3.5-MoE-instruct": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.6e-07, + "output_cost_per_token": 6.4e-07, + "litellm_provider": "azure_ai", + "mode": "chat", + "supports_vision": false, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + }, + "azure_ai/Phi-3-mini-4k-instruct": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1.3e-07, + "output_cost_per_token": 5.2e-07, + "litellm_provider": "azure_ai", + "mode": "chat", + "supports_vision": false, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + }, + "azure_ai/Phi-3-mini-128k-instruct": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.3e-07, + "output_cost_per_token": 5.2e-07, + "litellm_provider": "azure_ai", + "mode": "chat", + "supports_vision": false, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + }, + "azure_ai/Phi-3-small-8k-instruct": { + "max_tokens": 4096, + "max_input_tokens": 8192, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "azure_ai", + "mode": "chat", + "supports_vision": false, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + }, + "azure_ai/Phi-3-small-128k-instruct": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "azure_ai", + "mode": "chat", + "supports_vision": false, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + }, + "azure_ai/Phi-3-medium-4k-instruct": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 1.7e-07, + "output_cost_per_token": 6.8e-07, + "litellm_provider": "azure_ai", + "mode": "chat", + "supports_vision": false, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + }, + "azure_ai/Phi-3-medium-128k-instruct": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.7e-07, + "output_cost_per_token": 6.8e-07, + "litellm_provider": "azure_ai", + "mode": "chat", + "supports_vision": false, + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + }, + "xai/grok-beta": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 5e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "xai", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "claude-3-5-haiku-20241022": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 5e-06, + "cache_creation_input_token_cost": 1.25e-06, + "cache_read_input_token_cost": 
1e-07, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "tool_use_system_prompt_tokens": 264, + "supports_assistant_prefill": true, + "supports_prompt_caching": true, + "supports_response_schema": true + }, + "vertex_ai/claude-3-5-haiku@20241022": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 5e-06, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true + }, + "openrouter/anthropic/claude-3-5-haiku": { + "max_tokens": 200000, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 5e-06, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true + }, + "openrouter/anthropic/claude-3-5-haiku-20241022": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 5e-06, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "tool_use_system_prompt_tokens": 264 + }, + "anthropic.claude-3-5-haiku-20241022-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 5e-06, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_assistant_prefill": true, + "supports_function_calling": true, + "supports_prompt_caching": true + }, + "us.anthropic.claude-3-5-haiku-20241022-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 5e-06, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_assistant_prefill": true, + "supports_function_calling": true + }, + "eu.anthropic.claude-3-5-haiku-20241022-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 5e-06, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true + }, + "stability.sd3-large-v1:0": { + "max_tokens": 77, + "max_input_tokens": 77, + "output_cost_per_image": 0.08, + "litellm_provider": "bedrock", + "mode": "image_generation" + }, + "gpt-4o-2024-11-20": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 2.5e-06, + "output_cost_per_token": 1e-05, + "input_cost_per_token_batches": 1.25e-06, + "output_cost_per_token_batches": 5e-06, + "cache_read_input_token_cost": 1.25e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "ft:gpt-4o-2024-11-20": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 3.75e-06, + "cache_creation_input_token_cost": 1.875e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true + }, + "azure/gpt-4o-2024-11-20": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 2.75e-06, + 
"output_cost_per_token": 1.1e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true + }, + "azure/global-standard/gpt-4o-2024-11-20": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 2.5e-06, + "output_cost_per_token": 1e-05, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true + }, + "groq/llama-3.2-1b-preview": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 4e-08, + "output_cost_per_token": 4e-08, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/llama-3.2-3b-preview": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 6e-08, + "output_cost_per_token": 6e-08, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/llama-3.2-11b-text-preview": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1.8e-07, + "output_cost_per_token": 1.8e-07, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/llama-3.2-11b-vision-preview": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1.8e-07, + "output_cost_per_token": 1.8e-07, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_vision": true + }, + "groq/llama-3.2-90b-text-preview": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 9e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true + }, + "groq/llama-3.2-90b-vision-preview": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 9e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_vision": true + }, + "vertex_ai/claude-3-sonnet": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true + }, + "vertex_ai/claude-3-5-sonnet": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true + }, + "vertex_ai/claude-3-5-sonnet-v2": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 3e-06, + "output_cost_per_token": 1.5e-05, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": 
true, + "supports_vision": true, + "supports_assistant_prefill": true + }, + "vertex_ai/claude-3-haiku": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 2.5e-07, + "output_cost_per_token": 1.25e-06, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true + }, + "vertex_ai/claude-3-5-haiku": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-06, + "output_cost_per_token": 5e-06, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true + }, + "vertex_ai/claude-3-opus": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 7.5e-05, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true + }, + "gemini/gemini-exp-1114": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "tpm": 4000000, + "rpm": 1000, + "source": "https://ai.google.dev/pricing", + "metadata": { + "notes": "Rate limits not documented for gemini-exp-1114. Assuming same as gemini-1.5-pro." 
+ } + }, + "openrouter/qwen/qwen-2.5-coder-32b-instruct": { + "max_tokens": 33792, + "max_input_tokens": 33792, + "max_output_tokens": 33792, + "input_cost_per_token": 1.8e-07, + "output_cost_per_token": 1.8e-07, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "us.meta.llama3-1-8b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "input_cost_per_token": 2.2e-07, + "output_cost_per_token": 2.2e-07, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "us.meta.llama3-1-70b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "input_cost_per_token": 9.9e-07, + "output_cost_per_token": 9.9e-07, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "us.meta.llama3-1-405b-instruct-v1:0": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 5.32e-06, + "output_cost_per_token": 1.6e-05, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false + }, + "stability.stable-image-ultra-v1:0": { + "max_tokens": 77, + "max_input_tokens": 77, + "output_cost_per_image": 0.14, + "litellm_provider": "bedrock", + "mode": "image_generation" + }, + "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 9e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "source": "https://fireworks.ai/pricing" + }, + "omni-moderation-latest": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 0, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "openai", + "mode": "moderation" + }, + "omni-moderation-latest-intents": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 0, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "openai", + "mode": "moderation" + }, + "omni-moderation-2024-09-26": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 0, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "openai", + "mode": "moderation" + }, + "gpt-4o-audio-preview-2024-12-17": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 2.5e-06, + "input_cost_per_audio_token": 4e-05, + "output_cost_per_token": 1e-05, + "output_cost_per_audio_token": 8e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_system_messages": true + }, + "gpt-4o-mini-audio-preview-2024-12-17": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 1.5e-07, + "input_cost_per_audio_token": 1e-05, + "output_cost_per_token": 6e-07, + "output_cost_per_audio_token": 2e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_system_messages": true + }, + "o1": { 
+ "max_tokens": 100000, + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 6e-05, + "cache_read_input_token_cost": 7.5e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_response_schema": true + }, + "o1-2024-12-17": { + "max_tokens": 100000, + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 6e-05, + "cache_read_input_token_cost": 7.5e-06, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_response_schema": true + }, + "gpt-4o-realtime-preview-2024-10-01": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-06, + "input_cost_per_audio_token": 0.0001, + "cache_read_input_token_cost": 2.5e-06, + "cache_creation_input_audio_token_cost": 2e-05, + "output_cost_per_token": 2e-05, + "output_cost_per_audio_token": 0.0002, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_system_messages": true + }, + "gpt-4o-realtime-preview": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-06, + "input_cost_per_audio_token": 4e-05, + "cache_read_input_token_cost": 2.5e-06, + "output_cost_per_token": 2e-05, + "output_cost_per_audio_token": 8e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_system_messages": true + }, + "gpt-4o-realtime-preview-2024-12-17": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 5e-06, + "input_cost_per_audio_token": 4e-05, + "cache_read_input_token_cost": 2.5e-06, + "output_cost_per_token": 2e-05, + "output_cost_per_audio_token": 8e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_system_messages": true + }, + "gpt-4o-mini-realtime-preview": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 6e-07, + "input_cost_per_audio_token": 1e-05, + "cache_read_input_token_cost": 3e-07, + "cache_creation_input_audio_token_cost": 3e-07, + "output_cost_per_token": 2.4e-06, + "output_cost_per_audio_token": 2e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_system_messages": true + }, + "gpt-4o-mini-realtime-preview-2024-12-17": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 6e-07, + "input_cost_per_audio_token": 1e-05, + "cache_read_input_token_cost": 3e-07, + "cache_creation_input_audio_token_cost": 3e-07, + "output_cost_per_token": 
2.4e-06, + "output_cost_per_audio_token": 2e-05, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_audio_input": true, + "supports_audio_output": true, + "supports_system_messages": true + }, + "azure/o1": { + "max_tokens": 100000, + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 6e-05, + "cache_read_input_token_cost": 7.5e-06, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true, + "supports_prompt_caching": true + }, + "azure_ai/Llama-3.3-70B-Instruct": { + "max_tokens": 2048, + "max_input_tokens": 128000, + "max_output_tokens": 2048, + "input_cost_per_token": 7.1e-07, + "output_cost_per_token": 7.1e-07, + "litellm_provider": "azure_ai", + "supports_function_calling": true, + "mode": "chat", + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.llama-3-3-70b-instruct-offer?tab=Overview" + }, + "mistral/mistral-large-2411": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 6e-06, + "litellm_provider": "mistral", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true + }, + "mistral/pixtral-large-latest": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 6e-06, + "litellm_provider": "mistral", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true, + "supports_vision": true + }, + "mistral/pixtral-large-2411": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 6e-06, + "litellm_provider": "mistral", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true, + "supports_vision": true + }, + "deepseek/deepseek-chat": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.4e-07, + "input_cost_per_token_cache_hit": 1.4e-08, + "cache_read_input_token_cost": 1.4e-08, + "cache_creation_input_token_cost": 0.0, + "output_cost_per_token": 2.8e-07, + "litellm_provider": "deepseek", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true, + "supports_tool_choice": true, + "supports_prompt_caching": true + }, + "deepseek/deepseek-coder": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.4e-07, + "input_cost_per_token_cache_hit": 1.4e-08, + "output_cost_per_token": 2.8e-07, + "litellm_provider": "deepseek", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true, + "supports_tool_choice": true, + "supports_prompt_caching": true + }, + "groq/llama-3.3-70b-versatile": { + "max_tokens": 8192, + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "input_cost_per_token": 5.9e-07, + "output_cost_per_token": 7.9e-07, + "litellm_provider": "groq", + "mode": "chat" + }, + "groq/llama-3.3-70b-specdec": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 5.9e-07, + "output_cost_per_token": 9.9e-07, + "litellm_provider": "groq", + "mode": "chat" + }, + 
"friendliai/meta-llama-3.1-8b-instruct": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 1e-07, + "litellm_provider": "friendliai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_response_schema": true + }, + "friendliai/meta-llama-3.1-70b-instruct": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 6e-07, + "output_cost_per_token": 6e-07, + "litellm_provider": "friendliai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_system_messages": true, + "supports_response_schema": true + }, + "gemini-2.0-flash-exp": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_image": 0, + "input_cost_per_video_per_second": 0, + "input_cost_per_audio_per_second": 0, + "input_cost_per_token": 0, + "input_cost_per_character": 0, + "input_cost_per_token_above_128k_tokens": 0, + "input_cost_per_character_above_128k_tokens": 0, + "input_cost_per_image_above_128k_tokens": 0, + "input_cost_per_video_per_second_above_128k_tokens": 0, + "input_cost_per_audio_per_second_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_character": 0, + "output_cost_per_token_above_128k_tokens": 0, + "output_cost_per_character_above_128k_tokens": 0, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash" + }, + "gemini/gemini-2.0-flash-exp": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_image": 0, + "input_cost_per_video_per_second": 0, + "input_cost_per_audio_per_second": 0, + "input_cost_per_token": 0, + "input_cost_per_character": 0, + "input_cost_per_token_above_128k_tokens": 0, + "input_cost_per_character_above_128k_tokens": 0, + "input_cost_per_image_above_128k_tokens": 0, + "input_cost_per_video_per_second_above_128k_tokens": 0, + "input_cost_per_audio_per_second_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_character": 0, + "output_cost_per_token_above_128k_tokens": 0, + "output_cost_per_character_above_128k_tokens": 0, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": true, + "tpm": 4000000, + "rpm": 10, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash" + }, + "vertex_ai/mistral-large@2411-001": { + "max_tokens": 8191, + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 6e-06, + "litellm_provider": "vertex_ai-mistral_models", + "mode": "chat", + 
"supports_function_calling": true + }, + "vertex_ai/mistral-large-2411": { + "max_tokens": 8191, + "max_input_tokens": 128000, + "max_output_tokens": 8191, + "input_cost_per_token": 2e-06, + "output_cost_per_token": 6e-06, + "litellm_provider": "vertex_ai-mistral_models", + "mode": "chat", + "supports_function_calling": true + }, + "text-embedding-005": { + "max_tokens": 2048, + "max_input_tokens": 2048, + "output_vector_size": 768, + "input_cost_per_character": 2.5e-08, + "input_cost_per_token": 1e-07, + "output_cost_per_token": 0, + "litellm_provider": "vertex_ai-embedding-models", + "mode": "embedding", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models" + }, + "gemini/gemini-1.5-flash-8b": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_prompt_caching": true, + "tpm": 4000000, + "rpm": 4000, + "source": "https://ai.google.dev/pricing" + }, + "gemini/gemini-exp-1206": { + "max_tokens": 8192, + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_token": 0, + "input_cost_per_token_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_token_above_128k_tokens": 0, + "litellm_provider": "gemini", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "tpm": 4000000, + "rpm": 1000, + "source": "https://ai.google.dev/pricing", + "metadata": { + "notes": "Rate limits not documented for gemini-exp-1206. Assuming same as gemini-1.5-pro." 
+ } + }, + "command-r7b-12-2024": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.5e-07, + "output_cost_per_token": 3.75e-08, + "litellm_provider": "cohere_chat", + "mode": "chat", + "supports_function_calling": true, + "source": "https://docs.cohere.com/v2/docs/command-r7b" + }, + "rerank-v3.5": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "max_query_tokens": 2048, + "input_cost_per_token": 0.0, + "input_cost_per_query": 0.002, + "output_cost_per_token": 0.0, + "litellm_provider": "cohere", + "mode": "rerank" + }, + "openrouter/deepseek/deepseek-chat": { + "max_tokens": 8192, + "max_input_tokens": 66000, + "max_output_tokens": 4096, + "input_cost_per_token": 1.4e-07, + "output_cost_per_token": 2.8e-07, + "litellm_provider": "openrouter", + "supports_prompt_caching": true, + "mode": "chat" + }, + "openrouter/openai/o1": { + "max_tokens": 100000, + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "input_cost_per_token": 1.5e-05, + "output_cost_per_token": 6e-05, + "cache_read_input_token_cost": 7.5e-06, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_response_schema": true + }, + "amazon.nova-micro-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 300000, + "max_output_tokens": 4096, + "input_cost_per_token": 3.5e-08, + "output_cost_per_token": 1.4e-07, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_prompt_caching": true + }, + "amazon.nova-lite-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 6e-08, + "output_cost_per_token": 2.4e-07, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_prompt_caching": true + }, + "amazon.nova-pro-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 300000, + "max_output_tokens": 4096, + "input_cost_per_token": 8e-07, + "output_cost_per_token": 3.2e-06, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_prompt_caching": true + }, + "meta.llama3-3-70b-instruct-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 7.2e-07, + "output_cost_per_token": 7.2e-07, + "litellm_provider": "bedrock_converse", + "mode": "chat" + }, + "together_ai/meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": { + "input_cost_per_token": 1.8e-07, + "output_cost_per_token": 1.8e-07, + "litellm_provider": "together_ai", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "mode": "chat" + }, + "together_ai/meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": { + "input_cost_per_token": 8.8e-07, + "output_cost_per_token": 8.8e-07, + "litellm_provider": "together_ai", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "mode": "chat" + }, + "together_ai/meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": { + "input_cost_per_token": 3.5e-06, + "output_cost_per_token": 3.5e-06, + "litellm_provider": "together_ai", + "supports_function_calling": 
true, + "supports_parallel_function_calling": true, + "mode": "chat" + }, + "deepinfra/meta-llama/Meta-Llama-3.1-405B-Instruct": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 9e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "deepinfra", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true + }, + "fireworks_ai/accounts/fireworks/models/deepseek-v3": { + "max_tokens": 8192, + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "input_cost_per_token": 9e-07, + "output_cost_per_token": 9e-07, + "litellm_provider": "fireworks_ai", + "mode": "chat", + "supports_response_schema": true, + "source": "https://fireworks.ai/pricing" + }, + "voyage/voyage-3-large": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "input_cost_per_token": 1.8e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "embedding" + }, + "voyage/voyage-3": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "input_cost_per_token": 6e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "embedding" + }, + "voyage/voyage-3-lite": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "input_cost_per_token": 2e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "embedding" + }, + "voyage/voyage-code-3": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "input_cost_per_token": 1.8e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "embedding" + }, + "voyage/voyage-multimodal-3": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "input_cost_per_token": 1.2e-07, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "embedding" + }, + "voyage/rerank-2": { + "max_tokens": 16000, + "max_input_tokens": 16000, + "max_output_tokens": 16000, + "max_query_tokens": 16000, + "input_cost_per_token": 5e-08, + "input_cost_per_query": 5e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "rerank" + }, + "voyage/rerank-2-lite": { + "max_tokens": 8000, + "max_input_tokens": 8000, + "max_output_tokens": 8000, + "max_query_tokens": 8000, + "input_cost_per_token": 2e-08, + "input_cost_per_query": 2e-08, + "output_cost_per_token": 0.0, + "litellm_provider": "voyage", + "mode": "rerank" + }, + "databricks/meta-llama-3.3-70b-instruct": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 1.00002e-06, + "input_dbu_cost_per_token": 1.4286e-05, + "output_cost_per_token": 2.99999e-06, + "output_dbu_cost_per_token": 4.2857e-05, + "litellm_provider": "databricks", + "mode": "chat", + "source": "https://www.databricks.com/product/pricing/foundation-model-serving", + "metadata": { + "notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation." 
+    }
+  },
+  "sambanova/Meta-Llama-3.1-8B-Instruct": {
+    "max_tokens": 16000,
+    "max_input_tokens": 16000,
+    "max_output_tokens": 16000,
+    "input_cost_per_token": 1e-07,
+    "output_cost_per_token": 2e-07,
+    "litellm_provider": "sambanova",
+    "supports_function_calling": true,
+    "mode": "chat"
+  },
+  "sambanova/Meta-Llama-3.1-70B-Instruct": {
+    "max_tokens": 128000,
+    "max_input_tokens": 128000,
+    "max_output_tokens": 128000,
+    "input_cost_per_token": 6e-07,
+    "output_cost_per_token": 1.2e-06,
+    "litellm_provider": "sambanova",
+    "supports_function_calling": true,
+    "mode": "chat"
+  },
+  "sambanova/Meta-Llama-3.1-405B-Instruct": {
+    "max_tokens": 16000,
+    "max_input_tokens": 16000,
+    "max_output_tokens": 16000,
+    "input_cost_per_token": 5e-06,
+    "output_cost_per_token": 1e-05,
+    "litellm_provider": "sambanova",
+    "supports_function_calling": true,
+    "mode": "chat"
+  },
+  "sambanova/Meta-Llama-3.2-1B-Instruct": {
+    "max_tokens": 16000,
+    "max_input_tokens": 16000,
+    "max_output_tokens": 16000,
+    "input_cost_per_token": 4e-07,
+    "output_cost_per_token": 8e-07,
+    "litellm_provider": "sambanova",
+    "supports_function_calling": true,
+    "mode": "chat"
+  },
+  "sambanova/Meta-Llama-3.2-3B-Instruct": {
+    "max_tokens": 4000,
+    "max_input_tokens": 4000,
+    "max_output_tokens": 4000,
+    "input_cost_per_token": 8e-07,
+    "output_cost_per_token": 1.6e-06,
+    "litellm_provider": "sambanova",
+    "supports_function_calling": true,
+    "mode": "chat"
+  },
+  "sambanova/Qwen2.5-Coder-32B-Instruct": {
+    "max_tokens": 8000,
+    "max_input_tokens": 8000,
+    "max_output_tokens": 8000,
+    "input_cost_per_token": 1.5e-06,
+    "output_cost_per_token": 3e-06,
+    "litellm_provider": "sambanova",
+    "supports_function_calling": true,
+    "mode": "chat"
+  },
+  "sambanova/Qwen2.5-72B-Instruct": {
+    "max_tokens": 8000,
+    "max_input_tokens": 8000,
+    "max_output_tokens": 8000,
+    "input_cost_per_token": 2e-06,
+    "output_cost_per_token": 4e-06,
+    "litellm_provider": "sambanova",
+    "supports_function_calling": true,
+    "mode": "chat"
+  }
+}
\ No newline at end of file
diff --git a/apps/api/src/routes/v1.ts b/apps/api/src/routes/v1.ts
index 4aacfe18..f92fe024 100644
--- a/apps/api/src/routes/v1.ts
+++ b/apps/api/src/routes/v1.ts
@@ -228,7 +228,7 @@ v1Router.post(
 
 v1Router.get(
   "/extract/:jobId",
-  authMiddleware(RateLimiterMode.CrawlStatus),
+  authMiddleware(RateLimiterMode.ExtractStatus),
   wrap(extractStatusController),
 );
 
diff --git a/apps/api/src/scraper/scrapeURL/transformers/llmExtract.ts b/apps/api/src/scraper/scrapeURL/transformers/llmExtract.ts
index be97036f..1ec8502a 100644
--- a/apps/api/src/scraper/scrapeURL/transformers/llmExtract.ts
+++ b/apps/api/src/scraper/scrapeURL/transformers/llmExtract.ts
@@ -1,7 +1,7 @@
 import OpenAI from "openai";
 import { encoding_for_model } from "@dqbd/tiktoken";
 import { TiktokenModel } from "@dqbd/tiktoken";
-import { Document, ExtractOptions } from "../../../controllers/v1/types";
+import { Document, ExtractOptions, TokenUsage } from "../../../controllers/v1/types";
 import { Logger } from "winston";
 import { EngineResultsTracker, Meta } from "..";
 import { logger } from "../../../lib/logger";
@@ -72,7 +72,7 @@ export async function generateOpenAICompletions(
   markdown?: string,
   previousWarning?: string,
   isExtractEndpoint?: boolean,
-): Promise<{ extract: any; numTokens: number; warning: string | undefined }> {
+): Promise<{ extract: any; numTokens: number; warning: string | undefined; totalUsage: TokenUsage }> {
   let extract: any;
   let warning: string | undefined;
 
@@ -208,6 +208,9 @@ export async function generateOpenAICompletions(
     }
   }
 
+  const promptTokens = (jsonCompletion.usage?.prompt_tokens ?? 0);
+  const completionTokens = (jsonCompletion.usage?.completion_tokens ?? 0);
+
   // If the users actually wants the items object, they can specify it as 'required' in the schema
   // otherwise, we just return the items array
   if (
@@ -217,7 +220,9 @@ export async function generateOpenAICompletions(
   ) {
     extract = extract?.items;
   }
-  return { extract, warning, numTokens };
+  // num tokens (just user prompt tokenized) | deprecated
+  // totalTokens = promptTokens + completionTokens
+  return { extract, warning, numTokens, totalUsage: { promptTokens, completionTokens, totalTokens: promptTokens + completionTokens, model: model } };
 }
 
 export async function performLLMExtract(
@@ -282,6 +287,22 @@ Consider:
 3. Appropriate data types for each field
 4. Nested objects and arrays where appropriate
 
+Valid JSON schema, has to be simple. No crazy properties. OpenAI has to support it.
+Supported types
+The following types are supported for Structured Outputs:
+
+String
+Number
+Boolean
+Integer
+Object
+Array
+Enum
+anyOf
+
+Formats are not supported. Min/max are not supported. Anything beyond the above is not supported. Keep it simple with types and descriptions.
+Optionals are not supported.
+Keep it simple. Don't create too many properties, just the ones that are needed. Don't invent properties.
 Return a valid JSON schema object with properties that would capture the information requested in the prompt.`,
         },
         {
diff --git a/apps/api/src/services/billing/credit_billing.ts b/apps/api/src/services/billing/credit_billing.ts
index 0a866006..8189161b 100644
--- a/apps/api/src/services/billing/credit_billing.ts
+++ b/apps/api/src/services/billing/credit_billing.ts
@@ -23,12 +23,14 @@ export async function billTeam(
   subscription_id: string | null | undefined,
   credits: number,
   logger?: Logger,
+  is_extract: boolean = false,
 ) {
   return withAuth(supaBillTeam, { success: true, message: "No DB, bypassed." })(
     team_id,
     subscription_id,
     credits,
     logger,
+    is_extract,
   );
 }
 export async function supaBillTeam(
@@ -36,6 +38,7 @@ export async function supaBillTeam(
   subscription_id: string | null | undefined,
   credits: number,
   __logger?: Logger,
+  is_extract: boolean = false,
 ) {
   const _logger = (__logger ?? logger).child({
     module: "credit_billing",
@@ -50,11 +53,12 @@ export async function supaBillTeam(
     credits,
   });
 
-  const { data, error } = await supabase_service.rpc("bill_team", {
+  const { data, error } = await supabase_service.rpc("bill_team_w_extract_3", {
     _team_id: team_id,
     sub_id: subscription_id ?? null,
     fetch_subscription: subscription_id === undefined,
     credits,
+    is_extract_param: is_extract,
   });
 
   if (error) {
diff --git a/apps/api/src/services/logging/log_job.ts b/apps/api/src/services/logging/log_job.ts
index 2ee07292..ddd9b234 100644
--- a/apps/api/src/services/logging/log_job.ts
+++ b/apps/api/src/services/logging/log_job.ts
@@ -59,6 +59,7 @@ export async function logJob(job: FirecrawlJob, force: boolean = false) {
       num_tokens: job.num_tokens,
       retry: !!job.retry,
       crawl_id: job.crawl_id,
+      tokens_billed: job.tokens_billed,
     };
 
     if (force) {
@@ -128,6 +129,7 @@ export async function logJob(job: FirecrawlJob, force: boolean = false) {
         origin: job.origin,
         num_tokens: job.num_tokens,
         retry: job.retry,
+        tokens_billed: job.tokens_billed,
       },
     };
     if (job.mode !== "single_urls") {
diff --git a/apps/api/src/services/rate-limiter.ts b/apps/api/src/services/rate-limiter.ts
index 4631fcfc..09c8d749 100644
--- a/apps/api/src/services/rate-limiter.ts
+++ b/apps/api/src/services/rate-limiter.ts
@@ -100,6 +100,10 @@ const RATE_LIMITS = {
     free: 500,
     default: 5000,
   },
+  extractStatus: {
+    free: 500,
+    default: 5000,
+  },
   testSuite: {
     free: 10000,
     default: 10000,
diff --git a/apps/api/src/types.ts b/apps/api/src/types.ts
index 102f4929..ce535ede 100644
--- a/apps/api/src/types.ts
+++ b/apps/api/src/types.ts
@@ -87,6 +87,7 @@ export interface FirecrawlJob {
   num_tokens?: number;
   retry?: boolean;
   crawl_id?: string;
+  tokens_billed?: number;
 }
 
 export interface FirecrawlScrapeResponse {
@@ -133,6 +134,7 @@ export enum RateLimiterMode {
   Search = "search",
   Map = "map",
   Extract = "extract",
+  ExtractStatus = "extractStatus",
 }
 
 export type AuthResponse =
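Note on usage accounting: the vendored pricing table plus the totalUsage value now returned by generateOpenAICompletions are enough to put a dollar figure on an extract job. The helper that actually does this is not part of this patch, so the sketch below is illustrative only; the JSON file name, the function names, and the zero-cost fallback for unknown models are all assumptions.

// Illustrative sketch only: estimate LLM spend from the pricing table vendored above.
// "model-prices.json" and these helper names are assumed, not taken from the codebase.
import modelPrices from "./model-prices.json"; // assumes "resolveJsonModule" in tsconfig

type UsageLike = {
  promptTokens: number;
  completionTokens: number;
  totalTokens: number;
  model?: string;
};

// Look up a model's per-token rates; unknown models are priced at 0 rather than throwing.
function getRates(model?: string): { input: number; output: number } {
  const entry = model ? (modelPrices as Record<string, any>)[model] : undefined;
  return {
    input: entry?.input_cost_per_token ?? 0,
    output: entry?.output_cost_per_token ?? 0,
  };
}

// USD estimate for a single completion, e.g. one totalUsage record.
export function estimateCallCost(usage: UsageLike): number {
  const { input, output } = getRates(usage.model);
  return usage.promptTokens * input + usage.completionTokens * output;
}

// Roll a job's calls up into totals; the token sum is the kind of number that could back
// a field like tokens_billed, while the dollar sum is mainly useful for monitoring.
export function summarizeUsage(usages: UsageLike[]): { totalTokens: number; totalCostUSD: number } {
  return usages.reduce(
    (acc, u) => ({
      totalTokens: acc.totalTokens + u.totalTokens,
      totalCostUSD: acc.totalCostUSD + estimateCallCost(u),
    }),
    { totalTokens: 0, totalCostUSD: 0 },
  );
}

As a worked check against the table above, a gpt-4o-realtime-preview call with 1,000 prompt tokens and 200 completion tokens comes out to 1,000 x 5e-06 + 200 x 2e-05 = $0.009.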
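The guidance added to the schema-generation prompt in llmExtract.ts pins generated schemas to the subset of JSON Schema that OpenAI structured outputs accept. As a concrete illustration (the object shape and field names here are invented, not from the codebase), a schema that respects those rules would look like:

// Hypothetical example of a schema that follows the added prompt rules: only plain types
// with descriptions, no formats, no min/max, and no optional properties.
const exampleGeneratedSchema = {
  type: "object",
  properties: {
    company_name: { type: "string", description: "Name of the company" },
    founded_year: { type: "integer", description: "Year the company was founded" },
    is_hiring: { type: "boolean", description: "Whether open roles are listed" },
    products: {
      type: "array",
      items: { type: "string" },
      description: "Product names mentioned on the page",
    },
  },
  // Strict structured outputs also generally expect every property to be required
  // and additionalProperties to be false.
  required: ["company_name", "founded_year", "is_hiring", "products"],
  additionalProperties: false,
};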
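Finally, the new extractStatus bucket in RATE_LIMITS and the matching RateLimiterMode.ExtractStatus value give /extract/:jobId polling its own limits instead of reusing the crawl-status ones. How the table is consumed is outside this diff; a minimal lookup consistent with the entries above might be (plan names and fallback logic are assumptions):

// Minimal sketch of resolving a request limit for a mode/plan pair from a
// RATE_LIMITS-style table. Only the extractStatus numbers come from the diff;
// the lookup logic itself is illustrative.
const RATE_LIMITS: Record<string, Record<string, number>> = {
  extractStatus: {
    free: 500,
    default: 5000,
  },
};

function resolveLimit(mode: string, plan?: string): number {
  const limits = RATE_LIMITS[mode];
  if (!limits) {
    throw new Error(`No rate limits configured for mode "${mode}"`);
  }
  // Unknown or missing plans fall back to the mode's default bucket.
  if (plan && limits[plan] !== undefined) {
    return limits[plan];
  }
  return limits.default;
}

// resolveLimit("extractStatus", "free")  -> 500
// resolveLimit("extractStatus", "scale") -> 5000 (falls back to default)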