mirror of
https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git
synced 2025-04-19 12:39:59 +08:00

### What problem does this PR solve? Update LLM models ### Type of change - [x] Refactoring
3237 lines
113 KiB
JSON
3237 lines
113 KiB
JSON
{
|
|
"factory_llm_infos": [
|
|
{
|
|
"name": "OpenAI",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,TTS,TEXT RE-RANK,SPEECH2TEXT,MODERATION",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "gpt-4o-mini",
|
|
"tags": "LLM,CHAT,128K,IMAGE2TEXT",
|
|
"max_tokens": 128000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "gpt-4o",
|
|
"tags": "LLM,CHAT,128K,IMAGE2TEXT",
|
|
"max_tokens": 128000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "gpt-3.5-turbo",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "gpt-3.5-turbo-16k-0613",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16385,
|
|
"model_type": "chat",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "text-embedding-ada-002",
|
|
"tags": "TEXT EMBEDDING,8K",
|
|
"max_tokens": 8191,
|
|
"model_type": "embedding",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "text-embedding-3-small",
|
|
"tags": "TEXT EMBEDDING,8K",
|
|
"max_tokens": 8191,
|
|
"model_type": "embedding",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "text-embedding-3-large",
|
|
"tags": "TEXT EMBEDDING,8K",
|
|
"max_tokens": 8191,
|
|
"model_type": "embedding",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "whisper-1",
|
|
"tags": "SPEECH2TEXT",
|
|
"max_tokens": 26214400,
|
|
"model_type": "speech2text",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "gpt-4",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8191,
|
|
"model_type": "chat",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "gpt-4-turbo",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8191,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "gpt-4-32k",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "tts-1",
|
|
"tags": "TTS",
|
|
"max_tokens": 2048,
|
|
"model_type": "tts",
|
|
"is_tools": false
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "Tongyi-Qianwen",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,TEXT RE-RANK,TTS,SPEECH2TEXT,MODERATION",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "deepseek-r1",
|
|
"tags": "LLM,CHAT,64K",
|
|
"max_tokens": 65792,
|
|
"model_type": "chat",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "deepseek-v3",
|
|
"tags": "LLM,CHAT,64K",
|
|
"max_tokens": 65792,
|
|
"model_type": "chat",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "deepseek-r1-distill-qwen-1.5b",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "deepseek-r1-distill-qwen-7b",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "deepseek-r1-distill-qwen-14b",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "deepseek-r1-distill-qwen-32b",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "deepseek-r1-distill-llama-8b",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "deepseek-r1-distill-llama-70b",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "qwq-32b",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "qwq-plus",
|
|
"tags": "LLM,CHAT,132k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "qwq-plus-latest",
|
|
"tags": "LLM,CHAT,132k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "qwen-long",
|
|
"tags": "LLM,CHAT,1000K",
|
|
"max_tokens": 1000000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "qwen-turbo",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8191,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "qwen-max",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "qwen-plus",
|
|
"tags": "LLM,CHAT,132k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "qwen-plus-latest",
|
|
"tags": "LLM,CHAT,132k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "text-embedding-v2",
|
|
"tags": "TEXT EMBEDDING,2K",
|
|
"max_tokens": 2048,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "sambert-zhide-v1",
|
|
"tags": "TTS",
|
|
"max_tokens": 2048,
|
|
"model_type": "tts"
|
|
},
|
|
{
|
|
"llm_name": "sambert-zhiru-v1",
|
|
"tags": "TTS",
|
|
"max_tokens": 2048,
|
|
"model_type": "tts"
|
|
},
|
|
{
|
|
"llm_name": "text-embedding-v3",
|
|
"tags": "TEXT EMBEDDING,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "qwen-vl-max",
|
|
"tags": "LLM,CHAT,IMAGE2TEXT",
|
|
"max_tokens": 765,
|
|
"model_type": "image2text",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "qwen-vl-plus",
|
|
"tags": "LLM,CHAT,IMAGE2TEXT",
|
|
"max_tokens": 765,
|
|
"model_type": "image2text",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "gte-rerank",
|
|
"tags": "RE-RANK,4k",
|
|
"max_tokens": 4000,
|
|
"model_type": "rerank"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "ZHIPU-AI",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "glm-4-plus",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 128000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "glm-4-0520",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 128000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "glm-4",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 128000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "glm-4-airx",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 8000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "glm-4-air",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 128000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "glm-4-flash",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 128000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "glm-4-flashx",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 128000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "glm-4-long",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 1000000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "glm-3-turbo",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 128000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "glm-4v",
|
|
"tags": "LLM,CHAT,IMAGE2TEXT",
|
|
"max_tokens": 2000,
|
|
"model_type": "image2text",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "glm-4-9b",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "embedding-2",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "embedding-3",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "Ollama",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "ModelScope",
|
|
"logo": "",
|
|
"tags": "LLM",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "LocalAI",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "OpenAI-API-Compatible",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "VLLM",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "Moonshot",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "moonshot-v1-8k",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 7900,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "moonshot-v1-32k",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "moonshot-v1-128k",
|
|
"tags": "LLM,CHAT",
|
|
"max_tokens": 128000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "moonshot-v1-auto",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 128000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "FastEmbed",
|
|
"logo": "",
|
|
"tags": "TEXT EMBEDDING",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "Xinference",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,TTS,SPEECH2TEXT,MODERATION,TEXT RE-RANK",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "Youdao",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "maidalun1020/bce-embedding-base_v1",
|
|
"tags": "TEXT EMBEDDING,",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "DeepSeek",
|
|
"logo": "",
|
|
"tags": "LLM",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "deepseek-chat",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|


|
},
|
|
{
|
|
"llm_name": "deepseek-reasoner",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "VolcEngine",
|
|
"logo": "",
|
|
"tags": "LLM, TEXT EMBEDDING",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "Doubao-pro-128k",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Doubao-pro-32k",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Doubao-pro-4k",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "BaiChuan",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "Baichuan2-Turbo",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Baichuan2-Turbo-192k",
|
|
"tags": "LLM,CHAT,192K",
|
|
"max_tokens": 196608,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Baichuan3-Turbo",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Baichuan3-Turbo-128k",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Baichuan4",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Baichuan-Text-Embedding",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "Jina",
|
|
"logo": "",
|
|
"tags": "TEXT EMBEDDING, TEXT RE-RANK",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "jina-reranker-v1-base-en",
|
|
"tags": "RE-RANK,8k",
|
|
"max_tokens": 8196,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "jina-reranker-v1-turbo-en",
|
|
"tags": "RE-RANK,8k",
|
|
"max_tokens": 8196,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "jina-reranker-v1-tiny-en",
|
|
"tags": "RE-RANK,8k",
|
|
"max_tokens": 8196,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "jina-colbert-v1-en",
|
|
"tags": "RE-RANK,8k",
|
|
"max_tokens": 8196,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "jina-embeddings-v2-base-en",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 8196,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "jina-embeddings-v2-base-de",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 8196,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "jina-embeddings-v2-base-es",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 8196,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "jina-embeddings-v2-base-code",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 8196,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "jina-embeddings-v2-base-zh",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 8196,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "jina-reranker-v2-base-multilingual",
|
|
"tags": "RE-RANK,8k",
|
|
"max_tokens": 8196,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "jina-embeddings-v3",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 8196,
|
|
"model_type": "embedding"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "BAAI",
|
|
"logo": "",
|
|
"tags": "TEXT EMBEDDING, TEXT RE-RANK",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "BAAI/bge-large-zh-v1.5",
|
|
"tags": "TEXT EMBEDDING,",
|
|
"max_tokens": 1024,
|
|
"model_type": "embedding"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "MiniMax",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "abab6.5-chat",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "abab6.5s-chat",
|
|
"tags": "LLM,CHAT,245k",
|
|
"max_tokens": 245760,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "abab6.5t-chat",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "abab6.5g-chat",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "abab5.5s-chat",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "Mistral",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,MODERATION",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "codestral-latest",
|
|
"tags": "LLM,CHAT,256k",
|
|
"max_tokens": 256000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "mistral-large-latest",
|
|
"tags": "LLM,CHAT,131k",
|
|
"max_tokens": 131000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "mistral-saba-latest",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "pixtral-large-latest",
|
|
"tags": "LLM,CHAT,IMAGE2TEXT,131k",
|
|
"max_tokens": 131000,
|
|
"model_type": "image2text",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "ministral-3b-latest",
|
|
"tags": "LLM,CHAT,131k",
|
|
"max_tokens": 131000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "ministral-8b-latest",
|
|
"tags": "LLM,CHAT,131k",
|
|
"max_tokens": 131000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "mistral-embed",
|
|
"tags": "TEXT EMBEDDING,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "mistral-moderation-latest",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mistral-small-latest",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "pixtral-12b-2409",
|
|
"tags": "LLM,IMAGE2TEXT,131k",
|
|
"max_tokens": 131000,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "mistral-ocr-latest",
|
|
"tags": "LLM,IMAGE2TEXT,131k",
|
|
"max_tokens": 131000,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "open-mistral-nemo",
|
|
"tags": "LLM,CHAT,131k",
|
|
"max_tokens": 131000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "open-codestral-mamba",
|
|
"tags": "LLM,CHAT,256k",
|
|
"max_tokens": 256000,
|
|
"model_type": "chat"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "Azure-OpenAI",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "gpt-4o-mini",
|
|
"tags": "LLM,CHAT,128K,IMAGE2TEXT",
|
|
"max_tokens": 128000,
|
|
"model_type": "image2text",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "gpt-4o",
|
|
"tags": "LLM,CHAT,128K,IMAGE2TEXT",
|
|
"max_tokens": 128000,
|
|
"model_type": "image2text",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "gpt-3.5-turbo",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat",
|
|
"is_tools": false
|
|
},
|
|
{
|
|
"llm_name": "gpt-3.5-turbo-16k",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16385,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "text-embedding-ada-002",
|
|
"tags": "TEXT EMBEDDING,8K",
|
|
"max_tokens": 8191,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "text-embedding-3-small",
|
|
"tags": "TEXT EMBEDDING,8K",
|
|
"max_tokens": 8191,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "text-embedding-3-large",
|
|
"tags": "TEXT EMBEDDING,8K",
|
|
"max_tokens": 8191,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "whisper-1",
|
|
"tags": "SPEECH2TEXT",
|
|
"max_tokens": 26214400,
|
|
"model_type": "speech2text"
|
|
},
|
|
{
|
|
"llm_name": "gpt-4",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8191,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "gpt-4-turbo",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8191,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "gpt-4-32k",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "gpt-4-vision-preview",
|
|
"tags": "LLM,CHAT,IMAGE2TEXT",
|
|
"max_tokens": 765,
|
|
"model_type": "image2text"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "Bedrock",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "Gemini",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "gemini-2.0-flash-001",
|
|
"tags": "LLM,CHAT,1024K",
|
|
"max_tokens": 1048576,
|
|
"model_type": "image2text",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "gemini-2.0-flash-thinking-exp-01-21",
|
|
"tags": "LLM,CHAT,1024K",
|
|
"max_tokens": 1048576,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "gemini-1.5-flash",
|
|
"tags": "LLM,IMAGE2TEXT,1024K",
|
|
"max_tokens": 1048576,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "gemini-2.5-pro-exp-03-25",
|
|
"tags": "LLM,IMAGE2TEXT,1024K",
|
|
"max_tokens": 1048576,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "gemini-1.5-pro",
|
|
"tags": "LLM,IMAGE2TEXT,2048K",
|
|
"max_tokens": 2097152,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "gemini-1.5-flash-8b",
|
|
"tags": "LLM,IMAGE2TEXT,1024K",
|
|
"max_tokens": 1048576,
|
|
"model_type": "image2text",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "text-embedding-004",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 2048,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "embedding-001",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 2048,
|
|
"model_type": "embedding"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "Groq",
|
|
"logo": "",
|
|
"tags": "LLM",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "gemma-7b-it",
|
|
"tags": "LLM,CHAT,15k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "gemma2-9b-it",
|
|
"tags": "LLM,CHAT,15k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "llama3-70b-8192",
|
|
"tags": "LLM,CHAT,6k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "llama3-8b-8192",
|
|
"tags": "LLM,CHAT,30k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "llama-3.1-70b-versatile",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "llama-3.1-8b-instant",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "llama-3.3-70b-versatile",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 128000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "llama-3.3-70b-specdec",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mixtral-8x7b-32768",
|
|
"tags": "LLM,CHAT,5k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "OpenRouter",
|
|
"logo": "",
|
|
"tags": "LLM,IMAGE2TEXT",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "StepFun",
|
|
"logo": "",
|
|
"tags": "LLM",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "step-1-8k",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "step-1-32k",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "step-1-128k",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "step-1-256k",
|
|
"tags": "LLM,CHAT,256k",
|
|
"max_tokens": 262144,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "step-1v-8k",
|
|
"tags": "LLM,CHAT,IMAGE2TEXT",
|
|
"max_tokens": 8192,
|
|
"model_type": "image2text",
|
|
"is_tools": true
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "NVIDIA",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING, TEXT RE-RANK",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "01-ai/yi-large",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "abacusai/dracarys-llama-3.1-70b-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "ai21labs/jamba-1.5-large-instruct",
|
|
"tags": "LLM,CHAT,256K",
|
|
"max_tokens": 256000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "ai21labs/jamba-1.5-mini-instruct",
|
|
"tags": "LLM,CHAT,256K",
|
|
"max_tokens": 256000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "aisingapore/sea-lion-7b-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "baichuan-inc/baichuan2-13b-chat",
|
|
"tags": "LLM,CHAT,192K",
|
|
"max_tokens": 196608,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "bigcode/starcoder2-7b",
|
|
"tags": "LLM,CHAT,16K",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "bigcode/starcoder2-15b",
|
|
"tags": "LLM,CHAT,16K",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "databricks/dbrx-instruct",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "deepseek-ai/deepseek-r1",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "google/gemma-2b",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "google/gemma-7b",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "google/gemma-2-2b-it",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "google/gemma-2-9b-it",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "google/gemma-2-27b-it",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "google/codegemma-1.1-7b",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "google/codegemma-7b",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "google/recurrentgemma-2b",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "google/shieldgemma-9b",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "ibm/granite-3.0-3b-a800m-instruct",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "ibm/granite-3.0-8b-instruct",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "ibm/granite-34b-code-instruct",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "ibm/granite-8b-code-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "ibm/granite-guardian-3.0-8b",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "igenius/colosseum-355b_instruct_16k",
|
|
"tags": "LLM,CHAT,16K",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "igenius/italia_10b_instruct_16k",
|
|
"tags": "LLM,CHAT,16K",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "institute-of-science-tokyo/llama-3.1-swallow-70b-instruct-v01",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "institute-of-science-tokyo/llama-3.1-swallow-8b-instruct-v0.1",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mediatek/breeze-7b-instruct",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta/codellama-70b",
|
|
"tags": "LLM,CHAT,100K",
|
|
"max_tokens": 100000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta/llama2-70b",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta/llama3-8b",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta/llama3-70b",
|
|
"tags": "LLM,CHAT,",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta/llama-3.1-8b-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta/llama-3.1-70b-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta/llama-3.1-405b-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta/llama-3.2-1b-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta/llama-3.2-3b-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta/llama-3.3-70b-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "microsoft/phi-3-medium-128k-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "microsoft/phi-3-medium-4k-instruct",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "microsoft/phi-3-mini-128k-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "microsoft/phi-3-mini-4k-instruct",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "microsoft/phi-3-small-128k-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "microsoft/phi-3-small-8k-instruct",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "microsoft/phi-3.5-mini",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "microsoft/phi-3.5-moe-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mistralai/codestral-22b-instruct-v0.1",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "mistralai/mamba-codestral-7b-v0.1",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mistralai/mistral-2-large-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mistralai/mathstral-7b-v01",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mistralai/mistral-7b-instruct",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mistralai/mistral-7b-instruct-v0.3",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mistralai/mixtral-8x7b-instruct",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mistralai/mixtral-8x22b-instruct",
|
|
"tags": "LLM,CHAT,64K",
|
|
"max_tokens": 65536,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mistralai/mistral-large",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "mistralai/mistral-small-24b-instruct",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/llama3-chatqa-1.5-8b",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/llama-3.1-nemoguard-8b-content-safety",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/llama-3.1-nemoguard-8b-topic-control",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/llama-3.1-nemotron-51b-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/llama-3.1-nemotron-70b-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/llama-3.1-nemotron-70b-reward",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 128000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/llama3-chatqa-1.5-70b",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/mistral-nemo-minitron-8b-base",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/mistral-nemo-minitron-8b-8k-instruct",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/nemotron-4-340b-instruct",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "nvidia/nemotron-4-340b-reward",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/nemotron-4-mini-hindi-4b-instruct",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/nemotron-mini-4b-instruct",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nv-mistralai/mistral-nemo-12b-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "qwen/qwen2-7b-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "qwen/qwen2.5-7b-instruct",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "qwen/qwen2.5-coder-7b-instruct",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "qwen/qwen2.5-coder-32b-instruct",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "rakuten/rakutenai-7b-chat",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "rakuten/rakutenai-7b-instruct",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "seallms/seallm-7b-v2.5",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "snowflake/arctic",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "tokyotech-llm/llama-3-swallow-70b-instruct-v01",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "thudm/chatglm3-6b",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "tiiuae/falcon3-7b-instruct",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "upstage/solar-10.7b-instruct",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "writer/palmyra-creative-122b",
|
|
"tags": "LLM,CHAT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "writer/palmyra-fin-70b-32k",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "writer/palmyra-med-70b-32k",
|
|
"tags": "LLM,CHAT,32K",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "writer/palmyra-med-70b",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "yentinglin/llama-3-taiwan-70b-instruct",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "zyphra/zamba2-7b-instruct",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "BAAI/bge-m3",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 8192,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "BAAI/bge-m3-unsupervised",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 8192,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "BAAI/bge-m3-retromae",
|
|
"tags": "TEXT EMBEDDING",
|
|
        "max_tokens": 8192,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "BAAI/bge-large-en-v1.5",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "BAAI/bge-base-en-v1.5",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "BAAI/bge-small-en-v1.5",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/embed-qa-4",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/llama-3.2-nv-embedqa-1b-v1",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/llama-3.2-nv-embedqa-1b-v2",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 8192,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/llama-3.2-nv-rerankqa-1b-v1",
|
|
"tags": "RE-RANK,512",
|
|
"max_tokens": 512,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/llama-3.2-nv-rerankqa-1b-v2",
|
|
"tags": "RE-RANK,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/nvclip",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 1024,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/nv-embed-v1",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 4096,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/nv-embedqa-e5-v5",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 1024,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/nv-embedqa-mistral-7b-v2",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 4096,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/nv-rerankqa-mistral-4b-v3",
|
|
"tags": "RE-RANK,512",
|
|
"max_tokens": 512,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/rerank-qa-mistral-4b",
|
|
"tags": "RE-RANK,512",
|
|
"max_tokens": 512,
|
|
        "model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "snowflake-arctic-embed-xs",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "snowflake-arctic-embed-s",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "snowflake-arctic-embed-m",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "snowflake-arctic-embed-m-long",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "snowflake-arctic-embed-l",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
|
|
|
|
{
|
|
"llm_name": "adept/fuyu-8b",
|
|
"tags": "IMAGE2TEXT,1K",
|
|
"max_tokens": 1024,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "google/deplot",
|
|
"tags": "IMAGE2TEXT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "google/paligemma",
|
|
"tags": "IMAGE2TEXT,256K",
|
|
"max_tokens": 256000,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "meta/llama-3.2-11b-vision-instruct",
|
|
"tags": "IMAGE2TEXT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "meta/llama-3.2-90b-vision-instruct",
|
|
"tags": "IMAGE2TEXT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "microsoft/florence-2",
|
|
"tags": "IMAGE2TEXT,1K",
|
|
"max_tokens": 1024,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "microsoft/kosmos-2",
|
|
"tags": "IMAGE2TEXT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "microsoft/phi-3-vision-128k-instruct",
|
|
"tags": "IMAGE2TEXT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "microsoft/phi-3.5-vision-instruct",
|
|
"tags": "IMAGE2TEXT,128K",
|
|
"max_tokens": 131072,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "nvidia/neva-22b",
|
|
"tags": "IMAGE2TEXT,1K",
|
|
"max_tokens": 1024,
|
|
"model_type": "image2text"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "LM-Studio",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "Cohere",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING, TEXT RE-RANK",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "command-r-plus",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "command-r",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "command",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "command-nightly",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "command-light",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "command-light-nightly",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "embed-english-v3.0",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "embed-english-light-v3.0",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "embed-multilingual-v3.0",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "embed-multilingual-light-v3.0",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "embed-english-v2.0",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "embed-english-light-v2.0",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "embed-multilingual-v2.0",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 256,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "rerank-english-v3.0",
|
|
"tags": "RE-RANK,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "rerank-multilingual-v3.0",
|
|
"tags": "RE-RANK,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "rerank-english-v2.0",
|
|
"tags": "RE-RANK,512",
|
|
"max_tokens": 512,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "rerank-multilingual-v2.0",
|
|
"tags": "RE-RANK,512",
|
|
"max_tokens": 512,
|
|
"model_type": "rerank"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "LeptonAI",
|
|
"logo": "",
|
|
"tags": "LLM",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "dolphin-mixtral-8x7b",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "gemma-7b",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "llama3-1-8b",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "llama3-8b",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "llama2-13b",
|
|
"tags": "LLM,CHAT,4K",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "llama3-1-70b",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "llama3-70b",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "llama3-1-405b",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "mistral-7b",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mistral-8x7b",
|
|
"tags": "LLM,CHAT,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nous-hermes-llama2",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "openchat-3-5",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "toppy-m-7b",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "wizardlm-2-7b",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "wizardlm-2-8x22b",
|
|
"tags": "LLM,CHAT,64K",
|
|
"max_tokens": 65536,
|
|
"model_type": "chat"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "TogetherAI",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "PerfXCloud",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "deepseek-v2-chat",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "llama3.1:405b",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Qwen2-72B-Instruct",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Qwen2-72B-Instruct-GPTQ-Int4",
|
|
"tags": "LLM,CHAT,2k",
|
|
"max_tokens": 2048,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Qwen2-72B-Instruct-awq-int4",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Llama3-Chinese_v2",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Yi-1_5-9B-Chat-16K",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen1.5-72B-Chat-GPTQ-Int4",
|
|
"tags": "LLM,CHAT,2k",
|
|
"max_tokens": 2048,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Meta-Llama-3.1-8B-Instruct",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Qwen2-7B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "deepseek-v2-lite-chat",
|
|
"tags": "LLM,CHAT,2k",
|
|
"max_tokens": 2048,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Qwen2-7B",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "chatglm3-6b",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Meta-Llama-3-70B-Instruct-GPTQ-Int4",
|
|
"tags": "LLM,CHAT,1k",
|
|
"max_tokens": 1024,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Meta-Llama-3-8B-Instruct",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Mistral-7B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "MindChat-Qwen-7B-v2",
|
|
"tags": "LLM,CHAT,2k",
|
|
"max_tokens": 2048,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "phi-2",
|
|
"tags": "LLM,CHAT,2k",
|
|
"max_tokens": 2048,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "SOLAR-10_7B-Instruct",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Mixtral-8x7B-Instruct-v0.1-GPTQ",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen1.5-7B",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "BAAI/bge-large-en-v1.5",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "BAAI/bge-large-zh-v1.5",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 1024,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "BAAI/bge-m3",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 8192,
|
|
"model_type": "embedding"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "Upstage",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "solar-1-mini-chat",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "solar-1-mini-chat-ja",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "solar-embedding-1-large-query",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 4000,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "solar-embedding-1-large-passage",
|
|
"tags": "TEXT EMBEDDING",
|
|
"max_tokens": 4000,
|
|
"model_type": "embedding"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "novita.ai",
|
|
"logo": "",
|
|
"tags": "LLM,IMAGE2TEXT",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "deepseek/deepseek-r1",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek/deepseek_v3",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek/deepseek-r1-distill-llama-70b",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek/deepseek-r1-distill-qwen-32b",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek/deepseek-r1-distill-qwen-14b",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek/deepseek-r1-distill-llama-8b",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/llama-3.3-70b-instruct",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/llama-3.2-11b-vision-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/llama-3.2-3b-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/llama-3.2-1b-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/llama-3.1-70b-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/llama-3.1-8b-instruct",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/llama-3.1-8b-instruct-bf16",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/llama-3-70b-instruct",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/llama-3-8b-instruct",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "qwen/qwen-2.5-72b-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "qwen/qwen-2-vl-72b-instruct",
|
|
"tags": "LLM,IMAGE2TEXT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "qwen/qwen-2-7b-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mistralai/mistral-nemo",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "mistralai/mistral-7b-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Sao10K/L3-8B-Stheno-v3.2",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "sao10k/l3-70b-euryale-v2.1",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "sao10k/l3-8b-lunaris",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "sao10k/l31-70b-euryale-v2.2",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nousresearch/hermes-2-pro-llama-3-8b",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "nousresearch/nous-hermes-llama2-13b",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "openchat/openchat-7b",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "gryphe/mythomax-l2-13b",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "microsoft/wizardlm-2-8x22b",
|
|
"tags": "LLM,CHAT,65k",
|
|
"max_tokens": 65535,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "google/gemma-2-9b-it",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "cognitivecomputations/dolphin-mixtral-8x22b",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "jondurbin/airoboros-l2-70b",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "teknium/openhermes-2.5-mistral-7b",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "sophosympatheia/midnight-rose-70b",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "SILICONFLOW",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,TEXT RE-RANK",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "deepseek-ai/DeepSeek-R1",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Pro/deepseek-ai/DeepSeek-R1",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Pro/deepseek-ai/DeepSeek-V3",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "deepseek-ai/DeepSeek-V3",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Pro/deepseek-ai/DeepSeek-R1-Distill-Llama-8B",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Pro/deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Pro/deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/QwQ-32B-Preview",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/QwQ-32B",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/Qwen2.5-Coder-32B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "AIDC-AI/Marco-o1",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek-ai/deepseek-vl2",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek-ai/Janus-Pro-7B",
|
|
"tags": "LLM,IMAGE2TEXT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/QVQ-72B-Preview",
|
|
"tags": "LLM,IMAGE2TEXT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "image2text"
|
|
},
|
|
{
|
|
"llm_name": "FunAudioLLM/CosyVoice2-0.5B",
|
|
"tags": "LLM,TTS,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "tts"
|
|
},
|
|
{
|
|
"llm_name": "fishaudio/fish-speech-1.5",
|
|
"tags": "LLM,TTS,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "tts"
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/Llama-3.3-70B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Qwen/Qwen2-7B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/Qwen2-1.5B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/Qwen1.5-7B-Chat",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "THUDM/glm-4-9b-chat",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "THUDM/chatglm3-6b",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "01-ai/Yi-1.5-9B-Chat-16K",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "01-ai/Yi-1.5-6B-Chat",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "google/gemma-2-9b-it",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "internlm/internlm2_5-7b-chat",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/Meta-Llama-3.1-8B-Instruct",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Pro/Qwen/Qwen2-7B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Pro/Qwen/Qwen2-1.5B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Pro/THUDM/glm-4-9b-chat",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Pro/01-ai/Yi-1.5-6B-Chat",
|
|
"tags": "LLM,CHAT,4k",
|
|
"max_tokens": 4096,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Pro/google/gemma-2-9b-it",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "Pro/meta-llama/Meta-Llama-3.1-8B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Pro/mistralai/Mistral-7B-Instruct-v0.2",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/Qwen2-Math-72B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/Qwen1.5-32B-Chat",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/Qwen2.5-72B-Instruct-128K",
|
|
"tags": "LLM,CHAT,128k",
|
|
"max_tokens": 131072,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/Qwen2.5-72B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/Qwen2.5-7B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/Qwen2.5-14B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/Qwen2.5-32B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Qwen/Qwen2.5-Coder-7B-Instruct",
|
|
"tags": "LLM,CHAT,FIM,Coder,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Pro/Qwen/Qwen2.5-7B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "Pro/Qwen/Qwen2.5-Coder-7B-Instruct",
|
|
"tags": "LLM,CHAT,FIM,Coder,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "01-ai/Yi-1.5-34B-Chat-16K",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "internlm/internlm2_5-20b-chat",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/Meta-Llama-3.1-405B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/Meta-Llama-3.1-70B-Instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "google/gemma-2-27b-it",
|
|
"tags": "LLM,CHAT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "BAAI/bge-m3",
|
|
"tags": "TEXT EMBEDDING,8K",
|
|
"max_tokens": 8192,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "BAAI/bge-large-en-v1.5",
|
|
"tags": "TEXT EMBEDDING,512",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "BAAI/bge-large-zh-v1.5",
|
|
"tags": "TEXT EMBEDDING,512",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "netease-youdao/bce-embedding-base_v1",
|
|
"tags": "TEXT EMBEDDING,512",
|
|
"max_tokens": 512,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "BAAI/bge-reranker-v2-m3",
|
|
"tags": "RE-RANK, 512",
|
|
"max_tokens": 1024,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "netease-youdao/bce-reranker-base_v1",
|
|
"tags": "RE-RANK, 512",
|
|
"max_tokens": 1024,
|
|
"model_type": "rerank"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "PPIO",
|
|
"logo": "",
|
|
"tags": "LLM",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "deepseek/deepseek-r1/community",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek/deepseek-v3/community",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek/deepseek-r1",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek/deepseek-v3",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek/deepseek-r1-distill-llama-70b",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek/deepseek-r1-distill-qwen-32b",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek/deepseek-r1-distill-qwen-14b",
|
|
"tags": "LLM,CHAT,64k",
|
|
"max_tokens": 64000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "deepseek/deepseek-r1-distill-llama-8b",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "qwen/qwen-2.5-72b-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "qwen/qwen-2-vl-72b-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/llama-3.2-3b-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "qwen/qwen2.5-32b-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32000,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "baichuan/baichuan2-13b-chat",
|
|
"tags": "LLM,CHAT,14k",
|
|
"max_tokens": 14336,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/llama-3.1-70b-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "meta-llama/llama-3.1-8b-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "01-ai/yi-1.5-34b-chat",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "01-ai/yi-1.5-9b-chat",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "thudm/glm-4-9b-chat",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "qwen/qwen-2-7b-instruct",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "01.AI",
|
|
"logo": "",
|
|
"tags": "LLM,IMAGE2TEXT",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "yi-lightning",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "yi-large",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "yi-medium",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "yi-medium-200k",
|
|
"tags": "LLM,CHAT,200k",
|
|
"max_tokens": 204800,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "yi-spark",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "yi-large-rag",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "yi-large-fc",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "yi-large-turbo",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "yi-large-preview",
|
|
"tags": "LLM,CHAT,16k",
|
|
"max_tokens": 16384,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "yi-vision",
|
|
"tags": "LLM,CHAT,IMAGE2TEXT,16k",
|
|
"max_tokens": 16384,
|
|
"model_type": "image2text"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "Replicate",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "Tencent Hunyuan",
|
|
"logo": "",
|
|
"tags": "LLM,IMAGE2TEXT",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "hunyuan-pro",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "hunyuan-standard",
|
|
"tags": "LLM,CHAT,32k",
|
|
"max_tokens": 32768,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "hunyuan-standard-256K",
|
|
"tags": "LLM,CHAT,256k",
|
|
"max_tokens": 262144,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "hunyuan-lite",
|
|
"tags": "LLM,CHAT,256k",
|
|
"max_tokens": 262144,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "hunyuan-vision",
|
|
"tags": "LLM,IMAGE2TEXT,8k",
|
|
"max_tokens": 8192,
|
|
"model_type": "image2text"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "XunFei Spark",
|
|
"logo": "",
|
|
"tags": "LLM,TTS",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "BaiduYiyan",
|
|
"logo": "",
|
|
"tags": "LLM",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "Fish Audio",
|
|
"logo": "",
|
|
"tags": "TTS",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "Tencent Cloud",
|
|
"logo": "",
|
|
"tags": "SPEECH2TEXT",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "Anthropic",
|
|
"logo": "",
|
|
"tags": "LLM",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "claude-3-7-sonnet-20250219",
|
|
"tags": "LLM,IMAGE2TEXT,200k",
|
|
"max_tokens": 204800,
|
|
"model_type": "image2text",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "claude-3-5-sonnet-20241022",
|
|
"tags": "LLM,IMAGE2TEXT,200k",
|
|
"max_tokens": 204800,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "claude-3-opus-20240229",
|
|
"tags": "LLM,IMAGE2TEXT,200k",
|
|
"max_tokens": 204800,
|
|
"model_type": "chat",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "claude-3-haiku-20240307",
|
|
"tags": "LLM,IMAGE2TEXT,200k",
|
|
"max_tokens": 204800,
|
|
"model_type": "image2text",
|
|
"is_tools": true
|
|
},
|
|
{
|
|
"llm_name": "claude-2.1",
|
|
"tags": "LLM,CHAT,200k",
|
|
"max_tokens": 204800,
|
|
"model_type": "chat"
|
|
},
|
|
{
|
|
"llm_name": "claude-2.0",
|
|
"tags": "LLM,CHAT,100k",
|
|
"max_tokens": 102400,
|
|
"model_type": "chat"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "Voyage AI",
|
|
"logo": "",
|
|
        "tags": "TEXT EMBEDDING,TEXT RE-RANK",
|
|
"status": "1",
|
|
"llm": [
|
|
{
|
|
"llm_name": "voyage-large-2-instruct",
|
|
"tags": "TEXT EMBEDDING,16000",
|
|
"max_tokens": 16000,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "voyage-finance-2",
|
|
"tags": "TEXT EMBEDDING,32000",
|
|
"max_tokens": 32000,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "voyage-multilingual-2",
|
|
"tags": "TEXT EMBEDDING,32000",
|
|
"max_tokens": 32000,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "voyage-law-2",
|
|
"tags": "TEXT EMBEDDING,16000",
|
|
"max_tokens": 16000,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "voyage-code-2",
|
|
"tags": "TEXT EMBEDDING,16000",
|
|
"max_tokens": 16000,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "voyage-large-2",
|
|
"tags": "TEXT EMBEDDING,16000",
|
|
"max_tokens": 16000,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "voyage-2",
|
|
"tags": "TEXT EMBEDDING,4000",
|
|
"max_tokens": 4000,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "voyage-3",
|
|
"tags": "TEXT EMBEDDING,32000",
|
|
"max_tokens": 32000,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "voyage-3-lite",
|
|
"tags": "TEXT EMBEDDING,32000",
|
|
"max_tokens": 32000,
|
|
"model_type": "embedding"
|
|
},
|
|
{
|
|
"llm_name": "rerank-1",
|
|
                "tags": "RE-RANK,8000",
|
|
"max_tokens": 8000,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "rerank-lite-1",
|
|
                "tags": "RE-RANK,4000",
|
|
"max_tokens": 4000,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "rerank-2",
|
|
                "tags": "RE-RANK,16000",
|
|
"max_tokens": 16000,
|
|
"model_type": "rerank"
|
|
},
|
|
{
|
|
"llm_name": "rerank-2-lite",
|
|
                "tags": "RE-RANK,8000",
|
|
"max_tokens": 8000,
|
|
"model_type": "rerank"
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"name": "Google Cloud",
|
|
"logo": "",
|
|
"tags": "LLM",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "HuggingFace",
|
|
"logo": "",
|
|
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
|
|
"status": "1",
|
|
"llm": []
|
|
},
|
|
{
|
|
"name": "GPUStack",
|
|
"logo": "",
|
|
"tags": "LLM,TEXT EMBEDDING,TTS,SPEECH2TEXT,TEXT RE-RANK",
|
|
"status": "1",
|
|
"llm": []
|
|
}
|
|
]
|
|
}
|