https://github.com/infiniflow/ragflow

load llm information from a json file and add support for OpenRouter (#1533)

### What problem does this PR solve?

#1467

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

Co-authored-by: Zhedong Cen <cenzhedong2@126.com>

parent 3657b1f2a2
commit 75086f41a9
@@ -57,8 +57,8 @@ def set_api_key():
            mdl = ChatModel[factory](
                req["api_key"], llm.llm_name, base_url=req.get("base_url"))
            try:
                m, tc = mdl.chat(None, [{"role": "user", "content": "Hello! How are you doing!"}], {
                    "temperature": 0.9})
                m, tc = mdl.chat(None, [{"role": "user", "content": "Hello! How are you doing!"}],
                                 {"temperature": 0.9, 'max_tokens': 50})
                if not tc:
                    raise Exception(m)
            except Exception as e:
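The change above caps the connectivity test at 50 output tokens, so validating an API key no longer pays for an unbounded completion. Outside of RAGFlow, the same smoke test against an OpenAI-compatible endpoint such as OpenRouter would look roughly like the sketch below; the base URL, key, and model id are placeholders, not values taken from this PR.

```python
from openai import OpenAI

# Hypothetical standalone version of the key check sketched from the hunk above.
client = OpenAI(base_url="https://openrouter.ai/api/v1", api_key="sk-or-...")
resp = client.chat.completions.create(
    model="openai/gpt-3.5-turbo",  # any model id the provider exposes
    messages=[{"role": "user", "content": "Hello! How are you doing!"}],
    temperature=0.9,
    max_tokens=50,  # keep the probe cheap, mirroring the change above
)
print(resp.choices[0].message.content, resp.usage.total_tokens)
```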
@@ -89,904 +89,29 @@ def init_superuser():
                        tenant["embd_id"]))


factory_infos = [
    {"name": "OpenAI", "logo": "", "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", "status": "1"},
    {"name": "Tongyi-Qianwen", "logo": "", "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", "status": "1"},
    {"name": "ZHIPU-AI", "logo": "", "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", "status": "1"},
    {"name": "Ollama", "logo": "", "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", "status": "1"},
    {"name": "Moonshot", "logo": "", "tags": "LLM,TEXT EMBEDDING", "status": "1"},
    {"name": "FastEmbed", "logo": "", "tags": "TEXT EMBEDDING", "status": "1"},
    {"name": "Xinference", "logo": "", "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION,TEXT RE-RANK", "status": "1"},
    {"name": "Youdao", "logo": "", "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", "status": "1"},
    {"name": "DeepSeek", "logo": "", "tags": "LLM", "status": "1"},
    {"name": "VolcEngine", "logo": "", "tags": "LLM, TEXT EMBEDDING", "status": "1"},
    {"name": "BaiChuan", "logo": "", "tags": "LLM,TEXT EMBEDDING", "status": "1"},
    {"name": "Jina", "logo": "", "tags": "TEXT EMBEDDING, TEXT RE-RANK", "status": "1"},
    {"name": "BAAI", "logo": "", "tags": "TEXT EMBEDDING, TEXT RE-RANK", "status": "1"},
    {"name": "MiniMax", "logo": "", "tags": "LLM,TEXT EMBEDDING", "status": "1"},
    {"name": "Mistral", "logo": "", "tags": "LLM,TEXT EMBEDDING", "status": "1"},
    {"name": "Azure-OpenAI", "logo": "", "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", "status": "1"},
    {"name": "Bedrock", "logo": "", "tags": "LLM,TEXT EMBEDDING", "status": "1"},
    {"name": "Gemini", "logo": "", "tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT", "status": "1"},
    {"name": "Groq", "logo": "", "tags": "LLM", "status": "1"},
    # {"name": "文心一言", "logo": "", "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION", "status": "1"},
]

def init_llm_factory():
    llm_infos = [
        # ---------------------- OpenAI ------------------------
        {"fid": factory_infos[0]["name"], "llm_name": "gpt-4o", "tags": "LLM,CHAT,128K", "max_tokens": 128000, "model_type": LLMType.CHAT.value + "," + LLMType.IMAGE2TEXT.value},
        {"fid": factory_infos[0]["name"], "llm_name": "gpt-3.5-turbo", "tags": "LLM,CHAT,4K", "max_tokens": 4096, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[0]["name"], "llm_name": "gpt-3.5-turbo-16k-0613", "tags": "LLM,CHAT,16k", "max_tokens": 16385, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[0]["name"], "llm_name": "text-embedding-ada-002", "tags": "TEXT EMBEDDING,8K", "max_tokens": 8191, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[0]["name"], "llm_name": "text-embedding-3-small", "tags": "TEXT EMBEDDING,8K", "max_tokens": 8191, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[0]["name"], "llm_name": "text-embedding-3-large", "tags": "TEXT EMBEDDING,8K", "max_tokens": 8191, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[0]["name"], "llm_name": "whisper-1", "tags": "SPEECH2TEXT", "max_tokens": 25 * 1024 * 1024, "model_type": LLMType.SPEECH2TEXT.value},
        {"fid": factory_infos[0]["name"], "llm_name": "gpt-4", "tags": "LLM,CHAT,8K", "max_tokens": 8191, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[0]["name"], "llm_name": "gpt-4-turbo", "tags": "LLM,CHAT,8K", "max_tokens": 8191, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[0]["name"], "llm_name": "gpt-4-32k", "tags": "LLM,CHAT,32K", "max_tokens": 32768, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[0]["name"], "llm_name": "gpt-4-vision-preview", "tags": "LLM,CHAT,IMAGE2TEXT", "max_tokens": 765, "model_type": LLMType.IMAGE2TEXT.value},
        # ----------------------- Qwen -----------------------
        {"fid": factory_infos[1]["name"], "llm_name": "qwen-turbo", "tags": "LLM,CHAT,8K", "max_tokens": 8191, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[1]["name"], "llm_name": "qwen-plus", "tags": "LLM,CHAT,32K", "max_tokens": 32768, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[1]["name"], "llm_name": "qwen-max-1201", "tags": "LLM,CHAT,6K", "max_tokens": 5899, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[1]["name"], "llm_name": "text-embedding-v2", "tags": "TEXT EMBEDDING,2K", "max_tokens": 2048, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[1]["name"], "llm_name": "paraformer-realtime-8k-v1", "tags": "SPEECH2TEXT", "max_tokens": 25 * 1024 * 1024, "model_type": LLMType.SPEECH2TEXT.value},
        {"fid": factory_infos[1]["name"], "llm_name": "qwen-vl-max", "tags": "LLM,CHAT,IMAGE2TEXT", "max_tokens": 765, "model_type": LLMType.IMAGE2TEXT.value},
        # ---------------------- ZhipuAI ----------------------
        {"fid": factory_infos[2]["name"], "llm_name": "glm-3-turbo", "tags": "LLM,CHAT,", "max_tokens": 128 * 1000, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[2]["name"], "llm_name": "glm-4", "tags": "LLM,CHAT,", "max_tokens": 128 * 1000, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[2]["name"], "llm_name": "glm-4v", "tags": "LLM,CHAT,IMAGE2TEXT", "max_tokens": 2000, "model_type": LLMType.IMAGE2TEXT.value},
        {"fid": factory_infos[2]["name"], "llm_name": "embedding-2", "tags": "TEXT EMBEDDING", "max_tokens": 512, "model_type": LLMType.EMBEDDING.value},
        # ------------------------ Moonshot -----------------------
        {"fid": factory_infos[4]["name"], "llm_name": "moonshot-v1-8k", "tags": "LLM,CHAT,", "max_tokens": 7900, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[4]["name"], "llm_name": "moonshot-v1-32k", "tags": "LLM,CHAT,", "max_tokens": 32768, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[4]["name"], "llm_name": "moonshot-v1-128k", "tags": "LLM,CHAT", "max_tokens": 128 * 1000, "model_type": LLMType.CHAT.value},
        # ------------------------ FastEmbed -----------------------
        {"fid": factory_infos[5]["name"], "llm_name": "BAAI/bge-small-en-v1.5", "tags": "TEXT EMBEDDING,", "max_tokens": 512, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[5]["name"], "llm_name": "BAAI/bge-small-zh-v1.5", "tags": "TEXT EMBEDDING,", "max_tokens": 512, "model_type": LLMType.EMBEDDING.value},
        {},
        {"fid": factory_infos[5]["name"], "llm_name": "BAAI/bge-base-en-v1.5", "tags": "TEXT EMBEDDING,", "max_tokens": 512, "model_type": LLMType.EMBEDDING.value},
        {},
        {"fid": factory_infos[5]["name"], "llm_name": "BAAI/bge-large-en-v1.5", "tags": "TEXT EMBEDDING,", "max_tokens": 512, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[5]["name"], "llm_name": "sentence-transformers/all-MiniLM-L6-v2", "tags": "TEXT EMBEDDING,", "max_tokens": 512, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[5]["name"], "llm_name": "nomic-ai/nomic-embed-text-v1.5", "tags": "TEXT EMBEDDING,", "max_tokens": 8192, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[5]["name"], "llm_name": "jinaai/jina-embeddings-v2-small-en", "tags": "TEXT EMBEDDING,", "max_tokens": 2147483648, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[5]["name"], "llm_name": "jinaai/jina-embeddings-v2-base-en", "tags": "TEXT EMBEDDING,", "max_tokens": 2147483648, "model_type": LLMType.EMBEDDING.value},
        # ------------------------ Youdao -----------------------
        {"fid": factory_infos[7]["name"], "llm_name": "maidalun1020/bce-embedding-base_v1", "tags": "TEXT EMBEDDING,", "max_tokens": 512, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[7]["name"], "llm_name": "maidalun1020/bce-reranker-base_v1", "tags": "RE-RANK, 512", "max_tokens": 512, "model_type": LLMType.RERANK.value},
        # ------------------------ DeepSeek -----------------------
        {"fid": factory_infos[8]["name"], "llm_name": "deepseek-chat", "tags": "LLM,CHAT,", "max_tokens": 32768, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[8]["name"], "llm_name": "deepseek-coder", "tags": "LLM,CHAT,", "max_tokens": 16385, "model_type": LLMType.CHAT.value},
        # ------------------------ VolcEngine -----------------------
        {"fid": factory_infos[9]["name"], "llm_name": "Skylark2-pro-32k", "tags": "LLM,CHAT,32k", "max_tokens": 32768, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[9]["name"], "llm_name": "Skylark2-pro-4k", "tags": "LLM,CHAT,4k", "max_tokens": 4096, "model_type": LLMType.CHAT.value},
        # ------------------------ BaiChuan -----------------------
        {"fid": factory_infos[10]["name"], "llm_name": "Baichuan2-Turbo", "tags": "LLM,CHAT,32K", "max_tokens": 32768, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[10]["name"], "llm_name": "Baichuan2-Turbo-192k", "tags": "LLM,CHAT,192K", "max_tokens": 196608, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[10]["name"], "llm_name": "Baichuan3-Turbo", "tags": "LLM,CHAT,32K", "max_tokens": 32768, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[10]["name"], "llm_name": "Baichuan3-Turbo-128k", "tags": "LLM,CHAT,128K", "max_tokens": 131072, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[10]["name"], "llm_name": "Baichuan4", "tags": "LLM,CHAT,128K", "max_tokens": 131072, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[10]["name"], "llm_name": "Baichuan-Text-Embedding", "tags": "TEXT EMBEDDING", "max_tokens": 512, "model_type": LLMType.EMBEDDING.value},
        # ------------------------ Jina -----------------------
        {"fid": factory_infos[11]["name"], "llm_name": "jina-reranker-v1-base-en", "tags": "RE-RANK,8k", "max_tokens": 8196, "model_type": LLMType.RERANK.value},
        {"fid": factory_infos[11]["name"], "llm_name": "jina-reranker-v1-turbo-en", "tags": "RE-RANK,8k", "max_tokens": 8196, "model_type": LLMType.RERANK.value},
        {"fid": factory_infos[11]["name"], "llm_name": "jina-reranker-v1-tiny-en", "tags": "RE-RANK,8k", "max_tokens": 8196, "model_type": LLMType.RERANK.value},
        {"fid": factory_infos[11]["name"], "llm_name": "jina-colbert-v1-en", "tags": "RE-RANK,8k", "max_tokens": 8196, "model_type": LLMType.RERANK.value},
        {"fid": factory_infos[11]["name"], "llm_name": "jina-embeddings-v2-base-en", "tags": "TEXT EMBEDDING", "max_tokens": 8196, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[11]["name"], "llm_name": "jina-embeddings-v2-base-de", "tags": "TEXT EMBEDDING", "max_tokens": 8196, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[11]["name"], "llm_name": "jina-embeddings-v2-base-es", "tags": "TEXT EMBEDDING", "max_tokens": 8196, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[11]["name"], "llm_name": "jina-embeddings-v2-base-code", "tags": "TEXT EMBEDDING", "max_tokens": 8196, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[11]["name"], "llm_name": "jina-embeddings-v2-base-zh", "tags": "TEXT EMBEDDING", "max_tokens": 8196, "model_type": LLMType.EMBEDDING.value},
        # ------------------------ BAAI -----------------------
        {"fid": factory_infos[12]["name"], "llm_name": "BAAI/bge-large-zh-v1.5", "tags": "TEXT EMBEDDING,", "max_tokens": 1024, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[12]["name"], "llm_name": "BAAI/bge-reranker-v2-m3", "tags": "RE-RANK,2k", "max_tokens": 2048, "model_type": LLMType.RERANK.value},
        # ------------------------ Minimax -----------------------
        {"fid": factory_infos[13]["name"], "llm_name": "abab6.5", "tags": "LLM,CHAT,8k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[13]["name"], "llm_name": "abab6.5s", "tags": "LLM,CHAT,245k", "max_tokens": 245760, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[13]["name"], "llm_name": "abab6.5t", "tags": "LLM,CHAT,8k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[13]["name"], "llm_name": "abab6.5g", "tags": "LLM,CHAT,8k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[13]["name"], "llm_name": "abab5.5s", "tags": "LLM,CHAT,8k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        # ------------------------ Mistral -----------------------
        {"fid": factory_infos[14]["name"], "llm_name": "open-mixtral-8x22b", "tags": "LLM,CHAT,64k", "max_tokens": 64000, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[14]["name"], "llm_name": "open-mixtral-8x7b", "tags": "LLM,CHAT,32k", "max_tokens": 32000, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[14]["name"], "llm_name": "open-mistral-7b", "tags": "LLM,CHAT,32k", "max_tokens": 32000, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[14]["name"], "llm_name": "mistral-large-latest", "tags": "LLM,CHAT,32k", "max_tokens": 32000, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[14]["name"], "llm_name": "mistral-small-latest", "tags": "LLM,CHAT,32k", "max_tokens": 32000, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[14]["name"], "llm_name": "mistral-medium-latest", "tags": "LLM,CHAT,32k", "max_tokens": 32000, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[14]["name"], "llm_name": "codestral-latest", "tags": "LLM,CHAT,32k", "max_tokens": 32000, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[14]["name"], "llm_name": "mistral-embed", "tags": "LLM,CHAT,8k", "max_tokens": 8192, "model_type": LLMType.EMBEDDING},
        # ------------------------ Azure OpenAI -----------------------
        # Please ensure the llm_name is the same as the name in Azure
        # OpenAI deployment name (e.g., azure-gpt-4o). And the llm_name
        # must different from the OpenAI llm_name
        #
        # Each model must be deployed in the Azure OpenAI service, otherwise,
        # you will receive an error message 'The API deployment for
        # this resource does not exist'
        {"fid": factory_infos[15]["name"], "llm_name": "azure-gpt-4o", "tags": "LLM,CHAT,128K", "max_tokens": 128000, "model_type": LLMType.CHAT.value + "," + LLMType.IMAGE2TEXT.value},
        {"fid": factory_infos[15]["name"], "llm_name": "azure-gpt-35-turbo", "tags": "LLM,CHAT,4K", "max_tokens": 4096, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[15]["name"], "llm_name": "azure-gpt-35-turbo-16k", "tags": "LLM,CHAT,16k", "max_tokens": 16385, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[15]["name"], "llm_name": "azure-text-embedding-ada-002", "tags": "TEXT EMBEDDING,8K", "max_tokens": 8191, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[15]["name"], "llm_name": "azure-text-embedding-3-small", "tags": "TEXT EMBEDDING,8K", "max_tokens": 8191, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[15]["name"], "llm_name": "azure-text-embedding-3-large", "tags": "TEXT EMBEDDING,8K", "max_tokens": 8191, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[15]["name"], "llm_name": "azure-whisper-1", "tags": "SPEECH2TEXT", "max_tokens": 25 * 1024 * 1024, "model_type": LLMType.SPEECH2TEXT.value},
        {"fid": factory_infos[15]["name"], "llm_name": "azure-gpt-4", "tags": "LLM,CHAT,8K", "max_tokens": 8191, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[15]["name"], "llm_name": "azure-gpt-4-turbo", "tags": "LLM,CHAT,8K", "max_tokens": 8191, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[15]["name"], "llm_name": "azure-gpt-4-32k", "tags": "LLM,CHAT,32K", "max_tokens": 32768, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[15]["name"], "llm_name": "azure-gpt-4-vision-preview", "tags": "LLM,CHAT,IMAGE2TEXT", "max_tokens": 765, "model_type": LLMType.IMAGE2TEXT.value},
        # ------------------------ Bedrock -----------------------
        {"fid": factory_infos[16]["name"], "llm_name": "ai21.j2-ultra-v1", "tags": "LLM,CHAT,8k", "max_tokens": 8191, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "ai21.j2-mid-v1", "tags": "LLM,CHAT,8k", "max_tokens": 8191, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "cohere.command-text-v14", "tags": "LLM,CHAT,4k", "max_tokens": 4096, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "cohere.command-light-text-v14", "tags": "LLM,CHAT,4k", "max_tokens": 4096, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "cohere.command-r-v1:0", "tags": "LLM,CHAT,128k", "max_tokens": 128 * 1024, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "cohere.command-r-plus-v1:0", "tags": "LLM,CHAT,128k", "max_tokens": 128000, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "anthropic.claude-v2", "tags": "LLM,CHAT,100k", "max_tokens": 100 * 1024, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "anthropic.claude-v2:1", "tags": "LLM,CHAT,200k", "max_tokens": 200 * 1024, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "anthropic.claude-3-sonnet-20240229-v1:0", "tags": "LLM,CHAT,200k", "max_tokens": 200 * 1024, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "anthropic.claude-3-5-sonnet-20240620-v1:0", "tags": "LLM,CHAT,200k", "max_tokens": 200 * 1024, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "anthropic.claude-3-haiku-20240307-v1:0", "tags": "LLM,CHAT,200k", "max_tokens": 200 * 1024, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "anthropic.claude-3-opus-20240229-v1:0", "tags": "LLM,CHAT,200k", "max_tokens": 200 * 1024, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "anthropic.claude-instant-v1", "tags": "LLM,CHAT,100k", "max_tokens": 100 * 1024, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "amazon.titan-text-express-v1", "tags": "LLM,CHAT,8k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "amazon.titan-text-premier-v1:0", "tags": "LLM,CHAT,32k", "max_tokens": 32 * 1024, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "amazon.titan-text-lite-v1", "tags": "LLM,CHAT,4k", "max_tokens": 4096, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "meta.llama2-13b-chat-v1", "tags": "LLM,CHAT,4k", "max_tokens": 4096, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "meta.llama2-70b-chat-v1", "tags": "LLM,CHAT,4k", "max_tokens": 4096, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "meta.llama3-8b-instruct-v1:0", "tags": "LLM,CHAT,8k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "meta.llama3-70b-instruct-v1:0", "tags": "LLM,CHAT,8k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "mistral.mistral-7b-instruct-v0:2", "tags": "LLM,CHAT,8k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "mistral.mixtral-8x7b-instruct-v0:1", "tags": "LLM,CHAT,4k", "max_tokens": 4096, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "mistral.mistral-large-2402-v1:0", "tags": "LLM,CHAT,8k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "mistral.mistral-small-2402-v1:0", "tags": "LLM,CHAT,8k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[16]["name"], "llm_name": "amazon.titan-embed-text-v2:0", "tags": "TEXT EMBEDDING", "max_tokens": 8192, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[16]["name"], "llm_name": "cohere.embed-english-v3", "tags": "TEXT EMBEDDING", "max_tokens": 2048, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[16]["name"], "llm_name": "cohere.embed-multilingual-v3", "tags": "TEXT EMBEDDING", "max_tokens": 2048, "model_type": LLMType.EMBEDDING.value},
        {"fid": factory_infos[17]["name"], "llm_name": "gemini-1.5-pro-latest", "tags": "LLM,CHAT,1024K", "max_tokens": 1024 * 1024, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[17]["name"], "llm_name": "gemini-1.5-flash-latest", "tags": "LLM,CHAT,1024K", "max_tokens": 1024 * 1024, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[17]["name"], "llm_name": "gemini-1.0-pro", "tags": "LLM,CHAT,30K", "max_tokens": 30 * 1024, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[17]["name"], "llm_name": "gemini-1.0-pro-vision-latest", "tags": "LLM,IMAGE2TEXT,12K", "max_tokens": 12 * 1024, "model_type": LLMType.IMAGE2TEXT.value},
        {"fid": factory_infos[17]["name"], "llm_name": "text-embedding-004", "tags": "TEXT EMBEDDING", "max_tokens": 2048, "model_type": LLMType.EMBEDDING.value},
        # ------------------------ Groq -----------------------
        {"fid": factory_infos[18]["name"], "llm_name": "gemma-7b-it", "tags": "LLM,CHAT,15k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[18]["name"], "llm_name": "gemma2-9b-it", "tags": "LLM,CHAT,15k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[18]["name"], "llm_name": "llama3-70b-8192", "tags": "LLM,CHAT,6k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[18]["name"], "llm_name": "llama3-8b-8192", "tags": "LLM,CHAT,30k", "max_tokens": 8192, "model_type": LLMType.CHAT.value},
        {"fid": factory_infos[18]["name"], "llm_name": "mixtral-8x7b-32768", "tags": "LLM,CHAT,5k", "max_tokens": 32768, "model_type": LLMType.CHAT.value},
    ]
    for info in factory_infos:
    factory_llm_infos = json.load(
        open(
            os.path.join(get_project_base_directory(), "conf", "llm_factories.json"),
            "r",
        )
    )
    for factory_llm_info in factory_llm_infos["factory_llm_infos"]:
        llm_infos = factory_llm_info.pop("llm")
        try:
            LLMFactoriesService.save(**info)
        except Exception as e:
            pass

    LLMService.filter_delete([(LLM.fid == "MiniMax" or LLM.fid == "Minimax")])
    for info in llm_infos:
        try:
            LLMService.save(**info)
            LLMFactoriesService.save(**factory_llm_info)
        except Exception as e:
            pass
        for llm_info in llm_infos:
            llm_info["fid"] = factory_llm_info["name"]
            try:
                LLMService.save(**llm_info)
            except Exception as e:
                pass
    try:
        LLMService.filter_delete([(LLM.fid == "MiniMax" or LLM.fid == "Minimax")])
    except Exception as e:
        pass

    LLMFactoriesService.filter_delete([LLMFactories.name == "Local"])
    LLMService.filter_delete([LLM.fid == "Local"])
conf/llm_factories.json (new file, 1884 lines): file diff suppressed because it is too large.
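Since the diff of conf/llm_factories.json is suppressed, its layout has to be inferred from the loader above: a top-level "factory_llm_infos" list, where each factory entry carries the fields that used to live in the hardcoded factory_infos list plus an "llm" list of model entries, and each model is written back with its factory name as "fid". The sketch below is illustrative only, not copied from the actual file; the "model_type" strings are presumably the serialized LLMType values.

```json
{
  "factory_llm_infos": [
    {
      "name": "OpenAI",
      "logo": "",
      "tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
      "status": "1",
      "llm": [
        {"llm_name": "gpt-4o", "tags": "LLM,CHAT,128K", "max_tokens": 128000, "model_type": "chat,image2text"},
        {"llm_name": "text-embedding-3-small", "tags": "TEXT EMBEDDING,8K", "max_tokens": 8191, "model_type": "embedding"}
      ]
    }
  ]
}
```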
@@ -45,7 +45,8 @@ CvModel = {
    "Tongyi-Qianwen": QWenCV,
    "ZHIPU-AI": Zhipu4V,
    "Moonshot": LocalCV,
    'Gemini':GeminiCV
    'Gemini':GeminiCV,
    'OpenRouter':OpenRouterCV
}

@@ -65,7 +66,8 @@ ChatModel = {
    "Mistral": MistralChat,
    'Gemini' : GeminiChat,
    "Bedrock": BedrockChat,
    "Groq": GroqChat
    "Groq": GroqChat,
    'OpenRouter':OpenRouterChat
}

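With the two registry entries above, selecting a provider stays a plain dictionary lookup keyed by the factory name; nothing changes at the call sites. A rough usage sketch follows, assuming the repo's rag.llm package layout; the API key and model ids are placeholders.

```python
from rag.llm import ChatModel, CvModel

# The factory name picks the implementation class; construction is uniform
# across providers (key, model name, optional extras).
chat_mdl = ChatModel["OpenRouter"]("sk-or-...", "openai/gpt-4o")
cv_mdl = CvModel["OpenRouter"]("sk-or-...", "openai/gpt-4o", lang="English")
```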
@@ -685,7 +685,6 @@ class GeminiChat(Base):
            yield response._chunks[-1].usage_metadata.total_token_count


class GroqChat:
    def __init__(self, key, model_name,base_url=''):
        self.client = Groq(api_key=key)

@@ -697,7 +696,6 @@ class GroqChat:
        for k in list(gen_conf.keys()):
            if k not in ["temperature", "top_p", "max_tokens"]:
                del gen_conf[k]

        ans = ""
        try:
            response = self.client.chat.completions.create(

@@ -707,7 +705,7 @@ class GroqChat:
            )
            ans = response.choices[0].message.content
            if response.choices[0].finish_reason == "length":
                ans += "...\nFor the content length reason, it stopped, continue?" if self.is_english(
                ans += "...\nFor the content length reason, it stopped, continue?" if is_english(
                    [ans]) else "······\n由于长度的原因,回答被截断了,要继续吗?"
            return ans, response.usage.total_tokens
        except Exception as e:

@@ -734,11 +732,20 @@ class GroqChat:
                    ans += resp.choices[0].delta.content
                    total_tokens += 1
                    if resp.choices[0].finish_reason == "length":
                        ans += "...\nFor the content length reason, it stopped, continue?" if self.is_english(
                        ans += "...\nFor the content length reason, it stopped, continue?" if is_english(
                            [ans]) else "······\n由于长度的原因,回答被截断了,要继续吗?"
                    yield ans

        except Exception as e:
            yield ans + "\n**ERROR**: " + str(e)

        yield total_tokens
        yield total_tokens


## openrouter
class OpenRouterChat(Base):
    def __init__(self, key, model_name, base_url="https://openrouter.ai/api/v1"):
        self.base_url = "https://openrouter.ai/api/v1"
        self.client = OpenAI(base_url=self.base_url, api_key=key)
        self.model_name = model_name
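OpenRouterChat only pins the OpenRouter base URL; chat() and chat_streamly(), including token accounting, are inherited from the OpenAI-compatible Base class. A hedged usage sketch, with the key and model id as placeholders and the chat() call shape taken from the set_api_key hunk earlier in this diff:

```python
# Assumes the repo's rag.llm.chat_model module; key and model id are placeholders.
from rag.llm.chat_model import OpenRouterChat

mdl = OpenRouterChat("sk-or-...", "anthropic/claude-3-haiku")
ans, tokens = mdl.chat(
    None,  # no system prompt
    [{"role": "user", "content": "Hello! How are you doing!"}],
    {"temperature": 0.9, "max_tokens": 50},
)
print(ans, tokens)
```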
@@ -23,6 +23,8 @@ from openai import OpenAI
import os
import base64
from io import BytesIO
import json
import requests

from api.utils import get_uuid
from api.utils.file_utils import get_project_base_directory

@@ -212,7 +214,7 @@ class GeminiCV(Base):
            self.model = GenerativeModel(model_name=self.model_name)
            self.model._client = _client
        self.lang = lang


    def describe(self, image, max_tokens=2048):
        from PIL.Image import open
        gen_config = {'max_output_tokens':max_tokens}
@@ -227,6 +229,63 @@ class GeminiCV(Base):
        )
        return res.text,res.usage_metadata.total_token_count


class OpenRouterCV(Base):
    def __init__(
        self,
        key,
        model_name,
        lang="Chinese",
        base_url="https://openrouter.ai/api/v1/chat/completions",
    ):
        self.model_name = model_name
        self.lang = lang
        self.base_url = "https://openrouter.ai/api/v1/chat/completions"
        self.key = key

    def describe(self, image, max_tokens=300):
        b64 = self.image2base64(image)
        response = requests.post(
            url=self.base_url,
            headers={
                "Authorization": f"Bearer {self.key}",
            },
            data=json.dumps(
                {
                    "model": self.model_name,
                    "messages": self.prompt(b64),
                    "max_tokens": max_tokens,
                }
            ),
        )
        response = response.json()
        return (
            response["choices"][0]["message"]["content"].strip(),
            response["usage"]["total_tokens"],
        )

    def prompt(self, b64):
        return [
            {
                "role": "user",
                "content": [
                    {
                        "type": "image_url",
                        "image_url": {"url": f"data:image/jpeg;base64,{b64}"},
                    },
                    {
                        "type": "text",
                        "text": (
                            "请用中文详细描述一下图中的内容,比如时间,地点,人物,事情,人物心情等,如果有数据请提取出数据。"
                            if self.lang.lower() == "chinese"
                            else "Please describe the content of this picture, like where, when, who, what happen. If it has number data, please extract them out."
                        ),
                    },
                ],
            }
        ]


class LocalCV(Base):
    def __init__(self, key, model_name="glm-4v", lang="Chinese", **kwargs):
        pass
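Unlike OpenRouterChat, OpenRouterCV does not go through the OpenAI SDK: describe() base64-encodes the image via the Base helper and POSTs the multimodal message straight to the chat/completions endpoint with requests. A hedged usage sketch follows; the JPEG path and key are placeholders, and it assumes Base.image2base64 accepts raw bytes as it does elsewhere in the module.

```python
# Assumes the repo's rag.llm.cv_model module; path and key are placeholders.
from rag.llm.cv_model import OpenRouterCV

with open("sample.jpg", "rb") as f:
    image_bytes = f.read()

mdl = OpenRouterCV("sk-or-...", "openai/gpt-4o", lang="English")
description, tokens = mdl.describe(image_bytes, max_tokens=300)
print(description, tokens)
```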
web/src/assets/svg/llm/open-router.svg (new file, 18 lines, 16x16, 1.5 KiB)
@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" width="16px" height="16px" viewBox="0 0 16 16" enable-background="new 0 0 16 16" xml:space="preserve"> <image id="image0" width="16" height="16" x="0" y="0"
xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAAAIGNIUk0AAHomAACAhAAA+gAAAIDo
AAB1MAAA6mAAADqYAAAXcJy6UTwAAADnUExURQAAAJSkuZeluJmZmZajt5SkuZSkuJSkuJSjuJSj
uJSkuZOnupaju5Sjt5WjuJSiuYuiuZajuZWjuJSjuJSiuJKktr+/v5WkuZSjuJSjuJSkt5ent5eh
vZWkuZSjuJWjuZWjuZSiuJSjuJSkuP///5Oit4CAgJSjuJeiuZSjuJSkuZmZzJOlt5WkuJSkuJSm
uJSkuJSjuJmzs5Oit5Sjt5SjuJWkuZWjuZWjuZSjuJCmsZOkupSjuJSjuJOjuJWqqpSkuJSjt5Sj
uI+fv5ilupSkupWjuJSit5WkuJOfuZOjt5SjuP///6MuKmIAAABLdFJOUwCdNgVOkbG48/2gGkvd
8m4Llv7+pRwEVNzDUSAb2so6su33cAFHAtYs+4MFR4nlK7AyCof8/LmOi5sXO97wYQx+5PoQJUNI
8rYoQFP//YUAAAABYktHRCS0BvmZAAAAB3RJTUUH6AcMBxoClVndlQAAAI5JREFUGNNjYMADGJkQ
bGYWVjZ2Dk4ubiifh9cbAvj4QVwBQSEoX1hElIFBTFwCyJSUkpaRlZMHSisoArlKyipApqoaSD1I
pboGkl0gAU0tbYSAjq4eUEhOH8g0MIQIGRmbeHubmplbKJlaWkGErG2g1nrb2kFE7B0cwXwnc2eY
USourm7uHp6onvPC6mUAFZcZIrBzIB0AAAAldEVYdGRhdGU6Y3JlYXRlADIwMjQtMDctMTJUMDc6
MjY6MDIrMDA6MDClm05uAAAAJXRFWHRkYXRlOm1vZGlmeQAyMDI0LTA3LTEyVDA3OjI2OjAyKzAw
OjAw1Mb20gAAACh0RVh0ZGF0ZTp0aW1lc3RhbXAAMjAyNC0wNy0xMlQwNzoyNjowMiswMDowMIPT
1w0AAAAASUVORK5CYII=" />
</svg>
@@ -63,6 +63,7 @@ const IconMap = {
  Bedrock: 'bedrock',
  Gemini:'gemini',
  Groq: 'Groq',
  OpenRouter:'open-router'
};

const LlmIcon = ({ name }: { name: string }) => {