fix minimax bug (#1528)

### What problem does this PR solve?

#1353 

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
This commit is contained in:
Kevin Hu 2024-07-16 10:55:33 +08:00 committed by GitHub
parent 2a647162a8
commit 607de74ace
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 8 additions and 12 deletions

View File

@@ -573,42 +573,35 @@ def init_llm_factory():
# ------------------------ Minimax -----------------------
{
"fid": factory_infos[13]["name"],
"llm_name": "abab6.5-chat",
"llm_name": "abab6.5",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": LLMType.CHAT.value
},
{
"fid": factory_infos[13]["name"],
"llm_name": "abab6.5s-chat",
"llm_name": "abab6.5s",
"tags": "LLM,CHAT,245k",
"max_tokens": 245760,
"model_type": LLMType.CHAT.value
},
{
"fid": factory_infos[13]["name"],
"llm_name": "abab6.5t-chat",
"llm_name": "abab6.5t",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": LLMType.CHAT.value
},
{
"fid": factory_infos[13]["name"],
"llm_name": "abab6.5g-chat",
"llm_name": "abab6.5g",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": LLMType.CHAT.value
},
{
"fid": factory_infos[13]["name"],
"llm_name": "abab5.5-chat",
"tags": "LLM,CHAT,16k",
"max_tokens": 16384,
"model_type": LLMType.CHAT.value
},
{
"fid": factory_infos[13]["name"],
"llm_name": "abab5.5s-chat",
"llm_name": "abab5.5s",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": LLMType.CHAT.value
@@ -987,6 +980,8 @@ def init_llm_factory():
LLMFactoriesService.save(**info)
except Exception as e:
pass
LLMService.filter_delete([(LLM.fid == "MiniMax" or LLM.fid == "Minimax")])
for info in llm_infos:
try:
LLMService.save(**info)

View File

@@ -61,6 +61,7 @@ ChatModel = {
"VolcEngine": VolcEngineChat,
"BaiChuan": BaiChuanChat,
"MiniMax": MiniMaxChat,
"Minimax": MiniMaxChat,
"Mistral": MistralChat,
'Gemini' : GeminiChat,
"Bedrock": BedrockChat,