Add more mistral models. (#3826)

### What problem does this PR solve?

#3647

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
Kevin Hu 2024-12-03 15:18:38 +08:00 committed by GitHub
parent 95da6de9e1
commit 934dbc2e2b
2 changed files with 37 additions and 12 deletions


@@ -234,6 +234,7 @@ class Canvas(ABC):
except Exception as e:
yield {"content": "*Exception*: {}".format(e), "running_status": True}
logging.exception("Canvas.run got exception")
ran += 1
if ran >= len(self.path[-1]) and waiting:
without_dependent_checking = waiting
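
For context, the hunk above appears to advance the per-level counter on the exception path as well, so a failing component cannot leave the completion check against `self.path[-1]` short. A minimal, self-contained sketch of that pattern (not RAGFlow's actual `Canvas.run`; `run_level`, `components`, and `waiting` are illustrative names) might look like this:

```python
# A minimal sketch of the pattern in the hunk above: count a component as
# "ran" even when it raises, so an exception cannot stall the level-completion
# check. This is NOT RAGFlow's actual Canvas class; run_level, components,
# and waiting are illustrative names only.
import logging


def run_level(components, waiting):
    """Run one level of components, yielding status dicts as it goes."""
    ran = 0
    without_dependent_checking = []
    for comp in components:
        try:
            yield {"content": comp(), "running_status": True}
        except Exception as e:
            yield {"content": "*Exception*: {}".format(e), "running_status": True}
            logging.exception("run_level got exception")
        # Count the component whether it succeeded or raised.
        ran += 1
        if ran >= len(components) and waiting:
            # Every component in this level has run; fall back to the
            # components that were waiting on dependencies.
            without_dependent_checking = waiting
            yield {"content": "falling back to: {}".format(without_dependent_checking),
                   "running_status": True}


def ok():
    return "fine"


def boom():
    raise RuntimeError("boom")


if __name__ == "__main__":
    for status in run_level([ok, boom], waiting=["downstream_component"]):
        print(status)
```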


@@ -618,26 +618,32 @@
},
{
"llm_name": "open-mistral-7b",
"tags": "LLM,CHAT,32k",
"max_tokens": 32000,
"tags": "LLM,CHAT,128k",
"max_tokens": 128000,
"model_type": "chat"
},
{
"llm_name": "ministral-8b-latest",
"tags": "LLM,CHAT,128k",
"max_tokens": 128000,
"model_type": "chat"
},
{
"llm_name": "ministral-3b-latest",
"tags": "LLM,CHAT,128k",
"max_tokens": 128000,
"model_type": "chat"
},
{
"llm_name": "mistral-large-latest",
"tags": "LLM,CHAT,32k",
"max_tokens": 32000,
"tags": "LLM,CHAT,128k",
"max_tokens": 128000,
"model_type": "chat"
},
{
"llm_name": "mistral-small-latest",
"tags": "LLM,CHAT,32k",
"max_tokens": 32000,
"model_type": "chat"
},
{
"llm_name": "mistral-medium-latest",
"tags": "LLM,CHAT,32k",
"max_tokens": 32000,
"tags": "LLM,CHAT,128k",
"max_tokens": 128000,
"model_type": "chat"
},
{
@@ -646,11 +652,29 @@
"max_tokens": 32000,
"model_type": "chat"
},
{
"llm_name": "mistral-nemo",
"tags": "LLM,CHAT,128k",
"max_tokens": 128000,
"model_type": "chat"
},
{
"llm_name": "mistral-embed",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "embedding"
},
{
"llm_name": "pixtral-large-latest",
"tags": "LLM,CHAT,32k",
"max_tokens": 32000,
"model_type": "image2text"
},
{
"llm_name": "pixtral-12b",
"tags": "LLM,CHAT,32k",
"max_tokens": 32000,
"model_type": "image2text"
}
]
},
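
All of the entries above share one schema: `llm_name`, `tags`, `max_tokens`, and `model_type` ("chat", "embedding", or "image2text"). As a hedged illustration of how such a list could be consumed, the sketch below groups models by `model_type`; the `llm_factories.json` path and the `factory_llm_infos` / `llm` keys are assumptions made for this example rather than a statement of RAGFlow's actual loader:

```python
# A hedged illustration of how entries with the schema above (llm_name, tags,
# max_tokens, model_type) could be grouped by capability. The file path and
# the factory_llm_infos / llm keys are assumptions made for this sketch, not
# a statement of RAGFlow's actual loader.
import json
from collections import defaultdict


def group_models_by_type(path="llm_factories.json"):
    with open(path, encoding="utf-8") as f:
        data = json.load(f)

    by_type = defaultdict(list)
    # Bucket every model entry by its model_type, e.g. "chat", "embedding",
    # or "image2text", keeping the name and context window alongside it.
    for factory in data.get("factory_llm_infos", []):
        for model in factory.get("llm", []):
            by_type[model["model_type"]].append(
                (model["llm_name"], model["max_tokens"])
            )
    return by_type


if __name__ == "__main__":
    for model_type, models in group_models_by_type().items():
        print(model_type, ":", models)
```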