feat: add abab5.5s-chat (#2063)
This commit is contained in:
parent 5960103cb8
commit d7209d9057
@@ -10,8 +10,14 @@ model_properties:
 parameter_rules:
   - name: temperature
     use_template: temperature
+    min: 0.01
+    max: 1
+    default: 0.9
   - name: top_p
     use_template: top_p
+    min: 0.01
+    max: 1
+    default: 0.95
   - name: max_tokens
     use_template: max_tokens
     required: true
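The min, max, and default fields added above bound the sampling parameters a caller may pass. As a rough illustration only, here is a minimal Python sketch of applying such a rule; the clamp_param helper is hypothetical and not part of dify's model-runtime API:

# Hypothetical helper showing how a parameter rule's min/max/default
# could be applied; names here are assumptions, not dify's actual API.
def clamp_param(value, rule):
    """Use the rule's default when no value is given, then clamp into [min, max]."""
    if value is None:
        return rule.get("default")
    lo, hi = rule.get("min"), rule.get("max")
    if lo is not None and value < lo:
        return lo
    if hi is not None and value > hi:
        return hi
    return value

temperature_rule = {"min": 0.01, "max": 1, "default": 0.9}
print(clamp_param(None, temperature_rule))  # -> 0.9 (default)
print(clamp_param(1.7, temperature_rule))   # -> 1 (clamped to max)
print(clamp_param(0.3, temperature_rule))   # -> 0.3 (unchanged)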
@@ -0,0 +1,35 @@
+model: abab5.5s-chat
+label:
+  en_US: Abab5.5s-Chat
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 8192
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0.01
+    max: 1
+    default: 0.9
+  - name: top_p
+    use_template: top_p
+    min: 0.01
+    max: 1
+    default: 0.95
+  - name: max_tokens
+    use_template: max_tokens
+    required: true
+    default: 3072
+    min: 1
+    max: 8192
+  - name: presence_penalty
+    use_template: presence_penalty
+  - name: frequency_penalty
+    use_template: frequency_penalty
+pricing:
+  input: '0.00'
+  output: '0.005'
+  unit: '0.001'
+  currency: RMB
@@ -22,7 +22,7 @@ class MinimaxChatCompletionPro(object):
         """
         generate chat completion
         """
-        if model != 'abab5.5-chat':
+        if model not in ['abab5.5-chat', 'abab5.5s-chat']:
             raise BadRequestError(f'Invalid model: {model}')
 
         if not api_key or not group_id:
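This change widens the model guard so the Pro endpoint accepts the new model name as well. A stand-alone sketch of the same check, with ValueError standing in for dify's BadRequestError:

# Stand-alone sketch of the widened check; ValueError stands in for
# dify's BadRequestError, which lives in its own errors module.
SUPPORTED_PRO_MODELS = ['abab5.5-chat', 'abab5.5s-chat']

def validate_model(model: str) -> None:
    if model not in SUPPORTED_PRO_MODELS:
        raise ValueError(f'Invalid model: {model}')

validate_model('abab5.5s-chat')  # accepted after this commit
validate_model('abab5-chat')     # raises ValueError: Invalid model: abab5-chat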
@@ -18,6 +18,7 @@ from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage
 
 class MinimaxLargeLanguageModel(LargeLanguageModel):
     model_apis = {
+        'abab5.5s-chat': MinimaxChatCompletionPro,
        'abab5.5-chat': MinimaxChatCompletionPro,
        'abab5-chat': MinimaxChatCompletion
     }
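model_apis maps each supported model name to the client class that serves it, so registering abab5.5s-chat against MinimaxChatCompletionPro is a one-line change. A minimal sketch of that dispatch pattern, with stub classes standing in for the real MinimaxChatCompletion and MinimaxChatCompletionPro:

# Sketch of the dict-based dispatch; the stub classes stand in for
# MinimaxChatCompletion and MinimaxChatCompletionPro.
class ChatCompletion:
    def generate(self, model: str, prompt: str) -> str:
        return f'[legacy endpoint] {model}: {prompt}'

class ChatCompletionPro:
    def generate(self, model: str, prompt: str) -> str:
        return f'[pro endpoint] {model}: {prompt}'

MODEL_APIS = {
    'abab5.5s-chat': ChatCompletionPro,
    'abab5.5-chat': ChatCompletionPro,
    'abab5-chat': ChatCompletion,
}

def invoke(model: str, prompt: str) -> str:
    client_cls = MODEL_APIS[model]  # pick the client class by model name
    return client_cls().generate(model, prompt)

print(invoke('abab5.5s-chat', 'hello'))  # routed to the Pro client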