diff --git a/api/core/model_runtime/model_providers/minimax/llm/abab5.5-chat.yaml b/api/core/model_runtime/model_providers/minimax/llm/abab5.5-chat.yaml
index b52f875d75..5840c2faba 100644
--- a/api/core/model_runtime/model_providers/minimax/llm/abab5.5-chat.yaml
+++ b/api/core/model_runtime/model_providers/minimax/llm/abab5.5-chat.yaml
@@ -10,8 +10,14 @@ model_properties:
 parameter_rules:
   - name: temperature
     use_template: temperature
+    min: 0.01
+    max: 1
+    default: 0.9
   - name: top_p
     use_template: top_p
+    min: 0.01
+    max: 1
+    default: 0.95
   - name: max_tokens
     use_template: max_tokens
     required: true
diff --git a/api/core/model_runtime/model_providers/minimax/llm/abab5.5s-chat.yaml b/api/core/model_runtime/model_providers/minimax/llm/abab5.5s-chat.yaml
new file mode 100644
index 0000000000..710c5c8914
--- /dev/null
+++ b/api/core/model_runtime/model_providers/minimax/llm/abab5.5s-chat.yaml
@@ -0,0 +1,35 @@
+model: abab5.5s-chat
+label:
+  en_US: Abab5.5s-Chat
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 8192
+parameter_rules:
+  - name: temperature
+    use_template: temperature
+    min: 0.01
+    max: 1
+    default: 0.9
+  - name: top_p
+    use_template: top_p
+    min: 0.01
+    max: 1
+    default: 0.95
+  - name: max_tokens
+    use_template: max_tokens
+    required: true
+    default: 3072
+    min: 1
+    max: 8192
+  - name: presence_penalty
+    use_template: presence_penalty
+  - name: frequency_penalty
+    use_template: frequency_penalty
+pricing:
+  input: '0.00'
+  output: '0.005'
+  unit: '0.001'
+  currency: RMB
diff --git a/api/core/model_runtime/model_providers/minimax/llm/chat_completion_pro.py b/api/core/model_runtime/model_providers/minimax/llm/chat_completion_pro.py
index a680ef0a2b..d75b734eb3 100644
--- a/api/core/model_runtime/model_providers/minimax/llm/chat_completion_pro.py
+++ b/api/core/model_runtime/model_providers/minimax/llm/chat_completion_pro.py
@@ -22,7 +22,7 @@ class MinimaxChatCompletionPro(object):
         """
         generate chat completion
         """
-        if model != 'abab5.5-chat':
+        if model not in ['abab5.5-chat', 'abab5.5s-chat']:
             raise BadRequestError(f'Invalid model: {model}')
 
         if not api_key or not group_id:
diff --git a/api/core/model_runtime/model_providers/minimax/llm/llm.py b/api/core/model_runtime/model_providers/minimax/llm/llm.py
index 8937b1c128..4226156309 100644
--- a/api/core/model_runtime/model_providers/minimax/llm/llm.py
+++ b/api/core/model_runtime/model_providers/minimax/llm/llm.py
@@ -18,6 +18,7 @@ from core.model_runtime.model_providers.minimax.llm.types import MinimaxMessage
 
 class MinimaxLargeLanguageModel(LargeLanguageModel):
     model_apis = {
+        'abab5.5s-chat': MinimaxChatCompletionPro,
         'abab5.5-chat': MinimaxChatCompletionPro,
         'abab5-chat': MinimaxChatCompletion
     }