From 6ea77ab4cdc0238cb13ccbf9925cdf6a42c6d09c Mon Sep 17 00:00:00 2001
From: jiandanfeng
Date: Mon, 20 Jan 2025 22:04:18 +0800
Subject: [PATCH] fix: DeepSeek API Error with response format active (text and json_object) (#12747)

---
 api/core/model_runtime/model_providers/deepseek/llm/llm.py    | 3 ---
 api/core/model_runtime/model_providers/moonshot/llm/llm.py    | 3 ---
 api/core/model_runtime/model_providers/siliconflow/llm/llm.py | 3 ---
 3 files changed, 9 deletions(-)

diff --git a/api/core/model_runtime/model_providers/deepseek/llm/llm.py b/api/core/model_runtime/model_providers/deepseek/llm/llm.py
index 0a81f0c094..610dc7b458 100644
--- a/api/core/model_runtime/model_providers/deepseek/llm/llm.py
+++ b/api/core/model_runtime/model_providers/deepseek/llm/llm.py
@@ -24,9 +24,6 @@ class DeepseekLargeLanguageModel(OAIAPICompatLargeLanguageModel):
         user: Optional[str] = None,
     ) -> Union[LLMResult, Generator]:
         self._add_custom_parameters(credentials)
-        # {"response_format": "xx"} need convert to {"response_format": {"type": "xx"}}
-        if "response_format" in model_parameters:
-            model_parameters["response_format"] = {"type": model_parameters.get("response_format")}
         return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream)
 
     def validate_credentials(self, model: str, credentials: dict) -> None:
diff --git a/api/core/model_runtime/model_providers/moonshot/llm/llm.py b/api/core/model_runtime/model_providers/moonshot/llm/llm.py
index cfee0b91e7..33fa02f0bd 100644
--- a/api/core/model_runtime/model_providers/moonshot/llm/llm.py
+++ b/api/core/model_runtime/model_providers/moonshot/llm/llm.py
@@ -44,9 +44,6 @@ class MoonshotLargeLanguageModel(OAIAPICompatLargeLanguageModel):
         self._add_custom_parameters(credentials)
         self._add_function_call(model, credentials)
         user = user[:32] if user else None
-        # {"response_format": "json_object"} need convert to {"response_format": {"type": "json_object"}}
-        if "response_format" in model_parameters:
-            model_parameters["response_format"] = {"type": model_parameters.get("response_format")}
         return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user)
 
     def validate_credentials(self, model: str, credentials: dict) -> None:
diff --git a/api/core/model_runtime/model_providers/siliconflow/llm/llm.py b/api/core/model_runtime/model_providers/siliconflow/llm/llm.py
index f61e8b82e4..7a8aac9ca7 100644
--- a/api/core/model_runtime/model_providers/siliconflow/llm/llm.py
+++ b/api/core/model_runtime/model_providers/siliconflow/llm/llm.py
@@ -29,9 +29,6 @@ class SiliconflowLargeLanguageModel(OAIAPICompatLargeLanguageModel):
         user: Optional[str] = None,
     ) -> Union[LLMResult, Generator]:
         self._add_custom_parameters(credentials)
-        # {"response_format": "json_object"} need convert to {"response_format": {"type": "json_object"}}
-        if "response_format" in model_parameters:
-            model_parameters["response_format"] = {"type": model_parameters.get("response_format")}
         return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream)
 
     def validate_credentials(self, model: str, credentials: dict) -> None:
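
Note (illustration, not part of the patch): all three providers extend OAIAPICompatLargeLanguageModel, and the deleted lines wrapped a plain-string response_format into {"type": ...} inside each subclass. The sketch below is a minimal standalone reproduction of the suspected failure mode, assuming the shared OpenAI-compatible layer already applies the same normalization; under that assumption the wrapping runs twice and the API receives a nested, invalid value. The wrap_response_format helper is hypothetical and only mirrors the deleted code.

    # Hypothetical helper mirroring the deleted per-provider conversion (illustration only).
    def wrap_response_format(model_parameters: dict) -> dict:
        # Unconditionally wrap the value, exactly as the removed lines did.
        if "response_format" in model_parameters:
            model_parameters["response_format"] = {"type": model_parameters.get("response_format")}
        return model_parameters

    params = {"response_format": "json_object"}
    print(wrap_response_format(dict(params)))
    # -> {'response_format': {'type': 'json_object'}}            wrapped once: valid payload
    print(wrap_response_format(wrap_response_format(dict(params))))
    # -> {'response_format': {'type': {'type': 'json_object'}}}  wrapped twice: invalid payload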