import logging

from core.model_runtime.entities.model_entities import ModelType
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.__base.model_provider import ModelProvider

logger = logging.getLogger(__name__)


class DeepseekProvider(ModelProvider):
    """Model provider implementation for the Deepseek platform."""

    def validate_provider_credentials(self, credentials: dict) -> None:
        """
        Validate provider credentials.

        Validation is delegated to an LLM model instance using the
        predefined `deepseek-chat` model; if validation fails, an
        exception is raised.

        :param credentials: provider credentials, credentials form defined
            in `provider_credential_schema`.
        :raises CredentialsValidateFailedError: when the credentials are
            rejected by the Deepseek API.
        """
        try:
            model_instance = self.get_model_instance(ModelType.LLM)

            # Probe the API key with a known predefined model.
            model_instance.validate_credentials(
                model='deepseek-chat',
                credentials=credentials
            )
        except CredentialsValidateFailedError:
            # Expected validation failure: re-raise unchanged, preserving
            # the original traceback (bare `raise` instead of `raise ex`).
            raise
        except Exception:
            # Unexpected failure: log with full traceback, then propagate.
            # Lazy %-style args avoid eager f-string formatting in logging.
            logger.exception('%s credentials validate failed',
                             self.get_provider_schema().provider)
            raise
from collections.abc import Generator
from typing import Optional, Union

from core.model_runtime.entities.llm_entities import LLMResult
from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool
from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel


class DeepseekLargeLanguageModel(OAIAPICompatLargeLanguageModel):
    """Deepseek LLM, built on top of the OpenAI-API-compatible base model.

    Deepseek exposes an OpenAI-compatible endpoint, so this subclass only
    injects the fixed connection settings before delegating to the base
    implementation.
    """

    def _invoke(self, model: str, credentials: dict,
                prompt_messages: list[PromptMessage], model_parameters: dict,
                tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None,
                stream: bool = True, user: Optional[str] = None) \
            -> Union[LLMResult, Generator]:
        """Invoke the model after injecting Deepseek connection parameters."""
        self._add_custom_parameters(credentials)
        return super()._invoke(model, credentials, prompt_messages, model_parameters,
                               tools, stop, stream, user)

    def validate_credentials(self, model: str, credentials: dict) -> None:
        """Validate credentials against the Deepseek endpoint via the base class."""
        self._add_custom_parameters(credentials)
        super().validate_credentials(model, credentials)

    @staticmethod
    def _add_custom_parameters(credentials: dict) -> None:
        # Pin the OpenAI-compatible base to Deepseek's chat endpoint.
        # NOTE: mutates the caller's dict in place, matching the original.
        credentials.update(
            mode='chat',
            endpoint_url='https://api.deepseek.com/',
        )