diff --git a/api/core/model_runtime/model_providers/_position.yaml b/api/core/model_runtime/model_providers/_position.yaml
index 2dcdc1bf2e..97116978cd 100644
--- a/api/core/model_runtime/model_providers/_position.yaml
+++ b/api/core/model_runtime/model_providers/_position.yaml
@@ -20,6 +20,7 @@
 - jina
 - chatglm
 - xinference
+- yi
 - openllm
 - localai
 - openai_api_compatible
diff --git a/api/core/model_runtime/model_providers/yi/__init__.py b/api/core/model_runtime/model_providers/yi/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/core/model_runtime/model_providers/yi/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/yi/_assets/icon_l_en.svg
new file mode 100644
index 0000000000..0efce4e85b
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/_assets/icon_l_en.svg
@@ -0,0 +1,20 @@
+[20-line SVG asset: large "01.AI" wordmark logo; markup omitted in this view]
diff --git a/api/core/model_runtime/model_providers/yi/_assets/icon_l_zh.svg b/api/core/model_runtime/model_providers/yi/_assets/icon_l_zh.svg
new file mode 100644
index 0000000000..951842da55
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/_assets/icon_l_zh.svg
@@ -0,0 +1,20 @@
+[20-line SVG asset: large "零一万物" wordmark logo; markup omitted in this view]
diff --git a/api/core/model_runtime/model_providers/yi/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/yi/_assets/icon_s_en.svg
new file mode 100644
index 0000000000..a813274466
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/_assets/icon_s_en.svg
@@ -0,0 +1,7 @@
+[7-line SVG asset: small 01.AI icon; markup omitted in this view]
\ No newline at end of file
diff --git a/api/core/model_runtime/model_providers/yi/llm/__init__.py b/api/core/model_runtime/model_providers/yi/llm/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/api/core/model_runtime/model_providers/yi/llm/_position.yaml b/api/core/model_runtime/model_providers/yi/llm/_position.yaml
new file mode 100644
index 0000000000..12838d670f
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/llm/_position.yaml
@@ -0,0 +1,3 @@
+- yi-34b-chat-0205
+- yi-34b-chat-200k
+- yi-vl-plus
diff --git a/api/core/model_runtime/model_providers/yi/llm/llm.py b/api/core/model_runtime/model_providers/yi/llm/llm.py
new file mode 100644
index 0000000000..8ad6462514
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/llm/llm.py
@@ -0,0 +1,30 @@
+from collections.abc import Generator
+from typing import Optional, Union
+
+from core.model_runtime.entities.llm_entities import LLMResult
+from core.model_runtime.entities.message_entities import (
+    PromptMessage,
+    PromptMessageTool,
+)
+from core.model_runtime.model_providers.openai_api_compatible.llm.llm import OAIAPICompatLargeLanguageModel
+
+
+class YiLargeLanguageModel(OAIAPICompatLargeLanguageModel):
+    def _invoke(self, model: str, credentials: dict,
+                prompt_messages: list[PromptMessage], model_parameters: dict,
+                tools: Optional[list[PromptMessageTool]] = None, stop: Optional[list[str]] = None,
+                stream: bool = True, user: Optional[str] = None) \
+            -> Union[LLMResult, Generator]:
+        self._add_custom_parameters(credentials)
+        return super()._invoke(model, credentials, prompt_messages, model_parameters, tools, stop, stream, user)
+
+    def validate_credentials(self, model: str, credentials: dict) -> None:
+        self._add_custom_parameters(credentials)
+        super().validate_credentials(model, credentials)
+
+    @staticmethod
+    def _add_custom_parameters(credentials: dict) -> None:
+        credentials['mode'] = 'chat'
+
+        if 'endpoint_url' not in credentials or credentials['endpoint_url'] == "":
+            credentials['endpoint_url'] = 'https://api.lingyiwanwu.com/v1'
diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-0205.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-0205.yaml
new file mode 100644
index 0000000000..4d4148aa91
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-0205.yaml
@@ -0,0 +1,28 @@
+model: yi-34b-chat-0205
+label:
+  zh_Hans: yi-34b-chat-0205
+  en_US: yi-34b-chat-0205
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 4096
+parameter_rules:
+  - name: max_tokens
+    use_template: max_tokens
+    type: int
+    default: 512
+    min: 1
+    max: 4096
+  - name: temperature
+    use_template: temperature
+    type: float
+    default: 0.7
+    min: 0
+    max: 2
+pricing:
+  input: '0.0025'
+  output: '0.0025'
+  unit: '0.00001'
+  currency: RMB
diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-200k.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-200k.yaml
new file mode 100644
index 0000000000..4fbe84e9b7
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/llm/yi-34b-chat-200k.yaml
@@ -0,0 +1,28 @@
+model: yi-34b-chat-200k
+label:
+  zh_Hans: yi-34b-chat-200k
+  en_US: yi-34b-chat-200k
+model_type: llm
+features:
+  - agent-thought
+model_properties:
+  mode: chat
+  context_size: 200000
+parameter_rules:
+  - name: max_tokens
+    use_template: max_tokens
+    type: int
+    default: 1024
+    min: 1
+    max: 200000
+  - name: temperature
+    use_template: temperature
+    type: float
+    default: 0.7
+    min: 0
+    max: 2
+pricing:
+  input: '0.012'
+  output: '0.012'
+  unit: '0.00001'
+  currency: RMB
diff --git a/api/core/model_runtime/model_providers/yi/llm/yi-vl-plus.yaml b/api/core/model_runtime/model_providers/yi/llm/yi-vl-plus.yaml
new file mode 100644
index 0000000000..6195051f16
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/llm/yi-vl-plus.yaml
@@ -0,0 +1,28 @@
+model: yi-vl-plus
+label:
+  zh_Hans: yi-vl-plus
+  en_US: yi-vl-plus
+model_type: llm
+features:
+  - vision
+model_properties:
+  mode: chat
+  context_size: 4096
+parameter_rules:
+  - name: max_tokens
+    use_template: max_tokens
+    type: int
+    default: 512
+    min: 1
+    max: 4096
+  - name: temperature
+    use_template: temperature
+    type: float
+    default: 0.7
+    min: 0
+    max: 2
+pricing:
+  input: '0.01'
+  output: '0.03'
+  unit: '0.001'
+  currency: USD
diff --git a/api/core/model_runtime/model_providers/yi/yi.py b/api/core/model_runtime/model_providers/yi/yi.py
new file mode 100644
index 0000000000..691c7aa371
--- /dev/null
+++ b/api/core/model_runtime/model_providers/yi/yi.py
@@ -0,0 +1,32 @@
+import logging
+
+from core.model_runtime.entities.model_entities import ModelType
+from core.model_runtime.errors.validate import CredentialsValidateFailedError
+from core.model_runtime.model_providers.__base.model_provider import ModelProvider
+
+logger = logging.getLogger(__name__)
+
+
+class YiProvider(ModelProvider):
+
+    def validate_provider_credentials(self, credentials: dict) -> None:
+        """
+        Validate provider credentials.
+        If validation fails, raise an exception.
+
+        :param credentials: provider credentials, credentials form defined in `provider_credential_schema`.
+ """ + try: + model_instance = self.get_model_instance(ModelType.LLM) + + # Use `yi-34b-chat-0205` model for validate, + # no matter what model you pass in, text completion model or chat model + model_instance.validate_credentials( + model='yi-34b-chat-0205', + credentials=credentials + ) + except CredentialsValidateFailedError as ex: + raise ex + except Exception as ex: + logger.exception(f'{self.get_provider_schema().provider} credentials validate failed') + raise ex diff --git a/api/core/model_runtime/model_providers/yi/yi.yaml b/api/core/model_runtime/model_providers/yi/yi.yaml new file mode 100644 index 0000000000..368c715456 --- /dev/null +++ b/api/core/model_runtime/model_providers/yi/yi.yaml @@ -0,0 +1,41 @@ +provider: yi +label: + en_US: 01.AI + zh_Hans: 零一万物 +description: + en_US: Models provided by 01.AI, such as yi-34b-chat and yi-vl-plus. + zh_Hans: 零一万物提供的模型,例如 yi-34b-chat 和 yi-vl-plus。 +icon_small: + en_US: icon_s_en.svg +icon_large: + en_US: icon_l_en.svg +background: "#EFFDFD" +help: + title: + en_US: Get your API Key from 01.ai + zh_Hans: 从零一万物获取 API Key + url: + en_US: https://platform.lingyiwanwu.com/apikeys +supported_model_types: + - llm +configurate_methods: + - predefined-model +provider_credential_schema: + credential_form_schemas: + - variable: api_key + label: + en_US: API Key + type: secret-input + required: true + placeholder: + zh_Hans: 在此输入您的 API Key + en_US: Enter your API Key + - variable: endpoint_url + label: + zh_Hans: 自定义 API endpoint 地址 + en_US: CUstom API endpoint URL + type: text-input + required: false + placeholder: + zh_Hans: Base URL, e.g. https://api.lingyiwanwu.com/v1 + en_US: Base URL, e.g. https://api.lingyiwanwu.com/v1