chore: format get_customizable_model_schema return value (#9335)

ice yao 2024-10-21 19:05:44 +08:00 committed by GitHub
parent 79fe175440
commit 1e829ceaf3
28 changed files with 33 additions and 30 deletions
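
For reference, the change swaps the PEP 604 union in each return annotation for `typing.Optional`. The two spellings are equivalent for type checkers, but `X | None` annotations only evaluate at runtime on Python 3.10+ (or with `from __future__ import annotations`); whether that is the motivation here is an assumption, not stated in the commit. A minimal before/after sketch, with `ExampleModel` as a hypothetical stand-in for the provider classes touched below:

```python
from typing import Optional

from core.model_runtime.entities.model_entities import AIModelEntity


class ExampleModel:  # hypothetical stand-in for the provider model classes in this commit
    # Before: def get_customizable_model_schema(...) -> AIModelEntity | None:
    # After:  def get_customizable_model_schema(...) -> Optional[AIModelEntity]:
    #         (plus a `from typing import Optional` wherever the import was missing)
    def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
        # Real providers return a populated AIModelEntity; None means no customizable schema.
        return None
```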

View File

@@ -218,7 +218,7 @@ For instance, Xinference supports `max_tokens`, `temperature`, and `top_p` parameters.
However, some vendors may support different parameters for different models. For example, the `OpenLLM` vendor supports `top_k`, but not all models provided by this vendor support `top_k`. Let's say model A supports `top_k` but model B does not. In such cases, we need to dynamically generate the model parameter schema, as illustrated below:
```python
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -205,7 +205,7 @@ provider_credential_schema:
However, some vendors support different parameters for different models. For example, the vendor `OpenLLM` supports `top_k`, but not every model offered by this vendor supports `top_k`. Say model A supports `top_k` while model B does not; in that case we need to dynamically generate the model parameter schema here, as shown below:
```python
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -294,7 +294,7 @@ class AzureAIStudioLargeLanguageModel(LargeLanguageModel):
],
}
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
Used to define customizable model schema
"""

View File

@@ -148,7 +148,7 @@ class AzureRerankModel(RerankModel):
InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError, json.JSONDecodeError],
}
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -118,7 +118,7 @@ class HuggingfaceTeiRerankModel(RerankModel):
InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError],
}
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -189,7 +189,7 @@ class HuggingfaceTeiTextEmbeddingModel(TextEmbeddingModel):
return usage
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -1,5 +1,5 @@
from collections.abc import Generator
-from typing import cast
+from typing import Optional, cast
from httpx import Timeout
from openai import (
@@ -212,7 +212,7 @@ class LocalAILanguageModel(LargeLanguageModel):
except Exception as ex:
raise CredentialsValidateFailedError(f"Invalid credentials {str(ex)}")
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
completion_model = None
if credentials["completion_type"] == "chat_completion":
completion_model = LLMMode.CHAT.value

View File

@@ -73,7 +73,7 @@ class LocalAISpeech2text(Speech2TextModel):
InvokeBadRequestError: [InvokeBadRequestError],
}
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -115,7 +115,7 @@ class LocalAITextEmbeddingModel(TextEmbeddingModel):
num_tokens += self._get_num_tokens_by_gpt2(text)
return num_tokens
-def _get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def _get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
Get customizable model schema

View File

@@ -50,7 +50,7 @@ class MoonshotLargeLanguageModel(OAIAPICompatLargeLanguageModel):
self._add_custom_parameters(credentials)
super().validate_credentials(model, credentials)
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
return AIModelEntity(
model=model,
label=I18nObject(en_US=model, zh_Hans=model),

View File

@@ -61,7 +61,7 @@ class OpenAISpeech2TextModel(_CommonOpenAI, Speech2TextModel):
return response.text
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -62,7 +62,7 @@ class OAICompatSpeech2TextModel(_CommonOaiApiCompat, Speech2TextModel):
except Exception as ex:
raise CredentialsValidateFailedError(str(ex))
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -1,4 +1,5 @@
from collections.abc import Generator
+from typing import Optional
from core.model_runtime.entities.common_entities import I18nObject
from core.model_runtime.entities.llm_entities import LLMMode, LLMResult, LLMResultChunk, LLMResultChunkDelta
@@ -193,7 +194,7 @@ class OpenLLMLargeLanguageModel(LargeLanguageModel):
),
)
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -408,7 +408,7 @@ class SageMakerLargeLanguageModel(LargeLanguageModel):
InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError],
}
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -157,7 +157,7 @@ class SageMakerRerankModel(RerankModel):
InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError],
}
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -111,7 +111,7 @@ class SageMakerSpeech2TextModel(Speech2TextModel):
InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError],
}
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -180,7 +180,7 @@ class SageMakerEmbeddingModel(TextEmbeddingModel):
InvokeBadRequestError: [KeyError],
}
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -159,7 +159,7 @@ class SageMakerText2SpeechModel(TTSModel):
return self._tts_invoke_streaming(model_type, payload, sagemaker_endpoint)
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -40,7 +40,7 @@ class SiliconflowLargeLanguageModel(OAIAPICompatLargeLanguageModel):
credentials["mode"] = "chat"
credentials["endpoint_url"] = "https://api.siliconflow.cn/v1"
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
return AIModelEntity(
model=model,
label=I18nObject(en_US=model, zh_Hans=model),

View File

@@ -50,7 +50,7 @@ class StepfunLargeLanguageModel(OAIAPICompatLargeLanguageModel):
self._add_custom_parameters(credentials)
super().validate_credentials(model, credentials)
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
return AIModelEntity(
model=model,
label=I18nObject(en_US=model, zh_Hans=model),

View File

@@ -535,7 +535,7 @@ class TongyiLargeLanguageModel(LargeLanguageModel):
],
}
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
Architecture for defining customizable models

View File

@@ -1,4 +1,5 @@
from collections.abc import Generator
+from typing import Optional
from httpx import Response, post
from yarl import URL
@@ -109,7 +110,7 @@ class TritonInferenceAILargeLanguageModel(LargeLanguageModel):
raise NotImplementedError(f"PromptMessage type {type(item)} is not supported")
return text
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -1,5 +1,6 @@
import logging
from collections.abc import Generator
+from typing import Optional
from volcenginesdkarkruntime.types.chat import ChatCompletion, ChatCompletionChunk
@@ -298,7 +299,7 @@ class VolcengineMaaSLargeLanguageModel(LargeLanguageModel):
chunks = client.stream_chat(prompt_messages, **req_params)
return _handle_stream_chat_response(chunks)
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -1,5 +1,5 @@
from collections.abc import Generator, Iterator
-from typing import cast
+from typing import Optional, cast
from openai import (
APIConnectionError,
@@ -321,7 +321,7 @@ class XinferenceAILargeLanguageModel(LargeLanguageModel):
return message_dict
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -142,7 +142,7 @@ class XinferenceRerankModel(RerankModel):
InvokeBadRequestError: [InvokeBadRequestError, KeyError, ValueError],
}
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -129,7 +129,7 @@ class XinferenceSpeech2TextModel(Speech2TextModel):
return response["text"]
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -184,7 +184,7 @@ class XinferenceTextEmbeddingModel(TextEmbeddingModel):
return usage
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""

View File

@@ -116,7 +116,7 @@ class XinferenceText2SpeechModel(TTSModel):
"""
return self._tts_invoke_streaming(model, credentials, content_text, voice)
-def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity | None:
+def get_customizable_model_schema(self, model: str, credentials: dict) -> Optional[AIModelEntity]:
"""
used to define customizable model schema
"""