add support for OpenAI-API-Compatible llm (#1787)

### What problem does this PR solve?

#1771  add support for OpenAI-API-Compatible 

### Type of change

- [x] New Feature (non-breaking change which adds functionality)

---------

Co-authored-by: Zhedong Cen <cenzhedong2@126.com>
This commit is contained in:
黄腾 2024-08-06 16:20:21 +08:00 committed by GitHub
parent 66e4113e0b
commit b67484e77d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
12 changed files with 74 additions and 11 deletions

View File

@ -129,6 +129,9 @@ def add_llm():
elif factory == "LocalAI":
llm_name = req["llm_name"]+"___LocalAI"
api_key = "xxxxxxxxxxxxxxx"
elif factory == "OpenAI-API-Compatible":
llm_name = req["llm_name"]+"___OpenAI-API"
api_key = req["api_key"]
else:
llm_name = req["llm_name"]
api_key = "xxxxxxxxxxxxxxx"
@ -145,7 +148,7 @@ def add_llm():
msg = ""
if llm["model_type"] == LLMType.EMBEDDING.value:
mdl = EmbeddingModel[factory](
key=llm['api_key'] if factory in ["VolcEngine", "Bedrock"] else None,
key=llm['api_key'] if factory in ["VolcEngine", "Bedrock","OpenAI-API-Compatible"] else None,
model_name=llm["llm_name"],
base_url=llm["api_base"])
try:
@ -156,7 +159,7 @@ def add_llm():
msg += f"\nFail to access embedding model({llm['llm_name']})." + str(e)
elif llm["model_type"] == LLMType.CHAT.value:
mdl = ChatModel[factory](
key=llm['api_key'] if factory in ["VolcEngine", "Bedrock"] else None,
key=llm['api_key'] if factory in ["VolcEngine", "Bedrock","OpenAI-API-Compatible"] else None,
model_name=llm["llm_name"],
base_url=llm["api_base"]
)
@ -181,7 +184,7 @@ def add_llm():
e)
elif llm["model_type"] == LLMType.IMAGE2TEXT.value:
mdl = CvModel[factory](
key=None, model_name=llm["llm_name"], base_url=llm["api_base"]
key=llm["api_key"] if factory in ["OpenAI-API-Compatible"] else None, model_name=llm["llm_name"], base_url=llm["api_base"]
)
try:
img_url = (

View File

@ -158,6 +158,13 @@
"status": "1",
"llm": []
},
{
"name": "OpenAI-API-Compatible",
"logo": "",
"tags": "LLM,TEXT EMBEDDING,SPEECH2TEXT,MODERATION",
"status": "1",
"llm": []
},
{
"name": "Moonshot",
"logo": "",

View File

@ -36,7 +36,8 @@ EmbeddingModel = {
"Bedrock": BedrockEmbed,
"Gemini": GeminiEmbed,
"NVIDIA": NvidiaEmbed,
"LM-Studio": LmStudioEmbed
"LM-Studio": LmStudioEmbed,
"OpenAI-API-Compatible": OpenAI_APIEmbed
}
@ -53,7 +54,8 @@ CvModel = {
"LocalAI": LocalAICV,
"NVIDIA": NvidiaCV,
"LM-Studio": LmStudioCV,
"StepFun":StepFunCV
"StepFun":StepFunCV,
"OpenAI-API-Compatible": OpenAI_APICV
}
@ -78,7 +80,8 @@ ChatModel = {
"OpenRouter": OpenRouterChat,
"StepFun": StepFunChat,
"NVIDIA": NvidiaChat,
"LM-Studio": LmStudioChat
"LM-Studio": LmStudioChat,
"OpenAI-API-Compatible": OpenAI_APIChat
}
@ -88,7 +91,8 @@ RerankModel = {
"Youdao": YoudaoRerank,
"Xinference": XInferenceRerank,
"NVIDIA": NvidiaRerank,
"LM-Studio": LmStudioRerank
"LM-Studio": LmStudioRerank,
"OpenAI-API-Compatible": OpenAI_APIRerank
}

View File

@ -887,6 +887,16 @@ class LmStudioChat(Base):
if not base_url:
raise ValueError("Local llm url cannot be None")
if base_url.split("/")[-1] != "v1":
self.base_url = os.path.join(base_url, "v1")
self.client = OpenAI(api_key="lm-studio", base_url=self.base_url)
base_url = os.path.join(base_url, "v1")
self.client = OpenAI(api_key="lm-studio", base_url=base_url)
self.model_name = model_name
class OpenAI_APIChat(Base):
    """Chat client for any service exposing an OpenAI-compatible API.

    Normalizes the base URL to end in the "/v1" path segment expected by
    OpenAI-style endpoints, strips the "___OpenAI-API" suffix that the
    add_llm endpoint appends to model names, then defers to Base.
    """

    def __init__(self, key, model_name, base_url):
        if not base_url:
            raise ValueError("url cannot be None")
        # Append "/v1" unless the last path segment is already "v1".
        # Join with plain string concatenation: os.path.join is
        # platform-dependent and would emit backslashes on Windows,
        # producing an invalid URL.
        if base_url.split("/")[-1] != "v1":
            base_url = base_url.rstrip("/") + "/v1"
        # The factory-qualified name looks like "<model>___OpenAI-API";
        # keep only the real model identifier for API calls.
        model_name = model_name.split("___")[0]
        super().__init__(key, model_name, base_url)

View File

@ -638,3 +638,14 @@ class LmStudioCV(GptV4):
self.client = OpenAI(api_key="lm-studio", base_url=base_url)
self.model_name = model_name
self.lang = lang
class OpenAI_APICV(GptV4):
    """Image-to-text (vision) client for OpenAI-API-compatible services.

    Reuses GptV4's request logic with a custom endpoint; only the client
    construction differs.
    """

    def __init__(self, key, model_name, base_url, lang="Chinese"):
        if not base_url:
            raise ValueError("url cannot be None")
        # Append "/v1" unless already present as the last path segment.
        # String concatenation instead of os.path.join, which would emit
        # backslashes on Windows and corrupt the URL.
        if base_url.split("/")[-1] != "v1":
            base_url = base_url.rstrip("/") + "/v1"
        self.client = OpenAI(api_key=key, base_url=base_url)
        # Strip the "___OpenAI-API" suffix appended by add_llm.
        self.model_name = model_name.split("___")[0]
        self.lang = lang

View File

@ -513,3 +513,13 @@ class LmStudioEmbed(LocalAIEmbed):
self.base_url = os.path.join(base_url, "v1")
self.client = OpenAI(api_key="lm-studio", base_url=self.base_url)
self.model_name = model_name
class OpenAI_APIEmbed(OpenAIEmbed):
    """Embedding client for OpenAI-API-compatible services.

    Inherits the encode/encode_queries behavior from OpenAIEmbed and only
    customizes client construction for a user-supplied endpoint.
    """

    def __init__(self, key, model_name, base_url):
        if not base_url:
            raise ValueError("url cannot be None")
        # Append "/v1" unless already present as the last path segment.
        # BUG FIX: the original stored the joined URL in self.base_url but
        # then constructed the client with the *un-joined* base_url, so the
        # "/v1" suffix was silently dropped. Rebind the local and use it.
        # Plain string joining avoids os.path.join's backslashes on Windows.
        if base_url.split("/")[-1] != "v1":
            base_url = base_url.rstrip("/") + "/v1"
        self.client = OpenAI(api_key=key, base_url=base_url)
        # Strip the "___OpenAI-API" suffix appended by add_llm.
        self.model_name = model_name.split("___")[0]

View File

@ -212,3 +212,11 @@ class LmStudioRerank(Base):
def similarity(self, query: str, texts: list):
raise NotImplementedError("The LmStudioRerank has not been implement")
class OpenAI_APIRerank(Base):
    """Placeholder rerank client for OpenAI-API-compatible services.

    Reranking is not part of the standard OpenAI API surface, so this
    class is registered for factory completeness but is not functional.
    """

    def __init__(self, key, model_name, base_url):
        # Intentionally a no-op: no client is created because similarity()
        # is unimplemented. Arguments are accepted to match the factory
        # constructor signature used by RerankModel.
        pass

    def similarity(self, query: str, texts: list):
        # Grammar fix in the message ("implement" -> "implemented").
        raise NotImplementedError("The OpenAI-API-Compatible rerank model has not been implemented")

View File

@ -0,0 +1 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1722581471720" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="4395" xmlns:xlink="http://www.w3.org/1999/xlink" width="200" height="200"><path d="M512 512m-512 0a512 512 0 1 0 1024 0 512 512 0 1 0-1024 0Z" fill="#A663E6" p-id="4396"></path><path d="M845.568 442.282667a189.226667 189.226667 0 0 0-16.725333-157.141334c-42.496-72.96-127.829333-110.421333-211.2-92.8a198.4 198.4 0 0 0-186.965334-60.16 195.84 195.84 0 0 0-146.346666 129.578667 194.005333 194.005333 0 0 0-129.706667 92.8 191.445333 191.445333 0 0 0 24.064 227.114667 189.056 189.056 0 0 0 16.554667 157.141333c42.538667 72.96 127.957333 110.464 211.370666 92.8A195.370667 195.370667 0 0 0 552.874667 896c85.461333 0.085333 161.152-54.357333 187.264-134.613333a194.176 194.176 0 0 0 129.664-92.8 191.744 191.744 0 0 0-24.234667-226.304z m-292.693333 403.456a146.432 146.432 0 0 1-93.312-33.28l4.608-2.56 155.008-88.277334a25.386667 25.386667 0 0 0 12.714666-21.76V484.266667l65.536 37.376a2.261333 2.261333 0 0 1 1.28 1.664v178.645333c-0.213333 79.36-65.365333 143.616-145.834666 143.786667z m-313.386667-132.010667a141.397333 141.397333 0 0 1-17.322667-96.426667l4.608 2.688 155.136 88.32c7.808 4.48 17.493333 4.48 25.301334 0l189.568-107.818666v74.624a2.56 2.56 0 0 1-1.066667 1.962666l-157.013333 89.301334c-69.76 39.637333-158.890667 16.085333-199.210667-52.650667z m-40.874667-333.056a145.066667 145.066667 0 0 1 76.8-63.146667V499.2c-0.128 8.96 4.693333 17.194667 12.544 21.632l188.672 107.349333-65.536 37.376a2.474667 2.474667 0 0 1-2.304 0l-156.714666-89.173333a143.061333 143.061333 0 0 1-53.461334-196.48v0.768z m538.453334 123.349333l-189.226667-108.373333 65.365333-37.248a2.474667 2.474667 0 0 1 2.304 0l156.672 89.301333a143.317333 143.317333 0 0 1 72.490667 136.533334 144.085333 144.085333 0 0 1-94.421333 
122.837333v-181.717333a25.258667 25.258667 0 0 0-13.226667-21.333334z m65.194666-96.725333l-4.565333-2.730667-154.88-89.045333a25.472 25.472 0 0 0-25.472 0L427.946667 423.338667v-74.666667a2.133333 2.133333 0 0 1 0.896-1.92l156.714666-89.173333a147.669333 147.669333 0 0 1 156.330667 6.698666 143.146667 143.146667 0 0 1 60.373333 142.421334v0.597333z m-410.026666 132.309333l-65.578667-37.248a2.56 2.56 0 0 1-1.237333-1.834666V322.389333a143.872 143.872 0 0 1 84.096-130.133333 147.626667 147.626667 0 0 1 155.178666 19.626667l-4.608 2.56-155.008 88.277333a25.386667 25.386667 0 0 0-12.757333 21.76l-0.128 215.125333z m35.541333-75.690666l84.437333-48 84.565334 48v96l-84.266667 48-84.565333-48-0.170667-96z" fill="#FFFFFF" p-id="4397"></path></svg>

After

Width:  |  Height:  |  Size: 2.7 KiB

View File

@ -3,6 +3,7 @@ export interface IAddLlmRequestBody {
llm_name: string;
model_type: string;
api_base?: string; // chat|embedding|speech2text|image2text
api_key: string;
}
export interface IDeleteLlmRequestBody {

View File

@ -17,4 +17,4 @@ export const UserSettingIconMap = {
export * from '@/constants/setting';
export const LocalLlmFactories = ['Ollama', 'Xinference','LocalAI','LM-Studio'];
export const LocalLlmFactories = ['Ollama', 'Xinference','LocalAI','LM-Studio',"OpenAI-API-Compatible"];

View File

@ -21,7 +21,8 @@ export const IconMap = {
LocalAI: 'local-ai',
StepFun: 'stepfun',
NVIDIA:'nvidia',
'LM-Studio':'lm-studio'
'LM-Studio':'lm-studio',
'OpenAI-API-Compatible':'openai-api'
};
export const BedrockRegionList = [

View File

@ -92,6 +92,13 @@ const OllamaModal = ({
>
<Input placeholder={t('baseUrlNameMessage')} />
</Form.Item>
<Form.Item<FieldType>
label={t('apiKey')}
name="api_key"
rules={[{ required: false, message: t('apiKeyMessage') }]}
>
<Input placeholder={t('apiKeyMessage')} />
</Form.Item>
<Form.Item noStyle dependencies={['model_type']}>
{({ getFieldValue }) =>
getFieldValue('model_type') === 'chat' && (