Add support for VolcEngine - the current version supports SDK2 (#885)
- The main idea is to assemble **ak**, **sk**, and **ep_id** into a dictionary and store it in the database **api_key** field (a sketch of this round trip follows below).
- I don't know much about the front end, so I modeled it on the Ollama integration, which may be redundant.

### Configuration method

- Model name
  - Format: {"VolcEngine model name":"endpoint_id"}
  - Example: {"Skylark-pro-32K":"ep-xxxxxxxxx"}
- Volcano ACCESS_KEY
  - Format: the VOLC_ACCESSKEY of the VolcEngine account corresponding to the model
- Volcano SECRET_KEY
  - Format: the VOLC_SECRETKEY of the VolcEngine account corresponding to the model

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
This commit is contained in: parent fbd0d74053 · commit eb51ad73d6
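To make the configuration format and the stored `api_key` concrete, here is a minimal sketch of the round trip (not part of the PR; the helper names are hypothetical, while the field names `volc_ak`, `volc_sk`, `ep_id` and the trailing-comma layout match the diff below):

```python
import ast


def assemble_api_key(volc_ak: str, volc_sk: str, endpoint_id: str) -> str:
    # Same layout the backend builds below. The trailing comma makes this a
    # Python dict literal rather than strict JSON, which is why the PR parses
    # it with eval() instead of json.loads().
    return '{' + f'"volc_ak": "{volc_ak}", ' \
                 f'"volc_sk": "{volc_sk}", ' \
                 f'"ep_id": "{endpoint_id}", ' + '}'


def parse_api_key(api_key: str) -> dict:
    # ast.literal_eval accepts the same literal but, unlike eval(), cannot
    # execute arbitrary code.
    return ast.literal_eval(api_key)


key = assemble_api_key("AK...", "SK...", "ep-xxxxxxxxx")
creds = parse_api_key(key)
assert creds["ep_id"] == "ep-xxxxxxxxx"
```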
Backend: the `add_llm` handler assembles the VolcEngine credentials into the `api_key` field:

```diff
@@ -96,16 +96,29 @@ def set_api_key():
 @validate_request("llm_factory", "llm_name", "model_type")
 def add_llm():
     req = request.json
+    factory = req["llm_factory"]
+    # For VolcEngine, due to its special authentication method
+    # Assemble volc_ak, volc_sk, endpoint_id into api_key
+    if factory == "VolcEngine":
+        temp = list(eval(req["llm_name"]).items())[0]
+        llm_name = temp[0]
+        endpoint_id = temp[1]
+        api_key = '{' + f'"volc_ak": "{req.get("volc_ak", "")}", ' \
+                        f'"volc_sk": "{req.get("volc_sk", "")}", ' \
+                        f'"ep_id": "{endpoint_id}", ' + '}'
+    else:
+        llm_name = req["llm_name"]
+        api_key = "xxxxxxxxxxxxxxx"
+
     llm = {
         "tenant_id": current_user.id,
-        "llm_factory": req["llm_factory"],
+        "llm_factory": factory,
         "model_type": req["model_type"],
-        "llm_name": req["llm_name"],
+        "llm_name": llm_name,
         "api_base": req.get("api_base", ""),
-        "api_key": "xxxxxxxxxxxxxxx"
+        "api_key": api_key
     }
-    factory = req["llm_factory"]
 
     msg = ""
     if llm["model_type"] == LLMType.EMBEDDING.value:
         mdl = EmbeddingModel[factory](
```
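For illustration, a registration request for a VolcEngine model might carry a body like the following. This is a hedged sketch: the field names follow `@validate_request(...)` and the `req.get(...)` calls above, but the route and HTTP framing are outside this diff:

```python
# Hypothetical JSON body for the add_llm endpoint when the factory is VolcEngine.
payload = {
    "llm_factory": "VolcEngine",
    # llm_name carries the {"model name": "endpoint_id"} mapping as a string,
    # exactly the format described in the configuration section above.
    "llm_name": '{"Skylark2-pro-32k": "ep-xxxxxxxxx"}',
    "model_type": "chat",
    "volc_ak": "your VOLC_ACCESSKEY",
    "volc_sk": "your VOLC_SECRETKEY",
}
```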
The connectivity check for chat models now passes the assembled key through when the factory is VolcEngine:

```diff
@@ -118,7 +131,10 @@ def add_llm():
             msg += f"\nFail to access embedding model({llm['llm_name']})." + str(e)
     elif llm["model_type"] == LLMType.CHAT.value:
         mdl = ChatModel[factory](
-            key=None, model_name=llm["llm_name"], base_url=llm["api_base"])
+            key=llm['api_key'] if factory == "VolcEngine" else None,
+            model_name=llm["llm_name"],
+            base_url=llm["api_base"]
+        )
         try:
             m, tc = mdl.chat(None, [{"role": "user", "content": "Hello! How are you doing!"}], {
                 "temperature": 0.9})
```
The tenant-LLM upsert is unchanged apart from a dropped blank line:

```diff
@@ -134,7 +150,6 @@ def add_llm():
     if msg:
         return get_data_error_result(retmsg=msg)
 
-
     if not TenantLLMService.filter_update(
             [TenantLLM.tenant_id == current_user.id, TenantLLM.llm_factory == factory, TenantLLM.llm_name == llm["llm_name"]], llm):
         TenantLLMService.save(**llm)
```
Database bootstrap: VolcEngine is added to `factory_infos`:

```diff
@@ -132,7 +132,12 @@ factory_infos = [{
     "logo": "",
     "tags": "LLM",
     "status": "1",
-},
+},{
+    "name": "VolcEngine",
+    "logo": "",
+    "tags": "LLM, TEXT EMBEDDING",
+    "status": "1",
+}
     # {
     #     "name": "文心一言",
     #     "logo": "",
```
and two Skylark2 chat models are seeded in `init_llm_factory()`:

```diff
@@ -372,6 +377,21 @@ def init_llm_factory():
         "max_tokens": 16385,
         "model_type": LLMType.CHAT.value
     },
+    # ------------------------ VolcEngine -----------------------
+    {
+        "fid": factory_infos[9]["name"],
+        "llm_name": "Skylark2-pro-32k",
+        "tags": "LLM,CHAT,32k",
+        "max_tokens": 32768,
+        "model_type": LLMType.CHAT.value
+    },
+    {
+        "fid": factory_infos[9]["name"],
+        "llm_name": "Skylark2-pro-4k",
+        "tags": "LLM,CHAT,4k",
+        "max_tokens": 4096,
+        "model_type": LLMType.CHAT.value
+    },
 ]
 for info in factory_infos:
     try:
```
Chat-model module: the VolcEngine SDK2 `MaasService` client is imported:

```diff
@@ -19,6 +19,7 @@ from abc import ABC
 from openai import OpenAI
 import openai
 from ollama import Client
+from volcengine.maas.v2 import MaasService
 from rag.nlp import is_english
 from rag.utils import num_tokens_from_string
 
```
and a new `VolcEngineChat` class implements blocking and streaming chat on top of it:

```diff
@@ -315,3 +316,71 @@ class LocalLLM(Base):
             yield answer + "\n**ERROR**: " + str(e)
 
         yield token_count
+
+
+class VolcEngineChat(Base):
+    def __init__(self, key, model_name, base_url):
+        """
+        Since we do not want to modify the original database fields, and the
+        VolcEngine authentication method is quite special, ak, sk and ep_id
+        are assembled into api_key, stored as a dictionary, and parsed here
+        for use. model_name is for display only.
+        """
+        self.client = MaasService('maas-api.ml-platform-cn-beijing.volces.com', 'cn-beijing')
+        self.volc_ak = eval(key).get('volc_ak', '')
+        self.volc_sk = eval(key).get('volc_sk', '')
+        self.client.set_ak(self.volc_ak)
+        self.client.set_sk(self.volc_sk)
+        self.model_name = eval(key).get('ep_id', '')
+
+    def chat(self, system, history, gen_conf):
+        if system:
+            history.insert(0, {"role": "system", "content": system})
+        try:
+            req = {
+                "parameters": {
+                    "min_new_tokens": gen_conf.get("min_new_tokens", 1),
+                    "top_k": gen_conf.get("top_k", 0),
+                    "max_prompt_tokens": gen_conf.get("max_prompt_tokens", 30000),
+                    "temperature": gen_conf.get("temperature", 0.1),
+                    "max_new_tokens": gen_conf.get("max_tokens", 1000),
+                    "top_p": gen_conf.get("top_p", 0.3),
+                },
+                "messages": history
+            }
+            response = self.client.chat(self.model_name, req)
+            ans = response.choices[0].message.content.strip()
+            if response.choices[0].finish_reason == "length":
+                ans += "...\nFor the content length reason, it stopped, continue?" if is_english(
+                    [ans]) else "······\n由于长度的原因,回答被截断了,要继续吗?"
+            return ans, response.usage.total_tokens
+        except Exception as e:
+            return "**ERROR**: " + str(e), 0
+
+    def chat_streamly(self, system, history, gen_conf):
+        if system:
+            history.insert(0, {"role": "system", "content": system})
+        ans = ""
+        try:
+            req = {
+                "parameters": {
+                    "min_new_tokens": gen_conf.get("min_new_tokens", 1),
+                    "top_k": gen_conf.get("top_k", 0),
+                    "max_prompt_tokens": gen_conf.get("max_prompt_tokens", 30000),
+                    "temperature": gen_conf.get("temperature", 0.1),
+                    "max_new_tokens": gen_conf.get("max_tokens", 1000),
+                    "top_p": gen_conf.get("top_p", 0.3),
+                },
+                "messages": history
+            }
+            stream = self.client.stream_chat(self.model_name, req)
+            for resp in stream:
+                if not resp.choices[0].message.content:
+                    continue
+                ans += resp.choices[0].message.content
+                yield ans
+                if resp.choices[0].finish_reason == "stop":
+                    return resp.usage.total_tokens
+
+        except Exception as e:
+            yield ans + "\n**ERROR**: " + str(e)
+        yield 0
```
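A short usage sketch for the new class (not from the PR), assuming the volcengine SDK2 package is installed and the AK/SK and endpoint ID are valid; the assembled-key format matches what `add_llm` stores above:

```python
# The key is the dict-literal string that add_llm writes to the api_key field.
key = '{"volc_ak": "AK...", "volc_sk": "SK...", "ep_id": "ep-xxxxxxxxx", }'
mdl = VolcEngineChat(key=key, model_name="Skylark2-pro-32k", base_url="")

# Same sanity check the backend runs when a model is registered.
ans, tokens = mdl.chat(
    None,
    [{"role": "user", "content": "Hello! How are you doing!"}],
    {"temperature": 0.9},
)
print(ans, tokens)
```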
New file `web/src/assets/svg/llm/volc_engine.svg` (14 lines, 3.5 KiB), the provider icon:

```xml
<svg width="200" height="200" viewBox="0 0 20 40" fill="none" xmlns="http://www.w3.org/2000/svg">
  <g clip-path="url(#clip0_4967_21175)">
    <path d="M20.511 15.3019L17.2442 28.1928C17.2362 28.2282 17.2364 28.2649 17.2447 28.3001C17.2531 28.3354 17.2694 28.3683 17.2923 28.3964C17.3153 28.4244 17.3444 28.4468 17.3773 28.4619C17.4103 28.477 17.4462 28.4844 17.4825 28.4835H24.0137C24.0499 28.4844 24.0859 28.477 24.1188 28.4619C24.1518 28.4468 24.1809 28.4244 24.2038 28.3964C24.2268 28.3683 24.2431 28.3354 24.2514 28.3001C24.2598 28.2649 24.26 28.2282 24.252 28.1928L20.9685 15.3019C20.9541 15.2524 20.924 15.209 20.8827 15.178C20.8415 15.1471 20.7913 15.1304 20.7397 15.1304C20.6882 15.1304 20.638 15.1471 20.5968 15.178C20.5555 15.209 20.5254 15.2524 20.511 15.3019V15.3019Z" fill="#00E5E5"/>
    <path d="M2.53051 18.2228L-5.28338e-06 28.1924C-0.00799016 28.2277 -0.00780431 28.2644 0.000538111 28.2997C0.00888053 28.335 0.0251596 28.3679 0.0481365 28.3959C0.0711133 28.4239 0.100182 28.4464 0.133131 28.4615C0.166079 28.4766 0.202039 28.484 0.238273 28.4831H5.28025C5.31649 28.484 5.35245 28.4766 5.38539 28.4615C5.41834 28.4464 5.44741 28.4239 5.47039 28.3959C5.49336 28.3679 5.50964 28.335 5.51799 28.2997C5.52633 28.2644 5.52651 28.2277 5.51853 28.1924L2.98563 18.2228C2.97054 18.1742 2.94032 18.1318 2.89938 18.1016C2.85844 18.0714 2.80892 18.0552 2.75807 18.0552C2.70722 18.0552 2.6577 18.0714 2.61676 18.1016C2.57582 18.1318 2.5456 18.1742 2.53051 18.2228V18.2228Z" fill="#00E5E5"/>
    <path d="M6.99344 9.96839L2.38275 28.1919C2.37498 28.2263 2.37494 28.262 2.38262 28.2964C2.3903 28.3308 2.40552 28.363 2.42717 28.3908C2.44882 28.4186 2.47637 28.4413 2.50783 28.4572C2.53929 28.473 2.57388 28.4817 2.60911 28.4826H11.8329C11.8691 28.4835 11.9051 28.4761 11.938 28.461C11.971 28.4459 12 28.4235 12.023 28.3955C12.046 28.3675 12.0623 28.3345 12.0706 28.2993C12.079 28.264 12.0791 28.2273 12.0712 28.1919L7.44855 9.96839C7.43347 9.91982 7.40325 9.87736 7.36231 9.8472C7.32136 9.81705 7.27185 9.80078 7.221 9.80078C7.17015 9.80078 7.12063 9.81705 7.07969 9.8472C7.03874 9.87736 7.00852 9.91982 6.99344 9.96839Z" fill="#006EFF"/>
    <path d="M14.9472 4.17346C14.9321 4.1249 14.9019 4.08244 14.861 4.05228C14.82 4.02213 14.7705 4.00586 14.7197 4.00586C14.6688 4.00586 14.6193 4.02213 14.5784 4.05228C14.5374 4.08244 14.5072 4.1249 14.4921 4.17346L8.18963 28.192C8.18165 28.2273 8.18183 28.264 8.19017 28.2993C8.19852 28.3346 8.2148 28.3675 8.23777 28.3955C8.26075 28.4235 8.28982 28.446 8.32277 28.4611C8.35572 28.4762 8.39168 28.4835 8.42791 28.4827H21.0233C21.0596 28.4835 21.0955 28.4762 21.1285 28.4611C21.1614 28.446 21.1905 28.4235 21.2135 28.3955C21.2364 28.3675 21.2527 28.3346 21.2611 28.2993C21.2694 28.264 21.2696 28.2273 21.2616 28.192L14.9472 4.17346Z" fill="#006EFF"/>
    <path d="M10.3175 12.6188L6.31915 28.1903C6.31074 28.2258 6.31061 28.2628 6.31875 28.2984C6.3269 28.3339 6.34311 28.3672 6.36614 28.3955C6.38916 28.4238 6.41839 28.4465 6.45155 28.4617C6.48472 28.4769 6.52094 28.4844 6.55743 28.4834H14.535C14.5715 28.4844 14.6077 28.4769 14.6409 28.4617C14.674 28.4465 14.7033 28.4238 14.7263 28.3955C14.7493 28.3672 14.7655 28.3339 14.7737 28.2984C14.7818 28.2628 14.7817 28.2258 14.7733 28.1903L10.7726 12.6188C10.7575 12.5702 10.7273 12.5278 10.6863 12.4976C10.6454 12.4674 10.5959 12.4512 10.545 12.4512C10.4942 12.4512 10.4447 12.4674 10.4037 12.4976C10.3628 12.5278 10.3326 12.5702 10.3175 12.6188Z" fill="#00E5E5"/>
  </g>
  <defs>
    <clipPath id="clip0_4967_21175">
      <rect width="99.9412" height="24.5665" fill="white" transform="translate(0 4)"/>
    </clipPath>
  </defs>
</svg>
```
New UI strings are added to the locales. English:

```diff
@@ -477,6 +477,11 @@ The above is the content you need to summarize.`,
     baseUrlNameMessage: 'Please input your base url!',
     vision: 'Does it support Vision?',
     ollamaLink: 'How to integrate {{name}}',
+    volcModelNameMessage: 'Please input your model name! Format: {"ModelName":"EndpointID"}',
+    addVolcEngineAK: 'VOLC ACCESS_KEY',
+    volcAKMessage: 'Please input your VOLC_ACCESS_KEY',
+    addVolcEngineSK: 'VOLC SECRET_KEY',
+    volcSKMessage: 'Please input your SECRET_KEY',
   },
   message: {
     registered: 'Registered!',
```
Traditional Chinese (the Ollama link label is also generalized to `{{name}}`):

```diff
@@ -440,7 +440,12 @@ export default {
     modelNameMessage: '請輸入模型名稱!',
     modelTypeMessage: '請輸入模型類型!',
     baseUrlNameMessage: '請輸入基礎 Url!',
-    ollamaLink: '如何集成Ollama',
+    ollamaLink: '如何集成 {{name}}',
+    volcModelNameMessage: '請輸入模型名稱!格式:{"模型名稱":"EndpointID"}',
+    addVolcEngineAK: '火山 ACCESS_KEY',
+    volcAKMessage: '請輸入VOLC_ACCESS_KEY',
+    addVolcEngineSK: '火山 SECRET_KEY',
+    volcSKMessage: '請輸入VOLC_SECRET_KEY',
   },
   message: {
     registered: '註冊成功',
```
Simplified Chinese:

```diff
@@ -458,6 +458,11 @@ export default {
     modelTypeMessage: '请输入模型类型!',
     baseUrlNameMessage: '请输入基础 Url!',
     ollamaLink: '如何集成 {{name}}',
+    volcModelNameMessage: '请输入模型名称!格式:{"模型名称":"EndpointID"}',
+    addVolcEngineAK: '火山 ACCESS_KEY',
+    volcAKMessage: '请输入VOLC_ACCESS_KEY',
+    addVolcEngineSK: '火山 SECRET_KEY',
+    volcSKMessage: '请输入VOLC_SECRET_KEY',
   },
   message: {
     registered: '注册成功',
```
Front end: a `useSubmitVolcEngine` hook, modeled on the Ollama one, drives the new modal:

```diff
@@ -166,6 +166,41 @@ export const useSubmitOllama = () => {
   };
 };
 
+export const useSubmitVolcEngine = () => {
+  const loading = useOneNamespaceEffectsLoading('settingModel', ['add_llm']);
+  const [selectedVolcFactory, setSelectedVolcFactory] = useState<string>('');
+  const addLlm = useAddLlm();
+  const {
+    visible: volcAddingVisible,
+    hideModal: hideVolcAddingModal,
+    showModal: showVolcAddingModal,
+  } = useSetModalState();
+
+  const onVolcAddingOk = useCallback(
+    async (payload: IAddLlmRequestBody) => {
+      const ret = await addLlm(payload);
+      if (ret === 0) {
+        hideVolcAddingModal();
+      }
+    },
+    [hideVolcAddingModal, addLlm],
+  );
+
+  const handleShowVolcAddingModal = (llmFactory: string) => {
+    setSelectedVolcFactory(llmFactory);
+    showVolcAddingModal();
+  };
+
+  return {
+    volcAddingLoading: loading,
+    onVolcAddingOk,
+    volcAddingVisible,
+    hideVolcAddingModal,
+    showVolcAddingModal: handleShowVolcAddingModal,
+    selectedVolcFactory,
+  };
+};
+
 export const useHandleDeleteLlm = (llmFactory: string) => {
   const deleteLlm = useDeleteLlm();
   const showDeleteConfirm = useShowDeleteConfirm();
```
The model-provider settings page registers the icon, consumes the hook, branches `handleAddModel` for VolcEngine, and renders the modal:

```diff
@@ -37,10 +37,12 @@ import {
   useSelectModelProvidersLoading,
   useSubmitApiKey,
   useSubmitOllama,
+  useSubmitVolcEngine,
   useSubmitSystemModelSetting,
 } from './hooks';
 import styles from './index.less';
 import OllamaModal from './ollama-modal';
+import VolcEngineModal from "./volcengine-model";
 import SystemModelSettingModal from './system-model-setting-modal';
 
 const IconMap = {
@@ -52,6 +54,7 @@ const IconMap = {
   Ollama: 'ollama',
   Xinference: 'xinference',
   DeepSeek: 'deepseek',
+  VolcEngine: 'volc_engine',
 };
 
 const LlmIcon = ({ name }: { name: string }) => {
@@ -165,6 +168,15 @@ const UserSettingModel = () => {
     selectedLlmFactory,
   } = useSubmitOllama();
 
+  const {
+    volcAddingVisible,
+    hideVolcAddingModal,
+    showVolcAddingModal,
+    onVolcAddingOk,
+    volcAddingLoading,
+    selectedVolcFactory,
+  } = useSubmitVolcEngine();
+
   const handleApiKeyClick = useCallback(
     (llmFactory: string) => {
       if (isLocalLlmFactory(llmFactory)) {
@@ -179,6 +191,8 @@ const UserSettingModel = () => {
   const handleAddModel = (llmFactory: string) => () => {
     if (isLocalLlmFactory(llmFactory)) {
       showLlmAddingModal(llmFactory);
+    } else if (llmFactory === 'VolcEngine') {
+      showVolcAddingModal('VolcEngine');
     } else {
       handleApiKeyClick(llmFactory);
     }
@@ -270,6 +284,13 @@ const UserSettingModel = () => {
         loading={llmAddingLoading}
         llmFactory={selectedLlmFactory}
       ></OllamaModal>
+      <VolcEngineModal
+        visible={volcAddingVisible}
+        hideModal={hideVolcAddingModal}
+        onOk={onVolcAddingOk}
+        loading={volcAddingLoading}
+        llmFactory={selectedVolcFactory}
+      ></VolcEngineModal>
     </section>
   );
 };
```
New file (118 lines): the `VolcEngineModal` form component, imported above from `./volcengine-model`:

```tsx
import { useTranslate } from '@/hooks/commonHooks';
import { IModalProps } from '@/interfaces/common';
import { IAddLlmRequestBody } from '@/interfaces/request/llm';
import { Flex, Form, Input, Modal, Select, Space, Switch } from 'antd';
import omit from 'lodash/omit';

type FieldType = IAddLlmRequestBody & { vision: boolean };

const { Option } = Select;

const VolcEngineModal = ({
  visible,
  hideModal,
  onOk,
  loading,
  llmFactory,
}: IModalProps<IAddLlmRequestBody> & { llmFactory: string }) => {
  const [form] = Form.useForm<FieldType>();

  const { t } = useTranslate('setting');

  const handleOk = async () => {
    const values = await form.validateFields();
    const modelType =
      values.model_type === 'chat' && values.vision
        ? 'image2text'
        : values.model_type;

    const data = {
      ...omit(values, ['vision']),
      model_type: modelType,
      llm_factory: llmFactory,
    };
    console.info(data);

    onOk?.(data);
  };

  return (
    <Modal
      title={t('addLlmTitle', { name: llmFactory })}
      open={visible}
      onOk={handleOk}
      onCancel={hideModal}
      okButtonProps={{ loading }}
      footer={(originNode: React.ReactNode) => {
        return (
          <Flex justify={'space-between'}>
            <a
              href="https://www.volcengine.com/docs/82379/1095322"
              target="_blank"
              rel="noreferrer"
            >
              {t('ollamaLink', { name: llmFactory })}
            </a>
            <Space>{originNode}</Space>
          </Flex>
        );
      }}
    >
      <Form
        name="basic"
        style={{ maxWidth: 600 }}
        autoComplete="off"
        layout={'vertical'}
        form={form}
      >
        <Form.Item<FieldType>
          label={t('modelType')}
          name="model_type"
          initialValue={'chat'}
          rules={[{ required: true, message: t('modelTypeMessage') }]}
        >
          <Select placeholder={t('modelTypeMessage')}>
            <Option value="chat">chat</Option>
            <Option value="embedding">embedding</Option>
          </Select>
        </Form.Item>
        <Form.Item<FieldType>
          label={t('modelName')}
          name="llm_name"
          rules={[{ required: true, message: t('volcModelNameMessage') }]}
        >
          <Input placeholder={t('volcModelNameMessage')} />
        </Form.Item>
        <Form.Item<FieldType>
          label={t('addVolcEngineAK')}
          name="volc_ak"
          rules={[{ required: true, message: t('volcAKMessage') }]}
        >
          <Input placeholder={t('volcAKMessage')} />
        </Form.Item>
        <Form.Item<FieldType>
          label={t('addVolcEngineSK')}
          name="volc_sk"
          rules={[{ required: true, message: t('volcAKMessage') }]}
        >
          <Input placeholder={t('volcAKMessage')} />
        </Form.Item>
        <Form.Item noStyle dependencies={['model_type']}>
          {({ getFieldValue }) =>
            getFieldValue('model_type') === 'chat' && (
              <Form.Item
                label={t('vision')}
                valuePropName="checked"
                name={'vision'}
              >
                <Switch />
              </Form.Item>
            )
          }
        </Form.Item>
      </Form>
    </Modal>
  );
};

export default VolcEngineModal;
```