feat: add support for ollama #221 (#260)

### What problem does this PR solve?

Add support for Ollama as a model provider.

Issue link: #221

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
balibabu 2024-04-08 19:13:45 +08:00 committed by GitHub
parent d0a1ffe6e2
commit 265a7a283a
22 changed files with 275 additions and 26 deletions
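
At a high level, the change lets the model-provider settings page register a locally served Ollama model by posting an `IAddLlmRequestBody` to the new `add_llm` endpoint. A minimal sketch of that request, not part of the diff, ignoring authentication; the `api_host` prefix, the model name, and the Ollama address are assumptions:

```ts
// Hedged sketch of the request the new UI ultimately issues.
// api_host and the Ollama address are assumptions; auth headers are omitted.
const api_host = '/v1';

async function addOllamaModel(): Promise<number> {
  const res = await fetch(`${api_host}/llm/add_llm`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      llm_factory: 'Ollama',
      llm_name: 'llama2', // hypothetical: any model pulled into the local Ollama instance
      model_type: 'chat', // chat | embedding | speech2text | image2text
      api_base: 'http://localhost:11434', // assumed default Ollama server address
    }),
  });
  const { retcode } = await res.json();
  return retcode; // 0 signals success, as checked in the settingModel effect below
}
```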

View File

@@ -0,0 +1,10 @@
<svg width="17" height="17" viewBox="0 0 17 17" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_660_5)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.50662 0.453613C4.07917 0.453613 0.5 4.05917 0.5 8.51972C0.5 12.0853 2.79329 15.1035 5.9747 16.1717C6.37246 16.252 6.51816 15.9981 6.51816 15.7846C6.51816 15.5976 6.50505 14.9566 6.50505 14.2888C4.2778 14.7696 3.81399 13.3272 3.81399 13.3272C3.45606 12.3924 2.92572 12.1522 2.92572 12.1522C2.19674 11.658 2.97882 11.658 2.97882 11.658C3.78744 11.7115 4.21175 12.486 4.21175 12.486C4.92745 13.7145 6.08074 13.3674 6.54471 13.1537C6.61092 12.6328 6.82315 12.2723 7.0485 12.072C5.27211 11.885 3.40312 11.1906 3.40312 8.0923C3.40312 7.21091 3.72107 6.4898 4.22486 5.92897C4.14538 5.7287 3.86693 4.90057 4.30451 3.79219C4.30451 3.79219 4.98055 3.57848 6.50488 4.62016C7.1575 4.44359 7.83054 4.35377 8.50662 4.35302C9.18266 4.35302 9.87181 4.4466 10.5082 4.62016C12.0327 3.57848 12.7087 3.79219 12.7087 3.79219C13.1463 4.90057 12.8677 5.7287 12.7882 5.92897C13.3053 6.4898 13.6101 7.21091 13.6101 8.0923C13.6101 11.1906 11.7411 11.8716 9.95146 12.072C10.2432 12.3257 10.4949 12.8064 10.4949 13.5677C10.4949 14.6493 10.4818 15.5174 10.4818 15.7844C10.4818 15.9981 10.6277 16.252 11.0253 16.1719C14.2067 15.1033 16.5 12.0853 16.5 8.51972C16.5131 4.05917 12.9208 0.453613 8.50662 0.453613Z" fill="#24292F"/>
</g>
<defs>
<clipPath id="clip0_660_5">
<rect width="16" height="16" fill="white" transform="translate(0.5 0.453613)"/>
</clipPath>
</defs>
</svg>

After: 1.5 KiB

View File

@@ -0,0 +1,13 @@
<svg width="16" height="17" viewBox="0 0 16 17" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_660_28)">
<path d="M8 6.99902V10.0972H12.3054C12.1164 11.0936 11.549 11.9372 10.6981 12.5045L13.2945 14.5191C14.8072 13.1227 15.68 11.0718 15.68 8.63547C15.68 8.0682 15.6291 7.5227 15.5345 6.99911L8 6.99902Z" fill="#4285F4"/>
<path d="M3.51649 9.97632L2.93092 10.4246L0.858154 12.0391C2.17451 14.65 4.8725 16.4536 7.99974 16.4536C10.1597 16.4536 11.9706 15.7409 13.2942 14.5191L10.6979 12.5046C9.98516 12.9846 9.07606 13.2755 7.99974 13.2755C5.91976 13.2755 4.15254 11.8719 3.51976 9.98094L3.51649 9.97632Z" fill="#34A853"/>
<path d="M0.858119 4.86816C0.312695 5.94448 0 7.15905 0 8.45357C0 9.74809 0.312695 10.9627 0.858119 12.039C0.858119 12.0462 3.51998 9.97352 3.51998 9.97352C3.35998 9.49352 3.26541 8.98446 3.26541 8.45349C3.26541 7.92251 3.35998 7.41345 3.51998 6.93345L0.858119 4.86816Z" fill="#FBBC05"/>
<path d="M7.99991 3.63907C9.17811 3.63907 10.2254 4.04633 11.0617 4.83179L13.3526 2.54091C11.9635 1.24639 10.1599 0.453613 7.99991 0.453613C4.87266 0.453613 2.17451 2.24997 0.858154 4.86816L3.51994 6.93362C4.15263 5.04269 5.91992 3.63907 7.99991 3.63907Z" fill="#EA4335"/>
</g>
<defs>
<clipPath id="clip0_660_28">
<rect width="16" height="16" fill="white" transform="translate(0 0.453613)"/>
</clipPath>
</defs>
</svg>

After: 1.3 KiB

View File

Before: 3.5 KiB → After: 3.5 KiB

File diff suppressed because one or more lines are too long

After: 17 KiB

View File

Before: 2.4 KiB → After: 2.4 KiB

View File

Before: 1.3 KiB → After: 1.3 KiB

View File

Before: 4.4 KiB → After: 4.4 KiB

View File

Before: 3.3 KiB → After: 3.3 KiB

View File

@@ -21,13 +21,16 @@ try {
interface IProps extends IconComponentProps {
name: string;
width: string | number;
height?: string | number;
}
const SvgIcon = ({ name, width, ...restProps }: IProps) => {
const SvgIcon = ({ name, width, height, ...restProps }: IProps) => {
const ListItem = routeList.find((item) => item.name === name);
return (
<Icon
component={() => <img src={ListItem?.value} alt="" width={width} />}
component={() => (
<img src={ListItem?.value} alt="" width={width} height={height} />
)}
{...(restProps as any)}
/>
);
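
Hedged usage sketch of the extended component; the `llm/ollama` icon name is inferred from the IconMap change further down:

```ts
// Hypothetical usage: height is now forwarded to the underlying <img>.
import SvgIcon from '@/components/svg-icon';

const OllamaLogo = () => <SvgIcon name="llm/ollama" width={48} height={48} />;
```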

View File

@@ -4,6 +4,7 @@ import {
IMyLlmValue,
IThirdOAIModelCollection,
} from '@/interfaces/database/llm';
import { IAddLlmRequestBody } from '@/interfaces/request/llm';
import { useCallback, useEffect, useMemo } from 'react';
import { useDispatch, useSelector } from 'umi';
@@ -206,3 +207,19 @@ export const useSaveTenantInfo = () => {
return saveTenantInfo;
};
export const useAddLlm = () => {
const dispatch = useDispatch();
const saveTenantInfo = useCallback(
(requestBody: IAddLlmRequestBody) => {
return dispatch<any>({
type: 'settingModel/add_llm',
payload: requestBody,
});
},
[dispatch],
);
return saveTenantInfo;
};
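
A hedged sketch of a caller wrapping this hook (the module path and wrapper name are assumptions; the returned promise resolves to the retcode returned by the settingModel/add_llm effect shown further down):

```ts
// Hypothetical consumer of useAddLlm; module path and hook name below are assumed.
import { useAddLlm } from '@/hooks/llmHooks';
import { useCallback } from 'react';

export const useRegisterLocalOllama = () => {
  const addLlm = useAddLlm();

  return useCallback(async () => {
    const retcode = await addLlm({
      llm_factory: 'Ollama',
      llm_name: 'mistral', // hypothetical model name
      model_type: 'chat',
      api_base: 'http://localhost:11434', // assumed local Ollama address
    });
    // retcode 0 means success; the effect then re-fetches my_llm and factories_list
    return retcode === 0;
  }, [addLlm]);
};
```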

View File

@@ -7,3 +7,11 @@ export interface BaseState {
pagination: Pagination;
searchString: string;
}
export interface IModalProps<T> {
showModal?(): void;
hideModal(): void;
visible: boolean;
loading?: boolean;
onOk?(payload?: T): Promise<void> | void;
}

View File

@@ -0,0 +1,6 @@
export interface IAddLlmRequestBody {
llm_factory: string; // Ollama
llm_name: string;
model_type: string;
api_base?: string; // chat|embedding|speech2text|image2text
}
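
For clarity, model_type takes one of chat | embedding | speech2text | image2text, and api_base is the base URL of the local Ollama server. An example payload with hypothetical values:

```ts
// Example payload (hypothetical values) matching IAddLlmRequestBody.
const payload: IAddLlmRequestBody = {
  llm_factory: 'Ollama',
  llm_name: 'qwen:7b', // hypothetical: any model available to the Ollama instance
  model_type: 'chat',
  api_base: 'http://localhost:11434', // assumed default Ollama address
};
```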

View File

@@ -390,6 +390,14 @@ export default {
'The default ASR model all the newly created knowledgebase will use. Use this model to translate voices to corresponding text.',
workspace: 'Workspace',
upgrade: 'Upgrade',
addLlmTitle: 'Add LLM',
modelName: 'Model name',
modelNameMessage: 'Please input your model name!',
modelType: 'Model type',
modelTypeMessage: 'Please input your model type!',
addLlmBaseUrl: 'Base url',
baseUrlNameMessage: 'Please input your base url!',
vision: 'Does it support Vision?',
},
message: {
registered: 'Registered!',

View File

@@ -375,6 +375,14 @@ export default {
'所有新创建的知识库都将使用默认的 ASR 模型。 使用此模型将语音翻译为相应的文本。',
workspace: '工作空间',
upgrade: '升级',
addLlmTitle: '添加 LLM',
modelName: '模型名称',
modelType: '模型类型',
addLlmBaseUrl: '基础 Url',
vision: '是否支持 Vision',
modelNameMessage: '请输入模型名称!',
modelTypeMessage: '请输入模型类型!',
baseUrlNameMessage: '请输入基础 Url',
},
message: {
registered: '注册成功',

View File

@@ -151,6 +151,17 @@ const model: DvaModel<SettingModelState> = {
}
return retcode;
},
*add_llm({ payload = {} }, { call, put }) {
const { data } = yield call(userService.add_llm, payload);
const { retcode } = data;
if (retcode === 0) {
message.success(i18n.t('message.modified'));
yield put({ type: 'my_llm' });
yield put({ type: 'factories_list' });
}
return retcode;
},
},
};
export default model;

View File

@@ -46,8 +46,10 @@ const ApiKeyModal = ({
};
useEffect(() => {
form.setFieldValue('api_key', initialValue);
}, [initialValue, form]);
if (visible) {
form.setFieldValue('api_key', initialValue);
}
}, [initialValue, form, visible]);
return (
<Modal

View File

@@ -2,6 +2,7 @@ import { useSetModalState } from '@/hooks/commonHooks';
import {
IApiKeySavingParams,
ISystemModelSettingSavingParams,
useAddLlm,
useFetchLlmList,
useSaveApiKey,
useSaveTenantInfo,
@@ -12,6 +13,7 @@ import {
useFetchTenantInfo,
useSelectTenantInfo,
} from '@/hooks/userSettingHook';
import { IAddLlmRequestBody } from '@/interfaces/request/llm';
import { useCallback, useEffect, useState } from 'react';
type SavingParamsState = Omit<IApiKeySavingParams, 'api_key'>;
@@ -127,3 +129,31 @@ export const useSelectModelProvidersLoading = () => {
return loading;
};
export const useSubmitOllama = () => {
const loading = useOneNamespaceEffectsLoading('settingModel', ['add_llm']);
const addLlm = useAddLlm();
const {
visible: llmAddingVisible,
hideModal: hideLlmAddingModal,
showModal: showLlmAddingModal,
} = useSetModalState();
const onLlmAddingOk = useCallback(
async (payload: IAddLlmRequestBody) => {
const ret = await addLlm(payload);
if (ret === 0) {
hideLlmAddingModal();
}
},
[hideLlmAddingModal, addLlm],
);
return {
llmAddingLoading: loading,
onLlmAddingOk,
llmAddingVisible,
hideLlmAddingModal,
showLlmAddingModal,
};
};

View File

@@ -1,15 +1,11 @@
import { ReactComponent as MoreModelIcon } from '@/assets/svg/more-model.svg';
import SvgIcon from '@/components/svg-icon';
import { useSetModalState, useTranslate } from '@/hooks/commonHooks';
import {
LlmItem,
useFetchLlmFactoryListOnMount,
useFetchMyLlmListOnMount,
} from '@/hooks/llmHooks';
import { ReactComponent as MoonshotIcon } from '@/icons/moonshot.svg';
import { ReactComponent as OpenAiIcon } from '@/icons/openai.svg';
import { ReactComponent as TongYiIcon } from '@/icons/tongyi.svg';
import { ReactComponent as WenXinIcon } from '@/icons/wenxin.svg';
import { ReactComponent as ZhiPuIcon } from '@/icons/zhipu.svg';
import { SettingOutlined, UserOutlined } from '@ant-design/icons';
import {
Avatar,
@@ -33,24 +29,27 @@ import ApiKeyModal from './api-key-modal';
import {
useSelectModelProvidersLoading,
useSubmitApiKey,
useSubmitOllama,
useSubmitSystemModelSetting,
} from './hooks';
import styles from './index.less';
import OllamaModal from './ollama-modal';
import SystemModelSettingModal from './system-model-setting-modal';
import styles from './index.less';
const IconMap = {
'Tongyi-Qianwen': TongYiIcon,
Moonshot: MoonshotIcon,
OpenAI: OpenAiIcon,
'ZHIPU-AI': ZhiPuIcon,
文心一言: WenXinIcon,
'Tongyi-Qianwen': 'tongyi',
Moonshot: 'moonshot',
OpenAI: 'openai',
'ZHIPU-AI': 'zhipu',
文心一言: 'wenxin',
Ollama: 'ollama',
};
const LlmIcon = ({ name }: { name: string }) => {
const Icon = IconMap[name as keyof typeof IconMap];
return Icon ? (
<Icon width={48} height={48}></Icon>
const icon = IconMap[name as keyof typeof IconMap];
return icon ? (
<SvgIcon name={`llm/${icon}`} width={48} height={48}></SvgIcon>
) : (
<Avatar shape="square" size="large" icon={<UserOutlined />} />
);
@@ -90,7 +89,7 @@ const ModelCard = ({ item, clickApiKey }: IModelCardProps) => {
<Col span={12} className={styles.factoryOperationWrapper}>
<Space size={'middle'}>
<Button onClick={handleApiKeyClick}>
API-Key
{item.name === 'Ollama' ? t('addTheModel') : 'API-Key'}
<SettingOutlined />
</Button>
<Button onClick={handleShowMoreClick}>
@@ -142,16 +141,31 @@ const UserSettingModel = () => {
showSystemSettingModal,
} = useSubmitSystemModelSetting();
const { t } = useTranslate('setting');
const {
llmAddingVisible,
hideLlmAddingModal,
showLlmAddingModal,
onLlmAddingOk,
llmAddingLoading,
} = useSubmitOllama();
const handleApiKeyClick = useCallback(
(llmFactory: string) => {
showApiKeyModal({ llm_factory: llmFactory });
if (llmFactory === 'Ollama') {
showLlmAddingModal();
} else {
showApiKeyModal({ llm_factory: llmFactory });
}
},
[showApiKeyModal],
[showApiKeyModal, showLlmAddingModal],
);
const handleAddModel = (llmFactory: string) => () => {
handleApiKeyClick(llmFactory);
if (llmFactory === 'Ollama') {
showLlmAddingModal();
} else {
handleApiKeyClick(llmFactory);
}
};
const items: CollapseProps['items'] = [
@@ -216,7 +230,7 @@ const UserSettingModel = () => {
clickButton={showSystemSettingModal}
></SettingTitle>
<Divider></Divider>
<Collapse defaultActiveKey={['1']} ghost items={items} />
<Collapse defaultActiveKey={['1', '2']} ghost items={items} />
</section>
</Spin>
<ApiKeyModal
@@ -233,6 +247,12 @@ const UserSettingModel = () => {
hideModal={hideSystemSettingModal}
loading={saveSystemModelSettingLoading}
></SystemModelSettingModal>
<OllamaModal
visible={llmAddingVisible}
hideModal={hideLlmAddingModal}
onOk={onLlmAddingOk}
loading={llmAddingLoading}
></OllamaModal>
</>
);
};

View File

@@ -0,0 +1,96 @@
import { useTranslate } from '@/hooks/commonHooks';
import { IModalProps } from '@/interfaces/common';
import { IAddLlmRequestBody } from '@/interfaces/request/llm';
import { Form, Input, Modal, Select, Switch } from 'antd';
import omit from 'lodash/omit';
type FieldType = IAddLlmRequestBody & { vision: boolean };
const { Option } = Select;
const OllamaModal = ({
visible,
hideModal,
onOk,
loading,
}: IModalProps<IAddLlmRequestBody>) => {
const [form] = Form.useForm<FieldType>();
const { t } = useTranslate('setting');
const handleOk = async () => {
const values = await form.validateFields();
const modelType =
values.model_type === 'chat' && values.vision
? 'image2text'
: values.model_type;
const data = {
...omit(values, ['vision']),
model_type: modelType,
llm_factory: 'Ollama',
};
console.info(data);
onOk?.(data);
};
return (
<Modal
title={t('addLlmTitle')}
open={visible}
onOk={handleOk}
onCancel={hideModal}
okButtonProps={{ loading }}
>
<Form
name="basic"
style={{ maxWidth: 600 }}
autoComplete="off"
layout={'vertical'}
form={form}
>
<Form.Item<FieldType>
label={t('modelType')}
name="model_type"
initialValue={'chat'}
rules={[{ required: true, message: t('modelTypeMessage') }]}
>
<Select placeholder={t('modelTypeMessage')}>
<Option value="chat">chat</Option>
<Option value="embedding">embedding</Option>
</Select>
</Form.Item>
<Form.Item<FieldType>
label={t('modelName')}
name="llm_name"
rules={[{ required: true, message: t('modelNameMessage') }]}
>
<Input placeholder={t('modelNameMessage')} />
</Form.Item>
<Form.Item<FieldType>
label={t('addLlmBaseUrl')}
name="api_base"
rules={[{ required: true, message: t('baseUrlNameMessage') }]}
>
<Input placeholder={t('baseUrlNameMessage')} />
</Form.Item>
<Form.Item noStyle dependencies={['model_type']}>
{({ getFieldValue }) =>
getFieldValue('model_type') === 'chat' && (
<Form.Item
label={t('vision')}
valuePropName="checked"
name={'vision'}
>
<Switch />
</Form.Item>
)
}
</Form.Item>
</Form>
</Modal>
);
};
export default OllamaModal;
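
A small illustration of what handleOk submits when the Vision switch is enabled for a chat model (form values are hypothetical):

```ts
// With vision checked, a "chat" selection is registered as an "image2text" model.
const formValues = {
  model_type: 'chat',
  llm_name: 'llava', // hypothetical vision-capable model
  api_base: 'http://localhost:11434',
  vision: true,
};
// onOk then receives:
// {
//   llm_factory: 'Ollama',
//   llm_name: 'llava',
//   api_base: 'http://localhost:11434',
//   model_type: 'image2text',
// }
```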

View File

@@ -30,8 +30,10 @@ const SystemModelSettingModal = ({
};
useEffect(() => {
form.setFieldsValue(initialValues);
}, [form, initialValues]);
if (visible) {
form.setFieldsValue(initialValues);
}
}, [form, initialValues, visible]);
const onFormLayoutChange = () => {};

View File

@@ -14,6 +14,7 @@ const {
my_llm,
set_api_key,
set_tenant_info,
add_llm,
} = api;
const methods = {
@@ -61,6 +62,10 @@ const methods = {
url: set_api_key,
method: 'post',
},
add_llm: {
url: add_llm,
method: 'post',
},
} as const;
const userService = registerServer<keyof typeof methods>(methods, request);
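
A hedged direct-call sketch mirroring how the settingModel effect above consumes the new entry, assuming registerServer exposes each method as a function that returns an axios-style response:

```ts
// Hypothetical direct call to the generated service method (values are examples).
const { data } = await userService.add_llm({
  llm_factory: 'Ollama',
  llm_name: 'nomic-embed-text', // hypothetical embedding model
  model_type: 'embedding',
  api_base: 'http://localhost:11434', // assumed local Ollama address
});
// data.retcode === 0 indicates the model was registered
```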

View File

@@ -17,6 +17,7 @@ export default {
llm_list: `${api_host}/llm/list`,
my_llm: `${api_host}/llm/my_llms`,
set_api_key: `${api_host}/llm/set_api_key`,
add_llm: `${api_host}/llm/add_llm`,
//知识库管理
kb_list: `${api_host}/kb/list`,