fix: fetch llm list by @tanstack/react-query #1306 (#1708)

### What problem does this PR solve?

Fetch the LLM list with `@tanstack/react-query` instead of dva `settingModel` effects, and migrate the related factory list, my-LLM list, and the API-key/tenant-info/add-LLM/delete-LLM operations to query and mutation hooks. #1306

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
Author: balibabu · 2024-07-25 18:06:39 +08:00 · committed by GitHub
Commit: 375f621405 (parent: a99d19bdea)
10 changed files with 170 additions and 207 deletions
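
The recurring pattern across these files: a dva effect plus `useSelector`/`useEffect` plumbing is replaced by a self-contained react-query hook. A condensed sketch of the pattern, based on the new `useFetchLlmList` in the first file below (hook name suffixed to mark it as illustrative):

```ts
import { LlmModelType } from '@/constants/knowledge';
import userService from '@/services/user-service';
import { useQuery } from '@tanstack/react-query';

// Before: components dispatched { type: 'settingModel/llm_list' } inside a
// useEffect and read the result back from the dva store with useSelector.
// After: the hook owns fetching and caching; mounting it is enough.
export const useFetchLlmListSketch = (modelType?: LlmModelType) => {
  const { data } = useQuery({
    queryKey: ['llmList'],
    initialData: {},
    queryFn: async () => {
      const { data } = await userService.llm_list({ model_type: modelType });
      return data?.data ?? {};
    },
  });
  return data;
};
```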

View File

@@ -1,37 +1,37 @@
import { LlmModelType } from '@/constants/knowledge';
import { ResponseGetType } from '@/interfaces/database/base';
import {
IFactory,
IMyLlmValue,
IThirdOAIModelCollection as IThirdAiModelCollection,
IThirdOAIModelCollection,
} from '@/interfaces/database/llm';
import {
IAddLlmRequestBody,
IDeleteLlmRequestBody,
} from '@/interfaces/request/llm';
import userService from '@/services/user-service';
import { sortLLmFactoryListBySpecifiedOrder } from '@/utils/common-util';
import { useCallback, useEffect, useMemo } from 'react';
import { useDispatch, useSelector } from 'umi';
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
import { message } from 'antd';
import { useMemo } from 'react';
import { useTranslation } from 'react-i18next';
import { useSelector } from 'umi';
export const useFetchLlmList = (
modelType?: LlmModelType,
isOnMountFetching: boolean = true,
) => {
const dispatch = useDispatch();
): IThirdAiModelCollection => {
const { data } = useQuery({
queryKey: ['llmList'],
initialData: {},
queryFn: async () => {
const { data } = await userService.llm_list({ model_type: modelType });
const fetchLlmList = useCallback(() => {
dispatch({
type: 'settingModel/llm_list',
payload: { model_type: modelType },
});
}, [dispatch, modelType]);
return data?.data ?? {};
},
});
useEffect(() => {
if (isOnMountFetching) {
fetchLlmList();
}
}, [fetchLlmList, isOnMountFetching]);
return fetchLlmList;
return data;
};
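
The new hook returns the model collection directly, so consumers no longer need the removed `isOnMountFetching` flag or a `useEffect` to trigger the fetch. A hedged consumer sketch (the component is illustrative):

```tsx
import { useFetchLlmList } from '@/hooks/llm-hooks';

// Illustrative consumer: react-query fires the fetch when this component mounts.
const LlmFamilyCount = () => {
  const llmInfo = useFetchLlmList(); // IThirdAiModelCollection, {} until loaded
  return <span>{Object.keys(llmInfo).length} model families</span>;
};

export default LlmFamilyCount;
```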
export const useSelectLlmInfo = () => {
@@ -43,7 +43,7 @@ export const useSelectLlmInfo = () => {
};
export const useSelectLlmOptions = () => {
const llmInfo: IThirdOAIModelCollection = useSelectLlmInfo();
const llmInfo: IThirdOAIModelCollection = useFetchLlmList();
const embeddingModelOptions = useMemo(() => {
return Object.entries(llmInfo).map(([key, value]) => {
@@ -62,7 +62,7 @@ export const useSelectLlmOptions = () => {
};
export const useSelectLlmOptionsByModelType = () => {
const llmInfo: IThirdOAIModelCollection = useSelectLlmInfo();
const llmInfo: IThirdOAIModelCollection = useFetchLlmList();
const groupOptionsByModelType = (modelType: LlmModelType) => {
return Object.entries(llmInfo)
@@ -96,73 +96,65 @@ export const useSelectLlmOptionsByModelType = () => {
};
};
export const useSelectLlmFactoryList = () => {
const factoryList: IFactory[] = useSelector(
(state: any) => state.settingModel.factoryList,
);
export const useFetchLlmFactoryList = (): ResponseGetType<IFactory[]> => {
const { data, isFetching: loading } = useQuery({
queryKey: ['factoryList'],
initialData: [],
gcTime: 0,
queryFn: async () => {
const { data } = await userService.factories_list();
return factoryList;
return data?.data ?? [];
},
});
return { data, loading };
};
export const useSelectMyLlmList = () => {
const myLlmList: Record<string, IMyLlmValue> = useSelector(
(state: any) => state.settingModel.myLlmList,
);
export type LlmItem = { name: string; logo: string } & IMyLlmValue;
return myLlmList;
export const useFetchMyLlmList = (): ResponseGetType<
Record<string, IMyLlmValue>
> => {
const { data, isFetching: loading } = useQuery({
queryKey: ['myLlmList'],
initialData: {},
gcTime: 0,
queryFn: async () => {
const { data } = await userService.my_llm();
return data?.data ?? {};
},
});
return { data, loading };
};
export const useFetchLlmFactoryListOnMount = () => {
const dispatch = useDispatch();
const factoryList = useSelectLlmFactoryList();
const myLlmList = useSelectMyLlmList();
export const useSelectLlmList = () => {
const { data: myLlmList, loading: myLlmListLoading } = useFetchMyLlmList();
const { data: factoryList, loading: factoryListLoading } =
useFetchLlmFactoryList();
const list = useMemo(() => {
const nextMyLlmList: Array<LlmItem> = useMemo(() => {
return Object.entries(myLlmList).map(([key, value]) => ({
name: key,
logo: factoryList.find((x) => x.name === key)?.logo ?? '',
...value,
}));
}, [myLlmList, factoryList]);
const nextFactoryList = useMemo(() => {
const currentList = factoryList.filter((x) =>
Object.keys(myLlmList).every((y) => y !== x.name),
);
return sortLLmFactoryListBySpecifiedOrder(currentList);
}, [factoryList, myLlmList]);
const fetchLlmFactoryList = useCallback(() => {
dispatch({
type: 'settingModel/factories_list',
});
}, [dispatch]);
useEffect(() => {
fetchLlmFactoryList();
}, [fetchLlmFactoryList]);
return list;
};
export type LlmItem = { name: string; logo: string } & IMyLlmValue;
export const useFetchMyLlmListOnMount = () => {
const dispatch = useDispatch();
const llmList = useSelectMyLlmList();
const factoryList = useSelectLlmFactoryList();
const list: Array<LlmItem> = useMemo(() => {
return Object.entries(llmList).map(([key, value]) => ({
name: key,
logo: factoryList.find((x) => x.name === key)?.logo ?? '',
...value,
}));
}, [llmList, factoryList]);
const fetchMyLlmList = useCallback(() => {
dispatch({
type: 'settingModel/my_llm',
});
}, [dispatch]);
useEffect(() => {
fetchMyLlmList();
}, [fetchMyLlmList]);
return list;
return {
myLlmList: nextMyLlmList,
factoryList: nextFactoryList,
loading: myLlmListLoading || factoryListLoading,
};
};
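
For context, a hedged sketch of consuming `useSelectLlmList`; it stands in for the removed `useFetchLlmFactoryListOnMount`/`useFetchMyLlmListOnMount` pair and the dva namespace-loading selector (the component and the antd `Spin` usage are illustrative):

```tsx
import { useSelectLlmList } from '@/hooks/llm-hooks';
import { Spin } from 'antd';

// Illustrative consumer: one hook yields both derived lists and a merged loading flag.
const ProviderOverview = () => {
  const { myLlmList, factoryList, loading } = useSelectLlmList();
  return (
    <Spin spinning={loading}>
      <div>
        {myLlmList.length} configured / {factoryList.length} available providers
      </div>
    </Spin>
  );
};

export default ProviderOverview;
```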
export interface IApiKeySavingParams {
@@ -174,19 +166,26 @@ export interface IApiKeySavingParams {
}
export const useSaveApiKey = () => {
const dispatch = useDispatch();
const saveApiKey = useCallback(
(savingParams: IApiKeySavingParams) => {
return dispatch<any>({
type: 'settingModel/set_api_key',
payload: savingParams,
});
const queryClient = useQueryClient();
const { t } = useTranslation();
const {
data,
isPending: loading,
mutateAsync,
} = useMutation({
mutationKey: ['saveApiKey'],
mutationFn: async (params: IApiKeySavingParams) => {
const { data } = await userService.set_api_key(params);
if (data.retcode === 0) {
message.success(t('message.modified'));
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
}
return data.retcode;
},
[dispatch],
);
});
return saveApiKey;
return { data, loading, saveApiKey: mutateAsync };
};
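
A hedged usage sketch: `mutateAsync` is surfaced as `saveApiKey`, resolves with the backend `retcode`, and a successful call has already invalidated the `myLlmList`/`factoryList` queries, so the provider lists refresh without any manual dispatch (the wrapper hook below is hypothetical):

```ts
import { IApiKeySavingParams, useSaveApiKey } from '@/hooks/llm-hooks';

// Hypothetical caller: a modal "OK" handler that saves and closes on success.
export const useSubmitApiKeySketch = (hideModal: () => void) => {
  const { saveApiKey, loading } = useSaveApiKey();

  const onApiKeySavingOk = async (params: IApiKeySavingParams) => {
    const retcode = await saveApiKey(params);
    if (retcode === 0) {
      hideModal(); // only close the dialog once the key is actually stored
    }
  };

  return { onApiKeySavingOk, loading };
};
```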
export interface ISystemModelSettingSavingParams {
@@ -199,49 +198,67 @@ export interface ISystemModelSettingSavingParams {
}
export const useSaveTenantInfo = () => {
const dispatch = useDispatch();
const saveTenantInfo = useCallback(
(savingParams: ISystemModelSettingSavingParams) => {
return dispatch<any>({
type: 'settingModel/set_tenant_info',
payload: savingParams,
});
const { t } = useTranslation();
const {
data,
isPending: loading,
mutateAsync,
} = useMutation({
mutationKey: ['saveTenantInfo'],
mutationFn: async (params: ISystemModelSettingSavingParams) => {
const { data } = await userService.set_tenant_info(params);
if (data.retcode === 0) {
message.success(t('message.modified'));
}
return data.retcode;
},
[dispatch],
);
});
return saveTenantInfo;
return { data, loading, saveTenantInfo: mutateAsync };
};
export const useAddLlm = () => {
const dispatch = useDispatch();
const addLlm = useCallback(
(requestBody: IAddLlmRequestBody) => {
return dispatch<any>({
type: 'settingModel/add_llm',
payload: requestBody,
});
const queryClient = useQueryClient();
const { t } = useTranslation();
const {
data,
isPending: loading,
mutateAsync,
} = useMutation({
mutationKey: ['addLlm'],
mutationFn: async (params: IAddLlmRequestBody) => {
const { data } = await userService.add_llm(params);
if (data.retcode === 0) {
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
message.success(t('message.modified'));
}
return data.retcode;
},
[dispatch],
);
});
return addLlm;
return { data, loading, addLlm: mutateAsync };
};
export const useDeleteLlm = () => {
const dispatch = useDispatch();
const deleteLlm = useCallback(
(requestBody: IDeleteLlmRequestBody) => {
return dispatch<any>({
type: 'settingModel/delete_llm',
payload: requestBody,
});
const queryClient = useQueryClient();
const { t } = useTranslation();
const {
data,
isPending: loading,
mutateAsync,
} = useMutation({
mutationKey: ['deleteLlm'],
mutationFn: async (params: IDeleteLlmRequestBody) => {
const { data } = await userService.delete_llm(params);
if (data.retcode === 0) {
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
message.success(t('message.deleted'));
}
return data.retcode;
},
[dispatch],
);
});
return deleteLlm;
return { data, loading, deleteLlm: mutateAsync };
};

View File

@@ -22,7 +22,6 @@ import { useTranslation } from 'react-i18next';
import { useDispatch } from 'umi';
import { useSetModalState, useTranslate } from './common-hooks';
import { useSetDocumentParser } from './document-hooks';
import { useFetchLlmList } from './llm-hooks';
import { useSetPaginationParams } from './route-hook';
import { useOneNamespaceEffectsLoading } from './store-hooks';
import {
@@ -346,13 +345,3 @@ export const useFetchModelId = (visible: boolean) => {
return tenantInfo?.llm_id ?? '';
};
export const useFetchLlmModelOnVisible = (visible: boolean) => {
const fetchLlmList = useFetchLlmList();
useEffect(() => {
if (visible) {
fetchLlmList();
}
}, [fetchLlmList, visible]);
};
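
With react-query the fetch runs when the component that calls the hook mounts, which is why `useFetchLlmModelOnVisible` can be deleted; the Chat page change further below instead mounts `ChatConfigurationModal` only while it is visible. A hedged sketch of that pattern (both components are illustrative):

```tsx
import { useFetchLlmList } from '@/hooks/llm-hooks';

// Hypothetical dialog: the query hook runs when the dialog mounts, so no
// visible-flag effect is required to trigger the fetch.
const ModelSettingsDialog = () => {
  const llmInfo = useFetchLlmList();
  return <div>{Object.keys(llmInfo).length} model families available</div>;
};

// Hypothetical parent: mount the dialog only while it should be shown.
const SettingsPage = ({ dialogVisible }: { dialogVisible: boolean }) => (
  <>{dialogVisible && <ModelSettingsDialog />}</>
);

export default SettingsPage;
```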

View File

@@ -4,3 +4,8 @@ export interface ResponseType<T = any> {
retmsg: string;
status: number;
}
export interface ResponseGetType<T = any> {
data: T;
loading?: boolean;
}
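
A small illustration of the new helper type: the query hooks return the fetched payload plus an optional loading flag, e.g. `useFetchLlmFactoryList` above is typed as `ResponseGetType<IFactory[]>` (the value below is a placeholder):

```ts
import { ResponseGetType } from '@/interfaces/database/base';
import { IFactory } from '@/interfaces/database/llm';

// Placeholder showing the shape a hook like useFetchLlmFactoryList returns.
const factoryListResult: ResponseGetType<IFactory[]> = {
  data: [],
  loading: false,
};
```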

View File

@@ -2,7 +2,7 @@ import {
useFetchKnowledgeBaseConfiguration,
useUpdateKnowledge,
} from '@/hooks/knowledge-hooks';
import { useFetchLlmList, useSelectLlmOptions } from '@/hooks/llm-hooks';
import { useSelectLlmOptions } from '@/hooks/llm-hooks';
import { useNavigateToDataset } from '@/hooks/route-hook';
import {
useFetchTenantInfo,
@@ -17,7 +17,6 @@ import { Form, UploadFile } from 'antd';
import { FormInstance } from 'antd/lib';
import pick from 'lodash/pick';
import { useCallback, useEffect } from 'react';
import { LlmModelType } from '../../constant';
export const useSubmitKnowledgeConfiguration = (form: FormInstance) => {
const { saveKnowledgeConfiguration, loading } = useUpdateKnowledge();
@@ -46,7 +45,6 @@ export const useFetchKnowledgeConfigurationOnMount = (form: FormInstance) => {
useFetchTenantInfo();
const { data: knowledgeDetails } = useFetchKnowledgeBaseConfiguration();
useFetchLlmList(LlmModelType.Embedding);
useEffect(() => {
const fileList: UploadFile[] = getUploadFileListFromBase64(

View File

@@ -1,12 +1,10 @@
import Rerank from '@/components/rerank';
import SimilaritySlider from '@/components/similarity-slider';
import { useTranslate } from '@/hooks/common-hooks';
import { useOneNamespaceEffectsLoading } from '@/hooks/store-hooks';
import { Button, Card, Divider, Flex, Form, Input } from 'antd';
import { FormInstance } from 'antd/lib';
import Rerank from '@/components/rerank';
import { useTranslate } from '@/hooks/common-hooks';
import { useFetchLlmList } from '@/hooks/llm-hooks';
import { useOneNamespaceEffectsLoading } from '@/hooks/store-hooks';
import { useEffect } from 'react';
import styles from './index.less';
type FieldType = {
@@ -26,11 +24,6 @@ const TestingControl = ({ form, handleTesting }: IProps) => {
'testDocumentChunk',
]);
const { t } = useTranslate('knowledgeDetails');
const fetchLlmList = useFetchLlmList();
useEffect(() => {
fetchLlmList();
}, [fetchLlmList]);
const buttonDisabled =
!question || (typeof question === 'string' && question.trim() === '');

View File

@@ -4,7 +4,11 @@ import {
ModelVariableType,
settledModelVariableMap,
} from '@/constants/knowledge';
import { useTranslate } from '@/hooks/common-hooks';
import { useFetchModelId } from '@/hooks/logic-hooks';
import { IDialog } from '@/interfaces/database/chat';
import { getBase64FromUploadFileList } from '@/utils/file-util';
import { removeUselessFieldsFromValues } from '@/utils/form';
import { Divider, Flex, Form, Modal, Segmented, UploadFile } from 'antd';
import { SegmentedValue } from 'antd/es/segmented';
import camelCase from 'lodash/camelCase';
@@ -14,13 +18,6 @@ import AssistantSetting from './assistant-setting';
import ModelSetting from './model-setting';
import PromptEngine from './prompt-engine';
import { useTranslate } from '@/hooks/common-hooks';
import {
useFetchLlmModelOnVisible,
useFetchModelId,
} from '@/hooks/logic-hooks';
import { getBase64FromUploadFileList } from '@/utils/file-util';
import { removeUselessFieldsFromValues } from '@/utils/form';
import styles from './index.less';
const layout = {
@@ -99,10 +96,6 @@ const ChatConfigurationModal = ({
onOk(finalValues);
};
const handleCancel = () => {
hideModal();
};
const handleSegmentedChange = (val: SegmentedValue) => {
setValue(val as ConfigurationSegmented);
};
@@ -112,8 +105,6 @@ const ChatConfigurationModal = ({
form.resetFields();
};
useFetchLlmModelOnVisible(visible);
const title = (
<Flex gap={16}>
<ChatConfigurationAtom></ChatConfigurationAtom>
@@ -153,7 +144,7 @@ const ChatConfigurationModal = ({
width={688}
open={visible}
onOk={handleOk}
onCancel={handleCancel}
onCancel={hideModal}
confirmLoading={loading}
destroyOnClose
afterClose={handleModalAfterClose}

View File

@@ -353,15 +353,17 @@ const Chat = () => {
</Flex>
<Divider type={'vertical'} className={styles.divider}></Divider>
<ChatContainer></ChatContainer>
<ChatConfigurationModal
visible={dialogEditVisible}
initialDialog={initialDialog}
showModal={showDialogEditModal}
hideModal={hideDialogEditModal}
loading={dialogSettingLoading}
onOk={onDialogEditOk}
clearDialog={clearDialog}
></ChatConfigurationModal>
{dialogEditVisible && (
<ChatConfigurationModal
visible={dialogEditVisible}
initialDialog={initialDialog}
showModal={showDialogEditModal}
hideModal={hideDialogEditModal}
loading={dialogSettingLoading}
onOk={onDialogEditOk}
clearDialog={clearDialog}
></ChatConfigurationModal>
)}
<RenameModal
visible={conversationRenameVisible}
hideModal={hideConversationRenameModal}

View File

@@ -1,6 +1,5 @@
import { useSetModalState } from '@/hooks/common-hooks';
import { useFetchFlow, useResetFlow, useSetFlow } from '@/hooks/flow-hooks';
import { useFetchLlmList } from '@/hooks/llm-hooks';
import { IGraph } from '@/interfaces/database/flow';
import { useIsFetching } from '@tanstack/react-query';
import React, {
@@ -283,8 +282,6 @@ export const useFetchDataOnMount = () => {
useWatchGraphChange();
useFetchLlmList();
useEffect(() => {
refetch();
}, [refetch]);

View File

@@ -4,12 +4,10 @@ import {
ISystemModelSettingSavingParams,
useAddLlm,
useDeleteLlm,
useFetchLlmList,
useSaveApiKey,
useSaveTenantInfo,
useSelectLlmOptionsByModelType,
} from '@/hooks/llm-hooks';
import { useOneNamespaceEffectsLoading } from '@/hooks/store-hooks';
import {
useFetchTenantInfo,
useSelectTenantInfo,
@@ -24,7 +22,7 @@ export const useSubmitApiKey = () => {
const [savingParams, setSavingParams] = useState<SavingParamsState>(
{} as SavingParamsState,
);
const saveApiKey = useSaveApiKey();
const { saveApiKey, loading } = useSaveApiKey();
const {
visible: apiKeyVisible,
hideModal: hideApiKeyModal,
@@ -53,10 +51,6 @@
[showApiKeyModal, setSavingParams],
);
const loading = useOneNamespaceEffectsLoading('settingModel', [
'set_api_key',
]);
return {
saveApiKeyLoading: loading,
initialApiKey: '',
@@ -70,10 +64,8 @@
export const useSubmitSystemModelSetting = () => {
const systemSetting = useSelectTenantInfo();
const loading = useOneNamespaceEffectsLoading('settingModel', [
'set_tenant_info',
]);
const saveSystemModelSetting = useSaveTenantInfo();
const { saveTenantInfo: saveSystemModelSetting, loading } =
useSaveTenantInfo();
const {
visible: systemSettingVisible,
hideModal: hideSystemSettingModal,
@@ -109,32 +101,20 @@ export const useSubmitSystemModelSetting = () => {
export const useFetchSystemModelSettingOnMount = (visible: boolean) => {
const systemSetting = useSelectTenantInfo();
const allOptions = useSelectLlmOptionsByModelType();
const fetchLlmList = useFetchLlmList();
const fetchTenantInfo = useFetchTenantInfo();
useEffect(() => {
if (visible) {
fetchLlmList();
fetchTenantInfo();
}
}, [fetchLlmList, fetchTenantInfo, visible]);
}, [fetchTenantInfo, visible]);
return { systemSetting, allOptions };
};
export const useSelectModelProvidersLoading = () => {
const loading = useOneNamespaceEffectsLoading('settingModel', [
'my_llm',
'factories_list',
]);
return loading;
};
export const useSubmitOllama = () => {
const loading = useOneNamespaceEffectsLoading('settingModel', ['add_llm']);
const [selectedLlmFactory, setSelectedLlmFactory] = useState<string>('');
const addLlm = useAddLlm();
const { addLlm, loading } = useAddLlm();
const {
visible: llmAddingVisible,
hideModal: hideLlmAddingModal,
@@ -167,8 +147,7 @@ export const useSubmitOllama = () => {
};
export const useSubmitVolcEngine = () => {
const loading = useOneNamespaceEffectsLoading('settingModel', ['add_llm']);
const addLlm = useAddLlm();
const { addLlm, loading } = useAddLlm();
const {
visible: volcAddingVisible,
hideModal: hideVolcAddingModal,
@@ -195,8 +174,7 @@ export const useSubmitVolcEngine = () => {
};
export const useSubmitBedrock = () => {
const loading = useOneNamespaceEffectsLoading('settingModel', ['add_llm']);
const addLlm = useAddLlm();
const { addLlm, loading } = useAddLlm();
const {
visible: bedrockAddingVisible,
hideModal: hideBedrockAddingModal,
@@ -223,7 +201,7 @@ export const useSubmitBedrock = () => {
};
export const useHandleDeleteLlm = (llmFactory: string) => {
const deleteLlm = useDeleteLlm();
const { deleteLlm } = useDeleteLlm();
const showDeleteConfirm = useShowDeleteConfirm();
const handleDeleteLlm = (name: string) => () => {

View File

@@ -1,11 +1,7 @@
import { ReactComponent as MoreModelIcon } from '@/assets/svg/more-model.svg';
import SvgIcon from '@/components/svg-icon';
import { useSetModalState, useTranslate } from '@/hooks/common-hooks';
import {
LlmItem,
useFetchLlmFactoryListOnMount,
useFetchMyLlmListOnMount,
} from '@/hooks/llm-hooks';
import { LlmItem, useSelectLlmList } from '@/hooks/llm-hooks';
import {
CloseCircleOutlined,
SettingOutlined,
@@ -36,7 +32,6 @@ import BedrockModal from './bedrock-modal';
import { IconMap } from './constant';
import {
useHandleDeleteLlm,
useSelectModelProvidersLoading,
useSubmitApiKey,
useSubmitBedrock,
useSubmitOllama,
@@ -132,9 +127,7 @@ const ModelCard = ({ item, clickApiKey }: IModelCardProps) => {
};
const UserSettingModel = () => {
const factoryList = useFetchLlmFactoryListOnMount();
const llmList = useFetchMyLlmListOnMount();
const loading = useSelectModelProvidersLoading();
const { factoryList, myLlmList: llmList, loading } = useSelectLlmList();
const {
saveApiKeyLoading,
initialApiKey,