Fix: After deleting all conversation lists, the chat input box can still be used for input. #4907 (#4909)
### What problem does this PR solve?

Fix: After deleting all conversation lists, the chat input box can still be used for input. #4907

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
parent 7f06712a30
commit d599707154
```diff
@@ -1,6 +1,5 @@
-import { LlmModelType } from '@/constants/knowledge';
-import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
-import { useMemo } from 'react';
+import { getLLMIconName, getLlmNameAndFIdByLlmId } from '@/utils/llm-util';
+import { LlmIcon } from '../svg-icon';
 
 interface IProps {
   id?: string;
@@ -10,22 +9,19 @@ interface IProps {
 }
 
 const LLMLabel = ({ value }: IProps) => {
-  const modelOptions = useComposeLlmOptionsByModelTypes([
-    LlmModelType.Chat,
-    LlmModelType.Image2text,
-  ]);
+  const { llmName, fId } = getLlmNameAndFIdByLlmId(value);
 
-  const label = useMemo(() => {
-    for (const item of modelOptions) {
-      for (const option of item.options) {
-        if (option.value === value) {
-          return option.label;
-        }
-      }
-    }
-  }, [modelOptions, value]);
-
-  return <div>{label}</div>;
+  return (
+    <div className="flex items-center gap-1">
+      <LlmIcon
+        name={getLLMIconName(fId, llmName)}
+        width={20}
+        height={20}
+        size={'small'}
+      />
+      {llmName}
+    </div>
+  );
 };
 
 export default LLMLabel;
```
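After this change, `LLMLabel` no longer scans the composed model options with `useMemo`; it derives the display name and icon directly from the composite model id. A hypothetical usage sketch, where the import path and the concrete id string are assumptions for illustration only:

```tsx
// Hypothetical usage of the reworked LLMLabel; the import path is assumed.
import LLMLabel from '@/components/llm-select/llm-label';

export function SelectedModelBadge() {
  // Composite ids have the shape '<llm_name>@<factory>' (see getLlmNameAndFIdByLlmId below).
  return <LLMLabel value="deepseek-chat@DeepSeek" />;
}
```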
```diff
@@ -54,7 +54,11 @@ const LlmSettingItems = ({ prefix, formItemLayout = {} }: IProps) => {
         {...formItemLayout}
         rules={[{ required: true, message: t('modelMessage') }]}
       >
-        <Select options={modelOptions} showSearch />
+        <Select
+          options={modelOptions}
+          showSearch
+          popupMatchSelectWidth={false}
+        />
       </Form.Item>
       <div className="border rounded-md">
         <div className="flex justify-between bg-slate-100 p-2 mb-2">
```
```diff
@@ -253,8 +253,12 @@ export const useFetchNextConversationList = () => {
     enabled: !!dialogId,
     queryFn: async () => {
       const { data } = await chatService.listConversation({ dialogId });
-      if (data.code === 0 && data.data.length > 0) {
-        handleClickConversation(data.data[0].id, '');
+      if (data.code === 0) {
+        if (data.data.length > 0) {
+          handleClickConversation(data.data[0].id, '');
+        } else {
+          handleClickConversation('', '');
+        }
       }
       return data?.data;
     },
```
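This hunk is the core of the fix: when the conversation list comes back empty, the selected conversation id is cleared instead of being left stale, so the chat UI can tell there is nothing to send to. A minimal sketch of how an input might key off that id; the `ChatInput` component and its props are assumptions, not the project's actual code:

```tsx
// Illustrative only: a chat input that disables itself when no conversation is selected.
interface ChatInputProps {
  conversationId: string; // '' after the last conversation is deleted
  onSend: (text: string) => void;
}

export function ChatInput({ conversationId, onSend }: ChatInputProps) {
  const disabled = conversationId === '';
  return (
    <textarea
      disabled={disabled}
      placeholder={disabled ? 'Create a conversation first' : 'Type a message'}
      onKeyDown={(e) => {
        if (!disabled && e.key === 'Enter') {
          onSend(e.currentTarget.value);
        }
      }}
    />
  );
}
```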
```diff
@@ -13,6 +13,7 @@ import {
 } from '@/interfaces/request/llm';
 import userService from '@/services/user-service';
 import { sortLLmFactoryListBySpecifiedOrder } from '@/utils/common-util';
+import { getLLMIconName } from '@/utils/llm-util';
 import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
 import { Flex, message } from 'antd';
 import { DefaultOptionType } from 'antd/es/select';
@@ -54,14 +55,6 @@ export const useSelectLlmOptions = () => {
   return embeddingModelOptions;
 };
 
-const getLLMIconName = (fid: string, llm_name: string) => {
-  if (fid === 'FastEmbed') {
-    return llm_name.split('/').at(0) ?? '';
-  }
-
-  return fid;
-};
-
 export const useSelectLlmOptionsByModelType = () => {
   const llmInfo: IThirdOAIModelCollection = useFetchLlmList();
 
```
web/src/utils/llm-util.ts (new file, 13 lines)
```diff
@@ -0,0 +1,13 @@
+export const getLLMIconName = (fid: string, llm_name: string) => {
+  if (fid === 'FastEmbed') {
+    return llm_name.split('/').at(0) ?? '';
+  }
+
+  return fid;
+};
+
+export const getLlmNameAndFIdByLlmId = (llmId?: string) => {
+  const [llmName, fId] = llmId?.split('@') || [];
+
+  return { fId, llmName };
+};
```
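The two helpers in the new file operate on composite model ids of the shape `<llm_name>@<factory>` (implied by the `split('@')`). A small usage sketch; the concrete id strings are illustrative:

```ts
import { getLLMIconName, getLlmNameAndFIdByLlmId } from '@/utils/llm-util';

// Split an illustrative composite id into its model name and factory id.
const { llmName, fId } = getLlmNameAndFIdByLlmId('BAAI/bge-small-en-v1.5@FastEmbed');
console.log(llmName); // 'BAAI/bge-small-en-v1.5'
console.log(fId);     // 'FastEmbed'

// For FastEmbed models the icon name is the namespace before the first '/',
// otherwise it is simply the factory id.
console.log(getLLMIconName(fId, llmName));        // 'BAAI'
console.log(getLLMIconName('OpenAI', 'gpt-4o'));  // 'OpenAI'
```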
```diff
@@ -169,4 +169,9 @@
   h4 {
     @apply text-base font-normal;
   }
+
+  img,
+  video {
+    max-width: none;
+  }
 }
```