Fix: After deleting all conversations, the chat input box can still be used for input. #4907 (#4909)

### What problem does this PR solve?

Fix: After deleting all conversations, the chat input box can still
be used for input. #4907

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
balibabu 2025-02-12 16:54:14 +08:00 committed by GitHub
parent 7f06712a30
commit d599707154
6 changed files with 44 additions and 29 deletions


@@ -1,6 +1,5 @@
-import { LlmModelType } from '@/constants/knowledge';
-import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
-import { useMemo } from 'react';
+import { getLLMIconName, getLlmNameAndFIdByLlmId } from '@/utils/llm-util';
+import { LlmIcon } from '../svg-icon';
 
 interface IProps {
   id?: string;
@@ -10,22 +9,19 @@ interface IProps {
 }
 
 const LLMLabel = ({ value }: IProps) => {
-  const modelOptions = useComposeLlmOptionsByModelTypes([
-    LlmModelType.Chat,
-    LlmModelType.Image2text,
-  ]);
-
-  const label = useMemo(() => {
-    for (const item of modelOptions) {
-      for (const option of item.options) {
-        if (option.value === value) {
-          return option.label;
-        }
-      }
-    }
-  }, [modelOptions, value]);
+  const { llmName, fId } = getLlmNameAndFIdByLlmId(value);
 
-  return <div>{label}</div>;
+  return (
+    <div className="flex items-center gap-1">
+      <LlmIcon
+        name={getLLMIconName(fId, llmName)}
+        width={20}
+        height={20}
+        size={'small'}
+      />
+      {llmName}
+    </div>
+  );
 };
 
 export default LLMLabel;
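The refactor above drops the `useMemo` scan over `useComposeLlmOptionsByModelTypes` and instead derives the factory id and model name straight from the composite model id. A minimal usage sketch, assuming ids of the form `<model>@<factory>` (the concrete id and import path below are illustrative, not from this diff):

```tsx
import LLMLabel from '@/components/llm-label'; // hypothetical import path

// 'deepseek-chat@DeepSeek' splits into llmName 'deepseek-chat' and fId 'DeepSeek',
// so the component renders a DeepSeek icon followed by the bare model name,
// with no option-list lookup required.
const Demo = () => <LLMLabel value="deepseek-chat@DeepSeek" />;
```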


@@ -54,7 +54,11 @@ const LlmSettingItems = ({ prefix, formItemLayout = {} }: IProps) => {
         {...formItemLayout}
         rules={[{ required: true, message: t('modelMessage') }]}
       >
-        <Select options={modelOptions} showSearch />
+        <Select
+          options={modelOptions}
+          showSearch
+          popupMatchSelectWidth={false}
+        />
       </Form.Item>
       <div className="border rounded-md">
         <div className="flex justify-between bg-slate-100 p-2 mb-2">


@@ -253,8 +253,12 @@ export const useFetchNextConversationList = () => {
     enabled: !!dialogId,
     queryFn: async () => {
       const { data } = await chatService.listConversation({ dialogId });
-      if (data.code === 0 && data.data.length > 0) {
-        handleClickConversation(data.data[0].id, '');
+      if (data.code === 0) {
+        if (data.data.length > 0) {
+          handleClickConversation(data.data[0].id, '');
+        } else {
+          handleClickConversation('', '');
+        }
       }
       return data?.data;
     },
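This hunk is the actual fix for #4907: previously the empty-list case was never handled, so after the last conversation was deleted the stale `conversationId` kept the chat input active. Calling `handleClickConversation('', '')` clears the selection, letting the input disable itself. A minimal sketch of a consumer, assuming the input is driven by the selected id (hypothetical component, not part of this diff):

```tsx
import { Input } from 'antd';

// Hypothetical consumer: once the conversation id is reset to '', the
// input disables itself instead of accepting messages with no target.
const ChatInput = ({ conversationId }: { conversationId: string }) => (
  <Input.TextArea
    disabled={!conversationId} // '' after all conversations are deleted
    placeholder={conversationId ? 'Type a message…' : 'No conversation selected'}
  />
);
```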


@@ -13,6 +13,7 @@ import {
 } from '@/interfaces/request/llm';
 import userService from '@/services/user-service';
 import { sortLLmFactoryListBySpecifiedOrder } from '@/utils/common-util';
+import { getLLMIconName } from '@/utils/llm-util';
 import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
 import { Flex, message } from 'antd';
 import { DefaultOptionType } from 'antd/es/select';
@@ -54,14 +55,6 @@ export const useSelectLlmOptions = () => {
   return embeddingModelOptions;
 };
 
-const getLLMIconName = (fid: string, llm_name: string) => {
-  if (fid === 'FastEmbed') {
-    return llm_name.split('/').at(0) ?? '';
-  }
-
-  return fid;
-};
-
 export const useSelectLlmOptionsByModelType = () => {
   const llmInfo: IThirdOAIModelCollection = useFetchLlmList();
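The two hunks above move `getLLMIconName` out of `llm-hooks.ts` and re-import it from the new shared `web/src/utils/llm-util.ts` (next file), so `LLMLabel` can use the helper without depending on the hooks module.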

web/src/utils/llm-util.ts (new file)

@@ -0,0 +1,13 @@
+export const getLLMIconName = (fid: string, llm_name: string) => {
+  if (fid === 'FastEmbed') {
+    return llm_name.split('/').at(0) ?? '';
+  }
+
+  return fid;
+};
+
+export const getLlmNameAndFIdByLlmId = (llmId?: string) => {
+  const [llmName, fId] = llmId?.split('@') || [];
+
+  return { fId, llmName };
+};
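A quick usage sketch of the two helpers; the concrete model ids are illustrative and assume the `<model>@<factory>` format implied by the `split('@')`:

```ts
import { getLLMIconName, getLlmNameAndFIdByLlmId } from '@/utils/llm-util';

// A composite id splits into the model name and the factory id:
const { llmName, fId } = getLlmNameAndFIdByLlmId('deepseek-chat@DeepSeek');
// llmName === 'deepseek-chat', fId === 'DeepSeek'

// FastEmbed models are namespaced ('<org>/<model>'), so the icon name falls
// back to the org prefix; every other factory uses its own id:
getLLMIconName('FastEmbed', 'BAAI/bge-small-en-v1.5'); // => 'BAAI'
getLLMIconName('DeepSeek', 'deepseek-chat'); // => 'DeepSeek'

// An undefined id degrades to undefined fields rather than throwing:
getLlmNameAndFIdByLlmId(undefined); // => { fId: undefined, llmName: undefined }
```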


@@ -169,4 +169,9 @@
   h4 {
     @apply text-base font-normal;
   }
+
+  img,
+  video {
+    max-width: none;
+  }
 }
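Design note (presumed rationale): Tailwind's preflight applies `max-width: 100%` to `img` and `video`, which scales media down inside narrow containers; resetting it to `none` within this layer keeps elements such as the newly added LLM icons at their intrinsic size.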