Bläddra i källkod

Fix: After deleting all conversation lists, the chat input box can still be used for input. #4907 (#4909)

### What problem does this PR solve?

Fix: After deleting all conversation lists, the chat input box can still
be used for input. #4907

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
tags/v0.17.0
balibabu 8 månader sedan
förälder
incheckning
d599707154
Inget konto är kopplat till bidragsgivarens mejladress

+ 14
- 18
web/src/components/llm-select/llm-label.tsx Visa fil

@@ -1,6 +1,5 @@
import { LlmModelType } from '@/constants/knowledge';
import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
import { useMemo } from 'react';
import { getLLMIconName, getLlmNameAndFIdByLlmId } from '@/utils/llm-util';
import { LlmIcon } from '../svg-icon';

interface IProps {
id?: string;
@@ -10,22 +9,19 @@ interface IProps {
}

/**
 * Renders a label for a selected LLM: the provider icon followed by the
 * model name. The composite llm id has the form '<llmName>@<fId>'.
 *
 * NOTE: the previous implementation scanned the composed model options
 * with useMemo to find the matching option label; that code path was left
 * in as unreachable dead code above the new return and is removed here.
 */
const LLMLabel = ({ value }: IProps) => {
  // Derive the model name and factory id from the composite id; the
  // factory id (fId) selects which provider icon to show.
  const { llmName, fId } = getLlmNameAndFIdByLlmId(value);

  return (
    <div className="flex items-center gap-1">
      <LlmIcon
        name={getLLMIconName(fId, llmName)}
        width={20}
        height={20}
        size={'small'}
      />
      {llmName}
    </div>
  );
};

export default LLMLabel;

+ 5
- 1
web/src/components/llm-setting-items/index.tsx Visa fil

@@ -54,7 +54,11 @@ const LlmSettingItems = ({ prefix, formItemLayout = {} }: IProps) => {
{...formItemLayout}
rules={[{ required: true, message: t('modelMessage') }]}
>
<Select options={modelOptions} showSearch />
<Select
options={modelOptions}
showSearch
popupMatchSelectWidth={false}
/>
</Form.Item>
<div className="border rounded-md">
<div className="flex justify-between bg-slate-100 p-2 mb-2">

+ 6
- 2
web/src/hooks/chat-hooks.ts Visa fil

@@ -253,8 +253,12 @@ export const useFetchNextConversationList = () => {
enabled: !!dialogId,
queryFn: async () => {
const { data } = await chatService.listConversation({ dialogId });
if (data.code === 0 && data.data.length > 0) {
handleClickConversation(data.data[0].id, '');
if (data.code === 0) {
if (data.data.length > 0) {
handleClickConversation(data.data[0].id, '');
} else {
handleClickConversation('', '');
}
}
return data?.data;
},

+ 1
- 8
web/src/hooks/llm-hooks.tsx Visa fil

@@ -13,6 +13,7 @@ import {
} from '@/interfaces/request/llm';
import userService from '@/services/user-service';
import { sortLLmFactoryListBySpecifiedOrder } from '@/utils/common-util';
import { getLLMIconName } from '@/utils/llm-util';
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
import { Flex, message } from 'antd';
import { DefaultOptionType } from 'antd/es/select';
@@ -54,14 +55,6 @@ export const useSelectLlmOptions = () => {
return embeddingModelOptions;
};

// Icon lookup key for a model: FastEmbed model names are namespaced as
// '<provider>/<model>', so the provider segment is used; every other
// factory is identified by its factory id directly.
const getLLMIconName = (fid: string, llm_name: string) => {
  return fid === 'FastEmbed' ? (llm_name.split('/')[0] ?? '') : fid;
};

export const useSelectLlmOptionsByModelType = () => {
const llmInfo: IThirdOAIModelCollection = useFetchLlmList();


+ 13
- 0
web/src/utils/llm-util.ts Visa fil

@@ -0,0 +1,13 @@
/**
 * Resolve the icon name for a model. Most factories use the factory id
 * (`fid`) directly; FastEmbed model names are namespaced as
 * '<provider>/<model>', so the first path segment is used instead.
 */
export const getLLMIconName = (fid: string, llm_name: string) => {
  if (fid !== 'FastEmbed') {
    return fid;
  }
  const [provider] = llm_name.split('/');
  return provider ?? '';
};

/**
 * Split a composite llm id of the form '<llmName>@<fId>' into its parts.
 * Either part is undefined when the id is missing or has no '@' separator.
 */
export const getLlmNameAndFIdByLlmId = (llmId?: string) => {
  const parts = llmId?.split('@') ?? [];
  return { fId: parts[1], llmName: parts[0] };
};

+ 5
- 0
web/tailwind.css Visa fil

@@ -169,4 +169,9 @@
h4 {
@apply text-base font-normal;
}

img,
video {
max-width: none;
}
}

Laddar…
Avbryt
Spara