### What problem does this PR solve?

Fix: After deleting all conversation lists, the chat input box can still be used for input. #4907

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
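The diff does two related things: the conversation-list query now explicitly clears the selected conversation when the list comes back empty (so the chat view no longer keeps a deleted conversation active), and the model-label rendering is refactored so that `LLMLabel` derives its display name and icon directly from the model id via helpers shared through `@/utils/llm-util`.

In `LLMLabel`, the lookup through the composed model options is replaced by parsing the id with `getLlmNameAndFIdByLlmId` and rendering an `LlmIcon` next to the name: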
```diff
-import { LlmModelType } from '@/constants/knowledge';
-import { useComposeLlmOptionsByModelTypes } from '@/hooks/llm-hooks';
-import { useMemo } from 'react';
+import { getLLMIconName, getLlmNameAndFIdByLlmId } from '@/utils/llm-util';
+import { LlmIcon } from '../svg-icon';

 interface IProps {
   id?: string;
 }

 const LLMLabel = ({ value }: IProps) => {
-  const modelOptions = useComposeLlmOptionsByModelTypes([
-    LlmModelType.Chat,
-    LlmModelType.Image2text,
-  ]);
-
-  const label = useMemo(() => {
-    for (const item of modelOptions) {
-      for (const option of item.options) {
-        if (option.value === value) {
-          return option.label;
-        }
-      }
-    }
-  }, [modelOptions, value]);
-
-  return <div>{label}</div>;
+  const { llmName, fId } = getLlmNameAndFIdByLlmId(value);
+
+  return (
+    <div className="flex items-center gap-1">
+      <LlmIcon
+        name={getLLMIconName(fId, llmName)}
+        width={20}
+        height={20}
+        size={'small'}
+      />
+      {llmName}
+    </div>
+  );
 };

 export default LLMLabel;
```
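The form item that selects the model also gets `popupMatchSelectWidth={false}` on its `Select`, so the dropdown is no longer constrained to the input width (likely to give the icon-plus-name labels room to render):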
```diff
         {...formItemLayout}
         rules={[{ required: true, message: t('modelMessage') }]}
       >
-        <Select options={modelOptions} showSearch />
+        <Select
+          options={modelOptions}
+          showSearch
+          popupMatchSelectWidth={false}
+        />
       </Form.Item>
       <div className="border rounded-md">
         <div className="flex justify-between bg-slate-100 p-2 mb-2">
```
```diff
     enabled: !!dialogId,
     queryFn: async () => {
       const { data } = await chatService.listConversation({ dialogId });
-      if (data.code === 0 && data.data.length > 0) {
-        handleClickConversation(data.data[0].id, '');
+      if (data.code === 0) {
+        if (data.data.length > 0) {
+          handleClickConversation(data.data[0].id, '');
+        } else {
+          handleClickConversation('', '');
+        }
       }
       return data?.data;
     },
```
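`handleClickConversation('', '')` resets the conversation id to an empty string, and the chat input can key off that. The wiring on the input side is not part of this hunk; the sketch below is only an illustration of the idea, assuming a plain `conversationId` prop and a derived `disabled` flag (the component and prop names are made up, not RAGFlow's actual ones):

```tsx
import { useState } from 'react';

// Illustrative sketch, not the actual RAGFlow component: shows how an empty
// conversation id can drive the disabled state of the chat input.
interface IMessageInputProps {
  conversationId: string;
  onSend: (content: string) => void;
}

const MessageInput = ({ conversationId, onSend }: IMessageInputProps) => {
  const [content, setContent] = useState('');
  // After the fix, conversationId is reset to '' once the last conversation
  // is deleted, so the textarea and the send button are disabled together.
  const disabled = conversationId === '';

  return (
    <div className="flex items-center gap-2">
      <textarea
        disabled={disabled}
        value={content}
        onChange={(e) => setContent(e.target.value)}
        placeholder="Type a message"
      />
      <button disabled={disabled || !content} onClick={() => onSend(content)}>
        Send
      </button>
    </div>
  );
};

export default MessageInput;
```

In the LLM hooks, the local `getLLMIconName` helper is dropped in favor of the shared version now imported from `@/utils/llm-util`: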
```diff
 } from '@/interfaces/request/llm';
 import userService from '@/services/user-service';
 import { sortLLmFactoryListBySpecifiedOrder } from '@/utils/common-util';
+import { getLLMIconName } from '@/utils/llm-util';
 import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query';
 import { Flex, message } from 'antd';
 import { DefaultOptionType } from 'antd/es/select';
```

and, further down in the same hooks file:

```diff
   return embeddingModelOptions;
 };

-const getLLMIconName = (fid: string, llm_name: string) => {
-  if (fid === 'FastEmbed') {
-    return llm_name.split('/').at(0) ?? '';
-  }
-  return fid;
-};
-
 export const useSelectLlmOptionsByModelType = () => {
   const llmInfo: IThirdOAIModelCollection = useFetchLlmList();
```
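`@/utils/llm-util` gains the relocated `getLLMIconName` (now exported) plus a new `getLlmNameAndFIdByLlmId` helper that splits a model id of the form `name@factory` into its two parts: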
```diff
+export const getLLMIconName = (fid: string, llm_name: string) => {
+  if (fid === 'FastEmbed') {
+    return llm_name.split('/').at(0) ?? '';
+  }
+  return fid;
+};
+
+export const getLlmNameAndFIdByLlmId = (llmId?: string) => {
+  const [llmName, fId] = llmId?.split('@') || [];
+  return { fId, llmName };
+};
```
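For example, with a hypothetical id (made up here purely to show the `name@factory` split):

```ts
// Hypothetical model id, for illustration only.
const { llmName, fId } = getLlmNameAndFIdByLlmId('deepseek-chat@DeepSeek');
// llmName === 'deepseek-chat', fId === 'DeepSeek'

// Without an id, both fields are undefined, so callers should tolerate that.
getLlmNameAndFIdByLlmId(); // { fId: undefined, llmName: undefined }
```

Finally, a small stylesheet tweak opts `img` and `video` out of the inherited `max-width` limit (Tailwind's preflight caps them at 100% of their container), presumably so icons and embedded media keep their intrinsic size: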
```diff
   h4 {
     @apply text-base font-normal;
   }
+
+  img,
+  video {
+    max-width: none;
+  }
 }
```