@@ -46,7 +46,7 @@ import { fetchDatasets } from '@/service/datasets'
 import { useProviderContext } from '@/context/provider-context'
 import { AgentStrategy, AppType, ModelModeType, RETRIEVE_TYPE, Resolution, TransferMethod } from '@/types/app'
 import { PromptMode } from '@/models/debug'
-import { ANNOTATION_DEFAULT, DEFAULT_AGENT_SETTING, DEFAULT_CHAT_PROMPT_CONFIG, DEFAULT_COMPLETION_PROMPT_CONFIG, supportFunctionCallModels } from '@/config'
+import { ANNOTATION_DEFAULT, DEFAULT_AGENT_SETTING, DEFAULT_CHAT_PROMPT_CONFIG, DEFAULT_COMPLETION_PROMPT_CONFIG } from '@/config'
 import SelectDataSet from '@/app/components/app/configuration/dataset-config/select-dataset'
 import { useModalContext } from '@/context/modal-context'
 import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
@@ -157,6 +157,7 @@ const Configuration: FC = () => {
     dataSets: [],
     agentConfig: DEFAULT_AGENT_SETTING,
   })
   const isChatApp = mode === AppType.chat
   const isAgent = modelConfig.agentConfig?.enabled
   const setIsAgent = (value: boolean) => {
@@ -166,7 +167,7 @@ const Configuration: FC = () => {
     doSetModelConfig(newModelConfig)
   }
   const isOpenAI = modelConfig.provider === 'openai'
-  const isFunctionCall = (isOpenAI && modelConfig.mode === ModelModeType.chat) || supportFunctionCallModels.includes(modelConfig.model_id)
   const [collectionList, setCollectionList] = useState<Collection[]>([])
   useEffect(() => {
@@ -262,6 +263,13 @@ const Configuration: FC = () => {
     },
   )
+  const isFunctionCall = (() => {
+    const features = currModel?.features
+    if (!features)
+      return false
+    return features.includes(ModelFeatureEnum.toolCall) || features.includes(ModelFeatureEnum.multiToolCall)
+  })()
   // Fill old app data missing model mode.
   useEffect(() => {
     if (hasFetchedDetail && !modelModeType) {
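Note on the isFunctionCall change above: support is now read from the features the currently selected model declares (ModelFeatureEnum.toolCall / ModelFeatureEnum.multiToolCall) instead of the hardcoded supportFunctionCallModels list this PR deletes from '@/config'. A minimal sketch of the same check pulled out as a reusable helper; the helper name and the structural HasFeatures type are illustrative and not part of this diff, while the enum import path is the one already used by the provider-context changes below:

```ts
import { ModelFeatureEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'

// Illustrative shape: the model entry only needs the optional `features` array
// that the hunk above reads via `currModel?.features`.
type HasFeatures = { features?: ModelFeatureEnum[] }

// A model supports function calling when it declares either the single
// tool-call feature or the multi tool-call feature.
const supportsFunctionCall = (model?: HasFeatures): boolean => {
  const features = model?.features
  if (!features)
    return false
  return features.includes(ModelFeatureEnum.toolCall) || features.includes(ModelFeatureEnum.multiToolCall)
}
```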
@@ -153,20 +153,6 @@ export const useTextGenerationCurrentProviderAndModelAndModelList = (defaultMode
   }
 }
-export const useAgentThoughtCurrentProviderAndModelAndModelList = (defaultModel?: DefaultModel) => {
-  const { agentThoughtModelList } = useProviderContext()
-  const {
-    currentProvider,
-    currentModel,
-  } = useCurrentProviderAndModel(agentThoughtModelList, defaultModel)
-  return {
-    currentProvider,
-    currentModel,
-    agentThoughtModelList,
-  }
-}
 export const useModelListAndDefaultModel = (type: ModelTypeIndex) => {
   const { data: modelList } = useModelList(type)
   const { data: defaultModel } = useDefaultModel(type)
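The hunk above drops the dedicated useAgentThoughtCurrentProviderAndModelAndModelList hook together with the context-provided agentThoughtModelList it consumed. How former callers are rewritten is not shown in this section; one plausible migration, assuming the surviving useTextGenerationCurrentProviderAndModelAndModelList hook in the same file also returns a currentModel carrying a features array, is to check the agent-thought feature directly (the hook name below is hypothetical):

```ts
import { ModelFeatureEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
// Assumed location: the same hooks module this hunk edits.
import { useTextGenerationCurrentProviderAndModelAndModelList } from '@/app/components/header/account-setting/model-provider-page/hooks'

// Hypothetical caller-side replacement: instead of consuming a pre-filtered
// agent-thought model list, read the current text-generation model and gate
// agent behaviour on its declared features.
const useIsAgentThoughtModel = () => {
  const { currentModel } = useTextGenerationCurrentProviderAndModelAndModelList()
  return !!currentModel?.features?.includes(ModelFeatureEnum.agentThought)
}
```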
@@ -139,8 +139,6 @@ export const DEFAULT_AGENT_SETTING = {
   tools: [],
 }
-export const supportFunctionCallModels = ['glm-3-turbo', 'glm-4']
 export const DEFAULT_AGENT_PROMPT = {
   chat: `Respond to the human as helpfully and accurately as possible.
@@ -2,14 +2,13 @@
 import { createContext, useContext } from 'use-context-selector'
 import useSWR from 'swr'
-import { useEffect, useMemo, useState } from 'react'
+import { useEffect, useState } from 'react'
 import {
   fetchModelList,
   fetchModelProviders,
   fetchSupportRetrievalMethods,
 } from '@/service/common'
 import {
-  ModelFeatureEnum,
   ModelStatusEnum,
   ModelTypeEnum,
 } from '@/app/components/header/account-setting/model-provider-page/declarations'
@@ -23,7 +22,6 @@ import { defaultPlan } from '@/app/components/billing/config'
 const ProviderContext = createContext<{
   modelProviders: ModelProvider[]
   textGenerationModelList: Model[]
-  agentThoughtModelList: Model[]
   supportRetrievalMethods: RETRIEVE_METHOD[]
   hasSettedApiKey: boolean
   plan: {
@@ -38,7 +36,6 @@ const ProviderContext = createContext<{
 }>({
   modelProviders: [],
   textGenerationModelList: [],
-  agentThoughtModelList: [],
   supportRetrievalMethods: [],
   hasSettedApiKey: true,
   plan: {
@@ -75,26 +72,6 @@ export const ProviderContextProvider = ({
   const { data: textGenerationModelList } = useSWR(`${fetchModelListUrlPrefix}${ModelTypeEnum.textGeneration}`, fetchModelList)
   const { data: supportRetrievalMethods } = useSWR('/datasets/retrieval-setting', fetchSupportRetrievalMethods)
-  const agentThoughtModelList = useMemo(() => {
-    const result: Model[] = []
-    if (textGenerationModelList?.data) {
-      textGenerationModelList?.data.forEach((item) => {
-        const agentThoughtModels = item.models.filter(model => model.features?.includes(ModelFeatureEnum.agentThought))
-        if (agentThoughtModels.length) {
-          result.push({
-            ...item,
-            models: agentThoughtModels,
-          })
-        }
-      })
-      return result
-    }
-    return []
-  }, [textGenerationModelList])
   const [plan, setPlan] = useState(defaultPlan)
   const [isFetchedPlan, setIsFetchedPlan] = useState(false)
   const [enableBilling, setEnableBilling] = useState(true)
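With the context-level agentThoughtModelList and its useMemo filter removed above, any consumer that still needs a provider-grouped list of agent-capable models has to derive it from textGenerationModelList itself. A hedged sketch of that derivation, mirroring the deleted filter; the hook name is illustrative, and the Model type is assumed to come from the same declarations module this file imports its enums from:

```ts
import { useMemo } from 'react'
import { useProviderContext } from '@/context/provider-context'
import { ModelFeatureEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import type { Model } from '@/app/components/header/account-setting/model-provider-page/declarations'

// Illustrative consumer-side equivalent of the removed useMemo: keep only the
// providers that still have at least one model declaring the agent-thought feature.
const useAgentThoughtCapableModelList = (): Model[] => {
  const { textGenerationModelList } = useProviderContext()
  return useMemo(() =>
    textGenerationModelList
      .map(provider => ({
        ...provider,
        models: provider.models.filter(model => model.features?.includes(ModelFeatureEnum.agentThought)),
      }))
      .filter(provider => provider.models.length > 0),
  [textGenerationModelList])
}
```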
@@ -118,7 +95,6 @@ export const ProviderContextProvider = ({
     <ProviderContext.Provider value={{
       modelProviders: providersData?.data || [],
       textGenerationModelList: textGenerationModelList?.data || [],
-      agentThoughtModelList,
       hasSettedApiKey: !!textGenerationModelList?.data.some(model => model.status === ModelStatusEnum.active),
       supportRetrievalMethods: supportRetrievalMethods?.retrieval_method || [],
       plan,