瀏覽代碼

Feat: Add model editing functionality with improved UI labels (#8855)

### What problem does this PR solve?

Add edit button for local LLM models
<img width="1531" height="1428" alt="image"
src="https://github.com/user-attachments/assets/19d62255-59a6-4a7e-9772-8b8743101f78"
/>

<img width="1531" height="1428" alt="image"
src="https://github.com/user-attachments/assets/c3a0f77e-cc6b-4190-95a6-13835463428b"
/>



### Type of change

- [ ] Bug Fix (non-breaking change which fixes an issue)
- [x] New Feature (non-breaking change which adds functionality)
- [ ] Documentation Update
- [ ] Refactoring
- [ ] Performance Improvement
- [ ] Other (please describe):

---------

Co-authored-by: Liu An <asiro@qq.com>
tags/v0.20.0
Adrian Altermatt 3 月之前
父節點
當前提交
6691532079
沒有連結到貢獻者的電子郵件帳戶。

+ 44
- 12
api/apps/llm_app.py 查看文件

@@ -312,23 +312,55 @@ def delete_factory():
@login_required
def my_llms():
    """Return the current user's configured LLMs, grouped by factory name.

    Query parameter ``include_details`` ("true"/"false", default "false"):
    when true, each model entry additionally carries ``api_base`` and
    ``max_tokens`` (used by the model-edit UI); otherwise the legacy
    compact shape (type/name/used_token) is returned unchanged.

    Returns a JSON result of ``{factory_name: {"tags": ..., "llm": [...]}}``,
    or a server-error response on failure.
    """
    try:
        include_details = request.args.get('include_details', 'false').lower() == 'true'
        res = {}
        if include_details:
            objs = TenantLLMService.query(tenant_id=current_user.id)
            factories = LLMFactoriesService.query(status=StatusEnum.VALID.value)
            # Build the factory-name -> tags map once, instead of scanning
            # the factory list for every model (was O(models * factories)).
            tags_by_factory = {f.name: f.tags for f in factories}
            for o in objs:
                o_dict = o.to_dict()
                factory = o_dict["llm_factory"]
                if factory not in res:
                    res[factory] = {
                        # None when the factory is missing/invalid, matching
                        # the previous linear-scan behavior.
                        "tags": tags_by_factory.get(factory),
                        "llm": []
                    }
                res[factory]["llm"].append({
                    "type": o_dict["model_type"],
                    "name": o_dict["llm_name"],
                    "used_token": o_dict["used_tokens"],
                    "api_base": o_dict["api_base"] or "",
                    "max_tokens": o_dict["max_tokens"] or 8192
                })
        else:
            # Legacy compact listing, kept byte-compatible for existing callers.
            for o in TenantLLMService.get_my_llms(current_user.id):
                if o["llm_factory"] not in res:
                    res[o["llm_factory"]] = {
                        "tags": o["tags"],
                        "llm": []
                    }
                res[o["llm_factory"]]["llm"].append({
                    "type": o["model_type"],
                    "name": o["llm_name"],
                    "used_token": o["used_tokens"]
                })
        return get_json_result(data=res)
    except Exception as e:
        return server_error_response(e)




@manager.route('/list', methods=['GET']) # noqa: F821
@login_required
def list_app():

+ 22
- 0
web/src/hooks/llm-hooks.tsx 查看文件

@@ -194,6 +194,24 @@ export const useFetchMyLlmList = (): ResponseGetType<
return { data, loading };
};

/**
 * Fetches the current tenant's LLM list with extra per-model details
 * (api_base, max_tokens) by passing `include_details: true` to the API.
 * Cached under its own query key so it can be invalidated independently
 * of the compact `myLlmList` query.
 */
export const useFetchMyLlmListDetailed = (): ResponseGetType<
  Record<string, any>
> => {
  const query = useQuery({
    queryKey: ['myLlmListDetailed'],
    initialData: {},
    gcTime: 0,
    queryFn: async () => {
      const response = await userService.my_llm({ include_details: true });
      return response.data?.data ?? {};
    },
  });

  return { data: query.data, loading: query.isFetching };
};


export const useSelectLlmList = () => {
const { data: myLlmList, loading: myLlmListLoading } = useFetchMyLlmList();
const { data: factoryList, loading: factoryListLoading } =
@@ -244,6 +262,7 @@ export const useSaveApiKey = () => {
if (data.code === 0) {
message.success(t('message.modified'));
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
queryClient.invalidateQueries({ queryKey: ['myLlmListDetailed'] });
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
}
return data.code;
@@ -295,6 +314,7 @@ export const useAddLlm = () => {
const { data } = await userService.add_llm(params);
if (data.code === 0) {
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
queryClient.invalidateQueries({ queryKey: ['myLlmListDetailed'] });
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
message.success(t('message.modified'));
}
@@ -318,6 +338,7 @@ export const useDeleteLlm = () => {
const { data } = await userService.delete_llm(params);
if (data.code === 0) {
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
queryClient.invalidateQueries({ queryKey: ['myLlmListDetailed'] });
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
message.success(t('message.deleted'));
}
@@ -341,6 +362,7 @@ export const useDeleteFactory = () => {
const { data } = await userService.deleteFactory(params);
if (data.code === 0) {
queryClient.invalidateQueries({ queryKey: ['myLlmList'] });
queryClient.invalidateQueries({ queryKey: ['myLlmListDetailed'] });
queryClient.invalidateQueries({ queryKey: ['factoryList'] });
message.success(t('message.deleted'));
}

+ 1
- 0
web/src/locales/de.ts 查看文件

@@ -620,6 +620,7 @@ export default {
apiKeyTip:
'Der API-Schlüssel kann durch Registrierung beim entsprechenden LLM-Anbieter erhalten werden.',
showMoreModels: 'Mehr Modelle anzeigen',
hideModels: 'Modelle ausblenden',
baseUrl: 'Basis-URL',
baseUrlTip:
'Wenn Ihr API-Schlüssel von OpenAI stammt, ignorieren Sie dies. Andere Zwischenanbieter geben diese Basis-URL mit dem API-Schlüssel an.',

+ 5
- 2
web/src/locales/en.ts 查看文件

@@ -602,13 +602,14 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
cancel: 'Cancel',
addedModels: 'Added models',
modelsToBeAdded: 'Models to be added',
addTheModel: 'Add the model',
addTheModel: 'Add Model',
apiKey: 'API-Key',
apiKeyMessage:
'Please enter the API key (for locally deployed model,ignore this).',
apiKeyTip:
'The API key can be obtained by registering the corresponding LLM supplier.',
showMoreModels: 'Show more models',
showMoreModels: 'View Models',
hideModels: 'Hide Models',
baseUrl: 'Base-Url',
baseUrlTip:
'If your API key is from OpenAI, just ignore it. Any other intermediate providers will give this base url with the API key.',
@@ -634,6 +635,8 @@ This auto-tagging feature enhances retrieval by adding another layer of domain-s
workspace: 'Workspace',
upgrade: 'Upgrade',
addLlmTitle: 'Add LLM',
editLlmTitle: 'Edit {{name}} Model',
editModel: 'Edit Model',
modelName: 'Model name',
modelID: 'Model ID',
modelUid: 'Model UID',

+ 1
- 0
web/src/locales/es.ts 查看文件

@@ -336,6 +336,7 @@ export default {
apiKeyTip:
'La clave API puede obtenerse registrándose con el proveedor correspondiente de LLM.',
showMoreModels: 'Mostrar más modelos',
hideModels: 'Ocultar modelos',
baseUrl: 'URL base',
baseUrlTip:
'Si tu clave API es de OpenAI, ignora esto. Cualquier otro proveedor intermedio proporcionará esta URL base junto con la clave API.',

+ 1
- 0
web/src/locales/id.ts 查看文件

@@ -508,6 +508,7 @@ export default {
apiKeyTip:
'Kunci API dapat diperoleh dengan mendaftar ke penyedia LLM yang sesuai.',
showMoreModels: 'Tampilkan lebih banyak model',
hideModels: 'Sembunyikan model',
baseUrl: 'Base-Url',
baseUrlTip:
'Jika kunci API Anda berasal dari OpenAI, abaikan saja. Penyedia perantara lainnya akan memberikan base url ini dengan kunci API.',

+ 3
- 0
web/src/locales/ja.ts 查看文件

@@ -504,6 +504,7 @@ export default {
apiKeyTip:
'APIキーは、対応するLLMサプライヤーに登録することで取得できます。',
showMoreModels: 'さらにモデルを表示',
hideModels: 'モデルを隠す',
baseUrl: 'ベースURL',
baseUrlTip:
'APIキーがOpenAIからのものであれば無視してください。他の中間プロバイダーはAPIキーと共にこのベースURLを提供します。',
@@ -529,6 +530,8 @@ export default {
workspace: 'ワークスペース',
upgrade: 'アップグレード',
addLlmTitle: 'LLMを追加',
editLlmTitle: '{{name}}モデルを編集',
editModel: 'モデルを編集',
modelName: 'モデル名',
modelID: 'モデルID',
modelUid: 'モデルUID',

+ 1
- 0
web/src/locales/pt-br.ts 查看文件

@@ -500,6 +500,7 @@ export default {
apiKeyTip:
'A chave da API pode ser obtida registrando-se no fornecedor correspondente do LLM.',
showMoreModels: 'Mostrar mais modelos',
hideModels: 'Ocultar modelos',
baseUrl: 'URL Base',
baseUrlTip:
'Se sua chave da API for do OpenAI, ignore isso. Outros provedores intermediários fornecerão essa URL base com a chave da API.',

+ 1
- 0
web/src/locales/vi.ts 查看文件

@@ -554,6 +554,7 @@ export default {
apiKeyTip:
'Khóa API có thể được lấy bằng cách đăng ký nhà cung cấp LLM tương ứng.',
showMoreModels: 'Hiển thị thêm mô hình',
hideModels: 'Ẩn mô hình',
baseUrl: 'Base-Url',
baseUrlTip:
'Nếu khóa API của bạn từ OpenAI, chỉ cần bỏ qua nó. Bất kỳ nhà cung cấp trung gian nào khác sẽ cung cấp URL cơ sở này với khóa API.',

+ 1
- 0
web/src/locales/zh-traditional.ts 查看文件

@@ -588,6 +588,7 @@ export default {
apiKeyMessage: '請輸入api key(如果是本地部署的模型,請忽略它)',
apiKeyTip: 'API key可以通過註冊相應的LLM供應商來獲取。',
showMoreModels: '展示更多模型',
hideModels: '隱藏模型',
baseUrl: 'base-url',
baseUrlTip:
'如果您的 API 密鑰來自 OpenAI,請忽略它。任何其他中間提供商都會提供帶有 API 密鑰的基本 URL。',

+ 3
- 0
web/src/locales/zh.ts 查看文件

@@ -609,6 +609,7 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
apiKeyMessage: '请输入api key(如果是本地部署的模型,请忽略它)',
apiKeyTip: 'API key可以通过注册相应的LLM供应商来获取。',
showMoreModels: '展示更多模型',
hideModels: '隐藏模型',
baseUrl: 'Base-Url',
baseUrlTip:
'如果您的 API 密钥来自 OpenAI,请忽略它。 任何其他中间提供商都会提供带有 API 密钥的基本 URL。',
@@ -633,6 +634,8 @@ General:实体和关系提取提示来自 GitHub - microsoft/graphrag:基于
workspace: '工作空间',
upgrade: '升级',
addLlmTitle: '添加 LLM',
editLlmTitle: '编辑 {{name}} 模型',
editModel: '编辑模型',
modelName: '模型名称',
modelID: '模型ID',
modelUid: '模型UID',

+ 3
- 1
web/src/pages/user-setting/setting-model/api-key-modal/index.tsx 查看文件

@@ -9,6 +9,7 @@ interface IProps extends Omit<IModalManagerChildrenProps, 'showModal'> {
loading: boolean;
initialValue: string;
llmFactory: string;
editMode?: boolean;
onOk: (postBody: ApiKeyPostBody) => void;
showModal?(): void;
}
@@ -27,6 +28,7 @@ const ApiKeyModal = ({
llmFactory,
loading,
initialValue,
editMode = false,
onOk,
}: IProps) => {
const [form] = Form.useForm();
@@ -52,7 +54,7 @@ const ApiKeyModal = ({

return (
<Modal
title={t('modify')}
title={editMode ? t('editModel') : t('modify')}
open={visible}
onOk={handleOk}
onCancel={hideModal}

+ 34
- 3
web/src/pages/user-setting/setting-model/hooks.ts 查看文件

@@ -11,6 +11,7 @@ import {
} from '@/hooks/llm-hooks';
import { useFetchTenantInfo } from '@/hooks/user-setting-hooks';
import { IAddLlmRequestBody } from '@/interfaces/request/llm';
import { getRealModelName } from '@/utils/llm-util';
import { useCallback, useState } from 'react';
import { ApiKeyPostBody } from '../interface';

@@ -20,6 +21,7 @@ export const useSubmitApiKey = () => {
const [savingParams, setSavingParams] = useState<SavingParamsState>(
{} as SavingParamsState,
);
const [editMode, setEditMode] = useState(false);
const { saveApiKey, loading } = useSaveApiKey();
const {
visible: apiKeyVisible,
@@ -36,14 +38,16 @@ export const useSubmitApiKey = () => {

if (ret === 0) {
hideApiKeyModal();
setEditMode(false);
}
},
[hideApiKeyModal, saveApiKey, savingParams],
);

const onShowApiKeyModal = useCallback(
(savingParams: SavingParamsState) => {
(savingParams: SavingParamsState, isEdit = false) => {
setSavingParams(savingParams);
setEditMode(isEdit);
showApiKeyModal();
},
[showApiKeyModal, setSavingParams],
@@ -53,6 +57,7 @@ export const useSubmitApiKey = () => {
saveApiKeyLoading: loading,
initialApiKey: '',
llmFactory: savingParams.llm_factory,
editMode,
onApiKeySavingOk,
apiKeyVisible,
hideApiKeyModal,
@@ -105,6 +110,9 @@ export const useFetchSystemModelSettingOnMount = () => {

export const useSubmitOllama = () => {
const [selectedLlmFactory, setSelectedLlmFactory] = useState<string>('');
const [editMode, setEditMode] = useState(false);
const [initialValues, setInitialValues] = useState<Partial<IAddLlmRequestBody> | undefined>();
const [originalModelName, setOriginalModelName] = useState<string>('');
const { addLlm, loading } = useAddLlm();
const {
visible: llmAddingVisible,
@@ -114,21 +122,44 @@ export const useSubmitOllama = () => {

const onLlmAddingOk = useCallback(
async (payload: IAddLlmRequestBody) => {
const ret = await addLlm(payload);
const cleanedPayload = { ...payload };
if (!cleanedPayload.api_key || cleanedPayload.api_key.trim() === '') {
delete cleanedPayload.api_key;
}
const ret = await addLlm(cleanedPayload);
if (ret === 0) {
hideLlmAddingModal();
setEditMode(false);
setInitialValues(undefined);
}
},
[hideLlmAddingModal, addLlm],
);

const handleShowLlmAddingModal = (llmFactory: string) => {
const handleShowLlmAddingModal = (llmFactory: string, isEdit = false, modelData?: any, detailedData?: any) => {
setSelectedLlmFactory(llmFactory);
setEditMode(isEdit);
if (isEdit && detailedData) {
const initialVals = {
llm_name: getRealModelName(detailedData.name),
model_type: detailedData.type,
api_base: detailedData.api_base || '',
max_tokens: detailedData.max_tokens || 8192,
api_key: '',
};
setInitialValues(initialVals);
} else {
setInitialValues(undefined);
}
showLlmAddingModal();
};

return {
llmAddingLoading: loading,
editMode,
initialValues,
onLlmAddingOk,
llmAddingVisible,
hideLlmAddingModal,

+ 48
- 9
web/src/pages/user-setting/setting-model/index.tsx 查看文件

@@ -3,9 +3,9 @@ import { LlmIcon } from '@/components/svg-icon';
import { useTheme } from '@/components/theme-provider';
import { LLMFactory } from '@/constants/llm';
import { useSetModalState, useTranslate } from '@/hooks/common-hooks';
import { LlmItem, useSelectLlmList } from '@/hooks/llm-hooks';
import { LlmItem, useSelectLlmList, useFetchMyLlmListDetailed } from '@/hooks/llm-hooks';
import { getRealModelName } from '@/utils/llm-util';
import { CloseCircleOutlined, SettingOutlined } from '@ant-design/icons';
import { CloseCircleOutlined, EditOutlined, SettingOutlined } from '@ant-design/icons';
import {
Button,
Card,
@@ -60,9 +60,10 @@ const { Text } = Typography;
interface IModelCardProps {
item: LlmItem;
clickApiKey: (llmFactory: string) => void;
handleEditModel: (model: any, factory: LlmItem) => void;
}

const ModelCard = ({ item, clickApiKey }: IModelCardProps) => {
const ModelCard = ({ item, clickApiKey, handleEditModel }: IModelCardProps) => {
const { visible, switchVisible } = useSetModalState();
const { t } = useTranslate('setting');
const { theme } = useTheme();
@@ -112,7 +113,7 @@ const ModelCard = ({ item, clickApiKey }: IModelCardProps) => {
</Button>
<Button onClick={handleShowMoreClick}>
<Flex align="center" gap={4}>
{t('showMoreModels')}
{visible ? t('hideModels') : t('showMoreModels')}
<MoreModelIcon />
</Flex>
</Button>
@@ -129,13 +130,20 @@ const ModelCard = ({ item, clickApiKey }: IModelCardProps) => {
size="small"
dataSource={item.llm}
className={styles.llmList}
renderItem={(item) => (
renderItem={(model) => (
<List.Item>
<Space>
{getRealModelName(item.name)}
<Tag color="#b8b8b8">{item.type}</Tag>
{getRealModelName(model.name)}
<Tag color="#b8b8b8">{model.type}</Tag>
{isLocalLlmFactory(item.name) && (
<Tooltip title={t('edit', { keyPrefix: 'common' })}>
<Button type={'text'} onClick={() => handleEditModel(model, item)}>
<EditOutlined style={{ color: '#1890ff' }} />
</Button>
</Tooltip>
)}
<Tooltip title={t('delete', { keyPrefix: 'common' })}>
<Button type={'text'} onClick={handleDeleteLlm(item.name)}>
<Button type={'text'} onClick={handleDeleteLlm(model.name)}>
<CloseCircleOutlined style={{ color: '#D92D20' }} />
</Button>
</Tooltip>
@@ -151,11 +159,13 @@ const ModelCard = ({ item, clickApiKey }: IModelCardProps) => {

const UserSettingModel = () => {
const { factoryList, myLlmList: llmList, loading } = useSelectLlmList();
const { data: detailedLlmList } = useFetchMyLlmListDetailed();
const { theme } = useTheme();
const {
saveApiKeyLoading,
initialApiKey,
llmFactory,
editMode,
onApiKeySavingOk,
apiKeyVisible,
hideApiKeyModal,
@@ -175,6 +185,8 @@ const UserSettingModel = () => {
showLlmAddingModal,
onLlmAddingOk,
llmAddingLoading,
editMode: llmEditMode,
initialValues: llmInitialValues,
selectedLlmFactory,
} = useSubmitOllama();

@@ -288,6 +300,30 @@ const UserSettingModel = () => {
[showApiKeyModal, showLlmAddingModal, ModalMap],
);

const handleEditModel = useCallback(
(model: any, factory: LlmItem) => {
if (factory) {
const detailedFactory = detailedLlmList[factory.name];
const detailedModel = detailedFactory?.llm?.find((m: any) => m.name === model.name);
const editData = {
llm_factory: factory.name,
llm_name: model.name,
model_type: model.type
};
if (isLocalLlmFactory(factory.name)) {
showLlmAddingModal(factory.name, true, editData, detailedModel);
} else if (factory.name in ModalMap) {
ModalMap[factory.name as keyof typeof ModalMap]();
} else {
showApiKeyModal(editData, true);
}
}
},
[showApiKeyModal, showLlmAddingModal, ModalMap, detailedLlmList],
);

const items: CollapseProps['items'] = [
{
key: '1',
@@ -297,7 +333,7 @@ const UserSettingModel = () => {
grid={{ gutter: 16, column: 1 }}
dataSource={llmList}
renderItem={(item) => (
<ModelCard item={item} clickApiKey={handleAddModel}></ModelCard>
<ModelCard item={item} clickApiKey={handleAddModel} handleEditModel={handleEditModel}></ModelCard>
)}
/>
),
@@ -384,6 +420,7 @@ const UserSettingModel = () => {
hideModal={hideApiKeyModal}
loading={saveApiKeyLoading}
initialValue={initialApiKey}
editMode={editMode}
onOk={onApiKeySavingOk}
llmFactory={llmFactory}
></ApiKeyModal>
@@ -400,6 +437,8 @@ const UserSettingModel = () => {
hideModal={hideLlmAddingModal}
onOk={onLlmAddingOk}
loading={llmAddingLoading}
editMode={llmEditMode}
initialValues={llmInitialValues}
llmFactory={selectedLlmFactory}
></OllamaModal>
<VolcEngineModal

+ 29
- 3
web/src/pages/user-setting/setting-model/ollama-modal/index.tsx 查看文件

@@ -13,6 +13,7 @@ import {
Switch,
} from 'antd';
import omit from 'lodash/omit';
import { useEffect } from 'react';

type FieldType = IAddLlmRequestBody & { vision: boolean };

@@ -45,7 +46,13 @@ const OllamaModal = ({
onOk,
loading,
llmFactory,
}: IModalProps<IAddLlmRequestBody> & { llmFactory: string }) => {
editMode = false,
initialValues,
}: IModalProps<IAddLlmRequestBody> & {
llmFactory: string;
editMode?: boolean;
initialValues?: Partial<IAddLlmRequestBody>;
}) => {
const [form] = Form.useForm<FieldType>();

const { t } = useTranslate('setting');
@@ -73,6 +80,22 @@ const OllamaModal = ({
await handleOk();
}
};

useEffect(() => {
if (visible && editMode && initialValues) {
const formValues = {
llm_name: initialValues.llm_name,
model_type: initialValues.model_type,
api_base: initialValues.api_base,
max_tokens: initialValues.max_tokens || 8192,
api_key: '',
...initialValues,
};
form.setFieldsValue(formValues);
} else if (visible && !editMode) {
form.resetFields();
}
}, [visible, editMode, initialValues, form]);
const url =
llmFactoryToUrlMap[llmFactory as LlmFactory] ||
@@ -111,7 +134,7 @@ const OllamaModal = ({
};
return (
<Modal
title={t('addLlmTitle', { name: llmFactory })}
title={editMode ? t('editLlmTitle', { name: llmFactory }) : t('addLlmTitle', { name: llmFactory })}
open={visible}
onOk={handleOk}
onCancel={hideModal}
@@ -173,7 +196,10 @@ const OllamaModal = ({
name="api_key"
rules={[{ required: false, message: t('apiKeyMessage') }]}
>
<Input placeholder={t('apiKeyMessage')} onKeyDown={handleKeyDown} />
<Input
placeholder={t('apiKeyMessage')}
onKeyDown={handleKeyDown}
/>
</Form.Item>
<Form.Item<FieldType>
label={t('maxTokens')}

Loading…
取消
儲存