
Update deepseek model provider info. (#4714)

### What problem does this PR solve?


### Type of change

- [x] Refactoring
tags/v0.16.0
Kevin Hu, 8 months ago
Parent
Commit
4b9c4c0705
2 changed files: 95 additions and 83 deletions
  1. api/apps/kb_app.py (+2, -2)
  2. conf/llm_factories.json (+93, -81)

api/apps/kb_app.py (+2, -2)

@@ -15,6 +15,7 @@
 #
 import json
 import logging
+import os

 from flask import request
 from flask_login import login_required, current_user
@@ -24,7 +25,6 @@ from api.db.services.document_service import DocumentService
 from api.db.services.file2document_service import File2DocumentService
 from api.db.services.file_service import FileService
 from api.db.services.user_service import TenantService, UserTenantService
-from api.settings import DOC_ENGINE
 from api.utils.api_utils import server_error_response, get_data_error_result, validate_request, not_allowed_parameters
 from api.utils import get_uuid
 from api.db import StatusEnum, FileSource
@@ -97,7 +97,7 @@ def update():
         return get_data_error_result(
             message="Can't find this knowledgebase!")

-    if req.get("parser_id", "") == "tag" and DOC_ENGINE == "infinity":
+    if req.get("parser_id", "") == "tag" and os.environ.get('DOC_ENGINE', "elasticsearch") == "infinity":
         return get_json_result(
             data=False,
             message='The chunk method Tag has not been supported by Infinity yet.',
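In short, the refactor drops the module-level `DOC_ENGINE` import from `api.settings` and reads the value straight from the environment, defaulting to `elasticsearch`. A minimal standalone sketch of the same guard (the helper name and the assert are illustrative, not part of the PR):

```python
import os


def tag_chunk_method_allowed(parser_id: str) -> bool:
    """Mirror of the refactored check: the Tag chunk method is rejected
    only when the document engine resolved from the environment is Infinity."""
    doc_engine = os.environ.get("DOC_ENGINE", "elasticsearch")
    return not (parser_id == "tag" and doc_engine == "infinity")


# With DOC_ENGINE unset, the default "elasticsearch" applies and Tag is allowed.
assert tag_chunk_method_allowed("tag")
```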

conf/llm_factories.json (+93, -81)

@@ -1916,181 +1916,229 @@
"status": "1",
"llm": [
{
"llm_name": "Qwen/Qwen2-7B-Instruct",
"llm_name": "deepseek-ai/DeepSeek-R1",
"tags": "LLM,CHAT,64k",
"max_tokens": 64000,
"model_type": "chat"
},
{
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Llama-70B",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Qwen/Qwen2-1.5B-Instruct",
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Qwen/Qwen1.5-7B-Chat",
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "THUDM/glm-4-9b-chat",
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "THUDM/chatglm3-6b",
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "01-ai/Yi-1.5-9B-Chat-16K",
"tags": "LLM,CHAT,16k",
"max_tokens": 16384,
"llm_name": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "01-ai/Yi-1.5-6B-Chat",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"llm_name": "Pro/deepseek-ai/DeepSeek-R1-Distill-Llama-8B",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "google/gemma-2-9b-it",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"llm_name": "Pro/deepseek-ai/DeepSeek-R1-Distill-Qwen-7B",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "internlm/internlm2_5-7b-chat",
"llm_name": "Pro/deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "meta-llama/Meta-Llama-3-8B-Instruct",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"llm_name": "Qwen/QwQ-32B-Preview",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "meta-llama/Meta-Llama-3.1-8B-Instruct",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"llm_name": "Qwen/Qwen2.5-Coder-32B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "mistralai/Mistral-7B-Instruct-v0.2",
"llm_name": "AIDC-AI/Marco-o1",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Pro/Qwen/Qwen2-7B-Instruct",
"llm_name": "deepseek-ai/deepseek-vl2",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "deepseek-ai/Janus-Pro-7B",
"tags": "LLM,IMAGE2TEXT,32k",
"max_tokens": 32768,
"model_type": "image2text"
},
{
"llm_name": "Qwen/QVQ-72B-Preview",
"tags": "LLM,IMAGE2TEXT,32k",
"max_tokens": 32768,
"model_type": "image2text"
},
{
"llm_name": "FunAudioLLM/CosyVoice2-0.5B",
"tags": "LLM,TTS,32k",
"max_tokens": 32768,
"model_type": "tts"
},
{
"llm_name": "fishaudio/fish-speech-1.5",
"tags": "LLM,TTS,32k",
"max_tokens": 32768,
"model_type": "tts"
},
{
"llm_name": "meta-llama/Llama-3.3-70B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Pro/Qwen/Qwen2-1.5B-Instruct",
"llm_name": "Qwen/Qwen2-7B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Pro/Qwen/Qwen1.5-7B-Chat",
"llm_name": "Qwen/Qwen2-1.5B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Pro/THUDM/glm-4-9b-chat",
"llm_name": "Qwen/Qwen1.5-7B-Chat",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Pro/THUDM/chatglm3-6b",
"llm_name": "THUDM/glm-4-9b-chat",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Pro/01-ai/Yi-1.5-9B-Chat-16K",
"llm_name": "THUDM/chatglm3-6b",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "01-ai/Yi-1.5-9B-Chat-16K",
"tags": "LLM,CHAT,16k",
"max_tokens": 16384,
"model_type": "chat"
},
{
"llm_name": "Pro/01-ai/Yi-1.5-6B-Chat",
"llm_name": "01-ai/Yi-1.5-6B-Chat",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "Pro/internlm/internlm2_5-7b-chat",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Pro/google/gemma-2-9b-it",
"llm_name": "google/gemma-2-9b-it",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "Pro/meta-llama/Meta-Llama-3.1-8B-Instruct",
"llm_name": "internlm/internlm2_5-7b-chat",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Pro/meta-llama/Meta-Llama-3-8B-Instruct",
"llm_name": "meta-llama/Meta-Llama-3.1-8B-Instruct",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "Pro/mistralai/Mistral-7B-Instruct-v0.2",
"llm_name": "Pro/Qwen/Qwen2-7B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Qwen/Qwen2-72B-Instruct",
"llm_name": "Pro/Qwen/Qwen2-1.5B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Qwen/Qwen2-Math-72B-Instruct",
"llm_name": "Pro/THUDM/glm-4-9b-chat",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Qwen/Qwen2-57B-A14B-Instruct",
"llm_name": "Pro/01-ai/Yi-1.5-6B-Chat",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "Pro/google/gemma-2-9b-it",
"tags": "LLM,CHAT,8k",
"max_tokens": 8192,
"model_type": "chat"
},
{
"llm_name": "Pro/meta-llama/Meta-Llama-3.1-8B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Qwen/Qwen1.5-110B-Chat",
"llm_name": "Pro/mistralai/Mistral-7B-Instruct-v0.2",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Qwen/Qwen1.5-32B-Chat",
"llm_name": "Qwen/Qwen2-Math-72B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Qwen/Qwen1.5-14B-Chat",
"llm_name": "Qwen/Qwen1.5-32B-Chat",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
@@ -2125,12 +2173,6 @@
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "Qwen/Qwen2.5-Math-72B-Instruct",
"tags": "LLM,CHAT,Math,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "Qwen/Qwen2.5-Coder-7B-Instruct",
"tags": "LLM,CHAT,FIM,Coder,32k",
@@ -2155,24 +2197,6 @@
"max_tokens": 16384,
"model_type": "chat"
},
{
"llm_name": "deepseek-ai/DeepSeek-Coder-V2-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "deepseek-ai/DeepSeek-V2-Chat",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "deepseek-ai/deepseek-llm-67b-chat",
"tags": "LLM,CHAT,4k",
"max_tokens": 4096,
"model_type": "chat"
},
{
"llm_name": "internlm/internlm2_5-20b-chat",
"tags": "LLM,CHAT,32k",
@@ -2191,18 +2215,6 @@
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "meta-llama/Meta-Llama-3-70B-Instruct",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "mistralai/Mixtral-8x7B-Instruct-v0.1",
"tags": "LLM,CHAT,32k",
"max_tokens": 32768,
"model_type": "chat"
},
{
"llm_name": "google/gemma-2-27b-it",
"tags": "LLM,CHAT,8k",

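Every model entry touched in conf/llm_factories.json above follows the same four-field shape (`llm_name`, `tags`, `max_tokens`, `model_type`). A hypothetical post-edit sanity check, not part of this PR, could verify that shape; it assumes the provider list lives under a top-level `factory_llm_infos` key:

```python
import json

# Fields that every model entry in the diff above carries.
REQUIRED_FIELDS = {"llm_name", "tags", "max_tokens", "model_type"}

with open("conf/llm_factories.json", encoding="utf-8") as f:
    config = json.load(f)

# Assumption: the provider list sits under "factory_llm_infos".
for factory in config.get("factory_llm_infos", []):
    for llm in factory.get("llm", []):
        missing = REQUIRED_FIELDS - llm.keys()
        assert not missing, f"{llm.get('llm_name')}: missing {missing}"
        assert isinstance(llm["max_tokens"], int)
```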