Pārlūkot izejas kodu

Feat: add gitee as LLM provider. (#8545)

### What problem does this PR solve?


### Type of change

- [x] New Feature (non-breaking change which adds functionality)
tags/v0.20.0
Kevin Hu pirms 4 mēnešiem
vecāks
revīzija
aafeffa292
Revīzijas autora e-pasta adrese nav piesaistīta nevienam kontam

+ 414
- 0
conf/llm_factories.json Parādīt failu

}
]
},
{
"name": "GiteeAI",
"logo": "",
"tags": "LLM,TEXT EMBEDDING,IMAGE2TEXT,SPEECH2TEXT,TEXT RE-RANK",
"status": "1",
"llm": [
{
"llm_name": "ERNIE-4.5-Turbo",
"tags": "LLM,CHAT",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "ERNIE-X1-Turbo",
"tags": "LLM,CHAT",
"max_tokens": 4096,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "DeepSeek-R1",
"tags": "LLM,CHAT",
"max_tokens": 65792,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "DeepSeek-V3",
"tags": "LLM,CHAT",
"max_tokens": 65792,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Qwen3-235B-A22B",
"tags": "LLM,CHAT",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Qwen3-30B-A3B",
"tags": "LLM,CHAT",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Qwen3-32B",
"tags": "LLM,CHAT",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Qwen3-8B",
"tags": "LLM,CHAT",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Qwen3-4B",
"tags": "LLM,CHAT",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Qwen3-0.6B",
"tags": "LLM,CHAT",
"max_tokens": 32000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "QwQ-32B",
"tags": "LLM,CHAT",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "DeepSeek-R1-Distill-Qwen-32B",
"tags": "LLM,CHAT",
"max_tokens": 65792,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "DeepSeek-R1-Distill-Qwen-14B",
"tags": "LLM,CHAT",
"max_tokens": 65792,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "DeepSeek-R1-Distill-Qwen-7B",
"tags": "LLM,CHAT",
"max_tokens": 65792,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "DeepSeek-R1-Distill-Qwen-1.5B",
"tags": "LLM,CHAT",
"max_tokens": 65792,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Qwen2.5-72B-Instruct",
"tags": "LLM,CHAT",
"max_tokens": 4096,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen2.5-32B-Instruct",
"tags": "LLM,CHAT",
"max_tokens": 4096,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Qwen2.5-14B-Instruct",
"tags": "LLM,CHAT",
"max_tokens": 4096,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen2.5-7B-Instruct",
"tags": "LLM,CHAT",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": true
},
{
"llm_name": "Qwen2-72B-Instruct",
"tags": "LLM,CHAT",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Qwen2-7B-Instruct",
"tags": "LLM,CHAT",
"max_tokens": 131072,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "GLM-4-32B",
"tags": "LLM,CHAT",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "GLM-4-9B-0414",
"tags": "LLM,CHAT",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "glm-4-9b-chat",
"tags": "LLM,CHAT",
"max_tokens": 128000,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "internlm3-8b-instruct",
"tags": "LLM,CHAT",
"max_tokens": 4096,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "Yi-34B-Chat",
"tags": "LLM,CHAT",
"max_tokens": 32768,
"model_type": "chat",
"is_tools": false
},
{
"llm_name": "ERNIE-4.5-Turbo-VL",
"tags": "LLM,IMAGE2TEXT",
"max_tokens": 4096,
"model_type": "image2text",
"is_tools": false
},
{
"llm_name": "Qwen2.5-VL-32B-Instruct",
"tags": "LLM,IMAGE2TEXT",
"max_tokens": 32768,
"model_type": "image2text",
"is_tools": true
},
{
"llm_name": "Qwen2-VL-72B",
"tags": "LLM,IMAGE2TEXT",
"max_tokens": 4096,
"model_type": "image2text",
"is_tools": false
},
{
"llm_name": "Align-DS-V",
"tags": "LLM,IMAGE2TEXT",
"max_tokens": 4096,
"model_type": "image2text",
"is_tools": false
},
{
"llm_name": "InternVL3-78B",
"tags": "LLM,IMAGE2TEXT",
"max_tokens": 32768,
"model_type": "image2text",
"is_tools": false
},
{
"llm_name": "InternVL3-38B",
"tags": "LLM,IMAGE2TEXT",
"max_tokens": 32768,
"model_type": "image2text",
"is_tools": false
},
{
"llm_name": "InternVL2.5-78B",
"tags": "LLM,IMAGE2TEXT",
"max_tokens": 32768,
"model_type": "image2text",
"is_tools": false
},
{
"llm_name": "InternVL2.5-26B",
"tags": "LLM,IMAGE2TEXT",
"max_tokens": 16384,
"model_type": "image2text",
"is_tools": false
},
{
"llm_name": "InternVL2-8B",
"tags": "LLM,IMAGE2TEXT",
"max_tokens": 8192,
"model_type": "image2text",
"is_tools": false
},
{
"llm_name": "Qwen2-Audio-7B-Instruct",
"tags": "LLM,SPEECH2TEXT,IMAGE2TEXT",
"max_tokens": 8192,
"model_type": "speech2text",
"is_tools": false
},
{
"llm_name": "whisper-base",
"tags": "SPEECH2TEXT",
"max_tokens": 512,
"model_type": "speech2text",
"is_tools": false
},
{
"llm_name": "whisper-large",
"tags": "SPEECH2TEXT",
"max_tokens": 512,
"model_type": "speech2text",
"is_tools": false
},
{
"llm_name": "whisper-large-v3-turbo",
"tags": "SPEECH2TEXT",
"max_tokens": 512,
"model_type": "speech2text",
"is_tools": false
},
{
"llm_name": "whisper-large-v3",
"tags": "SPEECH2TEXT",
"max_tokens": 512,
"model_type": "speech2text",
"is_tools": false
},
{
"llm_name": "SenseVoiceSmall",
"tags": "SPEECH2TEXT",
"max_tokens": 512,
"model_type": "speech2text",
"is_tools": false
},
{
"llm_name": "Qwen3-Reranker-8B",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 32768,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "Qwen3-Reranker-4B",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 32768,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "Qwen3-Reranker-0.6B",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 32768,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "Qwen3-Embedding-8B",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 8192,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "Qwen3-Embedding-4B",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 4096,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "Qwen3-Embedding-0.6B",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 4096,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "jina-clip-v1",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 512,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "jina-clip-v2",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 8192,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "jina-reranker-m0",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 10240,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "bce-embedding-base_v1",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 512,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "bce-reranker-base_v1",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 512,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "bge-m3",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 8192,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "bge-reranker-v2-m3",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 8192,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "bge-large-zh-v1.5",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 1024,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "bge-small-zh-v1.5",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 512,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "nomic-embed-code",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 512,
"model_type": "embedding",
"is_tools": false
},
{
"llm_name": "all-mpnet-base-v2",
"tags": "TEXT EMBEDDING,TEXT RE-RANK",
"max_tokens": 512,
"model_type": "embedding",
"is_tools": false
}
]
},
{
"name": "Google Cloud",
"logo": "",

+ 13
- 5
rag/llm/__init__.py Parādīt failu

HuggingFaceEmbed, HuggingFaceEmbed,
VolcEngineEmbed, VolcEngineEmbed,
GPUStackEmbed, GPUStackEmbed,
NovitaEmbed
NovitaEmbed,
GiteeEmbed
) )
from .chat_model import ( from .chat_model import (
GptTurbo, GptTurbo,
HuggingFaceChat, HuggingFaceChat,
GPUStackChat, GPUStackChat,
ModelScopeChat, ModelScopeChat,
GiteeChat
) )


from .cv_model import ( from .cv_model import (
QWenRerank, QWenRerank,
GPUStackRerank, GPUStackRerank,
HuggingfaceRerank, HuggingfaceRerank,
NovitaRerank
NovitaRerank,
GiteeRerank
) )


from .sequence2txt_model import ( from .sequence2txt_model import (
XinferenceSeq2txt, XinferenceSeq2txt,
TencentCloudSeq2txt, TencentCloudSeq2txt,
GPUStackSeq2txt, GPUStackSeq2txt,
GiteeSeq2txt
) )


from .tts_model import ( from .tts_model import (
"HuggingFace": HuggingFaceEmbed, "HuggingFace": HuggingFaceEmbed,
"VolcEngine": VolcEngineEmbed, "VolcEngine": VolcEngineEmbed,
"GPUStack": GPUStackEmbed, "GPUStack": GPUStackEmbed,
"NovitaAI": NovitaEmbed
"NovitaAI": NovitaEmbed,
"GiteeAI": GiteeEmbed
} }


CvModel = { CvModel = {
"Tencent Hunyuan": HunyuanCV, "Tencent Hunyuan": HunyuanCV,
"Anthropic": AnthropicCV, "Anthropic": AnthropicCV,
"SILICONFLOW": SILICONFLOWCV, "SILICONFLOW": SILICONFLOWCV,
"GPUStack": GPUStackCV,
"GPUStack": GPUStackCV
} }


ChatModel = { ChatModel = {
"HuggingFace": HuggingFaceChat, "HuggingFace": HuggingFaceChat,
"GPUStack": GPUStackChat, "GPUStack": GPUStackChat,
"ModelScope":ModelScopeChat, "ModelScope":ModelScopeChat,
"GiteeAI": GiteeChat
} }


RerankModel = { RerankModel = {
"Tongyi-Qianwen": QWenRerank, "Tongyi-Qianwen": QWenRerank,
"GPUStack": GPUStackRerank, "GPUStack": GPUStackRerank,
"HuggingFace": HuggingfaceRerank, "HuggingFace": HuggingfaceRerank,
"NovitaAI": NovitaRerank
"NovitaAI": NovitaRerank,
"GiteeAI": GiteeRerank
} }


Seq2txtModel = { Seq2txtModel = {
"Xinference": XinferenceSeq2txt, "Xinference": XinferenceSeq2txt,
"Tencent Cloud": TencentCloudSeq2txt, "Tencent Cloud": TencentCloudSeq2txt,
"GPUStack": GPUStackSeq2txt, "GPUStack": GPUStackSeq2txt,
"GiteeAI": GiteeSeq2txt
} }


TTSModel = { TTSModel = {

+ 7
- 0
rag/llm/chat_model.py Parādīt failu

super().__init__(key, model_name, base_url, **kwargs) super().__init__(key, model_name, base_url, **kwargs)




class GiteeChat(Base):
    """Chat-completion client for the Gitee AI provider.

    Thin wrapper over ``Base``: it only pins the Gitee AI
    OpenAI-compatible endpoint as the default ``base_url`` and
    delegates all chat behavior to the parent class.
    """

    def __init__(self, key, model_name, base_url="https://ai.gitee.com/v1/", **kwargs):
        # Callers may pass base_url=None or "" explicitly; fall back to
        # the Gitee AI endpoint in that case as well.
        endpoint = base_url or "https://ai.gitee.com/v1/"
        super().__init__(key, model_name, endpoint, **kwargs)


class ReplicateChat(Base): class ReplicateChat(Base):
def __init__(self, key, model_name, base_url=None, **kwargs): def __init__(self, key, model_name, base_url=None, **kwargs):
super().__init__(key, model_name, base_url=base_url, **kwargs) super().__init__(key, model_name, base_url=base_url, **kwargs)

+ 5
- 0
rag/llm/embedding_model.py Parādīt failu



class NovitaEmbed(SILICONFLOWEmbed): class NovitaEmbed(SILICONFLOWEmbed):
def __init__(self, key, model_name, base_url="https://api.novita.ai/v3/openai/embeddings"): def __init__(self, key, model_name, base_url="https://api.novita.ai/v3/openai/embeddings"):
super().__init__(key, model_name, base_url)


class GiteeEmbed(SILICONFLOWEmbed):
    """Text-embedding client for the Gitee AI provider.

    Reuses the SILICONFLOW OpenAI-compatible embedding implementation,
    only substituting the Gitee AI embeddings endpoint.
    """

    def __init__(self, key, model_name, base_url="https://ai.gitee.com/v1/embeddings"):
        # Guard against callers explicitly passing None/"" — mirrors the
        # empty-base_url handling of the other Gitee* provider classes.
        if not base_url:
            base_url = "https://ai.gitee.com/v1/embeddings"
        super().__init__(key, model_name, base_url)

+ 5
- 0
rag/llm/rerank_model.py Parādīt failu



class NovitaRerank(JinaRerank): class NovitaRerank(JinaRerank):
def __init__(self, key, model_name, base_url="https://api.novita.ai/v3/openai/rerank"): def __init__(self, key, model_name, base_url="https://api.novita.ai/v3/openai/rerank"):
super().__init__(key, model_name, base_url)


class GiteeRerank(JinaRerank):
    """Re-ranking client for the Gitee AI provider.

    Reuses the Jina rerank implementation, only substituting the
    Gitee AI rerank endpoint.
    """

    def __init__(self, key, model_name, base_url="https://ai.gitee.com/v1/rerank"):
        # Guard against callers explicitly passing None/"" — mirrors the
        # empty-base_url handling of the other Gitee* provider classes.
        if not base_url:
            base_url = "https://ai.gitee.com/v1/rerank"
        super().__init__(key, model_name, base_url)

+ 8
- 0
rag/llm/sequence2txt_model.py Parādīt failu

self.base_url = base_url self.base_url = base_url
self.model_name = model_name self.model_name = model_name
self.key = key self.key = key


class GiteeSeq2txt(Base):
    """Speech-to-text client for the Gitee AI provider.

    Builds an OpenAI-compatible client pointed at the Gitee AI endpoint;
    transcription itself is handled by the parent class.
    """

    def __init__(self, key, model_name="whisper-1", base_url="https://ai.gitee.com/v1/"):
        # Treat an explicit None/"" base_url the same as the default.
        endpoint = base_url or "https://ai.gitee.com/v1/"
        self.client = OpenAI(api_key=key, base_url=endpoint)
        self.model_name = model_name

Notiek ielāde…
Atcelt
Saglabāt