### What problem does this PR solve?

Close #7980

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
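For context on the fix: `os.path.join` uses the running OS's path separator, so on Windows it can splice a backslash into an HTTP URL, whereas `urllib.parse.urljoin` is URL-aware and platform-independent. A minimal illustration (the endpoint below is made up, not taken from this PR):

```python
import posixpath, ntpath
from urllib.parse import urljoin

base = "http://127.0.0.1:11434"  # hypothetical local endpoint, for illustration only

# os.path.join delegates to the platform-specific path module:
print(posixpath.join(base, "v1"))   # http://127.0.0.1:11434/v1   (Linux/macOS)
print(ntpath.join(base, "v1"))      # http://127.0.0.1:11434\v1   (Windows) - not a valid URL path

# urljoin is platform-independent and URL-aware:
print(urljoin(base, "v1"))          # http://127.0.0.1:11434/v1
print(urljoin(base + "/", "v1"))    # http://127.0.0.1:11434/v1
```

The hunks below apply the same substitution across the affected chat, CV, embedding, and rerank model classes.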
```diff
 import time
 from abc import ABC
 from typing import Any, Protocol
+from urllib.parse import urljoin
 import openai
 import requests
```

```diff
     def __init__(self, key=None, model_name="", base_url=""):
         if not base_url:
             raise ValueError("Local llm url cannot be None")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         super().__init__(key, model_name, base_url)
```
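One behavioral detail of `urljoin` worth keeping in mind while reading these hunks: it resolves the second argument relative to the directory of the first, so a base URL that already carries a path segment keeps that segment only when it ends with a slash. A small sketch with made-up endpoints:

```python
from urllib.parse import urljoin

# Hypothetical endpoints, only to show urljoin's resolution rules
print(urljoin("http://host:9997", "v1"))              # http://host:9997/v1
print(urljoin("http://host:9997/", "v1"))             # http://host:9997/v1
print(urljoin("http://host:9997/xinference/", "v1"))  # http://host:9997/xinference/v1
print(urljoin("http://host:9997/xinference", "v1"))   # http://host:9997/v1  (last segment replaced)
```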
```diff
     def __init__(self, key=None, model_name="", base_url=""):
         if not base_url:
             raise ValueError("Local llm url cannot be None")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         super().__init__(key, model_name.split("___")[0], base_url)
```

```diff
     def __init__(self, key=None, model_name="", base_url=""):
         if not base_url:
             raise ValueError("Local llm url cannot be None")
-        base_url = base_url.rstrip("/")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         super().__init__(key, model_name.split("___")[0], base_url)
```

```diff
         if not base_url:
             raise ValueError("Local llm url cannot be None")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         self.client = OpenAI(api_key="empty", base_url=base_url)
         self.model_name = model_name.split("___")[0]
```

```diff
     def __init__(self, key, model_name, base_url):
         if not base_url:
             raise ValueError("Local llm url cannot be None")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         super().__init__(key, model_name, base_url)
         self.client = OpenAI(api_key="lm-studio", base_url=base_url)
         self.model_name = model_name
```
```diff
 class LeptonAIChat(Base):
     def __init__(self, key, model_name, base_url=None):
         if not base_url:
-            base_url = os.path.join("https://" + model_name + ".lepton.run", "api", "v1")
+            base_url = urljoin("https://" + model_name + ".lepton.run", "api/v1")
         super().__init__(key, model_name, base_url)
```
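For the Lepton default URL, the two path segments are passed to `urljoin` as a single relative path (`"api/v1"`); chaining two separate `urljoin` calls would drop the first segment. A quick sketch with a hypothetical model name:

```python
from urllib.parse import urljoin

model_name = "llama3-1-8b"  # hypothetical model name, for illustration only
print(urljoin("https://" + model_name + ".lepton.run", "api/v1"))
# https://llama3-1-8b.lepton.run/api/v1
```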
```diff
     def __init__(self, key=None, model_name="", base_url=""):
         if not base_url:
             raise ValueError("Local llm url cannot be None")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         super().__init__(key, model_name, base_url)
```
```diff
 import os
 from abc import ABC
 from io import BytesIO
+from urllib.parse import urljoin
 import requests
 from ollama import Client
```

```diff
     def __init__(self, key, model_name, base_url, lang="Chinese"):
         if not base_url:
             raise ValueError("Local cv model url cannot be None")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         self.client = OpenAI(api_key="empty", base_url=base_url)
         self.model_name = model_name.split("___")[0]
         self.lang = lang
```

```diff
 class XinferenceCV(Base):
     def __init__(self, key, model_name="", lang="Chinese", base_url=""):
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         self.client = OpenAI(api_key=key, base_url=base_url)
         self.model_name = model_name
         self.lang = lang
```

```diff
         self.lang = lang
         factory, llm_name = model_name.split("/")
         if factory != "liuhaotian":
-            self.base_url = os.path.join(base_url, factory, llm_name)
+            self.base_url = urljoin(base_url, f"{factory}/{llm_name}")
         else:
-            self.base_url = os.path.join(
-                base_url, "community", llm_name.replace("-v1.6", "16")
-            )
+            self.base_url = urljoin(f"{base_url}/community", llm_name.replace("-v1.6", "16"))
         self.key = key

     def describe(self, image):
```
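In the NVIDIA CV branch above, the factory and model name likewise travel as a single relative path. A rough sketch with assumed inputs (the real default `base_url` is not visible in this diff):

```python
from urllib.parse import urljoin

base_url = "https://vlm.example.com/v1/"   # assumed; trailing slash so the /v1 segment is kept
model_name = "microsoft/phi-3-vision"      # assumed "factory/llm_name" form

factory, llm_name = model_name.split("/")
print(urljoin(base_url, f"{factory}/{llm_name}"))
# https://vlm.example.com/v1/microsoft/phi-3-vision
```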
```diff
     def __init__(self, key, model_name, lang="Chinese", base_url=""):
         if not base_url:
             raise ValueError("Local llm url cannot be None")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         self.client = OpenAI(api_key="lm-studio", base_url=base_url)
         self.model_name = model_name
         self.lang = lang
```

```diff
     def __init__(self, key, model_name, lang="Chinese", base_url=""):
         if not base_url:
             raise ValueError("url cannot be None")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         self.client = OpenAI(api_key=key, base_url=base_url)
         self.model_name = model_name.split("___")[0]
         self.lang = lang
```

```diff
     def __init__(self, key, model_name, lang="Chinese", base_url=""):
         if not base_url:
             raise ValueError("Local llm url cannot be None")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         self.client = OpenAI(api_key=key, base_url=base_url)
         self.model_name = model_name
         self.lang = lang
```
```diff
 import logging
 import re
 import threading
+from urllib.parse import urljoin
 import requests
 from huggingface_hub import snapshot_download
 from zhipuai import ZhipuAI
```

```diff
     def __init__(self, key, model_name, base_url):
         if not base_url:
             raise ValueError("Local embedding model url cannot be None")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         self.client = OpenAI(api_key="empty", base_url=base_url)
         self.model_name = model_name.split("___")[0]
```

```diff
 class XinferenceEmbed(Base):
     def __init__(self, key, model_name="", base_url=""):
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         self.client = OpenAI(api_key=key, base_url=base_url)
         self.model_name = model_name
```

```diff
     def __init__(self, key, model_name, base_url):
         if not base_url:
             raise ValueError("Local llm url cannot be None")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         self.client = OpenAI(api_key="lm-studio", base_url=base_url)
         self.model_name = model_name
```

```diff
     def __init__(self, key, model_name, base_url):
         if not base_url:
             raise ValueError("url cannot be None")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         self.client = OpenAI(api_key=key, base_url=base_url)
         self.model_name = model_name.split("___")[0]
```

```diff
     def __init__(self, key, model_name, base_url):
         if not base_url:
             raise ValueError("url cannot be None")
-        if base_url.split("/")[-1] != "v1":
-            base_url = os.path.join(base_url, "v1")
+        base_url = urljoin(base_url, "v1")
         self.client = OpenAI(api_key=key, base_url=base_url)
         self.model_name = model_name
```
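All of the embedding classes above end up handing the normalized URL to the OpenAI-compatible client in the same way; a minimal sketch of that pattern with a hypothetical local endpoint:

```python
from urllib.parse import urljoin
from openai import OpenAI  # same client class the classes above instantiate

base_url = urljoin("http://127.0.0.1:9997/", "v1")   # hypothetical local endpoint
client = OpenAI(api_key="empty", base_url=base_url)  # "empty" mirrors the local-model classes above
# client.embeddings.create(...) would then POST to http://127.0.0.1:9997/v1/embeddings
```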
```diff
         self.model_name = model_name
         if self.model_name == "nvidia/nv-rerankqa-mistral-4b-v3":
-            self.base_url = os.path.join(
-                base_url, "nv-rerankqa-mistral-4b-v3", "reranking"
+            self.base_url = urljoin(base_url, "nv-rerankqa-mistral-4b-v3/reranking"
             )
         if self.model_name == "nvidia/rerank-qa-mistral-4b":
-            self.base_url = os.path.join(base_url, "reranking")
+            self.base_url = urljoin(base_url, "reranking")
             self.model_name = "nv-rerank-qa-mistral-4b:1"
         self.headers = {
```
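For the rerank endpoints, the per-model suffixes are joined the same way; a short illustration with a hypothetical base URL (the actual default is not visible in this diff):

```python
from urllib.parse import urljoin

base_url = "https://rerank.example.com/v1/"  # hypothetical; trailing slash keeps the /v1 segment
print(urljoin(base_url, "nv-rerankqa-mistral-4b-v3/reranking"))
# https://rerank.example.com/v1/nv-rerankqa-mistral-4b-v3/reranking
print(urljoin(base_url, "reranking"))
# https://rerank.example.com/v1/reranking
```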