|
|
|
@@ -460,14 +460,15 @@ def verify_embedding_availability(embd_id: str, tenant_id: str) -> tuple[bool, R
     """
     try:
         llm_name, llm_factory = TenantLLMService.split_model_name_and_factory(embd_id)
-        if not LLMService.query(llm_name=llm_name, fid=llm_factory, model_type="embedding"):
-            return False, get_error_argument_result(f"Unsupported model: <{embd_id}>")
-
+        in_llm_service = bool(LLMService.query(llm_name=llm_name, fid=llm_factory, model_type="embedding"))
         # Tongyi-Qianwen is added to TenantLLM by default, but remains unusable with empty api_key
         tenant_llms = TenantLLMService.get_my_llms(tenant_id=tenant_id)
         is_tenant_model = any(llm["llm_name"] == llm_name and llm["llm_factory"] == llm_factory and llm["model_type"] == "embedding" for llm in tenant_llms)
 
         is_builtin_model = embd_id in settings.BUILTIN_EMBEDDING_MODELS
+        if not (is_builtin_model or is_tenant_model or in_llm_service):
+            return False, get_error_argument_result(f"Unsupported model: <{embd_id}>")
+
         if not (is_builtin_model or is_tenant_model):
             return False, get_error_argument_result(f"Unauthorized model: <{embd_id}>")
     except OperationalError as e: