Browse Source

Update api_utils.py (#8069)

### What problem does this PR solve?


https://github.com/infiniflow/ragflow/issues/8059#issuecomment-2942407486
Lazily raise the "unsupported model" exception to better support custom embedding models.

### Type of change
- [x] Bug Fix (non-breaking change which fixes an issue)
tags/v0.19.1
Stephen Hu 4 months ago
parent
commit
f819378fb0
No account linked to committer's email address
1 changed file with 4 additions and 3 deletions
  1. 4
    3
      api/utils/api_utils.py

+ 4
- 3
api/utils/api_utils.py View File

@@ -460,14 +460,15 @@ def verify_embedding_availability(embd_id: str, tenant_id: str) -> tuple[bool, R
"""
try:
llm_name, llm_factory = TenantLLMService.split_model_name_and_factory(embd_id)
if not LLMService.query(llm_name=llm_name, fid=llm_factory, model_type="embedding"):
return False, get_error_argument_result(f"Unsupported model: <{embd_id}>")

in_llm_service = bool(LLMService.query(llm_name=llm_name, fid=llm_factory, model_type="embedding"))
# Tongyi-Qianwen is added to TenantLLM by default, but remains unusable with empty api_key
tenant_llms = TenantLLMService.get_my_llms(tenant_id=tenant_id)
is_tenant_model = any(llm["llm_name"] == llm_name and llm["llm_factory"] == llm_factory and llm["model_type"] == "embedding" for llm in tenant_llms)

is_builtin_model = embd_id in settings.BUILTIN_EMBEDDING_MODELS
if not ((is_builtin_model or is_tenant_model or in_llm_service)):
return False, get_error_argument_result(f"Unsupported model: <{embd_id}>")

if not (is_builtin_model or is_tenant_model):
return False, get_error_argument_result(f"Unauthorized model: <{embd_id}>")
except OperationalError as e:

Loading…
Cancel
Save