
Fix: user_default_llm configuration doesn't work for OpenAI API compatible LLM factory (#8502)

### What problem does this PR solve?

Fixes https://github.com/infiniflow/ragflow/issues/8467

When an LLM is added, its llm_name is persisted with a factory suffix, e.g. "llm1___OpenAI-API" (see f09ca8e795/api/apps/llm_app.py (L173)), so a lookup by the bare name "llm1" finds nothing.
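The mismatch can be sketched in a few lines (a minimal illustration; `stored_name` is a hypothetical helper mirroring the behavior in llm_app.py (L173), not a ragflow API):

```python
# Sketch of the mismatch: the name stored at add time carries the
# factory suffix, while user_default_llm holds the bare name.
def stored_name(llm_name: str, factory_id: str) -> str:
    suffix = {"OpenAI-API-Compatible": "___OpenAI-API"}.get(factory_id, "")
    return llm_name + suffix

assert stored_name("llm1", "OpenAI-API-Compatible") == "llm1___OpenAI-API"
# A lookup keyed on the bare "llm1" therefore returns no rows.
```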


### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
tags/v0.20.0
Stephen Hu committed 4 months ago
commit 938d8dd878
1 changed file with 12 additions and 0 deletions:

api/db/services/llm_service.py (+12, -0)

```diff
@@ -45,6 +45,18 @@ class TenantLLMService(CommonService):
             objs = cls.query(tenant_id=tenant_id, llm_name=mdlnm)
         else:
             objs = cls.query(tenant_id=tenant_id, llm_name=mdlnm, llm_factory=fid)
+
+        if (not objs) and fid:
+            if fid == "LocalAI":
+                mdlnm += "___LocalAI"
+            elif fid == "HuggingFace":
+                mdlnm += "___HuggingFace"
+            elif fid == "OpenAI-API-Compatible":
+                mdlnm += "___OpenAI-API"
+            elif fid == "VLLM":
+                mdlnm += "___VLLM"
+            objs = cls.query(tenant_id=tenant_id, llm_name=mdlnm, llm_factory=fid)
+
         if not objs:
             return
         return objs[0]
```
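In isolation, the fallback this hunk introduces reads roughly like the sketch below (a self-contained illustration with a stubbed lookup; `find_tenant_llm` and `lookup` are hypothetical names, not ragflow APIs):

```python
# Suffixes appended to llm_name at add time, per the hunk above.
FACTORY_SUFFIX = {
    "LocalAI": "___LocalAI",
    "HuggingFace": "___HuggingFace",
    "OpenAI-API-Compatible": "___OpenAI-API",
    "VLLM": "___VLLM",
}

def find_tenant_llm(lookup, tenant_id, mdlnm, fid):
    """First try the bare name; on a miss, retry with the factory suffix."""
    objs = lookup(tenant_id=tenant_id, llm_name=mdlnm, llm_factory=fid)
    if not objs and fid in FACTORY_SUFFIX:
        objs = lookup(tenant_id=tenant_id,
                      llm_name=mdlnm + FACTORY_SUFFIX[fid],
                      llm_factory=fid)
    return objs[0] if objs else None

# Usage: a fake store where the model was saved under the suffixed name.
store = {("tenant-1", "llm1___OpenAI-API", "OpenAI-API-Compatible"): ["cfg"]}
lookup = lambda tenant_id, llm_name, llm_factory: store.get(
    (tenant_id, llm_name, llm_factory), [])
assert find_tenant_llm(lookup, "tenant-1", "llm1", "OpenAI-API-Compatible") == "cfg"
```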
