
Fix: self-deployed LLM error (#9217)

### What problem does this PR solve?

Close #9197
Close #9145

### Type of change

- [x] Refactoring
- [x] Bug fixing.
tags/v0.20.1
Kevin Hu committed 6ec3f18e22 3 months ago

api/apps/sdk/dify_retrieval.py (+4, -2)

  #
  # Copyright 2024 The InfiniFlow Authors. All Rights Reserved.
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
  …
      for c in ranks["chunks"]:
          e, doc = DocumentService.get_by_id( c["doc_id"])
          c.pop("vector", None)
+         meta = getattr(doc, 'meta_fields', {})
+         meta["doc_id"] = c["doc_id"]
          records.append({
              "content": c["content_with_weight"],
              "score": c["similarity"],
              "title": c["docnm_kwd"],
-             "metadata": getattr(doc, 'meta_fields', {})
+             "metadata": meta
          })

      return jsonify({"records": records})
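
The effect of this change is that each record handed back to Dify now carries the source document id inside its metadata instead of only the document's `meta_fields`. A minimal, self-contained sketch of the new record shape, using hypothetical chunk/document values in place of the real `ranks["chunks"]` entry and `DocumentService.get_by_id()` result:

```python
# Hypothetical stand-ins for a retrieved chunk and its document record;
# the field names match the ones used in dify_retrieval.py.
c = {
    "doc_id": "d42",
    "content_with_weight": "Self-deployed LLMs are configured per tenant.",
    "similarity": 0.87,
    "docnm_kwd": "guide.pdf",
}

class _Doc:                                   # stands in for the DocumentService result
    meta_fields = {"author": "alice"}

doc = _Doc()

# Same pattern as the patch: start from meta_fields (or an empty dict) and
# tag it with the chunk's document id before exposing it as metadata.
meta = getattr(doc, "meta_fields", {})
meta["doc_id"] = c["doc_id"]

record = {
    "content": c["content_with_weight"],
    "score": c["similarity"],
    "title": c["docnm_kwd"],
    "metadata": meta,                         # was getattr(doc, "meta_fields", {}) with no doc_id
}
assert record["metadata"]["doc_id"] == "d42"
```

Note that `meta` aliases `doc.meta_fields` when the attribute exists, so the `doc_id` key is written into that dict in place rather than into a copy.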

api/db/services/canvas_service.py (+2, -1)

      conv.message.append({"role": "assistant", "content": txt, "created_at": time.time(), "id": message_id})
      conv.reference = canvas.get_reference()
      conv.errors = canvas.error
-     API4ConversationService.append_message(conv.id, conv.to_dict())
+     conv = conv.to_dict()
+     API4ConversationService.append_message(conv["id"], conv)


  def completionOpenAI(tenant_id, agent_id, question, session_id=None, stream=True, **kwargs):
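
The refactor serializes the conversation once and reuses the resulting dict, reading the id from it instead of from the model instance. A minimal sketch of that pattern, with a hypothetical `Conv` class in place of the ORM model and the real `API4ConversationService.append_message` call left as a comment:

```python
import time

class Conv:                                   # hypothetical stand-in for the conversation model
    def __init__(self, id, message):
        self.id = id
        self.message = message

    def to_dict(self):
        return {"id": self.id, "message": list(self.message)}

conv = Conv("sess-1", [])
conv.message.append({"role": "assistant", "content": "hi", "created_at": time.time(), "id": "m-1"})

# Before: API4ConversationService.append_message(conv.id, conv.to_dict())
# After: convert once, then pass the dict and read the id from it.
conv = conv.to_dict()
# API4ConversationService.append_message(conv["id"], conv)   # real call in canvas_service.py
assert conv["id"] == "sess-1" and conv["message"][0]["role"] == "assistant"
```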

api/db/services/llm_service.py (+3, -0)

          if llm_id == llm["llm_name"]:
              return llm["model_type"].split(",")[-1]

+     for llm in LLMService.query(llm_name=llm_id):
+         return llm.model_type


  class LLMBundle:
      def __init__(self, tenant_id, llm_type, llm_name=None, lang="Chinese", **kwargs):
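
The added loop reads like a fallback: if `llm_id` is not found among the built-in factory entries, the model type is resolved from the models registered in the database via `LLMService.query(llm_name=...)`, which is where a self-deployed model would be recorded. A standalone sketch of that lookup order, with hypothetical in-memory lists standing in for the factory table and the registered-model table:

```python
# Hypothetical data: a built-in factory list and a table of user-registered
# (e.g. self-deployed) models; names and shapes are illustrative only.
BUILTIN_LLMS = [{"llm_name": "gpt-4o", "model_type": "chat"}]
REGISTERED_LLMS = [{"llm_name": "my-llama@VLLM", "model_type": "chat"}]

def _query(llm_name):                          # stands in for LLMService.query(llm_name=...)
    return [m for m in REGISTERED_LLMS if m["llm_name"] == llm_name]

def get_model_type(llm_id):
    # First pass: built-in factory entries, as before the patch.
    for llm in BUILTIN_LLMS:
        if llm_id == llm["llm_name"]:
            return llm["model_type"].split(",")[-1]
    # Fallback added by the patch: look the name up among registered models.
    for llm in _query(llm_name=llm_id):
        return llm["model_type"]

assert get_model_type("my-llama@VLLM") == "chat"
```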

rag/utils/s3_conn.py (+5, -5)



      @use_prefix_path
      @use_default_bucket
-     def put(self, bucket, fnm, binary):
+     def put(self, bucket, fnm, binary, **kwargs):
          logging.debug(f"bucket name {bucket}; filename :{fnm}:")
          for _ in range(1):
              try:
  …
      @use_prefix_path
      @use_default_bucket
-     def rm(self, bucket, fnm):
+     def rm(self, bucket, fnm, **kwargs):
          try:
              self.conn.delete_object(Bucket=bucket, Key=fnm)
          except Exception:
  …
      @use_prefix_path
      @use_default_bucket
-     def get(self, bucket, fnm):
+     def get(self, bucket, fnm, **kwargs):
          for _ in range(1):
              try:
                  r = self.conn.get_object(Bucket=bucket, Key=fnm)
  …
      @use_prefix_path
      @use_default_bucket
-     def obj_exist(self, bucket, fnm):
+     def obj_exist(self, bucket, fnm, **kwargs):
          try:
              if self.conn.head_object(Bucket=bucket, Key=fnm):
                  return True
  …
      @use_prefix_path
      @use_default_bucket
-     def get_presigned_url(self, bucket, fnm, expires):
+     def get_presigned_url(self, bucket, fnm, expires, **kwargs):
          for _ in range(10):
              try:
                  r = self.conn.generate_presigned_url('get_object',
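
Padding every storage method with `**kwargs` makes the S3 connector tolerant of extra keyword arguments that a shared call site might pass for other backends. A minimal sketch of that pattern under an assumed common caller (the `tenant_id` argument and class names here are hypothetical):

```python
class S3Like:
    def put(self, bucket, fnm, binary, **kwargs):          # extra kwargs accepted and ignored
        return f"s3: put {fnm} into {bucket} ({len(binary)} bytes)"

class OtherBackend:
    def put(self, bucket, fnm, binary, tenant_id=None, **kwargs):
        return f"other: put {fnm} for tenant {tenant_id}"

def save(storage, bucket, fnm, binary):
    # Hypothetical shared call site that always forwards tenant_id, even to
    # backends that do not use it. Without **kwargs, S3Like.put() would raise
    # TypeError: put() got an unexpected keyword argument 'tenant_id'.
    return storage.put(bucket, fnm, binary, tenant_id="t1")

print(save(S3Like(), "ragflow", "a.bin", b"\x00\x01"))
print(save(OtherBackend(), "ragflow", "a.bin", b"\x00\x01"))
```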
