
Fix: avoid losing part of the information in the last stream chunk (#5584)

### What problem does this PR solve?

Fixes a bug where part of the information in the last stream chunk could be lost.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
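
For context, here is a minimal, self-contained sketch of the failure mode and the fix, using hypothetical names (`stream_answers`, `count_tokens`) rather than the actual RAGFlow helpers: deltas shorter than the 16-token batching threshold are held back inside the streaming loop, so whatever is still buffered when the stream ends is silently dropped unless it is flushed after the loop.

```python
# Minimal sketch of the bug and the fix; names are hypothetical, not the RAGFlow API.
# The real logic lives in chat_solo() in api/db/services/dialog_service.py.

def count_tokens(text: str) -> int:
    # Stand-in for num_tokens_from_string(); a whitespace split is enough here.
    return len(text.split())


def stream_answers(chunks):
    """Yield cumulative answers, holding back deltas under 16 tokens."""
    answer, last_ans, delta_ans = "", "", ""
    for ans in chunks:                       # each `ans` is the cumulative answer so far
        answer = ans
        delta_ans = answer[len(last_ans):]   # text produced since the last yield
        if count_tokens(delta_ans) < 16:     # too small: buffer it and keep reading
            continue
        last_ans = answer
        yield answer
    # The fix from this commit: flush once more after the stream ends so a trailing
    # delta that never reached 16 tokens is not dropped. `answer` is cumulative, so
    # a consumer that replaces its displayed text is unaffected by the extra yield.
    if delta_ans:
        yield answer


if __name__ == "__main__":
    # Without the post-loop flush this prints []; with it, the short reply survives.
    print(list(stream_answers(["Hello", "Hello world"])))
```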
tags/v0.17.1
Yongteng Lei 8 months ago
commit f6dd2cd1af
1 changed file with 4 additions and 4 deletions:

- api/db/services/dialog_service.py (+4, −4)

```diff
@@ -72,7 +72,7 @@ def chat_solo(dialog, messages, stream=True):
     if prompt_config.get("tts"):
         tts_mdl = LLMBundle(dialog.tenant_id, LLMType.TTS)
     msg = [{"role": m["role"], "content": re.sub(r"##\d+\$\$", "", m["content"])}
-           for m in messages if m["role"] != "system"]
+           for m in messages if m["role"] != "system"]
     if stream:
         last_ans = ""
         for ans in chat_mdl.chat_streamly(prompt_config.get("system", ""), msg, dialog.llm_setting):
@@ -81,7 +81,9 @@ def chat_solo(dialog, messages, stream=True):
             if num_tokens_from_string(delta_ans) < 16:
                 continue
             last_ans = answer
-            yield {"answer": answer, "reference": {}, "audio_binary": tts(tts_mdl, delta_ans), "prompt":"", "created_at": time.time()}
+            yield {"answer": answer, "reference": {}, "audio_binary": tts(tts_mdl, delta_ans), "prompt": "", "created_at": time.time()}
+        if delta_ans:
+            yield {"answer": answer, "reference": {}, "audio_binary": tts(tts_mdl, delta_ans), "prompt": "", "created_at": time.time()}
     else:
         answer = chat_mdl.chat(prompt_config.get("system", ""), msg, dialog.llm_setting)
         user_content = msg[-1].get("content", "[content not available]")
@@ -518,5 +520,3 @@ def ask(question, kb_ids, tenant_id):
         answer = ans
         yield {"answer": answer, "reference": {}}
     yield decorate_answer(answer)
-
-
```
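Design note: the post-loop `if delta_ans:` check works because `answer` carries the cumulative reply rather than an increment, so emitting it one extra time after `chat_streamly` finishes can only complete the text the client sees; the trade-off, as far as the diff shows, is that when the final delta already crossed the 16-token threshold the closing yield re-synthesizes TTS audio for the same `delta_ans`.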