From f6dd2cd1af67b2e105fe34396a78f8d1b9c3edad Mon Sep 17 00:00:00 2001
From: Yongteng Lei
Date: Tue, 4 Mar 2025 11:58:10 +0800
Subject: [PATCH] Fix: possible loss of part of the last stream chunk (#5584)

### What problem does this PR solve?

Fix possible loss of part of the last stream chunk: deltas smaller than the
token threshold are buffered instead of yielded, and the remainder was never
flushed once the stream ended.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
---
 api/db/services/dialog_service.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/api/db/services/dialog_service.py b/api/db/services/dialog_service.py
index d77d19f20..338d67106 100644
--- a/api/db/services/dialog_service.py
+++ b/api/db/services/dialog_service.py
@@ -72,7 +72,7 @@ def chat_solo(dialog, messages, stream=True):
     if prompt_config.get("tts"):
         tts_mdl = LLMBundle(dialog.tenant_id, LLMType.TTS)
     msg = [{"role": m["role"], "content": re.sub(r"##\d+\$\$", "", m["content"])}
-            for m in messages if m["role"] != "system"]
+           for m in messages if m["role"] != "system"]
     if stream:
         last_ans = ""
         for ans in chat_mdl.chat_streamly(prompt_config.get("system", ""), msg, dialog.llm_setting):
@@ -81,7 +81,9 @@ def chat_solo(dialog, messages, stream=True):
             if num_tokens_from_string(delta_ans) < 16:
                 continue
             last_ans = answer
-            yield {"answer": answer, "reference": {}, "audio_binary": tts(tts_mdl, delta_ans), "prompt":"", "created_at": time.time()}
+            yield {"answer": answer, "reference": {}, "audio_binary": tts(tts_mdl, delta_ans), "prompt": "", "created_at": time.time()}
+        if delta_ans:
+            yield {"answer": answer, "reference": {}, "audio_binary": tts(tts_mdl, delta_ans), "prompt": "", "created_at": time.time()}
     else:
         answer = chat_mdl.chat(prompt_config.get("system", ""), msg, dialog.llm_setting)
         user_content = msg[-1].get("content", "[content not available]")
@@ -518,5 +520,3 @@ def ask(question, kb_ids, tenant_id):
         answer = ans
         yield {"answer": answer, "reference": {}}
     yield decorate_answer(answer)
-
-
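
For context, here is a minimal, self-contained sketch (not ragflow code) of the buffer-and-flush pattern this patch applies: deltas below a token threshold are accumulated rather than yielded, so without a final flush after the loop the tail of the answer is silently dropped. `fake_stream`, `token_count`, `stream_chunks`, and `MIN_TOKENS` are illustrative stand-ins for `chat_mdl.chat_streamly`, `num_tokens_from_string`, the generator in `chat_solo`, and the 16-token threshold.

```python
# Minimal sketch of the buffer-and-flush pattern; names are stand-ins,
# not ragflow's API.

MIN_TOKENS = 4  # assumed small threshold so the example is easy to follow


def fake_stream():
    # Stand-in for chat_mdl.chat_streamly(): yields the accumulated
    # answer so far, one word at a time.
    words = "streamed answer that ends with a short final tail".split()
    for i in range(1, len(words) + 1):
        yield " ".join(words[:i])


def token_count(text: str) -> int:
    # Stand-in for num_tokens_from_string(); whitespace tokens suffice here.
    return len(text.split())


def stream_chunks():
    last_ans = ""
    delta_ans = ""
    answer = ""
    for answer in fake_stream():
        delta_ans = answer[len(last_ans):]
        if token_count(delta_ans) < MIN_TOKENS:
            # Delta too small: keep buffering instead of emitting a tiny chunk.
            continue
        last_ans = answer
        yield {"answer": answer, "delta": delta_ans}
        delta_ans = ""
    # The fix: flush whatever is still buffered once the stream ends.
    # Without this, the trailing " tail" delta is never emitted.
    if delta_ans:
        yield {"answer": answer, "delta": delta_ans}


if __name__ == "__main__":
    for chunk in stream_chunks():
        print(chunk)
```

Running the sketch emits two threshold-sized chunks plus one final small chunk; removing the post-loop flush reproduces the lost-tail behavior the patch fixes.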