From 6d66e3f680b849cfb718e7dd73bdbd4916ce4194 Mon Sep 17 00:00:00 2001
From: Novice <857526207@qq.com>
Date: Thu, 17 Apr 2025 10:41:56 +0800
Subject: [PATCH] fix(follow_ups): handle empty LLM responses in context
 (#18237)

---
 api/core/memory/token_buffer_memory.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/api/core/memory/token_buffer_memory.py b/api/core/memory/token_buffer_memory.py
index 003a0c85b1..3c90dd22a2 100644
--- a/api/core/memory/token_buffer_memory.py
+++ b/api/core/memory/token_buffer_memory.py
@@ -44,6 +44,7 @@ class TokenBufferMemory:
                 Message.created_at,
                 Message.workflow_run_id,
                 Message.parent_message_id,
+                Message.answer_tokens,
             )
             .filter(
                 Message.conversation_id == self.conversation.id,
@@ -63,7 +64,7 @@ class TokenBufferMemory:
         thread_messages = extract_thread_messages(messages)
 
         # for newly created message, its answer is temporarily empty, we don't need to add it to memory
-        if thread_messages and not thread_messages[0].answer:
+        if thread_messages and not thread_messages[0].answer and thread_messages[0].answer_tokens == 0:
             thread_messages.pop(0)
 
         messages = list(reversed(thread_messages))
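
The sketch below illustrates the behaviour the patch is after, outside of SQLAlchemy. The ThreadMessage dataclass and drop_pending_head helper are hypothetical stand-ins for the selected Message columns and the in-method check, and it assumes (as the patch appears to) that a finished message records a non-zero answer_tokens even when its visible answer ends up empty. Under that assumption, only the still-pending newest message (empty answer, zero tokens) is dropped from memory, while a genuinely empty LLM response is kept in the context.

from dataclasses import dataclass


@dataclass
class ThreadMessage:
    # Hypothetical stand-in for the Message columns selected by the query above.
    query: str
    answer: str
    answer_tokens: int


def drop_pending_head(thread_messages: list[ThreadMessage]) -> list[ThreadMessage]:
    # Mirrors the patched condition: pop the newest message only when its
    # answer is empty AND no tokens were generated for it, i.e. the LLM has
    # not responded yet. An empty answer with answer_tokens > 0 is treated as
    # a real (empty) LLM response and stays in memory.
    if thread_messages and not thread_messages[0].answer and thread_messages[0].answer_tokens == 0:
        thread_messages.pop(0)
    return thread_messages


# Newest-first, as the query returns rows before reversal.
pending = ThreadMessage(query="follow-up?", answer="", answer_tokens=0)   # still generating
empty_reply = ThreadMessage(query="hello", answer="", answer_tokens=3)    # LLM replied with nothing
answered = ThreadMessage(query="hi", answer="Hello!", answer_tokens=2)

assert drop_pending_head([pending, answered]) == [answered]            # pending head removed
assert drop_pending_head([empty_reply, answered])[0] is empty_reply    # empty reply preserved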