fix: next suggest question logic problem (#6451)

Co-authored-by: evenyan <yikun.yan@ubtrobot.com>
Even 2024-07-19 20:26:11 +08:00 committed by GitHub
parent 48f872a68c
commit c013086e64
2 changed files with 2 additions and 1 deletion


@@ -64,6 +64,7 @@ User Input:
 SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT = (
     "Please help me predict the three most likely questions that human would ask, "
     "and keeping each question under 20 characters.\n"
+    "MAKE SURE your output is the SAME language as the Assistant's latest response(if the main response is written in Chinese, then the language of your output must be using Chinese.)!\n"
     "The output must be an array in JSON format following the specified schema:\n"
     "[\"question1\",\"question2\",\"question3\"]\n"
 )
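Note: the line added above tightens the output contract of SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT, which already requires a plain JSON array of three short questions. As a minimal sketch of that contract (the helper below is hypothetical and not part of this commit), the model output could be consumed like this:

import json

def parse_suggested_questions(llm_output: str) -> list[str]:
    # Hypothetical helper, shown only to illustrate the output format the
    # prompt above asks for: a JSON array such as ["question1","question2","question3"].
    try:
        questions = json.loads(llm_output)
    except json.JSONDecodeError:
        return []
    if not isinstance(questions, list):
        return []
    # Keep at most three string entries, matching the "three most likely questions" instruction.
    return [q for q in questions if isinstance(q, str)][:3]

print(parse_suggested_questions('["q1", "q2", "q3"]'))  # ['q1', 'q2', 'q3']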


@@ -103,7 +103,7 @@ class TokenBufferMemory:
         if curr_message_tokens > max_token_limit:
             pruned_memory = []
-            while curr_message_tokens > max_token_limit and prompt_messages:
+            while curr_message_tokens > max_token_limit and len(prompt_messages)>1:
                 pruned_memory.append(prompt_messages.pop(0))
                 curr_message_tokens = self.model_instance.get_llm_num_tokens(
                     prompt_messages