From 210926cd915b01b27b532953d765d8bde1fff447 Mon Sep 17 00:00:00 2001
From: Xu Song
Date: Mon, 20 Jan 2025 22:16:30 +0800
Subject: [PATCH] Fix suggested_question_prompt (#12738)

---
 api/core/llm_generator/prompts.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/api/core/llm_generator/prompts.py b/api/core/llm_generator/prompts.py
index 7c0f247052..f9411e9ec7 100644
--- a/api/core/llm_generator/prompts.py
+++ b/api/core/llm_generator/prompts.py
@@ -131,7 +131,7 @@ JAVASCRIPT_CODE_GENERATOR_PROMPT_TEMPLATE = (
 SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT = (
     "Please help me predict the three most likely questions that human would ask, "
     "and keeping each question under 20 characters.\n"
-    "MAKE SURE your output is the SAME language as the Assistant's latest response"
+    "MAKE SURE your output is the SAME language as the Assistant's latest response. "
     "The output must be an array in JSON format following the specified schema:\n"
     '["question1","question2","question3"]\n'
 )
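
Why the added ". " matters: Python joins adjacent string literals at compile time, so without the
separator the two instructions in the assembled prompt collide ("...latest responseThe output must
be..."). Below is a minimal standalone sketch of that effect; the variable names are illustrative
only and not part of the patch.

    # Python concatenates adjacent string literals, which is how
    # SUGGESTED_QUESTIONS_AFTER_ANSWER_INSTRUCTION_PROMPT is assembled.

    # Before the fix: no separator, so the two sentences run together.
    before = (
        "MAKE SURE your output is the SAME language as the Assistant's latest response"
        "The output must be an array in JSON format following the specified schema:\n"
    )

    # After the fix: the trailing ". " keeps the instructions readable as two sentences.
    after = (
        "MAKE SURE your output is the SAME language as the Assistant's latest response. "
        "The output must be an array in JSON format following the specified schema:\n"
    )

    print(before)  # ...latest responseThe output must be an array...
    print(after)   # ...latest response. The output must be an array...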