From 3571270191a66c9fab0714cd355ee87d9508fea6 Mon Sep 17 00:00:00 2001
From: Kevin Hu
Date: Wed, 12 Mar 2025 19:40:54 +0800
Subject: [PATCH] Refa: refine the context window size warning. (#5993)

### What problem does this PR solve?

### Type of change

- [x] Refactoring
---
 rag/llm/chat_model.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/rag/llm/chat_model.py b/rag/llm/chat_model.py
index ce6457bf7..362433998 100644
--- a/rag/llm/chat_model.py
+++ b/rag/llm/chat_model.py
@@ -29,8 +29,8 @@ import json
 import requests
 import asyncio
 
-LENGTH_NOTIFICATION_CN = "······\n由于长度的原因,回答被截断了,要继续吗?"
-LENGTH_NOTIFICATION_EN = "...\nFor the content length reason, it stopped, continue?"
+LENGTH_NOTIFICATION_CN = "······\n由于大模型的上下文窗口大小限制,回答已经被大模型截断。"
+LENGTH_NOTIFICATION_EN = "...\nThe answer is truncated by your chosen LLM due to its context length limitation."
 
 
 class Base(ABC):
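
For context, here is a minimal sketch of how notification constants like these are typically consumed by a chat wrapper: when the provider reports `finish_reason == "length"`, the matching notice is appended to the truncated answer. The `is_chinese` helper and the OpenAI-style client call below are illustrative assumptions, not the exact code in `rag/llm/chat_model.py`.

```python
import re
from openai import OpenAI

LENGTH_NOTIFICATION_CN = "······\n由于大模型的上下文窗口大小限制,回答已经被大模型截断。"
LENGTH_NOTIFICATION_EN = "...\nThe answer is truncated by your chosen LLM due to its context length limitation."


def is_chinese(text: str) -> bool:
    # Hypothetical helper: treat the answer as Chinese if it contains CJK characters.
    return re.search(r"[\u4e00-\u9fff]", text) is not None


def chat_once(client: OpenAI, model: str, messages: list[dict]) -> str:
    # Sketch of a single non-streaming chat call against an OpenAI-compatible API.
    resp = client.chat.completions.create(model=model, messages=messages)
    choice = resp.choices[0]
    ans = choice.message.content or ""
    # If the model stopped because it hit its context window limit, append the
    # language-appropriate truncation notice so the user knows why the answer ends abruptly.
    if choice.finish_reason == "length":
        ans += LENGTH_NOTIFICATION_CN if is_chinese(ans) else LENGTH_NOTIFICATION_EN
    return ans
```

The point of the wording change itself is that the old messages asked the user whether to continue, while the new ones simply state that the chosen LLM truncated the answer because of its context window limit.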