mirror of
https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git
synced 2025-08-12 22:59:02 +08:00
Fix: When calling the Create chat completion API, the response data is incoherent (#5928)
### What problem does this PR solve? When calling the Create chat completion API with streaming, the response data is incoherent: each chunk's delta was sliced from the wrong offset, so streamed text could repeat or skip characters. ### Type of change - [x] Bug Fix (non-breaking change which fixes an issue) Co-authored-by: renqi <renqi08266@fxomail.com>
This commit is contained in:
parent
7bd5a52019
commit
ed11be23bf
@ -259,6 +259,7 @@ def chat_completion_openai_like(tenant_id, chat_id):
|
||||
# The choices field on the last chunk will always be an empty array [].
|
||||
def streamed_response_generator(chat_id, dia, msg):
|
||||
token_used = 0
|
||||
should_split_index = 0
|
||||
response = {
|
||||
"id": f"chatcmpl-{chat_id}",
|
||||
"choices": [
|
||||
@ -284,8 +285,19 @@ def chat_completion_openai_like(tenant_id, chat_id):
|
||||
try:
|
||||
for ans in chat(dia, msg, True):
|
||||
answer = ans["answer"]
|
||||
incremental = answer[token_used:]
|
||||
incremental = answer[should_split_index:]
|
||||
token_used += len(incremental)
|
||||
|
||||
"""
|
||||
bugfix: When calling the Create chat completion API, the response data is incoherent.
|
||||
bug code: token_used += len(incremental)
|
||||
fix author: 任奇
|
||||
"""
|
||||
if incremental.endswith("</think>"):
|
||||
response_data_len = len(incremental.rstrip("</think>"))
|
||||
else:
|
||||
response_data_len = len(incremental)
|
||||
should_split_index += response_data_len
|
||||
response["choices"][0]["delta"]["content"] = incremental
|
||||
yield f"data:{json.dumps(response, ensure_ascii=False)}\n\n"
|
||||
except Exception as e:
|
||||
|
Loading…
x
Reference in New Issue
Block a user