From 2cad98f01fef396571da8c5d8ca83e699815038f Mon Sep 17 00:00:00 2001
From: "cooper.wu"
Date: Mon, 26 May 2025 18:04:52 +0800
Subject: [PATCH] fix: #18132 when deepseek llm model, auto_generate name can't
 work (#18646)

Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
---
 api/core/llm_generator/llm_generator.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/api/core/llm_generator/llm_generator.py b/api/core/llm_generator/llm_generator.py
index e5dbc30689..e01896a491 100644
--- a/api/core/llm_generator/llm_generator.py
+++ b/api/core/llm_generator/llm_generator.py
@@ -51,15 +51,19 @@ class LLMGenerator:
         response = cast(
             LLMResult,
             model_instance.invoke_llm(
-                prompt_messages=list(prompts), model_parameters={"max_tokens": 100, "temperature": 1}, stream=False
+                prompt_messages=list(prompts), model_parameters={"max_tokens": 500, "temperature": 1}, stream=False
             ),
         )
         answer = cast(str, response.message.content)
         cleaned_answer = re.sub(r"^.*(\{.*\}).*$", r"\1", answer, flags=re.DOTALL)
         if cleaned_answer is None:
             return ""
-        result_dict = json.loads(cleaned_answer)
-        answer = result_dict["Your Output"]
+        try:
+            result_dict = json.loads(cleaned_answer)
+            answer = result_dict["Your Output"]
+        except json.JSONDecodeError as e:
+            logging.exception("Failed to generate name after answer, use query instead")
+            answer = query
         name = answer.strip()
         if len(name) > 75: