From b271cc34b3e5fa24ca067d662b2b0ac4fb561c51 Mon Sep 17 00:00:00 2001
From: Kevin Hu
Date: Fri, 25 Apr 2025 14:38:34 +0800
Subject: [PATCH] Fix: LLM generated tag issue. (#7301)

### What problem does this PR solve?

#7298

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
---
 rag/prompts.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/rag/prompts.py b/rag/prompts.py
index d18157c09..13c0fce88 100644
--- a/rag/prompts.py
+++ b/rag/prompts.py
@@ -355,15 +355,22 @@ Output:
         raise Exception(kwd)

     try:
-        return json_repair.loads(kwd)
+        obj = json_repair.loads(kwd)
     except json_repair.JSONDecodeError:
         try:
             result = kwd.replace(prompt[:-1], "").replace("user", "").replace("model", "").strip()
             result = "{" + result.split("{")[1].split("}")[0] + "}"
-            return json_repair.loads(result)
+            obj = json_repair.loads(result)
         except Exception as e:
             logging.exception(f"JSON parsing error: {result} -> {e}")
             raise e
+    res = {}
+    for k, v in obj.items():
+        try:
+            res[str(k)] = int(v)
+        except Exception:
+            pass
+    return res


 def vision_llm_describe_prompt(page=None) -> str:
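
A minimal sketch (not part of the commit) of what the added post-processing does: the tag map parsed from the LLM response is coerced to `{str: int}`, and any entry whose value cannot be converted to an integer is silently dropped. The `raw` string below is a hypothetical LLM response invented for illustration.

```python
import json_repair

# Hypothetical LLM output for a tagging request: one weight is a numeric
# string, one is an int, and one is not a number at all.
raw = '{"finance": "8", "machine learning": 9, "note": "n/a"}'

obj = json_repair.loads(raw)

# Same coercion loop the patch adds: keep only integer-valued tags.
res = {}
for k, v in obj.items():
    try:
        res[str(k)] = int(v)
    except Exception:
        pass

print(res)  # {'finance': 8, 'machine learning': 9}
```

The effect is that downstream consumers of the tag map can rely on integer weights instead of failing on whatever value the LLM happened to emit.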