Fix: LLM-generated tag issue. (#7301)

### What problem does this PR solve?
#7298

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)
This commit is contained in:
Kevin Hu 2025-04-25 14:38:34 +08:00 committed by GitHub
parent eead838353
commit b271cc34b3
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -355,15 +355,22 @@ Output:
raise Exception(kwd)
try:
return json_repair.loads(kwd)
obj = json_repair.loads(kwd)
except json_repair.JSONDecodeError:
try:
result = kwd.replace(prompt[:-1], "").replace("user", "").replace("model", "").strip()
result = "{" + result.split("{")[1].split("}")[0] + "}"
return json_repair.loads(result)
obj = json_repair.loads(result)
except Exception as e:
logging.exception(f"JSON parsing error: {result} -> {e}")
raise e
res = {}
for k, v in obj.items():
try:
res[str(k)] = int(v)
except Exception:
pass
return res
def vision_llm_describe_prompt(page=None) -> str: