From 632b23486f8cb2004cccc06b6be5e1ebf752b383 Mon Sep 17 00:00:00 2001 From: shijiefengjun <35908859+shijiefengjun@users.noreply.github.com> Date: Wed, 13 Nov 2024 16:13:52 +0800 Subject: [PATCH] Fix the value issue of anthropic (#3351) ### What problem does this PR solve? This pull request fixes the issue mentioned in https://github.com/infiniflow/ragflow/issues/3263. 1. The response should be parsed as a dict, preventing the following code from failing to retrieve values: ans = response["content"][0]["text"] 2. The API model ```claude-instant-1.2``` has been retired (see [model-deprecations](https://docs.anthropic.com/en/docs/resources/model-deprecations)); it triggers errors in the code, so I deleted it from the conf/llm_factories.json file and added the latest API model ```claude-3-5-sonnet-20241022``` ### Type of change - [x] Bug Fix (non-breaking change which fixes an issue) --------- Co-authored-by: chenhaodong Co-authored-by: Kevin Hu --- conf/llm_factories.json | 4 ++-- rag/llm/chat_model.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/conf/llm_factories.json b/conf/llm_factories.json index a6f69dc3f..1ce9adb44 100644 --- a/conf/llm_factories.json +++ b/conf/llm_factories.json @@ -2371,8 +2371,8 @@ "model_type": "chat" }, { - "llm_name": "claude-instant-1.2", - "tags": "LLM,CHAT,100k", + "llm_name": "claude-3-5-sonnet-20241022", + "tags": "LLM,CHAT,200k", "max_tokens": 102400, "model_type": "chat" } diff --git a/rag/llm/chat_model.py b/rag/llm/chat_model.py index 9060fd450..716d020ae 100644 --- a/rag/llm/chat_model.py +++ b/rag/llm/chat_model.py @@ -1260,7 +1260,7 @@ class AnthropicChat(Base): system=self.system, stream=False, **gen_conf, - ).json() + ).to_dict() ans = response["content"][0]["text"] if response["stop_reason"] == "max_tokens": ans += (