From 6acc46bc7b51a8b31d010f891dacef54091d396f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E9=BB=84=E8=85=BE?= <101850389+hangters@users.noreply.github.com>
Date: Thu, 15 Aug 2024 14:54:49 +0800
Subject: [PATCH] fix add Bedrock llm error (#1952)

### What problem does this PR solve?

Fixes #1942: resolves the error raised when adding a Bedrock LLM. Bedrock's
Converse API requires each message's `content` to be a list of content blocks,
so plain-string history entries are now wrapped as `[{"text": ...}]` before
the request is sent.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

---------

Co-authored-by: Zhedong Cen
---
 rag/llm/chat_model.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/rag/llm/chat_model.py b/rag/llm/chat_model.py
index f5cdf5d25..6a56026b6 100644
--- a/rag/llm/chat_model.py
+++ b/rag/llm/chat_model.py
@@ -678,6 +678,10 @@ class BedrockChat(Base):
         if "top_p" in gen_conf:
             gen_conf["topP"] = gen_conf["top_p"]
             _ = gen_conf.pop("top_p")
+        for item in history:
+            if not isinstance(item["content"],list) and not isinstance(item["content"],tuple):
+                item["content"] = [{"text":item["content"]}]
+
         try:
             # Send the message to the model, using a basic inference configuration.
@@ -707,7 +711,10 @@ class BedrockChat(Base):
         if "top_p" in gen_conf:
             gen_conf["topP"] = gen_conf["top_p"]
             _ = gen_conf.pop("top_p")
-
+        for item in history:
+            if not isinstance(item["content"],list) and not isinstance(item["content"],tuple):
+                item["content"] = [{"text":item["content"]}]
+
         if self.model_name.split('.')[0] == 'ai21':
             try:
                 response = self.client.converse(