Fix error when adding Bedrock LLM (#1952)

### What problem does this PR solve?

Fixes #1942: an error when adding a Bedrock LLM. Chat message `content` was passed to the Bedrock Converse API as a plain string, but the API requires a list of content blocks, so the history is now normalized before each `converse` call.

### Type of change

- [x] Bug Fix (non-breaking change which fixes an issue)

---------

Co-authored-by: Zhedong Cen <cenzhedong2@126.com>

```diff
@@ -678,6 +678,10 @@ class BedrockChat(Base):
         if "top_p" in gen_conf:
             gen_conf["topP"] = gen_conf["top_p"]
             _ = gen_conf.pop("top_p")
+        for item in history:
+            if not isinstance(item["content"],list) and not isinstance(item["content"],tuple):
+                item["content"] = [{"text":item["content"]}]
         try:
             # Send the message to the model, using a basic inference configuration.
@@ -707,7 +711,10 @@ class BedrockChat(Base):
         if "top_p" in gen_conf:
             gen_conf["topP"] = gen_conf["top_p"]
             _ = gen_conf.pop("top_p")
+        for item in history:
+            if not isinstance(item["content"],list) and not isinstance(item["content"],tuple):
+                item["content"] = [{"text":item["content"]}]
         if self.model_name.split('.')[0] == 'ai21':
             try:
                 response = self.client.converse(
```
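
For context, the Bedrock Converse API rejects messages whose `content` field is a plain string; each message's `content` must be a list of content blocks such as `[{"text": "..."}]`. Below is a minimal sketch of the normalization this patch performs, assuming a boto3 `bedrock-runtime` client; the `normalize_history` helper and the model ID are illustrative and not part of the patched code:

```python
# Minimal sketch of the normalization the patch adds. Assumes a boto3
# "bedrock-runtime" client; helper name and model ID are illustrative only.
import boto3

def normalize_history(history):
    """Coerce plain-string message content into Converse-style content blocks."""
    for item in history:
        # Converse expects content as a list of blocks, e.g. [{"text": "..."}].
        if not isinstance(item["content"], (list, tuple)):
            item["content"] = [{"text": item["content"]}]
    return history

client = boto3.client("bedrock-runtime")  # region/credentials come from your environment
history = [{"role": "user", "content": "Hello, Bedrock!"}]  # plain string, pre-normalization

response = client.converse(
    modelId="ai21.jamba-instruct-v1:0",  # any Converse-capable model ID
    messages=normalize_history(history),
    inferenceConfig={"temperature": 0.5},
)
print(response["output"]["message"]["content"][0]["text"])
```

Without the normalization, boto3 rejects the plain-string `content` during parameter validation, which is why the loop runs before every `converse` call in both code paths above.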