mirror of
https://git.mirrors.martin98.com/https://github.com/infiniflow/ragflow.git
synced 2025-08-12 20:59:00 +08:00
Fix/bedrock issues (#2718)
### What problem does this PR solve? Adding a Bedrock API key for Claude Sonnet was broken. I found that the issue came up when trying to test the LLM configuration: the `system` argument passed to boto3's converse API must not be None. In addition, there were problems in the Bedrock embedding implementation when trying to encode queries. ### Type of change - [X] Bug Fix (non-breaking change which fixes an issue)
This commit is contained in:
parent
abe9995a7c
commit
34761fa4ca
@ -630,7 +630,7 @@ class BedrockChat(Base):
|
||||
modelId=self.model_name,
|
||||
messages=history,
|
||||
inferenceConfig=gen_conf,
|
||||
system=[{"text": system}] if system else None,
|
||||
system=[{"text": (system if system else "Answer the user's message.")}] ,
|
||||
)
|
||||
|
||||
# Extract and print the response text.
|
||||
@ -675,7 +675,8 @@ class BedrockChat(Base):
|
||||
streaming_response = self.client.converse_stream(
|
||||
modelId=self.model_name,
|
||||
messages=history,
|
||||
inferenceConfig=gen_conf
|
||||
inferenceConfig=gen_conf,
|
||||
system=[{"text": system if system else ""}],
|
||||
)
|
||||
|
||||
# Extract and print the streamed response text in real-time.
|
||||
|
@ -443,7 +443,7 @@ class BedrockEmbed(Base):
|
||||
|
||||
response = self.client.invoke_model(modelId=self.model_name, body=json.dumps(body))
|
||||
model_response = json.loads(response["body"].read())
|
||||
embeddings.extend([model_response["embedding"]])
|
||||
embeddings.extend(model_response["embedding"])
|
||||
|
||||
return np.array(embeddings), token_count
|
||||
|
||||
|
Loading…
x
Reference in New Issue
Block a user