Non-Streaming Models Do Not Return Results Properly in _handle_invoke_result (#13571)

Co-authored-by: crazywoola <427733928@qq.com>
Author: Vasu Negi, 2025-02-14 03:02:04 -06:00 (committed by GitHub)
parent 62079991b7
commit 8a0aa91ed7

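For context: judging by the unchanged lines in the hunk below, the pre-fix handler bailed out on a non-streaming LLMResult without yielding any event, so callers iterating the generator received no text at all. A minimal self-contained sketch of that failure mode, using hypothetical stand-in names (FakeLLMResult, handle_pre_fix) rather than Dify's actual classes:

    class FakeLLMResult:
        """Hypothetical stand-in for a non-streaming LLMResult."""

    def handle_pre_fix(invoke_result):
        if isinstance(invoke_result, FakeLLMResult):
            return  # bug: the completed result is silently dropped
        yield from invoke_result  # streaming path

    print(list(handle_pre_fix(FakeLLMResult())))  # [] -> downstream sees an empty result

The fix below yields a ModelInvokeCompletedEvent carrying the normalized message text before returning.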

@@ -247,6 +247,24 @@ class LLMNode(BaseNode[LLMNodeData]):
     def _handle_invoke_result(self, invoke_result: LLMResult | Generator) -> Generator[NodeEvent, None, None]:
         if isinstance(invoke_result, LLMResult):
+            content = invoke_result.message.content
+            if content is None:
+                message_text = ""
+            elif isinstance(content, str):
+                message_text = content
+            elif isinstance(content, list):
+                # Assuming the list contains PromptMessageContent objects with a "data" attribute
+                message_text = "".join(
+                    item.data if hasattr(item, "data") and isinstance(item.data, str) else str(item) for item in content
+                )
+            else:
+                message_text = str(content)
+            yield ModelInvokeCompletedEvent(
+                text=message_text,
+                usage=invoke_result.usage,
+                finish_reason=None,
+            )
             return
         model = None
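
For reference, here is the normalization the new block performs, extracted into a standalone sketch. PromptMessageContent is a reduced hypothetical stand-in exposing only the "data" attribute the code relies on, and normalize_content is an illustrative name, not a helper in the codebase:

    from dataclasses import dataclass

    @dataclass
    class PromptMessageContent:
        # Hypothetical reduced stand-in: only the "data" attribute matters here.
        data: str

    def normalize_content(content) -> str:
        # Mirrors the content handling added to _handle_invoke_result.
        if content is None:
            return ""
        if isinstance(content, str):
            return content
        if isinstance(content, list):
            # Join "data" attributes where present; fall back to str() otherwise.
            return "".join(
                item.data if hasattr(item, "data") and isinstance(item.data, str) else str(item)
                for item in content
            )
        return str(content)

    assert normalize_content(None) == ""
    assert normalize_content("hello") == "hello"
    assert normalize_content([PromptMessageContent("foo"), "bar", 42]) == "foobar42"

Joining on the "data" attribute (with str() as a fallback) keeps the event's text field a plain string regardless of whether the provider returned None, a string, or a list of content parts.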