diff --git a/api/core/workflow/nodes/llm/llm_node.py b/api/core/workflow/nodes/llm/llm_node.py
index 0f1c2113fd..0a8839c6f9 100644
--- a/api/core/workflow/nodes/llm/llm_node.py
+++ b/api/core/workflow/nodes/llm/llm_node.py
@@ -52,6 +52,7 @@ class ModelInvokeCompleted(BaseModel):
     """
     text: str
     usage: LLMUsage
+    finish_reason: Optional[str] = None
 
 
 class LLMNode(BaseNode):
diff --git a/api/core/workflow/nodes/question_classifier/question_classifier_node.py b/api/core/workflow/nodes/question_classifier/question_classifier_node.py
index dfb1dab00a..ecab8db9b6 100644
--- a/api/core/workflow/nodes/question_classifier/question_classifier_node.py
+++ b/api/core/workflow/nodes/question_classifier/question_classifier_node.py
@@ -74,12 +74,12 @@ class QuestionClassifierNode(LLMNode):
 
         result_text = ''
         usage = LLMUsage.empty_usage()
-        finished_reason = None
+        finish_reason = None
         for event in generator:
             if isinstance(event, ModelInvokeCompleted):
                 result_text = event.text
                 usage = event.usage
-                finished_reason = event.finished_reason
+                finish_reason = event.finish_reason
                 break
 
         category_name = node_data.classes[0].name
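
For context, a minimal sketch of the consumer-side behavior this rename fixes, assuming a simplified stand-in for `LLMUsage` and a generic event generator; this is not the actual Dify module, only an illustration of why the attribute name on the event model and the attribute read in the classifier must match:

```python
from typing import Iterator, Optional, Union

from pydantic import BaseModel


class ModelInvokeCompleted(BaseModel):
    """Mirrors the event model after this diff: finish_reason is a new optional field."""
    text: str
    usage: dict  # stand-in for LLMUsage in this sketch
    finish_reason: Optional[str] = None


def consume(generator: Iterator[Union[str, ModelInvokeCompleted]]) -> Optional[str]:
    """Reads the completion event the way the question classifier does after the fix."""
    finish_reason = None
    for event in generator:
        if isinstance(event, ModelInvokeCompleted):
            # Before the fix this read event.finished_reason, an attribute that does
            # not exist on the event model, so the value was never picked up correctly.
            finish_reason = event.finish_reason
            break
    return finish_reason
```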