diff --git a/api/.env.example b/api/.env.example index 9dec564c28..e63aac1409 100644 --- a/api/.env.example +++ b/api/.env.example @@ -269,6 +269,7 @@ OPENSEARCH_PORT=9200 OPENSEARCH_USER=admin OPENSEARCH_PASSWORD=admin OPENSEARCH_SECURE=true +OPENSEARCH_VERIFY_CERTS=true # Baidu configuration BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287 diff --git a/api/configs/middleware/vdb/opensearch_config.py b/api/configs/middleware/vdb/opensearch_config.py index 96f478e9a6..9fd9b60194 100644 --- a/api/configs/middleware/vdb/opensearch_config.py +++ b/api/configs/middleware/vdb/opensearch_config.py @@ -33,6 +33,11 @@ class OpenSearchConfig(BaseSettings): default=False, ) + OPENSEARCH_VERIFY_CERTS: bool = Field( + description="Whether to verify SSL certificates for HTTPS connections (recommended to set True in production)", + default=True, + ) + OPENSEARCH_AUTH_METHOD: AuthMethod = Field( description="Authentication method for OpenSearch connection (default is 'basic')", default=AuthMethod.BASIC, diff --git a/api/controllers/console/auth/login.py b/api/controllers/console/auth/login.py index 86231bf616..5f2a24322d 100644 --- a/api/controllers/console/auth/login.py +++ b/api/controllers/console/auth/login.py @@ -202,18 +202,18 @@ class EmailCodeLoginApi(Resource): except AccountRegisterError as are: raise AccountInFreezeError() if account: - tenant = TenantService.get_join_tenants(account) - if not tenant: + tenants = TenantService.get_join_tenants(account) + if not tenants: workspaces = FeatureService.get_system_features().license.workspaces if not workspaces.is_available(): raise WorkspacesLimitExceeded() if not FeatureService.get_system_features().is_allow_create_workspace: raise NotAllowedCreateWorkspace() else: - tenant = TenantService.create_tenant(f"{account.name}'s Workspace") - TenantService.create_tenant_member(tenant, account, role="owner") - account.current_tenant = tenant - tenant_was_created.send(tenant) + new_tenant = 
TenantService.create_tenant(f"{account.name}'s Workspace") + TenantService.create_tenant_member(new_tenant, account, role="owner") + account.current_tenant = new_tenant + tenant_was_created.send(new_tenant) if account is None: try: diff --git a/api/controllers/console/auth/oauth.py b/api/controllers/console/auth/oauth.py index f5284cc43b..395367c9e2 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -148,15 +148,15 @@ def _generate_account(provider: str, user_info: OAuthUserInfo): account = _get_account_by_openid_or_email(provider, user_info) if account: - tenant = TenantService.get_join_tenants(account) - if not tenant: + tenants = TenantService.get_join_tenants(account) + if not tenants: if not FeatureService.get_system_features().is_allow_create_workspace: raise WorkSpaceNotAllowedCreateError() else: - tenant = TenantService.create_tenant(f"{account.name}'s Workspace") - TenantService.create_tenant_member(tenant, account, role="owner") - account.current_tenant = tenant - tenant_was_created.send(tenant) + new_tenant = TenantService.create_tenant(f"{account.name}'s Workspace") + TenantService.create_tenant_member(new_tenant, account, role="owner") + account.current_tenant = new_tenant + tenant_was_created.send(new_tenant) if not account: if not FeatureService.get_system_features().is_allow_register: diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 981619b0cb..e68273afa6 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -540,9 +540,22 @@ class DatasetIndexingStatusApi(Resource): .filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment") .count() ) - document.completed_segments = completed_segments - document.total_segments = total_segments - documents_status.append(marshal(document, document_status_fields)) + # Create a dictionary with document attributes and 
additional fields + document_dict = { + "id": document.id, + "indexing_status": document.indexing_status, + "processing_started_at": document.processing_started_at, + "parsing_completed_at": document.parsing_completed_at, + "cleaning_completed_at": document.cleaning_completed_at, + "splitting_completed_at": document.splitting_completed_at, + "completed_at": document.completed_at, + "paused_at": document.paused_at, + "error": document.error, + "stopped_at": document.stopped_at, + "completed_segments": completed_segments, + "total_segments": total_segments, + } + documents_status.append(marshal(document_dict, document_status_fields)) data = {"data": documents_status} return data diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index ca18c25e74..f7c04102a9 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -583,11 +583,22 @@ class DocumentBatchIndexingStatusApi(DocumentResource): .filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment") .count() ) - document.completed_segments = completed_segments - document.total_segments = total_segments - if document.is_paused: - document.indexing_status = "paused" - documents_status.append(marshal(document, document_status_fields)) + # Create a dictionary with document attributes and additional fields + document_dict = { + "id": document.id, + "indexing_status": "paused" if document.is_paused else document.indexing_status, + "processing_started_at": document.processing_started_at, + "parsing_completed_at": document.parsing_completed_at, + "cleaning_completed_at": document.cleaning_completed_at, + "splitting_completed_at": document.splitting_completed_at, + "completed_at": document.completed_at, + "paused_at": document.paused_at, + "error": document.error, + "stopped_at": document.stopped_at, + "completed_segments": completed_segments, + 
"total_segments": total_segments, + } + documents_status.append(marshal(document_dict, document_status_fields)) data = {"data": documents_status} return data @@ -616,11 +627,22 @@ class DocumentIndexingStatusApi(DocumentResource): .count() ) - document.completed_segments = completed_segments - document.total_segments = total_segments - if document.is_paused: - document.indexing_status = "paused" - return marshal(document, document_status_fields) + # Create a dictionary with document attributes and additional fields + document_dict = { + "id": document.id, + "indexing_status": "paused" if document.is_paused else document.indexing_status, + "processing_started_at": document.processing_started_at, + "parsing_completed_at": document.parsing_completed_at, + "cleaning_completed_at": document.cleaning_completed_at, + "splitting_completed_at": document.splitting_completed_at, + "completed_at": document.completed_at, + "paused_at": document.paused_at, + "error": document.error, + "stopped_at": document.stopped_at, + "completed_segments": completed_segments, + "total_segments": total_segments, + } + return marshal(document_dict, document_status_fields) class DocumentDetailApi(DocumentResource): diff --git a/api/controllers/console/workspace/workspace.py b/api/controllers/console/workspace/workspace.py index 34af80bca7..19999e7361 100644 --- a/api/controllers/console/workspace/workspace.py +++ b/api/controllers/console/workspace/workspace.py @@ -68,16 +68,24 @@ class TenantListApi(Resource): @account_initialization_required def get(self): tenants = TenantService.get_join_tenants(current_user) + tenant_dicts = [] for tenant in tenants: features = FeatureService.get_features(tenant.id) - if features.billing.enabled: - tenant.plan = features.billing.subscription.plan - else: - tenant.plan = "sandbox" - if tenant.id == current_user.current_tenant_id: - tenant.current = True # Set current=True for current tenant - return {"workspaces": marshal(tenants, tenants_fields)}, 200 + + # 
Create a dictionary with tenant attributes + tenant_dict = { + "id": tenant.id, + "name": tenant.name, + "status": tenant.status, + "created_at": tenant.created_at, + "plan": features.billing.subscription.plan if features.billing.enabled else "sandbox", + "current": tenant.id == current_user.current_tenant_id, + } + + tenant_dicts.append(tenant_dict) + + return {"workspaces": marshal(tenant_dicts, tenants_fields)}, 200 class WorkspaceListApi(Resource): diff --git a/api/controllers/files/upload.py b/api/controllers/files/upload.py index 6641632169..f1a15793c7 100644 --- a/api/controllers/files/upload.py +++ b/api/controllers/files/upload.py @@ -64,9 +64,24 @@ class PluginUploadFileApi(Resource): extension = guess_extension(tool_file.mimetype) or ".bin" preview_url = ToolFileManager.sign_file(tool_file_id=tool_file.id, extension=extension) - tool_file.mime_type = mimetype - tool_file.extension = extension - tool_file.preview_url = preview_url + + # Create a dictionary with all the necessary attributes + result = { + "id": tool_file.id, + "user_id": tool_file.user_id, + "tenant_id": tool_file.tenant_id, + "conversation_id": tool_file.conversation_id, + "file_key": tool_file.file_key, + "mimetype": tool_file.mimetype, + "original_url": tool_file.original_url, + "name": tool_file.name, + "size": tool_file.size, + "mime_type": mimetype, + "extension": extension, + "preview_url": preview_url, + } + + return result, 201 except services.errors.file.FileTooLargeError as file_too_large_error: raise FileTooLargeError(file_too_large_error.description) except services.errors.file.UnsupportedFileTypeError: diff --git a/api/controllers/service_api/dataset/document.py b/api/controllers/service_api/dataset/document.py index 44c75f40ef..418363ffbb 100644 --- a/api/controllers/service_api/dataset/document.py +++ b/api/controllers/service_api/dataset/document.py @@ -388,11 +388,22 @@ class DocumentIndexingStatusApi(DatasetApiResource): .filter(DocumentSegment.document_id == 
str(document.id), DocumentSegment.status != "re_segment") .count() ) - document.completed_segments = completed_segments - document.total_segments = total_segments - if document.is_paused: - document.indexing_status = "paused" - documents_status.append(marshal(document, document_status_fields)) + # Create a dictionary with document attributes and additional fields + document_dict = { + "id": document.id, + "indexing_status": "paused" if document.is_paused else document.indexing_status, + "processing_started_at": document.processing_started_at, + "parsing_completed_at": document.parsing_completed_at, + "cleaning_completed_at": document.cleaning_completed_at, + "splitting_completed_at": document.splitting_completed_at, + "completed_at": document.completed_at, + "paused_at": document.paused_at, + "error": document.error, + "stopped_at": document.stopped_at, + "completed_segments": completed_segments, + "total_segments": total_segments, + } + documents_status.append(marshal(document_dict, document_status_fields)) data = {"data": documents_status} return data diff --git a/api/core/app/app_config/entities.py b/api/core/app/app_config/entities.py index 8ae52131f2..3f31b1c3d5 100644 --- a/api/core/app/app_config/entities.py +++ b/api/core/app/app_config/entities.py @@ -109,6 +109,7 @@ class VariableEntity(BaseModel): description: str = "" type: VariableEntityType required: bool = False + hide: bool = False max_length: Optional[int] = None options: Sequence[str] = Field(default_factory=list) allowed_file_types: Sequence[FileType] = Field(default_factory=list) diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index b74100bb19..4b021aa0b3 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -26,10 +26,13 @@ from core.model_runtime.errors.invoke import InvokeAuthorizationError from core.ops.ops_trace_manager import TraceQueueManager from 
core.prompt.utils.get_thread_messages_length import get_thread_messages_length from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository +from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository +from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository from extensions.ext_database import db from factories import file_factory from models import Account, App, Conversation, EndUser, Message, Workflow, WorkflowNodeExecutionTriggeredFrom +from models.enums import WorkflowRunTriggeredFrom from services.conversation_service import ConversationService from services.errors.message import MessageNotExistsError @@ -159,8 +162,22 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): contexts.plugin_tool_providers.set({}) contexts.plugin_tool_providers_lock.set(threading.Lock()) - # Create workflow node execution repository + # Create repositories + # + # Create session factory session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) + # Create workflow execution(aka workflow run) repository + if invoke_from == InvokeFrom.DEBUGGER: + workflow_triggered_from = WorkflowRunTriggeredFrom.DEBUGGING + else: + workflow_triggered_from = WorkflowRunTriggeredFrom.APP_RUN + workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository( + session_factory=session_factory, + user=user, + app_id=application_generate_entity.app_config.app_id, + triggered_from=workflow_triggered_from, + ) + # Create workflow node execution repository workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository( session_factory=session_factory, user=user, @@ -173,6 +190,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): user=user, invoke_from=invoke_from, application_generate_entity=application_generate_entity, + 
workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, conversation=conversation, stream=streaming, @@ -226,8 +244,18 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): contexts.plugin_tool_providers.set({}) contexts.plugin_tool_providers_lock.set(threading.Lock()) - # Create workflow node execution repository + # Create repositories + # + # Create session factory session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) + # Create workflow execution(aka workflow run) repository + workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository( + session_factory=session_factory, + user=user, + app_id=application_generate_entity.app_config.app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + ) + # Create workflow node execution repository workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository( session_factory=session_factory, user=user, @@ -240,6 +268,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): user=user, invoke_from=InvokeFrom.DEBUGGER, application_generate_entity=application_generate_entity, + workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, conversation=None, stream=streaming, @@ -291,8 +320,18 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): contexts.plugin_tool_providers.set({}) contexts.plugin_tool_providers_lock.set(threading.Lock()) - # Create workflow node execution repository + # Create repositories + # + # Create session factory session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) + # Create workflow execution(aka workflow run) repository + workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository( + session_factory=session_factory, + user=user, + app_id=application_generate_entity.app_config.app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + ) + # Create workflow 
node execution repository workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository( session_factory=session_factory, user=user, @@ -305,6 +344,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): user=user, invoke_from=InvokeFrom.DEBUGGER, application_generate_entity=application_generate_entity, + workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, conversation=None, stream=streaming, @@ -317,6 +357,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): user: Union[Account, EndUser], invoke_from: InvokeFrom, application_generate_entity: AdvancedChatAppGenerateEntity, + workflow_execution_repository: WorkflowExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository, conversation: Optional[Conversation] = None, stream: bool = True, @@ -381,6 +422,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): conversation=conversation, message=message, user=user, + workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, stream=stream, ) @@ -453,6 +495,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): conversation: Conversation, message: Message, user: Union[Account, EndUser], + workflow_execution_repository: WorkflowExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository, stream: bool = False, ) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]: @@ -476,9 +519,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): conversation=conversation, message=message, user=user, - stream=stream, dialogue_count=self._dialogue_count, + workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, + stream=stream, ) try: diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py 
b/api/core/app/apps/advanced_chat/generate_task_pipeline.py index 735b2a9709..e43417668d 100644 --- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py +++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py @@ -10,6 +10,7 @@ from sqlalchemy.orm import Session from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom +from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter from core.app.entities.app_invoke_entities import ( AdvancedChatAppGenerateEntity, InvokeFrom, @@ -64,6 +65,7 @@ from core.ops.ops_trace_manager import TraceQueueManager from core.workflow.enums import SystemVariableKey from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState from core.workflow.nodes import NodeType +from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository from core.workflow.workflow_cycle_manager import WorkflowCycleManager from events.message_event import message_was_created @@ -94,6 +96,7 @@ class AdvancedChatAppGenerateTaskPipeline: user: Union[Account, EndUser], stream: bool, dialogue_count: int, + workflow_execution_repository: WorkflowExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository, ) -> None: self._base_task_pipeline = BasedGenerateTaskPipeline( @@ -125,9 +128,14 @@ class AdvancedChatAppGenerateTaskPipeline: SystemVariableKey.WORKFLOW_ID: workflow.id, SystemVariableKey.WORKFLOW_RUN_ID: application_generate_entity.workflow_run_id, }, + workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, ) + self._workflow_response_converter = WorkflowResponseConverter( + application_generate_entity=application_generate_entity, + ) + 
self._task_state = WorkflowTaskState() self._message_cycle_manager = MessageCycleManage( application_generate_entity=application_generate_entity, task_state=self._task_state @@ -294,21 +302,19 @@ class AdvancedChatAppGenerateTaskPipeline: with Session(db.engine, expire_on_commit=False) as session: # init workflow run - workflow_run = self._workflow_cycle_manager._handle_workflow_run_start( + workflow_execution = self._workflow_cycle_manager.handle_workflow_run_start( session=session, workflow_id=self._workflow_id, - user_id=self._user_id, - created_by_role=self._created_by_role, ) - self._workflow_run_id = workflow_run.id + self._workflow_run_id = workflow_execution.id message = self._get_message(session=session) if not message: raise ValueError(f"Message not found: {self._message_id}") - message.workflow_run_id = workflow_run.id - workflow_start_resp = self._workflow_cycle_manager._workflow_start_to_stream_response( - session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run + message.workflow_run_id = workflow_execution.id + workflow_start_resp = self._workflow_response_converter.workflow_start_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution=workflow_execution, ) - session.commit() yield workflow_start_resp elif isinstance( @@ -319,13 +325,10 @@ class AdvancedChatAppGenerateTaskPipeline: raise ValueError("workflow run not initialized.") with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id + workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried( + workflow_execution_id=self._workflow_run_id, event=event ) - workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_retried( - workflow_run=workflow_run, event=event - ) - node_retry_resp = 
self._workflow_cycle_manager._workflow_node_retry_to_stream_response( + node_retry_resp = self._workflow_response_converter.workflow_node_retry_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, workflow_node_execution=workflow_node_execution, @@ -338,20 +341,15 @@ class AdvancedChatAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - workflow_node_execution = self._workflow_cycle_manager._handle_node_execution_start( - workflow_run=workflow_run, event=event - ) + workflow_node_execution = self._workflow_cycle_manager.handle_node_execution_start( + workflow_execution_id=self._workflow_run_id, event=event + ) - node_start_resp = self._workflow_cycle_manager._workflow_node_start_to_stream_response( - event=event, - task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, - ) - session.commit() + node_start_resp = self._workflow_response_converter.workflow_node_start_to_stream_response( + event=event, + task_id=self._application_generate_entity.task_id, + workflow_node_execution=workflow_node_execution, + ) if node_start_resp: yield node_start_resp @@ -359,15 +357,15 @@ class AdvancedChatAppGenerateTaskPipeline: # Record files if it's an answer node or end node if event.node_type in [NodeType.ANSWER, NodeType.END]: self._recorded_files.extend( - self._workflow_cycle_manager._fetch_files_from_node_outputs(event.outputs or {}) + self._workflow_response_converter.fetch_files_from_node_outputs(event.outputs or {}) ) with Session(db.engine, expire_on_commit=False) as session: - workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_success( + workflow_node_execution = 
self._workflow_cycle_manager.handle_workflow_node_execution_success( event=event ) - node_finish_resp = self._workflow_cycle_manager._workflow_node_finish_to_stream_response( + node_finish_resp = self._workflow_response_converter.workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, workflow_node_execution=workflow_node_execution, @@ -383,11 +381,11 @@ class AdvancedChatAppGenerateTaskPipeline: | QueueNodeInLoopFailedEvent | QueueNodeExceptionEvent, ): - workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_failed( + workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_failed( event=event ) - node_finish_resp = self._workflow_cycle_manager._workflow_node_finish_to_stream_response( + node_finish_resp = self._workflow_response_converter.workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, workflow_node_execution=workflow_node_execution, @@ -399,132 +397,92 @@ class AdvancedChatAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - parallel_start_resp = ( - self._workflow_cycle_manager._workflow_parallel_branch_start_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + parallel_start_resp = ( + self._workflow_response_converter.workflow_parallel_branch_start_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, ) + ) yield parallel_start_resp elif isinstance(event, QueueParallelBranchRunSucceededEvent | QueueParallelBranchRunFailedEvent): if not self._workflow_run_id: raise ValueError("workflow run 
not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - parallel_finish_resp = ( - self._workflow_cycle_manager._workflow_parallel_branch_finished_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + parallel_finish_resp = ( + self._workflow_response_converter.workflow_parallel_branch_finished_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, ) + ) yield parallel_finish_resp elif isinstance(event, QueueIterationStartEvent): if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - iter_start_resp = self._workflow_cycle_manager._workflow_iteration_start_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + iter_start_resp = self._workflow_response_converter.workflow_iteration_start_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, + ) yield iter_start_resp elif isinstance(event, QueueIterationNextEvent): if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - iter_next_resp = self._workflow_cycle_manager._workflow_iteration_next_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - 
event=event, - ) + iter_next_resp = self._workflow_response_converter.workflow_iteration_next_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, + ) yield iter_next_resp elif isinstance(event, QueueIterationCompletedEvent): if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - iter_finish_resp = self._workflow_cycle_manager._workflow_iteration_completed_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + iter_finish_resp = self._workflow_response_converter.workflow_iteration_completed_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, + ) yield iter_finish_resp elif isinstance(event, QueueLoopStartEvent): if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - loop_start_resp = self._workflow_cycle_manager._workflow_loop_start_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + loop_start_resp = self._workflow_response_converter.workflow_loop_start_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, + ) yield loop_start_resp elif isinstance(event, QueueLoopNextEvent): if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - 
workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - loop_next_resp = self._workflow_cycle_manager._workflow_loop_next_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + loop_next_resp = self._workflow_response_converter.workflow_loop_next_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, + ) yield loop_next_resp elif isinstance(event, QueueLoopCompletedEvent): if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - loop_finish_resp = self._workflow_cycle_manager._workflow_loop_completed_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + loop_finish_resp = self._workflow_response_converter.workflow_loop_completed_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, + ) yield loop_finish_resp elif isinstance(event, QueueWorkflowSucceededEvent): @@ -535,10 +493,8 @@ class AdvancedChatAppGenerateTaskPipeline: raise ValueError("workflow run not initialized.") with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._handle_workflow_run_success( - session=session, + workflow_execution = self._workflow_cycle_manager.handle_workflow_run_success( workflow_run_id=self._workflow_run_id, - start_at=graph_runtime_state.start_at, total_tokens=graph_runtime_state.total_tokens, total_steps=graph_runtime_state.node_run_steps, outputs=event.outputs, @@ -546,10 +502,11 @@ class 
AdvancedChatAppGenerateTaskPipeline: trace_manager=trace_manager, ) - workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response( - session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + session=session, + task_id=self._application_generate_entity.task_id, + workflow_execution=workflow_execution, ) - session.commit() yield workflow_finish_resp self._base_task_pipeline._queue_manager.publish( @@ -562,10 +519,8 @@ class AdvancedChatAppGenerateTaskPipeline: raise ValueError("graph runtime state not initialized.") with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._handle_workflow_run_partial_success( - session=session, + workflow_execution = self._workflow_cycle_manager.handle_workflow_run_partial_success( workflow_run_id=self._workflow_run_id, - start_at=graph_runtime_state.start_at, total_tokens=graph_runtime_state.total_tokens, total_steps=graph_runtime_state.node_run_steps, outputs=event.outputs, @@ -573,10 +528,11 @@ class AdvancedChatAppGenerateTaskPipeline: conversation_id=None, trace_manager=trace_manager, ) - workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response( - session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + session=session, + task_id=self._application_generate_entity.task_id, + workflow_execution=workflow_execution, ) - session.commit() yield workflow_finish_resp self._base_task_pipeline._queue_manager.publish( @@ -589,26 +545,25 @@ class AdvancedChatAppGenerateTaskPipeline: raise ValueError("graph runtime state not initialized.") with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._handle_workflow_run_failed( - 
session=session, + workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed( workflow_run_id=self._workflow_run_id, - start_at=graph_runtime_state.start_at, total_tokens=graph_runtime_state.total_tokens, total_steps=graph_runtime_state.node_run_steps, status=WorkflowRunStatus.FAILED, - error=event.error, + error_message=event.error, conversation_id=self._conversation_id, trace_manager=trace_manager, exceptions_count=event.exceptions_count, ) - workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response( - session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + session=session, + task_id=self._application_generate_entity.task_id, + workflow_execution=workflow_execution, ) - err_event = QueueErrorEvent(error=ValueError(f"Run failed: {workflow_run.error}")) + err_event = QueueErrorEvent(error=ValueError(f"Run failed: {workflow_execution.error_message}")) err = self._base_task_pipeline._handle_error( event=err_event, session=session, message_id=self._message_id ) - session.commit() yield workflow_finish_resp yield self._base_task_pipeline._error_to_stream_response(err) @@ -616,21 +571,19 @@ class AdvancedChatAppGenerateTaskPipeline: elif isinstance(event, QueueStopEvent): if self._workflow_run_id and graph_runtime_state: with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._handle_workflow_run_failed( - session=session, + workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed( workflow_run_id=self._workflow_run_id, - start_at=graph_runtime_state.start_at, total_tokens=graph_runtime_state.total_tokens, total_steps=graph_runtime_state.node_run_steps, status=WorkflowRunStatus.STOPPED, - error=event.get_stop_reason(), + error_message=event.get_stop_reason(), conversation_id=self._conversation_id, 
trace_manager=trace_manager, ) - workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response( + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( session=session, task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, + workflow_execution=workflow_execution, ) # Save message self._save_message(session=session, graph_runtime_state=graph_runtime_state) @@ -711,7 +664,7 @@ class AdvancedChatAppGenerateTaskPipeline: yield self._message_end_to_stream_response() elif isinstance(event, QueueAgentLogEvent): - yield self._workflow_cycle_manager._handle_agent_log( + yield self._workflow_response_converter.handle_agent_log( task_id=self._application_generate_entity.task_id, event=event ) else: diff --git a/api/core/app/apps/common/__init__.py b/api/core/app/apps/common/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/core/app/apps/common/workflow_response_converter.py b/api/core/app/apps/common/workflow_response_converter.py new file mode 100644 index 0000000000..7669bf74bb --- /dev/null +++ b/api/core/app/apps/common/workflow_response_converter.py @@ -0,0 +1,564 @@ +import time +from collections.abc import Mapping, Sequence +from datetime import UTC, datetime +from typing import Any, Optional, Union, cast + +from sqlalchemy import select +from sqlalchemy.orm import Session + +from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity +from core.app.entities.queue_entities import ( + QueueAgentLogEvent, + QueueIterationCompletedEvent, + QueueIterationNextEvent, + QueueIterationStartEvent, + QueueLoopCompletedEvent, + QueueLoopNextEvent, + QueueLoopStartEvent, + QueueNodeExceptionEvent, + QueueNodeFailedEvent, + QueueNodeInIterationFailedEvent, + QueueNodeInLoopFailedEvent, + QueueNodeRetryEvent, + QueueNodeStartedEvent, + QueueNodeSucceededEvent, + QueueParallelBranchRunFailedEvent, + 
QueueParallelBranchRunStartedEvent, + QueueParallelBranchRunSucceededEvent, +) +from core.app.entities.task_entities import ( + AgentLogStreamResponse, + IterationNodeCompletedStreamResponse, + IterationNodeNextStreamResponse, + IterationNodeStartStreamResponse, + LoopNodeCompletedStreamResponse, + LoopNodeNextStreamResponse, + LoopNodeStartStreamResponse, + NodeFinishStreamResponse, + NodeRetryStreamResponse, + NodeStartStreamResponse, + ParallelBranchFinishedStreamResponse, + ParallelBranchStartStreamResponse, + WorkflowFinishStreamResponse, + WorkflowStartStreamResponse, +) +from core.file import FILE_MODEL_IDENTITY, File +from core.tools.tool_manager import ToolManager +from core.workflow.entities.node_execution_entities import NodeExecution +from core.workflow.entities.workflow_execution_entities import WorkflowExecution +from core.workflow.nodes import NodeType +from core.workflow.nodes.tool.entities import ToolNodeData +from models import ( + Account, + CreatorUserRole, + EndUser, + WorkflowNodeExecutionStatus, + WorkflowRun, +) + + +class WorkflowResponseConverter: + def __init__( + self, + *, + application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity], + ) -> None: + self._application_generate_entity = application_generate_entity + + def workflow_start_to_stream_response( + self, + *, + task_id: str, + workflow_execution: WorkflowExecution, + ) -> WorkflowStartStreamResponse: + return WorkflowStartStreamResponse( + task_id=task_id, + workflow_run_id=workflow_execution.id, + data=WorkflowStartStreamResponse.Data( + id=workflow_execution.id, + workflow_id=workflow_execution.workflow_id, + sequence_number=workflow_execution.sequence_number, + inputs=workflow_execution.inputs, + created_at=int(workflow_execution.started_at.timestamp()), + ), + ) + + def workflow_finish_to_stream_response( + self, + *, + session: Session, + task_id: str, + workflow_execution: WorkflowExecution, + ) -> WorkflowFinishStreamResponse: + created_by 
= None + workflow_run = session.scalar(select(WorkflowRun).where(WorkflowRun.id == workflow_execution.id)) + assert workflow_run is not None + if workflow_run.created_by_role == CreatorUserRole.ACCOUNT: + stmt = select(Account).where(Account.id == workflow_run.created_by) + account = session.scalar(stmt) + if account: + created_by = { + "id": account.id, + "name": account.name, + "email": account.email, + } + elif workflow_run.created_by_role == CreatorUserRole.END_USER: + stmt = select(EndUser).where(EndUser.id == workflow_run.created_by) + end_user = session.scalar(stmt) + if end_user: + created_by = { + "id": end_user.id, + "user": end_user.session_id, + } + else: + raise NotImplementedError(f"unknown created_by_role: {workflow_run.created_by_role}") + + # Handle the case where finished_at is None by using current time as default + finished_at_timestamp = ( + int(workflow_execution.finished_at.timestamp()) + if workflow_execution.finished_at + else int(datetime.now(UTC).timestamp()) + ) + + return WorkflowFinishStreamResponse( + task_id=task_id, + workflow_run_id=workflow_execution.id, + data=WorkflowFinishStreamResponse.Data( + id=workflow_execution.id, + workflow_id=workflow_execution.workflow_id, + sequence_number=workflow_execution.sequence_number, + status=workflow_execution.status, + outputs=workflow_execution.outputs, + error=workflow_execution.error_message, + elapsed_time=workflow_execution.elapsed_time, + total_tokens=workflow_execution.total_tokens, + total_steps=workflow_execution.total_steps, + created_by=created_by, + created_at=int(workflow_execution.started_at.timestamp()), + finished_at=finished_at_timestamp, + files=self.fetch_files_from_node_outputs(workflow_execution.outputs), + exceptions_count=workflow_execution.exceptions_count, + ), + ) + + def workflow_node_start_to_stream_response( + self, + *, + event: QueueNodeStartedEvent, + task_id: str, + workflow_node_execution: NodeExecution, + ) -> Optional[NodeStartStreamResponse]: + if 
workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: + return None + if not workflow_node_execution.workflow_run_id: + return None + + response = NodeStartStreamResponse( + task_id=task_id, + workflow_run_id=workflow_node_execution.workflow_run_id, + data=NodeStartStreamResponse.Data( + id=workflow_node_execution.id, + node_id=workflow_node_execution.node_id, + node_type=workflow_node_execution.node_type, + title=workflow_node_execution.title, + index=workflow_node_execution.index, + predecessor_node_id=workflow_node_execution.predecessor_node_id, + inputs=workflow_node_execution.inputs, + created_at=int(workflow_node_execution.created_at.timestamp()), + parallel_id=event.parallel_id, + parallel_start_node_id=event.parallel_start_node_id, + parent_parallel_id=event.parent_parallel_id, + parent_parallel_start_node_id=event.parent_parallel_start_node_id, + iteration_id=event.in_iteration_id, + loop_id=event.in_loop_id, + parallel_run_id=event.parallel_mode_run_id, + agent_strategy=event.agent_strategy, + ), + ) + + # extras logic + if event.node_type == NodeType.TOOL: + node_data = cast(ToolNodeData, event.node_data) + response.data.extras["icon"] = ToolManager.get_tool_icon( + tenant_id=self._application_generate_entity.app_config.tenant_id, + provider_type=node_data.provider_type, + provider_id=node_data.provider_id, + ) + + return response + + def workflow_node_finish_to_stream_response( + self, + *, + event: QueueNodeSucceededEvent + | QueueNodeFailedEvent + | QueueNodeInIterationFailedEvent + | QueueNodeInLoopFailedEvent + | QueueNodeExceptionEvent, + task_id: str, + workflow_node_execution: NodeExecution, + ) -> Optional[NodeFinishStreamResponse]: + if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: + return None + if not workflow_node_execution.workflow_run_id: + return None + if not workflow_node_execution.finished_at: + return None + + return NodeFinishStreamResponse( + task_id=task_id, + 
workflow_run_id=workflow_node_execution.workflow_run_id, + data=NodeFinishStreamResponse.Data( + id=workflow_node_execution.id, + node_id=workflow_node_execution.node_id, + node_type=workflow_node_execution.node_type, + index=workflow_node_execution.index, + title=workflow_node_execution.title, + predecessor_node_id=workflow_node_execution.predecessor_node_id, + inputs=workflow_node_execution.inputs, + process_data=workflow_node_execution.process_data, + outputs=workflow_node_execution.outputs, + status=workflow_node_execution.status, + error=workflow_node_execution.error, + elapsed_time=workflow_node_execution.elapsed_time, + execution_metadata=workflow_node_execution.metadata, + created_at=int(workflow_node_execution.created_at.timestamp()), + finished_at=int(workflow_node_execution.finished_at.timestamp()), + files=self.fetch_files_from_node_outputs(workflow_node_execution.outputs or {}), + parallel_id=event.parallel_id, + parallel_start_node_id=event.parallel_start_node_id, + parent_parallel_id=event.parent_parallel_id, + parent_parallel_start_node_id=event.parent_parallel_start_node_id, + iteration_id=event.in_iteration_id, + loop_id=event.in_loop_id, + ), + ) + + def workflow_node_retry_to_stream_response( + self, + *, + event: QueueNodeRetryEvent, + task_id: str, + workflow_node_execution: NodeExecution, + ) -> Optional[Union[NodeRetryStreamResponse, NodeFinishStreamResponse]]: + if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: + return None + if not workflow_node_execution.workflow_run_id: + return None + if not workflow_node_execution.finished_at: + return None + + return NodeRetryStreamResponse( + task_id=task_id, + workflow_run_id=workflow_node_execution.workflow_run_id, + data=NodeRetryStreamResponse.Data( + id=workflow_node_execution.id, + node_id=workflow_node_execution.node_id, + node_type=workflow_node_execution.node_type, + index=workflow_node_execution.index, + title=workflow_node_execution.title, + 
predecessor_node_id=workflow_node_execution.predecessor_node_id, + inputs=workflow_node_execution.inputs, + process_data=workflow_node_execution.process_data, + outputs=workflow_node_execution.outputs, + status=workflow_node_execution.status, + error=workflow_node_execution.error, + elapsed_time=workflow_node_execution.elapsed_time, + execution_metadata=workflow_node_execution.metadata, + created_at=int(workflow_node_execution.created_at.timestamp()), + finished_at=int(workflow_node_execution.finished_at.timestamp()), + files=self.fetch_files_from_node_outputs(workflow_node_execution.outputs or {}), + parallel_id=event.parallel_id, + parallel_start_node_id=event.parallel_start_node_id, + parent_parallel_id=event.parent_parallel_id, + parent_parallel_start_node_id=event.parent_parallel_start_node_id, + iteration_id=event.in_iteration_id, + loop_id=event.in_loop_id, + retry_index=event.retry_index, + ), + ) + + def workflow_parallel_branch_start_to_stream_response( + self, + *, + task_id: str, + workflow_execution_id: str, + event: QueueParallelBranchRunStartedEvent, + ) -> ParallelBranchStartStreamResponse: + return ParallelBranchStartStreamResponse( + task_id=task_id, + workflow_run_id=workflow_execution_id, + data=ParallelBranchStartStreamResponse.Data( + parallel_id=event.parallel_id, + parallel_branch_id=event.parallel_start_node_id, + parent_parallel_id=event.parent_parallel_id, + parent_parallel_start_node_id=event.parent_parallel_start_node_id, + iteration_id=event.in_iteration_id, + loop_id=event.in_loop_id, + created_at=int(time.time()), + ), + ) + + def workflow_parallel_branch_finished_to_stream_response( + self, + *, + task_id: str, + workflow_execution_id: str, + event: QueueParallelBranchRunSucceededEvent | QueueParallelBranchRunFailedEvent, + ) -> ParallelBranchFinishedStreamResponse: + return ParallelBranchFinishedStreamResponse( + task_id=task_id, + workflow_run_id=workflow_execution_id, + data=ParallelBranchFinishedStreamResponse.Data( + 
parallel_id=event.parallel_id, + parallel_branch_id=event.parallel_start_node_id, + parent_parallel_id=event.parent_parallel_id, + parent_parallel_start_node_id=event.parent_parallel_start_node_id, + iteration_id=event.in_iteration_id, + loop_id=event.in_loop_id, + status="succeeded" if isinstance(event, QueueParallelBranchRunSucceededEvent) else "failed", + error=event.error if isinstance(event, QueueParallelBranchRunFailedEvent) else None, + created_at=int(time.time()), + ), + ) + + def workflow_iteration_start_to_stream_response( + self, + *, + task_id: str, + workflow_execution_id: str, + event: QueueIterationStartEvent, + ) -> IterationNodeStartStreamResponse: + return IterationNodeStartStreamResponse( + task_id=task_id, + workflow_run_id=workflow_execution_id, + data=IterationNodeStartStreamResponse.Data( + id=event.node_id, + node_id=event.node_id, + node_type=event.node_type.value, + title=event.node_data.title, + created_at=int(time.time()), + extras={}, + inputs=event.inputs or {}, + metadata=event.metadata or {}, + parallel_id=event.parallel_id, + parallel_start_node_id=event.parallel_start_node_id, + ), + ) + + def workflow_iteration_next_to_stream_response( + self, + *, + task_id: str, + workflow_execution_id: str, + event: QueueIterationNextEvent, + ) -> IterationNodeNextStreamResponse: + return IterationNodeNextStreamResponse( + task_id=task_id, + workflow_run_id=workflow_execution_id, + data=IterationNodeNextStreamResponse.Data( + id=event.node_id, + node_id=event.node_id, + node_type=event.node_type.value, + title=event.node_data.title, + index=event.index, + pre_iteration_output=event.output, + created_at=int(time.time()), + extras={}, + parallel_id=event.parallel_id, + parallel_start_node_id=event.parallel_start_node_id, + parallel_mode_run_id=event.parallel_mode_run_id, + duration=event.duration, + ), + ) + + def workflow_iteration_completed_to_stream_response( + self, + *, + task_id: str, + workflow_execution_id: str, + event: 
QueueIterationCompletedEvent, + ) -> IterationNodeCompletedStreamResponse: + return IterationNodeCompletedStreamResponse( + task_id=task_id, + workflow_run_id=workflow_execution_id, + data=IterationNodeCompletedStreamResponse.Data( + id=event.node_id, + node_id=event.node_id, + node_type=event.node_type.value, + title=event.node_data.title, + outputs=event.outputs, + created_at=int(time.time()), + extras={}, + inputs=event.inputs or {}, + status=WorkflowNodeExecutionStatus.SUCCEEDED + if event.error is None + else WorkflowNodeExecutionStatus.FAILED, + error=None, + elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(), + total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0, + execution_metadata=event.metadata, + finished_at=int(time.time()), + steps=event.steps, + parallel_id=event.parallel_id, + parallel_start_node_id=event.parallel_start_node_id, + ), + ) + + def workflow_loop_start_to_stream_response( + self, *, task_id: str, workflow_execution_id: str, event: QueueLoopStartEvent + ) -> LoopNodeStartStreamResponse: + return LoopNodeStartStreamResponse( + task_id=task_id, + workflow_run_id=workflow_execution_id, + data=LoopNodeStartStreamResponse.Data( + id=event.node_id, + node_id=event.node_id, + node_type=event.node_type.value, + title=event.node_data.title, + created_at=int(time.time()), + extras={}, + inputs=event.inputs or {}, + metadata=event.metadata or {}, + parallel_id=event.parallel_id, + parallel_start_node_id=event.parallel_start_node_id, + ), + ) + + def workflow_loop_next_to_stream_response( + self, + *, + task_id: str, + workflow_execution_id: str, + event: QueueLoopNextEvent, + ) -> LoopNodeNextStreamResponse: + return LoopNodeNextStreamResponse( + task_id=task_id, + workflow_run_id=workflow_execution_id, + data=LoopNodeNextStreamResponse.Data( + id=event.node_id, + node_id=event.node_id, + node_type=event.node_type.value, + title=event.node_data.title, + index=event.index, + 
pre_loop_output=event.output, + created_at=int(time.time()), + extras={}, + parallel_id=event.parallel_id, + parallel_start_node_id=event.parallel_start_node_id, + parallel_mode_run_id=event.parallel_mode_run_id, + duration=event.duration, + ), + ) + + def workflow_loop_completed_to_stream_response( + self, + *, + task_id: str, + workflow_execution_id: str, + event: QueueLoopCompletedEvent, + ) -> LoopNodeCompletedStreamResponse: + return LoopNodeCompletedStreamResponse( + task_id=task_id, + workflow_run_id=workflow_execution_id, + data=LoopNodeCompletedStreamResponse.Data( + id=event.node_id, + node_id=event.node_id, + node_type=event.node_type.value, + title=event.node_data.title, + outputs=event.outputs, + created_at=int(time.time()), + extras={}, + inputs=event.inputs or {}, + status=WorkflowNodeExecutionStatus.SUCCEEDED + if event.error is None + else WorkflowNodeExecutionStatus.FAILED, + error=None, + elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(), + total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0, + execution_metadata=event.metadata, + finished_at=int(time.time()), + steps=event.steps, + parallel_id=event.parallel_id, + parallel_start_node_id=event.parallel_start_node_id, + ), + ) + + def fetch_files_from_node_outputs(self, outputs_dict: Mapping[str, Any] | None) -> Sequence[Mapping[str, Any]]: + """ + Fetch files from node outputs + :param outputs_dict: node outputs dict + :return: + """ + if not outputs_dict: + return [] + + files = [self._fetch_files_from_variable_value(output_value) for output_value in outputs_dict.values()] + # Remove None + files = [file for file in files if file] + # Flatten list + # Flatten the list of sequences into a single list of mappings + flattened_files = [file for sublist in files if sublist for file in sublist] + + # Convert to tuple to match Sequence type + return tuple(flattened_files) + + def _fetch_files_from_variable_value(self, value: Union[dict, 
list]) -> Sequence[Mapping[str, Any]]: + """ + Fetch files from variable value + :param value: variable value + :return: + """ + if not value: + return [] + + files = [] + if isinstance(value, list): + for item in value: + file = self._get_file_var_from_value(item) + if file: + files.append(file) + elif isinstance(value, dict): + file = self._get_file_var_from_value(value) + if file: + files.append(file) + + return files + + def _get_file_var_from_value(self, value: Union[dict, list]) -> Mapping[str, Any] | None: + """ + Get file var from value + :param value: variable value + :return: + """ + if not value: + return None + + if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY: + return value + elif isinstance(value, File): + return value.to_dict() + + return None + + def handle_agent_log(self, task_id: str, event: QueueAgentLogEvent) -> AgentLogStreamResponse: + """ + Handle agent log + :param task_id: task id + :param event: agent log event + :return: + """ + return AgentLogStreamResponse( + task_id=task_id, + data=AgentLogStreamResponse.Data( + node_execution_id=event.node_execution_id, + id=event.id, + parent_id=event.parent_id, + label=event.label, + error=event.error, + status=event.status, + data=event.data, + metadata=event.metadata, + node_id=event.node_id, + ), + ) diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index d49ff682b9..01d35c57ce 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -18,16 +18,19 @@ from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager from core.app.apps.workflow.app_queue_manager import WorkflowAppQueueManager from core.app.apps.workflow.app_runner import WorkflowAppRunner from core.app.apps.workflow.generate_response_converter import WorkflowAppGenerateResponseConverter +from core.app.apps.workflow.generate_task_pipeline import WorkflowAppGenerateTaskPipeline 
from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity from core.app.entities.task_entities import WorkflowAppBlockingResponse, WorkflowAppStreamResponse from core.model_runtime.errors.invoke import InvokeAuthorizationError from core.ops.ops_trace_manager import TraceQueueManager from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository +from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository +from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository -from core.workflow.workflow_app_generate_task_pipeline import WorkflowAppGenerateTaskPipeline from extensions.ext_database import db from factories import file_factory from models import Account, App, EndUser, Workflow, WorkflowNodeExecutionTriggeredFrom +from models.enums import WorkflowRunTriggeredFrom logger = logging.getLogger(__name__) @@ -136,9 +139,22 @@ class WorkflowAppGenerator(BaseAppGenerator): contexts.plugin_tool_providers.set({}) contexts.plugin_tool_providers_lock.set(threading.Lock()) - # Create workflow node execution repository + # Create repositories + # + # Create session factory session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) - + # Create workflow execution(aka workflow run) repository + if invoke_from == InvokeFrom.DEBUGGER: + workflow_triggered_from = WorkflowRunTriggeredFrom.DEBUGGING + else: + workflow_triggered_from = WorkflowRunTriggeredFrom.APP_RUN + workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository( + session_factory=session_factory, + user=user, + app_id=application_generate_entity.app_config.app_id, + triggered_from=workflow_triggered_from, + ) + # Create workflow node execution repository workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository( session_factory=session_factory, 
user=user, @@ -152,6 +168,7 @@ class WorkflowAppGenerator(BaseAppGenerator): user=user, application_generate_entity=application_generate_entity, invoke_from=invoke_from, + workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, streaming=streaming, workflow_thread_pool_id=workflow_thread_pool_id, @@ -165,6 +182,7 @@ class WorkflowAppGenerator(BaseAppGenerator): user: Union[Account, EndUser], application_generate_entity: WorkflowAppGenerateEntity, invoke_from: InvokeFrom, + workflow_execution_repository: WorkflowExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository, streaming: bool = True, workflow_thread_pool_id: Optional[str] = None, @@ -209,6 +227,7 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow=workflow, queue_manager=queue_manager, user=user, + workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, stream=streaming, ) @@ -262,6 +281,17 @@ class WorkflowAppGenerator(BaseAppGenerator): contexts.plugin_tool_providers.set({}) contexts.plugin_tool_providers_lock.set(threading.Lock()) + # Create repositories + # + # Create session factory + session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) + # Create workflow execution(aka workflow run) repository + workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository( + session_factory=session_factory, + user=user, + app_id=application_generate_entity.app_config.app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + ) # Create workflow node execution repository session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) @@ -278,6 +308,7 @@ class WorkflowAppGenerator(BaseAppGenerator): user=user, invoke_from=InvokeFrom.DEBUGGER, application_generate_entity=application_generate_entity, + workflow_execution_repository=workflow_execution_repository, 
workflow_node_execution_repository=workflow_node_execution_repository, streaming=streaming, ) @@ -327,6 +358,17 @@ class WorkflowAppGenerator(BaseAppGenerator): contexts.plugin_tool_providers.set({}) contexts.plugin_tool_providers_lock.set(threading.Lock()) + # Create repositories + # + # Create session factory + session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) + # Create workflow execution(aka workflow run) repository + workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository( + session_factory=session_factory, + user=user, + app_id=application_generate_entity.app_config.app_id, + triggered_from=WorkflowRunTriggeredFrom.DEBUGGING, + ) # Create workflow node execution repository session_factory = sessionmaker(bind=db.engine, expire_on_commit=False) @@ -343,6 +385,7 @@ class WorkflowAppGenerator(BaseAppGenerator): user=user, invoke_from=InvokeFrom.DEBUGGER, application_generate_entity=application_generate_entity, + workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, streaming=streaming, ) @@ -400,6 +443,7 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow: Workflow, queue_manager: AppQueueManager, user: Union[Account, EndUser], + workflow_execution_repository: WorkflowExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository, stream: bool = False, ) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]: @@ -419,8 +463,9 @@ class WorkflowAppGenerator(BaseAppGenerator): workflow=workflow, queue_manager=queue_manager, user=user, - stream=stream, + workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, + stream=stream, ) try: diff --git a/api/core/workflow/workflow_app_generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py similarity index 72% rename from 
api/core/workflow/workflow_app_generate_task_pipeline.py rename to api/core/app/apps/workflow/generate_task_pipeline.py index 0396fa8157..0291f49cac 100644 --- a/api/core/workflow/workflow_app_generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -3,10 +3,12 @@ import time from collections.abc import Generator from typing import Optional, Union +from sqlalchemy import select from sqlalchemy.orm import Session from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME from core.app.apps.base_app_queue_manager import AppQueueManager +from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter from core.app.entities.app_invoke_entities import ( InvokeFrom, WorkflowAppGenerateEntity, @@ -53,7 +55,9 @@ from core.app.entities.task_entities import ( from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk from core.ops.ops_trace_manager import TraceQueueManager +from core.workflow.entities.workflow_execution_entities import WorkflowExecution from core.workflow.enums import SystemVariableKey +from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository from core.workflow.workflow_cycle_manager import WorkflowCycleManager from extensions.ext_database import db @@ -83,6 +87,7 @@ class WorkflowAppGenerateTaskPipeline: queue_manager: AppQueueManager, user: Union[Account, EndUser], stream: bool, + workflow_execution_repository: WorkflowExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository, ) -> None: self._base_task_pipeline = BasedGenerateTaskPipeline( @@ -111,9 +116,14 @@ class WorkflowAppGenerateTaskPipeline: SystemVariableKey.WORKFLOW_ID: workflow.id, SystemVariableKey.WORKFLOW_RUN_ID: 
application_generate_entity.workflow_run_id, }, + workflow_execution_repository=workflow_execution_repository, workflow_node_execution_repository=workflow_node_execution_repository, ) + self._workflow_response_converter = WorkflowResponseConverter( + application_generate_entity=application_generate_entity, + ) + self._application_generate_entity = application_generate_entity self._workflow_id = workflow.id self._workflow_features_dict = workflow.features_dict @@ -258,17 +268,15 @@ class WorkflowAppGenerateTaskPipeline: with Session(db.engine, expire_on_commit=False) as session: # init workflow run - workflow_run = self._workflow_cycle_manager._handle_workflow_run_start( + workflow_execution = self._workflow_cycle_manager.handle_workflow_run_start( session=session, workflow_id=self._workflow_id, - user_id=self._user_id, - created_by_role=self._created_by_role, ) - self._workflow_run_id = workflow_run.id - start_resp = self._workflow_cycle_manager._workflow_start_to_stream_response( - session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run + self._workflow_run_id = workflow_execution.id + start_resp = self._workflow_response_converter.workflow_start_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution=workflow_execution, ) - session.commit() yield start_resp elif isinstance( @@ -278,13 +286,11 @@ class WorkflowAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id + workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried( + workflow_execution_id=self._workflow_run_id, + event=event, ) - workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_retried( - workflow_run=workflow_run, event=event - ) 
- response = self._workflow_cycle_manager._workflow_node_retry_to_stream_response( + response = self._workflow_response_converter.workflow_node_retry_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, workflow_node_execution=workflow_node_execution, @@ -297,27 +303,22 @@ class WorkflowAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - workflow_node_execution = self._workflow_cycle_manager._handle_node_execution_start( - workflow_run=workflow_run, event=event - ) - node_start_response = self._workflow_cycle_manager._workflow_node_start_to_stream_response( - event=event, - task_id=self._application_generate_entity.task_id, - workflow_node_execution=workflow_node_execution, - ) - session.commit() + workflow_node_execution = self._workflow_cycle_manager.handle_node_execution_start( + workflow_execution_id=self._workflow_run_id, event=event + ) + node_start_response = self._workflow_response_converter.workflow_node_start_to_stream_response( + event=event, + task_id=self._application_generate_entity.task_id, + workflow_node_execution=workflow_node_execution, + ) if node_start_response: yield node_start_response elif isinstance(event, QueueNodeSucceededEvent): - workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_success( + workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_success( event=event ) - node_success_response = self._workflow_cycle_manager._workflow_node_finish_to_stream_response( + node_success_response = self._workflow_response_converter.workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, workflow_node_execution=workflow_node_execution, @@ -332,10 +333,10 @@ class 
WorkflowAppGenerateTaskPipeline: | QueueNodeInLoopFailedEvent | QueueNodeExceptionEvent, ): - workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_failed( + workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_failed( event=event, ) - node_failed_response = self._workflow_cycle_manager._workflow_node_finish_to_stream_response( + node_failed_response = self._workflow_response_converter.workflow_node_finish_to_stream_response( event=event, task_id=self._application_generate_entity.task_id, workflow_node_execution=workflow_node_execution, @@ -348,18 +349,13 @@ class WorkflowAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - parallel_start_resp = ( - self._workflow_cycle_manager._workflow_parallel_branch_start_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + parallel_start_resp = ( + self._workflow_response_converter.workflow_parallel_branch_start_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, ) + ) yield parallel_start_resp @@ -367,18 +363,13 @@ class WorkflowAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - parallel_finish_resp = ( - self._workflow_cycle_manager._workflow_parallel_branch_finished_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + 
parallel_finish_resp = ( + self._workflow_response_converter.workflow_parallel_branch_finished_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, ) + ) yield parallel_finish_resp @@ -386,16 +377,11 @@ class WorkflowAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - iter_start_resp = self._workflow_cycle_manager._workflow_iteration_start_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + iter_start_resp = self._workflow_response_converter.workflow_iteration_start_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, + ) yield iter_start_resp @@ -403,16 +389,11 @@ class WorkflowAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - iter_next_resp = self._workflow_cycle_manager._workflow_iteration_next_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + iter_next_resp = self._workflow_response_converter.workflow_iteration_next_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, + ) yield iter_next_resp @@ -420,16 +401,11 @@ class WorkflowAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, 
expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - iter_finish_resp = self._workflow_cycle_manager._workflow_iteration_completed_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + iter_finish_resp = self._workflow_response_converter.workflow_iteration_completed_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, + ) yield iter_finish_resp @@ -437,16 +413,11 @@ class WorkflowAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - loop_start_resp = self._workflow_cycle_manager._workflow_loop_start_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + loop_start_resp = self._workflow_response_converter.workflow_loop_start_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, + ) yield loop_start_resp @@ -454,16 +425,11 @@ class WorkflowAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - loop_next_resp = self._workflow_cycle_manager._workflow_loop_next_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + loop_next_resp = 
self._workflow_response_converter.workflow_loop_next_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, + ) yield loop_next_resp @@ -471,16 +437,11 @@ class WorkflowAppGenerateTaskPipeline: if not self._workflow_run_id: raise ValueError("workflow run not initialized.") - with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._get_workflow_run( - session=session, workflow_run_id=self._workflow_run_id - ) - loop_finish_resp = self._workflow_cycle_manager._workflow_loop_completed_to_stream_response( - session=session, - task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, - event=event, - ) + loop_finish_resp = self._workflow_response_converter.workflow_loop_completed_to_stream_response( + task_id=self._application_generate_entity.task_id, + workflow_execution_id=self._workflow_run_id, + event=event, + ) yield loop_finish_resp @@ -491,10 +452,8 @@ class WorkflowAppGenerateTaskPipeline: raise ValueError("graph runtime state not initialized.") with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._handle_workflow_run_success( - session=session, + workflow_execution = self._workflow_cycle_manager.handle_workflow_run_success( workflow_run_id=self._workflow_run_id, - start_at=graph_runtime_state.start_at, total_tokens=graph_runtime_state.total_tokens, total_steps=graph_runtime_state.node_run_steps, outputs=event.outputs, @@ -503,12 +462,12 @@ class WorkflowAppGenerateTaskPipeline: ) # save workflow app log - self._save_workflow_app_log(session=session, workflow_run=workflow_run) + self._save_workflow_app_log(session=session, workflow_execution=workflow_execution) - workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response( + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( session=session, 
task_id=self._application_generate_entity.task_id, - workflow_run=workflow_run, + workflow_execution=workflow_execution, ) session.commit() @@ -520,10 +479,8 @@ class WorkflowAppGenerateTaskPipeline: raise ValueError("graph runtime state not initialized.") with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._handle_workflow_run_partial_success( - session=session, + workflow_execution = self._workflow_cycle_manager.handle_workflow_run_partial_success( workflow_run_id=self._workflow_run_id, - start_at=graph_runtime_state.start_at, total_tokens=graph_runtime_state.total_tokens, total_steps=graph_runtime_state.node_run_steps, outputs=event.outputs, @@ -533,10 +490,12 @@ class WorkflowAppGenerateTaskPipeline: ) # save workflow app log - self._save_workflow_app_log(session=session, workflow_run=workflow_run) + self._save_workflow_app_log(session=session, workflow_execution=workflow_execution) - workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response( - session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + session=session, + task_id=self._application_generate_entity.task_id, + workflow_execution=workflow_execution, ) session.commit() @@ -548,26 +507,28 @@ class WorkflowAppGenerateTaskPipeline: raise ValueError("graph runtime state not initialized.") with Session(db.engine, expire_on_commit=False) as session: - workflow_run = self._workflow_cycle_manager._handle_workflow_run_failed( - session=session, + workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed( workflow_run_id=self._workflow_run_id, - start_at=graph_runtime_state.start_at, total_tokens=graph_runtime_state.total_tokens, total_steps=graph_runtime_state.node_run_steps, status=WorkflowRunStatus.FAILED if isinstance(event, QueueWorkflowFailedEvent) else 
WorkflowRunStatus.STOPPED, - error=event.error if isinstance(event, QueueWorkflowFailedEvent) else event.get_stop_reason(), + error_message=event.error + if isinstance(event, QueueWorkflowFailedEvent) + else event.get_stop_reason(), conversation_id=None, trace_manager=trace_manager, exceptions_count=event.exceptions_count if isinstance(event, QueueWorkflowFailedEvent) else 0, ) # save workflow app log - self._save_workflow_app_log(session=session, workflow_run=workflow_run) + self._save_workflow_app_log(session=session, workflow_execution=workflow_execution) - workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response( - session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run + workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response( + session=session, + task_id=self._application_generate_entity.task_id, + workflow_execution=workflow_execution, ) session.commit() @@ -586,7 +547,7 @@ class WorkflowAppGenerateTaskPipeline: delta_text, from_variable_selector=event.from_variable_selector ) elif isinstance(event, QueueAgentLogEvent): - yield self._workflow_cycle_manager._handle_agent_log( + yield self._workflow_response_converter.handle_agent_log( task_id=self._application_generate_entity.task_id, event=event ) else: @@ -595,11 +556,9 @@ class WorkflowAppGenerateTaskPipeline: if tts_publisher: tts_publisher.publish(None) - def _save_workflow_app_log(self, *, session: Session, workflow_run: WorkflowRun) -> None: - """ - Save workflow app log. 
- :return: - """ + def _save_workflow_app_log(self, *, session: Session, workflow_execution: WorkflowExecution) -> None: + workflow_run = session.scalar(select(WorkflowRun).where(WorkflowRun.id == workflow_execution.id)) + assert workflow_run is not None invoke_from = self._application_generate_entity.invoke_from if invoke_from == InvokeFrom.SERVICE_API: created_from = WorkflowAppLogCreatedFrom.SERVICE_API diff --git a/api/core/app/entities/task_entities.py b/api/core/app/entities/task_entities.py index 0c2d617f80..9b2bfcbf61 100644 --- a/api/core/app/entities/task_entities.py +++ b/api/core/app/entities/task_entities.py @@ -190,7 +190,7 @@ class WorkflowStartStreamResponse(StreamResponse): id: str workflow_id: str sequence_number: int - inputs: dict + inputs: Mapping[str, Any] created_at: int event: StreamEvent = StreamEvent.WORKFLOW_STARTED @@ -212,7 +212,7 @@ class WorkflowFinishStreamResponse(StreamResponse): workflow_id: str sequence_number: int status: str - outputs: Optional[dict] = None + outputs: Optional[Mapping[str, Any]] = None error: Optional[str] = None elapsed_time: float total_tokens: int @@ -788,7 +788,7 @@ class WorkflowAppBlockingResponse(AppBlockingResponse): id: str workflow_id: str status: str - outputs: Optional[dict] = None + outputs: Optional[Mapping[str, Any]] = None error: Optional[str] = None elapsed_time: float total_tokens: int diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index 8ddeb1b846..a3424c7421 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -30,6 +30,7 @@ from core.ops.entities.trace_entity import ( WorkflowTraceInfo, ) from core.ops.utils import get_message_data +from core.workflow.entities.workflow_execution_entities import WorkflowExecution from extensions.ext_database import db from extensions.ext_storage import storage from models.model import App, AppModelConfig, Conversation, Message, MessageFile, TraceAppConfig @@ -234,7 +235,11 @@ class 
OpsTraceManager: return None tracing_provider = app_ops_trace_config.get("tracing_provider") - if tracing_provider is None or tracing_provider not in provider_config_map: + if tracing_provider is None: + return None + try: + provider_config_map[tracing_provider] + except KeyError: return None # decrypt_token @@ -287,10 +292,11 @@ class OpsTraceManager: :return: """ # auth check - try: - provider_config_map[tracing_provider] - except KeyError: - raise ValueError(f"Invalid tracing provider: {tracing_provider}") + if tracing_provider is not None: + try: + provider_config_map[tracing_provider] + except KeyError: + raise ValueError(f"Invalid tracing provider: {tracing_provider}") app_config: Optional[App] = db.session.query(App).filter(App.id == app_id).first() if not app_config: @@ -369,7 +375,7 @@ class TraceTask: self, trace_type: Any, message_id: Optional[str] = None, - workflow_run: Optional[WorkflowRun] = None, + workflow_execution: Optional[WorkflowExecution] = None, conversation_id: Optional[str] = None, user_id: Optional[str] = None, timer: Optional[Any] = None, @@ -377,7 +383,7 @@ class TraceTask: ): self.trace_type = trace_type self.message_id = message_id - self.workflow_run_id = workflow_run.id if workflow_run else None + self.workflow_run_id = workflow_execution.id if workflow_execution else None self.conversation_id = conversation_id self.user_id = user_id self.timer = timer diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py index 01f74b4a22..2c5178241c 100644 --- a/api/core/rag/datasource/retrieval_service.py +++ b/api/core/rag/datasource/retrieval_service.py @@ -405,7 +405,29 @@ class RetrievalService: record["child_chunks"] = segment_child_map[record["segment"].id].get("child_chunks") # type: ignore record["score"] = segment_child_map[record["segment"].id]["max_score"] - return [RetrievalSegments(**record) for record in records] + result = [] + for record in records: + # Extract segment + segment = 
record["segment"] + + # Extract child_chunks, ensuring it's a list or None + child_chunks = record.get("child_chunks") + if not isinstance(child_chunks, list): + child_chunks = None + + # Extract score, ensuring it's a float or None + score_value = record.get("score") + score = ( + float(score_value) + if score_value is not None and isinstance(score_value, int | float | str) + else None + ) + + # Create RetrievalSegments object + retrieval_segment = RetrievalSegments(segment=segment, child_chunks=child_chunks, score=score) + result.append(retrieval_segment) + + return result except Exception as e: db.session.rollback() raise e diff --git a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py index e23b8d197f..6991598ce6 100644 --- a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py +++ b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py @@ -23,7 +23,8 @@ logger = logging.getLogger(__name__) class OpenSearchConfig(BaseModel): host: str port: int - secure: bool = False + secure: bool = False # use_ssl + verify_certs: bool = True auth_method: Literal["basic", "aws_managed_iam"] = "basic" user: Optional[str] = None password: Optional[str] = None @@ -42,6 +43,8 @@ class OpenSearchConfig(BaseModel): raise ValueError("config OPENSEARCH_AWS_REGION is required for AWS_MANAGED_IAM auth method") if not values.get("aws_service"): raise ValueError("config OPENSEARCH_AWS_SERVICE is required for AWS_MANAGED_IAM auth method") + if not values.get("OPENSEARCH_SECURE") and values.get("OPENSEARCH_VERIFY_CERTS"): + raise ValueError("verify_certs=True requires secure (HTTPS) connection") return values def create_aws_managed_iam_auth(self) -> Urllib3AWSV4SignerAuth: @@ -57,7 +60,7 @@ class OpenSearchConfig(BaseModel): params = { "hosts": [{"host": self.host, "port": self.port}], "use_ssl": self.secure, - "verify_certs": self.secure, + "verify_certs": self.verify_certs, "connection_class": 
Urllib3HttpConnection, "pool_maxsize": 20, } @@ -279,6 +282,7 @@ class OpenSearchVectorFactory(AbstractVectorFactory): host=dify_config.OPENSEARCH_HOST or "localhost", port=dify_config.OPENSEARCH_PORT, secure=dify_config.OPENSEARCH_SECURE, + verify_certs=dify_config.OPENSEARCH_VERIFY_CERTS, auth_method=dify_config.OPENSEARCH_AUTH_METHOD.value, user=dify_config.OPENSEARCH_USER, password=dify_config.OPENSEARCH_PASSWORD, diff --git a/api/core/rag/datasource/vdb/tencent/tencent_vector.py b/api/core/rag/datasource/vdb/tencent/tencent_vector.py index e266659075..d2bf3eb92a 100644 --- a/api/core/rag/datasource/vdb/tencent/tencent_vector.py +++ b/api/core/rag/datasource/vdb/tencent/tencent_vector.py @@ -271,12 +271,15 @@ class TencentVector(BaseVector): for result in res[0]: meta = result.get(self.field_metadata) + if isinstance(meta, str): + # Compatible with version 1.1.3 and below. + meta = json.loads(meta) + score = 1 - result.get("score", 0.0) score = result.get("score", 0.0) if score > score_threshold: meta["score"] = score doc = Document(page_content=result.get(self.field_text), metadata=meta) docs.append(doc) - return docs def delete(self) -> None: diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index d3605da146..c4adf6de4d 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -190,7 +190,7 @@ class DatasetRetrieval: retrieve_config.rerank_mode or "reranking_model", retrieve_config.reranking_model, retrieve_config.weights, - retrieve_config.reranking_enabled or True, + True if retrieve_config.reranking_enabled is None else retrieve_config.reranking_enabled, message_id, metadata_filter_document_ids, metadata_condition, diff --git a/api/core/repositories/sqlalchemy_workflow_execution_repository.py b/api/core/repositories/sqlalchemy_workflow_execution_repository.py new file mode 100644 index 0000000000..c1a71b45d0 --- /dev/null +++ 
class SQLAlchemyWorkflowExecutionRepository(WorkflowExecutionRepository):
    """
    SQLAlchemy implementation of the WorkflowExecutionRepository interface.

    This implementation supports multi-tenancy by filtering operations based on tenant_id.
    Each method creates its own session, handles the transaction, and commits changes
    to the database. This prevents long-running connections in the workflow core.

    This implementation also includes an in-memory cache for workflow executions to improve
    performance by reducing database queries.
    """

    def __init__(
        self,
        session_factory: sessionmaker | Engine,
        user: Union[Account, EndUser],
        app_id: Optional[str],
        triggered_from: Optional[WorkflowRunTriggeredFrom],
    ):
        """
        Initialize the repository with a SQLAlchemy sessionmaker or engine and context information.

        Args:
            session_factory: SQLAlchemy sessionmaker or engine for creating sessions
            user: Account or EndUser object containing tenant_id, user ID, and role information
            app_id: App ID for filtering by application (can be None)
            triggered_from: Source of the execution trigger (DEBUGGING or APP_RUN)

        Raises:
            ValueError: if session_factory is neither a sessionmaker nor an Engine,
                or if no tenant id can be derived from the user.
        """
        # If an engine is provided, create a sessionmaker from it
        if isinstance(session_factory, Engine):
            self._session_factory = sessionmaker(bind=session_factory, expire_on_commit=False)
        elif isinstance(session_factory, sessionmaker):
            self._session_factory = session_factory
        else:
            raise ValueError(
                f"Invalid session_factory type {type(session_factory).__name__}; expected sessionmaker or Engine"
            )

        # Extract tenant_id from user.
        # EndUser carries tenant_id directly; Account exposes current_tenant_id.
        tenant_id: str | None = user.tenant_id if isinstance(user, EndUser) else user.current_tenant_id
        if not tenant_id:
            raise ValueError("User must have a tenant_id or current_tenant_id")
        self._tenant_id = tenant_id

        # Store app context
        self._app_id = app_id

        # Extract user context
        self._triggered_from = triggered_from
        self._creator_user_id = user.id

        # Determine user role based on user type
        self._creator_user_role = CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER

        # Initialize in-memory cache for workflow executions
        # Key: execution_id, Value: WorkflowRun (DB model)
        # NOTE(review): the cache is unbounded; presumably one repository instance
        # lives only for a single workflow run — confirm before long-lived reuse.
        self._execution_cache: dict[str, WorkflowRun] = {}

    def _to_domain_model(self, db_model: WorkflowRun) -> WorkflowExecution:
        """
        Convert a database model to a domain model.

        Args:
            db_model: The database model to convert

        Returns:
            The domain model
        """
        # Parse JSON fields (the *_dict accessors deserialize the stored JSON columns)
        inputs = db_model.inputs_dict
        outputs = db_model.outputs_dict
        graph = db_model.graph_dict

        # Convert status to domain enum
        status = WorkflowExecutionStatus(db_model.status)

        return WorkflowExecution(
            id=db_model.id,
            workflow_id=db_model.workflow_id,
            sequence_number=db_model.sequence_number,
            type=WorkflowType(db_model.type),
            workflow_version=db_model.version,
            graph=graph,
            inputs=inputs,
            outputs=outputs,
            status=status,
            # DB allows NULL error; domain model uses empty string for "no error"
            error_message=db_model.error or "",
            total_tokens=db_model.total_tokens,
            total_steps=db_model.total_steps,
            exceptions_count=db_model.exceptions_count,
            # Domain started_at maps to the row's created_at timestamp
            started_at=db_model.created_at,
            finished_at=db_model.finished_at,
        )

    def _to_db_model(self, domain_model: WorkflowExecution) -> WorkflowRun:
        """
        Convert a domain model to a database model.

        Tenant, app, trigger source, and creator fields come from the repository's
        constructor context rather than from the domain model itself.

        Args:
            domain_model: The domain model to convert

        Returns:
            The database model

        Raises:
            ValueError: if the repository was constructed without triggered_from
                or creator information.
        """
        # Use values from constructor if provided
        if not self._triggered_from:
            raise ValueError("triggered_from is required in repository constructor")
        if not self._creator_user_id:
            raise ValueError("created_by is required in repository constructor")
        if not self._creator_user_role:
            raise ValueError("created_by_role is required in repository constructor")

        db_model = WorkflowRun()
        db_model.id = domain_model.id
        db_model.tenant_id = self._tenant_id
        if self._app_id is not None:
            db_model.app_id = self._app_id
        db_model.workflow_id = domain_model.workflow_id
        db_model.triggered_from = self._triggered_from
        db_model.sequence_number = domain_model.sequence_number
        db_model.type = domain_model.type
        db_model.version = domain_model.workflow_version
        # Empty/None JSON-able fields are stored as NULL rather than "{}"/"null"
        db_model.graph = json.dumps(domain_model.graph) if domain_model.graph else None
        db_model.inputs = json.dumps(domain_model.inputs) if domain_model.inputs else None
        db_model.outputs = json.dumps(domain_model.outputs) if domain_model.outputs else None
        # NOTE(review): assigns the enum value directly; presumably the column/type
        # coerces the enum to its string value — confirm against the WorkflowRun model.
        db_model.status = domain_model.status
        db_model.error = domain_model.error_message if domain_model.error_message else None
        db_model.total_tokens = domain_model.total_tokens
        db_model.total_steps = domain_model.total_steps
        db_model.exceptions_count = domain_model.exceptions_count
        db_model.created_by_role = self._creator_user_role
        db_model.created_by = self._creator_user_id
        db_model.created_at = domain_model.started_at
        db_model.finished_at = domain_model.finished_at

        # Calculate elapsed time if finished_at is available
        if domain_model.finished_at:
            db_model.elapsed_time = (domain_model.finished_at - domain_model.started_at).total_seconds()
        else:
            db_model.elapsed_time = 0

        return db_model

    def save(self, execution: WorkflowExecution) -> None:
        """
        Save or update a WorkflowExecution domain entity to the database.

        This method serves as a domain-to-database adapter that:
        1. Converts the domain entity to its database representation
        2. Persists the database model using SQLAlchemy's merge operation
        3. Maintains proper multi-tenancy by including tenant context during conversion
        4. Updates the in-memory cache for faster subsequent lookups

        The method handles both creating new records and updating existing ones through
        SQLAlchemy's merge operation.

        Args:
            execution: The WorkflowExecution domain entity to persist
        """
        # Convert domain model to database model using tenant context and other attributes
        db_model = self._to_db_model(execution)

        # Create a new database session
        with self._session_factory() as session:
            # SQLAlchemy merge intelligently handles both insert and update operations
            # based on the presence of the primary key
            session.merge(db_model)
            session.commit()

        # Update the in-memory cache for faster subsequent lookups
        # NOTE(review): caches the pre-merge instance, not the merged/refreshed row;
        # fine as long as no DB-side defaults diverge from the converted model.
        logger.debug(f"Updating cache for execution_id: {db_model.id}")
        self._execution_cache[db_model.id] = db_model

    def get(self, execution_id: str) -> Optional[WorkflowExecution]:
        """
        Retrieve a WorkflowExecution by its ID.

        First checks the in-memory cache, and if not found, queries the database.
        If found in the database, adds it to the cache for future lookups.

        Args:
            execution_id: The workflow execution ID

        Returns:
            The WorkflowExecution instance if found, None otherwise
        """
        # First check the cache
        if execution_id in self._execution_cache:
            logger.debug(f"Cache hit for execution_id: {execution_id}")
            # Convert cached DB model to domain model
            cached_db_model = self._execution_cache[execution_id]
            return self._to_domain_model(cached_db_model)

        # If not in cache, query the database
        logger.debug(f"Cache miss for execution_id: {execution_id}, querying database")
        with self._session_factory() as session:
            # Always scope the lookup to the repository's tenant for isolation
            stmt = select(WorkflowRun).where(
                WorkflowRun.id == execution_id,
                WorkflowRun.tenant_id == self._tenant_id,
            )

            # Narrow further by app when the repository has an app context
            if self._app_id:
                stmt = stmt.where(WorkflowRun.app_id == self._app_id)

            db_model = session.scalar(stmt)
            if db_model:
                # Add DB model to cache
                self._execution_cache[execution_id] = db_model

                # Convert to domain model and return
                return self._to_domain_model(db_model)

            return None
b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py index 3bf775db13..8d916a19db 100644 --- a/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py +++ b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py @@ -4,13 +4,14 @@ SQLAlchemy implementation of the WorkflowNodeExecutionRepository. import json import logging -from collections.abc import Mapping, Sequence -from typing import Any, Optional, Union, cast +from collections.abc import Sequence +from typing import Optional, Union from sqlalchemy import UnaryExpression, asc, delete, desc, select from sqlalchemy.engine import Engine from sqlalchemy.orm import sessionmaker +from core.model_runtime.utils.encoders import jsonable_encoder from core.workflow.entities.node_entities import NodeRunMetadataKey from core.workflow.entities.node_execution_entities import ( NodeExecution, @@ -85,8 +86,8 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) self._creator_user_role = CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER # Initialize in-memory cache for node executions - # Key: node_execution_id, Value: NodeExecution - self._node_execution_cache: dict[str, NodeExecution] = {} + # Key: node_execution_id, Value: WorkflowNodeExecution (DB model) + self._node_execution_cache: dict[str, WorkflowNodeExecution] = {} def _to_domain_model(self, db_model: WorkflowNodeExecution) -> NodeExecution: """ @@ -102,7 +103,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) inputs = db_model.inputs_dict process_data = db_model.process_data_dict outputs = db_model.outputs_dict - metadata = db_model.execution_metadata_dict + metadata = {NodeRunMetadataKey(k): v for k, v in db_model.execution_metadata_dict.items()} # Convert status to domain enum status = NodeExecutionStatus(db_model.status) @@ -123,12 +124,7 @@ class 
SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) status=status, error=db_model.error, elapsed_time=db_model.elapsed_time, - # FIXME(QuantumGhost): a temporary workaround for the following type check failure in Python 3.11. - # However, this problem is not occurred in Python 3.12. - # - # A case of this error is: - # https://github.com/langgenius/dify/actions/runs/15112698604/job/42475659482?pr=19737#step:9:24 - metadata=cast(Mapping[NodeRunMetadataKey, Any] | None, metadata), + metadata=metadata, created_at=db_model.created_at, finished_at=db_model.finished_at, ) @@ -171,7 +167,9 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) db_model.status = domain_model.status db_model.error = domain_model.error db_model.elapsed_time = domain_model.elapsed_time - db_model.execution_metadata = json.dumps(domain_model.metadata) if domain_model.metadata else None + db_model.execution_metadata = ( + json.dumps(jsonable_encoder(domain_model.metadata)) if domain_model.metadata else None + ) db_model.created_at = domain_model.created_at db_model.created_by_role = self._creator_user_role db_model.created_by = self._creator_user_id @@ -208,7 +206,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) # Only cache if we have a node_execution_id to use as the cache key if db_model.node_execution_id: logger.debug(f"Updating cache for node_execution_id: {db_model.node_execution_id}") - self._node_execution_cache[db_model.node_execution_id] = execution + self._node_execution_cache[db_model.node_execution_id] = db_model def get_by_node_execution_id(self, node_execution_id: str) -> Optional[NodeExecution]: """ @@ -226,7 +224,9 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) # First check the cache if node_execution_id in self._node_execution_cache: logger.debug(f"Cache hit for node_execution_id: {node_execution_id}") - return 
self._node_execution_cache[node_execution_id] + # Convert cached DB model to domain model + cached_db_model = self._node_execution_cache[node_execution_id] + return self._to_domain_model(cached_db_model) # If not in cache, query the database logger.debug(f"Cache miss for node_execution_id: {node_execution_id}, querying database") @@ -241,26 +241,25 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) db_model = session.scalar(stmt) if db_model: - # Convert to domain model - domain_model = self._to_domain_model(db_model) + # Add DB model to cache + self._node_execution_cache[node_execution_id] = db_model - # Add to cache - self._node_execution_cache[node_execution_id] = domain_model - - return domain_model + # Convert to domain model and return + return self._to_domain_model(db_model) return None - def get_by_workflow_run( + def get_db_models_by_workflow_run( self, workflow_run_id: str, order_config: Optional[OrderConfig] = None, - ) -> Sequence[NodeExecution]: + ) -> Sequence[WorkflowNodeExecution]: """ - Retrieve all NodeExecution instances for a specific workflow run. + Retrieve all WorkflowNodeExecution database models for a specific workflow run. - This method always queries the database to ensure complete and ordered results, - but updates the cache with any retrieved executions. + This method directly returns database models without converting to domain models, + which is useful when you need to access database-specific fields like triggered_from. + It also updates the in-memory cache with the retrieved models. 
Args: workflow_run_id: The workflow run ID @@ -269,7 +268,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) order_config.order_direction: Direction to order ("asc" or "desc") Returns: - A list of NodeExecution instances + A list of WorkflowNodeExecution database models """ with self._session_factory() as session: stmt = select(WorkflowNodeExecution).where( @@ -298,16 +297,43 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) db_models = session.scalars(stmt).all() - # Convert database models to domain models and update cache - domain_models = [] + # Update the cache with the retrieved DB models for model in db_models: - domain_model = self._to_domain_model(model) - # Update cache if node_execution_id is present - if domain_model.node_execution_id: - self._node_execution_cache[domain_model.node_execution_id] = domain_model - domain_models.append(domain_model) + if model.node_execution_id: + self._node_execution_cache[model.node_execution_id] = model - return domain_models + return db_models + + def get_by_workflow_run( + self, + workflow_run_id: str, + order_config: Optional[OrderConfig] = None, + ) -> Sequence[NodeExecution]: + """ + Retrieve all NodeExecution instances for a specific workflow run. + + This method always queries the database to ensure complete and ordered results, + but updates the cache with any retrieved executions. 
+ + Args: + workflow_run_id: The workflow run ID + order_config: Optional configuration for ordering results + order_config.order_by: List of fields to order by (e.g., ["index", "created_at"]) + order_config.order_direction: Direction to order ("asc" or "desc") + + Returns: + A list of NodeExecution instances + """ + # Get the database models using the new method + db_models = self.get_db_models_by_workflow_run(workflow_run_id, order_config) + + # Convert database models to domain models + domain_models = [] + for model in db_models: + domain_model = self._to_domain_model(model) + domain_models.append(domain_model) + + return domain_models def get_running_executions(self, workflow_run_id: str) -> Sequence[NodeExecution]: """ @@ -337,10 +363,12 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository) domain_models = [] for model in db_models: - domain_model = self._to_domain_model(model) # Update cache if node_execution_id is present - if domain_model.node_execution_id: - self._node_execution_cache[domain_model.node_execution_id] = domain_model + if model.node_execution_id: + self._node_execution_cache[model.node_execution_id] = model + + # Convert to domain model + domain_model = self._to_domain_model(model) domain_models.append(domain_model) return domain_models diff --git a/api/core/tools/tool_manager.py b/api/core/tools/tool_manager.py index aa2661fe63..0bfe6329b1 100644 --- a/api/core/tools/tool_manager.py +++ b/api/core/tools/tool_manager.py @@ -528,7 +528,7 @@ class ToolManager: yield provider except Exception: - logger.exception(f"load builtin provider {provider}") + logger.exception(f"load builtin provider {provider_path}") continue # set builtin providers loaded cls._builtin_providers_loaded = True @@ -644,10 +644,10 @@ class ToolManager: ) workflow_provider_controllers: list[WorkflowToolProviderController] = [] - for provider in workflow_providers: + for workflow_provider in workflow_providers: try: 
workflow_provider_controllers.append( - ToolTransformService.workflow_provider_to_controller(db_provider=provider) + ToolTransformService.workflow_provider_to_controller(db_provider=workflow_provider) ) except Exception: # app has been deleted diff --git a/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py b/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py index fff261e0bd..7b6882ed52 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py @@ -125,6 +125,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool): return "" # get retrieval model , if the model is not setting , using default retrieval_model: dict[str, Any] = dataset.retrieval_model or default_retrieval_model + retrieval_resource_list = [] if dataset.indexing_technique == "economy": # use keyword table query documents = RetrievalService.retrieve( @@ -181,7 +182,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool): score=record.score, ) ) - retrieval_resource_list = [] + if self.return_resource: for record in records: segment = record.segment diff --git a/api/core/tools/workflow_as_tool/tool.py b/api/core/tools/workflow_as_tool/tool.py index 241b4a94de..57c93d1d45 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -1,7 +1,9 @@ import json import logging from collections.abc import Generator -from typing import Any, Optional, Union, cast +from typing import Any, Optional, cast + +from flask_login import current_user from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod from core.tools.__base.tool import Tool @@ -87,7 +89,7 @@ class WorkflowTool(Tool): result = generator.generate( app_model=app, workflow=workflow, - user=self._get_user(user_id), + user=cast("Account | EndUser", current_user), args={"inputs": tool_parameters, "files": files}, invoke_from=self.runtime.invoke_from, streaming=False, @@ -111,20 +113,6 @@ 
class WorkflowTool(Tool): yield self.create_text_message(json.dumps(outputs, ensure_ascii=False)) yield self.create_json_message(outputs) - def _get_user(self, user_id: str) -> Union[EndUser, Account]: - """ - get the user by user id - """ - - user = db.session.query(EndUser).filter(EndUser.id == user_id).first() - if not user: - user = db.session.query(Account).filter(Account.id == user_id).first() - - if not user: - raise ValueError("user not found") - - return user - def fork_tool_runtime(self, runtime: ToolRuntime) -> "WorkflowTool": """ fork a new tool with metadata diff --git a/api/core/workflow/entities/workflow_execution_entities.py b/api/core/workflow/entities/workflow_execution_entities.py new file mode 100644 index 0000000000..200d4697b5 --- /dev/null +++ b/api/core/workflow/entities/workflow_execution_entities.py @@ -0,0 +1,91 @@ +""" +Domain entities for workflow execution. + +Models are independent of the storage mechanism and don't contain +implementation details like tenant_id, app_id, etc. +""" + +from collections.abc import Mapping +from datetime import UTC, datetime +from enum import StrEnum +from typing import Any, Optional + +from pydantic import BaseModel, Field + + +class WorkflowType(StrEnum): + """ + Workflow Type Enum for domain layer + """ + + WORKFLOW = "workflow" + CHAT = "chat" + + +class WorkflowExecutionStatus(StrEnum): + RUNNING = "running" + SUCCEEDED = "succeeded" + FAILED = "failed" + STOPPED = "stopped" + PARTIAL_SUCCEEDED = "partial-succeeded" + + +class WorkflowExecution(BaseModel): + """ + Domain model for workflow execution based on WorkflowRun but without + user, tenant, and app attributes. + """ + + id: str = Field(...) + workflow_id: str = Field(...) + workflow_version: str = Field(...) + sequence_number: int = Field(...) + + type: WorkflowType = Field(...) + graph: Mapping[str, Any] = Field(...) + + inputs: Mapping[str, Any] = Field(...) 
+ outputs: Optional[Mapping[str, Any]] = None + + status: WorkflowExecutionStatus = WorkflowExecutionStatus.RUNNING + error_message: str = Field(default="") + total_tokens: int = Field(default=0) + total_steps: int = Field(default=0) + exceptions_count: int = Field(default=0) + + started_at: datetime = Field(...) + finished_at: Optional[datetime] = None + + @property + def elapsed_time(self) -> float: + """ + Calculate elapsed time in seconds. + If workflow is not finished, use current time. + """ + end_time = self.finished_at or datetime.now(UTC).replace(tzinfo=None) + return (end_time - self.started_at).total_seconds() + + @classmethod + def new( + cls, + *, + id: str, + workflow_id: str, + sequence_number: int, + type: WorkflowType, + workflow_version: str, + graph: Mapping[str, Any], + inputs: Mapping[str, Any], + started_at: datetime, + ) -> "WorkflowExecution": + return WorkflowExecution( + id=id, + workflow_id=workflow_id, + sequence_number=sequence_number, + type=type, + workflow_version=workflow_version, + graph=graph, + inputs=inputs, + status=WorkflowExecutionStatus.RUNNING, + started_at=started_at, + ) diff --git a/api/core/workflow/repository/workflow_execution_repository.py b/api/core/workflow/repository/workflow_execution_repository.py new file mode 100644 index 0000000000..a39a98ee33 --- /dev/null +++ b/api/core/workflow/repository/workflow_execution_repository.py @@ -0,0 +1,42 @@ +from typing import Optional, Protocol + +from core.workflow.entities.workflow_execution_entities import WorkflowExecution + + +class WorkflowExecutionRepository(Protocol): + """ + Repository interface for WorkflowExecution. + + This interface defines the contract for accessing and manipulating + WorkflowExecution data, regardless of the underlying storage mechanism. + + Note: Domain-specific concepts like multi-tenancy (tenant_id), application context (app_id), + and other implementation details should be handled at the implementation level, not in + the core interface. 
This keeps the core domain model clean and independent of specific + application domains or deployment scenarios. + """ + + def save(self, execution: WorkflowExecution) -> None: + """ + Save or update a WorkflowExecution instance. + + This method handles both creating new records and updating existing ones. + The implementation should determine whether to create or update based on + the execution's ID or other identifying fields. + + Args: + execution: The WorkflowExecution instance to save or update + """ + ... + + def get(self, execution_id: str) -> Optional[WorkflowExecution]: + """ + Retrieve a WorkflowExecution by its ID. + + Args: + execution_id: The workflow execution ID + + Returns: + The WorkflowExecution instance if found, None otherwise + """ + ... diff --git a/api/core/workflow/workflow_cycle_manager.py b/api/core/workflow/workflow_cycle_manager.py index 6d33d7372c..1918dd9f09 100644 --- a/api/core/workflow/workflow_cycle_manager.py +++ b/api/core/workflow/workflow_cycle_manager.py @@ -1,22 +1,13 @@ -import json -import time -from collections.abc import Mapping, Sequence +from collections.abc import Mapping from datetime import UTC, datetime -from typing import Any, Optional, Union, cast +from typing import Any, Optional, Union from uuid import uuid4 from sqlalchemy import func, select from sqlalchemy.orm import Session -from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom, WorkflowAppGenerateEntity +from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity from core.app.entities.queue_entities import ( - QueueAgentLogEvent, - QueueIterationCompletedEvent, - QueueIterationNextEvent, - QueueIterationStartEvent, - QueueLoopCompletedEvent, - QueueLoopNextEvent, - QueueLoopStartEvent, QueueNodeExceptionEvent, QueueNodeFailedEvent, QueueNodeInIterationFailedEvent, @@ -24,50 +15,24 @@ from core.app.entities.queue_entities import ( QueueNodeRetryEvent, 
QueueNodeStartedEvent, QueueNodeSucceededEvent, - QueueParallelBranchRunFailedEvent, - QueueParallelBranchRunStartedEvent, - QueueParallelBranchRunSucceededEvent, -) -from core.app.entities.task_entities import ( - AgentLogStreamResponse, - IterationNodeCompletedStreamResponse, - IterationNodeNextStreamResponse, - IterationNodeStartStreamResponse, - LoopNodeCompletedStreamResponse, - LoopNodeNextStreamResponse, - LoopNodeStartStreamResponse, - NodeFinishStreamResponse, - NodeRetryStreamResponse, - NodeStartStreamResponse, - ParallelBranchFinishedStreamResponse, - ParallelBranchStartStreamResponse, - WorkflowFinishStreamResponse, - WorkflowStartStreamResponse, ) from core.app.task_pipeline.exc import WorkflowRunNotFoundError -from core.file import FILE_MODEL_IDENTITY, File from core.ops.entities.trace_entity import TraceTaskName from core.ops.ops_trace_manager import TraceQueueManager, TraceTask -from core.tools.tool_manager import ToolManager from core.workflow.entities.node_entities import NodeRunMetadataKey from core.workflow.entities.node_execution_entities import ( NodeExecution, NodeExecutionStatus, ) +from core.workflow.entities.workflow_execution_entities import WorkflowExecution, WorkflowExecutionStatus, WorkflowType from core.workflow.enums import SystemVariableKey -from core.workflow.nodes import NodeType -from core.workflow.nodes.tool.entities import ToolNodeData +from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository from core.workflow.workflow_entry import WorkflowEntry from models import ( - Account, - CreatorUserRole, - EndUser, Workflow, - WorkflowNodeExecutionStatus, WorkflowRun, WorkflowRunStatus, - WorkflowRunTriggeredFrom, ) @@ -77,21 +42,20 @@ class WorkflowCycleManager: *, application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity], workflow_system_variables: 
dict[SystemVariableKey, Any], + workflow_execution_repository: WorkflowExecutionRepository, workflow_node_execution_repository: WorkflowNodeExecutionRepository, ) -> None: - self._workflow_run: WorkflowRun | None = None self._application_generate_entity = application_generate_entity self._workflow_system_variables = workflow_system_variables + self._workflow_execution_repository = workflow_execution_repository self._workflow_node_execution_repository = workflow_node_execution_repository - def _handle_workflow_run_start( + def handle_workflow_run_start( self, *, session: Session, workflow_id: str, - user_id: str, - created_by_role: CreatorUserRole, - ) -> WorkflowRun: + ) -> WorkflowExecution: workflow_stmt = select(Workflow).where(Workflow.id == workflow_id) workflow = session.scalar(workflow_stmt) if not workflow: @@ -110,157 +74,116 @@ class WorkflowCycleManager: continue inputs[f"sys.{key.value}"] = value - triggered_from = ( - WorkflowRunTriggeredFrom.DEBUGGING - if self._application_generate_entity.invoke_from == InvokeFrom.DEBUGGER - else WorkflowRunTriggeredFrom.APP_RUN - ) - # handle special values inputs = dict(WorkflowEntry.handle_special_values(inputs) or {}) # init workflow run # TODO: This workflow_run_id should always not be None, maybe we can use a more elegant way to handle this - workflow_run_id = str(self._workflow_system_variables.get(SystemVariableKey.WORKFLOW_RUN_ID) or uuid4()) + execution_id = str(self._workflow_system_variables.get(SystemVariableKey.WORKFLOW_RUN_ID) or uuid4()) + execution = WorkflowExecution.new( + id=execution_id, + workflow_id=workflow.id, + sequence_number=new_sequence_number, + type=WorkflowType(workflow.type), + workflow_version=workflow.version, + graph=workflow.graph_dict, + inputs=inputs, + started_at=datetime.now(UTC).replace(tzinfo=None), + ) - workflow_run = WorkflowRun() - workflow_run.id = workflow_run_id - workflow_run.tenant_id = workflow.tenant_id - workflow_run.app_id = workflow.app_id - 
workflow_run.sequence_number = new_sequence_number - workflow_run.workflow_id = workflow.id - workflow_run.type = workflow.type - workflow_run.triggered_from = triggered_from.value - workflow_run.version = workflow.version - workflow_run.graph = workflow.graph - workflow_run.inputs = json.dumps(inputs) - workflow_run.status = WorkflowRunStatus.RUNNING - workflow_run.created_by_role = created_by_role - workflow_run.created_by = user_id - workflow_run.created_at = datetime.now(UTC).replace(tzinfo=None) + self._workflow_execution_repository.save(execution) - session.add(workflow_run) + return execution - return workflow_run - - def _handle_workflow_run_success( + def handle_workflow_run_success( self, *, - session: Session, workflow_run_id: str, - start_at: float, total_tokens: int, total_steps: int, outputs: Mapping[str, Any] | None = None, conversation_id: Optional[str] = None, trace_manager: Optional[TraceQueueManager] = None, - ) -> WorkflowRun: - """ - Workflow run success - :param workflow_run_id: workflow run id - :param start_at: start time - :param total_tokens: total tokens - :param total_steps: total steps - :param outputs: outputs - :param conversation_id: conversation id - :return: - """ - workflow_run = self._get_workflow_run(session=session, workflow_run_id=workflow_run_id) + ) -> WorkflowExecution: + workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id) outputs = WorkflowEntry.handle_special_values(outputs) - workflow_run.status = WorkflowRunStatus.SUCCEEDED - workflow_run.outputs = json.dumps(outputs or {}) - workflow_run.elapsed_time = time.perf_counter() - start_at - workflow_run.total_tokens = total_tokens - workflow_run.total_steps = total_steps - workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None) + workflow_execution.status = WorkflowExecutionStatus.SUCCEEDED + workflow_execution.outputs = outputs or {} + workflow_execution.total_tokens = total_tokens + workflow_execution.total_steps = total_steps + 
workflow_execution.finished_at = datetime.now(UTC).replace(tzinfo=None) if trace_manager: trace_manager.add_trace_task( TraceTask( TraceTaskName.WORKFLOW_TRACE, - workflow_run=workflow_run, + workflow_execution=workflow_execution, conversation_id=conversation_id, user_id=trace_manager.user_id, ) ) - return workflow_run + return workflow_execution - def _handle_workflow_run_partial_success( + def handle_workflow_run_partial_success( self, *, - session: Session, workflow_run_id: str, - start_at: float, total_tokens: int, total_steps: int, outputs: Mapping[str, Any] | None = None, exceptions_count: int = 0, conversation_id: Optional[str] = None, trace_manager: Optional[TraceQueueManager] = None, - ) -> WorkflowRun: - workflow_run = self._get_workflow_run(session=session, workflow_run_id=workflow_run_id) + ) -> WorkflowExecution: + execution = self._get_workflow_execution_or_raise_error(workflow_run_id) outputs = WorkflowEntry.handle_special_values(dict(outputs) if outputs else None) - workflow_run.status = WorkflowRunStatus.PARTIAL_SUCCEEDED.value - workflow_run.outputs = json.dumps(outputs or {}) - workflow_run.elapsed_time = time.perf_counter() - start_at - workflow_run.total_tokens = total_tokens - workflow_run.total_steps = total_steps - workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None) - workflow_run.exceptions_count = exceptions_count + execution.status = WorkflowExecutionStatus.PARTIAL_SUCCEEDED + execution.outputs = outputs or {} + execution.total_tokens = total_tokens + execution.total_steps = total_steps + execution.finished_at = datetime.now(UTC).replace(tzinfo=None) + execution.exceptions_count = exceptions_count if trace_manager: trace_manager.add_trace_task( TraceTask( TraceTaskName.WORKFLOW_TRACE, - workflow_run=workflow_run, + workflow_execution=execution, conversation_id=conversation_id, user_id=trace_manager.user_id, ) ) - return workflow_run + return execution - def _handle_workflow_run_failed( + def handle_workflow_run_failed( self, 
*, - session: Session, workflow_run_id: str, - start_at: float, total_tokens: int, total_steps: int, status: WorkflowRunStatus, - error: str, + error_message: str, conversation_id: Optional[str] = None, trace_manager: Optional[TraceQueueManager] = None, exceptions_count: int = 0, - ) -> WorkflowRun: - """ - Workflow run failed - :param workflow_run_id: workflow run id - :param start_at: start time - :param total_tokens: total tokens - :param total_steps: total steps - :param status: status - :param error: error message - :return: - """ - workflow_run = self._get_workflow_run(session=session, workflow_run_id=workflow_run_id) + ) -> WorkflowExecution: + execution = self._get_workflow_execution_or_raise_error(workflow_run_id) - workflow_run.status = status.value - workflow_run.error = error - workflow_run.elapsed_time = time.perf_counter() - start_at - workflow_run.total_tokens = total_tokens - workflow_run.total_steps = total_steps - workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None) - workflow_run.exceptions_count = exceptions_count + execution.status = WorkflowExecutionStatus(status.value) + execution.error_message = error_message + execution.total_tokens = total_tokens + execution.total_steps = total_steps + execution.finished_at = datetime.now(UTC).replace(tzinfo=None) + execution.exceptions_count = exceptions_count # Use the instance repository to find running executions for a workflow run running_domain_executions = self._workflow_node_execution_repository.get_running_executions( - workflow_run_id=workflow_run.id + workflow_run_id=execution.id ) # Update the domain models @@ -269,7 +192,7 @@ class WorkflowCycleManager: if domain_execution.node_execution_id: # Update the domain model domain_execution.status = NodeExecutionStatus.FAILED - domain_execution.error = error + domain_execution.error = error_message domain_execution.finished_at = now domain_execution.elapsed_time = (now - domain_execution.created_at).total_seconds() @@ -280,15 +203,22 @@ 
class WorkflowCycleManager: trace_manager.add_trace_task( TraceTask( TraceTaskName.WORKFLOW_TRACE, - workflow_run=workflow_run, + workflow_execution=execution, conversation_id=conversation_id, user_id=trace_manager.user_id, ) ) - return workflow_run + return execution + + def handle_node_execution_start( + self, + *, + workflow_execution_id: str, + event: QueueNodeStartedEvent, + ) -> NodeExecution: + workflow_execution = self._get_workflow_execution_or_raise_error(workflow_execution_id) - def _handle_node_execution_start(self, *, workflow_run: WorkflowRun, event: QueueNodeStartedEvent) -> NodeExecution: # Create a domain model created_at = datetime.now(UTC).replace(tzinfo=None) metadata = { @@ -299,8 +229,8 @@ class WorkflowCycleManager: domain_execution = NodeExecution( id=str(uuid4()), - workflow_id=workflow_run.workflow_id, - workflow_run_id=workflow_run.id, + workflow_id=workflow_execution.workflow_id, + workflow_run_id=workflow_execution.id, predecessor_node_id=event.predecessor_node_id, index=event.node_run_index, node_execution_id=event.node_execution_id, @@ -317,7 +247,7 @@ class WorkflowCycleManager: return domain_execution - def _handle_workflow_node_execution_success(self, *, event: QueueNodeSucceededEvent) -> NodeExecution: + def handle_workflow_node_execution_success(self, *, event: QueueNodeSucceededEvent) -> NodeExecution: # Get the domain model from repository domain_execution = self._workflow_node_execution_repository.get_by_node_execution_id(event.node_execution_id) if not domain_execution: @@ -350,7 +280,7 @@ class WorkflowCycleManager: return domain_execution - def _handle_workflow_node_execution_failed( + def handle_workflow_node_execution_failed( self, *, event: QueueNodeFailedEvent @@ -400,15 +330,10 @@ class WorkflowCycleManager: return domain_execution - def _handle_workflow_node_execution_retried( - self, *, workflow_run: WorkflowRun, event: QueueNodeRetryEvent + def handle_workflow_node_execution_retried( + self, *, 
workflow_execution_id: str, event: QueueNodeRetryEvent ) -> NodeExecution: - """ - Workflow node execution failed - :param workflow_run: workflow run - :param event: queue node failed event - :return: - """ + workflow_execution = self._get_workflow_execution_or_raise_error(workflow_execution_id) created_at = event.start_at finished_at = datetime.now(UTC).replace(tzinfo=None) elapsed_time = (finished_at - created_at).total_seconds() @@ -433,8 +358,8 @@ class WorkflowCycleManager: # Create a domain model domain_execution = NodeExecution( id=str(uuid4()), - workflow_id=workflow_run.workflow_id, - workflow_run_id=workflow_run.id, + workflow_id=workflow_execution.workflow_id, + workflow_run_id=workflow_execution.id, predecessor_node_id=event.predecessor_node_id, node_execution_id=event.node_execution_id, node_id=event.node_id, @@ -456,491 +381,8 @@ class WorkflowCycleManager: return domain_execution - def _workflow_start_to_stream_response( - self, - *, - session: Session, - task_id: str, - workflow_run: WorkflowRun, - ) -> WorkflowStartStreamResponse: - _ = session - return WorkflowStartStreamResponse( - task_id=task_id, - workflow_run_id=workflow_run.id, - data=WorkflowStartStreamResponse.Data( - id=workflow_run.id, - workflow_id=workflow_run.workflow_id, - sequence_number=workflow_run.sequence_number, - inputs=dict(workflow_run.inputs_dict or {}), - created_at=int(workflow_run.created_at.timestamp()), - ), - ) - - def _workflow_finish_to_stream_response( - self, - *, - session: Session, - task_id: str, - workflow_run: WorkflowRun, - ) -> WorkflowFinishStreamResponse: - created_by = None - if workflow_run.created_by_role == CreatorUserRole.ACCOUNT: - stmt = select(Account).where(Account.id == workflow_run.created_by) - account = session.scalar(stmt) - if account: - created_by = { - "id": account.id, - "name": account.name, - "email": account.email, - } - elif workflow_run.created_by_role == CreatorUserRole.END_USER: - stmt = select(EndUser).where(EndUser.id == 
workflow_run.created_by) - end_user = session.scalar(stmt) - if end_user: - created_by = { - "id": end_user.id, - "user": end_user.session_id, - } - else: - raise NotImplementedError(f"unknown created_by_role: {workflow_run.created_by_role}") - - return WorkflowFinishStreamResponse( - task_id=task_id, - workflow_run_id=workflow_run.id, - data=WorkflowFinishStreamResponse.Data( - id=workflow_run.id, - workflow_id=workflow_run.workflow_id, - sequence_number=workflow_run.sequence_number, - status=workflow_run.status, - outputs=dict(workflow_run.outputs_dict) if workflow_run.outputs_dict else None, - error=workflow_run.error, - elapsed_time=workflow_run.elapsed_time, - total_tokens=workflow_run.total_tokens, - total_steps=workflow_run.total_steps, - created_by=created_by, - created_at=int(workflow_run.created_at.timestamp()), - finished_at=int(workflow_run.finished_at.timestamp()), - files=self._fetch_files_from_node_outputs(dict(workflow_run.outputs_dict)), - exceptions_count=workflow_run.exceptions_count, - ), - ) - - def _workflow_node_start_to_stream_response( - self, - *, - event: QueueNodeStartedEvent, - task_id: str, - workflow_node_execution: NodeExecution, - ) -> Optional[NodeStartStreamResponse]: - if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: - return None - if not workflow_node_execution.workflow_run_id: - return None - - response = NodeStartStreamResponse( - task_id=task_id, - workflow_run_id=workflow_node_execution.workflow_run_id, - data=NodeStartStreamResponse.Data( - id=workflow_node_execution.id, - node_id=workflow_node_execution.node_id, - node_type=workflow_node_execution.node_type, - title=workflow_node_execution.title, - index=workflow_node_execution.index, - predecessor_node_id=workflow_node_execution.predecessor_node_id, - inputs=workflow_node_execution.inputs, - created_at=int(workflow_node_execution.created_at.timestamp()), - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - 
parent_parallel_id=event.parent_parallel_id, - parent_parallel_start_node_id=event.parent_parallel_start_node_id, - iteration_id=event.in_iteration_id, - loop_id=event.in_loop_id, - parallel_run_id=event.parallel_mode_run_id, - agent_strategy=event.agent_strategy, - ), - ) - - # extras logic - if event.node_type == NodeType.TOOL: - node_data = cast(ToolNodeData, event.node_data) - response.data.extras["icon"] = ToolManager.get_tool_icon( - tenant_id=self._application_generate_entity.app_config.tenant_id, - provider_type=node_data.provider_type, - provider_id=node_data.provider_id, - ) - - return response - - def _workflow_node_finish_to_stream_response( - self, - *, - event: QueueNodeSucceededEvent - | QueueNodeFailedEvent - | QueueNodeInIterationFailedEvent - | QueueNodeInLoopFailedEvent - | QueueNodeExceptionEvent, - task_id: str, - workflow_node_execution: NodeExecution, - ) -> Optional[NodeFinishStreamResponse]: - if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: - return None - if not workflow_node_execution.workflow_run_id: - return None - if not workflow_node_execution.finished_at: - return None - - return NodeFinishStreamResponse( - task_id=task_id, - workflow_run_id=workflow_node_execution.workflow_run_id, - data=NodeFinishStreamResponse.Data( - id=workflow_node_execution.id, - node_id=workflow_node_execution.node_id, - node_type=workflow_node_execution.node_type, - index=workflow_node_execution.index, - title=workflow_node_execution.title, - predecessor_node_id=workflow_node_execution.predecessor_node_id, - inputs=workflow_node_execution.inputs, - process_data=workflow_node_execution.process_data, - outputs=workflow_node_execution.outputs, - status=workflow_node_execution.status, - error=workflow_node_execution.error, - elapsed_time=workflow_node_execution.elapsed_time, - execution_metadata=workflow_node_execution.metadata, - created_at=int(workflow_node_execution.created_at.timestamp()), - 
finished_at=int(workflow_node_execution.finished_at.timestamp()), - files=self._fetch_files_from_node_outputs(workflow_node_execution.outputs or {}), - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - parent_parallel_id=event.parent_parallel_id, - parent_parallel_start_node_id=event.parent_parallel_start_node_id, - iteration_id=event.in_iteration_id, - loop_id=event.in_loop_id, - ), - ) - - def _workflow_node_retry_to_stream_response( - self, - *, - event: QueueNodeRetryEvent, - task_id: str, - workflow_node_execution: NodeExecution, - ) -> Optional[Union[NodeRetryStreamResponse, NodeFinishStreamResponse]]: - if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}: - return None - if not workflow_node_execution.workflow_run_id: - return None - if not workflow_node_execution.finished_at: - return None - - return NodeRetryStreamResponse( - task_id=task_id, - workflow_run_id=workflow_node_execution.workflow_run_id, - data=NodeRetryStreamResponse.Data( - id=workflow_node_execution.id, - node_id=workflow_node_execution.node_id, - node_type=workflow_node_execution.node_type, - index=workflow_node_execution.index, - title=workflow_node_execution.title, - predecessor_node_id=workflow_node_execution.predecessor_node_id, - inputs=workflow_node_execution.inputs, - process_data=workflow_node_execution.process_data, - outputs=workflow_node_execution.outputs, - status=workflow_node_execution.status, - error=workflow_node_execution.error, - elapsed_time=workflow_node_execution.elapsed_time, - execution_metadata=workflow_node_execution.metadata, - created_at=int(workflow_node_execution.created_at.timestamp()), - finished_at=int(workflow_node_execution.finished_at.timestamp()), - files=self._fetch_files_from_node_outputs(workflow_node_execution.outputs or {}), - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - parent_parallel_id=event.parent_parallel_id, - 
parent_parallel_start_node_id=event.parent_parallel_start_node_id, - iteration_id=event.in_iteration_id, - loop_id=event.in_loop_id, - retry_index=event.retry_index, - ), - ) - - def _workflow_parallel_branch_start_to_stream_response( - self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueParallelBranchRunStartedEvent - ) -> ParallelBranchStartStreamResponse: - _ = session - return ParallelBranchStartStreamResponse( - task_id=task_id, - workflow_run_id=workflow_run.id, - data=ParallelBranchStartStreamResponse.Data( - parallel_id=event.parallel_id, - parallel_branch_id=event.parallel_start_node_id, - parent_parallel_id=event.parent_parallel_id, - parent_parallel_start_node_id=event.parent_parallel_start_node_id, - iteration_id=event.in_iteration_id, - loop_id=event.in_loop_id, - created_at=int(time.time()), - ), - ) - - def _workflow_parallel_branch_finished_to_stream_response( - self, - *, - session: Session, - task_id: str, - workflow_run: WorkflowRun, - event: QueueParallelBranchRunSucceededEvent | QueueParallelBranchRunFailedEvent, - ) -> ParallelBranchFinishedStreamResponse: - _ = session - return ParallelBranchFinishedStreamResponse( - task_id=task_id, - workflow_run_id=workflow_run.id, - data=ParallelBranchFinishedStreamResponse.Data( - parallel_id=event.parallel_id, - parallel_branch_id=event.parallel_start_node_id, - parent_parallel_id=event.parent_parallel_id, - parent_parallel_start_node_id=event.parent_parallel_start_node_id, - iteration_id=event.in_iteration_id, - loop_id=event.in_loop_id, - status="succeeded" if isinstance(event, QueueParallelBranchRunSucceededEvent) else "failed", - error=event.error if isinstance(event, QueueParallelBranchRunFailedEvent) else None, - created_at=int(time.time()), - ), - ) - - def _workflow_iteration_start_to_stream_response( - self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueIterationStartEvent - ) -> IterationNodeStartStreamResponse: - _ = session - return 
IterationNodeStartStreamResponse( - task_id=task_id, - workflow_run_id=workflow_run.id, - data=IterationNodeStartStreamResponse.Data( - id=event.node_id, - node_id=event.node_id, - node_type=event.node_type.value, - title=event.node_data.title, - created_at=int(time.time()), - extras={}, - inputs=event.inputs or {}, - metadata=event.metadata or {}, - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - ), - ) - - def _workflow_iteration_next_to_stream_response( - self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueIterationNextEvent - ) -> IterationNodeNextStreamResponse: - _ = session - return IterationNodeNextStreamResponse( - task_id=task_id, - workflow_run_id=workflow_run.id, - data=IterationNodeNextStreamResponse.Data( - id=event.node_id, - node_id=event.node_id, - node_type=event.node_type.value, - title=event.node_data.title, - index=event.index, - pre_iteration_output=event.output, - created_at=int(time.time()), - extras={}, - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - parallel_mode_run_id=event.parallel_mode_run_id, - duration=event.duration, - ), - ) - - def _workflow_iteration_completed_to_stream_response( - self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueIterationCompletedEvent - ) -> IterationNodeCompletedStreamResponse: - _ = session - return IterationNodeCompletedStreamResponse( - task_id=task_id, - workflow_run_id=workflow_run.id, - data=IterationNodeCompletedStreamResponse.Data( - id=event.node_id, - node_id=event.node_id, - node_type=event.node_type.value, - title=event.node_data.title, - outputs=event.outputs, - created_at=int(time.time()), - extras={}, - inputs=event.inputs or {}, - status=WorkflowNodeExecutionStatus.SUCCEEDED - if event.error is None - else WorkflowNodeExecutionStatus.FAILED, - error=None, - elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(), - 
total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0, - execution_metadata=event.metadata, - finished_at=int(time.time()), - steps=event.steps, - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - ), - ) - - def _workflow_loop_start_to_stream_response( - self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueLoopStartEvent - ) -> LoopNodeStartStreamResponse: - _ = session - return LoopNodeStartStreamResponse( - task_id=task_id, - workflow_run_id=workflow_run.id, - data=LoopNodeStartStreamResponse.Data( - id=event.node_id, - node_id=event.node_id, - node_type=event.node_type.value, - title=event.node_data.title, - created_at=int(time.time()), - extras={}, - inputs=event.inputs or {}, - metadata=event.metadata or {}, - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - ), - ) - - def _workflow_loop_next_to_stream_response( - self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueLoopNextEvent - ) -> LoopNodeNextStreamResponse: - _ = session - return LoopNodeNextStreamResponse( - task_id=task_id, - workflow_run_id=workflow_run.id, - data=LoopNodeNextStreamResponse.Data( - id=event.node_id, - node_id=event.node_id, - node_type=event.node_type.value, - title=event.node_data.title, - index=event.index, - pre_loop_output=event.output, - created_at=int(time.time()), - extras={}, - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - parallel_mode_run_id=event.parallel_mode_run_id, - duration=event.duration, - ), - ) - - def _workflow_loop_completed_to_stream_response( - self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueLoopCompletedEvent - ) -> LoopNodeCompletedStreamResponse: - _ = session - return LoopNodeCompletedStreamResponse( - task_id=task_id, - workflow_run_id=workflow_run.id, - data=LoopNodeCompletedStreamResponse.Data( - id=event.node_id, - 
node_id=event.node_id, - node_type=event.node_type.value, - title=event.node_data.title, - outputs=event.outputs, - created_at=int(time.time()), - extras={}, - inputs=event.inputs or {}, - status=WorkflowNodeExecutionStatus.SUCCEEDED - if event.error is None - else WorkflowNodeExecutionStatus.FAILED, - error=None, - elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(), - total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0, - execution_metadata=event.metadata, - finished_at=int(time.time()), - steps=event.steps, - parallel_id=event.parallel_id, - parallel_start_node_id=event.parallel_start_node_id, - ), - ) - - def _fetch_files_from_node_outputs(self, outputs_dict: Mapping[str, Any]) -> Sequence[Mapping[str, Any]]: - """ - Fetch files from node outputs - :param outputs_dict: node outputs dict - :return: - """ - if not outputs_dict: - return [] - - files = [self._fetch_files_from_variable_value(output_value) for output_value in outputs_dict.values()] - # Remove None - files = [file for file in files if file] - # Flatten list - # Flatten the list of sequences into a single list of mappings - flattened_files = [file for sublist in files if sublist for file in sublist] - - # Convert to tuple to match Sequence type - return tuple(flattened_files) - - def _fetch_files_from_variable_value(self, value: Union[dict, list]) -> Sequence[Mapping[str, Any]]: - """ - Fetch files from variable value - :param value: variable value - :return: - """ - if not value: - return [] - - files = [] - if isinstance(value, list): - for item in value: - file = self._get_file_var_from_value(item) - if file: - files.append(file) - elif isinstance(value, dict): - file = self._get_file_var_from_value(value) - if file: - files.append(file) - - return files - - def _get_file_var_from_value(self, value: Union[dict, list]) -> Mapping[str, Any] | None: - """ - Get file var from value - :param value: variable value - :return: - """ - if not value: 
- return None - - if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY: - return value - elif isinstance(value, File): - return value.to_dict() - - return None - - def _get_workflow_run(self, *, session: Session, workflow_run_id: str) -> WorkflowRun: - if self._workflow_run and self._workflow_run.id == workflow_run_id: - cached_workflow_run = self._workflow_run - cached_workflow_run = session.merge(cached_workflow_run) - return cached_workflow_run - stmt = select(WorkflowRun).where(WorkflowRun.id == workflow_run_id) - workflow_run = session.scalar(stmt) - if not workflow_run: - raise WorkflowRunNotFoundError(workflow_run_id) - self._workflow_run = workflow_run - - return workflow_run - - def _handle_agent_log(self, task_id: str, event: QueueAgentLogEvent) -> AgentLogStreamResponse: - """ - Handle agent log - :param task_id: task id - :param event: agent log event - :return: - """ - return AgentLogStreamResponse( - task_id=task_id, - data=AgentLogStreamResponse.Data( - node_execution_id=event.node_execution_id, - id=event.id, - parent_id=event.parent_id, - label=event.label, - error=event.error, - status=event.status, - data=event.data, - metadata=event.metadata, - node_id=event.node_id, - ), - ) + def _get_workflow_execution_or_raise_error(self, id: str, /) -> WorkflowExecution: + execution = self._workflow_execution_repository.get(id) + if not execution: + raise WorkflowRunNotFoundError(id) + return execution diff --git a/api/extensions/ext_login.py b/api/extensions/ext_login.py index 10fb89eb73..80fee7ccd8 100644 --- a/api/extensions/ext_login.py +++ b/api/extensions/ext_login.py @@ -3,11 +3,14 @@ import json import flask_login # type: ignore from flask import Response, request from flask_login import user_loaded_from_request, user_logged_in -from werkzeug.exceptions import Unauthorized +from werkzeug.exceptions import NotFound, Unauthorized import contexts from dify_app import DifyApp +from extensions.ext_database import db from 
libs.passport import PassportService +from models.account import Account +from models.model import EndUser from services.account_service import AccountService login_manager = flask_login.LoginManager() @@ -17,34 +20,48 @@ login_manager = flask_login.LoginManager() @login_manager.request_loader def load_user_from_request(request_from_flask_login): """Load user based on the request.""" - if request.blueprint not in {"console", "inner_api"}: - return None - # Check if the user_id contains a dot, indicating the old format auth_header = request.headers.get("Authorization", "") - if not auth_header: - auth_token = request.args.get("_token") - if not auth_token: - raise Unauthorized("Invalid Authorization token.") - else: + auth_token: str | None = None + if auth_header: if " " not in auth_header: raise Unauthorized("Invalid Authorization header format. Expected 'Bearer ' format.") - auth_scheme, auth_token = auth_header.split(None, 1) + auth_scheme, auth_token = auth_header.split(maxsplit=1) auth_scheme = auth_scheme.lower() if auth_scheme != "bearer": raise Unauthorized("Invalid Authorization header format. 
Expected 'Bearer ' format.") + else: + auth_token = request.args.get("_token") - decoded = PassportService().verify(auth_token) - user_id = decoded.get("user_id") + if request.blueprint in {"console", "inner_api"}: + if not auth_token: + raise Unauthorized("Invalid Authorization token.") + decoded = PassportService().verify(auth_token) + user_id = decoded.get("user_id") + if not user_id: + raise Unauthorized("Invalid Authorization token.") - logged_in_account = AccountService.load_logged_in_account(account_id=user_id) - return logged_in_account + logged_in_account = AccountService.load_logged_in_account(account_id=user_id) + return logged_in_account + elif request.blueprint == "web": + decoded = PassportService().verify(auth_token) + end_user_id = decoded.get("end_user_id") + if not end_user_id: + raise Unauthorized("Invalid Authorization token.") + end_user = db.session.query(EndUser).filter(EndUser.id == decoded["end_user_id"]).first() + if not end_user: + raise NotFound("End user not found.") + return end_user @user_logged_in.connect @user_loaded_from_request.connect def on_user_logged_in(_sender, user): - """Called when a user logged in.""" - if user: + """Called when a user logged in. + + Note: AccountService.load_logged_in_account will populate user.current_tenant_id + through the load_user method, which calls account.set_tenant_id(). 
+ """ + if user and isinstance(user, Account) and user.current_tenant_id: contexts.tenant_id.set(user.current_tenant_id) diff --git a/api/factories/variable_factory.py b/api/factories/variable_factory.py index 69a786e2f5..0757496db7 100644 --- a/api/factories/variable_factory.py +++ b/api/factories/variable_factory.py @@ -39,7 +39,6 @@ from core.variables.variables import ( from core.workflow.constants import ( CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, - PIPELINE_VARIABLE_NODE_ID, ) diff --git a/api/models/account.py b/api/models/account.py index bb6a2a4735..7ffeefa980 100644 --- a/api/models/account.py +++ b/api/models/account.py @@ -1,10 +1,10 @@ import enum import json -from typing import cast +from typing import Optional, cast from flask_login import UserMixin # type: ignore from sqlalchemy import func -from sqlalchemy.orm import Mapped, mapped_column +from sqlalchemy.orm import Mapped, mapped_column, reconstructor from models.base import Base @@ -12,6 +12,66 @@ from .engine import db from .types import StringUUID +class TenantAccountRole(enum.StrEnum): + OWNER = "owner" + ADMIN = "admin" + EDITOR = "editor" + NORMAL = "normal" + DATASET_OPERATOR = "dataset_operator" + + @staticmethod + def is_valid_role(role: str) -> bool: + if not role: + return False + return role in { + TenantAccountRole.OWNER, + TenantAccountRole.ADMIN, + TenantAccountRole.EDITOR, + TenantAccountRole.NORMAL, + TenantAccountRole.DATASET_OPERATOR, + } + + @staticmethod + def is_privileged_role(role: Optional["TenantAccountRole"]) -> bool: + if not role: + return False + return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN} + + @staticmethod + def is_admin_role(role: Optional["TenantAccountRole"]) -> bool: + if not role: + return False + return role == TenantAccountRole.ADMIN + + @staticmethod + def is_non_owner_role(role: Optional["TenantAccountRole"]) -> bool: + if not role: + return False + return role in { + TenantAccountRole.ADMIN, + 
TenantAccountRole.EDITOR, + TenantAccountRole.NORMAL, + TenantAccountRole.DATASET_OPERATOR, + } + + @staticmethod + def is_editing_role(role: Optional["TenantAccountRole"]) -> bool: + if not role: + return False + return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN, TenantAccountRole.EDITOR} + + @staticmethod + def is_dataset_edit_role(role: Optional["TenantAccountRole"]) -> bool: + if not role: + return False + return role in { + TenantAccountRole.OWNER, + TenantAccountRole.ADMIN, + TenantAccountRole.EDITOR, + TenantAccountRole.DATASET_OPERATOR, + } + + class AccountStatus(enum.StrEnum): PENDING = "pending" UNINITIALIZED = "uninitialized" @@ -41,24 +101,27 @@ class Account(UserMixin, Base): created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + @reconstructor + def init_on_load(self): + self.role: Optional[TenantAccountRole] = None + self._current_tenant: Optional[Tenant] = None + @property def is_password_set(self): return self.password is not None @property def current_tenant(self): - return self._current_tenant # type: ignore + return self._current_tenant @current_tenant.setter - def current_tenant(self, value: "Tenant"): - tenant = value + def current_tenant(self, tenant: "Tenant"): ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=self.id).first() if ta: - tenant.current_role = ta.role - else: - tenant = None # type: ignore - - self._current_tenant = tenant + self.role = TenantAccountRole(ta.role) + self._current_tenant = tenant + return + self._current_tenant = None @property def current_tenant_id(self) -> str | None: @@ -80,12 +143,12 @@ class Account(UserMixin, Base): return tenant, join = tenant_account_join - tenant.current_role = join.role + self.role = join.role self._current_tenant = tenant @property def current_role(self): - return self._current_tenant.current_role + return 
self.role def get_status(self) -> AccountStatus: status_str = self.status @@ -105,23 +168,23 @@ class Account(UserMixin, Base): # check current_user.current_tenant.current_role in ['admin', 'owner'] @property def is_admin_or_owner(self): - return TenantAccountRole.is_privileged_role(self._current_tenant.current_role) + return TenantAccountRole.is_privileged_role(self.role) @property def is_admin(self): - return TenantAccountRole.is_admin_role(self._current_tenant.current_role) + return TenantAccountRole.is_admin_role(self.role) @property def is_editor(self): - return TenantAccountRole.is_editing_role(self._current_tenant.current_role) + return TenantAccountRole.is_editing_role(self.role) @property def is_dataset_editor(self): - return TenantAccountRole.is_dataset_edit_role(self._current_tenant.current_role) + return TenantAccountRole.is_dataset_edit_role(self.role) @property def is_dataset_operator(self): - return self._current_tenant.current_role == TenantAccountRole.DATASET_OPERATOR + return self.role == TenantAccountRole.DATASET_OPERATOR class TenantStatus(enum.StrEnum): @@ -129,66 +192,6 @@ class TenantStatus(enum.StrEnum): ARCHIVE = "archive" -class TenantAccountRole(enum.StrEnum): - OWNER = "owner" - ADMIN = "admin" - EDITOR = "editor" - NORMAL = "normal" - DATASET_OPERATOR = "dataset_operator" - - @staticmethod - def is_valid_role(role: str) -> bool: - if not role: - return False - return role in { - TenantAccountRole.OWNER, - TenantAccountRole.ADMIN, - TenantAccountRole.EDITOR, - TenantAccountRole.NORMAL, - TenantAccountRole.DATASET_OPERATOR, - } - - @staticmethod - def is_privileged_role(role: str) -> bool: - if not role: - return False - return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN} - - @staticmethod - def is_admin_role(role: str) -> bool: - if not role: - return False - return role == TenantAccountRole.ADMIN - - @staticmethod - def is_non_owner_role(role: str) -> bool: - if not role: - return False - return role in { - 
TenantAccountRole.ADMIN, - TenantAccountRole.EDITOR, - TenantAccountRole.NORMAL, - TenantAccountRole.DATASET_OPERATOR, - } - - @staticmethod - def is_editing_role(role: str) -> bool: - if not role: - return False - return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN, TenantAccountRole.EDITOR} - - @staticmethod - def is_dataset_edit_role(role: str) -> bool: - if not role: - return False - return role in { - TenantAccountRole.OWNER, - TenantAccountRole.ADMIN, - TenantAccountRole.EDITOR, - TenantAccountRole.DATASET_OPERATOR, - } - - class Tenant(Base): __tablename__ = "tenants" __table_args__ = (db.PrimaryKeyConstraint("id", name="tenant_pkey"),) diff --git a/api/models/base.py b/api/models/base.py index da9509301a..bd120f5487 100644 --- a/api/models/base.py +++ b/api/models/base.py @@ -1,5 +1,7 @@ -from sqlalchemy.orm import declarative_base +from sqlalchemy.orm import DeclarativeBase from models.engine import metadata -Base = declarative_base(metadata=metadata) + +class Base(DeclarativeBase): + metadata = metadata diff --git a/api/models/tools.py b/api/models/tools.py index f92da9a60b..7daa06834f 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -206,10 +206,6 @@ class WorkflowToolProvider(Base): db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)") ) - @property - def schema_type(self) -> ApiProviderSchemaType: - return ApiProviderSchemaType.value_of(self.schema_type_str) - @property def user(self) -> Account | None: return db.session.query(Account).filter(Account.id == self.user_id).first() diff --git a/api/models/workflow.py b/api/models/workflow.py index d5cf71841e..0a9982ed09 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -3,7 +3,7 @@ import logging from collections.abc import Mapping, Sequence from datetime import UTC, datetime from enum import Enum, StrEnum -from typing import TYPE_CHECKING, Any, List, Optional, Self, Union +from typing import TYPE_CHECKING, Any, Optional, Union from uuid 
import uuid4 from core.variables import utils as variable_utils @@ -154,7 +154,7 @@ class Workflow(Base): conversation_variables: Sequence[Variable], marked_name: str = "", marked_comment: str = "", - ) -> Self: + ) -> "Workflow": workflow = Workflow() workflow.id = str(uuid4()) workflow.tenant_id = tenant_id @@ -447,14 +447,14 @@ class WorkflowRun(Base): status: Mapped[str] = mapped_column(db.String(255)) # running, succeeded, failed, stopped, partial-succeeded outputs: Mapped[Optional[str]] = mapped_column(sa.Text, default="{}") error: Mapped[Optional[str]] = mapped_column(db.Text) - elapsed_time = db.Column(db.Float, nullable=False, server_default=sa.text("0")) + elapsed_time: Mapped[float] = mapped_column(db.Float, nullable=False, server_default=sa.text("0")) total_tokens: Mapped[int] = mapped_column(sa.BigInteger, server_default=sa.text("0")) - total_steps = db.Column(db.Integer, server_default=db.text("0")) + total_steps: Mapped[int] = mapped_column(db.Integer, server_default=db.text("0")) created_by_role: Mapped[str] = mapped_column(db.String(255)) # account, end_user - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) - finished_at = db.Column(db.DateTime) - exceptions_count = db.Column(db.Integer, server_default=db.text("0")) + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + finished_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime) + exceptions_count: Mapped[int] = mapped_column(db.Integer, server_default=db.text("0")) @property def created_by_account(self): @@ -469,7 +469,7 @@ class WorkflowRun(Base): return db.session.get(EndUser, self.created_by) if created_by_role == CreatorUserRole.END_USER else None @property - def graph_dict(self): + def graph_dict(self) -> Mapping[str, Any]: return json.loads(self.graph) if 
self.graph else {} @property @@ -688,8 +688,11 @@ class WorkflowNodeExecution(Base): return json.loads(self.process_data) if self.process_data else None @property - def execution_metadata_dict(self) -> dict[str, Any] | None: - return json.loads(self.execution_metadata) if self.execution_metadata else None + def execution_metadata_dict(self) -> dict[str, Any]: + # When the metadata is unset, we return an empty dictionary instead of `None`. + # This approach streamlines the logic for the caller, making it easier to handle + # cases where metadata is absent. + return json.loads(self.execution_metadata) if self.execution_metadata else {} @property def extras(self): @@ -771,12 +774,12 @@ class WorkflowAppLog(Base): id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) tenant_id: Mapped[str] = mapped_column(StringUUID) app_id: Mapped[str] = mapped_column(StringUUID) - workflow_id = db.Column(StringUUID, nullable=False) + workflow_id: Mapped[str] = mapped_column(StringUUID, nullable=False) workflow_run_id: Mapped[str] = mapped_column(StringUUID) - created_from = db.Column(db.String(255), nullable=False) - created_by_role = db.Column(db.String(255), nullable=False) - created_by = db.Column(StringUUID, nullable=False) - created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + created_from: Mapped[str] = mapped_column(db.String(255), nullable=False) + created_by_role: Mapped[str] = mapped_column(db.String(255), nullable=False) + created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) + created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp()) @property def workflow_run(self): @@ -801,9 +804,11 @@ class ConversationVariable(Base): id: Mapped[str] = mapped_column(StringUUID, primary_key=True) conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False, primary_key=True, index=True) app_id: Mapped[str] = 
mapped_column(StringUUID, nullable=False, index=True) - data = mapped_column(db.Text, nullable=False) - created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp(), index=True) - updated_at = mapped_column( + data: Mapped[str] = mapped_column(db.Text, nullable=False) + created_at: Mapped[datetime] = mapped_column( + db.DateTime, nullable=False, server_default=func.current_timestamp(), index=True + ) + updated_at: Mapped[datetime] = mapped_column( db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp() ) @@ -851,14 +856,14 @@ class WorkflowDraftVariable(Base): # id is the unique identifier of a draft variable. id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) - created_at = mapped_column( + created_at: Mapped[datetime] = mapped_column( db.DateTime, nullable=False, default=_naive_utc_datetime, server_default=func.current_timestamp(), ) - updated_at = mapped_column( + updated_at: Mapped[datetime] = mapped_column( db.DateTime, nullable=False, default=_naive_utc_datetime, diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index 81db03033f..0fe17f9a33 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -1043,11 +1043,11 @@ class DocumentService: "score_threshold_enabled": False, } - dataset.retrieval_model = ( - knowledge_config.retrieval_model.model_dump() - if knowledge_config.retrieval_model - else default_retrieval_model - ) # type: ignore + dataset.retrieval_model = ( + knowledge_config.retrieval_model.model_dump() + if knowledge_config.retrieval_model + else default_retrieval_model + ) # type: ignore documents = [] if knowledge_config.original_document_id: diff --git a/api/services/vector_service.py b/api/services/vector_service.py index 58292c59f4..19e37f4ee3 100644 --- a/api/services/vector_service.py +++ b/api/services/vector_service.py @@ -23,11 +23,10 @@ class 
VectorService: ): documents: list[Document] = [] - document: Document | None = None for segment in segments: if doc_form == IndexType.PARENT_CHILD_INDEX: - document = db.session.query(DatasetDocument).filter_by(id=segment.document_id).first() - if not document: + dataset_document = db.session.query(DatasetDocument).filter_by(id=segment.document_id).first() + if not dataset_document: _logger.warning( "Expected DatasetDocument record to exist, but none was found, document_id=%s, segment_id=%s", segment.document_id, @@ -37,7 +36,7 @@ class VectorService: # get the process rule processing_rule = ( db.session.query(DatasetProcessRule) - .filter(DatasetProcessRule.id == document.dataset_process_rule_id) + .filter(DatasetProcessRule.id == dataset_document.dataset_process_rule_id) .first() ) if not processing_rule: @@ -61,9 +60,11 @@ class VectorService: ) else: raise ValueError("The knowledge base index technique is not high quality!") - cls.generate_child_chunks(segment, document, dataset, embedding_model_instance, processing_rule, False) + cls.generate_child_chunks( + segment, dataset_document, dataset, embedding_model_instance, processing_rule, False + ) else: - document = Document( + rag_document = Document( page_content=segment.content, metadata={ "doc_id": segment.index_node_id, @@ -72,7 +73,7 @@ class VectorService: "dataset_id": segment.dataset_id, }, ) - documents.append(document) + documents.append(rag_document) if len(documents) > 0: index_processor = IndexProcessorFactory(doc_form).init_index_processor() index_processor.load(dataset, documents, with_keywords=True, keywords_list=keywords_list) diff --git a/api/services/workflow_run_service.py b/api/services/workflow_run_service.py index 3efdb3a79d..21366a4552 100644 --- a/api/services/workflow_run_service.py +++ b/api/services/workflow_run_service.py @@ -15,6 +15,7 @@ from models import ( WorkflowRun, WorkflowRunTriggeredFrom, ) +from models.workflow import WorkflowNodeExecutionTriggeredFrom class 
WorkflowRunService: @@ -140,14 +141,13 @@ class WorkflowRunService: session_factory=db.engine, user=user, app_id=app_model.id, - triggered_from=None, + triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN, ) - # Use the repository to get the node executions with ordering + # Use the repository to get the database models directly order_config = OrderConfig(order_by=["index"], order_direction="desc") - node_executions = repository.get_by_workflow_run(workflow_run_id=run_id, order_config=order_config) - - # Convert domain models to database models - workflow_node_executions = [repository.to_db_model(node_execution) for node_execution in node_executions] + workflow_node_executions = repository.get_db_models_by_workflow_run( + workflow_run_id=run_id, order_config=order_config + ) return workflow_node_executions diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index c34560a911..2f7defa2ad 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -507,11 +507,11 @@ class WorkflowService: raise DraftWorkflowDeletionError("Cannot delete draft workflow versions") # Check if this workflow is currently referenced by an app - stmt = select(App).where(App.workflow_id == workflow_id) - app = session.scalar(stmt) + app_stmt = select(App).where(App.workflow_id == workflow_id) + app = session.scalar(app_stmt) if app: # Cannot delete a workflow that's currently in use by an app - raise WorkflowInUseError(f"Cannot delete workflow that is currently in use by app '{app.name}'") + raise WorkflowInUseError(f"Cannot delete workflow that is currently in use by app '{app.id}'") # Don't use workflow.tool_published as it's not accurate for specific workflow versions # Check if there's a tool provider using this specific workflow version diff --git a/api/tasks/add_document_to_index_task.py b/api/tasks/add_document_to_index_task.py index be88881efc..75d648e1b7 100644 --- a/api/tasks/add_document_to_index_task.py +++ 
b/api/tasks/add_document_to_index_task.py @@ -111,7 +111,7 @@ def add_document_to_index_task(dataset_document_id: str): logging.exception("add document to index failed") dataset_document.enabled = False dataset_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) - dataset_document.status = "error" + dataset_document.indexing_status = "error" dataset_document.error = str(e) db.session.commit() finally: diff --git a/api/tasks/remove_app_and_related_data_task.py b/api/tasks/remove_app_and_related_data_task.py index d9c1980d3f..ae6f923ca8 100644 --- a/api/tasks/remove_app_and_related_data_task.py +++ b/api/tasks/remove_app_and_related_data_task.py @@ -193,7 +193,7 @@ def _delete_app_workflow_runs(tenant_id: str, app_id: str): def _delete_app_workflow_node_executions(tenant_id: str, app_id: str): # Get app's owner with Session(db.engine, expire_on_commit=False) as session: - stmt = select(Account).where(Account.id == App.owner_id).where(App.id == app_id) + stmt = select(Account).where(Account.id == App.created_by).where(App.id == app_id) user = session.scalar(stmt) if user is None: diff --git a/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py b/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py index 15a9e8e9f4..fa6fc3ba32 100644 --- a/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py +++ b/api/tests/unit_tests/core/tools/workflow_as_tool/test_tool.py @@ -34,13 +34,13 @@ def test_workflow_tool_should_raise_tool_invoke_error_when_result_has_error_fiel # needs to patch those methods to avoid database access. monkeypatch.setattr(tool, "_get_app", lambda *args, **kwargs: None) monkeypatch.setattr(tool, "_get_workflow", lambda *args, **kwargs: None) - monkeypatch.setattr(tool, "_get_user", lambda *args, **kwargs: None) # replace `WorkflowAppGenerator.generate` 's return value. 
monkeypatch.setattr( "core.app.apps.workflow.app_generator.WorkflowAppGenerator.generate", lambda *args, **kwargs: {"data": {"error": "oops"}}, ) + monkeypatch.setattr("flask_login.current_user", lambda *args, **kwargs: None) with pytest.raises(ToolInvokeError) as exc_info: # WorkflowTool always returns a generator, so we need to iterate to diff --git a/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py b/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py index 94b9d3e2c6..9c955fc086 100644 --- a/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py +++ b/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py @@ -1,45 +1,73 @@ import json -import time from datetime import UTC, datetime -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock import pytest from sqlalchemy.orm import Session +from core.app.app_config.entities import AppAdditionalFeatures, WorkflowUIBasedAppConfig from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom from core.app.entities.queue_entities import ( QueueNodeFailedEvent, QueueNodeStartedEvent, QueueNodeSucceededEvent, ) +from core.workflow.entities.node_entities import NodeRunMetadataKey +from core.workflow.entities.node_execution_entities import NodeExecution, NodeExecutionStatus +from core.workflow.entities.workflow_execution_entities import WorkflowExecution, WorkflowExecutionStatus, WorkflowType from core.workflow.enums import SystemVariableKey from core.workflow.nodes import NodeType +from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository from core.workflow.workflow_cycle_manager import WorkflowCycleManager from models.enums import CreatorUserRole +from models.model import AppMode from models.workflow import ( Workflow, - WorkflowNodeExecutionStatus, WorkflowRun, 
WorkflowRunStatus, ) @pytest.fixture -def mock_app_generate_entity(): - entity = MagicMock(spec=AdvancedChatAppGenerateEntity) - entity.inputs = {"query": "test query"} - entity.invoke_from = InvokeFrom.WEB_APP - # Create app_config as a separate mock - app_config = MagicMock() - app_config.tenant_id = "test-tenant-id" - app_config.app_id = "test-app-id" - entity.app_config = app_config +def real_app_generate_entity(): + additional_features = AppAdditionalFeatures( + file_upload=None, + opening_statement=None, + suggested_questions=[], + suggested_questions_after_answer=False, + show_retrieve_source=False, + more_like_this=False, + speech_to_text=False, + text_to_speech=None, + trace_config=None, + ) + + app_config = WorkflowUIBasedAppConfig( + tenant_id="test-tenant-id", + app_id="test-app-id", + app_mode=AppMode.WORKFLOW, + additional_features=additional_features, + workflow_id="test-workflow-id", + ) + + entity = AdvancedChatAppGenerateEntity( + task_id="test-task-id", + app_config=app_config, + inputs={"query": "test query"}, + files=[], + user_id="test-user-id", + stream=False, + invoke_from=InvokeFrom.WEB_APP, + query="test query", + conversation_id="test-conversation-id", + ) + return entity @pytest.fixture -def mock_workflow_system_variables(): +def real_workflow_system_variables(): return { SystemVariableKey.QUERY: "test query", SystemVariableKey.CONVERSATION_ID: "test-conversation-id", @@ -59,10 +87,23 @@ def mock_node_execution_repository(): @pytest.fixture -def workflow_cycle_manager(mock_app_generate_entity, mock_workflow_system_variables, mock_node_execution_repository): +def mock_workflow_execution_repository(): + repo = MagicMock(spec=WorkflowExecutionRepository) + repo.get.return_value = None + return repo + + +@pytest.fixture +def workflow_cycle_manager( + real_app_generate_entity, + real_workflow_system_variables, + mock_workflow_execution_repository, + mock_node_execution_repository, +): return WorkflowCycleManager( - 
application_generate_entity=mock_app_generate_entity, - workflow_system_variables=mock_workflow_system_variables, + application_generate_entity=real_app_generate_entity, + workflow_system_variables=real_workflow_system_variables, + workflow_execution_repository=mock_workflow_execution_repository, workflow_node_execution_repository=mock_node_execution_repository, ) @@ -74,121 +115,173 @@ def mock_session(): @pytest.fixture -def mock_workflow(): - workflow = MagicMock(spec=Workflow) +def real_workflow(): + workflow = Workflow() workflow.id = "test-workflow-id" workflow.tenant_id = "test-tenant-id" workflow.app_id = "test-app-id" workflow.type = "chat" workflow.version = "1.0" - workflow.graph = json.dumps({"nodes": [], "edges": []}) + + graph_data = {"nodes": [], "edges": []} + workflow.graph = json.dumps(graph_data) + workflow.features = json.dumps({"file_upload": {"enabled": False}}) + workflow.created_by = "test-user-id" + workflow.created_at = datetime.now(UTC).replace(tzinfo=None) + workflow.updated_at = datetime.now(UTC).replace(tzinfo=None) + workflow._environment_variables = "{}" + workflow._conversation_variables = "{}" + return workflow @pytest.fixture -def mock_workflow_run(): - workflow_run = MagicMock(spec=WorkflowRun) +def real_workflow_run(): + workflow_run = WorkflowRun() workflow_run.id = "test-workflow-run-id" workflow_run.tenant_id = "test-tenant-id" workflow_run.app_id = "test-app-id" workflow_run.workflow_id = "test-workflow-id" + workflow_run.sequence_number = 1 + workflow_run.type = "chat" + workflow_run.triggered_from = "app-run" + workflow_run.version = "1.0" + workflow_run.graph = json.dumps({"nodes": [], "edges": []}) + workflow_run.inputs = json.dumps({"query": "test query"}) workflow_run.status = WorkflowRunStatus.RUNNING + workflow_run.outputs = json.dumps({"answer": "test answer"}) workflow_run.created_by_role = CreatorUserRole.ACCOUNT workflow_run.created_by = "test-user-id" workflow_run.created_at = 
datetime.now(UTC).replace(tzinfo=None) - workflow_run.inputs_dict = {"query": "test query"} - workflow_run.outputs_dict = {"answer": "test answer"} + return workflow_run def test_init( - workflow_cycle_manager, mock_app_generate_entity, mock_workflow_system_variables, mock_node_execution_repository + workflow_cycle_manager, + real_app_generate_entity, + real_workflow_system_variables, + mock_workflow_execution_repository, + mock_node_execution_repository, ): """Test initialization of WorkflowCycleManager""" - assert workflow_cycle_manager._workflow_run is None - assert workflow_cycle_manager._application_generate_entity == mock_app_generate_entity - assert workflow_cycle_manager._workflow_system_variables == mock_workflow_system_variables + assert workflow_cycle_manager._application_generate_entity == real_app_generate_entity + assert workflow_cycle_manager._workflow_system_variables == real_workflow_system_variables + assert workflow_cycle_manager._workflow_execution_repository == mock_workflow_execution_repository assert workflow_cycle_manager._workflow_node_execution_repository == mock_node_execution_repository -def test_handle_workflow_run_start(workflow_cycle_manager, mock_session, mock_workflow): - """Test _handle_workflow_run_start method""" +def test_handle_workflow_run_start(workflow_cycle_manager, mock_session, real_workflow): + """Test handle_workflow_run_start method""" # Mock session.scalar to return the workflow and max sequence - mock_session.scalar.side_effect = [mock_workflow, 5] + mock_session.scalar.side_effect = [real_workflow, 5] # Call the method - workflow_run = workflow_cycle_manager._handle_workflow_run_start( + workflow_execution = workflow_cycle_manager.handle_workflow_run_start( session=mock_session, workflow_id="test-workflow-id", - user_id="test-user-id", - created_by_role=CreatorUserRole.ACCOUNT, ) # Verify the result - assert workflow_run.tenant_id == mock_workflow.tenant_id - assert workflow_run.app_id == mock_workflow.app_id - 
assert workflow_run.workflow_id == mock_workflow.id - assert workflow_run.sequence_number == 6 # max_sequence + 1 - assert workflow_run.status == WorkflowRunStatus.RUNNING - assert workflow_run.created_by_role == CreatorUserRole.ACCOUNT - assert workflow_run.created_by == "test-user-id" + assert workflow_execution.workflow_id == real_workflow.id + assert workflow_execution.sequence_number == 6 # max_sequence + 1 - # Verify session.add was called - mock_session.add.assert_called_once_with(workflow_run) + # Verify the workflow_execution_repository.save was called + workflow_cycle_manager._workflow_execution_repository.save.assert_called_once_with(workflow_execution) -def test_handle_workflow_run_success(workflow_cycle_manager, mock_session, mock_workflow_run): - """Test _handle_workflow_run_success method""" - # Mock _get_workflow_run to return the mock_workflow_run - with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run): - # Call the method - result = workflow_cycle_manager._handle_workflow_run_success( - session=mock_session, - workflow_run_id="test-workflow-run-id", - start_at=time.perf_counter() - 10, # 10 seconds ago - total_tokens=100, - total_steps=5, - outputs={"answer": "test answer"}, - ) +def test_handle_workflow_run_success(workflow_cycle_manager, mock_workflow_execution_repository): + """Test handle_workflow_run_success method""" + # Create a real WorkflowExecution - # Verify the result - assert result == mock_workflow_run - assert result.status == WorkflowRunStatus.SUCCEEDED - assert result.outputs == json.dumps({"answer": "test answer"}) - assert result.total_tokens == 100 - assert result.total_steps == 5 - assert result.finished_at is not None + workflow_execution = WorkflowExecution( + id="test-workflow-run-id", + workflow_id="test-workflow-id", + workflow_version="1.0", + sequence_number=1, + type=WorkflowType.CHAT, + graph={"nodes": [], "edges": []}, + inputs={"query": "test query"}, + 
started_at=datetime.now(UTC).replace(tzinfo=None), + ) + + # Mock _get_workflow_execution_or_raise_error to return the real workflow_execution + workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution + + # Call the method + result = workflow_cycle_manager.handle_workflow_run_success( + workflow_run_id="test-workflow-run-id", + total_tokens=100, + total_steps=5, + outputs={"answer": "test answer"}, + ) + + # Verify the result + assert result == workflow_execution + assert result.status == WorkflowExecutionStatus.SUCCEEDED + assert result.outputs == {"answer": "test answer"} + assert result.total_tokens == 100 + assert result.total_steps == 5 + assert result.finished_at is not None -def test_handle_workflow_run_failed(workflow_cycle_manager, mock_session, mock_workflow_run): - """Test _handle_workflow_run_failed method""" - # Mock _get_workflow_run to return the mock_workflow_run - with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run): - # Mock get_running_executions to return an empty list - workflow_cycle_manager._workflow_node_execution_repository.get_running_executions.return_value = [] +def test_handle_workflow_run_failed(workflow_cycle_manager, mock_workflow_execution_repository): + """Test handle_workflow_run_failed method""" + # Create a real WorkflowExecution - # Call the method - result = workflow_cycle_manager._handle_workflow_run_failed( - session=mock_session, - workflow_run_id="test-workflow-run-id", - start_at=time.perf_counter() - 10, # 10 seconds ago - total_tokens=50, - total_steps=3, - status=WorkflowRunStatus.FAILED, - error="Test error message", - ) + workflow_execution = WorkflowExecution( + id="test-workflow-run-id", + workflow_id="test-workflow-id", + workflow_version="1.0", + sequence_number=1, + type=WorkflowType.CHAT, + graph={"nodes": [], "edges": []}, + inputs={"query": "test query"}, + started_at=datetime.now(UTC).replace(tzinfo=None), + ) - # Verify the result 
- assert result == mock_workflow_run - assert result.status == WorkflowRunStatus.FAILED.value - assert result.error == "Test error message" - assert result.total_tokens == 50 - assert result.total_steps == 3 - assert result.finished_at is not None + # Mock _get_workflow_execution_or_raise_error to return the real workflow_execution + workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution + + # Mock get_running_executions to return an empty list + workflow_cycle_manager._workflow_node_execution_repository.get_running_executions.return_value = [] + + # Call the method + result = workflow_cycle_manager.handle_workflow_run_failed( + workflow_run_id="test-workflow-run-id", + total_tokens=50, + total_steps=3, + status=WorkflowRunStatus.FAILED, + error_message="Test error message", + ) + + # Verify the result + assert result == workflow_execution + assert result.status == WorkflowExecutionStatus(WorkflowRunStatus.FAILED.value) + assert result.error_message == "Test error message" + assert result.total_tokens == 50 + assert result.total_steps == 3 + assert result.finished_at is not None -def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_run): - """Test _handle_node_execution_start method""" +def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_execution_repository): + """Test handle_node_execution_start method""" + # Create a real WorkflowExecution + + workflow_execution = WorkflowExecution( + id="test-workflow-execution-id", + workflow_id="test-workflow-id", + workflow_version="1.0", + sequence_number=1, + type=WorkflowType.CHAT, + graph={"nodes": [], "edges": []}, + inputs={"query": "test query"}, + started_at=datetime.now(UTC).replace(tzinfo=None), + ) + + # Mock _get_workflow_execution_or_raise_error to return the real workflow_execution + workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution + # Create a mock event event = 
MagicMock(spec=QueueNodeStartedEvent) event.node_execution_id = "test-node-execution-id" @@ -207,129 +300,171 @@ def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_run): event.in_loop_id = "test-loop-id" # Call the method - result = workflow_cycle_manager._handle_node_execution_start( - workflow_run=mock_workflow_run, + result = workflow_cycle_manager.handle_node_execution_start( + workflow_execution_id=workflow_execution.id, event=event, ) # Verify the result - # NodeExecution doesn't have tenant_id attribute, it's handled at repository level - # assert result.tenant_id == mock_workflow_run.tenant_id - # assert result.app_id == mock_workflow_run.app_id - assert result.workflow_id == mock_workflow_run.workflow_id - assert result.workflow_run_id == mock_workflow_run.id + assert result.workflow_id == workflow_execution.workflow_id + assert result.workflow_run_id == workflow_execution.id assert result.node_execution_id == event.node_execution_id assert result.node_id == event.node_id assert result.node_type == event.node_type assert result.title == event.node_data.title - assert result.status == WorkflowNodeExecutionStatus.RUNNING.value - # NodeExecution doesn't have created_by_role and created_by attributes, they're handled at repository level - # assert result.created_by_role == mock_workflow_run.created_by_role - # assert result.created_by == mock_workflow_run.created_by + assert result.status == NodeExecutionStatus.RUNNING # Verify save was called workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(result) -def test_get_workflow_run(workflow_cycle_manager, mock_session, mock_workflow_run): - """Test _get_workflow_run method""" - # Mock session.scalar to return the workflow run - mock_session.scalar.return_value = mock_workflow_run +def test_get_workflow_execution_or_raise_error(workflow_cycle_manager, mock_workflow_execution_repository): + """Test _get_workflow_execution_or_raise_error method""" + # 
Create a real WorkflowExecution - # Call the method - result = workflow_cycle_manager._get_workflow_run( - session=mock_session, - workflow_run_id="test-workflow-run-id", + workflow_execution = WorkflowExecution( + id="test-workflow-run-id", + workflow_id="test-workflow-id", + workflow_version="1.0", + sequence_number=1, + type=WorkflowType.CHAT, + graph={"nodes": [], "edges": []}, + inputs={"query": "test query"}, + started_at=datetime.now(UTC).replace(tzinfo=None), ) + # Mock the repository get method to return the real execution + workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution + + # Call the method + result = workflow_cycle_manager._get_workflow_execution_or_raise_error("test-workflow-run-id") + # Verify the result - assert result == mock_workflow_run - assert workflow_cycle_manager._workflow_run == mock_workflow_run + assert result == workflow_execution + + # Test error case + workflow_cycle_manager._workflow_execution_repository.get.return_value = None + + # Expect an error when execution is not found + with pytest.raises(ValueError): + workflow_cycle_manager._get_workflow_execution_or_raise_error("non-existent-id") def test_handle_workflow_node_execution_success(workflow_cycle_manager): - """Test _handle_workflow_node_execution_success method""" + """Test handle_workflow_node_execution_success method""" # Create a mock event event = MagicMock(spec=QueueNodeSucceededEvent) event.node_execution_id = "test-node-execution-id" event.inputs = {"input": "test input"} event.process_data = {"process": "test process"} event.outputs = {"output": "test output"} - event.execution_metadata = {"metadata": "test metadata"} + event.execution_metadata = {NodeRunMetadataKey.TOTAL_TOKENS: 100} event.start_at = datetime.now(UTC).replace(tzinfo=None) - # Create a mock node execution - node_execution = MagicMock() - node_execution.node_execution_id = "test-node-execution-id" + # Create a real node execution + + node_execution = 
NodeExecution( + id="test-node-execution-record-id", + node_execution_id="test-node-execution-id", + workflow_id="test-workflow-id", + workflow_run_id="test-workflow-run-id", + index=1, + node_id="test-node-id", + node_type=NodeType.LLM, + title="Test Node", + created_at=datetime.now(UTC).replace(tzinfo=None), + ) # Mock the repository to return the node execution workflow_cycle_manager._workflow_node_execution_repository.get_by_node_execution_id.return_value = node_execution # Call the method - result = workflow_cycle_manager._handle_workflow_node_execution_success( + result = workflow_cycle_manager.handle_workflow_node_execution_success( event=event, ) # Verify the result assert result == node_execution - assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED.value + assert result.status == NodeExecutionStatus.SUCCEEDED # Verify save was called workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(node_execution) -def test_handle_workflow_run_partial_success(workflow_cycle_manager, mock_session, mock_workflow_run): - """Test _handle_workflow_run_partial_success method""" - # Mock _get_workflow_run to return the mock_workflow_run - with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run): - # Call the method - result = workflow_cycle_manager._handle_workflow_run_partial_success( - session=mock_session, - workflow_run_id="test-workflow-run-id", - start_at=time.perf_counter() - 10, # 10 seconds ago - total_tokens=75, - total_steps=4, - outputs={"partial_answer": "test partial answer"}, - exceptions_count=2, - ) +def test_handle_workflow_run_partial_success(workflow_cycle_manager, mock_workflow_execution_repository): + """Test handle_workflow_run_partial_success method""" + # Create a real WorkflowExecution - # Verify the result - assert result == mock_workflow_run - assert result.status == WorkflowRunStatus.PARTIAL_SUCCEEDED.value - assert result.outputs == json.dumps({"partial_answer": 
"test partial answer"}) - assert result.total_tokens == 75 - assert result.total_steps == 4 - assert result.exceptions_count == 2 - assert result.finished_at is not None + workflow_execution = WorkflowExecution( + id="test-workflow-run-id", + workflow_id="test-workflow-id", + workflow_version="1.0", + sequence_number=1, + type=WorkflowType.CHAT, + graph={"nodes": [], "edges": []}, + inputs={"query": "test query"}, + started_at=datetime.now(UTC).replace(tzinfo=None), + ) + + # Mock _get_workflow_execution_or_raise_error to return the real workflow_execution + workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution + + # Call the method + result = workflow_cycle_manager.handle_workflow_run_partial_success( + workflow_run_id="test-workflow-run-id", + total_tokens=75, + total_steps=4, + outputs={"partial_answer": "test partial answer"}, + exceptions_count=2, + ) + + # Verify the result + assert result == workflow_execution + assert result.status == WorkflowExecutionStatus.PARTIAL_SUCCEEDED + assert result.outputs == {"partial_answer": "test partial answer"} + assert result.total_tokens == 75 + assert result.total_steps == 4 + assert result.exceptions_count == 2 + assert result.finished_at is not None def test_handle_workflow_node_execution_failed(workflow_cycle_manager): - """Test _handle_workflow_node_execution_failed method""" + """Test handle_workflow_node_execution_failed method""" # Create a mock event event = MagicMock(spec=QueueNodeFailedEvent) event.node_execution_id = "test-node-execution-id" event.inputs = {"input": "test input"} event.process_data = {"process": "test process"} event.outputs = {"output": "test output"} - event.execution_metadata = {"metadata": "test metadata"} + event.execution_metadata = {NodeRunMetadataKey.TOTAL_TOKENS: 100} event.start_at = datetime.now(UTC).replace(tzinfo=None) event.error = "Test error message" - # Create a mock node execution - node_execution = MagicMock() - 
node_execution.node_execution_id = "test-node-execution-id" + # Create a real node execution + + node_execution = NodeExecution( + id="test-node-execution-record-id", + node_execution_id="test-node-execution-id", + workflow_id="test-workflow-id", + workflow_run_id="test-workflow-run-id", + index=1, + node_id="test-node-id", + node_type=NodeType.LLM, + title="Test Node", + created_at=datetime.now(UTC).replace(tzinfo=None), + ) # Mock the repository to return the node execution workflow_cycle_manager._workflow_node_execution_repository.get_by_node_execution_id.return_value = node_execution # Call the method - result = workflow_cycle_manager._handle_workflow_node_execution_failed( + result = workflow_cycle_manager.handle_workflow_node_execution_failed( event=event, ) # Verify the result assert result == node_execution - assert result.status == WorkflowNodeExecutionStatus.FAILED.value + assert result.status == NodeExecutionStatus.FAILED assert result.error == "Test error message" # Verify save was called diff --git a/api/tests/unit_tests/models/test_workflow.py b/api/tests/unit_tests/models/test_workflow.py index fe56f18f1b..70ce224eb6 100644 --- a/api/tests/unit_tests/models/test_workflow.py +++ b/api/tests/unit_tests/models/test_workflow.py @@ -1,10 +1,11 @@ +import json from unittest import mock from uuid import uuid4 import contexts from constants import HIDDEN_VALUE from core.variables import FloatVariable, IntegerVariable, SecretVariable, StringVariable -from models.workflow import Workflow +from models.workflow import Workflow, WorkflowNodeExecution def test_environment_variables(): @@ -137,3 +138,14 @@ def test_to_dict(): workflow_dict = workflow.to_dict(include_secret=True) assert workflow_dict["environment_variables"][0]["value"] == "secret" assert workflow_dict["environment_variables"][1]["value"] == "text" + + +class TestWorkflowNodeExecution: + def test_execution_metadata_dict(self): + node_exec = WorkflowNodeExecution() + node_exec.execution_metadata = 
None + assert node_exec.execution_metadata_dict == {} + + original = {"a": 1, "b": ["2"]} + node_exec.execution_metadata = json.dumps(original) + assert node_exec.execution_metadata_dict == original diff --git a/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py b/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py index 8c10b272fb..7c5020db02 100644 --- a/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py +++ b/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py @@ -4,12 +4,14 @@ Unit tests for the SQLAlchemy implementation of WorkflowNodeExecutionRepository. import json from datetime import datetime +from decimal import Decimal from unittest.mock import MagicMock, PropertyMock import pytest from pytest_mock import MockerFixture from sqlalchemy.orm import Session, sessionmaker +from core.model_runtime.utils.encoders import jsonable_encoder from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository from core.workflow.entities.node_entities import NodeRunMetadataKey from core.workflow.entities.node_execution_entities import NodeExecution, NodeExecutionStatus @@ -298,7 +300,7 @@ def test_to_db_model(repository): status=NodeExecutionStatus.RUNNING, error=None, elapsed_time=1.5, - metadata={NodeRunMetadataKey.TOTAL_TOKENS: 100}, + metadata={NodeRunMetadataKey.TOTAL_TOKENS: 100, NodeRunMetadataKey.TOTAL_PRICE: Decimal("0.0")}, created_at=datetime.now(), finished_at=None, ) @@ -324,7 +326,7 @@ def test_to_db_model(repository): assert db_model.inputs_dict == domain_model.inputs assert db_model.process_data_dict == domain_model.process_data assert db_model.outputs_dict == domain_model.outputs - assert db_model.execution_metadata_dict == domain_model.metadata + assert db_model.execution_metadata_dict == jsonable_encoder(domain_model.metadata) assert db_model.status == domain_model.status assert db_model.error == 
domain_model.error diff --git a/docker/.env.example b/docker/.env.example index 7871f71a98..86c1a5779a 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -531,6 +531,7 @@ RELYT_DATABASE=postgres OPENSEARCH_HOST=opensearch OPENSEARCH_PORT=9200 OPENSEARCH_SECURE=true +OPENSEARCH_VERIFY_CERTS=true OPENSEARCH_AUTH_METHOD=basic OPENSEARCH_USER=admin OPENSEARCH_PASSWORD=admin @@ -1055,7 +1056,7 @@ PLUGIN_MAX_EXECUTION_TIMEOUT=600 PIP_MIRROR_URL= # https://github.com/langgenius/dify-plugin-daemon/blob/main/.env.example -# Plugin storage type, local aws_s3 tencent_cos azure_blob +# Plugin storage type, local aws_s3 tencent_cos azure_blob aliyun_oss PLUGIN_STORAGE_TYPE=local PLUGIN_STORAGE_LOCAL_ROOT=/app/storage PLUGIN_WORKING_PATH=/app/storage/cwd @@ -1078,6 +1079,13 @@ PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING= PLUGIN_TENCENT_COS_SECRET_KEY= PLUGIN_TENCENT_COS_SECRET_ID= PLUGIN_TENCENT_COS_REGION= +# Plugin oss aliyun oss +PLUGIN_ALIYUN_OSS_REGION= +PLUGIN_ALIYUN_OSS_ENDPOINT= +PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID= +PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET= +PLUGIN_ALIYUN_OSS_AUTH_VERSION=v4 +PLUGIN_ALIYUN_OSS_PATH= # ------------------------------ # OTLP Collector Configuration diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index 65fbd007e2..ceb32e4aba 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -178,6 +178,12 @@ services: TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-} TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-} TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-} + ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-} + ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-} + ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-} + ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-} + ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4} + ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-} ports: - 
"${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" volumes: diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index 4f39e801f5..8690a50926 100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -115,6 +115,12 @@ services: TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-} TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-} TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-} + ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-} + ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-} + ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-} + ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-} + ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4} + ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-} ports: - "${EXPOSE_PLUGIN_DAEMON_PORT:-5002}:${PLUGIN_DAEMON_PORT:-5002}" - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 0aca3b749e..371646eb9f 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -227,6 +227,7 @@ x-shared-env: &shared-api-worker-env OPENSEARCH_HOST: ${OPENSEARCH_HOST:-opensearch} OPENSEARCH_PORT: ${OPENSEARCH_PORT:-9200} OPENSEARCH_SECURE: ${OPENSEARCH_SECURE:-true} + OPENSEARCH_VERIFY_CERTS: ${OPENSEARCH_VERIFY_CERTS:-true} OPENSEARCH_AUTH_METHOD: ${OPENSEARCH_AUTH_METHOD:-basic} OPENSEARCH_USER: ${OPENSEARCH_USER:-admin} OPENSEARCH_PASSWORD: ${OPENSEARCH_PASSWORD:-admin} @@ -476,6 +477,12 @@ x-shared-env: &shared-api-worker-env PLUGIN_TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-} PLUGIN_TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-} PLUGIN_TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-} + PLUGIN_ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-} + PLUGIN_ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-} + PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID: 
${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-} + PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-} + PLUGIN_ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4} + PLUGIN_ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-} ENABLE_OTEL: ${ENABLE_OTEL:-false} OTLP_BASE_ENDPOINT: ${OTLP_BASE_ENDPOINT:-http://localhost:4318} OTLP_API_KEY: ${OTLP_API_KEY:-} @@ -669,6 +676,12 @@ services: TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-} TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-} TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-} + ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-} + ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-} + ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-} + ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-} + ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4} + ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-} ports: - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" volumes: diff --git a/docker/middleware.env.example b/docker/middleware.env.example index 2ac09ea264..2437026eec 100644 --- a/docker/middleware.env.example +++ b/docker/middleware.env.example @@ -145,3 +145,10 @@ PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING= PLUGIN_TENCENT_COS_SECRET_KEY= PLUGIN_TENCENT_COS_SECRET_ID= PLUGIN_TENCENT_COS_REGION= +# Plugin oss aliyun oss +PLUGIN_ALIYUN_OSS_REGION= +PLUGIN_ALIYUN_OSS_ENDPOINT= +PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID= +PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET= +PLUGIN_ALIYUN_OSS_AUTH_VERSION=v4 +PLUGIN_ALIYUN_OSS_PATH= diff --git a/web/app/components/app/annotation/index.tsx b/web/app/components/app/annotation/index.tsx index f010f5f8b1..04bce1947b 100644 --- a/web/app/components/app/annotation/index.tsx +++ b/web/app/components/app/annotation/index.tsx @@ -31,126 +31,98 @@ type Props = { appDetail: App } -const Annotation: FC = ({ - appDetail, -}) => { +const Annotation: FC = (props) => { + const { appDetail } = props const { t } = 
useTranslation() - const [isShowEdit, setIsShowEdit] = React.useState(false) + const [isShowEdit, setIsShowEdit] = useState(false) const [annotationConfig, setAnnotationConfig] = useState(null) - const [isChatApp, setIsChatApp] = useState(false) + const [isChatApp] = useState(appDetail.mode !== 'completion') + const [controlRefreshSwitch, setControlRefreshSwitch] = useState(Date.now()) + const { plan, enableBilling } = useProviderContext() + const isAnnotationFull = enableBilling && plan.usage.annotatedResponse >= plan.total.annotatedResponse + const [isShowAnnotationFullModal, setIsShowAnnotationFullModal] = useState(false) + const [queryParams, setQueryParams] = useState({}) + const [currPage, setCurrPage] = useState(0) + const [limit, setLimit] = useState(APP_PAGE_LIMIT) + const [list, setList] = useState([]) + const [total, setTotal] = useState(0) + const [isLoading, setIsLoading] = useState(false) + const [controlUpdateList, setControlUpdateList] = useState(Date.now()) + const [currItem, setCurrItem] = useState(null) + const [isShowViewModal, setIsShowViewModal] = useState(false) + const debouncedQueryParams = useDebounce(queryParams, { wait: 500 }) const fetchAnnotationConfig = async () => { const res = await doFetchAnnotationConfig(appDetail.id) setAnnotationConfig(res as AnnotationReplyConfig) return (res as AnnotationReplyConfig).id } - useEffect(() => { - const isChatApp = appDetail.mode !== 'completion' - setIsChatApp(isChatApp) - if (isChatApp) - fetchAnnotationConfig() - }, []) - const [controlRefreshSwitch, setControlRefreshSwitch] = useState(Date.now()) - const { plan, enableBilling } = useProviderContext() - const isAnnotationFull = (enableBilling && plan.usage.annotatedResponse >= plan.total.annotatedResponse) - const [isShowAnnotationFullModal, setIsShowAnnotationFullModal] = useState(false) - const ensureJobCompleted = async (jobId: string, status: AnnotationEnableStatus) => { - let isCompleted = false - while (!isCompleted) { - const res: any = 
await queryAnnotationJobStatus(appDetail.id, status, jobId) - isCompleted = res.job_status === JobStatus.completed - if (isCompleted) - break + useEffect(() => { + if (isChatApp) fetchAnnotationConfig() + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + const ensureJobCompleted = async (jobId: string, status: AnnotationEnableStatus) => { + while (true) { + const res: any = await queryAnnotationJobStatus(appDetail.id, status, jobId) + if (res.job_status === JobStatus.completed) break await sleep(2000) } } - const [queryParams, setQueryParams] = useState({}) - const [currPage, setCurrPage] = React.useState(0) - const debouncedQueryParams = useDebounce(queryParams, { wait: 500 }) - const [limit, setLimit] = React.useState(APP_PAGE_LIMIT) - const query = { - page: currPage + 1, - limit, - keyword: debouncedQueryParams.keyword || '', - } - - const [controlUpdateList, setControlUpdateList] = useState(Date.now()) - const [list, setList] = useState([]) - const [total, setTotal] = useState(10) - const [isLoading, setIsLoading] = useState(false) const fetchList = async (page = 1) => { setIsLoading(true) try { const { data, total }: any = await fetchAnnotationList(appDetail.id, { - ...query, page, + limit, + keyword: debouncedQueryParams.keyword || '', }) setList(data as AnnotationItem[]) setTotal(total) } - catch { - + finally { + setIsLoading(false) } - setIsLoading(false) } useEffect(() => { fetchList(currPage + 1) - }, [currPage]) - - useEffect(() => { - fetchList(1) - setControlUpdateList(Date.now()) - }, [queryParams]) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [currPage, limit, debouncedQueryParams]) const handleAdd = async (payload: AnnotationItemBasic) => { - await addAnnotation(appDetail.id, { - ...payload, - }) - Toast.notify({ - message: t('common.api.actionSuccess'), - type: 'success', - }) + await addAnnotation(appDetail.id, payload) + Toast.notify({ message: t('common.api.actionSuccess'), type: 'success' }) fetchList() 
setControlUpdateList(Date.now()) } const handleRemove = async (id: string) => { await delAnnotation(appDetail.id, id) - Toast.notify({ - message: t('common.api.actionSuccess'), - type: 'success', - }) + Toast.notify({ message: t('common.api.actionSuccess'), type: 'success' }) fetchList() setControlUpdateList(Date.now()) } - const [currItem, setCurrItem] = useState(list[0]) - const [isShowViewModal, setIsShowViewModal] = useState(false) - useEffect(() => { - if (!isShowEdit) - setControlRefreshSwitch(Date.now()) - }, [isShowEdit]) const handleView = (item: AnnotationItem) => { setCurrItem(item) setIsShowViewModal(true) } const handleSave = async (question: string, answer: string) => { - await editAnnotation(appDetail.id, (currItem as AnnotationItem).id, { - question, - answer, - }) - Toast.notify({ - message: t('common.api.actionSuccess'), - type: 'success', - }) + if (!currItem) return + await editAnnotation(appDetail.id, currItem.id, { question, answer }) + Toast.notify({ message: t('common.api.actionSuccess'), type: 'success' }) fetchList() setControlUpdateList(Date.now()) } + useEffect(() => { + if (!isShowEdit) setControlRefreshSwitch(Date.now()) + }, [isShowEdit]) + return (

{t('appLog.description')}

@@ -211,6 +183,7 @@ const Annotation: FC = ({ {isLoading ? + // eslint-disable-next-line sonarjs/no-nested-conditional : total > 0 ? = ({ )}
- handlePayloadChange('required')(!tempPayload.required)} /> + handlePayloadChange('required')(!tempPayload.required)} /> {t('appDebug.variableConfig.required')}
+ +
+ handlePayloadChange('hide')(!tempPayload.hide)} /> + {t('appDebug.variableConfig.hide')} +
-
+
{runningStatus @@ -210,7 +210,7 @@ function AppCard({ content={isApp ? appUrl : apiUrl} className={'!size-6'} /> - {isApp && } + {isApp && } {isApp && } {/* button copy link/ button regenerate */} {showConfirmDelete && ( diff --git a/web/app/components/base/app-icon-picker/ImageInput.tsx b/web/app/components/base/app-icon-picker/ImageInput.tsx index 9c0a95c021..d42abf867f 100644 --- a/web/app/components/base/app-icon-picker/ImageInput.tsx +++ b/web/app/components/base/app-icon-picker/ImageInput.tsx @@ -94,7 +94,7 @@ const ImageInput: FC = ({
= ({ className={cn(s.container, '!w-[362px] !p-0')} > {!DISABLE_UPLOAD_IMAGE_AS_ICON &&
-
+
{tabs.map(tab => (
) - }, [appData?.site.icon, appData?.site.icon_background, appData?.site.icon_type, appData?.site.icon_url, chatList, collapsed, currentConversationId, inputsForms.length, respondingState]) + }, [appData?.site.icon, appData?.site.icon_background, appData?.site.icon_type, appData?.site.icon_url, chatList, collapsed, currentConversationId, inputsForms.length, respondingState, allInputsHidden]) const answerIcon = (appData?.site && appData.site.use_icon_as_answer_icon) ? void, currentConversationInputs: Record | null, setCurrentConversationInputs: (v: Record) => void, + allInputsHidden: boolean, } export const ChatWithHistoryContext = createContext({ @@ -95,5 +96,6 @@ export const ChatWithHistoryContext = createContext setIsResponding: noop, currentConversationInputs: {}, setCurrentConversationInputs: noop, + allInputsHidden: false, }) export const useChatWithHistoryContext = () => useContext(ChatWithHistoryContext) diff --git a/web/app/components/base/chat/chat-with-history/hooks.tsx b/web/app/components/base/chat/chat-with-history/hooks.tsx index d03a28ae57..3694666139 100644 --- a/web/app/components/base/chat/chat-with-history/hooks.tsx +++ b/web/app/components/base/chat/chat-with-history/hooks.tsx @@ -240,6 +240,11 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => { } }) }, [appParams]) + + const allInputsHidden = useMemo(() => { + return inputsForms.length > 0 && inputsForms.every(item => item.hide === true) + }, [inputsForms]) + useEffect(() => { const conversationInputs: Record = {} @@ -304,6 +309,9 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => { const { notify } = useToastContext() const checkInputsRequired = useCallback((silent?: boolean) => { + if (allInputsHidden) + return true + let hasEmptyInput = '' let fileIsUploading = false const requiredVars = inputsForms.filter(({ required }) => required) @@ -339,7 +347,7 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => { } return true - }, 
[inputsForms, notify, t]) + }, [inputsForms, notify, t, allInputsHidden]) const handleStartChat = useCallback((callback: any) => { if (checkInputsRequired()) { setShowNewConversationItemInList(true) @@ -507,5 +515,6 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => { setIsResponding, currentConversationInputs, setCurrentConversationInputs, + allInputsHidden, } } diff --git a/web/app/components/base/chat/chat-with-history/index.tsx b/web/app/components/base/chat/chat-with-history/index.tsx index 2ba50ea49b..de023e7f58 100644 --- a/web/app/components/base/chat/chat-with-history/index.tsx +++ b/web/app/components/base/chat/chat-with-history/index.tsx @@ -161,6 +161,7 @@ const ChatWithHistoryWrap: FC = ({ setIsResponding, currentConversationInputs, setCurrentConversationInputs, + allInputsHidden, } = useChatWithHistory(installedAppInfo) return ( @@ -206,6 +207,7 @@ const ChatWithHistoryWrap: FC = ({ setIsResponding, currentConversationInputs, setCurrentConversationInputs, + allInputsHidden, }}> diff --git a/web/app/components/base/chat/chat-with-history/inputs-form/content.tsx b/web/app/components/base/chat/chat-with-history/inputs-form/content.tsx index b8d18244e1..73a1f07b69 100644 --- a/web/app/components/base/chat/chat-with-history/inputs-form/content.tsx +++ b/web/app/components/base/chat/chat-with-history/inputs-form/content.tsx @@ -36,9 +36,11 @@ const InputsFormContent = ({ showTip }: Props) => { }) }, [newConversationInputsRef, handleNewConversationInputsChange, currentConversationInputs, setCurrentConversationInputs]) + const visibleInputsForms = inputsForms.filter(form => form.hide !== true) + return (
- {inputsForms.map(form => ( + {visibleInputsForms.map(form => (
{form.label}
diff --git a/web/app/components/base/chat/chat-with-history/inputs-form/index.tsx b/web/app/components/base/chat/chat-with-history/inputs-form/index.tsx index 30ec11c729..3a1b92089c 100644 --- a/web/app/components/base/chat/chat-with-history/inputs-form/index.tsx +++ b/web/app/components/base/chat/chat-with-history/inputs-form/index.tsx @@ -21,9 +21,14 @@ const InputsFormNode = ({ isMobile, currentConversationId, handleStartChat, + allInputsHidden, themeBuilder, + inputsForms, } = useChatWithHistoryContext() + if (allInputsHidden || inputsForms.length === 0) + return null + return (
{ clearChatList, setClearChatList, setIsResponding, + allInputsHidden, } = useEmbeddedChatbotContext() const appConfig = useMemo(() => { const config = appParams || {} @@ -82,6 +83,9 @@ const ChatWrapper = () => { ) const inputsFormValue = currentConversationId ? currentConversationInputs : newConversationInputsRef?.current const inputDisabled = useMemo(() => { + if (allInputsHidden) + return false + let hasEmptyInput = '' let fileIsUploading = false const requiredVars = inputsForms.filter(({ required }) => required) @@ -111,7 +115,7 @@ const ChatWrapper = () => { if (fileIsUploading) return true return false - }, [inputsFormValue, inputsForms]) + }, [inputsFormValue, inputsForms, allInputsHidden]) useEffect(() => { if (currentChatInstanceRef.current) @@ -160,7 +164,7 @@ const ChatWrapper = () => { const [collapsed, setCollapsed] = useState(!!currentConversationId) const chatNode = useMemo(() => { - if (!inputsForms.length) + if (allInputsHidden || !inputsForms.length) return null if (isMobile) { if (!currentConversationId) @@ -170,7 +174,7 @@ const ChatWrapper = () => { else { return } - }, [inputsForms.length, isMobile, currentConversationId, collapsed]) + }, [inputsForms.length, isMobile, currentConversationId, collapsed, allInputsHidden]) const welcome = useMemo(() => { const welcomeMessage = chatList.find(item => item.isOpeningStatement) @@ -180,7 +184,7 @@ const ChatWrapper = () => { return null if (!welcomeMessage) return null - if (!collapsed && inputsForms.length > 0) + if (!collapsed && inputsForms.length > 0 && !allInputsHidden) return null if (welcomeMessage.suggestedQuestions && welcomeMessage.suggestedQuestions?.length > 0) { return ( @@ -215,7 +219,7 @@ const ChatWrapper = () => {
) - }, [appData?.site.icon, appData?.site.icon_background, appData?.site.icon_type, appData?.site.icon_url, chatList, collapsed, currentConversationId, inputsForms.length, respondingState]) + }, [appData?.site.icon, appData?.site.icon_background, appData?.site.icon_type, appData?.site.icon_url, chatList, collapsed, currentConversationId, inputsForms.length, respondingState, allInputsHidden]) const answerIcon = isDify() ? diff --git a/web/app/components/base/chat/embedded-chatbot/context.tsx b/web/app/components/base/chat/embedded-chatbot/context.tsx index 0dd6e7a29a..5964efd806 100644 --- a/web/app/components/base/chat/embedded-chatbot/context.tsx +++ b/web/app/components/base/chat/embedded-chatbot/context.tsx @@ -53,6 +53,7 @@ export type EmbeddedChatbotContextValue = { setIsResponding: (state: boolean) => void, currentConversationInputs: Record | null, setCurrentConversationInputs: (v: Record) => void, + allInputsHidden: boolean } export const EmbeddedChatbotContext = createContext({ @@ -82,5 +83,6 @@ export const EmbeddedChatbotContext = createContext setIsResponding: noop, currentConversationInputs: {}, setCurrentConversationInputs: noop, + allInputsHidden: false, }) export const useEmbeddedChatbotContext = () => useContext(EmbeddedChatbotContext) diff --git a/web/app/components/base/chat/embedded-chatbot/hooks.tsx b/web/app/components/base/chat/embedded-chatbot/hooks.tsx index ce42575bc9..40c56eca7b 100644 --- a/web/app/components/base/chat/embedded-chatbot/hooks.tsx +++ b/web/app/components/base/chat/embedded-chatbot/hooks.tsx @@ -235,6 +235,10 @@ export const useEmbeddedChatbot = () => { }) }, [initInputs, appParams]) + const allInputsHidden = useMemo(() => { + return inputsForms.length > 0 && inputsForms.every(item => item.hide === true) + }, [inputsForms]) + useEffect(() => { // init inputs from url params (async () => { @@ -306,6 +310,9 @@ export const useEmbeddedChatbot = () => { const { notify } = useToastContext() const checkInputsRequired = 
useCallback((silent?: boolean) => { + if (allInputsHidden) + return true + let hasEmptyInput = '' let fileIsUploading = false const requiredVars = inputsForms.filter(({ required }) => required) @@ -341,7 +348,7 @@ export const useEmbeddedChatbot = () => { } return true - }, [inputsForms, notify, t]) + }, [inputsForms, notify, t, allInputsHidden]) const handleStartChat = useCallback((callback?: any) => { if (checkInputsRequired()) { setShowNewConversationItemInList(true) @@ -417,5 +424,6 @@ export const useEmbeddedChatbot = () => { setIsResponding, currentConversationInputs, setCurrentConversationInputs, + allInputsHidden, } } diff --git a/web/app/components/base/chat/embedded-chatbot/index.tsx b/web/app/components/base/chat/embedded-chatbot/index.tsx index 49189c419e..ffcb128c8f 100644 --- a/web/app/components/base/chat/embedded-chatbot/index.tsx +++ b/web/app/components/base/chat/embedded-chatbot/index.tsx @@ -168,6 +168,7 @@ const EmbeddedChatbotWrapper = () => { setIsResponding, currentConversationInputs, setCurrentConversationInputs, + allInputsHidden, } = useEmbeddedChatbot() return { setIsResponding, currentConversationInputs, setCurrentConversationInputs, + allInputsHidden, }}> diff --git a/web/app/components/base/chat/embedded-chatbot/inputs-form/content.tsx b/web/app/components/base/chat/embedded-chatbot/inputs-form/content.tsx index e56520d23f..c5f39718f1 100644 --- a/web/app/components/base/chat/embedded-chatbot/inputs-form/content.tsx +++ b/web/app/components/base/chat/embedded-chatbot/inputs-form/content.tsx @@ -36,9 +36,11 @@ const InputsFormContent = ({ showTip }: Props) => { }) }, [newConversationInputsRef, handleNewConversationInputsChange, currentConversationInputs, setCurrentConversationInputs]) + const visibleInputsForms = inputsForms.filter(form => form.hide !== true) + return (
- {inputsForms.map(form => ( + {visibleInputsForms.map(form => (
{form.label}
diff --git a/web/app/components/base/chat/embedded-chatbot/inputs-form/index.tsx b/web/app/components/base/chat/embedded-chatbot/inputs-form/index.tsx index 4ac4aaa16b..88472b5d8f 100644 --- a/web/app/components/base/chat/embedded-chatbot/inputs-form/index.tsx +++ b/web/app/components/base/chat/embedded-chatbot/inputs-form/index.tsx @@ -22,8 +22,13 @@ const InputsFormNode = ({ currentConversationId, themeBuilder, handleStartChat, + allInputsHidden, + inputsForms, } = useEmbeddedChatbotContext() + if (allInputsHidden || inputsForms.length === 0) + return null + return (
{ const srcs = node.children.filter((child: any) => 'properties' in child).map((child: any) => (child as any).properties.src) - if (srcs.length === 0) + if (srcs.length === 0) { + const src = node.properties?.src + if (src) + return return null + } return }) VideoBlock.displayName = 'VideoBlock' const AudioBlock: any = memo(({ node }: any) => { const srcs = node.children.filter((child: any) => 'properties' in child).map((child: any) => (child as any).properties.src) - if (srcs.length === 0) + if (srcs.length === 0) { + const src = node.properties?.src + if (src) + return return null + } return }) AudioBlock.displayName = 'AudioBlock' diff --git a/web/app/components/workflow/nodes/_base/components/help-link.tsx b/web/app/components/workflow/nodes/_base/components/help-link.tsx index 2e7552001b..8bf74529f1 100644 --- a/web/app/components/workflow/nodes/_base/components/help-link.tsx +++ b/web/app/components/workflow/nodes/_base/components/help-link.tsx @@ -24,7 +24,7 @@ const HelpLink = ({ diff --git a/web/app/components/workflow/nodes/_base/components/node-position.tsx b/web/app/components/workflow/nodes/_base/components/node-position.tsx new file mode 100644 index 0000000000..404648dfa6 --- /dev/null +++ b/web/app/components/workflow/nodes/_base/components/node-position.tsx @@ -0,0 +1,54 @@ +import { memo } from 'react' +import { useTranslation } from 'react-i18next' +import { RiCrosshairLine } from '@remixicon/react' +import type { XYPosition } from 'reactflow' +import { useReactFlow, useStoreApi } from 'reactflow' +import TooltipPlus from '@/app/components/base/tooltip' +import { useNodesSyncDraft } from '@/app/components/workflow-app/hooks' + +type NodePositionProps = { + nodePosition: XYPosition, + nodeWidth?: number | null, + nodeHeight?: number | null, +} +const NodePosition = ({ + nodePosition, + nodeWidth, + nodeHeight, +}: NodePositionProps) => { + const { t } = useTranslation() + const reactflow = useReactFlow() + const store = useStoreApi() + const { 
doSyncWorkflowDraft } = useNodesSyncDraft() + + if (!nodePosition || !nodeWidth || !nodeHeight) return null + + const workflowContainer = document.getElementById('workflow-container') + const { transform } = store.getState() + const zoom = transform[2] + + const { clientWidth, clientHeight } = workflowContainer! + const { setViewport } = reactflow + + return ( + +
{ + setViewport({ + x: (clientWidth - 400 - nodeWidth * zoom) / 2 - nodePosition.x * zoom, + y: (clientHeight - nodeHeight * zoom) / 2 - nodePosition.y * zoom, + zoom: transform[2], + }) + doSyncWorkflowDraft() + }} + > + +
+
+ ) +} + +export default memo(NodePosition) diff --git a/web/app/components/workflow/nodes/_base/panel.tsx b/web/app/components/workflow/nodes/_base/panel.tsx index 2ee39a3b06..49c61b3416 100644 --- a/web/app/components/workflow/nodes/_base/panel.tsx +++ b/web/app/components/workflow/nodes/_base/panel.tsx @@ -16,6 +16,7 @@ import { useTranslation } from 'react-i18next' import NextStep from './components/next-step' import PanelOperator from './components/panel-operator' import HelpLink from './components/help-link' +import NodePosition from './components/node-position' import { DescriptionInput, TitleInput, @@ -55,6 +56,9 @@ const BasePanel: FC = ({ id, data, children, + position, + width, + height, }) => { const { t } = useTranslation() const { showMessageLogModal } = useAppStore(useShallow(state => ({ @@ -150,6 +154,7 @@ const BasePanel: FC = ({ ) } +
diff --git a/web/app/components/workflow/panel/debug-and-preview/index.tsx b/web/app/components/workflow/panel/debug-and-preview/index.tsx index b63b7af16c..563be25a99 100644 --- a/web/app/components/workflow/panel/debug-and-preview/index.tsx +++ b/web/app/components/workflow/panel/debug-and-preview/index.tsx @@ -39,6 +39,7 @@ const DebugAndPreview = () => { const nodes = useNodes() const startNode = nodes.find(node => node.data.type === BlockEnum.Start) const variables = startNode?.data.variables || [] + const visibleVariables = variables.filter(v => v.hide !== true) const [showConversationVariableModal, setShowConversationVariableModal] = useState(false) @@ -107,7 +108,7 @@ const DebugAndPreview = () => { )} - {variables.length > 0 && ( + {visibleVariables.length > 0 && (
{ const nodes = useNodes() const startNode = nodes.find(node => node.data.type === BlockEnum.Start) const variables = startNode?.data.variables || [] + const visibleVariables = variables.filter(v => v.hide !== true) const handleValueChange = (variable: string, v: string) => { const { @@ -29,13 +30,13 @@ const UserInput = () => { }) } - if (!variables.length) + if (!visibleVariables.length) return null return (
- {variables.map((variable, index) => ( + {visibleVariables.map((variable, index) => (
export type ModelConfig = { diff --git a/web/i18n/de-DE/workflow.ts b/web/i18n/de-DE/workflow.ts index e229506706..177467df78 100644 --- a/web/i18n/de-DE/workflow.ts +++ b/web/i18n/de-DE/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: 'Blöcke organisieren', change: 'Ändern', optional: '(optional)', + moveToThisNode: 'Bewege zu diesem Knoten', }, nodes: { common: { diff --git a/web/i18n/en-US/app-debug.ts b/web/i18n/en-US/app-debug.ts index 3ee5fd3e1d..349ff37118 100644 --- a/web/i18n/en-US/app-debug.ts +++ b/web/i18n/en-US/app-debug.ts @@ -368,6 +368,7 @@ const translation = { 'inputPlaceholder': 'Please input', 'content': 'Content', 'required': 'Required', + 'hide': 'Hide', 'file': { supportFileTypes: 'Support File Types', image: { diff --git a/web/i18n/en-US/billing.ts b/web/i18n/en-US/billing.ts index 57358dcf36..b75fef3e19 100644 --- a/web/i18n/en-US/billing.ts +++ b/web/i18n/en-US/billing.ts @@ -92,9 +92,9 @@ const translation = { member: 'Member', memberAfter: 'Member', messageRequest: { - title: '{{count,number}} messages', - titlePerMonth: '{{count,number}} messages/month', - tooltip: 'Message invocation quotas for various plans using OpenAl models. Messages over the limit will use your OpenAI API Key.', + title: '{{count,number}} message credits', + titlePerMonth: '{{count,number}} message credits/month', + tooltip: 'Message credits are provided to help you easily try out different OpenAI models in Dify. Credits are consumed based on the model type. 
Once they’re used up, you can switch to your own OpenAI API key.', }, annotatedResponse: { title: '{{count,number}} Annotation Quota Limits', diff --git a/web/i18n/en-US/workflow.ts b/web/i18n/en-US/workflow.ts index f7121a7a8a..f9a58d42e8 100644 --- a/web/i18n/en-US/workflow.ts +++ b/web/i18n/en-US/workflow.ts @@ -300,6 +300,7 @@ const translation = { addNextStep: 'Add the next block in this workflow', selectNextStep: 'Select Next Block', runThisStep: 'Run this step', + moveToThisNode: 'Move to this node', checklist: 'Checklist', checklistTip: 'Make sure all issues are resolved before publishing', checklistResolved: 'All issues are resolved', diff --git a/web/i18n/es-ES/workflow.ts b/web/i18n/es-ES/workflow.ts index 5a2d02bc0d..c7690f3ff4 100644 --- a/web/i18n/es-ES/workflow.ts +++ b/web/i18n/es-ES/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: 'Organizar bloques', change: 'Cambiar', optional: '(opcional)', + moveToThisNode: 'Mueve a este nodo', }, nodes: { common: { diff --git a/web/i18n/fa-IR/workflow.ts b/web/i18n/fa-IR/workflow.ts index 6b887b1739..0f235012f9 100644 --- a/web/i18n/fa-IR/workflow.ts +++ b/web/i18n/fa-IR/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: 'سازماندهی بلوک‌ها', change: 'تغییر', optional: '(اختیاری)', + moveToThisNode: 'به این گره بروید', }, nodes: { common: { diff --git a/web/i18n/fr-FR/workflow.ts b/web/i18n/fr-FR/workflow.ts index f6ae2a9ee2..ec360c2352 100644 --- a/web/i18n/fr-FR/workflow.ts +++ b/web/i18n/fr-FR/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: 'Organiser les blocs', change: 'Modifier', optional: '(facultatif)', + moveToThisNode: 'Déplacer vers ce nœud', }, nodes: { common: { diff --git a/web/i18n/hi-IN/workflow.ts b/web/i18n/hi-IN/workflow.ts index 17d33a0325..6b764b33f8 100644 --- a/web/i18n/hi-IN/workflow.ts +++ b/web/i18n/hi-IN/workflow.ts @@ -314,6 +314,7 @@ const translation = { organizeBlocks: 'ब्लॉक्स को व्यवस्थित करें', change: 'बदलें', 
optional: '(वैकल्पिक)', + moveToThisNode: 'इस नोड पर जाएं', }, nodes: { common: { diff --git a/web/i18n/it-IT/workflow.ts b/web/i18n/it-IT/workflow.ts index 35d3ba1267..20f948888e 100644 --- a/web/i18n/it-IT/workflow.ts +++ b/web/i18n/it-IT/workflow.ts @@ -317,6 +317,7 @@ const translation = { organizeBlocks: 'Organizza blocchi', change: 'Cambia', optional: '(opzionale)', + moveToThisNode: 'Sposta a questo nodo', }, nodes: { common: { diff --git a/web/i18n/ja-JP/app-debug.ts b/web/i18n/ja-JP/app-debug.ts index 23fc7adba7..350d58d60a 100644 --- a/web/i18n/ja-JP/app-debug.ts +++ b/web/i18n/ja-JP/app-debug.ts @@ -218,6 +218,10 @@ const translation = { enableText: '有効な機能', manage: '管理', }, + documentUpload: { + title: 'ドキュメント', + description: 'ドキュメント機能を有効にすると、AIモデルがファイルを処理し、その内容に基づいて質問に回答できるようになります。', + }, }, codegen: { title: 'コードジェネレーター', @@ -246,6 +250,7 @@ const translation = { noDataLine1: '左側に使用例を記入してください,', noDataLine2: 'オーケストレーションのプレビューがこちらに表示されます。', apply: '適用', + noData: '左側にユースケースを入力すると、こちらでプレビューができます。', loading: 'アプリケーションを処理中です', overwriteTitle: '既存の設定を上書きしますか?', overwriteMessage: 'このプロンプトを適用すると、既存の設定が上書きされます。', @@ -302,10 +307,7 @@ const translation = { waitForImgUpload: '画像のアップロードが完了するまでお待ちください', waitForFileUpload: 'ファイルのアップロードが完了するまでお待ちください', }, - warningMessage: { - timeoutExceeded: 'タイムアウトのため結果が表示されません。完全な結果を手にいれるためには、ログを参照してください。', - }, - chatSubTitle: '手順', + chatSubTitle: 'プロンプト', completionSubTitle: '接頭辞プロンプト', promptTip: 'プロンプトは、AIの応答を指示と制約で誘導します。 {{input}} のような変数を挿入します。このプロンプトはユーザーには表示されません。', formattingChangedTitle: '書式が変更されました', @@ -356,7 +358,6 @@ const translation = { 'varName': '変数名', 'labelName': 'ラベル名', 'inputPlaceholder': '入力してください', - 'content': 'コンテンツ', 'required': '必須', 'file': { supportFileTypes: 'サポートされたファイルタイプ', @@ -452,10 +453,8 @@ const translation = { noPrompt: 'プレプロンプト入力にいくつかのプロンプトを記入してみてください', userInputField: 'ユーザー入力フィールド', noVar: '変数の値を入力してください。新しいセッションが開始されるたびにプロンプトの単語が自動的に置換されます。', - chatVarTip: - 
'変数の値を入力してください。新しいセッションが開始されるたびにプロンプトの単語が自動的に置換されます。', - completionVarTip: - '変数の値を入力してください。質問が送信されるたびにプロンプトの単語が自動的に置換されます。', + chatVarTip: '変数の値を入力してください。新しいセッションが開始されるたびにプロンプトの単語が自動的に置換されます。', + completionVarTip: '変数の値を入力してください。質問が送信されるたびにプロンプトの単語が自動的に置換されます。', previewTitle: 'プロンプトのプレビュー', queryTitle: 'クエリ内容', queryPlaceholder: 'リクエストテキストを入力してください。', @@ -474,6 +473,7 @@ const translation = { title: 'マルチパスリトリーバル', description: 'ユーザーの意図に基づいて、すべてのナレッジをクエリし、複数のソースから関連するテキストを取得し、再順位付け後、ユーザークエリに最適な結果を選択します。再順位付けモデル API の構成が必要です。', }, + embeddingModelRequired: 'Embeddingモデルが設定されていない', rerankModelRequired: '再順位付けモデルが必要です', params: 'パラメータ', top_k: 'トップK', diff --git a/web/i18n/ja-JP/app-overview.ts b/web/i18n/ja-JP/app-overview.ts index d67860ff37..3b0505b0de 100644 --- a/web/i18n/ja-JP/app-overview.ts +++ b/web/i18n/ja-JP/app-overview.ts @@ -3,7 +3,7 @@ const translation = { firstStepTip: 'はじめるには、', enterKeyTip: '以下にOpenAI APIキーを入力してください', getKeyTip: 'OpenAIダッシュボードからAPIキーを取得してください', - placeholder: 'あなた様のOpenAI APIキー(例:sk-xxxx)', + placeholder: 'OpenAI APIキー(例:sk-xxxx)', }, apiKeyInfo: { cloud: { @@ -67,7 +67,7 @@ const translation = { customDisclaimerPlaceholder: '免責事項を入力してください', customDisclaimerTip: 'アプリケーションの使用に関する免責事項を提供します。', copyrightTooltip: 'プロフェッショナルプラン以上にアップグレードしてください', - copyrightTip: 'ウェブアプリに著作権情報を表示する', + copyrightTip: 'Webアプリに著作権情報を表示する', }, sso: { title: 'WebアプリのSSO', @@ -117,7 +117,7 @@ const translation = { }, apiInfo: { title: 'バックエンドサービスAPI', - explanation: 'あなた様のアプリケーションに簡単に統合できます', + explanation: 'あなたのアプリケーションに簡単に統合できます', accessibleAddress: 'サービスAPIエンドポイント', doc: 'APIリファレンス', }, diff --git a/web/i18n/ja-JP/app.ts b/web/i18n/ja-JP/app.ts index f0bc4ec72d..2c5d2c0f92 100644 --- a/web/i18n/ja-JP/app.ts +++ b/web/i18n/ja-JP/app.ts @@ -10,6 +10,10 @@ const translation = { advanced: 'チャットフロー', }, duplicate: '複製', + mermaid: { + handDrawn: '手描き', + classic: 'クラシック', + }, duplicateTitle: 'アプリを複製する', export: 'DSL をエクスポート', exportFailed: 'DSL のエクスポートに失敗しました。', 
@@ -21,12 +25,11 @@ const translation = { importFromDSLUrlPlaceholder: 'DSLリンクをここに貼り付けます', deleteAppConfirmTitle: 'このアプリを削除しますか?', deleteAppConfirmContent: - 'アプリを削除すると、元に戻すことはできません。ユーザーはもはやあなた様のアプリにアクセスできず、すべてのプロンプトの設定とログが永久に削除されます。', + 'アプリを削除すると、元に戻すことはできません。他のユーザーはもはやこのアプリにアクセスできず、すべてのプロンプトの設定とログが永久に削除されます。', appDeleted: 'アプリが削除されました', appDeleteFailed: 'アプリの削除に失敗しました', join: 'コミュニティに参加する', - communityIntro: - 'さまざまなチャンネルでチームメンバーや貢献者、開発者と議論します。', + communityIntro: 'さまざまなチャンネルでチームメンバーや貢献者、開発者と議論します。', roadmap: 'ロードマップを見る', newApp: { startFromBlank: '最初から作成', @@ -128,6 +131,7 @@ const translation = { title: 'アプリのパフォーマンスの追跡', description: 'サードパーティのLLMOpsサービスとトレースアプリケーションのパフォーマンス設定を行います。', config: '設定', + view: '見る', collapse: '折りたたむ', expand: '展開', tracing: '追跡', @@ -148,25 +152,24 @@ const translation = { title: 'Langfuse', description: 'トレース、評価、プロンプトの管理、そしてメトリクスを駆使して、LLMアプリケーションのデバッグや改善に役立てます。', }, - inUse: '使用中', - configProvider: { - title: '配置 ', - placeholder: 'あなた様の{{key}}を入力してください', - project: 'プロジェクト', - publicKey: '公開キー', - secretKey: '秘密キー', - viewDocsLink: '{{key}}のドキュメントを見る', - removeConfirmTitle: '{{key}}の設定を削除しますか?', - removeConfirmContent: '現在の設定は使用中です。これを削除すると、トレース機能が無効になります。', - }, - view: '見る', opik: { title: 'オピック', description: 'Opik は、LLM アプリケーションを評価、テスト、監視するためのオープンソース プラットフォームです。', }, + inUse: '使用中', + configProvider: { + title: '配置 ', + placeholder: '{{key}}を入力してください', + project: 'プロジェクト', + publicKey: '公開キー', + secretKey: '秘密キー', + viewDocsLink: '{{key}}に関するドキュメントを見る', + removeConfirmTitle: '{{key}}の設定を削除しますか?', + removeConfirmContent: '現在の設定は使用中です。これを削除すると、トレース機能が無効になります。', + }, weave: { - description: 'Weaveは、LLMアプリケーションを評価、テスト、および監視するためのオープンソースプラットフォームです。', title: '織る', + description: 'Weaveは、LLMアプリケーションを評価、テスト、および監視するためのオープンソースプラットフォームです。', }, }, answerIcon: { @@ -174,10 +177,6 @@ const translation = { description: '共有アプリケーションの中で Webアプリアイコンを使用して🤖を置き換えるかどうか', descriptionInExplore: 'ExploreでWebアプリアイコンを使用して🤖を置き換えるかどうか', }, - mermaid: { - 
handDrawn: '手描き', - classic: 'クラシック', - }, newAppFromTemplate: { sidebar: { Agent: 'エージェント', @@ -219,6 +218,11 @@ const translation = { title: 'アクセス権限', description: 'Webアプリのアクセス権限を設定します', accessLabel: '誰がアクセスできますか', + accessItemsDescription: { + anyone: '誰でもWebアプリにアクセス可能です', + specific: '特定のグループやメンバーがWebアプリにアクセス可能です', + organization: '組織内の誰でもWebアプリにアクセス可能です', + }, accessItems: { anyone: 'すべてのユーザー', specific: '特定のグループメンバー', diff --git a/web/i18n/ja-JP/billing.ts b/web/i18n/ja-JP/billing.ts index 1b85449b33..b96a972504 100644 --- a/web/i18n/ja-JP/billing.ts +++ b/web/i18n/ja-JP/billing.ts @@ -91,9 +91,9 @@ const translation = { member: 'メンバー', memberAfter: 'メンバー', messageRequest: { - title: '{{count,number}}メッセージ', - titlePerMonth: '{{count,number}}メッセージ/月', - tooltip: 'Open Alモデルを使用するさまざまなプランのメッセージ呼び出しクォータ。上限を超えるメッセージは、Open AI APIキーを使用します。', + title: '{{count,number}}メッセージクレジット', + titlePerMonth: '{{count,number}}メッセージクレジット/月', + tooltip: 'メッセージクレジットは、DifyでさまざまなOpenAIモデルを簡単にお試しいただくためのものです。モデルタイプに応じてクレジットが消費され、使い切った後はご自身のOpenAI APIキーに切り替えていただけます。', }, annotatedResponse: { title: '{{count,number}}の注釈クォータ制限', @@ -173,13 +173,11 @@ const translation = { fullSolution: 'より多くのスペースを得るためにプランをアップグレードしてください。', }, apps: { - fullTipLine1: 'より多くのアプリを作成するには、', - fullTipLine2: 'プランをアップグレードしてください。', - fullTip1: 'アプリをもっと作成するためにアップグレードする', - contactUs: 'お問い合わせ', - fullTip2: 'プランの制限に達しました', - fullTip2des: '使用状況を解放するために非アクティブなアプリケーションを整理することをお勧めします。または、お問い合わせください。', - fullTip1des: 'このプランでのアプリ構築の制限に達しました', + fullTip1: 'アップグレードして制限を解除する', + fullTip1des: 'このプランのアプリ数の上限に達しました。', + fullTip2: 'プラン制限に達しました。', + fullTip2des: '非アクティブなアプリを削除するか、アップグレードプランをご検討ください。', + contactUs: 'こちらからお問い合わせください', }, annotatedResponse: { fullTipLine1: 'より多くの会話を注釈するには、', diff --git a/web/i18n/ja-JP/common.ts b/web/i18n/ja-JP/common.ts index 0ced117db0..f9184332de 100644 --- a/web/i18n/ja-JP/common.ts +++ b/web/i18n/ja-JP/common.ts @@ -171,7 +171,7 @@ const translation = { community: 'コミュニティ', about: 'Difyについて', 
logout: 'ログアウト', - github: 'ギットハブ', + github: 'GitHub', }, compliance: { soc2Type1: 'SOC 2 Type I 報告書', @@ -252,7 +252,7 @@ const translation = { datasetOperator: 'ナレッジ管理員', datasetOperatorTip: 'ナレッジベースのみを管理できる', inviteTeamMember: 'チームメンバーを招待する', - inviteTeamMemberTip: '彼らはサインイン後、直接あなた様のチームデータにアクセスできます。', + inviteTeamMemberTip: '彼らはサインイン後、直接あなたのチームデータにアクセスできます。', emailNotSetup: 'メールサーバーがセットアップされていないので、招待メールを送信することはできません。代わりに招待後に発行される招待リンクをユーザーに通知してください。', email: 'メール', emailInvalid: '無効なメール形式', @@ -260,7 +260,7 @@ const translation = { sendInvite: '招待を送る', invitedAsRole: '{{role}}ユーザーとして招待されました', invitationSent: '招待が送信されました', - invitationSentTip: '招待が送信され、彼らはDifyにサインインしてあなた様のチームデータにアクセスできます。', + invitationSentTip: '招待が送信され、彼らはDifyにサインインしてあなたのチームデータにアクセスできます。', invitationLink: '招待リンク', failedInvitationEmails: '以下のユーザーは正常に招待されませんでした', ok: 'OK', @@ -272,7 +272,7 @@ const translation = { setEditor: 'エディターに設定', disInvite: '招待をキャンセル', deleteMember: 'メンバーを削除', - you: '(あなた様)', + you: '(あなた)', }, integrations: { connected: '接続済み', @@ -448,8 +448,8 @@ const translation = { connect: '接続', configure: '設定', notion: { - title: 'ノーション', - description: 'ナレッジデータソースとしてノーションを使用します。', + title: 'Notion', + description: 'ナレッジデータソースとしてNotionを使用します。', connectedWorkspace: '接続済みワークスペース', addWorkspace: 'ワークスペースの追加', connected: '接続済み', diff --git a/web/i18n/ja-JP/dataset-documents.ts b/web/i18n/ja-JP/dataset-documents.ts index 3943146b7e..81047872ad 100644 --- a/web/i18n/ja-JP/dataset-documents.ts +++ b/web/i18n/ja-JP/dataset-documents.ts @@ -51,7 +51,7 @@ const translation = { empty: { title: 'まだドキュメントがありません', upload: { - tip: 'ファイルをアップロードしたり、ウェブサイトから同期したり、NotionやGitHubなどのウェブアプリから同期することができます。', + tip: 'ファイルをアップロードしたり、ウェブサイトから同期したり、NotionやGitHubなどのWebアプリから同期することができます。', }, sync: { tip: 'Difyは定期的にNotionからファイルをダウンロードし、処理を完了します。', diff --git a/web/i18n/ja-JP/dataset-settings.ts b/web/i18n/ja-JP/dataset-settings.ts index 6b809ddd43..b0fd2ec55b 100644 --- a/web/i18n/ja-JP/dataset-settings.ts +++ 
b/web/i18n/ja-JP/dataset-settings.ts @@ -14,7 +14,7 @@ const translation = { permissionsOnlyMe: '自分のみ', permissionsAllMember: 'すべてのチームメンバー', permissionsInvitedMembers: '一部のチームメンバー', - me: '(あなた様)', + me: '(あなた)', indexMethod: 'インデックス方法', indexMethodHighQuality: '高品質', indexMethodHighQualityTip: 'より正確な検索のため、埋め込みモデルを呼び出してドキュメントを処理することで、LLMは高品質な回答を生成できます。', diff --git a/web/i18n/ja-JP/dataset.ts b/web/i18n/ja-JP/dataset.ts index 4e367f7809..1078fee69d 100644 --- a/web/i18n/ja-JP/dataset.ts +++ b/web/i18n/ja-JP/dataset.ts @@ -72,7 +72,7 @@ const translation = { createDatasetIntro: '独自のテキストデータをインポートするか、LLMコンテキストの強化のためにWebhookを介してリアルタイムでデータを書き込むことができます。', deleteDatasetConfirmTitle: 'このナレッジベースを削除しますか?', deleteDatasetConfirmContent: - 'ナレッジベースを削除すると元に戻すことはできません。ユーザーはもはやあなた様のナレッジベースにアクセスできず、すべてのプロンプトの設定とログが永久に削除されます。', + 'ナレッジベースを削除すると元に戻すことはできません。ユーザーはもはやあなたのナレッジベースにアクセスできず、すべてのプロンプトの設定とログが永久に削除されます。', datasetUsedByApp: 'このナレッジベースは一部のアプリによって使用されています。アプリはこのナレッジベースを使用できなくなり、すべてのプロンプト設定とログは永久に削除されます。', datasetDeleted: 'ナレッジベースが削除されました', datasetDeleteFailed: 'ナレッジベースの削除に失敗しました', diff --git a/web/i18n/ja-JP/login.ts b/web/i18n/ja-JP/login.ts index 974212564f..3db651c580 100644 --- a/web/i18n/ja-JP/login.ts +++ b/web/i18n/ja-JP/login.ts @@ -62,11 +62,11 @@ const translation = { link: 'オープンソースライセンス', }, join: '参加する', - joinTipStart: 'あなた様を招待します', + joinTipStart: 'あなたを招待します', joinTipEnd: 'チームに参加する', invalid: 'リンクの有効期限が切れています', explore: 'Difyを探索する', - activatedTipStart: 'あなた様は', + activatedTipStart: 'あなたは', activatedTipEnd: 'チームに参加しました', activated: '今すぐサインイン', adminInitPassword: '管理者初期化パスワード', diff --git a/web/i18n/ja-JP/time.ts b/web/i18n/ja-JP/time.ts index 36d4c699bb..09203a0cc2 100644 --- a/web/i18n/ja-JP/time.ts +++ b/web/i18n/ja-JP/time.ts @@ -2,11 +2,11 @@ const translation = { daysInWeek: { Tue: '火曜日', Sat: '土曜日', - Mon: 'モン', + Mon: '月曜日', Thu: '木曜日', - Fri: '自由', + Fri: '金曜日', Wed: '水曜日', - Sun: '太陽', + Sun: '日曜日', }, months: { November: '11月', @@ -14,13 +14,13 @@ const 
translation = { March: '3月', September: '9月', July: '7月', - April: '四月', + April: '4月', February: '2月', June: '6月', January: '1月', May: '5月', - August: '八月', - October: '十月', + August: '8月', + October: '10月', }, operation: { now: '今', diff --git a/web/i18n/ja-JP/tools.ts b/web/i18n/ja-JP/tools.ts index f234625eba..57eab7e736 100644 --- a/web/i18n/ja-JP/tools.ts +++ b/web/i18n/ja-JP/tools.ts @@ -108,7 +108,7 @@ const translation = { confirmTitle: '保存しますか?', confirmTip: 'このツールを使用しているアプリは影響を受けます', deleteToolConfirmTitle: 'このツールを削除しますか?', - deleteToolConfirmContent: 'ツールの削除は取り消しできません。ユーザーはもうあなた様のツールにアクセスできません。', + deleteToolConfirmContent: 'ツールの削除は取り消しできません。ユーザーはもうあなたのツールにアクセスできません。', }, test: { title: 'テスト', diff --git a/web/i18n/ja-JP/workflow.ts b/web/i18n/ja-JP/workflow.ts index 066a5581d7..7dc3175391 100644 --- a/web/i18n/ja-JP/workflow.ts +++ b/web/i18n/ja-JP/workflow.ts @@ -307,6 +307,7 @@ const translation = { organizeBlocks: 'ノード整理', change: '変更', optional: '(任意)', + moveToThisNode: 'このノードに移動する', }, nodes: { common: { diff --git a/web/i18n/ko-KR/workflow.ts b/web/i18n/ko-KR/workflow.ts index a3496b7a23..7e7c9b74e4 100644 --- a/web/i18n/ko-KR/workflow.ts +++ b/web/i18n/ko-KR/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: '블록 정리', change: '변경', optional: '(선택사항)', + moveToThisNode: '이 노드로 이동', }, nodes: { common: { diff --git a/web/i18n/pl-PL/workflow.ts b/web/i18n/pl-PL/workflow.ts index ebf1f47d2b..c518c1587c 100644 --- a/web/i18n/pl-PL/workflow.ts +++ b/web/i18n/pl-PL/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: 'Organizuj bloki', change: 'Zmień', optional: '(opcjonalne)', + moveToThisNode: 'Przenieś do tego węzła', }, nodes: { common: { diff --git a/web/i18n/pt-BR/workflow.ts b/web/i18n/pt-BR/workflow.ts index e64240a06d..be8a8cd11c 100644 --- a/web/i18n/pt-BR/workflow.ts +++ b/web/i18n/pt-BR/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: 'Organizar blocos', change: 'Mudar', optional: 
'(opcional)', + moveToThisNode: 'Mova-se para este nó', }, nodes: { common: { diff --git a/web/i18n/ro-RO/workflow.ts b/web/i18n/ro-RO/workflow.ts index 2978565bfc..176bbf9f97 100644 --- a/web/i18n/ro-RO/workflow.ts +++ b/web/i18n/ro-RO/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: 'Organizează blocurile', change: 'Schimbă', optional: '(opțional)', + moveToThisNode: 'Mutați la acest nod', }, nodes: { common: { diff --git a/web/i18n/ru-RU/workflow.ts b/web/i18n/ru-RU/workflow.ts index 8a292ff0a9..05894568db 100644 --- a/web/i18n/ru-RU/workflow.ts +++ b/web/i18n/ru-RU/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: 'Организовать блоки', change: 'Изменить', optional: '(необязательно)', + moveToThisNode: 'Перейдите к этому узлу', }, nodes: { common: { diff --git a/web/i18n/sl-SI/workflow.ts b/web/i18n/sl-SI/workflow.ts index 1376bc833f..0af5fa6b65 100644 --- a/web/i18n/sl-SI/workflow.ts +++ b/web/i18n/sl-SI/workflow.ts @@ -739,6 +739,7 @@ const translation = { checklistResolved: 'Vse težave so odpravljene', addNextStep: 'Dodajanje naslednjega bloka v ta potek dela', change: 'Spremeniti', + moveToThisNode: 'Premakni se na to vozlišče', }, nodes: { common: { diff --git a/web/i18n/th-TH/workflow.ts b/web/i18n/th-TH/workflow.ts index 6b91a03a41..77e1eee79d 100644 --- a/web/i18n/th-TH/workflow.ts +++ b/web/i18n/th-TH/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: 'จัดระเบียบบล็อก', change: 'เปลี่ยน', optional: '(ไม่บังคับ)', + moveToThisNode: 'ย้ายไปที่โหนดนี้', }, nodes: { common: { diff --git a/web/i18n/tr-TR/workflow.ts b/web/i18n/tr-TR/workflow.ts index 999456f1cf..313eae66dd 100644 --- a/web/i18n/tr-TR/workflow.ts +++ b/web/i18n/tr-TR/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: 'Blokları Düzenle', change: 'Değiştir', optional: '(isteğe bağlı)', + moveToThisNode: 'Bu düğüme geç', }, nodes: { common: { diff --git a/web/i18n/uk-UA/workflow.ts b/web/i18n/uk-UA/workflow.ts index 
c2d0b137fb..3f2e24b232 100644 --- a/web/i18n/uk-UA/workflow.ts +++ b/web/i18n/uk-UA/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: 'Організувати блоки', change: 'Змінити', optional: '(необов\'язково)', + moveToThisNode: 'Перемістіть до цього вузла', }, nodes: { common: { diff --git a/web/i18n/vi-VN/workflow.ts b/web/i18n/vi-VN/workflow.ts index 18ae72022e..55efe7b3e8 100644 --- a/web/i18n/vi-VN/workflow.ts +++ b/web/i18n/vi-VN/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: 'Tổ chức các khối', change: 'Thay đổi', optional: '(tùy chọn)', + moveToThisNode: 'Di chuyển đến nút này', }, nodes: { common: { diff --git a/web/i18n/zh-Hans/workflow.ts b/web/i18n/zh-Hans/workflow.ts index ab77e0ef8d..612bd754ab 100644 --- a/web/i18n/zh-Hans/workflow.ts +++ b/web/i18n/zh-Hans/workflow.ts @@ -307,6 +307,7 @@ const translation = { organizeBlocks: '整理节点', change: '更改', optional: '(选填)', + moveToThisNode: '定位至此节点', }, nodes: { common: { diff --git a/web/i18n/zh-Hant/workflow.ts b/web/i18n/zh-Hant/workflow.ts index 9a4d7f25d8..cfa9e320eb 100644 --- a/web/i18n/zh-Hant/workflow.ts +++ b/web/i18n/zh-Hant/workflow.ts @@ -302,6 +302,7 @@ const translation = { organizeBlocks: '整理節點', change: '更改', optional: '(選擇性)', + moveToThisNode: '定位至此節點', }, nodes: { common: {