Mirror of https://git.mirrors.martin98.com/https://github.com/langgenius/dify.git (synced 2025-08-20 21:39:21 +08:00)

commit f212a46a87
Merge branch 'fix/upgrade-page-description' into deploy/dev
@@ -269,6 +269,7 @@ OPENSEARCH_PORT=9200
 OPENSEARCH_USER=admin
 OPENSEARCH_PASSWORD=admin
 OPENSEARCH_SECURE=true
+OPENSEARCH_VERIFY_CERTS=true

 # Baidu configuration
 BAIDU_VECTOR_DB_ENDPOINT=http://127.0.0.1:5287
@@ -33,6 +33,11 @@ class OpenSearchConfig(BaseSettings):
         default=False,
     )

+    OPENSEARCH_VERIFY_CERTS: bool = Field(
+        description="Whether to verify SSL certificates for HTTPS connections (recommended to set True in production)",
+        default=True,
+    )
+
     OPENSEARCH_AUTH_METHOD: AuthMethod = Field(
         description="Authentication method for OpenSearch connection (default is 'basic')",
         default=AuthMethod.BASIC,
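
Note (not part of the diff): a minimal sketch of how these two settings would typically be wired into the opensearch-py client, assuming the standard OpenSearch constructor. OPENSEARCH_SECURE toggles HTTPS, while the new OPENSEARCH_VERIFY_CERTS independently controls certificate verification, so a development setup with self-signed certificates can keep TLS on but skip verification.

from opensearchpy import OpenSearch

# Hypothetical wiring of the settings above into a client instance.
client = OpenSearch(
    hosts=[{"host": "127.0.0.1", "port": 9200}],
    http_auth=("admin", "admin"),  # OPENSEARCH_USER / OPENSEARCH_PASSWORD
    use_ssl=True,                  # OPENSEARCH_SECURE
    verify_certs=True,             # OPENSEARCH_VERIFY_CERTS; keep True in production
)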
@@ -202,18 +202,18 @@ class EmailCodeLoginApi(Resource):
         except AccountRegisterError as are:
             raise AccountInFreezeError()
         if account:
-            tenant = TenantService.get_join_tenants(account)
-            if not tenant:
+            tenants = TenantService.get_join_tenants(account)
+            if not tenants:
                 workspaces = FeatureService.get_system_features().license.workspaces
                 if not workspaces.is_available():
                     raise WorkspacesLimitExceeded()
                 if not FeatureService.get_system_features().is_allow_create_workspace:
                     raise NotAllowedCreateWorkspace()
                 else:
-                    tenant = TenantService.create_tenant(f"{account.name}'s Workspace")
-                    TenantService.create_tenant_member(tenant, account, role="owner")
-                    account.current_tenant = tenant
-                    tenant_was_created.send(tenant)
+                    new_tenant = TenantService.create_tenant(f"{account.name}'s Workspace")
+                    TenantService.create_tenant_member(new_tenant, account, role="owner")
+                    account.current_tenant = new_tenant
+                    tenant_was_created.send(new_tenant)

         if account is None:
             try:
@@ -148,15 +148,15 @@ def _generate_account(provider: str, user_info: OAuthUserInfo):
     account = _get_account_by_openid_or_email(provider, user_info)

     if account:
-        tenant = TenantService.get_join_tenants(account)
-        if not tenant:
+        tenants = TenantService.get_join_tenants(account)
+        if not tenants:
             if not FeatureService.get_system_features().is_allow_create_workspace:
                 raise WorkSpaceNotAllowedCreateError()
             else:
-                tenant = TenantService.create_tenant(f"{account.name}'s Workspace")
-                TenantService.create_tenant_member(tenant, account, role="owner")
-                account.current_tenant = tenant
-                tenant_was_created.send(tenant)
+                new_tenant = TenantService.create_tenant(f"{account.name}'s Workspace")
+                TenantService.create_tenant_member(new_tenant, account, role="owner")
+                account.current_tenant = new_tenant
+                tenant_was_created.send(new_tenant)

     if not account:
         if not FeatureService.get_system_features().is_allow_register:
@@ -540,9 +540,22 @@ class DatasetIndexingStatusApi(Resource):
                 .filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
                 .count()
             )
-            document.completed_segments = completed_segments
-            document.total_segments = total_segments
-            documents_status.append(marshal(document, document_status_fields))
+            # Create a dictionary with document attributes and additional fields
+            document_dict = {
+                "id": document.id,
+                "indexing_status": document.indexing_status,
+                "processing_started_at": document.processing_started_at,
+                "parsing_completed_at": document.parsing_completed_at,
+                "cleaning_completed_at": document.cleaning_completed_at,
+                "splitting_completed_at": document.splitting_completed_at,
+                "completed_at": document.completed_at,
+                "paused_at": document.paused_at,
+                "error": document.error,
+                "stopped_at": document.stopped_at,
+                "completed_segments": completed_segments,
+                "total_segments": total_segments,
+            }
+            documents_status.append(marshal(document_dict, document_status_fields))
         data = {"data": documents_status}
         return data
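
Note (not part of the diff): the rewrite works because flask_restful's marshal() reads keys from a plain dict the same way it reads attributes from an object, so the handler no longer has to attach computed values such as completed_segments to the SQLAlchemy Document instance. A minimal sketch, with a trimmed stand-in for the real document_status_fields mask:

from flask_restful import fields, marshal

# Trimmed stand-in; the real mask is larger and lives in the API's fields module.
status_fields = {
    "id": fields.String,
    "indexing_status": fields.String,
    "completed_segments": fields.Integer,
    "total_segments": fields.Integer,
}

doc = {"id": "doc-1", "indexing_status": "completed",
       "completed_segments": 10, "total_segments": 10}
print(marshal(doc, status_fields))  # only the masked keys survive serialization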
@@ -583,11 +583,22 @@ class DocumentBatchIndexingStatusApi(DocumentResource):
                 .filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
                 .count()
             )
-            document.completed_segments = completed_segments
-            document.total_segments = total_segments
-            if document.is_paused:
-                document.indexing_status = "paused"
-            documents_status.append(marshal(document, document_status_fields))
+            # Create a dictionary with document attributes and additional fields
+            document_dict = {
+                "id": document.id,
+                "indexing_status": "paused" if document.is_paused else document.indexing_status,
+                "processing_started_at": document.processing_started_at,
+                "parsing_completed_at": document.parsing_completed_at,
+                "cleaning_completed_at": document.cleaning_completed_at,
+                "splitting_completed_at": document.splitting_completed_at,
+                "completed_at": document.completed_at,
+                "paused_at": document.paused_at,
+                "error": document.error,
+                "stopped_at": document.stopped_at,
+                "completed_segments": completed_segments,
+                "total_segments": total_segments,
+            }
+            documents_status.append(marshal(document_dict, document_status_fields))
         data = {"data": documents_status}
         return data
@@ -616,11 +627,22 @@ class DocumentIndexingStatusApi(DocumentResource):
             .count()
         )

-        document.completed_segments = completed_segments
-        document.total_segments = total_segments
-        if document.is_paused:
-            document.indexing_status = "paused"
-        return marshal(document, document_status_fields)
+        # Create a dictionary with document attributes and additional fields
+        document_dict = {
+            "id": document.id,
+            "indexing_status": "paused" if document.is_paused else document.indexing_status,
+            "processing_started_at": document.processing_started_at,
+            "parsing_completed_at": document.parsing_completed_at,
+            "cleaning_completed_at": document.cleaning_completed_at,
+            "splitting_completed_at": document.splitting_completed_at,
+            "completed_at": document.completed_at,
+            "paused_at": document.paused_at,
+            "error": document.error,
+            "stopped_at": document.stopped_at,
+            "completed_segments": completed_segments,
+            "total_segments": total_segments,
+        }
+        return marshal(document_dict, document_status_fields)


 class DocumentDetailApi(DocumentResource):
@@ -68,16 +68,24 @@ class TenantListApi(Resource):
     @account_initialization_required
     def get(self):
         tenants = TenantService.get_join_tenants(current_user)
+        tenant_dicts = []
+
         for tenant in tenants:
             features = FeatureService.get_features(tenant.id)
-            if features.billing.enabled:
-                tenant.plan = features.billing.subscription.plan
-            else:
-                tenant.plan = "sandbox"
-            if tenant.id == current_user.current_tenant_id:
-                tenant.current = True  # Set current=True for current tenant
-        return {"workspaces": marshal(tenants, tenants_fields)}, 200
+
+            # Create a dictionary with tenant attributes
+            tenant_dict = {
+                "id": tenant.id,
+                "name": tenant.name,
+                "status": tenant.status,
+                "created_at": tenant.created_at,
+                "plan": features.billing.subscription.plan if features.billing.enabled else "sandbox",
+                "current": tenant.id == current_user.current_tenant_id,
+            }
+
+            tenant_dicts.append(tenant_dict)
+
+        return {"workspaces": marshal(tenant_dicts, tenants_fields)}, 200


 class WorkspaceListApi(Resource):
@@ -64,9 +64,24 @@ class PluginUploadFileApi(Resource):

             extension = guess_extension(tool_file.mimetype) or ".bin"
             preview_url = ToolFileManager.sign_file(tool_file_id=tool_file.id, extension=extension)
-            tool_file.mime_type = mimetype
-            tool_file.extension = extension
-            tool_file.preview_url = preview_url
+
+            # Create a dictionary with all the necessary attributes
+            result = {
+                "id": tool_file.id,
+                "user_id": tool_file.user_id,
+                "tenant_id": tool_file.tenant_id,
+                "conversation_id": tool_file.conversation_id,
+                "file_key": tool_file.file_key,
+                "mimetype": tool_file.mimetype,
+                "original_url": tool_file.original_url,
+                "name": tool_file.name,
+                "size": tool_file.size,
+                "mime_type": mimetype,
+                "extension": extension,
+                "preview_url": preview_url,
+            }
+
+            return result, 201
         except services.errors.file.FileTooLargeError as file_too_large_error:
             raise FileTooLargeError(file_too_large_error.description)
         except services.errors.file.UnsupportedFileTypeError:
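
Note (not part of the diff): the same dict-over-model pattern as in the indexing-status hunks. Returning a (dict, 201) tuple works because flask_restful JSON-encodes whatever a Resource method returns, so nothing transient has to be written onto the ToolFile model. A minimal sketch of the return convention:

from flask_restful import Resource

class ExampleUploadApi(Resource):  # hypothetical stand-in resource
    def post(self):
        # flask_restful serializes (payload, status_code) tuples directly.
        return {"id": "file-1", "preview_url": "https://example.invalid/f/file-1"}, 201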
@@ -388,11 +388,22 @@ class DocumentIndexingStatusApi(DatasetApiResource):
                 .filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
                 .count()
             )
-            document.completed_segments = completed_segments
-            document.total_segments = total_segments
-            if document.is_paused:
-                document.indexing_status = "paused"
-            documents_status.append(marshal(document, document_status_fields))
+            # Create a dictionary with document attributes and additional fields
+            document_dict = {
+                "id": document.id,
+                "indexing_status": "paused" if document.is_paused else document.indexing_status,
+                "processing_started_at": document.processing_started_at,
+                "parsing_completed_at": document.parsing_completed_at,
+                "cleaning_completed_at": document.cleaning_completed_at,
+                "splitting_completed_at": document.splitting_completed_at,
+                "completed_at": document.completed_at,
+                "paused_at": document.paused_at,
+                "error": document.error,
+                "stopped_at": document.stopped_at,
+                "completed_segments": completed_segments,
+                "total_segments": total_segments,
+            }
+            documents_status.append(marshal(document_dict, document_status_fields))
         data = {"data": documents_status}
         return data

@@ -109,6 +109,7 @@ class VariableEntity(BaseModel):
     description: str = ""
     type: VariableEntityType
     required: bool = False
+    hide: bool = False
     max_length: Optional[int] = None
     options: Sequence[str] = Field(default_factory=list)
     allowed_file_types: Sequence[FileType] = Field(default_factory=list)
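
Note (not part of the diff): because the new hide field has a default, previously serialized app configs without the key still validate. A minimal sketch of that behavior, using a trimmed-down model:

from pydantic import BaseModel

class VariableEntitySketch(BaseModel):  # trimmed stand-in for VariableEntity
    description: str = ""
    required: bool = False
    hide: bool = False

# Older payloads without "hide" still parse; the field defaults to False.
v = VariableEntitySketch.model_validate({"description": "user name"})
assert v.hide is False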
@@ -26,10 +26,13 @@ from core.model_runtime.errors.invoke import InvokeAuthorizationError
 from core.ops.ops_trace_manager import TraceQueueManager
 from core.prompt.utils.get_thread_messages_length import get_thread_messages_length
 from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
+from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository
+from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from extensions.ext_database import db
 from factories import file_factory
 from models import Account, App, Conversation, EndUser, Message, Workflow, WorkflowNodeExecutionTriggeredFrom
+from models.enums import WorkflowRunTriggeredFrom
 from services.conversation_service import ConversationService
 from services.errors.message import MessageNotExistsError

@@ -159,8 +162,22 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())

-        # Create workflow node execution repository
+        # Create repositories
+        #
+        # Create session factory
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
+        # Create workflow execution(aka workflow run) repository
+        if invoke_from == InvokeFrom.DEBUGGER:
+            workflow_triggered_from = WorkflowRunTriggeredFrom.DEBUGGING
+        else:
+            workflow_triggered_from = WorkflowRunTriggeredFrom.APP_RUN
+        workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
+            session_factory=session_factory,
+            user=user,
+            app_id=application_generate_entity.app_config.app_id,
+            triggered_from=workflow_triggered_from,
+        )
+        # Create workflow node execution repository
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
             session_factory=session_factory,
             user=user,
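
Note (not part of the diff): the if/else above is the only place that knows whether this generate call came from the debugger, which is why the trigger source is resolved here and passed into the repository rather than inferred later. Condensed, the selection is equivalent to:

# Equivalent conditional-expression form of the selection in the hunk above.
workflow_triggered_from = (
    WorkflowRunTriggeredFrom.DEBUGGING
    if invoke_from == InvokeFrom.DEBUGGER
    else WorkflowRunTriggeredFrom.APP_RUN
)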
@@ -173,6 +190,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             user=user,
             invoke_from=invoke_from,
             application_generate_entity=application_generate_entity,
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
             conversation=conversation,
             stream=streaming,
@@ -226,8 +244,18 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())

-        # Create workflow node execution repository
+        # Create repositories
+        #
+        # Create session factory
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
+        # Create workflow execution(aka workflow run) repository
+        workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
+            session_factory=session_factory,
+            user=user,
+            app_id=application_generate_entity.app_config.app_id,
+            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
+        )
+        # Create workflow node execution repository
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
             session_factory=session_factory,
             user=user,
@@ -240,6 +268,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             user=user,
             invoke_from=InvokeFrom.DEBUGGER,
             application_generate_entity=application_generate_entity,
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
             conversation=None,
             stream=streaming,
@@ -291,8 +320,18 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())

-        # Create workflow node execution repository
+        # Create repositories
+        #
+        # Create session factory
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
+        # Create workflow execution(aka workflow run) repository
+        workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
+            session_factory=session_factory,
+            user=user,
+            app_id=application_generate_entity.app_config.app_id,
+            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
+        )
+        # Create workflow node execution repository
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
             session_factory=session_factory,
             user=user,
@@ -305,6 +344,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             user=user,
             invoke_from=InvokeFrom.DEBUGGER,
             application_generate_entity=application_generate_entity,
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
             conversation=None,
             stream=streaming,
@@ -317,6 +357,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         user: Union[Account, EndUser],
         invoke_from: InvokeFrom,
         application_generate_entity: AdvancedChatAppGenerateEntity,
+        workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
         conversation: Optional[Conversation] = None,
         stream: bool = True,
@@ -381,6 +422,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             conversation=conversation,
             message=message,
             user=user,
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
             stream=stream,
         )
@@ -453,6 +495,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
         conversation: Conversation,
         message: Message,
         user: Union[Account, EndUser],
+        workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
         stream: bool = False,
     ) -> Union[ChatbotAppBlockingResponse, Generator[ChatbotAppStreamResponse, None, None]]:
@@ -476,9 +519,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
             conversation=conversation,
             message=message,
             user=user,
-            stream=stream,
             dialogue_count=self._dialogue_count,
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
+            stream=stream,
         )

         try:
@@ -10,6 +10,7 @@ from sqlalchemy.orm import Session

 from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME
 from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
+from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter
 from core.app.entities.app_invoke_entities import (
     AdvancedChatAppGenerateEntity,
     InvokeFrom,
@@ -64,6 +65,7 @@ from core.ops.ops_trace_manager import TraceQueueManager
 from core.workflow.enums import SystemVariableKey
 from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
 from core.workflow.nodes import NodeType
+from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from core.workflow.workflow_cycle_manager import WorkflowCycleManager
 from events.message_event import message_was_created
@@ -94,6 +96,7 @@ class AdvancedChatAppGenerateTaskPipeline:
         user: Union[Account, EndUser],
         stream: bool,
         dialogue_count: int,
+        workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
     ) -> None:
         self._base_task_pipeline = BasedGenerateTaskPipeline(
@@ -125,9 +128,14 @@ class AdvancedChatAppGenerateTaskPipeline:
                 SystemVariableKey.WORKFLOW_ID: workflow.id,
                 SystemVariableKey.WORKFLOW_RUN_ID: application_generate_entity.workflow_run_id,
             },
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
         )

+        self._workflow_response_converter = WorkflowResponseConverter(
+            application_generate_entity=application_generate_entity,
+        )
+
         self._task_state = WorkflowTaskState()
         self._message_cycle_manager = MessageCycleManage(
             application_generate_entity=application_generate_entity, task_state=self._task_state
@@ -294,21 +302,19 @@ class AdvancedChatAppGenerateTaskPipeline:

             with Session(db.engine, expire_on_commit=False) as session:
                 # init workflow run
-                workflow_run = self._workflow_cycle_manager._handle_workflow_run_start(
+                workflow_execution = self._workflow_cycle_manager.handle_workflow_run_start(
                     session=session,
                     workflow_id=self._workflow_id,
-                    user_id=self._user_id,
-                    created_by_role=self._created_by_role,
                 )
-                self._workflow_run_id = workflow_run.id
+                self._workflow_run_id = workflow_execution.id
                 message = self._get_message(session=session)
                 if not message:
                     raise ValueError(f"Message not found: {self._message_id}")
-                message.workflow_run_id = workflow_run.id
-                workflow_start_resp = self._workflow_cycle_manager._workflow_start_to_stream_response(
-                    session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
+                message.workflow_run_id = workflow_execution.id
+                workflow_start_resp = self._workflow_response_converter.workflow_start_to_stream_response(
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution=workflow_execution,
                 )
-                session.commit()

             yield workflow_start_resp
         elif isinstance(
@@ -319,13 +325,10 @@ class AdvancedChatAppGenerateTaskPipeline:
                 raise ValueError("workflow run not initialized.")

             with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
+                workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried(
+                    workflow_execution_id=self._workflow_run_id, event=event
                 )
-                workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_retried(
-                    workflow_run=workflow_run, event=event
-                )
-                node_retry_resp = self._workflow_cycle_manager._workflow_node_retry_to_stream_response(
+                node_retry_resp = self._workflow_response_converter.workflow_node_retry_to_stream_response(
                     event=event,
                     task_id=self._application_generate_entity.task_id,
                     workflow_node_execution=workflow_node_execution,
@@ -338,20 +341,15 @@ class AdvancedChatAppGenerateTaskPipeline:
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                workflow_node_execution = self._workflow_cycle_manager._handle_node_execution_start(
-                    workflow_run=workflow_run, event=event
-                )
+            workflow_node_execution = self._workflow_cycle_manager.handle_node_execution_start(
+                workflow_execution_id=self._workflow_run_id, event=event
+            )

-                node_start_resp = self._workflow_cycle_manager._workflow_node_start_to_stream_response(
+            node_start_resp = self._workflow_response_converter.workflow_node_start_to_stream_response(
                 event=event,
                 task_id=self._application_generate_entity.task_id,
                 workflow_node_execution=workflow_node_execution,
             )
-                session.commit()

             if node_start_resp:
                 yield node_start_resp
@@ -359,15 +357,15 @@ class AdvancedChatAppGenerateTaskPipeline:
             # Record files if it's an answer node or end node
             if event.node_type in [NodeType.ANSWER, NodeType.END]:
                 self._recorded_files.extend(
-                    self._workflow_cycle_manager._fetch_files_from_node_outputs(event.outputs or {})
+                    self._workflow_response_converter.fetch_files_from_node_outputs(event.outputs or {})
                 )

             with Session(db.engine, expire_on_commit=False) as session:
-                workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_success(
+                workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_success(
                     event=event
                 )

-                node_finish_resp = self._workflow_cycle_manager._workflow_node_finish_to_stream_response(
+                node_finish_resp = self._workflow_response_converter.workflow_node_finish_to_stream_response(
                     event=event,
                     task_id=self._application_generate_entity.task_id,
                     workflow_node_execution=workflow_node_execution,
@@ -383,11 +381,11 @@ class AdvancedChatAppGenerateTaskPipeline:
             | QueueNodeInLoopFailedEvent
             | QueueNodeExceptionEvent,
         ):
-            workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_failed(
+            workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_failed(
                 event=event
             )

-            node_finish_resp = self._workflow_cycle_manager._workflow_node_finish_to_stream_response(
+            node_finish_resp = self._workflow_response_converter.workflow_node_finish_to_stream_response(
                 event=event,
                 task_id=self._application_generate_entity.task_id,
                 workflow_node_execution=workflow_node_execution,
@@ -399,132 +397,92 @@ class AdvancedChatAppGenerateTaskPipeline:
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                parallel_start_resp = (
-                    self._workflow_cycle_manager._workflow_parallel_branch_start_to_stream_response(
-                        session=session,
-                        task_id=self._application_generate_entity.task_id,
-                        workflow_run=workflow_run,
-                        event=event,
-                    )
-                )
+            parallel_start_resp = (
+                self._workflow_response_converter.workflow_parallel_branch_start_to_stream_response(
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution_id=self._workflow_run_id,
+                    event=event,
+                )
+            )

             yield parallel_start_resp
         elif isinstance(event, QueueParallelBranchRunSucceededEvent | QueueParallelBranchRunFailedEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                parallel_finish_resp = (
-                    self._workflow_cycle_manager._workflow_parallel_branch_finished_to_stream_response(
-                        session=session,
-                        task_id=self._application_generate_entity.task_id,
-                        workflow_run=workflow_run,
-                        event=event,
-                    )
-                )
+            parallel_finish_resp = (
+                self._workflow_response_converter.workflow_parallel_branch_finished_to_stream_response(
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution_id=self._workflow_run_id,
+                    event=event,
+                )
+            )

             yield parallel_finish_resp
         elif isinstance(event, QueueIterationStartEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                iter_start_resp = self._workflow_cycle_manager._workflow_iteration_start_to_stream_response(
-                    session=session,
-                    task_id=self._application_generate_entity.task_id,
-                    workflow_run=workflow_run,
-                    event=event,
-                )
+            iter_start_resp = self._workflow_response_converter.workflow_iteration_start_to_stream_response(
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution_id=self._workflow_run_id,
+                event=event,
+            )

             yield iter_start_resp
         elif isinstance(event, QueueIterationNextEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                iter_next_resp = self._workflow_cycle_manager._workflow_iteration_next_to_stream_response(
-                    session=session,
-                    task_id=self._application_generate_entity.task_id,
-                    workflow_run=workflow_run,
-                    event=event,
-                )
+            iter_next_resp = self._workflow_response_converter.workflow_iteration_next_to_stream_response(
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution_id=self._workflow_run_id,
+                event=event,
+            )

             yield iter_next_resp
         elif isinstance(event, QueueIterationCompletedEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                iter_finish_resp = self._workflow_cycle_manager._workflow_iteration_completed_to_stream_response(
-                    session=session,
-                    task_id=self._application_generate_entity.task_id,
-                    workflow_run=workflow_run,
-                    event=event,
-                )
+            iter_finish_resp = self._workflow_response_converter.workflow_iteration_completed_to_stream_response(
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution_id=self._workflow_run_id,
+                event=event,
+            )

             yield iter_finish_resp
         elif isinstance(event, QueueLoopStartEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                loop_start_resp = self._workflow_cycle_manager._workflow_loop_start_to_stream_response(
-                    session=session,
-                    task_id=self._application_generate_entity.task_id,
-                    workflow_run=workflow_run,
-                    event=event,
-                )
+            loop_start_resp = self._workflow_response_converter.workflow_loop_start_to_stream_response(
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution_id=self._workflow_run_id,
+                event=event,
+            )

             yield loop_start_resp
         elif isinstance(event, QueueLoopNextEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                loop_next_resp = self._workflow_cycle_manager._workflow_loop_next_to_stream_response(
-                    session=session,
-                    task_id=self._application_generate_entity.task_id,
-                    workflow_run=workflow_run,
-                    event=event,
-                )
+            loop_next_resp = self._workflow_response_converter.workflow_loop_next_to_stream_response(
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution_id=self._workflow_run_id,
+                event=event,
+            )

             yield loop_next_resp
         elif isinstance(event, QueueLoopCompletedEvent):
             if not self._workflow_run_id:
                 raise ValueError("workflow run not initialized.")

-            with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                    session=session, workflow_run_id=self._workflow_run_id
-                )
-                loop_finish_resp = self._workflow_cycle_manager._workflow_loop_completed_to_stream_response(
-                    session=session,
-                    task_id=self._application_generate_entity.task_id,
-                    workflow_run=workflow_run,
-                    event=event,
-                )
+            loop_finish_resp = self._workflow_response_converter.workflow_loop_completed_to_stream_response(
+                task_id=self._application_generate_entity.task_id,
+                workflow_execution_id=self._workflow_run_id,
+                event=event,
+            )

             yield loop_finish_resp
         elif isinstance(event, QueueWorkflowSucceededEvent):
@@ -535,10 +493,8 @@ class AdvancedChatAppGenerateTaskPipeline:
                 raise ValueError("workflow run not initialized.")

             with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._handle_workflow_run_success(
-                    session=session,
+                workflow_execution = self._workflow_cycle_manager.handle_workflow_run_success(
                     workflow_run_id=self._workflow_run_id,
-                    start_at=graph_runtime_state.start_at,
                     total_tokens=graph_runtime_state.total_tokens,
                     total_steps=graph_runtime_state.node_run_steps,
                     outputs=event.outputs,
@@ -546,10 +502,11 @@ class AdvancedChatAppGenerateTaskPipeline:
                     trace_manager=trace_manager,
                 )

-                workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
-                    session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
+                workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
+                    session=session,
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution=workflow_execution,
                 )
-                session.commit()

             yield workflow_finish_resp
             self._base_task_pipeline._queue_manager.publish(
@@ -562,10 +519,8 @@ class AdvancedChatAppGenerateTaskPipeline:
                 raise ValueError("graph runtime state not initialized.")

             with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._handle_workflow_run_partial_success(
-                    session=session,
+                workflow_execution = self._workflow_cycle_manager.handle_workflow_run_partial_success(
                     workflow_run_id=self._workflow_run_id,
-                    start_at=graph_runtime_state.start_at,
                     total_tokens=graph_runtime_state.total_tokens,
                     total_steps=graph_runtime_state.node_run_steps,
                     outputs=event.outputs,
@@ -573,10 +528,11 @@ class AdvancedChatAppGenerateTaskPipeline:
                     conversation_id=None,
                     trace_manager=trace_manager,
                 )
-                workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
-                    session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
+                workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
+                    session=session,
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution=workflow_execution,
                 )
-                session.commit()

             yield workflow_finish_resp
             self._base_task_pipeline._queue_manager.publish(
@@ -589,26 +545,25 @@ class AdvancedChatAppGenerateTaskPipeline:
                 raise ValueError("graph runtime state not initialized.")

             with Session(db.engine, expire_on_commit=False) as session:
-                workflow_run = self._workflow_cycle_manager._handle_workflow_run_failed(
-                    session=session,
+                workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed(
                     workflow_run_id=self._workflow_run_id,
-                    start_at=graph_runtime_state.start_at,
                     total_tokens=graph_runtime_state.total_tokens,
                     total_steps=graph_runtime_state.node_run_steps,
                     status=WorkflowRunStatus.FAILED,
-                    error=event.error,
+                    error_message=event.error,
                     conversation_id=self._conversation_id,
                     trace_manager=trace_manager,
                     exceptions_count=event.exceptions_count,
                 )
-                workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
-                    session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
+                workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
+                    session=session,
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution=workflow_execution,
                 )
-                err_event = QueueErrorEvent(error=ValueError(f"Run failed: {workflow_run.error}"))
+                err_event = QueueErrorEvent(error=ValueError(f"Run failed: {workflow_execution.error_message}"))
                 err = self._base_task_pipeline._handle_error(
                     event=err_event, session=session, message_id=self._message_id
                 )
-                session.commit()

             yield workflow_finish_resp
             yield self._base_task_pipeline._error_to_stream_response(err)
@@ -616,21 +571,19 @@ class AdvancedChatAppGenerateTaskPipeline:
         elif isinstance(event, QueueStopEvent):
             if self._workflow_run_id and graph_runtime_state:
                 with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._handle_workflow_run_failed(
-                        session=session,
+                    workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed(
                         workflow_run_id=self._workflow_run_id,
-                        start_at=graph_runtime_state.start_at,
                         total_tokens=graph_runtime_state.total_tokens,
                         total_steps=graph_runtime_state.node_run_steps,
                         status=WorkflowRunStatus.STOPPED,
-                        error=event.get_stop_reason(),
+                        error_message=event.get_stop_reason(),
                         conversation_id=self._conversation_id,
                         trace_manager=trace_manager,
                     )
-                    workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
+                    workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
                         session=session,
                         task_id=self._application_generate_entity.task_id,
-                        workflow_run=workflow_run,
+                        workflow_execution=workflow_execution,
                     )
                     # Save message
                     self._save_message(session=session, graph_runtime_state=graph_runtime_state)
@@ -711,7 +664,7 @@ class AdvancedChatAppGenerateTaskPipeline:

                 yield self._message_end_to_stream_response()
             elif isinstance(event, QueueAgentLogEvent):
-                yield self._workflow_cycle_manager._handle_agent_log(
+                yield self._workflow_response_converter.handle_agent_log(
                     task_id=self._application_generate_entity.task_id, event=event
                 )
             else:
|
0
api/core/app/apps/common/__init__.py
Normal file
0
api/core/app/apps/common/__init__.py
Normal file
564
api/core/app/apps/common/workflow_response_converter.py
Normal file
564
api/core/app/apps/common/workflow_response_converter.py
Normal file
@@ -0,0 +1,564 @@
+import time
+from collections.abc import Mapping, Sequence
+from datetime import UTC, datetime
+from typing import Any, Optional, Union, cast
+
+from sqlalchemy import select
+from sqlalchemy.orm import Session
+
+from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity
+from core.app.entities.queue_entities import (
+    QueueAgentLogEvent,
+    QueueIterationCompletedEvent,
+    QueueIterationNextEvent,
+    QueueIterationStartEvent,
+    QueueLoopCompletedEvent,
+    QueueLoopNextEvent,
+    QueueLoopStartEvent,
+    QueueNodeExceptionEvent,
+    QueueNodeFailedEvent,
+    QueueNodeInIterationFailedEvent,
+    QueueNodeInLoopFailedEvent,
+    QueueNodeRetryEvent,
+    QueueNodeStartedEvent,
+    QueueNodeSucceededEvent,
+    QueueParallelBranchRunFailedEvent,
+    QueueParallelBranchRunStartedEvent,
+    QueueParallelBranchRunSucceededEvent,
+)
+from core.app.entities.task_entities import (
+    AgentLogStreamResponse,
+    IterationNodeCompletedStreamResponse,
+    IterationNodeNextStreamResponse,
+    IterationNodeStartStreamResponse,
+    LoopNodeCompletedStreamResponse,
+    LoopNodeNextStreamResponse,
+    LoopNodeStartStreamResponse,
+    NodeFinishStreamResponse,
+    NodeRetryStreamResponse,
+    NodeStartStreamResponse,
+    ParallelBranchFinishedStreamResponse,
+    ParallelBranchStartStreamResponse,
+    WorkflowFinishStreamResponse,
+    WorkflowStartStreamResponse,
+)
+from core.file import FILE_MODEL_IDENTITY, File
+from core.tools.tool_manager import ToolManager
+from core.workflow.entities.node_execution_entities import NodeExecution
+from core.workflow.entities.workflow_execution_entities import WorkflowExecution
+from core.workflow.nodes import NodeType
+from core.workflow.nodes.tool.entities import ToolNodeData
+from models import (
+    Account,
+    CreatorUserRole,
+    EndUser,
+    WorkflowNodeExecutionStatus,
+    WorkflowRun,
+)
+
+
+class WorkflowResponseConverter:
+    def __init__(
+        self,
+        *,
+        application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity],
+    ) -> None:
+        self._application_generate_entity = application_generate_entity
+
+    def workflow_start_to_stream_response(
+        self,
+        *,
+        task_id: str,
+        workflow_execution: WorkflowExecution,
+    ) -> WorkflowStartStreamResponse:
+        return WorkflowStartStreamResponse(
+            task_id=task_id,
+            workflow_run_id=workflow_execution.id,
+            data=WorkflowStartStreamResponse.Data(
+                id=workflow_execution.id,
+                workflow_id=workflow_execution.workflow_id,
+                sequence_number=workflow_execution.sequence_number,
+                inputs=workflow_execution.inputs,
+                created_at=int(workflow_execution.started_at.timestamp()),
+            ),
+        )
+
+    def workflow_finish_to_stream_response(
+        self,
+        *,
+        session: Session,
+        task_id: str,
+        workflow_execution: WorkflowExecution,
+    ) -> WorkflowFinishStreamResponse:
+        created_by = None
+        workflow_run = session.scalar(select(WorkflowRun).where(WorkflowRun.id == workflow_execution.id))
+        assert workflow_run is not None
+        if workflow_run.created_by_role == CreatorUserRole.ACCOUNT:
+            stmt = select(Account).where(Account.id == workflow_run.created_by)
+            account = session.scalar(stmt)
+            if account:
+                created_by = {
+                    "id": account.id,
+                    "name": account.name,
+                    "email": account.email,
+                }
+        elif workflow_run.created_by_role == CreatorUserRole.END_USER:
+            stmt = select(EndUser).where(EndUser.id == workflow_run.created_by)
+            end_user = session.scalar(stmt)
+            if end_user:
+                created_by = {
+                    "id": end_user.id,
+                    "user": end_user.session_id,
+                }
+        else:
+            raise NotImplementedError(f"unknown created_by_role: {workflow_run.created_by_role}")
+
+        # Handle the case where finished_at is None by using current time as default
+        finished_at_timestamp = (
+            int(workflow_execution.finished_at.timestamp())
+            if workflow_execution.finished_at
+            else int(datetime.now(UTC).timestamp())
+        )
+
+        return WorkflowFinishStreamResponse(
+            task_id=task_id,
+            workflow_run_id=workflow_execution.id,
+            data=WorkflowFinishStreamResponse.Data(
+                id=workflow_execution.id,
+                workflow_id=workflow_execution.workflow_id,
+                sequence_number=workflow_execution.sequence_number,
+                status=workflow_execution.status,
+                outputs=workflow_execution.outputs,
+                error=workflow_execution.error_message,
+                elapsed_time=workflow_execution.elapsed_time,
+                total_tokens=workflow_execution.total_tokens,
+                total_steps=workflow_execution.total_steps,
+                created_by=created_by,
+                created_at=int(workflow_execution.started_at.timestamp()),
+                finished_at=finished_at_timestamp,
+                files=self.fetch_files_from_node_outputs(workflow_execution.outputs),
+                exceptions_count=workflow_execution.exceptions_count,
+            ),
+        )
+
+    def workflow_node_start_to_stream_response(
+        self,
+        *,
+        event: QueueNodeStartedEvent,
+        task_id: str,
+        workflow_node_execution: NodeExecution,
+    ) -> Optional[NodeStartStreamResponse]:
+        if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
+            return None
+        if not workflow_node_execution.workflow_run_id:
+            return None
+
+        response = NodeStartStreamResponse(
+            task_id=task_id,
+            workflow_run_id=workflow_node_execution.workflow_run_id,
+            data=NodeStartStreamResponse.Data(
+                id=workflow_node_execution.id,
+                node_id=workflow_node_execution.node_id,
+                node_type=workflow_node_execution.node_type,
+                title=workflow_node_execution.title,
+                index=workflow_node_execution.index,
+                predecessor_node_id=workflow_node_execution.predecessor_node_id,
+                inputs=workflow_node_execution.inputs,
+                created_at=int(workflow_node_execution.created_at.timestamp()),
+                parallel_id=event.parallel_id,
+                parallel_start_node_id=event.parallel_start_node_id,
+                parent_parallel_id=event.parent_parallel_id,
+                parent_parallel_start_node_id=event.parent_parallel_start_node_id,
+                iteration_id=event.in_iteration_id,
+                loop_id=event.in_loop_id,
+                parallel_run_id=event.parallel_mode_run_id,
+                agent_strategy=event.agent_strategy,
+            ),
+        )
+
+        # extras logic
+        if event.node_type == NodeType.TOOL:
+            node_data = cast(ToolNodeData, event.node_data)
+            response.data.extras["icon"] = ToolManager.get_tool_icon(
+                tenant_id=self._application_generate_entity.app_config.tenant_id,
+                provider_type=node_data.provider_type,
+                provider_id=node_data.provider_id,
+            )
+
+        return response
+
+    def workflow_node_finish_to_stream_response(
+        self,
+        *,
+        event: QueueNodeSucceededEvent
+        | QueueNodeFailedEvent
+        | QueueNodeInIterationFailedEvent
+        | QueueNodeInLoopFailedEvent
+        | QueueNodeExceptionEvent,
+        task_id: str,
+        workflow_node_execution: NodeExecution,
+    ) -> Optional[NodeFinishStreamResponse]:
+        if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
+            return None
+        if not workflow_node_execution.workflow_run_id:
+            return None
+        if not workflow_node_execution.finished_at:
|
return None
|
||||||
|
|
||||||
|
return NodeFinishStreamResponse(
|
||||||
|
task_id=task_id,
|
||||||
|
workflow_run_id=workflow_node_execution.workflow_run_id,
|
||||||
|
data=NodeFinishStreamResponse.Data(
|
||||||
|
id=workflow_node_execution.id,
|
||||||
|
node_id=workflow_node_execution.node_id,
|
||||||
|
node_type=workflow_node_execution.node_type,
|
||||||
|
index=workflow_node_execution.index,
|
||||||
|
title=workflow_node_execution.title,
|
||||||
|
predecessor_node_id=workflow_node_execution.predecessor_node_id,
|
||||||
|
inputs=workflow_node_execution.inputs,
|
||||||
|
process_data=workflow_node_execution.process_data,
|
||||||
|
outputs=workflow_node_execution.outputs,
|
||||||
|
status=workflow_node_execution.status,
|
||||||
|
error=workflow_node_execution.error,
|
||||||
|
elapsed_time=workflow_node_execution.elapsed_time,
|
||||||
|
execution_metadata=workflow_node_execution.metadata,
|
||||||
|
created_at=int(workflow_node_execution.created_at.timestamp()),
|
||||||
|
finished_at=int(workflow_node_execution.finished_at.timestamp()),
|
||||||
|
files=self.fetch_files_from_node_outputs(workflow_node_execution.outputs or {}),
|
||||||
|
parallel_id=event.parallel_id,
|
||||||
|
parallel_start_node_id=event.parallel_start_node_id,
|
||||||
|
parent_parallel_id=event.parent_parallel_id,
|
||||||
|
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
||||||
|
iteration_id=event.in_iteration_id,
|
||||||
|
loop_id=event.in_loop_id,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
def workflow_node_retry_to_stream_response(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
event: QueueNodeRetryEvent,
|
||||||
|
task_id: str,
|
||||||
|
workflow_node_execution: NodeExecution,
|
||||||
|
) -> Optional[Union[NodeRetryStreamResponse, NodeFinishStreamResponse]]:
|
||||||
|
if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
|
||||||
|
return None
|
||||||
|
if not workflow_node_execution.workflow_run_id:
|
||||||
|
return None
|
||||||
|
if not workflow_node_execution.finished_at:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return NodeRetryStreamResponse(
|
||||||
|
task_id=task_id,
|
||||||
|
workflow_run_id=workflow_node_execution.workflow_run_id,
|
||||||
|
data=NodeRetryStreamResponse.Data(
|
||||||
|
id=workflow_node_execution.id,
|
||||||
|
node_id=workflow_node_execution.node_id,
|
||||||
|
node_type=workflow_node_execution.node_type,
|
||||||
|
index=workflow_node_execution.index,
|
||||||
|
title=workflow_node_execution.title,
|
||||||
|
predecessor_node_id=workflow_node_execution.predecessor_node_id,
|
||||||
|
inputs=workflow_node_execution.inputs,
|
||||||
|
process_data=workflow_node_execution.process_data,
|
||||||
|
outputs=workflow_node_execution.outputs,
|
||||||
|
status=workflow_node_execution.status,
|
||||||
|
error=workflow_node_execution.error,
|
||||||
|
elapsed_time=workflow_node_execution.elapsed_time,
|
||||||
|
execution_metadata=workflow_node_execution.metadata,
|
||||||
|
created_at=int(workflow_node_execution.created_at.timestamp()),
|
||||||
|
finished_at=int(workflow_node_execution.finished_at.timestamp()),
|
||||||
|
files=self.fetch_files_from_node_outputs(workflow_node_execution.outputs or {}),
|
||||||
|
parallel_id=event.parallel_id,
|
||||||
|
parallel_start_node_id=event.parallel_start_node_id,
|
||||||
|
parent_parallel_id=event.parent_parallel_id,
|
||||||
|
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
||||||
|
iteration_id=event.in_iteration_id,
|
||||||
|
loop_id=event.in_loop_id,
|
||||||
|
retry_index=event.retry_index,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
def workflow_parallel_branch_start_to_stream_response(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
task_id: str,
|
||||||
|
workflow_execution_id: str,
|
||||||
|
event: QueueParallelBranchRunStartedEvent,
|
||||||
|
) -> ParallelBranchStartStreamResponse:
|
||||||
|
return ParallelBranchStartStreamResponse(
|
||||||
|
task_id=task_id,
|
||||||
|
workflow_run_id=workflow_execution_id,
|
||||||
|
data=ParallelBranchStartStreamResponse.Data(
|
||||||
|
parallel_id=event.parallel_id,
|
||||||
|
parallel_branch_id=event.parallel_start_node_id,
|
||||||
|
parent_parallel_id=event.parent_parallel_id,
|
||||||
|
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
||||||
|
iteration_id=event.in_iteration_id,
|
||||||
|
loop_id=event.in_loop_id,
|
||||||
|
created_at=int(time.time()),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
def workflow_parallel_branch_finished_to_stream_response(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
task_id: str,
|
||||||
|
workflow_execution_id: str,
|
||||||
|
event: QueueParallelBranchRunSucceededEvent | QueueParallelBranchRunFailedEvent,
|
||||||
|
) -> ParallelBranchFinishedStreamResponse:
|
||||||
|
return ParallelBranchFinishedStreamResponse(
|
||||||
|
task_id=task_id,
|
||||||
|
workflow_run_id=workflow_execution_id,
|
||||||
|
data=ParallelBranchFinishedStreamResponse.Data(
|
||||||
|
parallel_id=event.parallel_id,
|
||||||
|
parallel_branch_id=event.parallel_start_node_id,
|
||||||
|
parent_parallel_id=event.parent_parallel_id,
|
||||||
|
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
||||||
|
iteration_id=event.in_iteration_id,
|
||||||
|
loop_id=event.in_loop_id,
|
||||||
|
status="succeeded" if isinstance(event, QueueParallelBranchRunSucceededEvent) else "failed",
|
||||||
|
error=event.error if isinstance(event, QueueParallelBranchRunFailedEvent) else None,
|
||||||
|
created_at=int(time.time()),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
def workflow_iteration_start_to_stream_response(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
task_id: str,
|
||||||
|
workflow_execution_id: str,
|
||||||
|
event: QueueIterationStartEvent,
|
||||||
|
) -> IterationNodeStartStreamResponse:
|
||||||
|
return IterationNodeStartStreamResponse(
|
||||||
|
task_id=task_id,
|
||||||
|
workflow_run_id=workflow_execution_id,
|
||||||
|
data=IterationNodeStartStreamResponse.Data(
|
||||||
|
id=event.node_id,
|
||||||
|
node_id=event.node_id,
|
||||||
|
node_type=event.node_type.value,
|
||||||
|
title=event.node_data.title,
|
||||||
|
created_at=int(time.time()),
|
||||||
|
extras={},
|
||||||
|
inputs=event.inputs or {},
|
||||||
|
metadata=event.metadata or {},
|
||||||
|
parallel_id=event.parallel_id,
|
||||||
|
parallel_start_node_id=event.parallel_start_node_id,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
def workflow_iteration_next_to_stream_response(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
task_id: str,
|
||||||
|
workflow_execution_id: str,
|
||||||
|
event: QueueIterationNextEvent,
|
||||||
|
) -> IterationNodeNextStreamResponse:
|
||||||
|
return IterationNodeNextStreamResponse(
|
||||||
|
task_id=task_id,
|
||||||
|
workflow_run_id=workflow_execution_id,
|
||||||
|
data=IterationNodeNextStreamResponse.Data(
|
||||||
|
id=event.node_id,
|
||||||
|
node_id=event.node_id,
|
||||||
|
node_type=event.node_type.value,
|
||||||
|
title=event.node_data.title,
|
||||||
|
index=event.index,
|
||||||
|
pre_iteration_output=event.output,
|
||||||
|
created_at=int(time.time()),
|
||||||
|
extras={},
|
||||||
|
parallel_id=event.parallel_id,
|
||||||
|
parallel_start_node_id=event.parallel_start_node_id,
|
||||||
|
parallel_mode_run_id=event.parallel_mode_run_id,
|
||||||
|
duration=event.duration,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
def workflow_iteration_completed_to_stream_response(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
task_id: str,
|
||||||
|
workflow_execution_id: str,
|
||||||
|
event: QueueIterationCompletedEvent,
|
||||||
|
) -> IterationNodeCompletedStreamResponse:
|
||||||
|
return IterationNodeCompletedStreamResponse(
|
||||||
|
task_id=task_id,
|
||||||
|
workflow_run_id=workflow_execution_id,
|
||||||
|
data=IterationNodeCompletedStreamResponse.Data(
|
||||||
|
id=event.node_id,
|
||||||
|
node_id=event.node_id,
|
||||||
|
node_type=event.node_type.value,
|
||||||
|
title=event.node_data.title,
|
||||||
|
outputs=event.outputs,
|
||||||
|
created_at=int(time.time()),
|
||||||
|
extras={},
|
||||||
|
inputs=event.inputs or {},
|
||||||
|
status=WorkflowNodeExecutionStatus.SUCCEEDED
|
||||||
|
if event.error is None
|
||||||
|
else WorkflowNodeExecutionStatus.FAILED,
|
||||||
|
error=None,
|
||||||
|
elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(),
|
||||||
|
total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0,
|
||||||
|
execution_metadata=event.metadata,
|
||||||
|
finished_at=int(time.time()),
|
||||||
|
steps=event.steps,
|
||||||
|
parallel_id=event.parallel_id,
|
||||||
|
parallel_start_node_id=event.parallel_start_node_id,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
def workflow_loop_start_to_stream_response(
|
||||||
|
self, *, task_id: str, workflow_execution_id: str, event: QueueLoopStartEvent
|
||||||
|
) -> LoopNodeStartStreamResponse:
|
||||||
|
return LoopNodeStartStreamResponse(
|
||||||
|
task_id=task_id,
|
||||||
|
workflow_run_id=workflow_execution_id,
|
||||||
|
data=LoopNodeStartStreamResponse.Data(
|
||||||
|
id=event.node_id,
|
||||||
|
node_id=event.node_id,
|
||||||
|
node_type=event.node_type.value,
|
||||||
|
title=event.node_data.title,
|
||||||
|
created_at=int(time.time()),
|
||||||
|
extras={},
|
||||||
|
inputs=event.inputs or {},
|
||||||
|
metadata=event.metadata or {},
|
||||||
|
parallel_id=event.parallel_id,
|
||||||
|
parallel_start_node_id=event.parallel_start_node_id,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
def workflow_loop_next_to_stream_response(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
task_id: str,
|
||||||
|
workflow_execution_id: str,
|
||||||
|
event: QueueLoopNextEvent,
|
||||||
|
) -> LoopNodeNextStreamResponse:
|
||||||
|
return LoopNodeNextStreamResponse(
|
||||||
|
task_id=task_id,
|
||||||
|
workflow_run_id=workflow_execution_id,
|
||||||
|
data=LoopNodeNextStreamResponse.Data(
|
||||||
|
id=event.node_id,
|
||||||
|
node_id=event.node_id,
|
||||||
|
node_type=event.node_type.value,
|
||||||
|
title=event.node_data.title,
|
||||||
|
index=event.index,
|
||||||
|
pre_loop_output=event.output,
|
||||||
|
created_at=int(time.time()),
|
||||||
|
extras={},
|
||||||
|
parallel_id=event.parallel_id,
|
||||||
|
parallel_start_node_id=event.parallel_start_node_id,
|
||||||
|
parallel_mode_run_id=event.parallel_mode_run_id,
|
||||||
|
duration=event.duration,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
def workflow_loop_completed_to_stream_response(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
task_id: str,
|
||||||
|
workflow_execution_id: str,
|
||||||
|
event: QueueLoopCompletedEvent,
|
||||||
|
) -> LoopNodeCompletedStreamResponse:
|
||||||
|
return LoopNodeCompletedStreamResponse(
|
||||||
|
task_id=task_id,
|
||||||
|
workflow_run_id=workflow_execution_id,
|
||||||
|
data=LoopNodeCompletedStreamResponse.Data(
|
||||||
|
id=event.node_id,
|
||||||
|
node_id=event.node_id,
|
||||||
|
node_type=event.node_type.value,
|
||||||
|
title=event.node_data.title,
|
||||||
|
outputs=event.outputs,
|
||||||
|
created_at=int(time.time()),
|
||||||
|
extras={},
|
||||||
|
inputs=event.inputs or {},
|
||||||
|
status=WorkflowNodeExecutionStatus.SUCCEEDED
|
||||||
|
if event.error is None
|
||||||
|
else WorkflowNodeExecutionStatus.FAILED,
|
||||||
|
error=None,
|
||||||
|
elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(),
|
||||||
|
total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0,
|
||||||
|
execution_metadata=event.metadata,
|
||||||
|
finished_at=int(time.time()),
|
||||||
|
steps=event.steps,
|
||||||
|
parallel_id=event.parallel_id,
|
||||||
|
parallel_start_node_id=event.parallel_start_node_id,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
def fetch_files_from_node_outputs(self, outputs_dict: Mapping[str, Any] | None) -> Sequence[Mapping[str, Any]]:
|
||||||
|
"""
|
||||||
|
Fetch files from node outputs
|
||||||
|
:param outputs_dict: node outputs dict
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
if not outputs_dict:
|
||||||
|
return []
|
||||||
|
|
||||||
|
files = [self._fetch_files_from_variable_value(output_value) for output_value in outputs_dict.values()]
|
||||||
|
# Remove None
|
||||||
|
files = [file for file in files if file]
|
||||||
|
# Flatten list
|
||||||
|
# Flatten the list of sequences into a single list of mappings
|
||||||
|
flattened_files = [file for sublist in files if sublist for file in sublist]
|
||||||
|
|
||||||
|
# Convert to tuple to match Sequence type
|
||||||
|
return tuple(flattened_files)
|
||||||
|
|
||||||
|
def _fetch_files_from_variable_value(self, value: Union[dict, list]) -> Sequence[Mapping[str, Any]]:
|
||||||
|
"""
|
||||||
|
Fetch files from variable value
|
||||||
|
:param value: variable value
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
if not value:
|
||||||
|
return []
|
||||||
|
|
||||||
|
files = []
|
||||||
|
if isinstance(value, list):
|
||||||
|
for item in value:
|
||||||
|
file = self._get_file_var_from_value(item)
|
||||||
|
if file:
|
||||||
|
files.append(file)
|
||||||
|
elif isinstance(value, dict):
|
||||||
|
file = self._get_file_var_from_value(value)
|
||||||
|
if file:
|
||||||
|
files.append(file)
|
||||||
|
|
||||||
|
return files
|
||||||
|
|
||||||
|
def _get_file_var_from_value(self, value: Union[dict, list]) -> Mapping[str, Any] | None:
|
||||||
|
"""
|
||||||
|
Get file var from value
|
||||||
|
:param value: variable value
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
if not value:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY:
|
||||||
|
return value
|
||||||
|
elif isinstance(value, File):
|
||||||
|
return value.to_dict()
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def handle_agent_log(self, task_id: str, event: QueueAgentLogEvent) -> AgentLogStreamResponse:
|
||||||
|
"""
|
||||||
|
Handle agent log
|
||||||
|
:param task_id: task id
|
||||||
|
:param event: agent log event
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
return AgentLogStreamResponse(
|
||||||
|
task_id=task_id,
|
||||||
|
data=AgentLogStreamResponse.Data(
|
||||||
|
node_execution_id=event.node_execution_id,
|
||||||
|
id=event.id,
|
||||||
|
parent_id=event.parent_id,
|
||||||
|
label=event.label,
|
||||||
|
error=event.error,
|
||||||
|
status=event.status,
|
||||||
|
data=event.data,
|
||||||
|
metadata=event.metadata,
|
||||||
|
node_id=event.node_id,
|
||||||
|
),
|
||||||
|
)
|
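For orientation, a minimal sketch of how the converter above is meant to be driven. The `app_generate_entity`, `task_id`, and `execution` values are illustrative stand-ins for objects produced elsewhere in the pipeline, not names from this commit:

    # Hypothetical driver code: build the converter once per task pipeline,
    # then map executions coming out of the cycle manager to stream responses.
    converter = WorkflowResponseConverter(application_generate_entity=app_generate_entity)
    start_resp = converter.workflow_start_to_stream_response(
        task_id=task_id,
        workflow_execution=execution,  # a WorkflowExecution from the cycle manager
    )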
@@ -18,16 +18,19 @@ from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
 from core.app.apps.workflow.app_queue_manager import WorkflowAppQueueManager
 from core.app.apps.workflow.app_runner import WorkflowAppRunner
 from core.app.apps.workflow.generate_response_converter import WorkflowAppGenerateResponseConverter
+from core.app.apps.workflow.generate_task_pipeline import WorkflowAppGenerateTaskPipeline
 from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity
 from core.app.entities.task_entities import WorkflowAppBlockingResponse, WorkflowAppStreamResponse
 from core.model_runtime.errors.invoke import InvokeAuthorizationError
 from core.ops.ops_trace_manager import TraceQueueManager
 from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
+from core.repositories.sqlalchemy_workflow_execution_repository import SQLAlchemyWorkflowExecutionRepository
+from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
-from core.workflow.workflow_app_generate_task_pipeline import WorkflowAppGenerateTaskPipeline
 from extensions.ext_database import db
 from factories import file_factory
 from models import Account, App, EndUser, Workflow, WorkflowNodeExecutionTriggeredFrom
+from models.enums import WorkflowRunTriggeredFrom

 logger = logging.getLogger(__name__)

@@ -136,9 +139,22 @@ class WorkflowAppGenerator(BaseAppGenerator):
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())

-        # Create workflow node execution repository
+        # Create repositories
+        #
+        # Create session factory
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
+        # Create workflow execution(aka workflow run) repository
+        if invoke_from == InvokeFrom.DEBUGGER:
+            workflow_triggered_from = WorkflowRunTriggeredFrom.DEBUGGING
+        else:
+            workflow_triggered_from = WorkflowRunTriggeredFrom.APP_RUN
+        workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
+            session_factory=session_factory,
+            user=user,
+            app_id=application_generate_entity.app_config.app_id,
+            triggered_from=workflow_triggered_from,
+        )
+        # Create workflow node execution repository
         workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
             session_factory=session_factory,
             user=user,

@@ -152,6 +168,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
             user=user,
             application_generate_entity=application_generate_entity,
             invoke_from=invoke_from,
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
             streaming=streaming,
             workflow_thread_pool_id=workflow_thread_pool_id,

@@ -165,6 +182,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
         user: Union[Account, EndUser],
         application_generate_entity: WorkflowAppGenerateEntity,
         invoke_from: InvokeFrom,
+        workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
         streaming: bool = True,
         workflow_thread_pool_id: Optional[str] = None,

@@ -209,6 +227,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
             workflow=workflow,
             queue_manager=queue_manager,
             user=user,
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
             stream=streaming,
         )

@@ -262,6 +281,17 @@ class WorkflowAppGenerator(BaseAppGenerator):
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())

+        # Create repositories
+        #
+        # Create session factory
+        session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
+        # Create workflow execution(aka workflow run) repository
+        workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
+            session_factory=session_factory,
+            user=user,
+            app_id=application_generate_entity.app_config.app_id,
+            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
+        )
         # Create workflow node execution repository
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)

@@ -278,6 +308,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
             user=user,
             invoke_from=InvokeFrom.DEBUGGER,
             application_generate_entity=application_generate_entity,
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
             streaming=streaming,
         )

@@ -327,6 +358,17 @@ class WorkflowAppGenerator(BaseAppGenerator):
         contexts.plugin_tool_providers.set({})
         contexts.plugin_tool_providers_lock.set(threading.Lock())

+        # Create repositories
+        #
+        # Create session factory
+        session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
+        # Create workflow execution(aka workflow run) repository
+        workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
+            session_factory=session_factory,
+            user=user,
+            app_id=application_generate_entity.app_config.app_id,
+            triggered_from=WorkflowRunTriggeredFrom.DEBUGGING,
+        )
         # Create workflow node execution repository
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)

@@ -343,6 +385,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
             user=user,
             invoke_from=InvokeFrom.DEBUGGER,
             application_generate_entity=application_generate_entity,
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
             streaming=streaming,
         )

@@ -400,6 +443,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
         workflow: Workflow,
         queue_manager: AppQueueManager,
         user: Union[Account, EndUser],
+        workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
         stream: bool = False,
     ) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:

@@ -419,8 +463,9 @@ class WorkflowAppGenerator(BaseAppGenerator):
             workflow=workflow,
             queue_manager=queue_manager,
             user=user,
-            stream=stream,
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
+            stream=stream,
         )

         try:
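Taken together, the hunks above make one wiring change: the generator now builds a workflow-execution repository alongside the existing node-execution repository and threads it through every entry point. Condensed into a sketch (all names exactly as in the diff; this is a summary, not additional committed code):

    # Both repositories share one session factory; the run trigger is derived
    # from the invoke source (debugger runs are recorded as DEBUGGING).
    session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
    workflow_triggered_from = (
        WorkflowRunTriggeredFrom.DEBUGGING
        if invoke_from == InvokeFrom.DEBUGGER
        else WorkflowRunTriggeredFrom.APP_RUN
    )
    workflow_execution_repository = SQLAlchemyWorkflowExecutionRepository(
        session_factory=session_factory,
        user=user,
        app_id=application_generate_entity.app_config.app_id,
        triggered_from=workflow_triggered_from,
    )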
@@ -3,10 +3,12 @@ import time
 from collections.abc import Generator
 from typing import Optional, Union

+from sqlalchemy import select
 from sqlalchemy.orm import Session

 from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME
 from core.app.apps.base_app_queue_manager import AppQueueManager
+from core.app.apps.common.workflow_response_converter import WorkflowResponseConverter
 from core.app.entities.app_invoke_entities import (
     InvokeFrom,
     WorkflowAppGenerateEntity,

@@ -53,7 +55,9 @@ from core.app.entities.task_entities import (
 from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
 from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
 from core.ops.ops_trace_manager import TraceQueueManager
+from core.workflow.entities.workflow_execution_entities import WorkflowExecution
 from core.workflow.enums import SystemVariableKey
+from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from core.workflow.workflow_cycle_manager import WorkflowCycleManager
 from extensions.ext_database import db

@@ -83,6 +87,7 @@ class WorkflowAppGenerateTaskPipeline:
         queue_manager: AppQueueManager,
         user: Union[Account, EndUser],
         stream: bool,
+        workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
     ) -> None:
         self._base_task_pipeline = BasedGenerateTaskPipeline(

@@ -111,9 +116,14 @@ class WorkflowAppGenerateTaskPipeline:
                 SystemVariableKey.WORKFLOW_ID: workflow.id,
                 SystemVariableKey.WORKFLOW_RUN_ID: application_generate_entity.workflow_run_id,
             },
+            workflow_execution_repository=workflow_execution_repository,
             workflow_node_execution_repository=workflow_node_execution_repository,
         )

+        self._workflow_response_converter = WorkflowResponseConverter(
+            application_generate_entity=application_generate_entity,
+        )
+
         self._application_generate_entity = application_generate_entity
         self._workflow_id = workflow.id
         self._workflow_features_dict = workflow.features_dict

@@ -258,17 +268,15 @@ class WorkflowAppGenerateTaskPipeline:

                 with Session(db.engine, expire_on_commit=False) as session:
                     # init workflow run
-                    workflow_run = self._workflow_cycle_manager._handle_workflow_run_start(
+                    workflow_execution = self._workflow_cycle_manager.handle_workflow_run_start(
                         session=session,
                         workflow_id=self._workflow_id,
-                        user_id=self._user_id,
-                        created_by_role=self._created_by_role,
                     )
-                    self._workflow_run_id = workflow_run.id
-                    start_resp = self._workflow_cycle_manager._workflow_start_to_stream_response(
-                        session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
+                    self._workflow_run_id = workflow_execution.id
+                    start_resp = self._workflow_response_converter.workflow_start_to_stream_response(
+                        task_id=self._application_generate_entity.task_id,
+                        workflow_execution=workflow_execution,
                     )
-                    session.commit()

                     yield start_resp
             elif isinstance(

@@ -278,13 +286,11 @@ class WorkflowAppGenerateTaskPipeline:
                 if not self._workflow_run_id:
                     raise ValueError("workflow run not initialized.")
                 with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                        session=session, workflow_run_id=self._workflow_run_id
-                    )
-                    workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_retried(
-                        workflow_run=workflow_run, event=event
+                    workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_retried(
+                        workflow_execution_id=self._workflow_run_id,
+                        event=event,
                     )
-                    response = self._workflow_cycle_manager._workflow_node_retry_to_stream_response(
+                    response = self._workflow_response_converter.workflow_node_retry_to_stream_response(
                         event=event,
                         task_id=self._application_generate_entity.task_id,
                         workflow_node_execution=workflow_node_execution,

@@ -297,27 +303,22 @@ class WorkflowAppGenerateTaskPipeline:
                 if not self._workflow_run_id:
                     raise ValueError("workflow run not initialized.")

-                with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                        session=session, workflow_run_id=self._workflow_run_id
-                    )
-                    workflow_node_execution = self._workflow_cycle_manager._handle_node_execution_start(
-                        workflow_run=workflow_run, event=event
-                    )
-                    node_start_response = self._workflow_cycle_manager._workflow_node_start_to_stream_response(
-                        event=event,
-                        task_id=self._application_generate_entity.task_id,
-                        workflow_node_execution=workflow_node_execution,
-                    )
-                    session.commit()
+                workflow_node_execution = self._workflow_cycle_manager.handle_node_execution_start(
+                    workflow_execution_id=self._workflow_run_id, event=event
+                )
+                node_start_response = self._workflow_response_converter.workflow_node_start_to_stream_response(
+                    event=event,
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_node_execution=workflow_node_execution,
+                )

                 if node_start_response:
                     yield node_start_response
             elif isinstance(event, QueueNodeSucceededEvent):
-                workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_success(
+                workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_success(
                     event=event
                 )
-                node_success_response = self._workflow_cycle_manager._workflow_node_finish_to_stream_response(
+                node_success_response = self._workflow_response_converter.workflow_node_finish_to_stream_response(
                     event=event,
                     task_id=self._application_generate_entity.task_id,
                     workflow_node_execution=workflow_node_execution,

@@ -332,10 +333,10 @@ class WorkflowAppGenerateTaskPipeline:
                 | QueueNodeInLoopFailedEvent
                 | QueueNodeExceptionEvent,
             ):
-                workflow_node_execution = self._workflow_cycle_manager._handle_workflow_node_execution_failed(
+                workflow_node_execution = self._workflow_cycle_manager.handle_workflow_node_execution_failed(
                     event=event,
                 )
-                node_failed_response = self._workflow_cycle_manager._workflow_node_finish_to_stream_response(
+                node_failed_response = self._workflow_response_converter.workflow_node_finish_to_stream_response(
                     event=event,
                     task_id=self._application_generate_entity.task_id,
                     workflow_node_execution=workflow_node_execution,

@@ -348,18 +349,13 @@ class WorkflowAppGenerateTaskPipeline:
                 if not self._workflow_run_id:
                     raise ValueError("workflow run not initialized.")

-                with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                        session=session, workflow_run_id=self._workflow_run_id
-                    )
-                    parallel_start_resp = (
-                        self._workflow_cycle_manager._workflow_parallel_branch_start_to_stream_response(
-                            session=session,
-                            task_id=self._application_generate_entity.task_id,
-                            workflow_run=workflow_run,
-                            event=event,
-                        )
+                parallel_start_resp = (
+                    self._workflow_response_converter.workflow_parallel_branch_start_to_stream_response(
+                        task_id=self._application_generate_entity.task_id,
+                        workflow_execution_id=self._workflow_run_id,
+                        event=event,
                     )
+                )

                 yield parallel_start_resp

@@ -367,18 +363,13 @@ class WorkflowAppGenerateTaskPipeline:
                 if not self._workflow_run_id:
                     raise ValueError("workflow run not initialized.")

-                with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                        session=session, workflow_run_id=self._workflow_run_id
-                    )
-                    parallel_finish_resp = (
-                        self._workflow_cycle_manager._workflow_parallel_branch_finished_to_stream_response(
-                            session=session,
-                            task_id=self._application_generate_entity.task_id,
-                            workflow_run=workflow_run,
-                            event=event,
-                        )
+                parallel_finish_resp = (
+                    self._workflow_response_converter.workflow_parallel_branch_finished_to_stream_response(
+                        task_id=self._application_generate_entity.task_id,
+                        workflow_execution_id=self._workflow_run_id,
+                        event=event,
                     )
+                )

                 yield parallel_finish_resp

@@ -386,16 +377,11 @@ class WorkflowAppGenerateTaskPipeline:
                 if not self._workflow_run_id:
                     raise ValueError("workflow run not initialized.")

-                with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                        session=session, workflow_run_id=self._workflow_run_id
-                    )
-                    iter_start_resp = self._workflow_cycle_manager._workflow_iteration_start_to_stream_response(
-                        session=session,
-                        task_id=self._application_generate_entity.task_id,
-                        workflow_run=workflow_run,
-                        event=event,
-                    )
+                iter_start_resp = self._workflow_response_converter.workflow_iteration_start_to_stream_response(
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution_id=self._workflow_run_id,
+                    event=event,
+                )

                 yield iter_start_resp

@@ -403,16 +389,11 @@ class WorkflowAppGenerateTaskPipeline:
                 if not self._workflow_run_id:
                     raise ValueError("workflow run not initialized.")

-                with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                        session=session, workflow_run_id=self._workflow_run_id
-                    )
-                    iter_next_resp = self._workflow_cycle_manager._workflow_iteration_next_to_stream_response(
-                        session=session,
-                        task_id=self._application_generate_entity.task_id,
-                        workflow_run=workflow_run,
-                        event=event,
-                    )
+                iter_next_resp = self._workflow_response_converter.workflow_iteration_next_to_stream_response(
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution_id=self._workflow_run_id,
+                    event=event,
+                )

                 yield iter_next_resp

@@ -420,16 +401,11 @@ class WorkflowAppGenerateTaskPipeline:
                 if not self._workflow_run_id:
                     raise ValueError("workflow run not initialized.")

-                with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                        session=session, workflow_run_id=self._workflow_run_id
-                    )
-                    iter_finish_resp = self._workflow_cycle_manager._workflow_iteration_completed_to_stream_response(
-                        session=session,
-                        task_id=self._application_generate_entity.task_id,
-                        workflow_run=workflow_run,
-                        event=event,
-                    )
+                iter_finish_resp = self._workflow_response_converter.workflow_iteration_completed_to_stream_response(
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution_id=self._workflow_run_id,
+                    event=event,
+                )

                 yield iter_finish_resp

@@ -437,16 +413,11 @@ class WorkflowAppGenerateTaskPipeline:
                 if not self._workflow_run_id:
                     raise ValueError("workflow run not initialized.")

-                with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                        session=session, workflow_run_id=self._workflow_run_id
-                    )
-                    loop_start_resp = self._workflow_cycle_manager._workflow_loop_start_to_stream_response(
-                        session=session,
-                        task_id=self._application_generate_entity.task_id,
-                        workflow_run=workflow_run,
-                        event=event,
-                    )
+                loop_start_resp = self._workflow_response_converter.workflow_loop_start_to_stream_response(
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution_id=self._workflow_run_id,
+                    event=event,
+                )

                 yield loop_start_resp

@@ -454,16 +425,11 @@ class WorkflowAppGenerateTaskPipeline:
                 if not self._workflow_run_id:
                     raise ValueError("workflow run not initialized.")

-                with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                        session=session, workflow_run_id=self._workflow_run_id
-                    )
-                    loop_next_resp = self._workflow_cycle_manager._workflow_loop_next_to_stream_response(
-                        session=session,
-                        task_id=self._application_generate_entity.task_id,
-                        workflow_run=workflow_run,
-                        event=event,
-                    )
+                loop_next_resp = self._workflow_response_converter.workflow_loop_next_to_stream_response(
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution_id=self._workflow_run_id,
+                    event=event,
+                )

                 yield loop_next_resp

@@ -471,16 +437,11 @@ class WorkflowAppGenerateTaskPipeline:
                 if not self._workflow_run_id:
                     raise ValueError("workflow run not initialized.")

-                with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._get_workflow_run(
-                        session=session, workflow_run_id=self._workflow_run_id
-                    )
-                    loop_finish_resp = self._workflow_cycle_manager._workflow_loop_completed_to_stream_response(
-                        session=session,
-                        task_id=self._application_generate_entity.task_id,
-                        workflow_run=workflow_run,
-                        event=event,
-                    )
+                loop_finish_resp = self._workflow_response_converter.workflow_loop_completed_to_stream_response(
+                    task_id=self._application_generate_entity.task_id,
+                    workflow_execution_id=self._workflow_run_id,
+                    event=event,
+                )

                 yield loop_finish_resp

@@ -491,10 +452,8 @@ class WorkflowAppGenerateTaskPipeline:
                     raise ValueError("graph runtime state not initialized.")

                 with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._handle_workflow_run_success(
-                        session=session,
+                    workflow_execution = self._workflow_cycle_manager.handle_workflow_run_success(
                         workflow_run_id=self._workflow_run_id,
-                        start_at=graph_runtime_state.start_at,
                         total_tokens=graph_runtime_state.total_tokens,
                         total_steps=graph_runtime_state.node_run_steps,
                         outputs=event.outputs,

@@ -503,12 +462,12 @@ class WorkflowAppGenerateTaskPipeline:
                     )

                     # save workflow app log
-                    self._save_workflow_app_log(session=session, workflow_run=workflow_run)
+                    self._save_workflow_app_log(session=session, workflow_execution=workflow_execution)

-                    workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
+                    workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
                         session=session,
                         task_id=self._application_generate_entity.task_id,
-                        workflow_run=workflow_run,
+                        workflow_execution=workflow_execution,
                     )
                     session.commit()

@@ -520,10 +479,8 @@ class WorkflowAppGenerateTaskPipeline:
                    raise ValueError("graph runtime state not initialized.")

                 with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._handle_workflow_run_partial_success(
-                        session=session,
+                    workflow_execution = self._workflow_cycle_manager.handle_workflow_run_partial_success(
                         workflow_run_id=self._workflow_run_id,
-                        start_at=graph_runtime_state.start_at,
                         total_tokens=graph_runtime_state.total_tokens,
                         total_steps=graph_runtime_state.node_run_steps,
                         outputs=event.outputs,

@@ -533,10 +490,12 @@ class WorkflowAppGenerateTaskPipeline:
                     )

                     # save workflow app log
-                    self._save_workflow_app_log(session=session, workflow_run=workflow_run)
+                    self._save_workflow_app_log(session=session, workflow_execution=workflow_execution)

-                    workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
-                        session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
+                    workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
+                        session=session,
+                        task_id=self._application_generate_entity.task_id,
+                        workflow_execution=workflow_execution,
                     )
                     session.commit()

@@ -548,26 +507,28 @@ class WorkflowAppGenerateTaskPipeline:
                     raise ValueError("graph runtime state not initialized.")

                 with Session(db.engine, expire_on_commit=False) as session:
-                    workflow_run = self._workflow_cycle_manager._handle_workflow_run_failed(
-                        session=session,
+                    workflow_execution = self._workflow_cycle_manager.handle_workflow_run_failed(
                         workflow_run_id=self._workflow_run_id,
-                        start_at=graph_runtime_state.start_at,
                         total_tokens=graph_runtime_state.total_tokens,
                         total_steps=graph_runtime_state.node_run_steps,
                         status=WorkflowRunStatus.FAILED
                         if isinstance(event, QueueWorkflowFailedEvent)
                         else WorkflowRunStatus.STOPPED,
-                        error=event.error if isinstance(event, QueueWorkflowFailedEvent) else event.get_stop_reason(),
+                        error_message=event.error
+                        if isinstance(event, QueueWorkflowFailedEvent)
+                        else event.get_stop_reason(),
                         conversation_id=None,
                         trace_manager=trace_manager,
                         exceptions_count=event.exceptions_count if isinstance(event, QueueWorkflowFailedEvent) else 0,
                     )

                     # save workflow app log
-                    self._save_workflow_app_log(session=session, workflow_run=workflow_run)
+                    self._save_workflow_app_log(session=session, workflow_execution=workflow_execution)

-                    workflow_finish_resp = self._workflow_cycle_manager._workflow_finish_to_stream_response(
-                        session=session, task_id=self._application_generate_entity.task_id, workflow_run=workflow_run
+                    workflow_finish_resp = self._workflow_response_converter.workflow_finish_to_stream_response(
+                        session=session,
+                        task_id=self._application_generate_entity.task_id,
+                        workflow_execution=workflow_execution,
                    )
                     session.commit()

@@ -586,7 +547,7 @@ class WorkflowAppGenerateTaskPipeline:
                         delta_text, from_variable_selector=event.from_variable_selector
                     )
             elif isinstance(event, QueueAgentLogEvent):
-                yield self._workflow_cycle_manager._handle_agent_log(
+                yield self._workflow_response_converter.handle_agent_log(
                     task_id=self._application_generate_entity.task_id, event=event
                 )
             else:

@@ -595,11 +556,9 @@ class WorkflowAppGenerateTaskPipeline:
         if tts_publisher:
             tts_publisher.publish(None)

-    def _save_workflow_app_log(self, *, session: Session, workflow_run: WorkflowRun) -> None:
-        """
-        Save workflow app log.
-        :return:
-        """
+    def _save_workflow_app_log(self, *, session: Session, workflow_execution: WorkflowExecution) -> None:
+        workflow_run = session.scalar(select(WorkflowRun).where(WorkflowRun.id == workflow_execution.id))
+        assert workflow_run is not None
         invoke_from = self._application_generate_entity.invoke_from
         if invoke_from == InvokeFrom.SERVICE_API:
             created_from = WorkflowAppLogCreatedFrom.SERVICE_API
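After this refactor, every event handler above follows the same two-step shape: the cycle manager resolves domain state, then the stateless response converter renders it, so most handlers no longer need to open a database session. A condensed sketch of that shape (a hypothetical helper for illustration; dispatch and error handling are elided):

    def _handle_node_start(self, event: QueueNodeStartedEvent):
        # Hypothetical condensation of the node-start branch shown above.
        if not self._workflow_run_id:
            raise ValueError("workflow run not initialized.")
        node_execution = self._workflow_cycle_manager.handle_node_execution_start(
            workflow_execution_id=self._workflow_run_id, event=event
        )
        response = self._workflow_response_converter.workflow_node_start_to_stream_response(
            event=event,
            task_id=self._application_generate_entity.task_id,
            workflow_node_execution=node_execution,
        )
        if response:
            yield response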
@@ -190,7 +190,7 @@ class WorkflowStartStreamResponse(StreamResponse):
         id: str
         workflow_id: str
         sequence_number: int
-        inputs: dict
+        inputs: Mapping[str, Any]
         created_at: int

     event: StreamEvent = StreamEvent.WORKFLOW_STARTED

@@ -212,7 +212,7 @@ class WorkflowFinishStreamResponse(StreamResponse):
         workflow_id: str
         sequence_number: int
         status: str
-        outputs: Optional[dict] = None
+        outputs: Optional[Mapping[str, Any]] = None
         error: Optional[str] = None
         elapsed_time: float
         total_tokens: int

@@ -788,7 +788,7 @@ class WorkflowAppBlockingResponse(AppBlockingResponse):
         id: str
         workflow_id: str
         status: str
-        outputs: Optional[dict] = None
+        outputs: Optional[Mapping[str, Any]] = None
         error: Optional[str] = None
         elapsed_time: float
         total_tokens: int
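The three hunks above relax `dict` annotations to `Mapping[str, Any]`. `Mapping` is the read-only abstract base class, so callers may pass any mapping implementation and the response models signal that they will not mutate the value. A self-contained illustration (an assumed example, not from the commit):

    from collections.abc import Mapping
    from types import MappingProxyType
    from typing import Any

    def count_outputs(outputs: Mapping[str, Any]) -> int:
        # Read-only access only; mutating methods are not part of Mapping.
        return len(outputs)

    count_outputs({"answer": 42})                     # a plain dict still works
    count_outputs(MappingProxyType({"answer": 42}))   # read-only views now type-check too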
@ -30,6 +30,7 @@ from core.ops.entities.trace_entity import (
|
|||||||
WorkflowTraceInfo,
|
WorkflowTraceInfo,
|
||||||
)
|
)
|
||||||
from core.ops.utils import get_message_data
|
from core.ops.utils import get_message_data
|
||||||
|
from core.workflow.entities.workflow_execution_entities import WorkflowExecution
|
||||||
from extensions.ext_database import db
|
from extensions.ext_database import db
|
||||||
from extensions.ext_storage import storage
|
from extensions.ext_storage import storage
|
||||||
from models.model import App, AppModelConfig, Conversation, Message, MessageFile, TraceAppConfig
|
from models.model import App, AppModelConfig, Conversation, Message, MessageFile, TraceAppConfig
|
||||||
@ -234,7 +235,11 @@ class OpsTraceManager:
             return None

         tracing_provider = app_ops_trace_config.get("tracing_provider")
-        if tracing_provider is None or tracing_provider not in provider_config_map:
+        if tracing_provider is None:
+            return None
+        try:
+            provider_config_map[tracing_provider]
+        except KeyError:
             return None

         # decrypt_token
@ -287,10 +292,11 @@ class OpsTraceManager:
         :return:
         """
         # auth check
-        try:
-            provider_config_map[tracing_provider]
-        except KeyError:
-            raise ValueError(f"Invalid tracing provider: {tracing_provider}")
+        if tracing_provider is not None:
+            try:
+                provider_config_map[tracing_provider]
+            except KeyError:
+                raise ValueError(f"Invalid tracing provider: {tracing_provider}")

         app_config: Optional[App] = db.session.query(App).filter(App.id == app_id).first()
         if not app_config:
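Note on the two hunks above: both replace a membership test against `provider_config_map` with an explicit `try`/`except KeyError` probe, and the auth check now tolerates a missing provider instead of raising. A minimal sketch of the same guard pattern, with a hypothetical registry (the names are illustrative, not Dify's actual map):

```python
# Hypothetical provider registry for illustration only.
provider_config_map = {"langfuse": object(), "langsmith": object()}

def resolve_provider(tracing_provider: str | None):
    # A missing provider is a normal case: bail out quietly.
    if tracing_provider is None:
        return None
    # EAFP probe: a plain dict lookup raises KeyError for unknown keys,
    # equivalent to `tracing_provider not in provider_config_map`.
    try:
        return provider_config_map[tracing_provider]
    except KeyError:
        return None
```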
@ -369,7 +375,7 @@ class TraceTask:
         self,
         trace_type: Any,
         message_id: Optional[str] = None,
-        workflow_run: Optional[WorkflowRun] = None,
+        workflow_execution: Optional[WorkflowExecution] = None,
         conversation_id: Optional[str] = None,
         user_id: Optional[str] = None,
         timer: Optional[Any] = None,
@ -377,7 +383,7 @@ class TraceTask:
     ):
         self.trace_type = trace_type
         self.message_id = message_id
-        self.workflow_run_id = workflow_run.id if workflow_run else None
+        self.workflow_run_id = workflow_execution.id if workflow_execution else None
         self.conversation_id = conversation_id
         self.user_id = user_id
         self.timer = timer
@ -405,7 +405,29 @@ class RetrievalService:
                     record["child_chunks"] = segment_child_map[record["segment"].id].get("child_chunks")  # type: ignore
                     record["score"] = segment_child_map[record["segment"].id]["max_score"]

-            return [RetrievalSegments(**record) for record in records]
+            result = []
+            for record in records:
+                # Extract segment
+                segment = record["segment"]
+
+                # Extract child_chunks, ensuring it's a list or None
+                child_chunks = record.get("child_chunks")
+                if not isinstance(child_chunks, list):
+                    child_chunks = None
+
+                # Extract score, ensuring it's a float or None
+                score_value = record.get("score")
+                score = (
+                    float(score_value)
+                    if score_value is not None and isinstance(score_value, int | float | str)
+                    else None
+                )
+
+                # Create RetrievalSegments object
+                retrieval_segment = RetrievalSegments(segment=segment, child_chunks=child_chunks, score=score)
+                result.append(retrieval_segment)
+
+            return result
         except Exception as e:
             db.session.rollback()
             raise e
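The hunk above trades `RetrievalSegments(**record)` for explicit field extraction, so unexpected shapes in `record` are normalized before they reach the model constructor. A small sketch of why the splat form is fragile, using an illustrative stand-in model rather than the real `RetrievalSegments`:

```python
from typing import Optional

from pydantic import BaseModel, ValidationError

class RetrievalSegmentsSketch(BaseModel):  # illustrative, not Dify's class
    segment: str
    child_chunks: Optional[list] = None
    score: Optional[float] = None

record = {"segment": "s1", "child_chunks": {"unexpected": "shape"}, "score": "0.87"}

try:
    RetrievalSegmentsSketch(**record)  # splat: the malformed child_chunks hits the validator
except ValidationError as e:
    print("splat construction failed at:", e.errors()[0]["loc"])

# Explicit extraction normalizes each field before construction:
child_chunks = record.get("child_chunks")
if not isinstance(child_chunks, list):
    child_chunks = None
score_value = record.get("score")
score = float(score_value) if isinstance(score_value, (int, float, str)) else None
obj = RetrievalSegmentsSketch(segment=record["segment"], child_chunks=child_chunks, score=score)
print(obj.score)  # 0.87
```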
@ -23,7 +23,8 @@ logger = logging.getLogger(__name__)
 class OpenSearchConfig(BaseModel):
     host: str
     port: int
-    secure: bool = False
+    secure: bool = False  # use_ssl
+    verify_certs: bool = True
     auth_method: Literal["basic", "aws_managed_iam"] = "basic"
     user: Optional[str] = None
     password: Optional[str] = None
@ -42,6 +43,8 @@ class OpenSearchConfig(BaseModel):
                 raise ValueError("config OPENSEARCH_AWS_REGION is required for AWS_MANAGED_IAM auth method")
             if not values.get("aws_service"):
                 raise ValueError("config OPENSEARCH_AWS_SERVICE is required for AWS_MANAGED_IAM auth method")
+        if not values.get("OPENSEARCH_SECURE") and values.get("OPENSEARCH_VERIFY_CERTS"):
+            raise ValueError("verify_certs=True requires secure (HTTPS) connection")
         return values

     def create_aws_managed_iam_auth(self) -> Urllib3AWSV4SignerAuth:
@ -57,7 +60,7 @@ class OpenSearchConfig(BaseModel):
         params = {
             "hosts": [{"host": self.host, "port": self.port}],
             "use_ssl": self.secure,
-            "verify_certs": self.secure,
+            "verify_certs": self.verify_certs,
             "connection_class": Urllib3HttpConnection,
             "pool_maxsize": 20,
         }
@ -279,6 +282,7 @@ class OpenSearchVectorFactory(AbstractVectorFactory):
             host=dify_config.OPENSEARCH_HOST or "localhost",
             port=dify_config.OPENSEARCH_PORT,
             secure=dify_config.OPENSEARCH_SECURE,
+            verify_certs=dify_config.OPENSEARCH_VERIFY_CERTS,
             auth_method=dify_config.OPENSEARCH_AUTH_METHOD.value,
             user=dify_config.OPENSEARCH_USER,
             password=dify_config.OPENSEARCH_PASSWORD,
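Taken together, the OpenSearch hunks split TLS transport (`secure`, i.e. `use_ssl`) from certificate verification (`verify_certs`), defaulting verification on; previously verification was silently tied to `secure`. A minimal sketch of what the resulting parameters mean, using the opensearch-py client directly (host and credentials are placeholders):

```python
from opensearchpy import OpenSearch, Urllib3HttpConnection

# Placeholders; in Dify these values come from the OPENSEARCH_* environment variables.
client = OpenSearch(
    hosts=[{"host": "opensearch.example.internal", "port": 9200}],
    http_auth=("admin", "admin"),
    use_ssl=True,        # OPENSEARCH_SECURE: encrypt the connection (HTTPS)
    verify_certs=True,   # OPENSEARCH_VERIFY_CERTS: also validate the server certificate
    connection_class=Urllib3HttpConnection,
    pool_maxsize=20,
)
print(client.ping())
```

The added validator enforces the one combination that makes no sense: `verify_certs=True` over a plain-HTTP connection, where there is no certificate to verify.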
@ -271,12 +271,15 @@ class TencentVector(BaseVector):

         for result in res[0]:
             meta = result.get(self.field_metadata)
-            score = result.get("score", 0.0)
+            if isinstance(meta, str):
+                # Compatible with version 1.1.3 and below.
+                meta = json.loads(meta)
+            score = 1 - result.get("score", 0.0)
             if score > score_threshold:
                 meta["score"] = score
                 doc = Document(page_content=result.get(self.field_text), metadata=meta)
                 docs.append(doc)

         return docs

     def delete(self) -> None:
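Two fixes land in this hunk: string metadata from older Tencent VectorDB versions (1.1.3 and below) is JSON-decoded before use, and the raw `score` from the search result is treated as a distance and converted to a similarity via `1 - score` before the threshold comparison (that reading, a cosine-style metric where smaller distance means more similar, follows from the conversion itself rather than anything else shown here). A self-contained sketch of the fixed loop, with mocked result rows:

```python
import json

def to_documents(results, score_threshold=0.0):
    # `results` mimics Tencent VectorDB output rows; field names are illustrative.
    docs = []
    for result in results:
        meta = result.get("metadata")
        if isinstance(meta, str):
            # Older server versions return metadata as a JSON string.
            meta = json.loads(meta)
        score = 1 - result.get("score", 0.0)  # distance -> similarity
        if score > score_threshold:
            meta["score"] = score
            docs.append((result.get("text"), meta))
    return docs

print(to_documents([{"text": "t", "metadata": '{"k": 1}', "score": 0.2}]))
# [('t', {'k': 1, 'score': 0.8})]
```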
@ -190,7 +190,7 @@ class DatasetRetrieval:
             retrieve_config.rerank_mode or "reranking_model",
             retrieve_config.reranking_model,
             retrieve_config.weights,
-            retrieve_config.reranking_enabled or True,
+            True if retrieve_config.reranking_enabled is None else retrieve_config.reranking_enabled,
             message_id,
             metadata_filter_document_ids,
             metadata_condition,
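The one-line change above fixes a classic truthiness bug: `reranking_enabled or True` evaluates to `True` even when the caller explicitly passed `False`, because `False or True` is `True`. The ternary preserves `False` and only substitutes the default when the value is `None`:

```python
def resolve_reranking(reranking_enabled):
    buggy = reranking_enabled or True
    fixed = True if reranking_enabled is None else reranking_enabled
    return buggy, fixed

print(resolve_reranking(None))   # (True, True)  - default applied either way
print(resolve_reranking(True))   # (True, True)
print(resolve_reranking(False))  # (True, False) - only the fix honors an explicit False
```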
@ -0,0 +1,242 @@
+"""
+SQLAlchemy implementation of the WorkflowExecutionRepository.
+"""
+
+import json
+import logging
+from typing import Optional, Union
+
+from sqlalchemy import select
+from sqlalchemy.engine import Engine
+from sqlalchemy.orm import sessionmaker
+
+from core.workflow.entities.workflow_execution_entities import (
+    WorkflowExecution,
+    WorkflowExecutionStatus,
+    WorkflowType,
+)
+from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
+from models import (
+    Account,
+    CreatorUserRole,
+    EndUser,
+    WorkflowRun,
+)
+from models.enums import WorkflowRunTriggeredFrom
+
+logger = logging.getLogger(__name__)
+
+
+class SQLAlchemyWorkflowExecutionRepository(WorkflowExecutionRepository):
+    """
+    SQLAlchemy implementation of the WorkflowExecutionRepository interface.
+
+    This implementation supports multi-tenancy by filtering operations based on tenant_id.
+    Each method creates its own session, handles the transaction, and commits changes
+    to the database. This prevents long-running connections in the workflow core.
+
+    This implementation also includes an in-memory cache for workflow executions to improve
+    performance by reducing database queries.
+    """
+
+    def __init__(
+        self,
+        session_factory: sessionmaker | Engine,
+        user: Union[Account, EndUser],
+        app_id: Optional[str],
+        triggered_from: Optional[WorkflowRunTriggeredFrom],
+    ):
+        """
+        Initialize the repository with a SQLAlchemy sessionmaker or engine and context information.
+
+        Args:
+            session_factory: SQLAlchemy sessionmaker or engine for creating sessions
+            user: Account or EndUser object containing tenant_id, user ID, and role information
+            app_id: App ID for filtering by application (can be None)
+            triggered_from: Source of the execution trigger (DEBUGGING or APP_RUN)
+        """
+        # If an engine is provided, create a sessionmaker from it
+        if isinstance(session_factory, Engine):
+            self._session_factory = sessionmaker(bind=session_factory, expire_on_commit=False)
+        elif isinstance(session_factory, sessionmaker):
+            self._session_factory = session_factory
+        else:
+            raise ValueError(
+                f"Invalid session_factory type {type(session_factory).__name__}; expected sessionmaker or Engine"
+            )
+
+        # Extract tenant_id from user
+        tenant_id: str | None = user.tenant_id if isinstance(user, EndUser) else user.current_tenant_id
+        if not tenant_id:
+            raise ValueError("User must have a tenant_id or current_tenant_id")
+        self._tenant_id = tenant_id
+
+        # Store app context
+        self._app_id = app_id
+
+        # Extract user context
+        self._triggered_from = triggered_from
+        self._creator_user_id = user.id
+
+        # Determine user role based on user type
+        self._creator_user_role = CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER
+
+        # Initialize in-memory cache for workflow executions
+        # Key: execution_id, Value: WorkflowRun (DB model)
+        self._execution_cache: dict[str, WorkflowRun] = {}
+
+    def _to_domain_model(self, db_model: WorkflowRun) -> WorkflowExecution:
+        """
+        Convert a database model to a domain model.
+
+        Args:
+            db_model: The database model to convert
+
+        Returns:
+            The domain model
+        """
+        # Parse JSON fields
+        inputs = db_model.inputs_dict
+        outputs = db_model.outputs_dict
+        graph = db_model.graph_dict
+
+        # Convert status to domain enum
+        status = WorkflowExecutionStatus(db_model.status)
+
+        return WorkflowExecution(
+            id=db_model.id,
+            workflow_id=db_model.workflow_id,
+            sequence_number=db_model.sequence_number,
+            type=WorkflowType(db_model.type),
+            workflow_version=db_model.version,
+            graph=graph,
+            inputs=inputs,
+            outputs=outputs,
+            status=status,
+            error_message=db_model.error or "",
+            total_tokens=db_model.total_tokens,
+            total_steps=db_model.total_steps,
+            exceptions_count=db_model.exceptions_count,
+            started_at=db_model.created_at,
+            finished_at=db_model.finished_at,
+        )
+
+    def _to_db_model(self, domain_model: WorkflowExecution) -> WorkflowRun:
+        """
+        Convert a domain model to a database model.
+
+        Args:
+            domain_model: The domain model to convert
+
+        Returns:
+            The database model
+        """
+        # Use values from constructor if provided
+        if not self._triggered_from:
+            raise ValueError("triggered_from is required in repository constructor")
+        if not self._creator_user_id:
+            raise ValueError("created_by is required in repository constructor")
+        if not self._creator_user_role:
+            raise ValueError("created_by_role is required in repository constructor")
+
+        db_model = WorkflowRun()
+        db_model.id = domain_model.id
+        db_model.tenant_id = self._tenant_id
+        if self._app_id is not None:
+            db_model.app_id = self._app_id
+        db_model.workflow_id = domain_model.workflow_id
+        db_model.triggered_from = self._triggered_from
+        db_model.sequence_number = domain_model.sequence_number
+        db_model.type = domain_model.type
+        db_model.version = domain_model.workflow_version
+        db_model.graph = json.dumps(domain_model.graph) if domain_model.graph else None
+        db_model.inputs = json.dumps(domain_model.inputs) if domain_model.inputs else None
+        db_model.outputs = json.dumps(domain_model.outputs) if domain_model.outputs else None
+        db_model.status = domain_model.status
+        db_model.error = domain_model.error_message if domain_model.error_message else None
+        db_model.total_tokens = domain_model.total_tokens
+        db_model.total_steps = domain_model.total_steps
+        db_model.exceptions_count = domain_model.exceptions_count
+        db_model.created_by_role = self._creator_user_role
+        db_model.created_by = self._creator_user_id
+        db_model.created_at = domain_model.started_at
+        db_model.finished_at = domain_model.finished_at
+
+        # Calculate elapsed time if finished_at is available
+        if domain_model.finished_at:
+            db_model.elapsed_time = (domain_model.finished_at - domain_model.started_at).total_seconds()
+        else:
+            db_model.elapsed_time = 0
+
+        return db_model
+
+    def save(self, execution: WorkflowExecution) -> None:
+        """
+        Save or update a WorkflowExecution domain entity to the database.
+
+        This method serves as a domain-to-database adapter that:
+        1. Converts the domain entity to its database representation
+        2. Persists the database model using SQLAlchemy's merge operation
+        3. Maintains proper multi-tenancy by including tenant context during conversion
+        4. Updates the in-memory cache for faster subsequent lookups
+
+        The method handles both creating new records and updating existing ones through
+        SQLAlchemy's merge operation.
+
+        Args:
+            execution: The WorkflowExecution domain entity to persist
+        """
+        # Convert domain model to database model using tenant context and other attributes
+        db_model = self._to_db_model(execution)
+
+        # Create a new database session
+        with self._session_factory() as session:
+            # SQLAlchemy merge intelligently handles both insert and update operations
+            # based on the presence of the primary key
+            session.merge(db_model)
+            session.commit()
+
+        # Update the in-memory cache for faster subsequent lookups
+        logger.debug(f"Updating cache for execution_id: {db_model.id}")
+        self._execution_cache[db_model.id] = db_model
+
+    def get(self, execution_id: str) -> Optional[WorkflowExecution]:
+        """
+        Retrieve a WorkflowExecution by its ID.
+
+        First checks the in-memory cache, and if not found, queries the database.
+        If found in the database, adds it to the cache for future lookups.
+
+        Args:
+            execution_id: The workflow execution ID
+
+        Returns:
+            The WorkflowExecution instance if found, None otherwise
+        """
+        # First check the cache
+        if execution_id in self._execution_cache:
+            logger.debug(f"Cache hit for execution_id: {execution_id}")
+            # Convert cached DB model to domain model
+            cached_db_model = self._execution_cache[execution_id]
+            return self._to_domain_model(cached_db_model)

+        # If not in cache, query the database
+        logger.debug(f"Cache miss for execution_id: {execution_id}, querying database")
+        with self._session_factory() as session:
+            stmt = select(WorkflowRun).where(
+                WorkflowRun.id == execution_id,
+                WorkflowRun.tenant_id == self._tenant_id,
+            )
+            if self._app_id:
+                stmt = stmt.where(WorkflowRun.app_id == self._app_id)
+
+            db_model = session.scalar(stmt)
+            if db_model:
+                # Add DB model to cache
+                self._execution_cache[execution_id] = db_model
+
+                # Convert to domain model and return
+                return self._to_domain_model(db_model)
+
+            return None
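A sketch of how this repository might be constructed and used; the engine URL and IDs are placeholders, and `some_account` and `execution` stand in for an Account/EndUser and a WorkflowExecution created elsewhere:

```python
from sqlalchemy import create_engine

# Placeholders for illustration only.
engine = create_engine("postgresql+psycopg2://user:pass@localhost/dify")
repo = SQLAlchemyWorkflowExecutionRepository(
    session_factory=engine,                      # an Engine is wrapped in a sessionmaker
    user=some_account,                           # an Account or EndUser carrying a tenant
    app_id="app-1234",
    triggered_from=WorkflowRunTriggeredFrom.APP_RUN,
)

repo.save(execution)                             # insert-or-update via session.merge()
again = repo.get(execution.id)                   # second read is served from the cache
```

The `expire_on_commit=False` choice in the constructor matters for the cache: committed `WorkflowRun` objects stay readable after their session closes, which is what allows them to be handed back out of `_execution_cache` later.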
@ -4,13 +4,14 @@ SQLAlchemy implementation of the WorkflowNodeExecutionRepository.

 import json
 import logging
-from collections.abc import Mapping, Sequence
-from typing import Any, Optional, Union, cast
+from collections.abc import Sequence
+from typing import Optional, Union

 from sqlalchemy import UnaryExpression, asc, delete, desc, select
 from sqlalchemy.engine import Engine
 from sqlalchemy.orm import sessionmaker

+from core.model_runtime.utils.encoders import jsonable_encoder
 from core.workflow.entities.node_entities import NodeRunMetadataKey
 from core.workflow.entities.node_execution_entities import (
     NodeExecution,
@ -85,8 +86,8 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
         self._creator_user_role = CreatorUserRole.ACCOUNT if isinstance(user, Account) else CreatorUserRole.END_USER

         # Initialize in-memory cache for node executions
-        # Key: node_execution_id, Value: NodeExecution
-        self._node_execution_cache: dict[str, NodeExecution] = {}
+        # Key: node_execution_id, Value: WorkflowNodeExecution (DB model)
+        self._node_execution_cache: dict[str, WorkflowNodeExecution] = {}

     def _to_domain_model(self, db_model: WorkflowNodeExecution) -> NodeExecution:
         """
@ -102,7 +103,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
         inputs = db_model.inputs_dict
         process_data = db_model.process_data_dict
         outputs = db_model.outputs_dict
-        metadata = db_model.execution_metadata_dict
+        metadata = {NodeRunMetadataKey(k): v for k, v in db_model.execution_metadata_dict.items()}

         # Convert status to domain enum
         status = NodeExecutionStatus(db_model.status)
@ -123,12 +124,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
             status=status,
             error=db_model.error,
             elapsed_time=db_model.elapsed_time,
-            # FIXME(QuantumGhost): a temporary workaround for the following type check failure in Python 3.11.
-            # However, this problem is not occurred in Python 3.12.
-            #
-            # A case of this error is:
-            # https://github.com/langgenius/dify/actions/runs/15112698604/job/42475659482?pr=19737#step:9:24
-            metadata=cast(Mapping[NodeRunMetadataKey, Any] | None, metadata),
+            metadata=metadata,
             created_at=db_model.created_at,
             finished_at=db_model.finished_at,
         )
@ -171,7 +167,9 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
         db_model.status = domain_model.status
         db_model.error = domain_model.error
         db_model.elapsed_time = domain_model.elapsed_time
-        db_model.execution_metadata = json.dumps(domain_model.metadata) if domain_model.metadata else None
+        db_model.execution_metadata = (
+            json.dumps(jsonable_encoder(domain_model.metadata)) if domain_model.metadata else None
+        )
         db_model.created_at = domain_model.created_at
         db_model.created_by_role = self._creator_user_role
         db_model.created_by = self._creator_user_id
@ -208,7 +206,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
         # Only cache if we have a node_execution_id to use as the cache key
         if db_model.node_execution_id:
             logger.debug(f"Updating cache for node_execution_id: {db_model.node_execution_id}")
-            self._node_execution_cache[db_model.node_execution_id] = execution
+            self._node_execution_cache[db_model.node_execution_id] = db_model

     def get_by_node_execution_id(self, node_execution_id: str) -> Optional[NodeExecution]:
         """
@ -226,7 +224,9 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
         # First check the cache
         if node_execution_id in self._node_execution_cache:
             logger.debug(f"Cache hit for node_execution_id: {node_execution_id}")
-            return self._node_execution_cache[node_execution_id]
+            # Convert cached DB model to domain model
+            cached_db_model = self._node_execution_cache[node_execution_id]
+            return self._to_domain_model(cached_db_model)

         # If not in cache, query the database
         logger.debug(f"Cache miss for node_execution_id: {node_execution_id}, querying database")
@ -241,26 +241,25 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)

         db_model = session.scalar(stmt)
         if db_model:
-            # Convert to domain model
-            domain_model = self._to_domain_model(db_model)
-
-            # Add to cache
-            self._node_execution_cache[node_execution_id] = domain_model
-
-            return domain_model
+            # Add DB model to cache
+            self._node_execution_cache[node_execution_id] = db_model
+
+            # Convert to domain model and return
+            return self._to_domain_model(db_model)

         return None

-    def get_by_workflow_run(
+    def get_db_models_by_workflow_run(
         self,
         workflow_run_id: str,
         order_config: Optional[OrderConfig] = None,
-    ) -> Sequence[NodeExecution]:
+    ) -> Sequence[WorkflowNodeExecution]:
         """
-        Retrieve all NodeExecution instances for a specific workflow run.
+        Retrieve all WorkflowNodeExecution database models for a specific workflow run.

-        This method always queries the database to ensure complete and ordered results,
-        but updates the cache with any retrieved executions.
+        This method directly returns database models without converting to domain models,
+        which is useful when you need to access database-specific fields like triggered_from.
+        It also updates the in-memory cache with the retrieved models.

         Args:
             workflow_run_id: The workflow run ID
@ -269,7 +268,7 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
             order_config.order_direction: Direction to order ("asc" or "desc")

         Returns:
-            A list of NodeExecution instances
+            A list of WorkflowNodeExecution database models
         """
         with self._session_factory() as session:
             stmt = select(WorkflowNodeExecution).where(
@ -298,16 +297,43 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)

             db_models = session.scalars(stmt).all()

-            # Convert database models to domain models and update cache
-            domain_models = []
+            # Update the cache with the retrieved DB models
             for model in db_models:
-                domain_model = self._to_domain_model(model)
-                # Update cache if node_execution_id is present
-                if domain_model.node_execution_id:
-                    self._node_execution_cache[domain_model.node_execution_id] = domain_model
-                domain_models.append(domain_model)
+                if model.node_execution_id:
+                    self._node_execution_cache[model.node_execution_id] = model

-            return domain_models
+            return db_models
+
+    def get_by_workflow_run(
+        self,
+        workflow_run_id: str,
+        order_config: Optional[OrderConfig] = None,
+    ) -> Sequence[NodeExecution]:
+        """
+        Retrieve all NodeExecution instances for a specific workflow run.
+
+        This method always queries the database to ensure complete and ordered results,
+        but updates the cache with any retrieved executions.
+
+        Args:
+            workflow_run_id: The workflow run ID
+            order_config: Optional configuration for ordering results
+            order_config.order_by: List of fields to order by (e.g., ["index", "created_at"])
+            order_config.order_direction: Direction to order ("asc" or "desc")
+
+        Returns:
+            A list of NodeExecution instances
+        """
+        # Get the database models using the new method
+        db_models = self.get_db_models_by_workflow_run(workflow_run_id, order_config)
+
+        # Convert database models to domain models
+        domain_models = []
+        for model in db_models:
+            domain_model = self._to_domain_model(model)
+            domain_models.append(domain_model)
+
+        return domain_models

     def get_running_executions(self, workflow_run_id: str) -> Sequence[NodeExecution]:
         """
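A design note on the split above: the cache now stores `WorkflowNodeExecution` database models rather than domain models, so `get_db_models_by_workflow_run` (which needs DB-only columns such as `triggered_from`) and the domain-facing `get_by_workflow_run` can share one cache entry, with conversion deferred to read time. A generic cache-aside sketch of the same shape, with a hypothetical loader:

```python
from typing import Callable, Optional, TypeVar

T = TypeVar("T")

def cache_aside(cache: dict[str, T], key: str, load: Callable[[str], Optional[T]]) -> Optional[T]:
    # Serve from cache when possible; otherwise load, populate, return.
    hit = cache.get(key)
    if hit is not None:
        return hit
    value = load(key)
    if value is not None:
        cache[key] = value
    return value

store = {"n-1": "db-model-for-n-1"}  # stands in for the database
cache: dict[str, str] = {}
print(cache_aside(cache, "n-1", store.get))  # loads from the store, then caches
print(cache_aside(cache, "n-1", store.get))  # cache hit
```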
@ -337,10 +363,12 @@ class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)
             domain_models = []

             for model in db_models:
-                domain_model = self._to_domain_model(model)
                 # Update cache if node_execution_id is present
-                if domain_model.node_execution_id:
-                    self._node_execution_cache[domain_model.node_execution_id] = domain_model
+                if model.node_execution_id:
+                    self._node_execution_cache[model.node_execution_id] = model
+
+                # Convert to domain model
+                domain_model = self._to_domain_model(model)
                 domain_models.append(domain_model)

             return domain_models
@ -528,7 +528,7 @@ class ToolManager:
                     yield provider

             except Exception:
-                logger.exception(f"load builtin provider {provider}")
+                logger.exception(f"load builtin provider {provider_path}")
                 continue
         # set builtin providers loaded
         cls._builtin_providers_loaded = True
@ -644,10 +644,10 @@ class ToolManager:
         )

         workflow_provider_controllers: list[WorkflowToolProviderController] = []
-        for provider in workflow_providers:
+        for workflow_provider in workflow_providers:
             try:
                 workflow_provider_controllers.append(
-                    ToolTransformService.workflow_provider_to_controller(db_provider=provider)
+                    ToolTransformService.workflow_provider_to_controller(db_provider=workflow_provider)
                 )
             except Exception:
                 # app has been deleted
@ -125,6 +125,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
             return ""
         # get retrieval model , if the model is not setting , using default
         retrieval_model: dict[str, Any] = dataset.retrieval_model or default_retrieval_model
+        retrieval_resource_list = []
        if dataset.indexing_technique == "economy":
             # use keyword table query
             documents = RetrievalService.retrieve(
@ -181,7 +182,7 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool):
                         score=record.score,
                     )
                 )
-        retrieval_resource_list = []
+
         if self.return_resource:
             for record in records:
                 segment = record.segment
@ -1,7 +1,9 @@
 import json
 import logging
 from collections.abc import Generator
-from typing import Any, Optional, Union, cast
+from typing import Any, Optional, cast

+from flask_login import current_user
+
 from core.file import FILE_MODEL_IDENTITY, File, FileTransferMethod
 from core.tools.__base.tool import Tool
@ -87,7 +89,7 @@ class WorkflowTool(Tool):
         result = generator.generate(
             app_model=app,
             workflow=workflow,
-            user=self._get_user(user_id),
+            user=cast("Account | EndUser", current_user),
             args={"inputs": tool_parameters, "files": files},
             invoke_from=self.runtime.invoke_from,
             streaming=False,
@ -111,20 +113,6 @@ class WorkflowTool(Tool):
         yield self.create_text_message(json.dumps(outputs, ensure_ascii=False))
         yield self.create_json_message(outputs)

-    def _get_user(self, user_id: str) -> Union[EndUser, Account]:
-        """
-        get the user by user id
-        """
-
-        user = db.session.query(EndUser).filter(EndUser.id == user_id).first()
-        if not user:
-            user = db.session.query(Account).filter(Account.id == user_id).first()
-
-        if not user:
-            raise ValueError("user not found")
-
-        return user
-
     def fork_tool_runtime(self, runtime: ToolRuntime) -> "WorkflowTool":
         """
         fork a new tool with metadata
api/core/workflow/entities/workflow_execution_entities.py (new file, 91 lines)
@ -0,0 +1,91 @@
+"""
+Domain entities for workflow execution.
+
+Models are independent of the storage mechanism and don't contain
+implementation details like tenant_id, app_id, etc.
+"""
+
+from collections.abc import Mapping
+from datetime import UTC, datetime
+from enum import StrEnum
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+
+class WorkflowType(StrEnum):
+    """
+    Workflow Type Enum for domain layer
+    """
+
+    WORKFLOW = "workflow"
+    CHAT = "chat"
+
+
+class WorkflowExecutionStatus(StrEnum):
+    RUNNING = "running"
+    SUCCEEDED = "succeeded"
+    FAILED = "failed"
+    STOPPED = "stopped"
+    PARTIAL_SUCCEEDED = "partial-succeeded"
+
+
+class WorkflowExecution(BaseModel):
+    """
+    Domain model for workflow execution based on WorkflowRun but without
+    user, tenant, and app attributes.
+    """
+
+    id: str = Field(...)
+    workflow_id: str = Field(...)
+    workflow_version: str = Field(...)
+    sequence_number: int = Field(...)
+
+    type: WorkflowType = Field(...)
+    graph: Mapping[str, Any] = Field(...)
+
+    inputs: Mapping[str, Any] = Field(...)
+    outputs: Optional[Mapping[str, Any]] = None
+
+    status: WorkflowExecutionStatus = WorkflowExecutionStatus.RUNNING
+    error_message: str = Field(default="")
+    total_tokens: int = Field(default=0)
+    total_steps: int = Field(default=0)
+    exceptions_count: int = Field(default=0)
+
+    started_at: datetime = Field(...)
+    finished_at: Optional[datetime] = None
+
+    @property
+    def elapsed_time(self) -> float:
+        """
+        Calculate elapsed time in seconds.
+        If workflow is not finished, use current time.
+        """
+        end_time = self.finished_at or datetime.now(UTC).replace(tzinfo=None)
+        return (end_time - self.started_at).total_seconds()
+
+    @classmethod
+    def new(
+        cls,
+        *,
+        id: str,
+        workflow_id: str,
+        sequence_number: int,
+        type: WorkflowType,
+        workflow_version: str,
+        graph: Mapping[str, Any],
+        inputs: Mapping[str, Any],
+        started_at: datetime,
+    ) -> "WorkflowExecution":
+        return WorkflowExecution(
+            id=id,
+            workflow_id=workflow_id,
+            sequence_number=sequence_number,
+            type=type,
+            workflow_version=workflow_version,
+            graph=graph,
+            inputs=inputs,
+            status=WorkflowExecutionStatus.RUNNING,
+            started_at=started_at,
+        )
@ -0,0 +1,42 @@
+from typing import Optional, Protocol
+
+from core.workflow.entities.workflow_execution_entities import WorkflowExecution
+
+
+class WorkflowExecutionRepository(Protocol):
+    """
+    Repository interface for WorkflowExecution.
+
+    This interface defines the contract for accessing and manipulating
+    WorkflowExecution data, regardless of the underlying storage mechanism.
+
+    Note: Domain-specific concepts like multi-tenancy (tenant_id), application context (app_id),
+    and other implementation details should be handled at the implementation level, not in
+    the core interface. This keeps the core domain model clean and independent of specific
+    application domains or deployment scenarios.
+    """
+
+    def save(self, execution: WorkflowExecution) -> None:
+        """
+        Save or update a WorkflowExecution instance.
+
+        This method handles both creating new records and updating existing ones.
+        The implementation should determine whether to create or update based on
+        the execution's ID or other identifying fields.
+
+        Args:
+            execution: The WorkflowExecution instance to save or update
+        """
+        ...
+
+    def get(self, execution_id: str) -> Optional[WorkflowExecution]:
+        """
+        Retrieve a WorkflowExecution by its ID.
+
+        Args:
+            execution_id: The workflow execution ID
+
+        Returns:
+            The WorkflowExecution instance if found, None otherwise
+        """
+        ...
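Because the interface is a `typing.Protocol`, implementations are matched structurally: any object with compatible `save`/`get` methods satisfies the type, whether or not it inherits from the protocol (the SQLAlchemy implementation above does inherit, but only for documentation value). A minimal in-memory implementation, the kind of thing one might use in tests, could look like this (illustrative, not part of the commit):

```python
from typing import Optional

class InMemoryWorkflowExecutionRepository:
    """Structurally matches WorkflowExecutionRepository; no inheritance needed."""

    def __init__(self) -> None:
        self._store: dict[str, WorkflowExecution] = {}

    def save(self, execution: WorkflowExecution) -> None:
        self._store[execution.id] = execution

    def get(self, execution_id: str) -> Optional[WorkflowExecution]:
        return self._store.get(execution_id)

# Type-checks against the protocol without subclassing it:
repo: WorkflowExecutionRepository = InMemoryWorkflowExecutionRepository()
```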
@ -1,22 +1,13 @@
-import json
-import time
-from collections.abc import Mapping, Sequence
+from collections.abc import Mapping
 from datetime import UTC, datetime
-from typing import Any, Optional, Union, cast
+from typing import Any, Optional, Union
 from uuid import uuid4

 from sqlalchemy import func, select
 from sqlalchemy.orm import Session

-from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom, WorkflowAppGenerateEntity
+from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity
 from core.app.entities.queue_entities import (
-    QueueAgentLogEvent,
-    QueueIterationCompletedEvent,
-    QueueIterationNextEvent,
-    QueueIterationStartEvent,
-    QueueLoopCompletedEvent,
-    QueueLoopNextEvent,
-    QueueLoopStartEvent,
     QueueNodeExceptionEvent,
     QueueNodeFailedEvent,
     QueueNodeInIterationFailedEvent,
@ -24,50 +15,24 @@ from core.app.entities.queue_entities import (
     QueueNodeRetryEvent,
     QueueNodeStartedEvent,
     QueueNodeSucceededEvent,
-    QueueParallelBranchRunFailedEvent,
-    QueueParallelBranchRunStartedEvent,
-    QueueParallelBranchRunSucceededEvent,
-)
-from core.app.entities.task_entities import (
-    AgentLogStreamResponse,
-    IterationNodeCompletedStreamResponse,
-    IterationNodeNextStreamResponse,
-    IterationNodeStartStreamResponse,
-    LoopNodeCompletedStreamResponse,
-    LoopNodeNextStreamResponse,
-    LoopNodeStartStreamResponse,
-    NodeFinishStreamResponse,
-    NodeRetryStreamResponse,
-    NodeStartStreamResponse,
-    ParallelBranchFinishedStreamResponse,
-    ParallelBranchStartStreamResponse,
-    WorkflowFinishStreamResponse,
-    WorkflowStartStreamResponse,
 )
 from core.app.task_pipeline.exc import WorkflowRunNotFoundError
-from core.file import FILE_MODEL_IDENTITY, File
 from core.ops.entities.trace_entity import TraceTaskName
 from core.ops.ops_trace_manager import TraceQueueManager, TraceTask
-from core.tools.tool_manager import ToolManager
 from core.workflow.entities.node_entities import NodeRunMetadataKey
 from core.workflow.entities.node_execution_entities import (
     NodeExecution,
     NodeExecutionStatus,
 )
+from core.workflow.entities.workflow_execution_entities import WorkflowExecution, WorkflowExecutionStatus, WorkflowType
 from core.workflow.enums import SystemVariableKey
-from core.workflow.nodes import NodeType
-from core.workflow.nodes.tool.entities import ToolNodeData
+from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from core.workflow.workflow_entry import WorkflowEntry
 from models import (
-    Account,
-    CreatorUserRole,
-    EndUser,
     Workflow,
-    WorkflowNodeExecutionStatus,
     WorkflowRun,
     WorkflowRunStatus,
-    WorkflowRunTriggeredFrom,
 )

@ -77,21 +42,20 @@ class WorkflowCycleManager:
         *,
         application_generate_entity: Union[AdvancedChatAppGenerateEntity, WorkflowAppGenerateEntity],
         workflow_system_variables: dict[SystemVariableKey, Any],
+        workflow_execution_repository: WorkflowExecutionRepository,
         workflow_node_execution_repository: WorkflowNodeExecutionRepository,
     ) -> None:
-        self._workflow_run: WorkflowRun | None = None
         self._application_generate_entity = application_generate_entity
         self._workflow_system_variables = workflow_system_variables
+        self._workflow_execution_repository = workflow_execution_repository
        self._workflow_node_execution_repository = workflow_node_execution_repository

-    def _handle_workflow_run_start(
+    def handle_workflow_run_start(
         self,
         *,
         session: Session,
         workflow_id: str,
-        user_id: str,
-        created_by_role: CreatorUserRole,
-    ) -> WorkflowRun:
+    ) -> WorkflowExecution:
         workflow_stmt = select(Workflow).where(Workflow.id == workflow_id)
         workflow = session.scalar(workflow_stmt)
         if not workflow:
@ -110,157 +74,116 @@ class WorkflowCycleManager:
                 continue
             inputs[f"sys.{key.value}"] = value

-        triggered_from = (
-            WorkflowRunTriggeredFrom.DEBUGGING
-            if self._application_generate_entity.invoke_from == InvokeFrom.DEBUGGER
-            else WorkflowRunTriggeredFrom.APP_RUN
-        )
-
         # handle special values
         inputs = dict(WorkflowEntry.handle_special_values(inputs) or {})

         # init workflow run
         # TODO: This workflow_run_id should always not be None, maybe we can use a more elegant way to handle this
-        workflow_run_id = str(self._workflow_system_variables.get(SystemVariableKey.WORKFLOW_RUN_ID) or uuid4())
-
-        workflow_run = WorkflowRun()
-        workflow_run.id = workflow_run_id
-        workflow_run.tenant_id = workflow.tenant_id
-        workflow_run.app_id = workflow.app_id
-        workflow_run.sequence_number = new_sequence_number
-        workflow_run.workflow_id = workflow.id
-        workflow_run.type = workflow.type
-        workflow_run.triggered_from = triggered_from.value
-        workflow_run.version = workflow.version
-        workflow_run.graph = workflow.graph
-        workflow_run.inputs = json.dumps(inputs)
-        workflow_run.status = WorkflowRunStatus.RUNNING
-        workflow_run.created_by_role = created_by_role
-        workflow_run.created_by = user_id
-        workflow_run.created_at = datetime.now(UTC).replace(tzinfo=None)
-
-        session.add(workflow_run)
-
-        return workflow_run
+        execution_id = str(self._workflow_system_variables.get(SystemVariableKey.WORKFLOW_RUN_ID) or uuid4())
+        execution = WorkflowExecution.new(
+            id=execution_id,
+            workflow_id=workflow.id,
+            sequence_number=new_sequence_number,
+            type=WorkflowType(workflow.type),
+            workflow_version=workflow.version,
+            graph=workflow.graph_dict,
+            inputs=inputs,
+            started_at=datetime.now(UTC).replace(tzinfo=None),
+        )
+
+        self._workflow_execution_repository.save(execution)
+
+        return execution

-    def _handle_workflow_run_success(
+    def handle_workflow_run_success(
         self,
         *,
-        session: Session,
         workflow_run_id: str,
-        start_at: float,
         total_tokens: int,
         total_steps: int,
         outputs: Mapping[str, Any] | None = None,
         conversation_id: Optional[str] = None,
         trace_manager: Optional[TraceQueueManager] = None,
-    ) -> WorkflowRun:
-        """
-        Workflow run success
-        :param workflow_run_id: workflow run id
-        :param start_at: start time
-        :param total_tokens: total tokens
-        :param total_steps: total steps
-        :param outputs: outputs
-        :param conversation_id: conversation id
-        :return:
-        """
-        workflow_run = self._get_workflow_run(session=session, workflow_run_id=workflow_run_id)
-
+    ) -> WorkflowExecution:
+        workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id)
         outputs = WorkflowEntry.handle_special_values(outputs)

-        workflow_run.status = WorkflowRunStatus.SUCCEEDED
-        workflow_run.outputs = json.dumps(outputs or {})
-        workflow_run.elapsed_time = time.perf_counter() - start_at
-        workflow_run.total_tokens = total_tokens
-        workflow_run.total_steps = total_steps
-        workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
+        workflow_execution.status = WorkflowExecutionStatus.SUCCEEDED
+        workflow_execution.outputs = outputs or {}
+        workflow_execution.total_tokens = total_tokens
+        workflow_execution.total_steps = total_steps
+        workflow_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)

         if trace_manager:
             trace_manager.add_trace_task(
                 TraceTask(
                     TraceTaskName.WORKFLOW_TRACE,
-                    workflow_run=workflow_run,
+                    workflow_execution=workflow_execution,
                     conversation_id=conversation_id,
                     user_id=trace_manager.user_id,
                 )
             )

-        return workflow_run
+        return workflow_execution

-    def _handle_workflow_run_partial_success(
+    def handle_workflow_run_partial_success(
         self,
         *,
-        session: Session,
         workflow_run_id: str,
-        start_at: float,
         total_tokens: int,
         total_steps: int,
         outputs: Mapping[str, Any] | None = None,
         exceptions_count: int = 0,
         conversation_id: Optional[str] = None,
         trace_manager: Optional[TraceQueueManager] = None,
-    ) -> WorkflowRun:
-        workflow_run = self._get_workflow_run(session=session, workflow_run_id=workflow_run_id)
+    ) -> WorkflowExecution:
+        execution = self._get_workflow_execution_or_raise_error(workflow_run_id)
         outputs = WorkflowEntry.handle_special_values(dict(outputs) if outputs else None)

-        workflow_run.status = WorkflowRunStatus.PARTIAL_SUCCEEDED.value
-        workflow_run.outputs = json.dumps(outputs or {})
-        workflow_run.elapsed_time = time.perf_counter() - start_at
-        workflow_run.total_tokens = total_tokens
-        workflow_run.total_steps = total_steps
-        workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
-        workflow_run.exceptions_count = exceptions_count
+        execution.status = WorkflowExecutionStatus.PARTIAL_SUCCEEDED
+        execution.outputs = outputs or {}
+        execution.total_tokens = total_tokens
+        execution.total_steps = total_steps
+        execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
+        execution.exceptions_count = exceptions_count

         if trace_manager:
             trace_manager.add_trace_task(
                 TraceTask(
                     TraceTaskName.WORKFLOW_TRACE,
-                    workflow_run=workflow_run,
+                    workflow_execution=execution,
                     conversation_id=conversation_id,
                     user_id=trace_manager.user_id,
                 )
             )

-        return workflow_run
+        return execution

-    def _handle_workflow_run_failed(
+    def handle_workflow_run_failed(
         self,
         *,
-        session: Session,
         workflow_run_id: str,
-        start_at: float,
         total_tokens: int,
         total_steps: int,
         status: WorkflowRunStatus,
-        error: str,
+        error_message: str,
         conversation_id: Optional[str] = None,
         trace_manager: Optional[TraceQueueManager] = None,
         exceptions_count: int = 0,
-    ) -> WorkflowRun:
-        """
-        Workflow run failed
-        :param workflow_run_id: workflow run id
-        :param start_at: start time
-        :param total_tokens: total tokens
-        :param total_steps: total steps
-        :param status: status
-        :param error: error message
-        :return:
-        """
-        workflow_run = self._get_workflow_run(session=session, workflow_run_id=workflow_run_id)
-
-        workflow_run.status = status.value
-        workflow_run.error = error
-        workflow_run.elapsed_time = time.perf_counter() - start_at
-        workflow_run.total_tokens = total_tokens
-        workflow_run.total_steps = total_steps
-        workflow_run.finished_at = datetime.now(UTC).replace(tzinfo=None)
-        workflow_run.exceptions_count = exceptions_count
+    ) -> WorkflowExecution:
+        execution = self._get_workflow_execution_or_raise_error(workflow_run_id)
+
+        execution.status = WorkflowExecutionStatus(status.value)
+        execution.error_message = error_message
+        execution.total_tokens = total_tokens
+        execution.total_steps = total_steps
+        execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
+        execution.exceptions_count = exceptions_count

         # Use the instance repository to find running executions for a workflow run
         running_domain_executions = self._workflow_node_execution_repository.get_running_executions(
-            workflow_run_id=workflow_run.id
+            workflow_run_id=execution.id
         )

         # Update the domain models
@ -269,7 +192,7 @@ class WorkflowCycleManager:
             if domain_execution.node_execution_id:
                 # Update the domain model
                 domain_execution.status = NodeExecutionStatus.FAILED
-                domain_execution.error = error
+                domain_execution.error = error_message
                 domain_execution.finished_at = now
                 domain_execution.elapsed_time = (now - domain_execution.created_at).total_seconds()

@ -280,15 +203,22 @@ class WorkflowCycleManager:
             trace_manager.add_trace_task(
                 TraceTask(
                     TraceTaskName.WORKFLOW_TRACE,
-                    workflow_run=workflow_run,
+                    workflow_execution=execution,
                     conversation_id=conversation_id,
                     user_id=trace_manager.user_id,
                 )
             )

-        return workflow_run
+        return execution

-    def _handle_node_execution_start(self, *, workflow_run: WorkflowRun, event: QueueNodeStartedEvent) -> NodeExecution:
+    def handle_node_execution_start(
+        self,
+        *,
+        workflow_execution_id: str,
+        event: QueueNodeStartedEvent,
+    ) -> NodeExecution:
+        workflow_execution = self._get_workflow_execution_or_raise_error(workflow_execution_id)
+
         # Create a domain model
         created_at = datetime.now(UTC).replace(tzinfo=None)
         metadata = {
@ -299,8 +229,8 @@ class WorkflowCycleManager:

         domain_execution = NodeExecution(
             id=str(uuid4()),
-            workflow_id=workflow_run.workflow_id,
-            workflow_run_id=workflow_run.id,
+            workflow_id=workflow_execution.workflow_id,
+            workflow_run_id=workflow_execution.id,
             predecessor_node_id=event.predecessor_node_id,
             index=event.node_run_index,
             node_execution_id=event.node_execution_id,
@ -317,7 +247,7 @@ class WorkflowCycleManager:

         return domain_execution

-    def _handle_workflow_node_execution_success(self, *, event: QueueNodeSucceededEvent) -> NodeExecution:
+    def handle_workflow_node_execution_success(self, *, event: QueueNodeSucceededEvent) -> NodeExecution:
         # Get the domain model from repository
         domain_execution = self._workflow_node_execution_repository.get_by_node_execution_id(event.node_execution_id)
         if not domain_execution:
@ -350,7 +280,7 @@ class WorkflowCycleManager:

         return domain_execution

-    def _handle_workflow_node_execution_failed(
+    def handle_workflow_node_execution_failed(
         self,
         *,
         event: QueueNodeFailedEvent
@ -400,15 +330,10 @@ class WorkflowCycleManager:

         return domain_execution

-    def _handle_workflow_node_execution_retried(
-        self, *, workflow_run: WorkflowRun, event: QueueNodeRetryEvent
+    def handle_workflow_node_execution_retried(
+        self, *, workflow_execution_id: str, event: QueueNodeRetryEvent
     ) -> NodeExecution:
-        """
-        Workflow node execution failed
-        :param workflow_run: workflow run
-        :param event: queue node failed event
-        :return:
-        """
+        workflow_execution = self._get_workflow_execution_or_raise_error(workflow_execution_id)
         created_at = event.start_at
         finished_at = datetime.now(UTC).replace(tzinfo=None)
         elapsed_time = (finished_at - created_at).total_seconds()
@ -433,8 +358,8 @@ class WorkflowCycleManager:
|
|||||||
# Create a domain model
|
# Create a domain model
|
||||||
domain_execution = NodeExecution(
|
domain_execution = NodeExecution(
|
||||||
id=str(uuid4()),
|
id=str(uuid4()),
|
||||||
workflow_id=workflow_run.workflow_id,
|
workflow_id=workflow_execution.workflow_id,
|
||||||
workflow_run_id=workflow_run.id,
|
workflow_run_id=workflow_execution.id,
|
||||||
predecessor_node_id=event.predecessor_node_id,
|
predecessor_node_id=event.predecessor_node_id,
|
||||||
node_execution_id=event.node_execution_id,
|
node_execution_id=event.node_execution_id,
|
||||||
node_id=event.node_id,
|
node_id=event.node_id,
|
||||||
@ -456,491 +381,8 @@ class WorkflowCycleManager:
|
|||||||
|
|
||||||
return domain_execution
|
return domain_execution
|
||||||
|
|
||||||
def _workflow_start_to_stream_response(
|
def _get_workflow_execution_or_raise_error(self, id: str, /) -> WorkflowExecution:
|
||||||
self,
|
execution = self._workflow_execution_repository.get(id)
|
||||||
*,
|
if not execution:
|
||||||
session: Session,
|
raise WorkflowRunNotFoundError(id)
|
||||||
task_id: str,
|
return execution
|
||||||
workflow_run: WorkflowRun,
|
|
||||||
) -> WorkflowStartStreamResponse:
|
|
||||||
_ = session
|
|
||||||
return WorkflowStartStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
workflow_run_id=workflow_run.id,
|
|
||||||
data=WorkflowStartStreamResponse.Data(
|
|
||||||
id=workflow_run.id,
|
|
||||||
workflow_id=workflow_run.workflow_id,
|
|
||||||
sequence_number=workflow_run.sequence_number,
|
|
||||||
inputs=dict(workflow_run.inputs_dict or {}),
|
|
||||||
created_at=int(workflow_run.created_at.timestamp()),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def _workflow_finish_to_stream_response(
|
|
||||||
self,
|
|
||||||
*,
|
|
||||||
session: Session,
|
|
||||||
task_id: str,
|
|
||||||
workflow_run: WorkflowRun,
|
|
||||||
) -> WorkflowFinishStreamResponse:
|
|
||||||
created_by = None
|
|
||||||
if workflow_run.created_by_role == CreatorUserRole.ACCOUNT:
|
|
||||||
stmt = select(Account).where(Account.id == workflow_run.created_by)
|
|
||||||
account = session.scalar(stmt)
|
|
||||||
if account:
|
|
||||||
created_by = {
|
|
||||||
"id": account.id,
|
|
||||||
"name": account.name,
|
|
||||||
"email": account.email,
|
|
||||||
}
|
|
||||||
elif workflow_run.created_by_role == CreatorUserRole.END_USER:
|
|
||||||
stmt = select(EndUser).where(EndUser.id == workflow_run.created_by)
|
|
||||||
end_user = session.scalar(stmt)
|
|
||||||
if end_user:
|
|
||||||
created_by = {
|
|
||||||
"id": end_user.id,
|
|
||||||
"user": end_user.session_id,
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
raise NotImplementedError(f"unknown created_by_role: {workflow_run.created_by_role}")
|
|
||||||
|
|
||||||
return WorkflowFinishStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
workflow_run_id=workflow_run.id,
|
|
||||||
data=WorkflowFinishStreamResponse.Data(
|
|
||||||
id=workflow_run.id,
|
|
||||||
workflow_id=workflow_run.workflow_id,
|
|
||||||
sequence_number=workflow_run.sequence_number,
|
|
||||||
status=workflow_run.status,
|
|
||||||
outputs=dict(workflow_run.outputs_dict) if workflow_run.outputs_dict else None,
|
|
||||||
error=workflow_run.error,
|
|
||||||
elapsed_time=workflow_run.elapsed_time,
|
|
||||||
total_tokens=workflow_run.total_tokens,
|
|
||||||
total_steps=workflow_run.total_steps,
|
|
||||||
created_by=created_by,
|
|
||||||
created_at=int(workflow_run.created_at.timestamp()),
|
|
||||||
finished_at=int(workflow_run.finished_at.timestamp()),
|
|
||||||
files=self._fetch_files_from_node_outputs(dict(workflow_run.outputs_dict)),
|
|
||||||
exceptions_count=workflow_run.exceptions_count,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def _workflow_node_start_to_stream_response(
|
|
||||||
self,
|
|
||||||
*,
|
|
||||||
event: QueueNodeStartedEvent,
|
|
||||||
task_id: str,
|
|
||||||
workflow_node_execution: NodeExecution,
|
|
||||||
) -> Optional[NodeStartStreamResponse]:
|
|
||||||
if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
|
|
||||||
return None
|
|
||||||
if not workflow_node_execution.workflow_run_id:
|
|
||||||
return None
|
|
||||||
|
|
||||||
response = NodeStartStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
workflow_run_id=workflow_node_execution.workflow_run_id,
|
|
||||||
data=NodeStartStreamResponse.Data(
|
|
||||||
id=workflow_node_execution.id,
|
|
||||||
node_id=workflow_node_execution.node_id,
|
|
||||||
node_type=workflow_node_execution.node_type,
|
|
||||||
title=workflow_node_execution.title,
|
|
||||||
index=workflow_node_execution.index,
|
|
||||||
predecessor_node_id=workflow_node_execution.predecessor_node_id,
|
|
||||||
inputs=workflow_node_execution.inputs,
|
|
||||||
created_at=int(workflow_node_execution.created_at.timestamp()),
|
|
||||||
parallel_id=event.parallel_id,
|
|
||||||
parallel_start_node_id=event.parallel_start_node_id,
|
|
||||||
parent_parallel_id=event.parent_parallel_id,
|
|
||||||
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
|
||||||
iteration_id=event.in_iteration_id,
|
|
||||||
loop_id=event.in_loop_id,
|
|
||||||
parallel_run_id=event.parallel_mode_run_id,
|
|
||||||
agent_strategy=event.agent_strategy,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
# extras logic
|
|
||||||
if event.node_type == NodeType.TOOL:
|
|
||||||
node_data = cast(ToolNodeData, event.node_data)
|
|
||||||
response.data.extras["icon"] = ToolManager.get_tool_icon(
|
|
||||||
tenant_id=self._application_generate_entity.app_config.tenant_id,
|
|
||||||
provider_type=node_data.provider_type,
|
|
||||||
provider_id=node_data.provider_id,
|
|
||||||
)
|
|
||||||
|
|
||||||
return response
|
|
||||||
|
|
||||||
def _workflow_node_finish_to_stream_response(
|
|
||||||
self,
|
|
||||||
*,
|
|
||||||
event: QueueNodeSucceededEvent
|
|
||||||
| QueueNodeFailedEvent
|
|
||||||
| QueueNodeInIterationFailedEvent
|
|
||||||
| QueueNodeInLoopFailedEvent
|
|
||||||
| QueueNodeExceptionEvent,
|
|
||||||
task_id: str,
|
|
||||||
workflow_node_execution: NodeExecution,
|
|
||||||
) -> Optional[NodeFinishStreamResponse]:
|
|
||||||
if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
|
|
||||||
return None
|
|
||||||
if not workflow_node_execution.workflow_run_id:
|
|
||||||
return None
|
|
||||||
if not workflow_node_execution.finished_at:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return NodeFinishStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
workflow_run_id=workflow_node_execution.workflow_run_id,
|
|
||||||
data=NodeFinishStreamResponse.Data(
|
|
||||||
id=workflow_node_execution.id,
|
|
||||||
node_id=workflow_node_execution.node_id,
|
|
||||||
node_type=workflow_node_execution.node_type,
|
|
||||||
index=workflow_node_execution.index,
|
|
||||||
title=workflow_node_execution.title,
|
|
||||||
predecessor_node_id=workflow_node_execution.predecessor_node_id,
|
|
||||||
inputs=workflow_node_execution.inputs,
|
|
||||||
process_data=workflow_node_execution.process_data,
|
|
||||||
outputs=workflow_node_execution.outputs,
|
|
||||||
status=workflow_node_execution.status,
|
|
||||||
error=workflow_node_execution.error,
|
|
||||||
elapsed_time=workflow_node_execution.elapsed_time,
|
|
||||||
execution_metadata=workflow_node_execution.metadata,
|
|
||||||
created_at=int(workflow_node_execution.created_at.timestamp()),
|
|
||||||
finished_at=int(workflow_node_execution.finished_at.timestamp()),
|
|
||||||
files=self._fetch_files_from_node_outputs(workflow_node_execution.outputs or {}),
|
|
||||||
parallel_id=event.parallel_id,
|
|
||||||
parallel_start_node_id=event.parallel_start_node_id,
|
|
||||||
parent_parallel_id=event.parent_parallel_id,
|
|
||||||
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
|
||||||
iteration_id=event.in_iteration_id,
|
|
||||||
loop_id=event.in_loop_id,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def _workflow_node_retry_to_stream_response(
|
|
||||||
self,
|
|
||||||
*,
|
|
||||||
event: QueueNodeRetryEvent,
|
|
||||||
task_id: str,
|
|
||||||
workflow_node_execution: NodeExecution,
|
|
||||||
) -> Optional[Union[NodeRetryStreamResponse, NodeFinishStreamResponse]]:
|
|
||||||
if workflow_node_execution.node_type in {NodeType.ITERATION, NodeType.LOOP}:
|
|
||||||
return None
|
|
||||||
if not workflow_node_execution.workflow_run_id:
|
|
||||||
return None
|
|
||||||
if not workflow_node_execution.finished_at:
|
|
||||||
return None
|
|
||||||
|
|
||||||
return NodeRetryStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
workflow_run_id=workflow_node_execution.workflow_run_id,
|
|
||||||
data=NodeRetryStreamResponse.Data(
|
|
||||||
id=workflow_node_execution.id,
|
|
||||||
node_id=workflow_node_execution.node_id,
|
|
||||||
node_type=workflow_node_execution.node_type,
|
|
||||||
index=workflow_node_execution.index,
|
|
||||||
title=workflow_node_execution.title,
|
|
||||||
predecessor_node_id=workflow_node_execution.predecessor_node_id,
|
|
||||||
inputs=workflow_node_execution.inputs,
|
|
||||||
process_data=workflow_node_execution.process_data,
|
|
||||||
outputs=workflow_node_execution.outputs,
|
|
||||||
status=workflow_node_execution.status,
|
|
||||||
error=workflow_node_execution.error,
|
|
||||||
elapsed_time=workflow_node_execution.elapsed_time,
|
|
||||||
execution_metadata=workflow_node_execution.metadata,
|
|
||||||
created_at=int(workflow_node_execution.created_at.timestamp()),
|
|
||||||
finished_at=int(workflow_node_execution.finished_at.timestamp()),
|
|
||||||
files=self._fetch_files_from_node_outputs(workflow_node_execution.outputs or {}),
|
|
||||||
parallel_id=event.parallel_id,
|
|
||||||
parallel_start_node_id=event.parallel_start_node_id,
|
|
||||||
parent_parallel_id=event.parent_parallel_id,
|
|
||||||
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
|
||||||
iteration_id=event.in_iteration_id,
|
|
||||||
loop_id=event.in_loop_id,
|
|
||||||
retry_index=event.retry_index,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def _workflow_parallel_branch_start_to_stream_response(
|
|
||||||
self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueParallelBranchRunStartedEvent
|
|
||||||
) -> ParallelBranchStartStreamResponse:
|
|
||||||
_ = session
|
|
||||||
return ParallelBranchStartStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
workflow_run_id=workflow_run.id,
|
|
||||||
data=ParallelBranchStartStreamResponse.Data(
|
|
||||||
parallel_id=event.parallel_id,
|
|
||||||
parallel_branch_id=event.parallel_start_node_id,
|
|
||||||
parent_parallel_id=event.parent_parallel_id,
|
|
||||||
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
|
||||||
iteration_id=event.in_iteration_id,
|
|
||||||
loop_id=event.in_loop_id,
|
|
||||||
created_at=int(time.time()),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def _workflow_parallel_branch_finished_to_stream_response(
|
|
||||||
self,
|
|
||||||
*,
|
|
||||||
session: Session,
|
|
||||||
task_id: str,
|
|
||||||
workflow_run: WorkflowRun,
|
|
||||||
event: QueueParallelBranchRunSucceededEvent | QueueParallelBranchRunFailedEvent,
|
|
||||||
) -> ParallelBranchFinishedStreamResponse:
|
|
||||||
_ = session
|
|
||||||
return ParallelBranchFinishedStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
workflow_run_id=workflow_run.id,
|
|
||||||
data=ParallelBranchFinishedStreamResponse.Data(
|
|
||||||
parallel_id=event.parallel_id,
|
|
||||||
parallel_branch_id=event.parallel_start_node_id,
|
|
||||||
parent_parallel_id=event.parent_parallel_id,
|
|
||||||
parent_parallel_start_node_id=event.parent_parallel_start_node_id,
|
|
||||||
iteration_id=event.in_iteration_id,
|
|
||||||
loop_id=event.in_loop_id,
|
|
||||||
status="succeeded" if isinstance(event, QueueParallelBranchRunSucceededEvent) else "failed",
|
|
||||||
error=event.error if isinstance(event, QueueParallelBranchRunFailedEvent) else None,
|
|
||||||
created_at=int(time.time()),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def _workflow_iteration_start_to_stream_response(
|
|
||||||
self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueIterationStartEvent
|
|
||||||
) -> IterationNodeStartStreamResponse:
|
|
||||||
_ = session
|
|
||||||
return IterationNodeStartStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
workflow_run_id=workflow_run.id,
|
|
||||||
data=IterationNodeStartStreamResponse.Data(
|
|
||||||
id=event.node_id,
|
|
||||||
node_id=event.node_id,
|
|
||||||
node_type=event.node_type.value,
|
|
||||||
title=event.node_data.title,
|
|
||||||
created_at=int(time.time()),
|
|
||||||
extras={},
|
|
||||||
inputs=event.inputs or {},
|
|
||||||
metadata=event.metadata or {},
|
|
||||||
parallel_id=event.parallel_id,
|
|
||||||
parallel_start_node_id=event.parallel_start_node_id,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def _workflow_iteration_next_to_stream_response(
|
|
||||||
self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueIterationNextEvent
|
|
||||||
) -> IterationNodeNextStreamResponse:
|
|
||||||
_ = session
|
|
||||||
return IterationNodeNextStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
workflow_run_id=workflow_run.id,
|
|
||||||
data=IterationNodeNextStreamResponse.Data(
|
|
||||||
id=event.node_id,
|
|
||||||
node_id=event.node_id,
|
|
||||||
node_type=event.node_type.value,
|
|
||||||
title=event.node_data.title,
|
|
||||||
index=event.index,
|
|
||||||
pre_iteration_output=event.output,
|
|
||||||
created_at=int(time.time()),
|
|
||||||
extras={},
|
|
||||||
parallel_id=event.parallel_id,
|
|
||||||
parallel_start_node_id=event.parallel_start_node_id,
|
|
||||||
parallel_mode_run_id=event.parallel_mode_run_id,
|
|
||||||
duration=event.duration,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def _workflow_iteration_completed_to_stream_response(
|
|
||||||
self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueIterationCompletedEvent
|
|
||||||
) -> IterationNodeCompletedStreamResponse:
|
|
||||||
_ = session
|
|
||||||
return IterationNodeCompletedStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
workflow_run_id=workflow_run.id,
|
|
||||||
data=IterationNodeCompletedStreamResponse.Data(
|
|
||||||
id=event.node_id,
|
|
||||||
node_id=event.node_id,
|
|
||||||
node_type=event.node_type.value,
|
|
||||||
title=event.node_data.title,
|
|
||||||
outputs=event.outputs,
|
|
||||||
created_at=int(time.time()),
|
|
||||||
extras={},
|
|
||||||
inputs=event.inputs or {},
|
|
||||||
status=WorkflowNodeExecutionStatus.SUCCEEDED
|
|
||||||
if event.error is None
|
|
||||||
else WorkflowNodeExecutionStatus.FAILED,
|
|
||||||
error=None,
|
|
||||||
elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(),
|
|
||||||
total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0,
|
|
||||||
execution_metadata=event.metadata,
|
|
||||||
finished_at=int(time.time()),
|
|
||||||
steps=event.steps,
|
|
||||||
parallel_id=event.parallel_id,
|
|
||||||
parallel_start_node_id=event.parallel_start_node_id,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def _workflow_loop_start_to_stream_response(
|
|
||||||
self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueLoopStartEvent
|
|
||||||
) -> LoopNodeStartStreamResponse:
|
|
||||||
_ = session
|
|
||||||
return LoopNodeStartStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
workflow_run_id=workflow_run.id,
|
|
||||||
data=LoopNodeStartStreamResponse.Data(
|
|
||||||
id=event.node_id,
|
|
||||||
node_id=event.node_id,
|
|
||||||
node_type=event.node_type.value,
|
|
||||||
title=event.node_data.title,
|
|
||||||
created_at=int(time.time()),
|
|
||||||
extras={},
|
|
||||||
inputs=event.inputs or {},
|
|
||||||
metadata=event.metadata or {},
|
|
||||||
parallel_id=event.parallel_id,
|
|
||||||
parallel_start_node_id=event.parallel_start_node_id,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def _workflow_loop_next_to_stream_response(
|
|
||||||
self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueLoopNextEvent
|
|
||||||
) -> LoopNodeNextStreamResponse:
|
|
||||||
_ = session
|
|
||||||
return LoopNodeNextStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
workflow_run_id=workflow_run.id,
|
|
||||||
data=LoopNodeNextStreamResponse.Data(
|
|
||||||
id=event.node_id,
|
|
||||||
node_id=event.node_id,
|
|
||||||
node_type=event.node_type.value,
|
|
||||||
title=event.node_data.title,
|
|
||||||
index=event.index,
|
|
||||||
pre_loop_output=event.output,
|
|
||||||
created_at=int(time.time()),
|
|
||||||
extras={},
|
|
||||||
parallel_id=event.parallel_id,
|
|
||||||
parallel_start_node_id=event.parallel_start_node_id,
|
|
||||||
parallel_mode_run_id=event.parallel_mode_run_id,
|
|
||||||
duration=event.duration,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def _workflow_loop_completed_to_stream_response(
|
|
||||||
self, *, session: Session, task_id: str, workflow_run: WorkflowRun, event: QueueLoopCompletedEvent
|
|
||||||
) -> LoopNodeCompletedStreamResponse:
|
|
||||||
_ = session
|
|
||||||
return LoopNodeCompletedStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
workflow_run_id=workflow_run.id,
|
|
||||||
data=LoopNodeCompletedStreamResponse.Data(
|
|
||||||
id=event.node_id,
|
|
||||||
node_id=event.node_id,
|
|
||||||
node_type=event.node_type.value,
|
|
||||||
title=event.node_data.title,
|
|
||||||
outputs=event.outputs,
|
|
||||||
created_at=int(time.time()),
|
|
||||||
extras={},
|
|
||||||
inputs=event.inputs or {},
|
|
||||||
status=WorkflowNodeExecutionStatus.SUCCEEDED
|
|
||||||
if event.error is None
|
|
||||||
else WorkflowNodeExecutionStatus.FAILED,
|
|
||||||
error=None,
|
|
||||||
elapsed_time=(datetime.now(UTC).replace(tzinfo=None) - event.start_at).total_seconds(),
|
|
||||||
total_tokens=event.metadata.get("total_tokens", 0) if event.metadata else 0,
|
|
||||||
execution_metadata=event.metadata,
|
|
||||||
finished_at=int(time.time()),
|
|
||||||
steps=event.steps,
|
|
||||||
parallel_id=event.parallel_id,
|
|
||||||
parallel_start_node_id=event.parallel_start_node_id,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
def _fetch_files_from_node_outputs(self, outputs_dict: Mapping[str, Any]) -> Sequence[Mapping[str, Any]]:
|
|
||||||
"""
|
|
||||||
Fetch files from node outputs
|
|
||||||
:param outputs_dict: node outputs dict
|
|
||||||
:return:
|
|
||||||
"""
|
|
||||||
if not outputs_dict:
|
|
||||||
return []
|
|
||||||
|
|
||||||
files = [self._fetch_files_from_variable_value(output_value) for output_value in outputs_dict.values()]
|
|
||||||
# Remove None
|
|
||||||
files = [file for file in files if file]
|
|
||||||
# Flatten list
|
|
||||||
# Flatten the list of sequences into a single list of mappings
|
|
||||||
flattened_files = [file for sublist in files if sublist for file in sublist]
|
|
||||||
|
|
||||||
# Convert to tuple to match Sequence type
|
|
||||||
return tuple(flattened_files)
|
|
||||||
|
|
||||||
def _fetch_files_from_variable_value(self, value: Union[dict, list]) -> Sequence[Mapping[str, Any]]:
|
|
||||||
"""
|
|
||||||
Fetch files from variable value
|
|
||||||
:param value: variable value
|
|
||||||
:return:
|
|
||||||
"""
|
|
||||||
if not value:
|
|
||||||
return []
|
|
||||||
|
|
||||||
files = []
|
|
||||||
if isinstance(value, list):
|
|
||||||
for item in value:
|
|
||||||
file = self._get_file_var_from_value(item)
|
|
||||||
if file:
|
|
||||||
files.append(file)
|
|
||||||
elif isinstance(value, dict):
|
|
||||||
file = self._get_file_var_from_value(value)
|
|
||||||
if file:
|
|
||||||
files.append(file)
|
|
||||||
|
|
||||||
return files
|
|
||||||
|
|
||||||
def _get_file_var_from_value(self, value: Union[dict, list]) -> Mapping[str, Any] | None:
|
|
||||||
"""
|
|
||||||
Get file var from value
|
|
||||||
:param value: variable value
|
|
||||||
:return:
|
|
||||||
"""
|
|
||||||
if not value:
|
|
||||||
return None
|
|
||||||
|
|
||||||
if isinstance(value, dict) and value.get("dify_model_identity") == FILE_MODEL_IDENTITY:
|
|
||||||
return value
|
|
||||||
elif isinstance(value, File):
|
|
||||||
return value.to_dict()
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
def _get_workflow_run(self, *, session: Session, workflow_run_id: str) -> WorkflowRun:
|
|
||||||
if self._workflow_run and self._workflow_run.id == workflow_run_id:
|
|
||||||
cached_workflow_run = self._workflow_run
|
|
||||||
cached_workflow_run = session.merge(cached_workflow_run)
|
|
||||||
return cached_workflow_run
|
|
||||||
stmt = select(WorkflowRun).where(WorkflowRun.id == workflow_run_id)
|
|
||||||
workflow_run = session.scalar(stmt)
|
|
||||||
if not workflow_run:
|
|
||||||
raise WorkflowRunNotFoundError(workflow_run_id)
|
|
||||||
self._workflow_run = workflow_run
|
|
||||||
|
|
||||||
return workflow_run
|
|
||||||
|
|
||||||
def _handle_agent_log(self, task_id: str, event: QueueAgentLogEvent) -> AgentLogStreamResponse:
|
|
||||||
"""
|
|
||||||
Handle agent log
|
|
||||||
:param task_id: task id
|
|
||||||
:param event: agent log event
|
|
||||||
:return:
|
|
||||||
"""
|
|
||||||
return AgentLogStreamResponse(
|
|
||||||
task_id=task_id,
|
|
||||||
data=AgentLogStreamResponse.Data(
|
|
||||||
node_execution_id=event.node_execution_id,
|
|
||||||
id=event.id,
|
|
||||||
parent_id=event.parent_id,
|
|
||||||
label=event.label,
|
|
||||||
error=event.error,
|
|
||||||
status=event.status,
|
|
||||||
data=event.data,
|
|
||||||
metadata=event.metadata,
|
|
||||||
node_id=event.node_id,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
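The hunks above migrate WorkflowCycleManager off session-bound `WorkflowRun` rows and onto repository-backed `WorkflowExecution` domain models, promote the handlers to public method names, and delete the `*_to_stream_response` converters wholesale. A minimal sketch of the new failure path from a caller's perspective (the `cycle_manager` construction is an assumption, not shown in this commit):

    # Hypothetical caller; repositories are injected into the manager elsewhere.
    execution = cycle_manager.handle_workflow_run_failed(
        workflow_run_id=workflow_run_id,
        total_tokens=1024,
        total_steps=7,
        status=WorkflowRunStatus.FAILED,
        error_message="node timed out",  # parameter renamed from `error`
    )
    assert execution.status == WorkflowExecutionStatus(WorkflowRunStatus.FAILED.value)

Note that both `session` and `start_at` drop out of the signature: persistence moves behind `_workflow_execution_repository`, and elapsed time is presumably derived from the execution's own timestamps rather than computed by the caller.
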
@@ -3,11 +3,14 @@ import json
 import flask_login  # type: ignore
 from flask import Response, request
 from flask_login import user_loaded_from_request, user_logged_in
-from werkzeug.exceptions import Unauthorized
+from werkzeug.exceptions import NotFound, Unauthorized

 import contexts
 from dify_app import DifyApp
+from extensions.ext_database import db
 from libs.passport import PassportService
+from models.account import Account
+from models.model import EndUser
 from services.account_service import AccountService

 login_manager = flask_login.LoginManager()
@@ -17,34 +20,48 @@ login_manager = flask_login.LoginManager()
 @login_manager.request_loader
 def load_user_from_request(request_from_flask_login):
     """Load user based on the request."""
-    if request.blueprint not in {"console", "inner_api"}:
-        return None
-    # Check if the user_id contains a dot, indicating the old format
     auth_header = request.headers.get("Authorization", "")
-    if not auth_header:
-        auth_token = request.args.get("_token")
-        if not auth_token:
-            raise Unauthorized("Invalid Authorization token.")
-    else:
+    auth_token: str | None = None
+    if auth_header:
         if " " not in auth_header:
             raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
-        auth_scheme, auth_token = auth_header.split(None, 1)
+        auth_scheme, auth_token = auth_header.split(maxsplit=1)
         auth_scheme = auth_scheme.lower()
         if auth_scheme != "bearer":
             raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
+    else:
+        auth_token = request.args.get("_token")

-    decoded = PassportService().verify(auth_token)
-    user_id = decoded.get("user_id")
+    if request.blueprint in {"console", "inner_api"}:
+        if not auth_token:
+            raise Unauthorized("Invalid Authorization token.")
+        decoded = PassportService().verify(auth_token)
+        user_id = decoded.get("user_id")
+        if not user_id:
+            raise Unauthorized("Invalid Authorization token.")

-    logged_in_account = AccountService.load_logged_in_account(account_id=user_id)
-    return logged_in_account
+        logged_in_account = AccountService.load_logged_in_account(account_id=user_id)
+        return logged_in_account
+    elif request.blueprint == "web":
+        decoded = PassportService().verify(auth_token)
+        end_user_id = decoded.get("end_user_id")
+        if not end_user_id:
+            raise Unauthorized("Invalid Authorization token.")
+        end_user = db.session.query(EndUser).filter(EndUser.id == decoded["end_user_id"]).first()
+        if not end_user:
+            raise NotFound("End user not found.")
+        return end_user


 @user_logged_in.connect
 @user_loaded_from_request.connect
 def on_user_logged_in(_sender, user):
-    """Called when a user logged in."""
-    if user:
+    """Called when a user logged in.
+
+    Note: AccountService.load_logged_in_account will populate user.current_tenant_id
+    through the load_user method, which calls account.set_tenant_id().
+    """
+    if user and isinstance(user, Account) and user.current_tenant_id:
         contexts.tenant_id.set(user.current_tenant_id)

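The rewritten loader resolves the token once, Authorization header first with a `_token` query-string fallback, and only then dispatches on the request blueprint, which is what lets web-app end users authenticate through the same entry point as console accounts. A condensed sketch of the new control flow (`bearer_token_from`, `account_from`, and `end_user_from` are hypothetical helpers standing in for the inline logic in the diff):

    if auth_header:
        token = bearer_token_from(auth_header)  # raises Unauthorized on a malformed header
    else:
        token = request.args.get("_token")

    if request.blueprint in {"console", "inner_api"}:
        return account_from(token)    # PassportService -> user_id -> Account
    elif request.blueprint == "web":
        return end_user_from(token)   # PassportService -> end_user_id -> EndUser

The `on_user_logged_in` hook is correspondingly narrowed to `Account` instances, since end users don't carry the `current_tenant_id` attribute the hook publishes.
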
|
@ -39,7 +39,6 @@ from core.variables.variables import (
|
|||||||
from core.workflow.constants import (
|
from core.workflow.constants import (
|
||||||
CONVERSATION_VARIABLE_NODE_ID,
|
CONVERSATION_VARIABLE_NODE_ID,
|
||||||
ENVIRONMENT_VARIABLE_NODE_ID,
|
ENVIRONMENT_VARIABLE_NODE_ID,
|
||||||
PIPELINE_VARIABLE_NODE_ID,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@ -1,10 +1,10 @@
|
|||||||
import enum
|
import enum
|
||||||
import json
|
import json
|
||||||
from typing import cast
|
from typing import Optional, cast
|
||||||
|
|
||||||
from flask_login import UserMixin # type: ignore
|
from flask_login import UserMixin # type: ignore
|
||||||
from sqlalchemy import func
|
from sqlalchemy import func
|
||||||
from sqlalchemy.orm import Mapped, mapped_column
|
from sqlalchemy.orm import Mapped, mapped_column, reconstructor
|
||||||
|
|
||||||
from models.base import Base
|
from models.base import Base
|
||||||
|
|
||||||
@ -12,6 +12,66 @@ from .engine import db
|
|||||||
from .types import StringUUID
|
from .types import StringUUID
|
||||||
|
|
||||||
|
|
||||||
|
class TenantAccountRole(enum.StrEnum):
|
||||||
|
OWNER = "owner"
|
||||||
|
ADMIN = "admin"
|
||||||
|
EDITOR = "editor"
|
||||||
|
NORMAL = "normal"
|
||||||
|
DATASET_OPERATOR = "dataset_operator"
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_valid_role(role: str) -> bool:
|
||||||
|
if not role:
|
||||||
|
return False
|
||||||
|
return role in {
|
||||||
|
TenantAccountRole.OWNER,
|
||||||
|
TenantAccountRole.ADMIN,
|
||||||
|
TenantAccountRole.EDITOR,
|
||||||
|
TenantAccountRole.NORMAL,
|
||||||
|
TenantAccountRole.DATASET_OPERATOR,
|
||||||
|
}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_privileged_role(role: Optional["TenantAccountRole"]) -> bool:
|
||||||
|
if not role:
|
||||||
|
return False
|
||||||
|
return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_admin_role(role: Optional["TenantAccountRole"]) -> bool:
|
||||||
|
if not role:
|
||||||
|
return False
|
||||||
|
return role == TenantAccountRole.ADMIN
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_non_owner_role(role: Optional["TenantAccountRole"]) -> bool:
|
||||||
|
if not role:
|
||||||
|
return False
|
||||||
|
return role in {
|
||||||
|
TenantAccountRole.ADMIN,
|
||||||
|
TenantAccountRole.EDITOR,
|
||||||
|
TenantAccountRole.NORMAL,
|
||||||
|
TenantAccountRole.DATASET_OPERATOR,
|
||||||
|
}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_editing_role(role: Optional["TenantAccountRole"]) -> bool:
|
||||||
|
if not role:
|
||||||
|
return False
|
||||||
|
return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN, TenantAccountRole.EDITOR}
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_dataset_edit_role(role: Optional["TenantAccountRole"]) -> bool:
|
||||||
|
if not role:
|
||||||
|
return False
|
||||||
|
return role in {
|
||||||
|
TenantAccountRole.OWNER,
|
||||||
|
TenantAccountRole.ADMIN,
|
||||||
|
TenantAccountRole.EDITOR,
|
||||||
|
TenantAccountRole.DATASET_OPERATOR,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
class AccountStatus(enum.StrEnum):
|
class AccountStatus(enum.StrEnum):
|
||||||
PENDING = "pending"
|
PENDING = "pending"
|
||||||
UNINITIALIZED = "uninitialized"
|
UNINITIALIZED = "uninitialized"
|
||||||
@ -41,24 +101,27 @@ class Account(UserMixin, Base):
|
|||||||
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
|
created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
|
||||||
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
|
updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
|
||||||
|
|
||||||
|
@reconstructor
|
||||||
|
def init_on_load(self):
|
||||||
|
self.role: Optional[TenantAccountRole] = None
|
||||||
|
self._current_tenant: Optional[Tenant] = None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_password_set(self):
|
def is_password_set(self):
|
||||||
return self.password is not None
|
return self.password is not None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def current_tenant(self):
|
def current_tenant(self):
|
||||||
return self._current_tenant # type: ignore
|
return self._current_tenant
|
||||||
|
|
||||||
@current_tenant.setter
|
@current_tenant.setter
|
||||||
def current_tenant(self, value: "Tenant"):
|
def current_tenant(self, tenant: "Tenant"):
|
||||||
tenant = value
|
|
||||||
ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=self.id).first()
|
ta = db.session.query(TenantAccountJoin).filter_by(tenant_id=tenant.id, account_id=self.id).first()
|
||||||
if ta:
|
if ta:
|
||||||
tenant.current_role = ta.role
|
self.role = TenantAccountRole(ta.role)
|
||||||
else:
|
self._current_tenant = tenant
|
||||||
tenant = None # type: ignore
|
return
|
||||||
|
self._current_tenant = None
|
||||||
self._current_tenant = tenant
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def current_tenant_id(self) -> str | None:
|
def current_tenant_id(self) -> str | None:
|
||||||
@ -80,12 +143,12 @@ class Account(UserMixin, Base):
|
|||||||
return
|
return
|
||||||
|
|
||||||
tenant, join = tenant_account_join
|
tenant, join = tenant_account_join
|
||||||
tenant.current_role = join.role
|
self.role = join.role
|
||||||
self._current_tenant = tenant
|
self._current_tenant = tenant
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def current_role(self):
|
def current_role(self):
|
||||||
return self._current_tenant.current_role
|
return self.role
|
||||||
|
|
||||||
def get_status(self) -> AccountStatus:
|
def get_status(self) -> AccountStatus:
|
||||||
status_str = self.status
|
status_str = self.status
|
||||||
@ -105,23 +168,23 @@ class Account(UserMixin, Base):
|
|||||||
# check current_user.current_tenant.current_role in ['admin', 'owner']
|
# check current_user.current_tenant.current_role in ['admin', 'owner']
|
||||||
@property
|
@property
|
||||||
def is_admin_or_owner(self):
|
def is_admin_or_owner(self):
|
||||||
return TenantAccountRole.is_privileged_role(self._current_tenant.current_role)
|
return TenantAccountRole.is_privileged_role(self.role)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_admin(self):
|
def is_admin(self):
|
||||||
return TenantAccountRole.is_admin_role(self._current_tenant.current_role)
|
return TenantAccountRole.is_admin_role(self.role)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_editor(self):
|
def is_editor(self):
|
||||||
return TenantAccountRole.is_editing_role(self._current_tenant.current_role)
|
return TenantAccountRole.is_editing_role(self.role)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_dataset_editor(self):
|
def is_dataset_editor(self):
|
||||||
return TenantAccountRole.is_dataset_edit_role(self._current_tenant.current_role)
|
return TenantAccountRole.is_dataset_edit_role(self.role)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_dataset_operator(self):
|
def is_dataset_operator(self):
|
||||||
return self._current_tenant.current_role == TenantAccountRole.DATASET_OPERATOR
|
return self.role == TenantAccountRole.DATASET_OPERATOR
|
||||||
|
|
||||||
|
|
||||||
class TenantStatus(enum.StrEnum):
|
class TenantStatus(enum.StrEnum):
|
||||||
@ -129,66 +192,6 @@ class TenantStatus(enum.StrEnum):
|
|||||||
ARCHIVE = "archive"
|
ARCHIVE = "archive"
|
||||||
|
|
||||||
|
|
||||||
class TenantAccountRole(enum.StrEnum):
|
|
||||||
OWNER = "owner"
|
|
||||||
ADMIN = "admin"
|
|
||||||
EDITOR = "editor"
|
|
||||||
NORMAL = "normal"
|
|
||||||
DATASET_OPERATOR = "dataset_operator"
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def is_valid_role(role: str) -> bool:
|
|
||||||
if not role:
|
|
||||||
return False
|
|
||||||
return role in {
|
|
||||||
TenantAccountRole.OWNER,
|
|
||||||
TenantAccountRole.ADMIN,
|
|
||||||
TenantAccountRole.EDITOR,
|
|
||||||
TenantAccountRole.NORMAL,
|
|
||||||
TenantAccountRole.DATASET_OPERATOR,
|
|
||||||
}
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def is_privileged_role(role: str) -> bool:
|
|
||||||
if not role:
|
|
||||||
return False
|
|
||||||
return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN}
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def is_admin_role(role: str) -> bool:
|
|
||||||
if not role:
|
|
||||||
return False
|
|
||||||
return role == TenantAccountRole.ADMIN
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def is_non_owner_role(role: str) -> bool:
|
|
||||||
if not role:
|
|
||||||
return False
|
|
||||||
return role in {
|
|
||||||
TenantAccountRole.ADMIN,
|
|
||||||
TenantAccountRole.EDITOR,
|
|
||||||
TenantAccountRole.NORMAL,
|
|
||||||
TenantAccountRole.DATASET_OPERATOR,
|
|
||||||
}
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def is_editing_role(role: str) -> bool:
|
|
||||||
if not role:
|
|
||||||
return False
|
|
||||||
return role in {TenantAccountRole.OWNER, TenantAccountRole.ADMIN, TenantAccountRole.EDITOR}
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def is_dataset_edit_role(role: str) -> bool:
|
|
||||||
if not role:
|
|
||||||
return False
|
|
||||||
return role in {
|
|
||||||
TenantAccountRole.OWNER,
|
|
||||||
TenantAccountRole.ADMIN,
|
|
||||||
TenantAccountRole.EDITOR,
|
|
||||||
TenantAccountRole.DATASET_OPERATOR,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class Tenant(Base):
|
class Tenant(Base):
|
||||||
__tablename__ = "tenants"
|
__tablename__ = "tenants"
|
||||||
__table_args__ = (db.PrimaryKeyConstraint("id", name="tenant_pkey"),)
|
__table_args__ = (db.PrimaryKeyConstraint("id", name="tenant_pkey"),)
|
||||||
|
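Two things move in this file: `TenantAccountRole` is hoisted above `Account` (its helpers now accept `Optional["TenantAccountRole"]`), and the active role is cached on the account itself, initialized to `None` by SQLAlchemy's `reconstructor` hook and filled in by the `current_tenant` setter. Permission checks therefore stop reaching through the shared `Tenant` object. A small sketch of the resulting call pattern (the surrounding objects are assumed to exist):

    account.current_tenant = tenant   # setter resolves TenantAccountJoin and sets account.role
    if account.is_admin_or_owner:     # now reads account.role, not tenant.current_role
        ...                           # role state no longer rides on a shared Tenant instance
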
@@ -1,5 +1,7 @@
-from sqlalchemy.orm import declarative_base
+from sqlalchemy.orm import DeclarativeBase

 from models.engine import metadata

-Base = declarative_base(metadata=metadata)
+
+class Base(DeclarativeBase):
+    metadata = metadata

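This replaces the legacy `declarative_base()` factory with a SQLAlchemy 2.0-style `DeclarativeBase` subclass while pinning the same shared `metadata`, so every model inheriting from `Base` keeps registering on the one `MetaData`. A self-contained equivalent, assuming `models.engine.metadata` is a plain `MetaData` instance:

    from sqlalchemy import MetaData
    from sqlalchemy.orm import DeclarativeBase

    metadata = MetaData()  # stands in for models.engine.metadata


    class Base(DeclarativeBase):
        # Reuse the shared MetaData so all models land in one table registry.
        metadata = metadata
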
@@ -206,10 +206,6 @@ class WorkflowToolProvider(Base):
         db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")
     )

-    @property
-    def schema_type(self) -> ApiProviderSchemaType:
-        return ApiProviderSchemaType.value_of(self.schema_type_str)
-
     @property
     def user(self) -> Account | None:
         return db.session.query(Account).filter(Account.id == self.user_id).first()

@@ -3,7 +3,7 @@ import logging
 from collections.abc import Mapping, Sequence
 from datetime import UTC, datetime
 from enum import Enum, StrEnum
-from typing import TYPE_CHECKING, Any, List, Optional, Self, Union
+from typing import TYPE_CHECKING, Any, Optional, Union
 from uuid import uuid4

 from core.variables import utils as variable_utils
@@ -154,7 +154,7 @@ class Workflow(Base):
         conversation_variables: Sequence[Variable],
         marked_name: str = "",
         marked_comment: str = "",
-    ) -> Self:
+    ) -> "Workflow":
         workflow = Workflow()
         workflow.id = str(uuid4())
         workflow.tenant_id = tenant_id
@@ -447,14 +447,14 @@ class WorkflowRun(Base):
     status: Mapped[str] = mapped_column(db.String(255))  # running, succeeded, failed, stopped, partial-succeeded
     outputs: Mapped[Optional[str]] = mapped_column(sa.Text, default="{}")
     error: Mapped[Optional[str]] = mapped_column(db.Text)
-    elapsed_time = db.Column(db.Float, nullable=False, server_default=sa.text("0"))
+    elapsed_time: Mapped[float] = mapped_column(db.Float, nullable=False, server_default=sa.text("0"))
     total_tokens: Mapped[int] = mapped_column(sa.BigInteger, server_default=sa.text("0"))
-    total_steps = db.Column(db.Integer, server_default=db.text("0"))
+    total_steps: Mapped[int] = mapped_column(db.Integer, server_default=db.text("0"))
     created_by_role: Mapped[str] = mapped_column(db.String(255))  # account, end_user
-    created_by = db.Column(StringUUID, nullable=False)
-    created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
-    finished_at = db.Column(db.DateTime)
-    exceptions_count = db.Column(db.Integer, server_default=db.text("0"))
+    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
+    created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
+    finished_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime)
+    exceptions_count: Mapped[int] = mapped_column(db.Integer, server_default=db.text("0"))

     @property
     def created_by_account(self):
@@ -469,7 +469,7 @@ class WorkflowRun(Base):
         return db.session.get(EndUser, self.created_by) if created_by_role == CreatorUserRole.END_USER else None

     @property
-    def graph_dict(self):
+    def graph_dict(self) -> Mapping[str, Any]:
         return json.loads(self.graph) if self.graph else {}

     @property
@@ -688,8 +688,11 @@ class WorkflowNodeExecution(Base):
         return json.loads(self.process_data) if self.process_data else None

     @property
-    def execution_metadata_dict(self) -> dict[str, Any] | None:
-        return json.loads(self.execution_metadata) if self.execution_metadata else None
+    def execution_metadata_dict(self) -> dict[str, Any]:
+        # When the metadata is unset, we return an empty dictionary instead of `None`.
+        # This approach streamlines the logic for the caller, making it easier to handle
+        # cases where metadata is absent.
+        return json.loads(self.execution_metadata) if self.execution_metadata else {}

     @property
     def extras(self):
@@ -771,12 +774,12 @@ class WorkflowAppLog(Base):
     id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))
     tenant_id: Mapped[str] = mapped_column(StringUUID)
     app_id: Mapped[str] = mapped_column(StringUUID)
-    workflow_id = db.Column(StringUUID, nullable=False)
+    workflow_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
     workflow_run_id: Mapped[str] = mapped_column(StringUUID)
-    created_from = db.Column(db.String(255), nullable=False)
-    created_by_role = db.Column(db.String(255), nullable=False)
-    created_by = db.Column(StringUUID, nullable=False)
-    created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp())
+    created_from: Mapped[str] = mapped_column(db.String(255), nullable=False)
+    created_by_role: Mapped[str] = mapped_column(db.String(255), nullable=False)
+    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
+    created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())

     @property
     def workflow_run(self):
@@ -801,9 +804,11 @@ class ConversationVariable(Base):
     id: Mapped[str] = mapped_column(StringUUID, primary_key=True)
     conversation_id: Mapped[str] = mapped_column(StringUUID, nullable=False, primary_key=True, index=True)
     app_id: Mapped[str] = mapped_column(StringUUID, nullable=False, index=True)
-    data = mapped_column(db.Text, nullable=False)
-    created_at = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp(), index=True)
-    updated_at = mapped_column(
+    data: Mapped[str] = mapped_column(db.Text, nullable=False)
+    created_at: Mapped[datetime] = mapped_column(
+        db.DateTime, nullable=False, server_default=func.current_timestamp(), index=True
+    )
+    updated_at: Mapped[datetime] = mapped_column(
         db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
     )

@@ -851,14 +856,14 @@ class WorkflowDraftVariable(Base):
     # id is the unique identifier of a draft variable.
     id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()"))

-    created_at = mapped_column(
+    created_at: Mapped[datetime] = mapped_column(
         db.DateTime,
         nullable=False,
         default=_naive_utc_datetime,
         server_default=func.current_timestamp(),
     )

-    updated_at = mapped_column(
+    updated_at: Mapped[datetime] = mapped_column(
         db.DateTime,
         nullable=False,
         default=_naive_utc_datetime,

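The column edits in these hunks follow a single pattern: legacy `db.Column(...)` and untyped `mapped_column(...)` declarations gain `Mapped[...]` annotations, so type checkers see concrete `float`, `int`, `str`, and `datetime` attribute types instead of `Any`, with nullable columns spelled as `Optional[...]`. Taking `finished_at` from the hunk above as the representative case:

    # before: attribute type is opaque to the type checker
    finished_at = db.Column(db.DateTime)

    # after: a nullable column surfaces as Optional[datetime]
    finished_at: Mapped[Optional[datetime]] = mapped_column(db.DateTime)
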
|
@ -1043,11 +1043,11 @@ class DocumentService:
|
|||||||
"score_threshold_enabled": False,
|
"score_threshold_enabled": False,
|
||||||
}
|
}
|
||||||
|
|
||||||
dataset.retrieval_model = (
|
dataset.retrieval_model = (
|
||||||
knowledge_config.retrieval_model.model_dump()
|
knowledge_config.retrieval_model.model_dump()
|
||||||
if knowledge_config.retrieval_model
|
if knowledge_config.retrieval_model
|
||||||
else default_retrieval_model
|
else default_retrieval_model
|
||||||
) # type: ignore
|
) # type: ignore
|
||||||
|
|
||||||
documents = []
|
documents = []
|
||||||
if knowledge_config.original_document_id:
|
if knowledge_config.original_document_id:
|
||||||
|
@@ -23,11 +23,10 @@ class VectorService:
     ):
         documents: list[Document] = []

-        document: Document | None = None
         for segment in segments:
             if doc_form == IndexType.PARENT_CHILD_INDEX:
-                document = db.session.query(DatasetDocument).filter_by(id=segment.document_id).first()
-                if not document:
+                dataset_document = db.session.query(DatasetDocument).filter_by(id=segment.document_id).first()
+                if not dataset_document:
                     _logger.warning(
                         "Expected DatasetDocument record to exist, but none was found, document_id=%s, segment_id=%s",
                         segment.document_id,
@@ -37,7 +36,7 @@ class VectorService:
                 # get the process rule
                 processing_rule = (
                     db.session.query(DatasetProcessRule)
-                    .filter(DatasetProcessRule.id == document.dataset_process_rule_id)
+                    .filter(DatasetProcessRule.id == dataset_document.dataset_process_rule_id)
                     .first()
                 )
                 if not processing_rule:
@@ -61,9 +60,11 @@ class VectorService:
                     )
                 else:
                     raise ValueError("The knowledge base index technique is not high quality!")
-                cls.generate_child_chunks(segment, document, dataset, embedding_model_instance, processing_rule, False)
+                cls.generate_child_chunks(
+                    segment, dataset_document, dataset, embedding_model_instance, processing_rule, False
+                )
             else:
-                document = Document(
+                rag_document = Document(
                     page_content=segment.content,
                     metadata={
                         "doc_id": segment.index_node_id,
@@ -72,7 +73,7 @@ class VectorService:
                         "dataset_id": segment.dataset_id,
                     },
                 )
-                documents.append(document)
+                documents.append(rag_document)
         if len(documents) > 0:
             index_processor = IndexProcessorFactory(doc_form).init_index_processor()
             index_processor.load(dataset, documents, with_keywords=True, keywords_list=keywords_list)

@@ -15,6 +15,7 @@ from models import (
     WorkflowRun,
     WorkflowRunTriggeredFrom,
 )
+from models.workflow import WorkflowNodeExecutionTriggeredFrom


 class WorkflowRunService:
@@ -140,14 +141,13 @@ class WorkflowRunService:
             session_factory=db.engine,
             user=user,
             app_id=app_model.id,
-            triggered_from=None,
+            triggered_from=WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN,
         )

-        # Use the repository to get the node executions with ordering
+        # Use the repository to get the database models directly
         order_config = OrderConfig(order_by=["index"], order_direction="desc")
-        node_executions = repository.get_by_workflow_run(workflow_run_id=run_id, order_config=order_config)
-
-        # Convert domain models to database models
-        workflow_node_executions = [repository.to_db_model(node_execution) for node_execution in node_executions]
+        workflow_node_executions = repository.get_db_models_by_workflow_run(
+            workflow_run_id=run_id, order_config=order_config
+        )

         return workflow_node_executions

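Fetching database models straight from the repository removes a per-row domain-to-DB conversion. The before/after shape, using the method names visible in this diff (repository internals are assumed):

    # before: load domain models, then convert each one
    node_executions = repository.get_by_workflow_run(workflow_run_id=run_id, order_config=order_config)
    workflow_node_executions = [repository.to_db_model(n) for n in node_executions]

    # after: the repository hands back the DB models directly
    workflow_node_executions = repository.get_db_models_by_workflow_run(
        workflow_run_id=run_id, order_config=order_config
    )
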
@@ -507,11 +507,11 @@ class WorkflowService:
             raise DraftWorkflowDeletionError("Cannot delete draft workflow versions")

         # Check if this workflow is currently referenced by an app
-        stmt = select(App).where(App.workflow_id == workflow_id)
-        app = session.scalar(stmt)
+        app_stmt = select(App).where(App.workflow_id == workflow_id)
+        app = session.scalar(app_stmt)
         if app:
             # Cannot delete a workflow that's currently in use by an app
-            raise WorkflowInUseError(f"Cannot delete workflow that is currently in use by app '{app.name}'")
+            raise WorkflowInUseError(f"Cannot delete workflow that is currently in use by app '{app.id}'")

         # Don't use workflow.tool_published as it's not accurate for specific workflow versions
         # Check if there's a tool provider using this specific workflow version

@@ -111,7 +111,7 @@ def add_document_to_index_task(dataset_document_id: str):
         logging.exception("add document to index failed")
         dataset_document.enabled = False
         dataset_document.disabled_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
-        dataset_document.status = "error"
+        dataset_document.indexing_status = "error"
         dataset_document.error = str(e)
         db.session.commit()
     finally:

@@ -193,7 +193,7 @@ def _delete_app_workflow_runs(tenant_id: str, app_id: str):
 def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
     # Get app's owner
     with Session(db.engine, expire_on_commit=False) as session:
-        stmt = select(Account).where(Account.id == App.owner_id).where(App.id == app_id)
+        stmt = select(Account).where(Account.id == App.created_by).where(App.id == app_id)
         user = session.scalar(stmt)

         if user is None:

@@ -34,13 +34,13 @@ def test_workflow_tool_should_raise_tool_invoke_error_when_result_has_error_fiel
     # needs to patch those methods to avoid database access.
     monkeypatch.setattr(tool, "_get_app", lambda *args, **kwargs: None)
     monkeypatch.setattr(tool, "_get_workflow", lambda *args, **kwargs: None)
-    monkeypatch.setattr(tool, "_get_user", lambda *args, **kwargs: None)

     # replace `WorkflowAppGenerator.generate` 's return value.
     monkeypatch.setattr(
         "core.app.apps.workflow.app_generator.WorkflowAppGenerator.generate",
         lambda *args, **kwargs: {"data": {"error": "oops"}},
     )
+    monkeypatch.setattr("flask_login.current_user", lambda *args, **kwargs: None)

     with pytest.raises(ToolInvokeError) as exc_info:
         # WorkflowTool always returns a generator, so we need to iterate to

@@ -1,45 +1,73 @@
 import json
-import time
 from datetime import UTC, datetime
-from unittest.mock import MagicMock, patch
+from unittest.mock import MagicMock

 import pytest
 from sqlalchemy.orm import Session

+from core.app.app_config.entities import AppAdditionalFeatures, WorkflowUIBasedAppConfig
 from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom
 from core.app.entities.queue_entities import (
 QueueNodeFailedEvent,
 QueueNodeStartedEvent,
 QueueNodeSucceededEvent,
 )
+from core.workflow.entities.node_entities import NodeRunMetadataKey
+from core.workflow.entities.node_execution_entities import NodeExecution, NodeExecutionStatus
+from core.workflow.entities.workflow_execution_entities import WorkflowExecution, WorkflowExecutionStatus, WorkflowType
 from core.workflow.enums import SystemVariableKey
 from core.workflow.nodes import NodeType
+from core.workflow.repository.workflow_execution_repository import WorkflowExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from core.workflow.workflow_cycle_manager import WorkflowCycleManager
 from models.enums import CreatorUserRole
+from models.model import AppMode
 from models.workflow import (
 Workflow,
-WorkflowNodeExecutionStatus,
 WorkflowRun,
 WorkflowRunStatus,
 )


 @pytest.fixture
-def mock_app_generate_entity():
-entity = MagicMock(spec=AdvancedChatAppGenerateEntity)
-entity.inputs = {"query": "test query"}
-entity.invoke_from = InvokeFrom.WEB_APP
-# Create app_config as a separate mock
-app_config = MagicMock()
-app_config.tenant_id = "test-tenant-id"
-app_config.app_id = "test-app-id"
-entity.app_config = app_config
+def real_app_generate_entity():
+additional_features = AppAdditionalFeatures(
+file_upload=None,
+opening_statement=None,
+suggested_questions=[],
+suggested_questions_after_answer=False,
+show_retrieve_source=False,
+more_like_this=False,
+speech_to_text=False,
+text_to_speech=None,
+trace_config=None,
+)
+
+app_config = WorkflowUIBasedAppConfig(
+tenant_id="test-tenant-id",
+app_id="test-app-id",
+app_mode=AppMode.WORKFLOW,
+additional_features=additional_features,
+workflow_id="test-workflow-id",
+)
+
+entity = AdvancedChatAppGenerateEntity(
+task_id="test-task-id",
+app_config=app_config,
+inputs={"query": "test query"},
+files=[],
+user_id="test-user-id",
+stream=False,
+invoke_from=InvokeFrom.WEB_APP,
+query="test query",
+conversation_id="test-conversation-id",
+)
+
 return entity


 @pytest.fixture
-def mock_workflow_system_variables():
+def real_workflow_system_variables():
 return {
 SystemVariableKey.QUERY: "test query",
 SystemVariableKey.CONVERSATION_ID: "test-conversation-id",
@@ -59,10 +87,23 @@ def mock_node_execution_repository():


 @pytest.fixture
-def workflow_cycle_manager(mock_app_generate_entity, mock_workflow_system_variables, mock_node_execution_repository):
+def mock_workflow_execution_repository():
+repo = MagicMock(spec=WorkflowExecutionRepository)
+repo.get.return_value = None
+return repo
+
+
+@pytest.fixture
+def workflow_cycle_manager(
+real_app_generate_entity,
+real_workflow_system_variables,
+mock_workflow_execution_repository,
+mock_node_execution_repository,
+):
 return WorkflowCycleManager(
-application_generate_entity=mock_app_generate_entity,
-workflow_system_variables=mock_workflow_system_variables,
+application_generate_entity=real_app_generate_entity,
+workflow_system_variables=real_workflow_system_variables,
+workflow_execution_repository=mock_workflow_execution_repository,
 workflow_node_execution_repository=mock_node_execution_repository,
 )

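A side note on the fixture pattern introduced above: passing spec= to MagicMock constrains the mock to the real WorkflowExecutionRepository interface, so a mistyped method name in a test fails immediately instead of silently returning another mock. A minimal self-contained sketch of that behavior (the Repo class below is illustrative, not Dify code):

from unittest.mock import MagicMock

class Repo:
    def get(self, execution_id): ...

repo = MagicMock(spec=Repo)
repo.get("some-id")      # allowed: 'get' exists on the spec
# repo.fetch("some-id")  # would raise AttributeError: Mock object has no attribute 'fetch'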
@@ -74,121 +115,173 @@ def mock_session():


 @pytest.fixture
-def mock_workflow():
-workflow = MagicMock(spec=Workflow)
+def real_workflow():
+workflow = Workflow()
 workflow.id = "test-workflow-id"
 workflow.tenant_id = "test-tenant-id"
 workflow.app_id = "test-app-id"
 workflow.type = "chat"
 workflow.version = "1.0"
-workflow.graph = json.dumps({"nodes": [], "edges": []})
+graph_data = {"nodes": [], "edges": []}
+workflow.graph = json.dumps(graph_data)
+workflow.features = json.dumps({"file_upload": {"enabled": False}})
+workflow.created_by = "test-user-id"
+workflow.created_at = datetime.now(UTC).replace(tzinfo=None)
+workflow.updated_at = datetime.now(UTC).replace(tzinfo=None)
+workflow._environment_variables = "{}"
+workflow._conversation_variables = "{}"
+
 return workflow


 @pytest.fixture
-def mock_workflow_run():
-workflow_run = MagicMock(spec=WorkflowRun)
+def real_workflow_run():
+workflow_run = WorkflowRun()
 workflow_run.id = "test-workflow-run-id"
 workflow_run.tenant_id = "test-tenant-id"
 workflow_run.app_id = "test-app-id"
 workflow_run.workflow_id = "test-workflow-id"
+workflow_run.sequence_number = 1
+workflow_run.type = "chat"
+workflow_run.triggered_from = "app-run"
+workflow_run.version = "1.0"
+workflow_run.graph = json.dumps({"nodes": [], "edges": []})
+workflow_run.inputs = json.dumps({"query": "test query"})
 workflow_run.status = WorkflowRunStatus.RUNNING
+workflow_run.outputs = json.dumps({"answer": "test answer"})
 workflow_run.created_by_role = CreatorUserRole.ACCOUNT
 workflow_run.created_by = "test-user-id"
 workflow_run.created_at = datetime.now(UTC).replace(tzinfo=None)
-workflow_run.inputs_dict = {"query": "test query"}
-workflow_run.outputs_dict = {"answer": "test answer"}
 return workflow_run


 def test_init(
-workflow_cycle_manager, mock_app_generate_entity, mock_workflow_system_variables, mock_node_execution_repository
+workflow_cycle_manager,
+real_app_generate_entity,
+real_workflow_system_variables,
+mock_workflow_execution_repository,
+mock_node_execution_repository,
 ):
 """Test initialization of WorkflowCycleManager"""
-assert workflow_cycle_manager._workflow_run is None
-assert workflow_cycle_manager._application_generate_entity == mock_app_generate_entity
-assert workflow_cycle_manager._workflow_system_variables == mock_workflow_system_variables
+assert workflow_cycle_manager._application_generate_entity == real_app_generate_entity
+assert workflow_cycle_manager._workflow_system_variables == real_workflow_system_variables
+assert workflow_cycle_manager._workflow_execution_repository == mock_workflow_execution_repository
 assert workflow_cycle_manager._workflow_node_execution_repository == mock_node_execution_repository


-def test_handle_workflow_run_start(workflow_cycle_manager, mock_session, mock_workflow):
-"""Test _handle_workflow_run_start method"""
+def test_handle_workflow_run_start(workflow_cycle_manager, mock_session, real_workflow):
+"""Test handle_workflow_run_start method"""
 # Mock session.scalar to return the workflow and max sequence
-mock_session.scalar.side_effect = [mock_workflow, 5]
+mock_session.scalar.side_effect = [real_workflow, 5]

 # Call the method
-workflow_run = workflow_cycle_manager._handle_workflow_run_start(
+workflow_execution = workflow_cycle_manager.handle_workflow_run_start(
 session=mock_session,
 workflow_id="test-workflow-id",
-user_id="test-user-id",
-created_by_role=CreatorUserRole.ACCOUNT,
 )

 # Verify the result
-assert workflow_run.tenant_id == mock_workflow.tenant_id
-assert workflow_run.app_id == mock_workflow.app_id
-assert workflow_run.workflow_id == mock_workflow.id
-assert workflow_run.sequence_number == 6 # max_sequence + 1
-assert workflow_run.status == WorkflowRunStatus.RUNNING
-assert workflow_run.created_by_role == CreatorUserRole.ACCOUNT
-assert workflow_run.created_by == "test-user-id"
+assert workflow_execution.workflow_id == real_workflow.id
+assert workflow_execution.sequence_number == 6 # max_sequence + 1

-# Verify session.add was called
-mock_session.add.assert_called_once_with(workflow_run)
+# Verify the workflow_execution_repository.save was called
+workflow_cycle_manager._workflow_execution_repository.save.assert_called_once_with(workflow_execution)


-def test_handle_workflow_run_success(workflow_cycle_manager, mock_session, mock_workflow_run):
-"""Test _handle_workflow_run_success method"""
-# Mock _get_workflow_run to return the mock_workflow_run
-with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run):
-# Call the method
-result = workflow_cycle_manager._handle_workflow_run_success(
-session=mock_session,
-workflow_run_id="test-workflow-run-id",
-start_at=time.perf_counter() - 10, # 10 seconds ago
-total_tokens=100,
-total_steps=5,
-outputs={"answer": "test answer"},
-)
-
-# Verify the result
-assert result == mock_workflow_run
-assert result.status == WorkflowRunStatus.SUCCEEDED
-assert result.outputs == json.dumps({"answer": "test answer"})
-assert result.total_tokens == 100
-assert result.total_steps == 5
-assert result.finished_at is not None
+def test_handle_workflow_run_success(workflow_cycle_manager, mock_workflow_execution_repository):
+"""Test handle_workflow_run_success method"""
+# Create a real WorkflowExecution
+
+workflow_execution = WorkflowExecution(
+id="test-workflow-run-id",
+workflow_id="test-workflow-id",
+workflow_version="1.0",
+sequence_number=1,
+type=WorkflowType.CHAT,
+graph={"nodes": [], "edges": []},
+inputs={"query": "test query"},
+started_at=datetime.now(UTC).replace(tzinfo=None),
+)
+
+# Mock _get_workflow_execution_or_raise_error to return the real workflow_execution
+workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution
+
+# Call the method
+result = workflow_cycle_manager.handle_workflow_run_success(
+workflow_run_id="test-workflow-run-id",
+total_tokens=100,
+total_steps=5,
+outputs={"answer": "test answer"},
+)
+
+# Verify the result
+assert result == workflow_execution
+assert result.status == WorkflowExecutionStatus.SUCCEEDED
+assert result.outputs == {"answer": "test answer"}
+assert result.total_tokens == 100
+assert result.total_steps == 5
+assert result.finished_at is not None


-def test_handle_workflow_run_failed(workflow_cycle_manager, mock_session, mock_workflow_run):
-"""Test _handle_workflow_run_failed method"""
-# Mock _get_workflow_run to return the mock_workflow_run
-with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run):
-# Mock get_running_executions to return an empty list
-workflow_cycle_manager._workflow_node_execution_repository.get_running_executions.return_value = []
+def test_handle_workflow_run_failed(workflow_cycle_manager, mock_workflow_execution_repository):
+"""Test handle_workflow_run_failed method"""
+# Create a real WorkflowExecution

-# Call the method
-result = workflow_cycle_manager._handle_workflow_run_failed(
-session=mock_session,
-workflow_run_id="test-workflow-run-id",
-start_at=time.perf_counter() - 10, # 10 seconds ago
-total_tokens=50,
-total_steps=3,
-status=WorkflowRunStatus.FAILED,
-error="Test error message",
+workflow_execution = WorkflowExecution(
+id="test-workflow-run-id",
+workflow_id="test-workflow-id",
+workflow_version="1.0",
+sequence_number=1,
+type=WorkflowType.CHAT,
+graph={"nodes": [], "edges": []},
+inputs={"query": "test query"},
+started_at=datetime.now(UTC).replace(tzinfo=None),
 )

-# Verify the result
-assert result == mock_workflow_run
-assert result.status == WorkflowRunStatus.FAILED.value
-assert result.error == "Test error message"
-assert result.total_tokens == 50
-assert result.total_steps == 3
-assert result.finished_at is not None
+# Mock _get_workflow_execution_or_raise_error to return the real workflow_execution
+workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution
+
+# Mock get_running_executions to return an empty list
+workflow_cycle_manager._workflow_node_execution_repository.get_running_executions.return_value = []
+
+# Call the method
+result = workflow_cycle_manager.handle_workflow_run_failed(
+workflow_run_id="test-workflow-run-id",
+total_tokens=50,
+total_steps=3,
+status=WorkflowRunStatus.FAILED,
+error_message="Test error message",
+)
+
+# Verify the result
+assert result == workflow_execution
+assert result.status == WorkflowExecutionStatus(WorkflowRunStatus.FAILED.value)
+assert result.error_message == "Test error message"
+assert result.total_tokens == 50
+assert result.total_steps == 3
+assert result.finished_at is not None


-def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_run):
-"""Test _handle_node_execution_start method"""
+def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_execution_repository):
+"""Test handle_node_execution_start method"""
+# Create a real WorkflowExecution
+
+workflow_execution = WorkflowExecution(
+id="test-workflow-execution-id",
+workflow_id="test-workflow-id",
+workflow_version="1.0",
+sequence_number=1,
+type=WorkflowType.CHAT,
+graph={"nodes": [], "edges": []},
+inputs={"query": "test query"},
+started_at=datetime.now(UTC).replace(tzinfo=None),
+)
+
+# Mock _get_workflow_execution_or_raise_error to return the real workflow_execution
+workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution
+
 # Create a mock event
 event = MagicMock(spec=QueueNodeStartedEvent)
 event.node_execution_id = "test-node-execution-id"
@@ -207,129 +300,171 @@ def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_run):
 event.in_loop_id = "test-loop-id"

 # Call the method
-result = workflow_cycle_manager._handle_node_execution_start(
-workflow_run=mock_workflow_run,
+result = workflow_cycle_manager.handle_node_execution_start(
+workflow_execution_id=workflow_execution.id,
 event=event,
 )

 # Verify the result
-# NodeExecution doesn't have tenant_id attribute, it's handled at repository level
-# assert result.tenant_id == mock_workflow_run.tenant_id
-# assert result.app_id == mock_workflow_run.app_id
-assert result.workflow_id == mock_workflow_run.workflow_id
-assert result.workflow_run_id == mock_workflow_run.id
+assert result.workflow_id == workflow_execution.workflow_id
+assert result.workflow_run_id == workflow_execution.id
 assert result.node_execution_id == event.node_execution_id
 assert result.node_id == event.node_id
 assert result.node_type == event.node_type
 assert result.title == event.node_data.title
-assert result.status == WorkflowNodeExecutionStatus.RUNNING.value
-# NodeExecution doesn't have created_by_role and created_by attributes, they're handled at repository level
-# assert result.created_by_role == mock_workflow_run.created_by_role
-# assert result.created_by == mock_workflow_run.created_by
+assert result.status == NodeExecutionStatus.RUNNING

 # Verify save was called
 workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(result)


-def test_get_workflow_run(workflow_cycle_manager, mock_session, mock_workflow_run):
-"""Test _get_workflow_run method"""
-# Mock session.scalar to return the workflow run
-mock_session.scalar.return_value = mock_workflow_run
-
-# Call the method
-result = workflow_cycle_manager._get_workflow_run(
-session=mock_session,
-workflow_run_id="test-workflow-run-id",
+def test_get_workflow_execution_or_raise_error(workflow_cycle_manager, mock_workflow_execution_repository):
+"""Test _get_workflow_execution_or_raise_error method"""
+# Create a real WorkflowExecution
+
+workflow_execution = WorkflowExecution(
+id="test-workflow-run-id",
+workflow_id="test-workflow-id",
+workflow_version="1.0",
+sequence_number=1,
+type=WorkflowType.CHAT,
+graph={"nodes": [], "edges": []},
+inputs={"query": "test query"},
+started_at=datetime.now(UTC).replace(tzinfo=None),
 )

+# Mock the repository get method to return the real execution
+workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution
+
+# Call the method
+result = workflow_cycle_manager._get_workflow_execution_or_raise_error("test-workflow-run-id")
+
 # Verify the result
-assert result == mock_workflow_run
-assert workflow_cycle_manager._workflow_run == mock_workflow_run
+assert result == workflow_execution
+
+# Test error case
+workflow_cycle_manager._workflow_execution_repository.get.return_value = None
+
+# Expect an error when execution is not found
+with pytest.raises(ValueError):
+workflow_cycle_manager._get_workflow_execution_or_raise_error("non-existent-id")

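The test above pins down the helper's contract: fetch the execution from the injected repository and raise when it is missing. Inferred from those assertions, a plausible shape for the helper (a sketch, the actual WorkflowCycleManager implementation may differ):

def _get_workflow_execution_or_raise_error(self, execution_id: str) -> WorkflowExecution:
    # Look the execution up in the injected repository; None means an unknown id.
    execution = self._workflow_execution_repository.get(execution_id)
    if execution is None:
        raise ValueError(f"workflow execution not found: {execution_id}")
    return execution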
 def test_handle_workflow_node_execution_success(workflow_cycle_manager):
-"""Test _handle_workflow_node_execution_success method"""
+"""Test handle_workflow_node_execution_success method"""
 # Create a mock event
 event = MagicMock(spec=QueueNodeSucceededEvent)
 event.node_execution_id = "test-node-execution-id"
 event.inputs = {"input": "test input"}
 event.process_data = {"process": "test process"}
 event.outputs = {"output": "test output"}
-event.execution_metadata = {"metadata": "test metadata"}
+event.execution_metadata = {NodeRunMetadataKey.TOTAL_TOKENS: 100}
 event.start_at = datetime.now(UTC).replace(tzinfo=None)

-# Create a mock node execution
-node_execution = MagicMock()
-node_execution.node_execution_id = "test-node-execution-id"
+# Create a real node execution
+
+node_execution = NodeExecution(
+id="test-node-execution-record-id",
+node_execution_id="test-node-execution-id",
+workflow_id="test-workflow-id",
+workflow_run_id="test-workflow-run-id",
+index=1,
+node_id="test-node-id",
+node_type=NodeType.LLM,
+title="Test Node",
+created_at=datetime.now(UTC).replace(tzinfo=None),
+)
+
 # Mock the repository to return the node execution
 workflow_cycle_manager._workflow_node_execution_repository.get_by_node_execution_id.return_value = node_execution

 # Call the method
-result = workflow_cycle_manager._handle_workflow_node_execution_success(
+result = workflow_cycle_manager.handle_workflow_node_execution_success(
 event=event,
 )

 # Verify the result
 assert result == node_execution
-assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED.value
+assert result.status == NodeExecutionStatus.SUCCEEDED

 # Verify save was called
 workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(node_execution)


-def test_handle_workflow_run_partial_success(workflow_cycle_manager, mock_session, mock_workflow_run):
-"""Test _handle_workflow_run_partial_success method"""
-# Mock _get_workflow_run to return the mock_workflow_run
-with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run):
-# Call the method
-result = workflow_cycle_manager._handle_workflow_run_partial_success(
-session=mock_session,
-workflow_run_id="test-workflow-run-id",
-start_at=time.perf_counter() - 10, # 10 seconds ago
-total_tokens=75,
-total_steps=4,
-outputs={"partial_answer": "test partial answer"},
-exceptions_count=2,
-)
+def test_handle_workflow_run_partial_success(workflow_cycle_manager, mock_workflow_execution_repository):
+"""Test handle_workflow_run_partial_success method"""
+# Create a real WorkflowExecution

-# Verify the result
-assert result == mock_workflow_run
-assert result.status == WorkflowRunStatus.PARTIAL_SUCCEEDED.value
-assert result.outputs == json.dumps({"partial_answer": "test partial answer"})
-assert result.total_tokens == 75
-assert result.total_steps == 4
-assert result.exceptions_count == 2
-assert result.finished_at is not None
+workflow_execution = WorkflowExecution(
+id="test-workflow-run-id",
+workflow_id="test-workflow-id",
+workflow_version="1.0",
+sequence_number=1,
+type=WorkflowType.CHAT,
+graph={"nodes": [], "edges": []},
+inputs={"query": "test query"},
+started_at=datetime.now(UTC).replace(tzinfo=None),
+)
+
+# Mock _get_workflow_execution_or_raise_error to return the real workflow_execution
+workflow_cycle_manager._workflow_execution_repository.get.return_value = workflow_execution
+
+# Call the method
+result = workflow_cycle_manager.handle_workflow_run_partial_success(
+workflow_run_id="test-workflow-run-id",
+total_tokens=75,
+total_steps=4,
+outputs={"partial_answer": "test partial answer"},
+exceptions_count=2,
+)
+
+# Verify the result
+assert result == workflow_execution
+assert result.status == WorkflowExecutionStatus.PARTIAL_SUCCEEDED
+assert result.outputs == {"partial_answer": "test partial answer"}
+assert result.total_tokens == 75
+assert result.total_steps == 4
+assert result.exceptions_count == 2
+assert result.finished_at is not None


 def test_handle_workflow_node_execution_failed(workflow_cycle_manager):
-"""Test _handle_workflow_node_execution_failed method"""
+"""Test handle_workflow_node_execution_failed method"""
 # Create a mock event
 event = MagicMock(spec=QueueNodeFailedEvent)
 event.node_execution_id = "test-node-execution-id"
 event.inputs = {"input": "test input"}
 event.process_data = {"process": "test process"}
 event.outputs = {"output": "test output"}
-event.execution_metadata = {"metadata": "test metadata"}
+event.execution_metadata = {NodeRunMetadataKey.TOTAL_TOKENS: 100}
 event.start_at = datetime.now(UTC).replace(tzinfo=None)
 event.error = "Test error message"

-# Create a mock node execution
-node_execution = MagicMock()
-node_execution.node_execution_id = "test-node-execution-id"
+# Create a real node execution
+
+node_execution = NodeExecution(
+id="test-node-execution-record-id",
+node_execution_id="test-node-execution-id",
+workflow_id="test-workflow-id",
+workflow_run_id="test-workflow-run-id",
+index=1,
+node_id="test-node-id",
+node_type=NodeType.LLM,
+title="Test Node",
+created_at=datetime.now(UTC).replace(tzinfo=None),
+)
+
 # Mock the repository to return the node execution
 workflow_cycle_manager._workflow_node_execution_repository.get_by_node_execution_id.return_value = node_execution

 # Call the method
-result = workflow_cycle_manager._handle_workflow_node_execution_failed(
+result = workflow_cycle_manager.handle_workflow_node_execution_failed(
 event=event,
 )

 # Verify the result
 assert result == node_execution
-assert result.status == WorkflowNodeExecutionStatus.FAILED.value
+assert result.status == NodeExecutionStatus.FAILED
 assert result.error == "Test error message"

 # Verify save was called
@@ -1,10 +1,11 @@
+import json
 from unittest import mock
 from uuid import uuid4

 import contexts
 from constants import HIDDEN_VALUE
 from core.variables import FloatVariable, IntegerVariable, SecretVariable, StringVariable
-from models.workflow import Workflow
+from models.workflow import Workflow, WorkflowNodeExecution


 def test_environment_variables():
@@ -137,3 +138,14 @@ def test_to_dict():
 workflow_dict = workflow.to_dict(include_secret=True)
 assert workflow_dict["environment_variables"][0]["value"] == "secret"
 assert workflow_dict["environment_variables"][1]["value"] == "text"
+
+
+class TestWorkflowNodeExecution:
+def test_execution_metadata_dict(self):
+node_exec = WorkflowNodeExecution()
+node_exec.execution_metadata = None
+assert node_exec.execution_metadata_dict == {}
+
+original = {"a": 1, "b": ["2"]}
+node_exec.execution_metadata = json.dumps(original)
+assert node_exec.execution_metadata_dict == original
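The new test covers both edge cases of the property: None maps to an empty dict, and a JSON string round-trips to its original value. A plausible implementation consistent with those assertions (a sketch, not copied from models/workflow.py):

@property
def execution_metadata_dict(self) -> dict:
    # execution_metadata is assumed to be a JSON string column that may be None
    return json.loads(self.execution_metadata) if self.execution_metadata else {}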
@@ -4,12 +4,14 @@ Unit tests for the SQLAlchemy implementation of WorkflowNodeExecutionRepository.

 import json
 from datetime import datetime
+from decimal import Decimal
 from unittest.mock import MagicMock, PropertyMock

 import pytest
 from pytest_mock import MockerFixture
 from sqlalchemy.orm import Session, sessionmaker

+from core.model_runtime.utils.encoders import jsonable_encoder
 from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from core.workflow.entities.node_entities import NodeRunMetadataKey
 from core.workflow.entities.node_execution_entities import NodeExecution, NodeExecutionStatus
@@ -298,7 +300,7 @@ def test_to_db_model(repository):
 status=NodeExecutionStatus.RUNNING,
 error=None,
 elapsed_time=1.5,
-metadata={NodeRunMetadataKey.TOTAL_TOKENS: 100},
+metadata={NodeRunMetadataKey.TOTAL_TOKENS: 100, NodeRunMetadataKey.TOTAL_PRICE: Decimal("0.0")},
 created_at=datetime.now(),
 finished_at=None,
 )
@@ -324,7 +326,7 @@ def test_to_db_model(repository):
 assert db_model.inputs_dict == domain_model.inputs
 assert db_model.process_data_dict == domain_model.process_data
 assert db_model.outputs_dict == domain_model.outputs
-assert db_model.execution_metadata_dict == domain_model.metadata
+assert db_model.execution_metadata_dict == jsonable_encoder(domain_model.metadata)

 assert db_model.status == domain_model.status
 assert db_model.error == domain_model.error

@@ -531,6 +531,7 @@ RELYT_DATABASE=postgres
 OPENSEARCH_HOST=opensearch
 OPENSEARCH_PORT=9200
 OPENSEARCH_SECURE=true
+OPENSEARCH_VERIFY_CERTS=true
 OPENSEARCH_AUTH_METHOD=basic
 OPENSEARCH_USER=admin
 OPENSEARCH_PASSWORD=admin
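For context on the new flag: opensearch-py's OpenSearch client accepts use_ssl and verify_certs keyword arguments, which is the natural landing spot for OPENSEARCH_SECURE and OPENSEARCH_VERIFY_CERTS. A minimal sketch assuming that wiring (Dify's actual client construction may differ):

from opensearchpy import OpenSearch

client = OpenSearch(
    hosts=[{"host": "opensearch", "port": 9200}],
    http_auth=("admin", "admin"),
    use_ssl=True,       # OPENSEARCH_SECURE
    verify_certs=True,  # OPENSEARCH_VERIFY_CERTS; keep True in production
)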
@@ -1055,7 +1056,7 @@ PLUGIN_MAX_EXECUTION_TIMEOUT=600
 PIP_MIRROR_URL=

 # https://github.com/langgenius/dify-plugin-daemon/blob/main/.env.example
-# Plugin storage type, local aws_s3 tencent_cos azure_blob
+# Plugin storage type, local aws_s3 tencent_cos azure_blob aliyun_oss
 PLUGIN_STORAGE_TYPE=local
 PLUGIN_STORAGE_LOCAL_ROOT=/app/storage
 PLUGIN_WORKING_PATH=/app/storage/cwd
@@ -1078,6 +1079,13 @@ PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING=
 PLUGIN_TENCENT_COS_SECRET_KEY=
 PLUGIN_TENCENT_COS_SECRET_ID=
 PLUGIN_TENCENT_COS_REGION=
+# Plugin oss aliyun oss
+PLUGIN_ALIYUN_OSS_REGION=
+PLUGIN_ALIYUN_OSS_ENDPOINT=
+PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID=
+PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET=
+PLUGIN_ALIYUN_OSS_AUTH_VERSION=v4
+PLUGIN_ALIYUN_OSS_PATH=

 # ------------------------------
 # OTLP Collector Configuration
@@ -178,6 +178,12 @@ services:
 TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-}
 TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-}
 TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-}
+ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-}
+ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-}
+ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-}
+ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-}
+ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4}
+ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-}
 ports:
 - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}"
 volumes:
@@ -115,6 +115,12 @@ services:
 TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-}
 TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-}
 TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-}
+ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-}
+ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-}
+ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-}
+ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-}
+ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4}
+ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-}
 ports:
 - "${EXPOSE_PLUGIN_DAEMON_PORT:-5002}:${PLUGIN_DAEMON_PORT:-5002}"
 - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}"
@@ -227,6 +227,7 @@ x-shared-env: &shared-api-worker-env
 OPENSEARCH_HOST: ${OPENSEARCH_HOST:-opensearch}
 OPENSEARCH_PORT: ${OPENSEARCH_PORT:-9200}
 OPENSEARCH_SECURE: ${OPENSEARCH_SECURE:-true}
+OPENSEARCH_VERIFY_CERTS: ${OPENSEARCH_VERIFY_CERTS:-true}
 OPENSEARCH_AUTH_METHOD: ${OPENSEARCH_AUTH_METHOD:-basic}
 OPENSEARCH_USER: ${OPENSEARCH_USER:-admin}
 OPENSEARCH_PASSWORD: ${OPENSEARCH_PASSWORD:-admin}
@@ -476,6 +477,12 @@ x-shared-env: &shared-api-worker-env
 PLUGIN_TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-}
 PLUGIN_TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-}
 PLUGIN_TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-}
+PLUGIN_ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-}
+PLUGIN_ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-}
+PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-}
+PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-}
+PLUGIN_ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4}
+PLUGIN_ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-}
 ENABLE_OTEL: ${ENABLE_OTEL:-false}
 OTLP_BASE_ENDPOINT: ${OTLP_BASE_ENDPOINT:-http://localhost:4318}
 OTLP_API_KEY: ${OTLP_API_KEY:-}
@@ -669,6 +676,12 @@ services:
 TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-}
 TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-}
 TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-}
+ALIYUN_OSS_REGION: ${PLUGIN_ALIYUN_OSS_REGION:-}
+ALIYUN_OSS_ENDPOINT: ${PLUGIN_ALIYUN_OSS_ENDPOINT:-}
+ALIYUN_OSS_ACCESS_KEY_ID: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID:-}
+ALIYUN_OSS_ACCESS_KEY_SECRET: ${PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET:-}
+ALIYUN_OSS_AUTH_VERSION: ${PLUGIN_ALIYUN_OSS_AUTH_VERSION:-v4}
+ALIYUN_OSS_PATH: ${PLUGIN_ALIYUN_OSS_PATH:-}
 ports:
 - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}"
 volumes:
@@ -145,3 +145,10 @@ PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING=
 PLUGIN_TENCENT_COS_SECRET_KEY=
 PLUGIN_TENCENT_COS_SECRET_ID=
 PLUGIN_TENCENT_COS_REGION=
+# Plugin oss aliyun oss
+PLUGIN_ALIYUN_OSS_REGION=
+PLUGIN_ALIYUN_OSS_ENDPOINT=
+PLUGIN_ALIYUN_OSS_ACCESS_KEY_ID=
+PLUGIN_ALIYUN_OSS_ACCESS_KEY_SECRET=
+PLUGIN_ALIYUN_OSS_AUTH_VERSION=v4
+PLUGIN_ALIYUN_OSS_PATH=
@@ -31,126 +31,98 @@ type Props = {
 appDetail: App
 }

-const Annotation: FC<Props> = ({
-appDetail,
-}) => {
+const Annotation: FC<Props> = (props) => {
+const { appDetail } = props
 const { t } = useTranslation()
-const [isShowEdit, setIsShowEdit] = React.useState(false)
+const [isShowEdit, setIsShowEdit] = useState(false)
 const [annotationConfig, setAnnotationConfig] = useState<AnnotationReplyConfig | null>(null)
-const [isChatApp, setIsChatApp] = useState(false)
+const [isChatApp] = useState(appDetail.mode !== 'completion')
+const [controlRefreshSwitch, setControlRefreshSwitch] = useState(Date.now())
+const { plan, enableBilling } = useProviderContext()
+const isAnnotationFull = enableBilling && plan.usage.annotatedResponse >= plan.total.annotatedResponse
+const [isShowAnnotationFullModal, setIsShowAnnotationFullModal] = useState(false)
+const [queryParams, setQueryParams] = useState<QueryParam>({})
+const [currPage, setCurrPage] = useState(0)
+const [limit, setLimit] = useState(APP_PAGE_LIMIT)
+const [list, setList] = useState<AnnotationItem[]>([])
+const [total, setTotal] = useState(0)
+const [isLoading, setIsLoading] = useState(false)
+const [controlUpdateList, setControlUpdateList] = useState(Date.now())
+const [currItem, setCurrItem] = useState<AnnotationItem | null>(null)
+const [isShowViewModal, setIsShowViewModal] = useState(false)
+const debouncedQueryParams = useDebounce(queryParams, { wait: 500 })
+
 const fetchAnnotationConfig = async () => {
 const res = await doFetchAnnotationConfig(appDetail.id)
 setAnnotationConfig(res as AnnotationReplyConfig)
 return (res as AnnotationReplyConfig).id
 }
-useEffect(() => {
-const isChatApp = appDetail.mode !== 'completion'
-setIsChatApp(isChatApp)
-if (isChatApp)
-fetchAnnotationConfig()
-}, [])
-const [controlRefreshSwitch, setControlRefreshSwitch] = useState(Date.now())
-const { plan, enableBilling } = useProviderContext()
-const isAnnotationFull = (enableBilling && plan.usage.annotatedResponse >= plan.total.annotatedResponse)
-const [isShowAnnotationFullModal, setIsShowAnnotationFullModal] = useState(false)
-const ensureJobCompleted = async (jobId: string, status: AnnotationEnableStatus) => {
-let isCompleted = false
-while (!isCompleted) {
-const res: any = await queryAnnotationJobStatus(appDetail.id, status, jobId)
-isCompleted = res.job_status === JobStatus.completed
-if (isCompleted)
-break

+useEffect(() => {
+if (isChatApp) fetchAnnotationConfig()
+// eslint-disable-next-line react-hooks/exhaustive-deps
+}, [])
+
+const ensureJobCompleted = async (jobId: string, status: AnnotationEnableStatus) => {
+while (true) {
+const res: any = await queryAnnotationJobStatus(appDetail.id, status, jobId)
+if (res.job_status === JobStatus.completed) break
 await sleep(2000)
 }
 }

-const [queryParams, setQueryParams] = useState<QueryParam>({})
-const [currPage, setCurrPage] = React.useState<number>(0)
-const debouncedQueryParams = useDebounce(queryParams, { wait: 500 })
-const [limit, setLimit] = React.useState<number>(APP_PAGE_LIMIT)
-const query = {
-page: currPage + 1,
-limit,
-keyword: debouncedQueryParams.keyword || '',
-}

-const [controlUpdateList, setControlUpdateList] = useState(Date.now())
-const [list, setList] = useState<AnnotationItem[]>([])
-const [total, setTotal] = useState(10)
-const [isLoading, setIsLoading] = useState(false)
 const fetchList = async (page = 1) => {
 setIsLoading(true)
 try {
 const { data, total }: any = await fetchAnnotationList(appDetail.id, {
-...query,
 page,
+limit,
+keyword: debouncedQueryParams.keyword || '',
 })
 setList(data as AnnotationItem[])
 setTotal(total)
 }
-catch {
+finally {
+setIsLoading(false)
 }
-setIsLoading(false)
 }

 useEffect(() => {
 fetchList(currPage + 1)
-}, [currPage])
-useEffect(() => {
-fetchList(1)
-setControlUpdateList(Date.now())
-}, [queryParams])
+// eslint-disable-next-line react-hooks/exhaustive-deps
+}, [currPage, limit, debouncedQueryParams])

 const handleAdd = async (payload: AnnotationItemBasic) => {
-await addAnnotation(appDetail.id, {
-...payload,
-})
-Toast.notify({
-message: t('common.api.actionSuccess'),
-type: 'success',
-})
+await addAnnotation(appDetail.id, payload)
+Toast.notify({ message: t('common.api.actionSuccess'), type: 'success' })
 fetchList()
 setControlUpdateList(Date.now())
 }

 const handleRemove = async (id: string) => {
 await delAnnotation(appDetail.id, id)
-Toast.notify({
-message: t('common.api.actionSuccess'),
-type: 'success',
-})
+Toast.notify({ message: t('common.api.actionSuccess'), type: 'success' })
 fetchList()
 setControlUpdateList(Date.now())
 }

-const [currItem, setCurrItem] = useState<AnnotationItem | null>(list[0])
-const [isShowViewModal, setIsShowViewModal] = useState(false)
-useEffect(() => {
-if (!isShowEdit)
-setControlRefreshSwitch(Date.now())
-}, [isShowEdit])
 const handleView = (item: AnnotationItem) => {
 setCurrItem(item)
 setIsShowViewModal(true)
 }

 const handleSave = async (question: string, answer: string) => {
-await editAnnotation(appDetail.id, (currItem as AnnotationItem).id, {
-question,
-answer,
-})
-Toast.notify({
-message: t('common.api.actionSuccess'),
-type: 'success',
-})
+if (!currItem) return
+await editAnnotation(appDetail.id, currItem.id, { question, answer })
+Toast.notify({ message: t('common.api.actionSuccess'), type: 'success' })
 fetchList()
 setControlUpdateList(Date.now())
 }

+useEffect(() => {
+if (!isShowEdit) setControlRefreshSwitch(Date.now())
+}, [isShowEdit])
+
 return (
 <div className='flex h-full flex-col'>
 <p className='system-sm-regular text-text-tertiary'>{t('appLog.description')}</p>
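The ensureJobCompleted refactor above collapses the isCompleted flag into a single loop that breaks on completion and otherwise sleeps. The same shape, paraphrased in Python for comparison (query_status stands in for the component's queryAnnotationJobStatus call, so the names here are illustrative):

import time

def ensure_job_completed(query_status, poll_interval_s: float = 2.0) -> None:
    # Poll until the job reports completion, sleeping between attempts.
    while True:
        if query_status() == "completed":
            break
        time.sleep(poll_interval_s)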
@@ -211,6 +183,7 @@ const Annotation: FC<Props> = ({
 </Filter>
 {isLoading
 ? <Loading type='app' />
+// eslint-disable-next-line sonarjs/no-nested-conditional
 : total > 0
 ? <List
 list={list}
|
@ -234,9 +234,14 @@ const ConfigModal: FC<IConfigModalProps> = ({
|
|||||||
)}
|
)}
|
||||||
|
|
||||||
<div className='!mt-5 flex h-6 items-center space-x-2'>
|
<div className='!mt-5 flex h-6 items-center space-x-2'>
|
||||||
<Checkbox checked={tempPayload.required} onCheck={() => handlePayloadChange('required')(!tempPayload.required)} />
|
<Checkbox checked={tempPayload.required} disabled={tempPayload.hide} onCheck={() => handlePayloadChange('required')(!tempPayload.required)} />
|
||||||
<span className='system-sm-semibold text-text-secondary'>{t('appDebug.variableConfig.required')}</span>
|
<span className='system-sm-semibold text-text-secondary'>{t('appDebug.variableConfig.required')}</span>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div className='!mt-5 flex h-6 items-center space-x-2'>
|
||||||
|
<Checkbox checked={tempPayload.hide} disabled={tempPayload.required} onCheck={() => handlePayloadChange('hide')(!tempPayload.hide)} />
|
||||||
|
<span className='system-sm-semibold text-text-secondary'>{t('appDebug.variableConfig.hide')}</span>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<ModalFoot
|
<ModalFoot
|
||||||
|
@@ -18,7 +18,7 @@ import style from './style.module.css'
 import type { ConfigParams } from './settings'
 import Tooltip from '@/app/components/base/tooltip'
 import AppBasic from '@/app/components/app-sidebar/basic'
-import { asyncRunSafe, randomString } from '@/utils'
+import { asyncRunSafe } from '@/utils'
 import { basePath } from '@/utils/var'
 import { useStore as useAppStore } from '@/app/components/app/store'
 import Button from '@/app/components/base/button'
@@ -184,7 +184,7 @@ function AppCard({
 : t('appOverview.overview.apiInfo.explanation')
 }
 />
-<div className='flex items-center gap-1'>
+<div className='flex shrink-0 items-center gap-1'>
 <Indicator color={runningStatus ? 'green' : 'yellow'} />
 <div className={`${runningStatus ? 'text-text-success' : 'text-text-warning'} system-xs-semibold-uppercase`}>
 {runningStatus
@@ -210,7 +210,7 @@ function AppCard({
 content={isApp ? appUrl : apiUrl}
 className={'!size-6'}
 />
-{isApp && <ShareQRCode content={isApp ? appUrl : apiUrl} className='z-50 !size-6 rounded-md hover:bg-state-base-hover' selectorId={randomString(8)} />}
+{isApp && <ShareQRCode content={isApp ? appUrl : apiUrl} />}
 {isApp && <Divider type="vertical" className="!mx-0.5 !h-3.5 shrink-0" />}
 {/* button copy link/ button regenerate */}
 {showConfirmDelete && (
@@ -94,7 +94,7 @@ const ImageInput: FC<UploaderProps> = ({
 <div
 className={classNames(
 isDragActive && 'border-primary-600',
-'relative aspect-square bg-gray-50 border-[1.5px] border-gray-200 border-dashed rounded-lg flex flex-col justify-center items-center text-gray-500')}
+'relative aspect-square border-[1.5px] border-dashed rounded-lg flex flex-col justify-center items-center text-gray-500')}
 onDragEnter={handleDragEnter}
 onDragOver={handleDragOver}
 onDragLeave={handleDragLeave}
@@ -115,7 +115,7 @@ const AppIconPicker: FC<AppIconPickerProps> = ({
 className={cn(s.container, '!w-[362px] !p-0')}
 >
 {!DISABLE_UPLOAD_IMAGE_AS_ICON && <div className="w-full p-2 pb-0">
-<div className='flex items-center justify-center gap-2 rounded-xl bg-background-body p-1'>
+<div className='flex items-center justify-center gap-2 rounded-xl bg-background-body p-1 text-text-primary'>
 {tabs.map(tab => (
 <button
 key={tab.key}
@@ -4,9 +4,6 @@
 align-items: flex-start;
 width: 362px;
 max-height: 552px;

-border: 0.5px solid #EAECF0;
 box-shadow: 0px 12px 16px -4px rgba(16, 24, 40, 0.08), 0px 4px 6px -2px rgba(16, 24, 40, 0.03);
 border-radius: 12px;
-background: #fff;
 }
@@ -47,6 +47,7 @@ const ChatWrapper = () => {
 clearChatList,
 setClearChatList,
 setIsResponding,
+allInputsHidden,
 } = useChatWithHistoryContext()
 const appConfig = useMemo(() => {
 const config = appParams || {}
@@ -81,6 +82,9 @@ const ChatWrapper = () => {
 )
 const inputsFormValue = currentConversationId ? currentConversationInputs : newConversationInputsRef?.current
 const inputDisabled = useMemo(() => {
+if (allInputsHidden)
+return false
+
 let hasEmptyInput = ''
 let fileIsUploading = false
 const requiredVars = inputsForms.filter(({ required }) => required)
@@ -110,7 +114,7 @@ const ChatWrapper = () => {
 if (fileIsUploading)
 return true
 return false
-}, [inputsFormValue, inputsForms])
+}, [inputsFormValue, inputsForms, allInputsHidden])
 
 useEffect(() => {
 if (currentChatInstanceRef.current)
@@ -161,7 +165,7 @@ const ChatWrapper = () => {
 const [collapsed, setCollapsed] = useState(!!currentConversationId)
 
 const chatNode = useMemo(() => {
-if (!inputsForms.length)
+if (allInputsHidden || !inputsForms.length)
 return null
 if (isMobile) {
 if (!currentConversationId)
@@ -171,7 +175,7 @@ const ChatWrapper = () => {
 else {
 return <InputsForm collapsed={collapsed} setCollapsed={setCollapsed} />
 }
-}, [inputsForms.length, isMobile, currentConversationId, collapsed])
+}, [inputsForms.length, isMobile, currentConversationId, collapsed, allInputsHidden])
 
 const welcome = useMemo(() => {
 const welcomeMessage = chatList.find(item => item.isOpeningStatement)
@@ -181,7 +185,7 @@ const ChatWrapper = () => {
 return null
 if (!welcomeMessage)
 return null
-if (!collapsed && inputsForms.length > 0)
+if (!collapsed && inputsForms.length > 0 && !allInputsHidden)
 return null
 if (welcomeMessage.suggestedQuestions && welcomeMessage.suggestedQuestions?.length > 0) {
 return (
@@ -218,7 +222,7 @@ const ChatWrapper = () => {
 </div>
 </div>
 )
-}, [appData?.site.icon, appData?.site.icon_background, appData?.site.icon_type, appData?.site.icon_url, chatList, collapsed, currentConversationId, inputsForms.length, respondingState])
+}, [appData?.site.icon, appData?.site.icon_background, appData?.site.icon_type, appData?.site.icon_url, chatList, collapsed, currentConversationId, inputsForms.length, respondingState, allInputsHidden])
 
 const answerIcon = (appData?.site && appData.site.use_icon_as_answer_icon)
 ? <AnswerIcon
@@ -60,6 +60,7 @@ export type ChatWithHistoryContextValue = {
 setIsResponding: (state: boolean) => void,
 currentConversationInputs: Record<string, any> | null,
 setCurrentConversationInputs: (v: Record<string, any>) => void,
+allInputsHidden: boolean,
 }
 
 export const ChatWithHistoryContext = createContext<ChatWithHistoryContextValue>({
@@ -95,5 +96,6 @@ export const ChatWithHistoryContext = createContext<ChatWithHistoryContextValue>
 setIsResponding: noop,
 currentConversationInputs: {},
 setCurrentConversationInputs: noop,
+allInputsHidden: false,
 })
 export const useChatWithHistoryContext = () => useContext(ChatWithHistoryContext)
@@ -240,6 +240,11 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => {
 }
 })
 }, [appParams])
+
+const allInputsHidden = useMemo(() => {
+return inputsForms.length > 0 && inputsForms.every(item => item.hide === true)
+}, [inputsForms])
+
 useEffect(() => {
 const conversationInputs: Record<string, any> = {}
 
@@ -304,6 +309,9 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => {
 
 const { notify } = useToastContext()
 const checkInputsRequired = useCallback((silent?: boolean) => {
+if (allInputsHidden)
+return true
+
 let hasEmptyInput = ''
 let fileIsUploading = false
 const requiredVars = inputsForms.filter(({ required }) => required)
@@ -339,7 +347,7 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => {
 }
 
 return true
-}, [inputsForms, notify, t])
+}, [inputsForms, notify, t, allInputsHidden])
 const handleStartChat = useCallback((callback: any) => {
 if (checkInputsRequired()) {
 setShowNewConversationItemInList(true)
@@ -507,5 +515,6 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => {
 setIsResponding,
 currentConversationInputs,
 setCurrentConversationInputs,
+allInputsHidden,
 }
 }
@@ -161,6 +161,7 @@ const ChatWithHistoryWrap: FC<ChatWithHistoryWrapProps> = ({
 setIsResponding,
 currentConversationInputs,
 setCurrentConversationInputs,
+allInputsHidden,
 } = useChatWithHistory(installedAppInfo)
 
 return (
@@ -206,6 +207,7 @@ const ChatWithHistoryWrap: FC<ChatWithHistoryWrapProps> = ({
 setIsResponding,
 currentConversationInputs,
 setCurrentConversationInputs,
+allInputsHidden,
 }}>
 <ChatWithHistory className={className} />
 </ChatWithHistoryContext.Provider>
@@ -36,9 +36,11 @@ const InputsFormContent = ({ showTip }: Props) => {
 })
 }, [newConversationInputsRef, handleNewConversationInputsChange, currentConversationInputs, setCurrentConversationInputs])
 
+const visibleInputsForms = inputsForms.filter(form => form.hide !== true)
+
 return (
 <div className='space-y-4'>
-{inputsForms.map(form => (
+{visibleInputsForms.map(form => (
 <div key={form.variable} className='space-y-1'>
 <div className='flex h-6 items-center gap-1'>
 <div className='system-md-semibold text-text-secondary'>{form.label}</div>
@@ -21,9 +21,14 @@ const InputsFormNode = ({
 isMobile,
 currentConversationId,
 handleStartChat,
+allInputsHidden,
 themeBuilder,
+inputsForms,
 } = useChatWithHistoryContext()
 
+if (allInputsHidden || inputsForms.length === 0)
+return null
+
 return (
 <div className={cn('flex flex-col items-center px-4 pt-6', isMobile && 'pt-4')}>
 <div className={cn(
@@ -143,5 +143,6 @@ export type InputForm = {
 label: string
 variable: any
 required: boolean
+hide: boolean
 [key: string]: any
 }
@@ -48,6 +48,7 @@ const ChatWrapper = () => {
 clearChatList,
 setClearChatList,
 setIsResponding,
+allInputsHidden,
 } = useEmbeddedChatbotContext()
 const appConfig = useMemo(() => {
 const config = appParams || {}
@@ -82,6 +83,9 @@ const ChatWrapper = () => {
 )
 const inputsFormValue = currentConversationId ? currentConversationInputs : newConversationInputsRef?.current
 const inputDisabled = useMemo(() => {
+if (allInputsHidden)
+return false
+
 let hasEmptyInput = ''
 let fileIsUploading = false
 const requiredVars = inputsForms.filter(({ required }) => required)
@@ -111,7 +115,7 @@ const ChatWrapper = () => {
 if (fileIsUploading)
 return true
 return false
-}, [inputsFormValue, inputsForms])
+}, [inputsFormValue, inputsForms, allInputsHidden])
 
 useEffect(() => {
 if (currentChatInstanceRef.current)
@@ -160,7 +164,7 @@ const ChatWrapper = () => {
 const [collapsed, setCollapsed] = useState(!!currentConversationId)
 
 const chatNode = useMemo(() => {
-if (!inputsForms.length)
+if (allInputsHidden || !inputsForms.length)
 return null
 if (isMobile) {
 if (!currentConversationId)
@@ -170,7 +174,7 @@ const ChatWrapper = () => {
 else {
 return <InputsForm collapsed={collapsed} setCollapsed={setCollapsed} />
 }
-}, [inputsForms.length, isMobile, currentConversationId, collapsed])
+}, [inputsForms.length, isMobile, currentConversationId, collapsed, allInputsHidden])
 
 const welcome = useMemo(() => {
 const welcomeMessage = chatList.find(item => item.isOpeningStatement)
@@ -180,7 +184,7 @@ const ChatWrapper = () => {
 return null
 if (!welcomeMessage)
 return null
-if (!collapsed && inputsForms.length > 0)
+if (!collapsed && inputsForms.length > 0 && !allInputsHidden)
 return null
 if (welcomeMessage.suggestedQuestions && welcomeMessage.suggestedQuestions?.length > 0) {
 return (
@@ -215,7 +219,7 @@ const ChatWrapper = () => {
 </div>
 </div>
 )
-}, [appData?.site.icon, appData?.site.icon_background, appData?.site.icon_type, appData?.site.icon_url, chatList, collapsed, currentConversationId, inputsForms.length, respondingState])
+}, [appData?.site.icon, appData?.site.icon_background, appData?.site.icon_type, appData?.site.icon_url, chatList, collapsed, currentConversationId, inputsForms.length, respondingState, allInputsHidden])
 
 const answerIcon = isDify()
 ? <LogoAvatar className='relative shrink-0' />
@@ -53,6 +53,7 @@ export type EmbeddedChatbotContextValue = {
 setIsResponding: (state: boolean) => void,
 currentConversationInputs: Record<string, any> | null,
 setCurrentConversationInputs: (v: Record<string, any>) => void,
+allInputsHidden: boolean
 }
 
 export const EmbeddedChatbotContext = createContext<EmbeddedChatbotContextValue>({
@@ -82,5 +83,6 @@ export const EmbeddedChatbotContext = createContext<EmbeddedChatbotContextValue>
 setIsResponding: noop,
 currentConversationInputs: {},
 setCurrentConversationInputs: noop,
+allInputsHidden: false,
 })
 export const useEmbeddedChatbotContext = () => useContext(EmbeddedChatbotContext)
@@ -235,6 +235,10 @@ export const useEmbeddedChatbot = () => {
 })
 }, [initInputs, appParams])
 
+const allInputsHidden = useMemo(() => {
+return inputsForms.length > 0 && inputsForms.every(item => item.hide === true)
+}, [inputsForms])
+
 useEffect(() => {
 // init inputs from url params
 (async () => {
@@ -306,6 +310,9 @@ export const useEmbeddedChatbot = () => {
 
 const { notify } = useToastContext()
 const checkInputsRequired = useCallback((silent?: boolean) => {
+if (allInputsHidden)
+return true
+
 let hasEmptyInput = ''
 let fileIsUploading = false
 const requiredVars = inputsForms.filter(({ required }) => required)
@@ -341,7 +348,7 @@ export const useEmbeddedChatbot = () => {
 }
 
 return true
-}, [inputsForms, notify, t])
+}, [inputsForms, notify, t, allInputsHidden])
 const handleStartChat = useCallback((callback?: any) => {
 if (checkInputsRequired()) {
 setShowNewConversationItemInList(true)
@@ -417,5 +424,6 @@ export const useEmbeddedChatbot = () => {
 setIsResponding,
 currentConversationInputs,
 setCurrentConversationInputs,
+allInputsHidden,
 }
 }
@@ -168,6 +168,7 @@ const EmbeddedChatbotWrapper = () => {
 setIsResponding,
 currentConversationInputs,
 setCurrentConversationInputs,
+allInputsHidden,
 } = useEmbeddedChatbot()
 
 return <EmbeddedChatbotContext.Provider value={{
@@ -206,6 +207,7 @@ const EmbeddedChatbotWrapper = () => {
 setIsResponding,
 currentConversationInputs,
 setCurrentConversationInputs,
+allInputsHidden,
 }}>
 <Chatbot />
 </EmbeddedChatbotContext.Provider>
@@ -36,9 +36,11 @@ const InputsFormContent = ({ showTip }: Props) => {
 })
 }, [newConversationInputsRef, handleNewConversationInputsChange, currentConversationInputs, setCurrentConversationInputs])
 
+const visibleInputsForms = inputsForms.filter(form => form.hide !== true)
+
 return (
 <div className='space-y-4'>
-{inputsForms.map(form => (
+{visibleInputsForms.map(form => (
 <div key={form.variable} className='space-y-1'>
 <div className='flex h-6 items-center gap-1'>
 <div className='system-md-semibold text-text-secondary'>{form.label}</div>
@@ -22,8 +22,13 @@ const InputsFormNode = ({
 currentConversationId,
 themeBuilder,
 handleStartChat,
+allInputsHidden,
+inputsForms,
 } = useEmbeddedChatbotContext()
 
+if (allInputsHidden || inputsForms.length === 0)
+return null
+
 return (
 <div className={cn('mb-6 flex flex-col items-center px-4 pt-6', isMobile && 'mb-4 pt-4')}>
 <div className={cn(
@@ -216,16 +216,24 @@ CodeBlock.displayName = 'CodeBlock'
 
 const VideoBlock: any = memo(({ node }: any) => {
 const srcs = node.children.filter((child: any) => 'properties' in child).map((child: any) => (child as any).properties.src)
-if (srcs.length === 0)
+if (srcs.length === 0) {
+const src = node.properties?.src
+if (src)
+return <VideoGallery key={src} srcs={[src]} />
 return null
+}
 return <VideoGallery key={srcs.join()} srcs={srcs} />
 })
 VideoBlock.displayName = 'VideoBlock'
 
 const AudioBlock: any = memo(({ node }: any) => {
 const srcs = node.children.filter((child: any) => 'properties' in child).map((child: any) => (child as any).properties.src)
-if (srcs.length === 0)
+if (srcs.length === 0) {
+const src = node.properties?.src
+if (src)
+return <AudioGallery key={src} srcs={[src]} />
 return null
+}
 return <AudioGallery key={srcs.join()} srcs={srcs} />
 })
 AudioBlock.displayName = 'AudioBlock'
@@ -24,7 +24,7 @@ const HelpLink = ({
 <a
 href={link}
 target='_blank'
-className='mr-1 flex h-6 w-6 items-center justify-center'
+className='mr-1 flex h-6 w-6 items-center justify-center rounded-md hover:bg-state-base-hover'
 >
 <RiBookOpenLine className='h-4 w-4 text-gray-500' />
 </a>
@@ -0,0 +1,54 @@
+import { memo } from 'react'
+import { useTranslation } from 'react-i18next'
+import { RiCrosshairLine } from '@remixicon/react'
+import type { XYPosition } from 'reactflow'
+import { useReactFlow, useStoreApi } from 'reactflow'
+import TooltipPlus from '@/app/components/base/tooltip'
+import { useNodesSyncDraft } from '@/app/components/workflow-app/hooks'
+
+type NodePositionProps = {
+nodePosition: XYPosition,
+nodeWidth?: number | null,
+nodeHeight?: number | null,
+}
+const NodePosition = ({
+nodePosition,
+nodeWidth,
+nodeHeight,
+}: NodePositionProps) => {
+const { t } = useTranslation()
+const reactflow = useReactFlow()
+const store = useStoreApi()
+const { doSyncWorkflowDraft } = useNodesSyncDraft()
+
+if (!nodePosition || !nodeWidth || !nodeHeight) return null
+
+const workflowContainer = document.getElementById('workflow-container')
+const { transform } = store.getState()
+const zoom = transform[2]
+
+const { clientWidth, clientHeight } = workflowContainer!
+const { setViewport } = reactflow
+
+return (
+<TooltipPlus
+popupContent={t('workflow.panel.moveToThisNode')}
+>
+<div
+className='mr-1 flex h-6 w-6 cursor-pointer items-center justify-center rounded-md hover:bg-state-base-hover'
+onClick={() => {
+setViewport({
+x: (clientWidth - 400 - nodeWidth * zoom) / 2 - nodePosition.x * zoom,
+y: (clientHeight - nodeHeight * zoom) / 2 - nodePosition.y * zoom,
+zoom: transform[2],
+})
+doSyncWorkflowDraft()
+}}
+>
+<RiCrosshairLine className='h-4 w-4 text-text-tertiary' />
+</div>
+</TooltipPlus>
+)
+}
+
+export default memo(NodePosition)
@@ -16,6 +16,7 @@ import { useTranslation } from 'react-i18next'
 import NextStep from './components/next-step'
 import PanelOperator from './components/panel-operator'
 import HelpLink from './components/help-link'
+import NodePosition from './components/node-position'
 import {
 DescriptionInput,
 TitleInput,
@@ -55,6 +56,9 @@ const BasePanel: FC<BasePanelProps> = ({
 id,
 data,
 children,
+position,
+width,
+height,
 }) => {
 const { t } = useTranslation()
 const { showMessageLogModal } = useAppStore(useShallow(state => ({
@@ -150,6 +154,7 @@ const BasePanel: FC<BasePanelProps> = ({
 </Tooltip>
 )
 }
+<NodePosition nodePosition={position} nodeWidth={width} nodeHeight={height}></NodePosition>
 <HelpLink nodeType={data.type} />
 <PanelOperator id={id} data={data} showHelpLink={false} />
 <div className='mx-3 h-3.5 w-[1px] bg-divider-regular' />
@@ -39,6 +39,7 @@ const DebugAndPreview = () => {
 const nodes = useNodes<StartNodeType>()
 const startNode = nodes.find(node => node.data.type === BlockEnum.Start)
 const variables = startNode?.data.variables || []
+const visibleVariables = variables.filter(v => v.hide !== true)
 
 const [showConversationVariableModal, setShowConversationVariableModal] = useState(false)
 
@@ -107,7 +108,7 @@ const DebugAndPreview = () => {
 </ActionButton>
 </Tooltip>
 )}
-{variables.length > 0 && (
+{visibleVariables.length > 0 && (
 <div className='relative'>
 <Tooltip
 popupContent={t('workflow.panel.userInputField')}
@@ -17,6 +17,7 @@ const UserInput = () => {
 const nodes = useNodes<StartNodeType>()
 const startNode = nodes.find(node => node.data.type === BlockEnum.Start)
 const variables = startNode?.data.variables || []
+const visibleVariables = variables.filter(v => v.hide !== true)
 
 const handleValueChange = (variable: string, v: string) => {
 const {
@@ -29,13 +30,13 @@ const UserInput = () => {
 })
 }
 
-if (!variables.length)
+if (!visibleVariables.length)
 return null
 
 return (
 <div className={cn('sticky top-0 z-[1] rounded-xl border-[0.5px] border-components-panel-border-subtle bg-components-panel-on-panel-item-bg shadow-xs')}>
 <div className='px-4 pb-4 pt-3'>
-{variables.map((variable, index) => (
+{visibleVariables.map((variable, index) => (
 <div
 key={variable.variable}
 className='mb-4 last-of-type:mb-0'
@@ -21,4 +21,6 @@
 z-index: -1000 !important;
 }
 
-#workflow-container .react-flow {}
+#workflow-container .react-flow__attribution {
+background: none !important;
+}
@@ -198,6 +198,7 @@ export type InputVar = {
 hint?: string
 options?: string[]
 value_selector?: ValueSelector
+hide: boolean
 } & Partial<UploadFileSetting>
 
 export type ModelConfig = {
@@ -302,6 +302,7 @@ const translation = {
 organizeBlocks: 'Blöcke organisieren',
 change: 'Ändern',
 optional: '(optional)',
+moveToThisNode: 'Bewege zu diesem Knoten',
 },
 nodes: {
 common: {
@@ -368,6 +368,7 @@ const translation = {
 'inputPlaceholder': 'Please input',
 'content': 'Content',
 'required': 'Required',
+'hide': 'Hide',
 'file': {
 supportFileTypes: 'Support File Types',
 image: {
@@ -92,9 +92,9 @@ const translation = {
 member: 'Member',
 memberAfter: 'Member',
 messageRequest: {
-title: '{{count,number}} messages',
-titlePerMonth: '{{count,number}} messages/month',
-tooltip: 'Message invocation quotas for various plans using OpenAl models. Messages over the limit will use your OpenAI API Key.',
+title: '{{count,number}} message credits',
+titlePerMonth: '{{count,number}} message credits/month',
+tooltip: 'Message credits are provided to help you easily try out different OpenAI models in Dify. Credits are consumed based on the model type. Once they’re used up, you can switch to your own OpenAI API key.',
 },
 annotatedResponse: {
 title: '{{count,number}} Annotation Quota Limits',
@@ -300,6 +300,7 @@ const translation = {
 addNextStep: 'Add the next block in this workflow',
 selectNextStep: 'Select Next Block',
 runThisStep: 'Run this step',
+moveToThisNode: 'Move to this node',
 checklist: 'Checklist',
 checklistTip: 'Make sure all issues are resolved before publishing',
 checklistResolved: 'All issues are resolved',
@@ -302,6 +302,7 @@ const translation = {
 organizeBlocks: 'Organizar bloques',
 change: 'Cambiar',
 optional: '(opcional)',
+moveToThisNode: 'Mueve a este nodo',
 },
 nodes: {
 common: {
@@ -302,6 +302,7 @@ const translation = {
 organizeBlocks: 'سازماندهی بلوک‌ها',
 change: 'تغییر',
 optional: '(اختیاری)',
+moveToThisNode: 'به این گره بروید',
 },
 nodes: {
 common: {
@@ -302,6 +302,7 @@ const translation = {
 organizeBlocks: 'Organiser les blocs',
 change: 'Modifier',
 optional: '(facultatif)',
+moveToThisNode: 'Déplacer vers ce nœud',
 },
 nodes: {
 common: {
@@ -314,6 +314,7 @@ const translation = {
 organizeBlocks: 'ब्लॉक्स को व्यवस्थित करें',
 change: 'बदलें',
 optional: '(वैकल्पिक)',
+moveToThisNode: 'इस नोड पर जाएं',
 },
 nodes: {
 common: {
@@ -317,6 +317,7 @@ const translation = {
 organizeBlocks: 'Organizza blocchi',
 change: 'Cambia',
 optional: '(opzionale)',
+moveToThisNode: 'Sposta a questo nodo',
 },
 nodes: {
 common: {
@@ -218,6 +218,10 @@ const translation = {
 enableText: '有効な機能',
 manage: '管理',
 },
+documentUpload: {
+title: 'ドキュメント',
+description: 'ドキュメント機能を有効にすると、AIモデルがファイルを処理し、その内容に基づいて質問に回答できるようになります。',
+},
 },
 codegen: {
 title: 'コードジェネレーター',
@@ -246,6 +250,7 @@ const translation = {
 noDataLine1: '左側に使用例を記入してください,',
 noDataLine2: 'オーケストレーションのプレビューがこちらに表示されます。',
 apply: '適用',
+noData: '左側にユースケースを入力すると、こちらでプレビューができます。',
 loading: 'アプリケーションを処理中です',
 overwriteTitle: '既存の設定を上書きしますか?',
 overwriteMessage: 'このプロンプトを適用すると、既存の設定が上書きされます。',
@@ -302,10 +307,7 @@ const translation = {
 waitForImgUpload: '画像のアップロードが完了するまでお待ちください',
 waitForFileUpload: 'ファイルのアップロードが完了するまでお待ちください',
 },
-warningMessage: {
-timeoutExceeded: 'タイムアウトのため結果が表示されません。完全な結果を手にいれるためには、ログを参照してください。',
-},
-chatSubTitle: '手順',
+chatSubTitle: 'プロンプト',
 completionSubTitle: '接頭辞プロンプト',
 promptTip: 'プロンプトは、AIの応答を指示と制約で誘導します。 {{input}} のような変数を挿入します。このプロンプトはユーザーには表示されません。',
 formattingChangedTitle: '書式が変更されました',
@@ -356,7 +358,6 @@ const translation = {
 'varName': '変数名',
 'labelName': 'ラベル名',
 'inputPlaceholder': '入力してください',
-'content': 'コンテンツ',
 'required': '必須',
 'file': {
 supportFileTypes: 'サポートされたファイルタイプ',
@@ -452,10 +453,8 @@ const translation = {
 noPrompt: 'プレプロンプト入力にいくつかのプロンプトを記入してみてください',
 userInputField: 'ユーザー入力フィールド',
 noVar: '変数の値を入力してください。新しいセッションが開始されるたびにプロンプトの単語が自動的に置換されます。',
-chatVarTip:
-'変数の値を入力してください。新しいセッションが開始されるたびにプロンプトの単語が自動的に置換されます。',
-completionVarTip:
-'変数の値を入力してください。質問が送信されるたびにプロンプトの単語が自動的に置換されます。',
+chatVarTip: '変数の値を入力してください。新しいセッションが開始されるたびにプロンプトの単語が自動的に置換されます。',
+completionVarTip: '変数の値を入力してください。質問が送信されるたびにプロンプトの単語が自動的に置換されます。',
 previewTitle: 'プロンプトのプレビュー',
 queryTitle: 'クエリ内容',
 queryPlaceholder: 'リクエストテキストを入力してください。',
@@ -474,6 +473,7 @@ const translation = {
 title: 'マルチパスリトリーバル',
 description: 'ユーザーの意図に基づいて、すべてのナレッジをクエリし、複数のソースから関連するテキストを取得し、再順位付け後、ユーザークエリに最適な結果を選択します。再順位付けモデル API の構成が必要です。',
 },
+embeddingModelRequired: 'Embeddingモデルが設定されていない',
 rerankModelRequired: '再順位付けモデルが必要です',
 params: 'パラメータ',
 top_k: 'トップK',
@@ -3,7 +3,7 @@ const translation = {
 firstStepTip: 'はじめるには、',
 enterKeyTip: '以下にOpenAI APIキーを入力してください',
 getKeyTip: 'OpenAIダッシュボードからAPIキーを取得してください',
-placeholder: 'あなた様のOpenAI APIキー(例:sk-xxxx)',
+placeholder: 'OpenAI APIキー(例:sk-xxxx)',
 },
 apiKeyInfo: {
 cloud: {
@@ -67,7 +67,7 @@ const translation = {
 customDisclaimerPlaceholder: '免責事項を入力してください',
 customDisclaimerTip: 'アプリケーションの使用に関する免責事項を提供します。',
 copyrightTooltip: 'プロフェッショナルプラン以上にアップグレードしてください',
-copyrightTip: 'ウェブアプリに著作権情報を表示する',
+copyrightTip: 'Webアプリに著作権情報を表示する',
 },
 sso: {
 title: 'WebアプリのSSO',
@@ -117,7 +117,7 @@ const translation = {
 },
 apiInfo: {
 title: 'バックエンドサービスAPI',
-explanation: 'あなた様のアプリケーションに簡単に統合できます',
+explanation: 'あなたのアプリケーションに簡単に統合できます',
 accessibleAddress: 'サービスAPIエンドポイント',
 doc: 'APIリファレンス',
 },
@@ -10,6 +10,10 @@ const translation = {
 advanced: 'チャットフロー',
 },
 duplicate: '複製',
+mermaid: {
+handDrawn: '手描き',
+classic: 'クラシック',
+},
 duplicateTitle: 'アプリを複製する',
 export: 'DSL をエクスポート',
 exportFailed: 'DSL のエクスポートに失敗しました。',
@@ -21,12 +25,11 @@ const translation = {
 importFromDSLUrlPlaceholder: 'DSLリンクをここに貼り付けます',
 deleteAppConfirmTitle: 'このアプリを削除しますか?',
 deleteAppConfirmContent:
-'アプリを削除すると、元に戻すことはできません。ユーザーはもはやあなた様のアプリにアクセスできず、すべてのプロンプトの設定とログが永久に削除されます。',
+'アプリを削除すると、元に戻すことはできません。他のユーザーはもはやこのアプリにアクセスできず、すべてのプロンプトの設定とログが永久に削除されます。',
 appDeleted: 'アプリが削除されました',
 appDeleteFailed: 'アプリの削除に失敗しました',
 join: 'コミュニティに参加する',
-communityIntro:
-'さまざまなチャンネルでチームメンバーや貢献者、開発者と議論します。',
+communityIntro: 'さまざまなチャンネルでチームメンバーや貢献者、開発者と議論します。',
 roadmap: 'ロードマップを見る',
 newApp: {
 startFromBlank: '最初から作成',
@@ -128,6 +131,7 @@ const translation = {
 title: 'アプリのパフォーマンスの追跡',
 description: 'サードパーティのLLMOpsサービスとトレースアプリケーションのパフォーマンス設定を行います。',
 config: '設定',
+view: '見る',
 collapse: '折りたたむ',
 expand: '展開',
 tracing: '追跡',
@@ -148,25 +152,24 @@ const translation = {
 title: 'Langfuse',
 description: 'トレース、評価、プロンプトの管理、そしてメトリクスを駆使して、LLMアプリケーションのデバッグや改善に役立てます。',
 },
-inUse: '使用中',
-configProvider: {
-title: '配置 ',
-placeholder: 'あなた様の{{key}}を入力してください',
-project: 'プロジェクト',
-publicKey: '公開キー',
-secretKey: '秘密キー',
-viewDocsLink: '{{key}}のドキュメントを見る',
-removeConfirmTitle: '{{key}}の設定を削除しますか?',
-removeConfirmContent: '現在の設定は使用中です。これを削除すると、トレース機能が無効になります。',
-},
-view: '見る',
 opik: {
 title: 'オピック',
 description: 'Opik は、LLM アプリケーションを評価、テスト、監視するためのオープンソース プラットフォームです。',
 },
+inUse: '使用中',
+configProvider: {
+title: '配置 ',
+placeholder: '{{key}}を入力してください',
+project: 'プロジェクト',
+publicKey: '公開キー',
+secretKey: '秘密キー',
+viewDocsLink: '{{key}}に関するドキュメントを見る',
+removeConfirmTitle: '{{key}}の設定を削除しますか?',
+removeConfirmContent: '現在の設定は使用中です。これを削除すると、トレース機能が無効になります。',
+},
 weave: {
-description: 'Weaveは、LLMアプリケーションを評価、テスト、および監視するためのオープンソースプラットフォームです。',
 title: '織る',
+description: 'Weaveは、LLMアプリケーションを評価、テスト、および監視するためのオープンソースプラットフォームです。',
 },
 },
 answerIcon: {
@@ -174,10 +177,6 @@ const translation = {
 description: '共有アプリケーションの中で Webアプリアイコンを使用して🤖を置き換えるかどうか',
 descriptionInExplore: 'ExploreでWebアプリアイコンを使用して🤖を置き換えるかどうか',
 },
-mermaid: {
-handDrawn: '手描き',
-classic: 'クラシック',
-},
 newAppFromTemplate: {
 sidebar: {
 Agent: 'エージェント',
@@ -219,6 +218,11 @@ const translation = {
 title: 'アクセス権限',
 description: 'Webアプリのアクセス権限を設定します',
 accessLabel: '誰がアクセスできますか',
+accessItemsDescription: {
+anyone: '誰でもWebアプリにアクセス可能です',
+specific: '特定のグループやメンバーがWebアプリにアクセス可能です',
+organization: '組織内の誰でもWebアプリにアクセス可能です',
+},
 accessItems: {
 anyone: 'すべてのユーザー',
 specific: '特定のグループメンバー',
@@ -91,9 +91,9 @@ const translation = {
 member: 'メンバー',
 memberAfter: 'メンバー',
 messageRequest: {
-title: '{{count,number}}メッセージ',
-titlePerMonth: '{{count,number}}メッセージ/月',
-tooltip: 'Open Alモデルを使用するさまざまなプランのメッセージ呼び出しクォータ。上限を超えるメッセージは、Open AI APIキーを使用します。',
+title: '{{count,number}}メッセージクレジット',
+titlePerMonth: '{{count,number}}メッセージクレジット/月',
+tooltip: 'メッセージクレジットは、DifyでさまざまなOpenAIモデルを簡単にお試しいただくためのものです。モデルタイプに応じてクレジットが消費され、使い切った後はご自身のOpenAI APIキーに切り替えていただけます。',
 },
 annotatedResponse: {
 title: '{{count,number}}の注釈クォータ制限',
@@ -173,13 +173,11 @@ const translation = {
 fullSolution: 'より多くのスペースを得るためにプランをアップグレードしてください。',
 },
 apps: {
-fullTipLine1: 'より多くのアプリを作成するには、',
-fullTipLine2: 'プランをアップグレードしてください。',
-fullTip1: 'アプリをもっと作成するためにアップグレードする',
-contactUs: 'お問い合わせ',
-fullTip2: 'プランの制限に達しました',
-fullTip2des: '使用状況を解放するために非アクティブなアプリケーションを整理することをお勧めします。または、お問い合わせください。',
-fullTip1des: 'このプランでのアプリ構築の制限に達しました',
+fullTip1: 'アップグレードして制限を解除する',
+fullTip1des: 'このプランのアプリ数の上限に達しました。',
+fullTip2: 'プラン制限に達しました。',
+fullTip2des: '非アクティブなアプリを削除するか、アップグレードプランをご検討ください。',
+contactUs: 'こちらからお問い合わせください',
 },
 annotatedResponse: {
 fullTipLine1: 'より多くの会話を注釈するには、',
@@ -171,7 +171,7 @@ const translation = {
 community: 'コミュニティ',
 about: 'Difyについて',
 logout: 'ログアウト',
-github: 'ギットハブ',
+github: 'GitHub',
 },
 compliance: {
 soc2Type1: 'SOC 2 Type I 報告書',
@@ -252,7 +252,7 @@ const translation = {
 datasetOperator: 'ナレッジ管理員',
 datasetOperatorTip: 'ナレッジベースのみを管理できる',
 inviteTeamMember: 'チームメンバーを招待する',
-inviteTeamMemberTip: '彼らはサインイン後、直接あなた様のチームデータにアクセスできます。',
+inviteTeamMemberTip: '彼らはサインイン後、直接あなたのチームデータにアクセスできます。',
 emailNotSetup: 'メールサーバーがセットアップされていないので、招待メールを送信することはできません。代わりに招待後に発行される招待リンクをユーザーに通知してください。',
 email: 'メール',
 emailInvalid: '無効なメール形式',
@@ -260,7 +260,7 @@ const translation = {
 sendInvite: '招待を送る',
 invitedAsRole: '{{role}}ユーザーとして招待されました',
 invitationSent: '招待が送信されました',
-invitationSentTip: '招待が送信され、彼らはDifyにサインインしてあなた様のチームデータにアクセスできます。',
+invitationSentTip: '招待が送信され、彼らはDifyにサインインしてあなたのチームデータにアクセスできます。',
 invitationLink: '招待リンク',
 failedInvitationEmails: '以下のユーザーは正常に招待されませんでした',
 ok: 'OK',
@@ -272,7 +272,7 @@ const translation = {
 setEditor: 'エディターに設定',
 disInvite: '招待をキャンセル',
 deleteMember: 'メンバーを削除',
-you: '(あなた様)',
+you: '(あなた)',
 },
 integrations: {
 connected: '接続済み',
@@ -448,8 +448,8 @@ const translation = {
 connect: '接続',
 configure: '設定',
 notion: {
-title: 'ノーション',
-description: 'ナレッジデータソースとしてノーションを使用します。',
+title: 'Notion',
+description: 'ナレッジデータソースとしてNotionを使用します。',
 connectedWorkspace: '接続済みワークスペース',
 addWorkspace: 'ワークスペースの追加',
 connected: '接続済み',
@@ -51,7 +51,7 @@ const translation = {
 empty: {
 title: 'まだドキュメントがありません',
 upload: {
-tip: 'ファイルをアップロードしたり、ウェブサイトから同期したり、NotionやGitHubなどのウェブアプリから同期することができます。',
+tip: 'ファイルをアップロードしたり、ウェブサイトから同期したり、NotionやGitHubなどのWebアプリから同期することができます。',
 },
 sync: {
 tip: 'Difyは定期的にNotionからファイルをダウンロードし、処理を完了します。',
@@ -14,7 +14,7 @@ const translation = {
 permissionsOnlyMe: '自分のみ',
 permissionsAllMember: 'すべてのチームメンバー',
 permissionsInvitedMembers: '一部のチームメンバー',
-me: '(あなた様)',
+me: '(あなた)',
 indexMethod: 'インデックス方法',
 indexMethodHighQuality: '高品質',
 indexMethodHighQualityTip: 'より正確な検索のため、埋め込みモデルを呼び出してドキュメントを処理することで、LLMは高品質な回答を生成できます。',
@@ -72,7 +72,7 @@ const translation = {
 createDatasetIntro: '独自のテキストデータをインポートするか、LLMコンテキストの強化のためにWebhookを介してリアルタイムでデータを書き込むことができます。',
 deleteDatasetConfirmTitle: 'このナレッジベースを削除しますか?',
 deleteDatasetConfirmContent:
-'ナレッジベースを削除すると元に戻すことはできません。ユーザーはもはやあなた様のナレッジベースにアクセスできず、すべてのプロンプトの設定とログが永久に削除されます。',
+'ナレッジベースを削除すると元に戻すことはできません。ユーザーはもはやあなたのナレッジベースにアクセスできず、すべてのプロンプトの設定とログが永久に削除されます。',
 datasetUsedByApp: 'このナレッジベースは一部のアプリによって使用されています。アプリはこのナレッジベースを使用できなくなり、すべてのプロンプト設定とログは永久に削除されます。',
 datasetDeleted: 'ナレッジベースが削除されました',
 datasetDeleteFailed: 'ナレッジベースの削除に失敗しました',
@@ -62,11 +62,11 @@ const translation = {
 link: 'オープンソースライセンス',
 },
 join: '参加する',
-joinTipStart: 'あなた様を招待します',
+joinTipStart: 'あなたを招待します',
 joinTipEnd: 'チームに参加する',
 invalid: 'リンクの有効期限が切れています',
 explore: 'Difyを探索する',
-activatedTipStart: 'あなた様は',
+activatedTipStart: 'あなたは',
 activatedTipEnd: 'チームに参加しました',
 activated: '今すぐサインイン',
 adminInitPassword: '管理者初期化パスワード',
@@ -2,11 +2,11 @@ const translation = {
 daysInWeek: {
 Tue: '火曜日',
 Sat: '土曜日',
-Mon: 'モン',
+Mon: '月曜日',
 Thu: '木曜日',
-Fri: '自由',
+Fri: '金曜日',
 Wed: '水曜日',
-Sun: '太陽',
+Sun: '日曜日',
 },
 months: {
 November: '11月',
@@ -14,13 +14,13 @@ const translation = {
 March: '3月',
 September: '9月',
 July: '7月',
-April: '四月',
+April: '4月',
 February: '2月',
 June: '6月',
 January: '1月',
 May: '5月',
-August: '八月',
-October: '十月',
+August: '8月',
+October: '10月',
 },
 operation: {
 now: '今',
@@ -108,7 +108,7 @@ const translation = {
 confirmTitle: '保存しますか?',
 confirmTip: 'このツールを使用しているアプリは影響を受けます',
 deleteToolConfirmTitle: 'このツールを削除しますか?',
-deleteToolConfirmContent: 'ツールの削除は取り消しできません。ユーザーはもうあなた様のツールにアクセスできません。',
+deleteToolConfirmContent: 'ツールの削除は取り消しできません。ユーザーはもうあなたのツールにアクセスできません。',
 },
 test: {
 title: 'テスト',
@@ -307,6 +307,7 @@ const translation = {
 organizeBlocks: 'ノード整理',
 change: '変更',
 optional: '(任意)',
+moveToThisNode: 'このノードに移動する',
 },
 nodes: {
 common: {
@@ -302,6 +302,7 @@ const translation = {
 organizeBlocks: '블록 정리',
 change: '변경',
 optional: '(선택사항)',
+moveToThisNode: '이 노드로 이동',
 },
 nodes: {
 common: {