Mirror of https://git.mirrors.martin98.com/https://github.com/langgenius/dify.git
Synced 2025-08-17 19:45:52 +08:00

Commit 4ea3e8efb8: Merge branch 'main' into deploy/dev
@@ -54,7 +54,6 @@ def initialize_extensions(app: DifyApp):
         ext_otel,
         ext_proxy_fix,
         ext_redis,
-        ext_repositories,
         ext_sentry,
         ext_set_secretkey,
         ext_storage,
@@ -75,7 +74,6 @@ def initialize_extensions(app: DifyApp):
         ext_migrate,
         ext_redis,
         ext_storage,
-        ext_repositories,
         ext_celery,
         ext_login,
         ext_mail,
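Note: `ext_repositories` disappears from both initialization lists because the repository registry it used to install is deleted later in this commit (see the removed `repository_registry.py`, `repository_factory.py`, and `ext_repositories.py` hunks below); there is nothing left to register at startup.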
@@ -25,7 +25,7 @@ from core.app.entities.task_entities import ChatbotAppBlockingResponse, ChatbotA
 from core.model_runtime.errors.invoke import InvokeAuthorizationError
 from core.ops.ops_trace_manager import TraceQueueManager
 from core.prompt.utils.get_thread_messages_length import get_thread_messages_length
-from core.workflow.repository import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
 from extensions.ext_database import db
 from factories import file_factory
@@ -163,12 +163,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):

         # Create workflow node execution repository
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
-        workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
-            params={
-                "tenant_id": application_generate_entity.app_config.tenant_id,
-                "app_id": application_generate_entity.app_config.app_id,
-                "session_factory": session_factory,
-            }
+        workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+            session_factory=session_factory,
+            tenant_id=application_generate_entity.app_config.tenant_id,
+            app_id=application_generate_entity.app_config.app_id,
         )

         return self._generate(
@@ -231,12 +229,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):

         # Create workflow node execution repository
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
-        workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
-            params={
-                "tenant_id": application_generate_entity.app_config.tenant_id,
-                "app_id": application_generate_entity.app_config.app_id,
-                "session_factory": session_factory,
-            }
+        workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+            session_factory=session_factory,
+            tenant_id=application_generate_entity.app_config.tenant_id,
+            app_id=application_generate_entity.app_config.app_id,
         )

         return self._generate(
@@ -297,12 +293,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):

         # Create workflow node execution repository
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
-        workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
-            params={
-                "tenant_id": application_generate_entity.app_config.tenant_id,
-                "app_id": application_generate_entity.app_config.app_id,
-                "session_factory": session_factory,
-            }
+        workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+            session_factory=session_factory,
+            tenant_id=application_generate_entity.app_config.tenant_id,
+            app_id=application_generate_entity.app_config.app_id,
        )

         return self._generate(
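The three hunks above apply the same mechanical change at each generate entry point: the string-keyed factory call is replaced by direct construction of the SQLAlchemy repository. A minimal sketch of the new pattern, assuming the surrounding generator context (`application_generate_entity` and the global `db` from this file):

```python
from sqlalchemy.orm import sessionmaker

from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from extensions.ext_database import db

# expire_on_commit=False keeps attributes of loaded execution rows readable
# after each commit, so the streaming pipeline can keep using them.
session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
    session_factory=session_factory,
    tenant_id=application_generate_entity.app_config.tenant_id,
    app_id=application_generate_entity.app_config.app_id,
)
```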
@@ -9,7 +9,6 @@ from sqlalchemy import select
 from sqlalchemy.orm import Session

 from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME
-from core.app.apps.advanced_chat.app_generator_tts_publisher import AppGeneratorTTSPublisher, AudioTrunk
 from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
 from core.app.entities.app_invoke_entities import (
     AdvancedChatAppGenerateEntity,
@@ -58,7 +57,7 @@ from core.app.entities.task_entities import (
 )
 from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
 from core.app.task_pipeline.message_cycle_manage import MessageCycleManage
-from core.app.task_pipeline.workflow_cycle_manage import WorkflowCycleManage
+from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
 from core.model_runtime.entities.llm_entities import LLMUsage
 from core.model_runtime.utils.encoders import jsonable_encoder
 from core.ops.ops_trace_manager import TraceQueueManager
@@ -66,6 +65,7 @@ from core.workflow.enums import SystemVariableKey
 from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
 from core.workflow.nodes import NodeType
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
+from core.workflow.workflow_cycle_manager import WorkflowCycleManager
 from events.message_event import message_was_created
 from extensions.ext_database import db
 from models import Conversation, EndUser, Message, MessageFile
@@ -113,7 +113,7 @@ class AdvancedChatAppGenerateTaskPipeline:
         else:
             raise NotImplementedError(f"User type not supported: {type(user)}")

-        self._workflow_cycle_manager = WorkflowCycleManage(
+        self._workflow_cycle_manager = WorkflowCycleManager(
             application_generate_entity=application_generate_entity,
             workflow_system_variables={
                 SystemVariableKey.QUERY: message.query,
@@ -18,13 +18,13 @@ from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
 from core.app.apps.workflow.app_queue_manager import WorkflowAppQueueManager
 from core.app.apps.workflow.app_runner import WorkflowAppRunner
 from core.app.apps.workflow.generate_response_converter import WorkflowAppGenerateResponseConverter
-from core.app.apps.workflow.generate_task_pipeline import WorkflowAppGenerateTaskPipeline
 from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity
 from core.app.entities.task_entities import WorkflowAppBlockingResponse, WorkflowAppStreamResponse
 from core.model_runtime.errors.invoke import InvokeAuthorizationError
 from core.ops.ops_trace_manager import TraceQueueManager
-from core.workflow.repository import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
+from core.workflow.workflow_app_generate_task_pipeline import WorkflowAppGenerateTaskPipeline
 from extensions.ext_database import db
 from factories import file_factory
 from models import Account, App, EndUser, Workflow
@@ -138,12 +138,10 @@ class WorkflowAppGenerator(BaseAppGenerator):

         # Create workflow node execution repository
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
-        workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
-            params={
-                "tenant_id": application_generate_entity.app_config.tenant_id,
-                "app_id": application_generate_entity.app_config.app_id,
-                "session_factory": session_factory,
-            }
+        workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+            session_factory=session_factory,
+            tenant_id=application_generate_entity.app_config.tenant_id,
+            app_id=application_generate_entity.app_config.app_id,
         )

         return self._generate(
@@ -264,12 +262,10 @@ class WorkflowAppGenerator(BaseAppGenerator):

         # Create workflow node execution repository
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
-        workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
-            params={
-                "tenant_id": application_generate_entity.app_config.tenant_id,
-                "app_id": application_generate_entity.app_config.app_id,
-                "session_factory": session_factory,
-            }
+        workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+            session_factory=session_factory,
+            tenant_id=application_generate_entity.app_config.tenant_id,
+            app_id=application_generate_entity.app_config.app_id,
         )

         return self._generate(
@@ -329,12 +325,10 @@ class WorkflowAppGenerator(BaseAppGenerator):

         # Create workflow node execution repository
         session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
-        workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
-            params={
-                "tenant_id": application_generate_entity.app_config.tenant_id,
-                "app_id": application_generate_entity.app_config.app_id,
-                "session_factory": session_factory,
-            }
+        workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+            session_factory=session_factory,
+            tenant_id=application_generate_entity.app_config.tenant_id,
+            app_id=application_generate_entity.app_config.app_id,
         )

         return self._generate(
@@ -9,7 +9,6 @@ from sqlalchemy import select
 from sqlalchemy.orm import Session

 from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME
-from core.app.apps.advanced_chat.app_generator_tts_publisher import AppGeneratorTTSPublisher, AudioTrunk
 from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
 from core.app.entities.app_invoke_entities import (
     AgentChatAppGenerateEntity,
@@ -45,6 +44,7 @@ from core.app.entities.task_entities import (
 )
 from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
 from core.app.task_pipeline.message_cycle_manage import MessageCycleManage
+from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
 from core.model_manager import ModelInstance
 from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
 from core.model_runtime.entities.message_entities import (
api/core/base/__init__.py (new file)
@@ -0,0 +1 @@
+# Core base package
api/core/base/tts/__init__.py (new file)
@@ -0,0 +1,6 @@
+from core.base.tts.app_generator_tts_publisher import AppGeneratorTTSPublisher, AudioTrunk
+
+__all__ = [
+    "AppGeneratorTTSPublisher",
+    "AudioTrunk",
+]
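With the shared `core.base.tts` package in place, the TTS publisher no longer has to be imported from inside the advanced-chat app package; the import swaps in the pipeline hunks above and below reduce to:

```python
# Old location (removed in this commit):
# from core.app.apps.advanced_chat.app_generator_tts_publisher import AppGeneratorTTSPublisher, AudioTrunk

# New shared location:
from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
```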
@@ -29,7 +29,7 @@ from core.ops.langfuse_trace.entities.langfuse_trace_entity import (
     UnitEnum,
 )
 from core.ops.utils import filter_none_values
-from core.workflow.repository.repository_factory import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from extensions.ext_database import db
 from models.model import EndUser

@@ -113,8 +113,8 @@ class LangFuseDataTrace(BaseTraceInstance):

         # through workflow_run_id get all_nodes_execution using repository
         session_factory = sessionmaker(bind=db.engine)
-        workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
-            params={"tenant_id": trace_info.tenant_id, "session_factory": session_factory},
+        workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+            session_factory=session_factory, tenant_id=trace_info.tenant_id
         )

         # Get all executions for this workflow run
@@ -28,7 +28,7 @@ from core.ops.langsmith_trace.entities.langsmith_trace_entity import (
     LangSmithRunUpdateModel,
 )
 from core.ops.utils import filter_none_values, generate_dotted_order
-from core.workflow.repository.repository_factory import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from extensions.ext_database import db
 from models.model import EndUser, MessageFile

@@ -137,12 +137,8 @@ class LangSmithDataTrace(BaseTraceInstance):

         # through workflow_run_id get all_nodes_execution using repository
         session_factory = sessionmaker(bind=db.engine)
-        workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
-            params={
-                "tenant_id": trace_info.tenant_id,
-                "app_id": trace_info.metadata.get("app_id"),
-                "session_factory": session_factory,
-            },
+        workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+            session_factory=session_factory, tenant_id=trace_info.tenant_id, app_id=trace_info.metadata.get("app_id")
         )

         # Get all executions for this workflow run
@@ -22,7 +22,7 @@ from core.ops.entities.trace_entity import (
     TraceTaskName,
     WorkflowTraceInfo,
 )
-from core.workflow.repository.repository_factory import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from extensions.ext_database import db
 from models.model import EndUser, MessageFile

@@ -150,12 +150,8 @@ class OpikDataTrace(BaseTraceInstance):

         # through workflow_run_id get all_nodes_execution using repository
         session_factory = sessionmaker(bind=db.engine)
-        workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
-            params={
-                "tenant_id": trace_info.tenant_id,
-                "app_id": trace_info.metadata.get("app_id"),
-                "session_factory": session_factory,
-            },
+        workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+            session_factory=session_factory, tenant_id=trace_info.tenant_id, app_id=trace_info.metadata.get("app_id")
         )

         # Get all executions for this workflow run
@@ -4,3 +4,9 @@ Repository implementations for data access.
 This package contains concrete implementations of the repository interfaces
 defined in the core.workflow.repository package.
 """
+
+from core.repositories.sqlalchemy_workflow_node_execution_repository import SQLAlchemyWorkflowNodeExecutionRepository
+
+__all__ = [
+    "SQLAlchemyWorkflowNodeExecutionRepository",
+]
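`core.repositories` now re-exports the concrete implementation, so callers depend on the package rather than on its module layout, and `__all__` pins the public surface to that single name:

```python
# The package-level import used by every call site updated in this commit:
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
```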
@@ -1,87 +0,0 @@
-"""
-Registry for repository implementations.
-
-This module is responsible for registering factory functions with the repository factory.
-"""
-
-import logging
-from collections.abc import Mapping
-from typing import Any
-
-from sqlalchemy.orm import sessionmaker
-
-from configs import dify_config
-from core.repositories.workflow_node_execution import SQLAlchemyWorkflowNodeExecutionRepository
-from core.workflow.repository.repository_factory import RepositoryFactory
-from extensions.ext_database import db
-
-logger = logging.getLogger(__name__)
-
-# Storage type constants
-STORAGE_TYPE_RDBMS = "rdbms"
-STORAGE_TYPE_HYBRID = "hybrid"
-
-
-def register_repositories() -> None:
-    """
-    Register repository factory functions with the RepositoryFactory.
-
-    This function reads configuration settings to determine which repository
-    implementations to register.
-    """
-    # Configure WorkflowNodeExecutionRepository factory based on configuration
-    workflow_node_execution_storage = dify_config.WORKFLOW_NODE_EXECUTION_STORAGE
-
-    # Check storage type and register appropriate implementation
-    if workflow_node_execution_storage == STORAGE_TYPE_RDBMS:
-        # Register SQLAlchemy implementation for RDBMS storage
-        logger.info("Registering WorkflowNodeExecution repository with RDBMS storage")
-        RepositoryFactory.register_workflow_node_execution_factory(create_workflow_node_execution_repository)
-    elif workflow_node_execution_storage == STORAGE_TYPE_HYBRID:
-        # Hybrid storage is not yet implemented
-        raise NotImplementedError("Hybrid storage for WorkflowNodeExecution repository is not yet implemented")
-    else:
-        # Unknown storage type
-        raise ValueError(
-            f"Unknown storage type '{workflow_node_execution_storage}' for WorkflowNodeExecution repository. "
-            f"Supported types: {STORAGE_TYPE_RDBMS}"
-        )
-
-
-def create_workflow_node_execution_repository(params: Mapping[str, Any]) -> SQLAlchemyWorkflowNodeExecutionRepository:
-    """
-    Create a WorkflowNodeExecutionRepository instance using SQLAlchemy implementation.
-
-    This factory function creates a repository for the RDBMS storage type.
-
-    Args:
-        params: Parameters for creating the repository, including:
-            - tenant_id: Required. The tenant ID for multi-tenancy.
-            - app_id: Optional. The application ID for filtering.
-            - session_factory: Optional. A SQLAlchemy sessionmaker instance. If not provided,
-                a new sessionmaker will be created using the global database engine.
-
-    Returns:
-        A WorkflowNodeExecutionRepository instance
-
-    Raises:
-        ValueError: If required parameters are missing
-    """
-    # Extract required parameters
-    tenant_id = params.get("tenant_id")
-    if tenant_id is None:
-        raise ValueError("tenant_id is required for WorkflowNodeExecution repository with RDBMS storage")
-
-    # Extract optional parameters
-    app_id = params.get("app_id")
-
-    # Use the session_factory from params if provided, otherwise create one using the global db engine
-    session_factory = params.get("session_factory")
-    if session_factory is None:
-        # Create a sessionmaker using the same engine as the global db session
-        session_factory = sessionmaker(bind=db.engine)
-
-    # Create and return the repository
-    return SQLAlchemyWorkflowNodeExecutionRepository(
-        session_factory=session_factory, tenant_id=tenant_id, app_id=app_id
-    )
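The deleted registry was the only reader of `dify_config.WORKFLOW_NODE_EXECUTION_STORAGE`: it installed a factory function at startup for the `rdbms` backend and raised for the unimplemented `hybrid` one. With only one real backend, the commit drops the registration step entirely. A rough before/after sketch, using only names that appear in this diff:

```python
# Before: implementation chosen at startup from configuration, then
# created through a string-keyed params mapping (typos fail at runtime).
repository = RepositoryFactory.create_workflow_node_execution_repository(
    params={"tenant_id": tenant_id, "app_id": app_id, "session_factory": session_factory}
)

# After: the concrete class is imported and constructed directly, with
# keyword arguments a type checker can verify.
repository = SQLAlchemyWorkflowNodeExecutionRepository(
    session_factory=session_factory, tenant_id=tenant_id, app_id=app_id
)
```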
@@ -10,13 +10,13 @@ from sqlalchemy import UnaryExpression, asc, delete, desc, select
 from sqlalchemy.engine import Engine
 from sqlalchemy.orm import sessionmaker

-from core.workflow.repository.workflow_node_execution_repository import OrderConfig
+from core.workflow.repository.workflow_node_execution_repository import OrderConfig, WorkflowNodeExecutionRepository
 from models.workflow import WorkflowNodeExecution, WorkflowNodeExecutionStatus, WorkflowNodeExecutionTriggeredFrom

 logger = logging.getLogger(__name__)


-class SQLAlchemyWorkflowNodeExecutionRepository:
+class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository):
     """
     SQLAlchemy implementation of the WorkflowNodeExecutionRepository interface.
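Because the class now explicitly derives from `WorkflowNodeExecutionRepository`, call sites can keep annotating with the abstract interface while constructing the SQLAlchemy implementation directly. A minimal sketch, assuming the interface and `db` from this codebase (the tenant value is an illustrative placeholder):

```python
from sqlalchemy.orm import sessionmaker

from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from extensions.ext_database import db

# The annotation documents the contract; the constructor picks the backend.
repo: WorkflowNodeExecutionRepository = SQLAlchemyWorkflowNodeExecutionRepository(
    session_factory=sessionmaker(bind=db.engine),
    tenant_id="example-tenant-id",  # illustrative placeholder
)
```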
@@ -1,9 +0,0 @@
-"""
-WorkflowNodeExecution repository implementations.
-"""
-
-from core.repositories.workflow_node_execution.sqlalchemy_repository import SQLAlchemyWorkflowNodeExecutionRepository
-
-__all__ = [
-    "SQLAlchemyWorkflowNodeExecutionRepository",
-]
@@ -6,10 +6,9 @@ for accessing and manipulating data, regardless of the underlying
 storage mechanism.
 """

-from core.workflow.repository.repository_factory import RepositoryFactory
-from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
+from core.workflow.repository.workflow_node_execution_repository import OrderConfig, WorkflowNodeExecutionRepository

 __all__ = [
-    "RepositoryFactory",
+    "OrderConfig",
     "WorkflowNodeExecutionRepository",
 ]
@@ -1,97 +0,0 @@
-"""
-Repository factory for creating repository instances.
-
-This module provides a simple factory interface for creating repository instances.
-It does not contain any implementation details or dependencies on specific repositories.
-"""
-
-from collections.abc import Callable, Mapping
-from typing import Any, Literal, Optional, cast
-
-from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
-
-# Type for factory functions - takes a dict of parameters and returns any repository type
-RepositoryFactoryFunc = Callable[[Mapping[str, Any]], Any]
-
-# Type for workflow node execution factory function
-WorkflowNodeExecutionFactoryFunc = Callable[[Mapping[str, Any]], WorkflowNodeExecutionRepository]
-
-# Repository type literals
-_RepositoryType = Literal["workflow_node_execution"]
-
-
-class RepositoryFactory:
-    """
-    Factory class for creating repository instances.
-
-    This factory delegates the actual repository creation to implementation-specific
-    factory functions that are registered with the factory at runtime.
-    """
-
-    # Dictionary to store factory functions
-    _factory_functions: dict[str, RepositoryFactoryFunc] = {}
-
-    @classmethod
-    def _register_factory(cls, repository_type: _RepositoryType, factory_func: RepositoryFactoryFunc) -> None:
-        """
-        Register a factory function for a specific repository type.
-        This is a private method and should not be called directly.
-
-        Args:
-            repository_type: The type of repository (e.g., 'workflow_node_execution')
-            factory_func: A function that takes parameters and returns a repository instance
-        """
-        cls._factory_functions[repository_type] = factory_func
-
-    @classmethod
-    def _create_repository(cls, repository_type: _RepositoryType, params: Optional[Mapping[str, Any]] = None) -> Any:
-        """
-        Create a new repository instance with the provided parameters.
-        This is a private method and should not be called directly.
-
-        Args:
-            repository_type: The type of repository to create
-            params: A dictionary of parameters to pass to the factory function
-
-        Returns:
-            A new instance of the requested repository
-
-        Raises:
-            ValueError: If no factory function is registered for the repository type
-        """
-        if repository_type not in cls._factory_functions:
-            raise ValueError(f"No factory function registered for repository type '{repository_type}'")
-
-        # Use empty dict if params is None
-        params = params or {}
-
-        return cls._factory_functions[repository_type](params)
-
-    @classmethod
-    def register_workflow_node_execution_factory(cls, factory_func: WorkflowNodeExecutionFactoryFunc) -> None:
-        """
-        Register a factory function for the workflow node execution repository.
-
-        Args:
-            factory_func: A function that takes parameters and returns a WorkflowNodeExecutionRepository instance
-        """
-        cls._register_factory("workflow_node_execution", factory_func)
-
-    @classmethod
-    def create_workflow_node_execution_repository(
-        cls, params: Optional[Mapping[str, Any]] = None
-    ) -> WorkflowNodeExecutionRepository:
-        """
-        Create a new WorkflowNodeExecutionRepository instance with the provided parameters.
-
-        Args:
-            params: A dictionary of parameters to pass to the factory function
-
-        Returns:
-            A new instance of the WorkflowNodeExecutionRepository
-
-        Raises:
-            ValueError: If no factory function is registered for the workflow_node_execution repository type
-        """
-        # We can safely cast here because we've registered a WorkflowNodeExecutionFactoryFunc
-        return cast(WorkflowNodeExecutionRepository, cls._create_repository("workflow_node_execution", params))
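With every call site constructing the repository directly, the factory's string-keyed registration, the `_RepositoryType` literal plumbing, and the final `cast` back to `WorkflowNodeExecutionRepository` are all dead code; the static guarantee that `cast` only asserted is now provided by the class actually inheriting the interface (see the `SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository)` hunk above).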
@@ -6,7 +6,6 @@ from typing import Optional, Union
 from sqlalchemy.orm import Session

 from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME
-from core.app.apps.advanced_chat.app_generator_tts_publisher import AppGeneratorTTSPublisher, AudioTrunk
 from core.app.apps.base_app_queue_manager import AppQueueManager
 from core.app.entities.app_invoke_entities import (
     InvokeFrom,
@@ -52,10 +51,11 @@ from core.app.entities.task_entities import (
     WorkflowTaskState,
 )
 from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
-from core.app.task_pipeline.workflow_cycle_manage import WorkflowCycleManage
+from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
 from core.ops.ops_trace_manager import TraceQueueManager
 from core.workflow.enums import SystemVariableKey
 from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
+from core.workflow.workflow_cycle_manager import WorkflowCycleManager
 from extensions.ext_database import db
 from models.account import Account
 from models.enums import CreatedByRole
@@ -102,7 +102,7 @@ class WorkflowAppGenerateTaskPipeline:
         else:
             raise ValueError(f"Invalid user type: {type(user)}")

-        self._workflow_cycle_manager = WorkflowCycleManage(
+        self._workflow_cycle_manager = WorkflowCycleManager(
             application_generate_entity=application_generate_entity,
             workflow_system_variables={
                 SystemVariableKey.FILES: application_generate_entity.files,
@@ -69,7 +69,7 @@ from models.workflow import (
 )


-class WorkflowCycleManage:
+class WorkflowCycleManager:
     def __init__(
         self,
         *,
@@ -1,18 +0,0 @@
-"""
-Extension for initializing repositories.
-
-This extension registers repository implementations with the RepositoryFactory.
-"""
-
-from core.repositories.repository_registry import register_repositories
-from dify_app import DifyApp
-
-
-def init_app(_app: DifyApp) -> None:
-    """
-    Initialize repository implementations.
-
-    Args:
-        _app: The Flask application instance (unused)
-    """
-    register_repositories()
@@ -5,45 +5,61 @@ Revises: 33f5fac87f29
 Create Date: 2024-10-10 05:16:14.764268

 """
-from alembic import op
-import models as models
 import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
+from alembic import op, context

 # revision identifiers, used by Alembic.
-revision = 'bbadea11becb'
-down_revision = 'd8e744d88ed6'
+revision = "bbadea11becb"
+down_revision = "d8e744d88ed6"
 branch_labels = None
 depends_on = None


 def upgrade():
+    def _has_name_or_size_column() -> bool:
+        # We cannot access the database in offline mode, so assume
+        # the "name" and "size" columns do not exist.
+        if context.is_offline_mode():
+            # Log a warning message to inform the user that the database schema cannot be inspected
+            # in offline mode, and the generated SQL may not accurately reflect the actual execution.
+            op.execute(
+                "-- Executing in offline mode, assuming the name and size columns do not exist.\n"
+                "-- The generated SQL may differ from what will actually be executed.\n"
+                "-- Please review the migration script carefully!"
+            )
+            return False
+        # Use SQLAlchemy inspector to get the columns of the 'tool_files' table
+        inspector = sa.inspect(conn)
+        columns = [col["name"] for col in inspector.get_columns("tool_files")]
+
+        # If 'name' or 'size' columns already exist, exit the upgrade function
+        if "name" in columns or "size" in columns:
+            return True
+        return False
+
     # ### commands auto generated by Alembic - please adjust! ###
     # Get the database connection
     conn = op.get_bind()

-    # Use SQLAlchemy inspector to get the columns of the 'tool_files' table
-    inspector = sa.inspect(conn)
-    columns = [col['name'] for col in inspector.get_columns('tool_files')]
-
-    # If 'name' or 'size' columns already exist, exit the upgrade function
-    if 'name' in columns or 'size' in columns:
+    if _has_name_or_size_column():
         return

-    with op.batch_alter_table('tool_files', schema=None) as batch_op:
-        batch_op.add_column(sa.Column('name', sa.String(), nullable=True))
-        batch_op.add_column(sa.Column('size', sa.Integer(), nullable=True))
+    with op.batch_alter_table("tool_files", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("name", sa.String(), nullable=True))
+        batch_op.add_column(sa.Column("size", sa.Integer(), nullable=True))
     op.execute("UPDATE tool_files SET name = '' WHERE name IS NULL")
     op.execute("UPDATE tool_files SET size = -1 WHERE size IS NULL")
-    with op.batch_alter_table('tool_files', schema=None) as batch_op:
-        batch_op.alter_column('name', existing_type=sa.String(), nullable=False)
-        batch_op.alter_column('size', existing_type=sa.Integer(), nullable=False)
+    with op.batch_alter_table("tool_files", schema=None) as batch_op:
+        batch_op.alter_column("name", existing_type=sa.String(), nullable=False)
+        batch_op.alter_column("size", existing_type=sa.Integer(), nullable=False)
     # ### end Alembic commands ###


 def downgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    with op.batch_alter_table('tool_files', schema=None) as batch_op:
-        batch_op.drop_column('size')
-        batch_op.drop_column('name')
+    with op.batch_alter_table("tool_files", schema=None) as batch_op:
+        batch_op.drop_column("size")
+        batch_op.drop_column("name")
     # ### end Alembic commands ###
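This migration and the next one get the same treatment: `op.get_bind()` plus SQLAlchemy inspection need a live connection, which does not exist when Alembic runs in offline (`--sql`) mode, so the existence check moves into a helper that short-circuits and writes its caveat into the generated script as SQL comments. A generic sketch of the guard, using only Alembic's public API:

```python
import sqlalchemy as sa
from alembic import context, op


def _column_exists(table: str, column: str) -> bool:
    # Offline mode emits SQL without connecting to a database, so the schema
    # cannot be inspected; surface the assumption as a comment in the script.
    if context.is_offline_mode():
        op.execute(f"-- Offline mode: assuming column {column} does not exist on {table}.")
        return False
    inspector = sa.inspect(op.get_bind())
    return column in [col["name"] for col in inspector.get_columns(table)]
```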
@@ -5,28 +5,38 @@ Revises: e1944c35e15e
 Create Date: 2024-12-23 11:54:15.344543

 """
-from alembic import op
-import models as models
-import sqlalchemy as sa
+from alembic import op, context
 from sqlalchemy import inspect


 # revision identifiers, used by Alembic.
-revision = 'd7999dfa4aae'
-down_revision = 'e1944c35e15e'
+revision = "d7999dfa4aae"
+down_revision = "e1944c35e15e"
 branch_labels = None
 depends_on = None


 def upgrade():
-    # Check if column exists before attempting to remove it
-    conn = op.get_bind()
-    inspector = inspect(conn)
-    has_column = 'retry_index' in [col['name'] for col in inspector.get_columns('workflow_node_executions')]
+    def _has_retry_index_column() -> bool:
+        if context.is_offline_mode():
+            # Log a warning message to inform the user that the database schema cannot be inspected
+            # in offline mode, and the generated SQL may not accurately reflect the actual execution.
+            op.execute(
+                '-- Executing in offline mode: assuming the "retry_index" column does not exist.\n'
+                "-- The generated SQL may differ from what will actually be executed.\n"
+                "-- Please review the migration script carefully!"
+            )
+            return False
+        conn = op.get_bind()
+        inspector = inspect(conn)
+        return "retry_index" in [col["name"] for col in inspector.get_columns("workflow_node_executions")]
+
+    has_column = _has_retry_index_column()

     if has_column:
-        with op.batch_alter_table('workflow_node_executions', schema=None) as batch_op:
-            batch_op.drop_column('retry_index')
+        with op.batch_alter_table("workflow_node_executions", schema=None) as batch_op:
+            batch_op.drop_column("retry_index")


 def downgrade():
@@ -1,6 +1,6 @@
 import json
 from datetime import datetime
-from typing import Any, Optional, cast
+from typing import Any, cast

 import sqlalchemy as sa
 from deprecated import deprecated
@@ -304,8 +304,11 @@ class DeprecatedPublishedAppTool(Base):
         db.UniqueConstraint("app_id", "user_id", name="unique_published_app_tool"),
     )

+    id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
     # id of the app
     app_id = db.Column(StringUUID, ForeignKey("apps.id"), nullable=False)
+
+    user_id: Mapped[str] = db.Column(StringUUID, nullable=False)
     # who published this tool
     description = db.Column(db.Text, nullable=False)
     # llm_description of the tool, for LLM
@@ -325,34 +328,3 @@ class DeprecatedPublishedAppTool(Base):
     @property
     def description_i18n(self) -> I18nObject:
         return I18nObject(**json.loads(self.description))
-
-    id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
-    user_id: Mapped[str] = db.Column(StringUUID, nullable=False)
-    tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False)
-    conversation_id: Mapped[Optional[str]] = db.Column(StringUUID, nullable=True)
-    file_key: Mapped[str] = db.Column(db.String(255), nullable=False)
-    mimetype: Mapped[str] = db.Column(db.String(255), nullable=False)
-    original_url: Mapped[Optional[str]] = db.Column(db.String(2048), nullable=True)
-    name: Mapped[str] = mapped_column(default="")
-    size: Mapped[int] = mapped_column(default=-1)
-
-    def __init__(
-        self,
-        *,
-        user_id: str,
-        tenant_id: str,
-        conversation_id: Optional[str] = None,
-        file_key: str,
-        mimetype: str,
-        original_url: Optional[str] = None,
-        name: str,
-        size: int,
-    ):
-        self.user_id = user_id
-        self.tenant_id = tenant_id
-        self.conversation_id = conversation_id
-        self.file_key = file_key
-        self.mimetype = mimetype
-        self.original_url = original_url
-        self.name = name
-        self.size = size
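The hunks above clean up `DeprecatedPublishedAppTool`: `id` and `user_id` move up beside the other column definitions, and what appears to be a stray copy of `ToolFile`'s columns and `__init__` that had ended up at the bottom of that class is deleted, which is also why `Optional` drops out of the `typing` import.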
@@ -2,7 +2,7 @@ import threading
 from typing import Optional

 import contexts
-from core.workflow.repository import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import OrderConfig
 from extensions.ext_database import db
 from libs.infinite_scroll_pagination import InfiniteScrollPagination
@@ -129,12 +129,8 @@ class WorkflowRunService:
             return []

         # Use the repository to get the node executions
-        repository = RepositoryFactory.create_workflow_node_execution_repository(
-            params={
-                "tenant_id": app_model.tenant_id,
-                "app_id": app_model.id,
-                "session_factory": db.session.get_bind(),
-            }
+        repository = SQLAlchemyWorkflowNodeExecutionRepository(
+            session_factory=db.engine, tenant_id=app_model.tenant_id, app_id=app_model.id
         )

         # Use the repository to get the node executions with ordering
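`WorkflowRunService` keeps importing `OrderConfig`, which bundles the ordering columns and direction for the repository's retrieval call. A sketch of the new shape of the lookup; the retrieval method and the `OrderConfig` field names are not shown in this diff, so treat them as assumptions:

```python
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.repository.workflow_node_execution_repository import OrderConfig
from extensions.ext_database import db

# app_model and run_id come from the surrounding service method.
repository = SQLAlchemyWorkflowNodeExecutionRepository(
    session_factory=db.engine, tenant_id=app_model.tenant_id, app_id=app_model.id
)
order_config = OrderConfig(order_by=["index"], order_direction="desc")  # assumed field names
node_executions = repository.get_by_workflow_run(  # assumed method name
    workflow_run_id=run_id, order_config=order_config
)
```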
@@ -11,6 +11,7 @@ from sqlalchemy.orm import Session
 from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
 from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
 from core.model_runtime.utils.encoders import jsonable_encoder
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from core.variables import Variable
 from core.workflow.entities.node_entities import NodeRunResult
 from core.workflow.errors import WorkflowNodeRunFailedError
@@ -21,7 +22,6 @@ from core.workflow.nodes.enums import ErrorStrategy
 from core.workflow.nodes.event import RunCompletedEvent
 from core.workflow.nodes.event.types import NodeEvent
 from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING
-from core.workflow.repository import RepositoryFactory
 from core.workflow.workflow_entry import WorkflowEntry
 from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated
 from extensions.ext_database import db
@@ -285,12 +285,8 @@ class WorkflowService:
         workflow_node_execution.workflow_id = draft_workflow.id

         # Use the repository to save the workflow node execution
-        repository = RepositoryFactory.create_workflow_node_execution_repository(
-            params={
-                "tenant_id": app_model.tenant_id,
-                "app_id": app_model.id,
-                "session_factory": db.session.get_bind(),
-            }
+        repository = SQLAlchemyWorkflowNodeExecutionRepository(
+            session_factory=db.engine, tenant_id=app_model.tenant_id, app_id=app_model.id
         )
         repository.save(workflow_node_execution)
@@ -7,7 +7,7 @@ from celery import shared_task  # type: ignore
 from sqlalchemy import delete
 from sqlalchemy.exc import SQLAlchemyError

-from core.workflow.repository import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from extensions.ext_database import db
 from models.dataset import AppDatasetJoin
 from models.model import (
@@ -189,12 +189,8 @@ def _delete_app_workflow_runs(tenant_id: str, app_id: str):

 def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
     # Create a repository instance for WorkflowNodeExecution
-    repository = RepositoryFactory.create_workflow_node_execution_repository(
-        params={
-            "tenant_id": tenant_id,
-            "app_id": app_id,
-            "session_factory": db.session.get_bind(),
-        }
+    repository = SQLAlchemyWorkflowNodeExecutionRepository(
+        session_factory=db.engine, tenant_id=tenant_id, app_id=app_id
     )

     # Use the clear method to delete all records for this tenant_id and app_id
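The trailing comment points at the repository's `clear` method: because `tenant_id` and `app_id` are baked into the instance, the deletion task no longer assembles its own bulk delete. A sketch of the completed function under that assumption (the zero-argument `clear()` signature is inferred from the comment, not shown in this diff):

```python
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from extensions.ext_database import db


def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
    repository = SQLAlchemyWorkflowNodeExecutionRepository(
        session_factory=db.engine, tenant_id=tenant_id, app_id=app_id
    )
    # clear() deletes every record matching the tenant_id/app_id scope
    # the repository was constructed with.
    repository.clear()
```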
@@ -0,0 +1,348 @@
+import json
+import time
+from datetime import UTC, datetime
+from unittest.mock import MagicMock, patch
+
+import pytest
+from sqlalchemy.orm import Session
+
+from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom
+from core.app.entities.queue_entities import (
+    QueueNodeFailedEvent,
+    QueueNodeStartedEvent,
+    QueueNodeSucceededEvent,
+)
+from core.workflow.enums import SystemVariableKey
+from core.workflow.nodes import NodeType
+from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
+from core.workflow.workflow_cycle_manager import WorkflowCycleManager
+from models.enums import CreatedByRole
+from models.workflow import (
+    Workflow,
+    WorkflowNodeExecution,
+    WorkflowNodeExecutionStatus,
+    WorkflowRun,
+    WorkflowRunStatus,
+)
+
+
+@pytest.fixture
+def mock_app_generate_entity():
+    entity = MagicMock(spec=AdvancedChatAppGenerateEntity)
+    entity.inputs = {"query": "test query"}
+    entity.invoke_from = InvokeFrom.WEB_APP
+    # Create app_config as a separate mock
+    app_config = MagicMock()
+    app_config.tenant_id = "test-tenant-id"
+    app_config.app_id = "test-app-id"
+    entity.app_config = app_config
+    return entity
+
+
+@pytest.fixture
+def mock_workflow_system_variables():
+    return {
+        SystemVariableKey.QUERY: "test query",
+        SystemVariableKey.CONVERSATION_ID: "test-conversation-id",
+        SystemVariableKey.USER_ID: "test-user-id",
+        SystemVariableKey.APP_ID: "test-app-id",
+        SystemVariableKey.WORKFLOW_ID: "test-workflow-id",
+        SystemVariableKey.WORKFLOW_RUN_ID: "test-workflow-run-id",
+    }
+
+
+@pytest.fixture
+def mock_node_execution_repository():
+    repo = MagicMock(spec=WorkflowNodeExecutionRepository)
+    repo.get_by_node_execution_id.return_value = None
+    repo.get_running_executions.return_value = []
+    return repo
+
+
+@pytest.fixture
+def workflow_cycle_manager(mock_app_generate_entity, mock_workflow_system_variables, mock_node_execution_repository):
+    return WorkflowCycleManager(
+        application_generate_entity=mock_app_generate_entity,
+        workflow_system_variables=mock_workflow_system_variables,
+        workflow_node_execution_repository=mock_node_execution_repository,
+    )
+
+
+@pytest.fixture
+def mock_session():
+    session = MagicMock(spec=Session)
+    return session
+
+
+@pytest.fixture
+def mock_workflow():
+    workflow = MagicMock(spec=Workflow)
+    workflow.id = "test-workflow-id"
+    workflow.tenant_id = "test-tenant-id"
+    workflow.app_id = "test-app-id"
+    workflow.type = "chat"
+    workflow.version = "1.0"
+    workflow.graph = json.dumps({"nodes": [], "edges": []})
+    return workflow
+
+
+@pytest.fixture
+def mock_workflow_run():
+    workflow_run = MagicMock(spec=WorkflowRun)
+    workflow_run.id = "test-workflow-run-id"
+    workflow_run.tenant_id = "test-tenant-id"
+    workflow_run.app_id = "test-app-id"
+    workflow_run.workflow_id = "test-workflow-id"
+    workflow_run.status = WorkflowRunStatus.RUNNING
+    workflow_run.created_by_role = CreatedByRole.ACCOUNT
+    workflow_run.created_by = "test-user-id"
+    workflow_run.created_at = datetime.now(UTC).replace(tzinfo=None)
+    workflow_run.inputs_dict = {"query": "test query"}
+    workflow_run.outputs_dict = {"answer": "test answer"}
+    return workflow_run
+
+
+def test_init(
+    workflow_cycle_manager, mock_app_generate_entity, mock_workflow_system_variables, mock_node_execution_repository
+):
+    """Test initialization of WorkflowCycleManager"""
+    assert workflow_cycle_manager._workflow_run is None
+    assert workflow_cycle_manager._workflow_node_executions == {}
+    assert workflow_cycle_manager._application_generate_entity == mock_app_generate_entity
+    assert workflow_cycle_manager._workflow_system_variables == mock_workflow_system_variables
+    assert workflow_cycle_manager._workflow_node_execution_repository == mock_node_execution_repository
+
+
+def test_handle_workflow_run_start(workflow_cycle_manager, mock_session, mock_workflow):
+    """Test _handle_workflow_run_start method"""
+    # Mock session.scalar to return the workflow and max sequence
+    mock_session.scalar.side_effect = [mock_workflow, 5]
+
+    # Call the method
+    workflow_run = workflow_cycle_manager._handle_workflow_run_start(
+        session=mock_session,
+        workflow_id="test-workflow-id",
+        user_id="test-user-id",
+        created_by_role=CreatedByRole.ACCOUNT,
+    )
+
+    # Verify the result
+    assert workflow_run.tenant_id == mock_workflow.tenant_id
+    assert workflow_run.app_id == mock_workflow.app_id
+    assert workflow_run.workflow_id == mock_workflow.id
+    assert workflow_run.sequence_number == 6  # max_sequence + 1
+    assert workflow_run.status == WorkflowRunStatus.RUNNING
+    assert workflow_run.created_by_role == CreatedByRole.ACCOUNT
+    assert workflow_run.created_by == "test-user-id"
+
+    # Verify session.add was called
+    mock_session.add.assert_called_once_with(workflow_run)
+
+
+def test_handle_workflow_run_success(workflow_cycle_manager, mock_session, mock_workflow_run):
+    """Test _handle_workflow_run_success method"""
+    # Mock _get_workflow_run to return the mock_workflow_run
+    with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run):
+        # Call the method
+        result = workflow_cycle_manager._handle_workflow_run_success(
+            session=mock_session,
+            workflow_run_id="test-workflow-run-id",
+            start_at=time.perf_counter() - 10,  # 10 seconds ago
+            total_tokens=100,
+            total_steps=5,
+            outputs={"answer": "test answer"},
+        )
+
+        # Verify the result
+        assert result == mock_workflow_run
+        assert result.status == WorkflowRunStatus.SUCCEEDED
+        assert result.outputs == json.dumps({"answer": "test answer"})
+        assert result.total_tokens == 100
+        assert result.total_steps == 5
+        assert result.finished_at is not None
+
+
+def test_handle_workflow_run_failed(workflow_cycle_manager, mock_session, mock_workflow_run):
+    """Test _handle_workflow_run_failed method"""
+    # Mock _get_workflow_run to return the mock_workflow_run
+    with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run):
+        # Mock get_running_executions to return an empty list
+        workflow_cycle_manager._workflow_node_execution_repository.get_running_executions.return_value = []
+
+        # Call the method
+        result = workflow_cycle_manager._handle_workflow_run_failed(
+            session=mock_session,
+            workflow_run_id="test-workflow-run-id",
+            start_at=time.perf_counter() - 10,  # 10 seconds ago
+            total_tokens=50,
+            total_steps=3,
+            status=WorkflowRunStatus.FAILED,
+            error="Test error message",
+        )
+
+        # Verify the result
+        assert result == mock_workflow_run
+        assert result.status == WorkflowRunStatus.FAILED.value
+        assert result.error == "Test error message"
+        assert result.total_tokens == 50
+        assert result.total_steps == 3
+        assert result.finished_at is not None
+
+
+def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_run):
+    """Test _handle_node_execution_start method"""
+    # Create a mock event
+    event = MagicMock(spec=QueueNodeStartedEvent)
+    event.node_execution_id = "test-node-execution-id"
+    event.node_id = "test-node-id"
+    event.node_type = NodeType.LLM
+
+    # Create node_data as a separate mock
+    node_data = MagicMock()
+    node_data.title = "Test Node"
+    event.node_data = node_data
+
+    event.predecessor_node_id = "test-predecessor-node-id"
+    event.node_run_index = 1
+    event.parallel_mode_run_id = "test-parallel-mode-run-id"
+    event.in_iteration_id = "test-iteration-id"
+    event.in_loop_id = "test-loop-id"
+
+    # Call the method
+    result = workflow_cycle_manager._handle_node_execution_start(
+        workflow_run=mock_workflow_run,
+        event=event,
+    )
+
+    # Verify the result
+    assert result.tenant_id == mock_workflow_run.tenant_id
+    assert result.app_id == mock_workflow_run.app_id
+    assert result.workflow_id == mock_workflow_run.workflow_id
+    assert result.workflow_run_id == mock_workflow_run.id
+    assert result.node_execution_id == event.node_execution_id
+    assert result.node_id == event.node_id
+    assert result.node_type == event.node_type.value
+    assert result.title == event.node_data.title
|
||||||
|
assert result.status == WorkflowNodeExecutionStatus.RUNNING.value
|
||||||
|
assert result.created_by_role == mock_workflow_run.created_by_role
|
||||||
|
assert result.created_by == mock_workflow_run.created_by
|
||||||
|
|
||||||
|
# Verify save was called
|
||||||
|
workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(result)
|
||||||
|
|
||||||
|
# Verify the node execution was added to the cache
|
||||||
|
assert workflow_cycle_manager._workflow_node_executions[event.node_execution_id] == result
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_workflow_run(workflow_cycle_manager, mock_session, mock_workflow_run):
|
||||||
|
"""Test _get_workflow_run method"""
|
||||||
|
# Mock session.scalar to return the workflow run
|
||||||
|
mock_session.scalar.return_value = mock_workflow_run
|
||||||
|
|
||||||
|
# Call the method
|
||||||
|
result = workflow_cycle_manager._get_workflow_run(
|
||||||
|
session=mock_session,
|
||||||
|
workflow_run_id="test-workflow-run-id",
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify the result
|
||||||
|
assert result == mock_workflow_run
|
||||||
|
assert workflow_cycle_manager._workflow_run == mock_workflow_run
|
||||||
|
|
||||||
|
|
||||||
|
def test_handle_workflow_node_execution_success(workflow_cycle_manager):
|
||||||
|
"""Test _handle_workflow_node_execution_success method"""
|
||||||
|
# Create a mock event
|
||||||
|
event = MagicMock(spec=QueueNodeSucceededEvent)
|
||||||
|
event.node_execution_id = "test-node-execution-id"
|
||||||
|
event.inputs = {"input": "test input"}
|
||||||
|
event.process_data = {"process": "test process"}
|
||||||
|
event.outputs = {"output": "test output"}
|
||||||
|
event.execution_metadata = {"metadata": "test metadata"}
|
||||||
|
event.start_at = datetime.now(UTC).replace(tzinfo=None)
|
||||||
|
|
||||||
|
# Create a mock workflow node execution
|
||||||
|
node_execution = MagicMock(spec=WorkflowNodeExecution)
|
||||||
|
node_execution.node_execution_id = "test-node-execution-id"
|
||||||
|
|
||||||
|
# Mock _get_workflow_node_execution to return the mock node execution
|
||||||
|
with patch.object(workflow_cycle_manager, "_get_workflow_node_execution", return_value=node_execution):
|
||||||
|
# Call the method
|
||||||
|
result = workflow_cycle_manager._handle_workflow_node_execution_success(
|
||||||
|
event=event,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify the result
|
||||||
|
assert result == node_execution
|
||||||
|
assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED.value
|
||||||
|
assert result.inputs == json.dumps(event.inputs)
|
||||||
|
assert result.process_data == json.dumps(event.process_data)
|
||||||
|
assert result.outputs == json.dumps(event.outputs)
|
||||||
|
assert result.finished_at is not None
|
||||||
|
assert result.elapsed_time is not None
|
||||||
|
|
||||||
|
# Verify update was called
|
||||||
|
workflow_cycle_manager._workflow_node_execution_repository.update.assert_called_once_with(node_execution)
|
||||||
|
|
||||||
|
|
||||||
|
def test_handle_workflow_run_partial_success(workflow_cycle_manager, mock_session, mock_workflow_run):
|
||||||
|
"""Test _handle_workflow_run_partial_success method"""
|
||||||
|
# Mock _get_workflow_run to return the mock_workflow_run
|
||||||
|
with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run):
|
||||||
|
# Call the method
|
||||||
|
result = workflow_cycle_manager._handle_workflow_run_partial_success(
|
||||||
|
session=mock_session,
|
||||||
|
workflow_run_id="test-workflow-run-id",
|
||||||
|
start_at=time.perf_counter() - 10, # 10 seconds ago
|
||||||
|
total_tokens=75,
|
||||||
|
total_steps=4,
|
||||||
|
outputs={"partial_answer": "test partial answer"},
|
||||||
|
exceptions_count=2,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify the result
|
||||||
|
assert result == mock_workflow_run
|
||||||
|
assert result.status == WorkflowRunStatus.PARTIAL_SUCCEEDED.value
|
||||||
|
assert result.outputs == json.dumps({"partial_answer": "test partial answer"})
|
||||||
|
assert result.total_tokens == 75
|
||||||
|
assert result.total_steps == 4
|
||||||
|
assert result.exceptions_count == 2
|
||||||
|
assert result.finished_at is not None
|
||||||
|
|
||||||
|
|
||||||
|
def test_handle_workflow_node_execution_failed(workflow_cycle_manager):
|
||||||
|
"""Test _handle_workflow_node_execution_failed method"""
|
||||||
|
# Create a mock event
|
||||||
|
event = MagicMock(spec=QueueNodeFailedEvent)
|
||||||
|
event.node_execution_id = "test-node-execution-id"
|
||||||
|
event.inputs = {"input": "test input"}
|
||||||
|
event.process_data = {"process": "test process"}
|
||||||
|
event.outputs = {"output": "test output"}
|
||||||
|
event.execution_metadata = {"metadata": "test metadata"}
|
||||||
|
event.start_at = datetime.now(UTC).replace(tzinfo=None)
|
||||||
|
event.error = "Test error message"
|
||||||
|
|
||||||
|
# Create a mock workflow node execution
|
||||||
|
node_execution = MagicMock(spec=WorkflowNodeExecution)
|
||||||
|
node_execution.node_execution_id = "test-node-execution-id"
|
||||||
|
|
||||||
|
# Mock _get_workflow_node_execution to return the mock node execution
|
||||||
|
with patch.object(workflow_cycle_manager, "_get_workflow_node_execution", return_value=node_execution):
|
||||||
|
# Call the method
|
||||||
|
result = workflow_cycle_manager._handle_workflow_node_execution_failed(
|
||||||
|
event=event,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Verify the result
|
||||||
|
assert result == node_execution
|
||||||
|
assert result.status == WorkflowNodeExecutionStatus.FAILED.value
|
||||||
|
assert result.error == "Test error message"
|
||||||
|
assert result.inputs == json.dumps(event.inputs)
|
||||||
|
assert result.process_data == json.dumps(event.process_data)
|
||||||
|
assert result.outputs == json.dumps(event.outputs)
|
||||||
|
assert result.finished_at is not None
|
||||||
|
assert result.elapsed_time is not None
|
||||||
|
assert result.execution_metadata == json.dumps(event.execution_metadata)
|
||||||
|
|
||||||
|
# Verify update was called
|
||||||
|
workflow_cycle_manager._workflow_node_execution_repository.update.assert_called_once_with(node_execution)
|
@@ -8,7 +8,7 @@ import pytest
 from pytest_mock import MockerFixture
 from sqlalchemy.orm import Session, sessionmaker
 
-from core.repositories.workflow_node_execution.sqlalchemy_repository import SQLAlchemyWorkflowNodeExecutionRepository
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
 from core.workflow.repository.workflow_node_execution_repository import OrderConfig
 from models.workflow import WorkflowNodeExecution
 
@@ -80,7 +80,7 @@ def test_get_by_node_execution_id(repository, session, mocker: MockerFixture):
     """Test get_by_node_execution_id method."""
     session_obj, _ = session
     # Set up mock
-    mock_select = mocker.patch("core.repositories.workflow_node_execution.sqlalchemy_repository.select")
+    mock_select = mocker.patch("core.repositories.sqlalchemy_workflow_node_execution_repository.select")
     mock_stmt = mocker.MagicMock()
     mock_select.return_value = mock_stmt
     mock_stmt.where.return_value = mock_stmt
@@ -99,7 +99,7 @@ def test_get_by_workflow_run(repository, session, mocker: MockerFixture):
     """Test get_by_workflow_run method."""
     session_obj, _ = session
     # Set up mock
-    mock_select = mocker.patch("core.repositories.workflow_node_execution.sqlalchemy_repository.select")
+    mock_select = mocker.patch("core.repositories.sqlalchemy_workflow_node_execution_repository.select")
     mock_stmt = mocker.MagicMock()
     mock_select.return_value = mock_stmt
     mock_stmt.where.return_value = mock_stmt
@@ -120,7 +120,7 @@ def test_get_running_executions(repository, session, mocker: MockerFixture):
     """Test get_running_executions method."""
     session_obj, _ = session
     # Set up mock
-    mock_select = mocker.patch("core.repositories.workflow_node_execution.sqlalchemy_repository.select")
+    mock_select = mocker.patch("core.repositories.sqlalchemy_workflow_node_execution_repository.select")
     mock_stmt = mocker.MagicMock()
     mock_select.return_value = mock_stmt
     mock_stmt.where.return_value = mock_stmt
@@ -158,7 +158,7 @@ def test_clear(repository, session, mocker: MockerFixture):
     """Test clear method."""
     session_obj, _ = session
     # Set up mock
-    mock_delete = mocker.patch("core.repositories.workflow_node_execution.sqlalchemy_repository.delete")
+    mock_delete = mocker.patch("core.repositories.sqlalchemy_workflow_node_execution_repository.delete")
     mock_stmt = mocker.MagicMock()
     mock_delete.return_value = mock_stmt
     mock_stmt.where.return_value = mock_stmt
@@ -2,6 +2,9 @@
 
 set -x
 
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/.."
+
 # run mypy checks
 uv run --directory api --dev --with pip \
     python -m mypy --install-types --non-interactive --cache-fine-grained --sqlite-cache .
@@ -1,6 +1,9 @@
 #!/bin/bash
 set -x
 
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
 # ModelRuntime
 dev/pytest/pytest_model_runtime.sh
 
@@ -1,4 +1,7 @@
 #!/bin/bash
 set -x
 
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
 pytest api/tests/artifact_tests/
@@ -1,6 +1,9 @@
 #!/bin/bash
 set -x
 
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
 pytest api/tests/integration_tests/model_runtime/anthropic \
     api/tests/integration_tests/model_runtime/azure_openai \
     api/tests/integration_tests/model_runtime/openai api/tests/integration_tests/model_runtime/chatglm \
@@ -1,4 +1,7 @@
 #!/bin/bash
 set -x
 
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
 pytest api/tests/integration_tests/tools
@@ -1,5 +1,8 @@
 #!/bin/bash
 set -x
 
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
 # libs
 pytest api/tests/unit_tests
@@ -1,6 +1,9 @@
 #!/bin/bash
 set -x
 
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
 pytest api/tests/integration_tests/vdb/chroma \
     api/tests/integration_tests/vdb/milvus \
     api/tests/integration_tests/vdb/pgvecto_rs \
@@ -1,4 +1,7 @@
 #!/bin/bash
 set -x
 
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
 pytest api/tests/integration_tests/workflow
@@ -2,6 +2,9 @@
 
 set -x
 
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/.."
+
 # run ruff linter
 uv run --directory api --dev ruff check --fix ./
 
@@ -6,5 +6,8 @@ if ! command -v uv &> /dev/null; then
     pip install uv
 fi
 
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/.."
+
 # check uv.lock in sync with pyproject.toml
 uv lock --project api
@@ -188,7 +188,7 @@ services:
 
   # ssrf_proxy server
   # for more information, please refer to
-  # https://docs.dify.ai/learn-more/faq/install-faq#id-18.-why-is-ssrf_proxy-needed
+  # https://docs.dify.ai/learn-more/faq/install-faq#18-why-is-ssrf-proxy-needed%3F
   ssrf_proxy:
     image: ubuntu/squid:latest
     restart: always
@@ -123,7 +123,7 @@ services:
 
   # ssrf_proxy server
   # for more information, please refer to
-  # https://docs.dify.ai/learn-more/faq/install-faq#id-18.-why-is-ssrf_proxy-needed
+  # https://docs.dify.ai/learn-more/faq/install-faq#18-why-is-ssrf-proxy-needed%3F
   ssrf_proxy:
     image: ubuntu/squid:latest
     restart: always
@@ -677,7 +677,7 @@ services:
 
   # ssrf_proxy server
   # for more information, please refer to
-  # https://docs.dify.ai/learn-more/faq/install-faq#id-18.-why-is-ssrf_proxy-needed
+  # https://docs.dify.ai/learn-more/faq/install-faq#18-why-is-ssrf-proxy-needed%3F
   ssrf_proxy:
     image: ubuntu/squid:latest
     restart: always
@@ -98,7 +98,7 @@ const ExtraInfo = ({ isMobile, relatedApps, expand }: IExtraInfoProps) => {
         className='mt-2 inline-flex cursor-pointer items-center text-xs text-text-accent'
         href={
           locale === LanguagesSupported[1]
-            ? 'https://docs.dify.ai/v/zh-hans/guides/knowledge-base/integrate-knowledge-within-application'
+            ? 'https://docs.dify.ai/zh-hans/guides/knowledge-base/integrate-knowledge-within-application'
             : 'https://docs.dify.ai/guides/knowledge-base/integrate-knowledge-within-application'
         }
         target='_blank' rel='noopener noreferrer'
@@ -46,8 +46,8 @@ const HistoryPanel: FC<Props> = ({
     <div className='flex justify-between rounded-b-xl bg-background-section-burn px-3 py-2 text-xs text-text-secondary'>
       <div>{t('appDebug.feature.conversationHistory.tip')}
         <a href={`${locale === LanguagesSupported[1]
-          ? 'https://docs.dify.ai/v/zh-hans/guides/application-design/prompt-engineering'
-          : 'https://docs.dify.ai/features/prompt-engineering'}`}
+          ? 'https://docs.dify.ai/zh-hans/learn-more/extended-reading/prompt-engineering/README'
+          : 'https://docs.dify.ai/en/features/prompt-engineering'}`}
           target='_blank' rel='noopener noreferrer'
           className='text-[#155EEF]'>{t('appDebug.feature.conversationHistory.learnMore')}
         </a>
@@ -25,7 +25,7 @@ const AdvancedModeWarning: FC<Props> = ({
       <span className='text-gray-700'>{t('appDebug.promptMode.advancedWarning.description')}</span>
       <a
         className='font-medium text-[#155EEF]'
-        href={`https://docs.dify.ai/${locale === LanguagesSupported[1] ? 'v/zh-hans/guides/application-design/prompt-engineering' : 'features/prompt-engineering'}`}
+        href={`https://docs.dify.ai/${locale === LanguagesSupported[1] ? '/guides/features/prompt-engineering' : 'features/prompt-engineering'}`}
         target='_blank' rel='noopener noreferrer'
       >
         {t('appDebug.promptMode.advancedWarning.learnMore')}
@@ -310,17 +310,17 @@ function AppPreview({ mode }: { mode: AppMode }) {
     'chat': {
       title: t('app.types.chatbot'),
      description: t('app.newApp.chatbotUserDescription'),
-      link: 'https://docs.dify.ai/guides/application-orchestrate#application_type',
+      link: 'https://docs.dify.ai/guides/application-orchestrate/readme',
     },
     'advanced-chat': {
       title: t('app.types.advanced'),
       description: t('app.newApp.advancedUserDescription'),
-      link: 'https://docs.dify.ai/guides/workflow',
+      link: 'https://docs.dify.ai/en/guides/workflow/README',
     },
     'agent-chat': {
      title: t('app.types.agent'),
      description: t('app.newApp.agentUserDescription'),
-      link: 'https://docs.dify.ai/guides/application-orchestrate/agent',
+      link: 'https://docs.dify.ai/en/guides/application-orchestrate/agent',
     },
     'completion': {
       title: t('app.newApp.completeApp'),
@@ -330,7 +330,7 @@ function AppPreview({ mode }: { mode: AppMode }) {
     'workflow': {
       title: t('app.types.workflow'),
       description: t('app.newApp.workflowUserDescription'),
-      link: 'https://docs.dify.ai/guides/workflow',
+      link: 'https://docs.dify.ai/en/guides/workflow/README',
     },
   }
   const previewInfo = modeToPreviewInfoMap[mode]
@@ -103,7 +103,7 @@ const CustomizeModal: FC<IShareLinkProps> = ({
     window.open(
       `https://docs.dify.ai/${locale !== LanguagesSupported[1]
         ? 'user-guide/launching-dify-apps/developing-with-apis'
-        : `v/${locale.toLowerCase()}/guides/application-publishing/developing-with-apis`
+        : `${locale.toLowerCase()}/guides/application-publishing/developing-with-apis`
      }`,
      '_blank',
    )
@@ -241,7 +241,7 @@ const SettingsModal: FC<ISettingsModalProps> = ({
           </div>
           <div className='system-xs-regular mt-0.5 text-text-tertiary'>
             <span>{t(`${prefixSettings}.modalTip`)}</span>
-            <Link href={`${locale === LanguagesSupported[1] ? 'https://docs.dify.ai/zh-hans/guides/application-publishing/launch-your-webapp-quickly#she-zhi-ni-de-ai-zhan-dian' : 'https://docs.dify.ai/guides/application-publishing/launch-your-webapp-quickly#setting-up-your-ai-site'}`} target='_blank' rel='noopener noreferrer' className='text-text-accent'>{t('common.operation.learnMore')}</Link>
+            <Link href={`${locale === LanguagesSupported[1] ? 'https://docs.dify.ai/zh-hans/guides/application-publishing/launch-your-webapp-quickly#she-zhi-ni-de-ai-zhan-dian' : 'https://docs.dify.ai/en/guides/application-publishing/launch-your-webapp-quickly/README'}`} target='_blank' rel='noopener noreferrer' className='text-text-accent'>{t('common.operation.learnMore')}</Link>
           </div>
         </div>
         {/* form body */}
@@ -264,8 +264,8 @@ const Documents: FC<IDocumentsProps> = ({ datasetId }) => {
             target='_blank'
             href={
               locale === LanguagesSupported[1]
-                ? 'https://docs.dify.ai/v/zh-hans/guides/knowledge-base/integrate-knowledge-within-application'
-                : 'https://docs.dify.ai/guides/knowledge-base/integrate-knowledge-within-application'
+                ? 'https://docs.dify.ai/zh-hans/guides/knowledge-base/integrate-knowledge-within-application'
+                : 'https://docs.dify.ai/en/guides/knowledge-base/integrate-knowledge-within-application'
             }
           >
             <span>{t('datasetDocuments.list.learnMore')}</span>
@@ -16,12 +16,12 @@ const InfoPanel = () => {
       </span>
       <span className='system-sm-regular text-text-tertiary'>
         {t('dataset.connectDatasetIntro.content.front')}
-        <a className='system-sm-regular ml-1 text-text-accent' href='https://docs.dify.ai/guides/knowledge-base/external-knowledge-api-documentation' target='_blank' rel="noopener noreferrer">
+        <a className='system-sm-regular ml-1 text-text-accent' href='https://docs.dify.ai/en/guides/knowledge-base/external-knowledge-api' target='_blank' rel="noopener noreferrer">
           {t('dataset.connectDatasetIntro.content.link')}
         </a>
         {t('dataset.connectDatasetIntro.content.end')}
       </span>
-      <a className='system-sm-regular self-stretch text-text-accent' href='https://docs.dify.ai/guides/knowledge-base/connect-external-knowledge' target='_blank' rel="noopener noreferrer">
+      <a className='system-sm-regular self-stretch text-text-accent' href='https://docs.dify.ai/en/guides/knowledge-base/connect-external-knowledge-base' target='_blank' rel="noopener noreferrer">
         {t('dataset.connectDatasetIntro.learnMore')}
       </a>
     </p>
@@ -59,7 +59,7 @@ const ExternalKnowledgeBaseCreate: React.FC<ExternalKnowledgeBaseCreateProps> =
             <span>{t('dataset.connectHelper.helper1')}</span>
             <span className='system-sm-medium text-text-secondary'>{t('dataset.connectHelper.helper2')}</span>
             <span>{t('dataset.connectHelper.helper3')}</span>
-            <a className='system-sm-regular self-stretch text-text-accent' href='https://docs.dify.ai/guides/knowledge-base/connect-external-knowledge' target='_blank' rel="noopener noreferrer">
+            <a className='system-sm-regular self-stretch text-text-accent' href='https://docs.dify.ai/en/guides/knowledge-base/connect-external-knowledge-base' target='_blank' rel="noopener noreferrer">
               {t('dataset.connectHelper.helper4')}
             </a>
             <span>{t('dataset.connectHelper.helper5')} </span>
@@ -90,7 +90,7 @@ Workflow applications offers non-session support and is ideal for translation, a
     Each streaming chunk starts with `data:`, separated by two newline characters `\n\n`, as shown below:
     <CodeGroup>
     ```streaming {{ title: 'Response' }}
-    data: {"event": "message", "task_id": "900bbd43-dc0b-4383-a372-aa6e6c414227", "id": "663c5084-a254-4040-8ad3-51f2a3c1a77c", "answer": "Hi", "created_at": 1705398420}\n\n
+    data: {"event": "text_chunk", "workflow_run_id": "b85e5fc5-751b-454d-b14e-dc5f240b0a31", "task_id": "bd029338-b068-4d34-a331-fc85478922c2", "data": {"text": "\u4e3a\u4e86", "from_variable_selector": ["1745912968134", "text"]}}\n\n
     ```
     </CodeGroup>
     The structure of the streaming chunks varies depending on the `event`:
@@ -116,6 +116,13 @@ Workflow applications offers non-session support and is ideal for translation, a
       - `predecessor_node_id` (string) optional Prefix node ID, used for canvas display execution path
       - `inputs` (object) Contents of all preceding node variables used in the node
       - `created_at` (timestamp) timestamp of start, e.g., 1705395332
+  - `event: text_chunk` Text fragment
+    - `task_id` (string) Task ID, used for request tracking and the below Stop Generate API
+    - `workflow_run_id` (string) Unique ID of workflow execution
+    - `event` (string) fixed to `text_chunk`
+    - `data` (object) detail
+      - `text` (string) Text content
+      - `from_variable_selector` (array) Text source path, helping developers understand which node and variable generated the text
   - `event: node_finished` node execution ends, success or failure in different states in the same event
     - `task_id` (string) Task ID, used for request tracking and the below Stop Generate API
     - `workflow_run_id` (string) Unique ID of workflow execution
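As an editorial aside for readers wiring a client against the `text_chunk` framing documented above: the sketch below shows one way such a stream could be consumed. It is a minimal sketch, not part of this commit; the endpoint URL, header names, and request body mirror the public workflow-run API as an assumption, and only the `data:` framing and the `text_chunk` payload shape are taken from the docs above.

```ts
// Minimal sketch of accumulating `text_chunk` events from the streaming endpoint.
// Assumptions: endpoint, auth header, and body follow the public workflow-run API;
// the `data:` prefix and `\n\n` chunk separator follow the docs above.
async function collectWorkflowText(apiKey: string, inputs: Record<string, unknown>) {
  const response = await fetch('https://api.dify.ai/v1/workflows/run', {
    method: 'POST',
    headers: { 'Authorization': `Bearer ${apiKey}`, 'Content-Type': 'application/json' },
    body: JSON.stringify({ inputs, response_mode: 'streaming', user: 'example-user' }),
  })
  const reader = response.body!.getReader()
  const decoder = new TextDecoder()
  let buffer = ''
  let text = ''
  for (;;) {
    const { done, value } = await reader.read()
    if (done)
      break
    buffer += decoder.decode(value, { stream: true })
    const chunks = buffer.split('\n\n') // chunks are separated by two newlines
    buffer = chunks.pop() ?? '' // keep a possibly incomplete trailing chunk
    for (const chunk of chunks) {
      if (!chunk.startsWith('data:'))
        continue
      const payload = JSON.parse(chunk.slice(5))
      if (payload.event === 'text_chunk')
        text += payload.data.text // incremental fragment; source node is in `from_variable_selector`
    }
  }
  return text
}
```

The same event names and payload fields appear in the Japanese and Chinese versions of this document below.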
@@ -93,7 +93,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
     各ストリーミングチャンクは`data:`で始まり、2つの改行文字`\n\n`で区切られます。以下のように表示されます:
     <CodeGroup>
     ```streaming {{ title: '応答' }}
-    data: {"event": "message", "task_id": "900bbd43-dc0b-4383-a372-aa6e6c414227", "id": "663c5084-a254-4040-8ad3-51f2a3c1a77c", "answer": "Hi", "created_at": 1705398420}\n\n
+    data: {"event": "text_chunk", "workflow_run_id": "b85e5fc5-751b-454d-b14e-dc5f240b0a31", "task_id": "bd029338-b068-4d34-a331-fc85478922c2", "data": {"text": "\u4e3a\u4e86", "from_variable_selector": ["1745912968134", "text"]}}\n\n
     ```
     </CodeGroup>
     ストリーミングチャンクの構造は`event`に応じて異なります:
@@ -119,6 +119,13 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
       - `predecessor_node_id` (string) オプションのプレフィックスノードID、キャンバス表示実行パスに使用
       - `inputs` (object) ノードで使用されるすべての前のノード変数の内容
       - `created_at` (timestamp) 開始のタイムスタンプ、例:1705395332
+  - `event: text_chunk` テキストフラグメント
+    - `task_id` (string) タスクID、リクエスト追跡と以下のStop Generate APIに使用
+    - `workflow_run_id` (string) ワークフロー実行の一意のID
+    - `event` (string) `text_chunk`に固定
+    - `data` (object) 詳細
+      - `text` (string) テキスト内容
+      - `from_variable_selector` (array) テキスト生成元パス(開発者がどのノードのどの変数から生成されたかを理解するための情報)
   - `event: node_finished` ノード実行終了、同じイベントで異なる状態で成功または失敗
     - `task_id` (string) タスクID、リクエスト追跡と以下のStop Generate APIに使用
     - `workflow_run_id` (string) ワークフロー実行の一意のID
@@ -87,7 +87,7 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
     每个流式块均为 data: 开头,块之间以 `\n\n` 即两个换行符分隔,如下所示:
     <CodeGroup>
     ```streaming {{ title: 'Response' }}
-    data: {"event": "message", "task_id": "900bbd43-dc0b-4383-a372-aa6e6c414227", "id": "663c5084-a254-4040-8ad3-51f2a3c1a77c", "answer": "Hi", "created_at": 1705398420}\n\n
+    data: {"event": "text_chunk", "workflow_run_id": "b85e5fc5-751b-454d-b14e-dc5f240b0a31", "task_id": "bd029338-b068-4d34-a331-fc85478922c2", "data": {"text": "\u4e3a\u4e86", "from_variable_selector": ["1745912968134", "text"]}}\n\n
     ```
     </CodeGroup>
     流式块中根据 `event` 不同,结构也不同,包含以下类型:
@@ -113,6 +113,13 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
       - `predecessor_node_id` (string) 前置节点 ID,用于画布展示执行路径
       - `inputs` (object) 节点中所有使用到的前置节点变量内容
       - `created_at` (timestamp) 开始时间
+  - `event: text_chunk` 文本片段
+    - `task_id` (string) 任务 ID,用于请求跟踪和下方的停止响应接口
+    - `workflow_run_id` (string) workflow 执行 ID
+    - `event` (string) 固定为 `text_chunk`
+    - `data` (object) 详细内容
+      - `text` (string) 文本内容
+      - `from_variable_selector` (array) 文本来源路径,帮助开发者了解文本是由哪个节点的哪个变量生成的
   - `event: node_finished` node 执行结束,成功失败同一事件中不同状态
     - `task_id` (string) 任务 ID,用于请求跟踪和下方的停止响应接口
     - `workflow_run_id` (string) workflow 执行 ID
@@ -28,7 +28,7 @@ const Contribute = ({ onRefreshData }: Props) => {
   const linkUrl = useMemo(() => {
     if (language.startsWith('zh_'))
       return 'https://docs.dify.ai/zh-hans/guides/tools#ru-he-chuang-jian-zi-ding-yi-gong-ju'
-    return 'https://docs.dify.ai/guides/tools#how-to-create-custom-tools'
+    return 'https://docs.dify.ai/en/guides/tools#how-to-create-custom-tools'
   }, [language])
 
   const [isShowEditCollectionToolModal, setIsShowEditCustomCollectionModal] = useState(false)
@@ -8,6 +8,7 @@ import type { WorkflowProps } from '@/app/components/workflow'
 import WorkflowChildren from './workflow-children'
 import {
   useNodesSyncDraft,
+  useWorkflowRefreshDraft,
   useWorkflowRun,
   useWorkflowStartRun,
 } from '../hooks'
@@ -32,6 +33,7 @@ const WorkflowMain = ({
     doSyncWorkflowDraft,
     syncWorkflowDraftWhenPageClose,
   } = useNodesSyncDraft()
+  const { handleRefreshWorkflowDraft } = useWorkflowRefreshDraft()
   const {
     handleBackupDraft,
     handleLoadBackupDraft,
@@ -49,6 +51,7 @@ const WorkflowMain = ({
     return {
       syncWorkflowDraftWhenPageClose,
      doSyncWorkflowDraft,
+      handleRefreshWorkflowDraft,
      handleBackupDraft,
      handleLoadBackupDraft,
      handleRestoreFromPublishedWorkflow,
@@ -61,6 +64,7 @@ const WorkflowMain = ({
   }, [
     syncWorkflowDraftWhenPageClose,
     doSyncWorkflowDraft,
+    handleRefreshWorkflowDraft,
     handleBackupDraft,
     handleLoadBackupDraft,
     handleRestoreFromPublishedWorkflow,
@@ -4,3 +4,4 @@ export * from './use-nodes-sync-draft'
 export * from './use-workflow-run'
 export * from './use-workflow-start-run'
 export * from './use-is-chat-mode'
+export * from './use-workflow-refresh-draft'
@@ -6,20 +6,20 @@ import {
   useWorkflowStore,
 } from '@/app/components/workflow/store'
 import { BlockEnum } from '@/app/components/workflow/types'
-import { useWorkflowUpdate } from '@/app/components/workflow/hooks'
 import {
   useNodesReadOnly,
 } from '@/app/components/workflow/hooks/use-workflow'
 import { syncWorkflowDraft } from '@/service/workflow'
 import { useFeaturesStore } from '@/app/components/base/features/hooks'
 import { API_PREFIX } from '@/config'
+import { useWorkflowRefreshDraft } from '.'
 
 export const useNodesSyncDraft = () => {
   const store = useStoreApi()
   const workflowStore = useWorkflowStore()
   const featuresStore = useFeaturesStore()
   const { getNodesReadOnly } = useNodesReadOnly()
-  const { handleRefreshWorkflowDraft } = useWorkflowUpdate()
+  const { handleRefreshWorkflowDraft } = useWorkflowRefreshDraft()
   const params = useParams()
 
   const getPostParams = useCallback(() => {
@@ -0,0 +1,36 @@
+import { useCallback } from 'react'
+import { useWorkflowStore } from '@/app/components/workflow/store'
+import { fetchWorkflowDraft } from '@/service/workflow'
+import type { WorkflowDataUpdater } from '@/app/components/workflow/types'
+import { useWorkflowUpdate } from '@/app/components/workflow/hooks'
+
+export const useWorkflowRefreshDraft = () => {
+  const workflowStore = useWorkflowStore()
+  const { handleUpdateWorkflowCanvas } = useWorkflowUpdate()
+
+  const handleRefreshWorkflowDraft = useCallback(() => {
+    const {
+      appId,
+      setSyncWorkflowDraftHash,
+      setIsSyncingWorkflowDraft,
+      setEnvironmentVariables,
+      setEnvSecrets,
+      setConversationVariables,
+    } = workflowStore.getState()
+    setIsSyncingWorkflowDraft(true)
+    fetchWorkflowDraft(`/apps/${appId}/workflows/draft`).then((response) => {
+      handleUpdateWorkflowCanvas(response.graph as WorkflowDataUpdater)
+      setSyncWorkflowDraftHash(response.hash)
+      setEnvSecrets((response.environment_variables || []).filter(env => env.value_type === 'secret').reduce((acc, env) => {
+        acc[env.id] = env.value
+        return acc
+      }, {} as Record<string, string>))
+      setEnvironmentVariables(response.environment_variables?.map(env => env.value_type === 'secret' ? { ...env, value: '[__HIDDEN__]' } : env) || [])
+      setConversationVariables(response.conversation_variables || [])
+    }).finally(() => setIsSyncingWorkflowDraft(false))
+  }, [handleUpdateWorkflowCanvas, workflowStore])
+
+  return {
+    handleRefreshWorkflowDraft,
+  }
+}
@@ -18,6 +18,7 @@ type CommonHooksFnMap = {
     }
   ) => Promise<void>
   syncWorkflowDraftWhenPageClose: () => void
+  handleRefreshWorkflowDraft: () => void
   handleBackupDraft: () => void
   handleLoadBackupDraft: () => void
   handleRestoreFromPublishedWorkflow: (...args: any[]) => void
@@ -35,6 +36,7 @@ export type Shape = {
 export const createHooksStore = ({
   doSyncWorkflowDraft = async () => noop(),
   syncWorkflowDraftWhenPageClose = noop,
+  handleRefreshWorkflowDraft = noop,
   handleBackupDraft = noop,
   handleLoadBackupDraft = noop,
   handleRestoreFromPublishedWorkflow = noop,
@@ -48,6 +50,7 @@ export const createHooksStore = ({
     refreshAll: props => set(state => ({ ...state, ...props })),
     doSyncWorkflowDraft,
     syncWorkflowDraftWhenPageClose,
+    handleRefreshWorkflowDraft,
     handleBackupDraft,
     handleLoadBackupDraft,
     handleRestoreFromPublishedWorkflow,
@@ -16,3 +16,4 @@ export * from './use-shortcuts'
 export * from './use-workflow-interactions'
 export * from './use-workflow-mode'
 export * from './use-format-time-from-now'
+export * from './use-workflow-refresh-draft'
@@ -313,7 +313,6 @@ export const useWorkflowZoom = () => {
 
 export const useWorkflowUpdate = () => {
   const reactflow = useReactFlow()
-  const workflowStore = useWorkflowStore()
   const { eventEmitter } = useEventEmitterContextContext()
 
   const handleUpdateWorkflowCanvas = useCallback((payload: WorkflowDataUpdater) => {
@@ -333,32 +332,8 @@ export const useWorkflowUpdate = () => {
     setViewport(viewport)
   }, [eventEmitter, reactflow])
 
-  const handleRefreshWorkflowDraft = useCallback(() => {
-    const {
-      appId,
-      setSyncWorkflowDraftHash,
-      setIsSyncingWorkflowDraft,
-      setEnvironmentVariables,
-      setEnvSecrets,
-      setConversationVariables,
-    } = workflowStore.getState()
-    setIsSyncingWorkflowDraft(true)
-    fetchWorkflowDraft(`/apps/${appId}/workflows/draft`).then((response) => {
-      handleUpdateWorkflowCanvas(response.graph as WorkflowDataUpdater)
-      setSyncWorkflowDraftHash(response.hash)
-      setEnvSecrets((response.environment_variables || []).filter(env => env.value_type === 'secret').reduce((acc, env) => {
-        acc[env.id] = env.value
-        return acc
-      }, {} as Record<string, string>))
-      setEnvironmentVariables(response.environment_variables?.map(env => env.value_type === 'secret' ? { ...env, value: '[__HIDDEN__]' } : env) || [])
-      // #TODO chatVar sync#
-      setConversationVariables(response.conversation_variables || [])
-    }).finally(() => setIsSyncingWorkflowDraft(false))
-  }, [handleUpdateWorkflowCanvas, workflowStore])
-
   return {
     handleUpdateWorkflowCanvas,
-    handleRefreshWorkflowDraft,
   }
 }
 
@@ -0,0 +1,9 @@
+import { useHooksStore } from '@/app/components/workflow/hooks-store'
+
+export const useWorkflowRefreshDraft = () => {
+  const handleRefreshWorkflowDraft = useHooksStore(s => s.handleRefreshWorkflowDraft)
+
+  return {
+    handleRefreshWorkflowDraft,
+  }
+}
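The new file above pairs with the `createHooksStore` changes earlier in this commit: the store acts as a late-binding registry, seeded with `noop` defaults, into which the app shell injects the real `handleRefreshWorkflowDraft`, and leaf components read it back through `useHooksStore`. A minimal self-contained sketch of that indirection follows; it assumes a zustand store and trims the names to the essentials, so it is illustrative rather than the actual store definition.

```ts
// Sketch of the hooks-store indirection, assuming zustand; the real
// createHooksStore above carries many more callbacks than shown here.
import { create } from 'zustand'

const noop = () => {}

type Shape = {
  handleRefreshWorkflowDraft: () => void
  refreshAll: (props: Partial<Shape>) => void
}

// Defaults are no-ops so consumers can render before the implementation is injected.
const useHooksStore = create<Shape>(set => ({
  handleRefreshWorkflowDraft: noop,
  refreshAll: props => set(state => ({ ...state, ...props })),
}))

// Injection side: the app shell registers the concrete implementation.
export const registerRefreshDraft = (impl: () => void) =>
  useHooksStore.getState().refreshAll({ handleRefreshWorkflowDraft: impl })

// Consumer side: mirrors use-workflow-refresh-draft.ts above.
export const useWorkflowRefreshDraft = () => {
  const handleRefreshWorkflowDraft = useHooksStore(s => s.handleRefreshWorkflowDraft)
  return { handleRefreshWorkflowDraft }
}
```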
@@ -44,7 +44,7 @@ import {
   useShortcuts,
   useWorkflow,
   useWorkflowReadOnly,
-  useWorkflowUpdate,
+  useWorkflowRefreshDraft,
 } from './hooks'
 import CustomNode from './nodes'
 import CustomNoteNode from './note-node'
@@ -160,7 +160,7 @@ export const Workflow: FC<WorkflowProps> = memo(({
   // eslint-disable-next-line react-hooks/exhaustive-deps
   }, [])
 
-  const { handleRefreshWorkflowDraft } = useWorkflowUpdate()
+  const { handleRefreshWorkflowDraft } = useWorkflowRefreshDraft()
   const handleSyncWorkflowDraftWhenPageClose = useCallback(() => {
     if (document.visibilityState === 'hidden')
       syncWorkflowDraftWhenPageClose()
@@ -223,7 +223,7 @@ export const AgentStrategy = memo((props: AgentStrategyProps) => {
           <Link href={
             locale === LanguagesSupported[1]
               ? 'https://docs.dify.ai/zh-hans/guides/workflow/node/agent#xuan-ze-agent-ce-le'
-              : 'https://docs.dify.ai/guides/workflow/node/agent#select-an-agent-strategy'
+              : 'https://docs.dify.ai/en/guides/workflow/node/agent#select-an-agent-strategy'
           } className='text-text-accent-secondary' target='_blank'>
             {t('workflow.nodes.agent.learnMore')}
           </Link>
@@ -34,7 +34,7 @@ const DefaultValue = ({
       {t('workflow.nodes.common.errorHandle.defaultValue.desc')}
 
       <a
-        href='https://docs.dify.ai/guides/workflow/error-handling'
+        href='https://docs.dify.ai/en/guides/workflow/error-handling/README'
         target='_blank'
         className='text-text-accent'
       >
@@ -8,7 +8,7 @@ export const useNodeHelpLink = (nodeType: BlockEnum) => {
     if (language === 'zh_Hans')
       return 'https://docs.dify.ai/zh-hans/guides/workflow/node/'
 
-    return 'https://docs.dify.ai/guides/workflow/node/'
+    return 'https://docs.dify.ai/en/guides/workflow/node/'
   }, [language])
   const linkMap = useMemo(() => {
     if (language === 'zh_Hans') {
@@ -49,7 +49,7 @@ const DEFAULT_SCHEMA: SchemaRoot = {
 
 const HELP_DOC_URL = {
   zh_Hans: 'https://docs.dify.ai/zh-hans/guides/workflow/structured-outputs',
-  en_US: 'https://docs.dify.ai/guides/workflow/structured-outputs',
+  en_US: 'https://docs.dify.ai/en/guides/workflow/structured-outputs',
   ja_JP: 'https://docs.dify.ai/ja-jp/guides/workflow/structured-outputs',
 }
 
@@ -164,7 +164,7 @@ const OneMoreStep = () => {
           <Link
             className='system-xs-medium text-text-accent-secondary'
             target='_blank' rel='noopener noreferrer'
-            href={'https://docs.dify.ai/user-agreement/open-source'}
+            href={'https://docs.dify.ai/en/policies/agreement/README'}
           >{t('login.license.link')}</Link>
         </div>
       </div>
@ -455,7 +455,7 @@ const translation = {
|
|||||||
apiBasedExtension: {
|
apiBasedExtension: {
|
||||||
title: 'API-Erweiterungen bieten zentralisiertes API-Management und vereinfachen die Konfiguration für eine einfache Verwendung in Difys Anwendungen.',
|
title: 'API-Erweiterungen bieten zentralisiertes API-Management und vereinfachen die Konfiguration für eine einfache Verwendung in Difys Anwendungen.',
|
||||||
link: 'Erfahren Sie, wie Sie Ihre eigene API-Erweiterung entwickeln.',
|
link: 'Erfahren Sie, wie Sie Ihre eigene API-Erweiterung entwickeln.',
|
||||||
linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
|
linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
|
||||||
add: 'API-Erweiterung hinzufügen',
|
add: 'API-Erweiterung hinzufügen',
|
||||||
selector: {
|
selector: {
|
||||||
title: 'API-Erweiterung',
|
title: 'API-Erweiterung',
|
||||||
|
@ -69,7 +69,7 @@ const translation = {
|
|||||||
unknownError: 'Unbekannter Fehler',
|
unknownError: 'Unbekannter Fehler',
|
||||||
resetAll: 'Alles zurücksetzen',
|
resetAll: 'Alles zurücksetzen',
|
||||||
extractOnlyMainContent: 'Extrahieren Sie nur den Hauptinhalt (keine Kopf-, Navigations- und Fußzeilen usw.)',
|
extractOnlyMainContent: 'Extrahieren Sie nur den Hauptinhalt (keine Kopf-, Navigations- und Fußzeilen usw.)',
|
||||||
firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
|
firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
|
||||||
firecrawlTitle: 'Extrahieren von Webinhalten mit 🔥Firecrawl',
|
firecrawlTitle: 'Extrahieren von Webinhalten mit 🔥Firecrawl',
|
||||||
maxDepthTooltip: 'Maximale Tiefe für das Crawlen relativ zur eingegebenen URL. Tiefe 0 kratzt nur die Seite der eingegebenen URL, Tiefe 1 kratzt die URL und alles nach der eingegebenen URL + ein / und so weiter.',
|
maxDepthTooltip: 'Maximale Tiefe für das Crawlen relativ zur eingegebenen URL. Tiefe 0 kratzt nur die Seite der eingegebenen URL, Tiefe 1 kratzt die URL und alles nach der eingegebenen URL + ein / und so weiter.',
|
||||||
crawlSubPage: 'Unterseiten crawlen',
|
crawlSubPage: 'Unterseiten crawlen',
|
||||||
|
@ -476,7 +476,7 @@ const translation = {
|
|||||||
apiBasedExtension: {
|
apiBasedExtension: {
|
||||||
title: 'API extensions provide centralized API management, simplifying configuration for easy use across Dify\'s applications.',
|
title: 'API extensions provide centralized API management, simplifying configuration for easy use across Dify\'s applications.',
|
||||||
link: 'Learn how to develop your own API Extension.',
|
link: 'Learn how to develop your own API Extension.',
|
||||||
linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
|
linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
|
||||||
add: 'Add API Extension',
|
add: 'Add API Extension',
|
||||||
selector: {
|
selector: {
|
||||||
title: 'API Extension',
|
title: 'API Extension',
|
||||||
|
@ -80,10 +80,10 @@ const translation = {
|
|||||||
run: 'Run',
|
run: 'Run',
|
||||||
firecrawlTitle: 'Extract web content with 🔥Firecrawl',
|
firecrawlTitle: 'Extract web content with 🔥Firecrawl',
|
||||||
firecrawlDoc: 'Firecrawl docs',
|
firecrawlDoc: 'Firecrawl docs',
|
||||||
firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
|
firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
|
||||||
watercrawlTitle: 'Extract web content with Watercrawl',
|
watercrawlTitle: 'Extract web content with Watercrawl',
|
||||||
watercrawlDoc: 'Watercrawl docs',
|
watercrawlDoc: 'Watercrawl docs',
|
||||||
watercrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
|
watercrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
|
||||||
jinaReaderTitle: 'Convert the entire site to Markdown',
|
jinaReaderTitle: 'Convert the entire site to Markdown',
|
||||||
jinaReaderDoc: 'Learn more about Jina Reader',
|
jinaReaderDoc: 'Learn more about Jina Reader',
|
||||||
jinaReaderDocLink: 'https://jina.ai/reader',
|
jinaReaderDocLink: 'https://jina.ai/reader',
|
||||||
|
@ -459,7 +459,7 @@ const translation = {
|
|||||||
apiBasedExtension: {
|
apiBasedExtension: {
|
||||||
title: 'Las extensiones basadas en API proporcionan una gestión centralizada de API, simplificando la configuración para su fácil uso en las aplicaciones de Dify.',
|
title: 'Las extensiones basadas en API proporcionan una gestión centralizada de API, simplificando la configuración para su fácil uso en las aplicaciones de Dify.',
|
||||||
link: 'Aprende cómo desarrollar tu propia Extensión API.',
|
link: 'Aprende cómo desarrollar tu propia Extensión API.',
|
||||||
linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
|
linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
|
||||||
add: 'Agregar Extensión API',
|
add: 'Agregar Extensión API',
|
||||||
selector: {
|
selector: {
|
||||||
title: 'Extensión API',
|
title: 'Extensión API',
|
||||||
|
@@ -63,7 +63,7 @@ const translation = {
     run: 'Ejecutar',
     firecrawlTitle: 'Extraer contenido web con 🔥Firecrawl',
     firecrawlDoc: 'Documentación de Firecrawl',
-    firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+    firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
     options: 'Opciones',
     crawlSubPage: 'Rastrear subpáginas',
     limit: 'Límite',

@@ -459,7 +459,7 @@ const translation = {
   apiBasedExtension: {
     title: 'افزونههای مبتنی بر API مدیریت متمرکز API را فراهم میکنند و پیکربندی را برای استفاده آسان در برنامههای Dify ساده میکنند.',
     link: 'نحوه توسعه افزونه API خود را بیاموزید.',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'افزودن افزونه API',
     selector: {
       title: 'افزونه API',

@@ -63,7 +63,7 @@ const translation = {
     run: 'اجرا',
     firecrawlTitle: 'استخراج محتوای وب با fireFirecrawl',
     firecrawlDoc: 'مستندات Firecrawl',
-    firecrawlDocLink: '<a href="https://docs.dify.ai/guides/knowledge-base/sync-from-website">https://docs.dify.ai/guides/knowledge-base/sync-from-website</a>',
+    firecrawlDocLink: '<a href="https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website">https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website</a>',
     options: 'گزینهها',
     crawlSubPage: 'خزش صفحات فرعی',
     limit: 'محدودیت',

@@ -61,7 +61,7 @@ const translation = {
     preview: 'Aperçu',
     crawlSubPage: 'Explorer les sous-pages',
     configure: 'Configurer',
-    firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+    firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
     maxDepth: 'Profondeur maximale',
     fireCrawlNotConfigured: 'Firecrawl n’est pas configuré',
     firecrawlTitle: 'Extraire du contenu web avec 🔥Firecrawl',

@@ -476,7 +476,7 @@ const translation = {
     title:
       'एपीआई एक्सटेंशन केंद्रीकृत एपीआई प्रबंधन प्रदान करते हैं, जो Dify के अनुप्रयोगों में आसान उपयोग के लिए कॉन्फ़िगरेशन को सरल बनाते हैं।',
     link: 'अपना खुद का एपीआई एक्सटेंशन कैसे विकसित करें, यह जानें।',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'एपीआई एक्सटेंशन जोड़ें',
     selector: {
       title: 'एपीआई एक्सटेंशन',

@@ -483,7 +483,7 @@ const translation = {
     title:
       'Le estensioni API forniscono una gestione centralizzata delle API, semplificando la configurazione per un facile utilizzo nelle applicazioni di Dify.',
     link: 'Scopri come sviluppare la tua estensione API.',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'Aggiungi Estensione API',
     selector: {
       title: 'Estensione API',

@@ -476,7 +476,7 @@ const translation = {
   apiBasedExtension: {
     title: 'API拡張機能は、Difyのアプリケーション全体での簡単な使用のための設定を簡素化し、集中的なAPI管理を提供します。',
     link: '独自のAPI拡張機能を開発する方法について学ぶ。',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'API拡張機能を追加',
     selector: {
       title: 'API拡張機能',

@@ -72,7 +72,7 @@ const translation = {
     run: '実行',
     firecrawlTitle: '🔥Firecrawlを使っでウエブコンテンツを抽出',
     firecrawlDoc: 'Firecrawlドキュメント',
-    firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+    firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
     jinaReaderTitle: 'サイト全体をMarkdownに変換する',
     jinaReaderDoc: 'Jina Readerの詳細',
     jinaReaderDocLink: 'https://jina.ai/reader',

@@ -451,7 +451,7 @@ const translation = {
   apiBasedExtension: {
     title: 'API 기반 확장은 Dify 애플리케이션 전체에서 간편한 사용을 위한 설정을 단순화하고 집중적인 API 관리를 제공합니다.',
     link: '사용자 정의 API 기반 확장을 개발하는 방법 배우기',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'API 기반 확장 추가',
     selector: {
       title: 'API 기반 확장',

@@ -52,7 +52,7 @@ const translation = {
       failed: '생성에 실패했습니다',
     },
     website: {
-      firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+      firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
       limit: '한계',
       options: '옵션',
       firecrawlDoc: 'Firecrawl 문서',

@@ -469,7 +469,7 @@ const translation = {
     title:
       'Rozszerzenia oparte na interfejsie API zapewniają scentralizowane zarządzanie interfejsami API, upraszczając konfigurację dla łatwego użytkowania w aplikacjach Dify.',
     link: 'Dowiedz się, jak opracować własne rozszerzenie interfejsu API.',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'Dodaj rozszerzenie interfejsu API',
     selector: {
       title: 'Rozszerzenie interfejsu API',

@@ -54,7 +54,7 @@ const translation = {
     },
     website: {
       limit: 'Ograniczać',
-      firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+      firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
       firecrawlDoc: 'Dokumentacja Firecrawl',
       unknownError: 'Nieznany błąd',
       fireCrawlNotConfiguredDescription: 'Skonfiguruj Firecrawl z kluczem API, aby z niego korzystać.',

@@ -455,7 +455,7 @@ const translation = {
   apiBasedExtension: {
     title: 'As extensões de API fornecem gerenciamento centralizado de API, simplificando a configuração para uso fácil em todos os aplicativos da Dify.',
     link: 'Saiba como desenvolver sua própria Extensão de API.',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'Adicionar Extensão de API',
     selector: {
       title: 'Extensão de API',

@@ -58,7 +58,7 @@ const translation = {
     crawlSubPage: 'Rastrear subpáginas',
     selectAll: 'Selecionar tudo',
     resetAll: 'Redefinir tudo',
-    firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+    firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
     includeOnlyPaths: 'Incluir apenas caminhos',
     configure: 'Configurar',
     limit: 'Limite',

@@ -455,7 +455,7 @@ const translation = {
   apiBasedExtension: {
     title: 'Extensiile bazate pe API oferă o gestionare centralizată a API-urilor, simplificând configurația pentru o utilizare ușoară în aplicațiile Dify.',
     link: 'Aflați cum să dezvoltați propria extensie bazată pe API.',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'Adăugați extensie API',
     selector: {
       title: 'Extensie API',

@@ -65,7 +65,7 @@ const translation = {
     firecrawlTitle: 'Extrageți conținut web cu 🔥Firecrawl',
     unknownError: 'Eroare necunoscută',
     scrapTimeInfo: 'Pagini răzuite {{total}} în total în {{timp}}s',
-    firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+    firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
     excludePaths: 'Excluderea căilor',
     resetAll: 'Resetați toate',
     extractOnlyMainContent: 'Extrageți doar conținutul principal (fără anteturi, navigări, subsoluri etc.)',

@@ -459,7 +459,7 @@ const translation = {
   apiBasedExtension: {
     title: 'API-расширения обеспечивают централизованное управление API, упрощая настройку для удобного использования в приложениях Dify.',
     link: 'Узнайте, как разработать собственное API-расширение.',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'Добавить API Extension',
     selector: {
       title: 'API Extension',

@@ -63,7 +63,7 @@ const translation = {
     run: 'Запустить',
     firecrawlTitle: 'Извлечь веб-контент с помощью 🔥Firecrawl',
     firecrawlDoc: 'Документация Firecrawl',
-    firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+    firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
     options: 'Опции',
     crawlSubPage: 'Сканировать подстраницы',
     limit: 'Лимит',

@@ -452,7 +452,7 @@ const translation = {
   apiBasedExtension: {
     title: 'Razširitve API omogočajo centralizirano upravljanje API, kar poenostavi konfiguracijo za enostavno uporabo v aplikacijah Dify.',
     link: 'Naučite se, kako razviti svojo API razširitev.',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'Dodaj API razširitev',
     selector: {
       title: 'API razširitev',

@@ -681,7 +681,7 @@ const translation = {
     type: 'Vrsta',
     link: 'Preberite, kako razvijete lastno razširitev API-ja.',
     title: 'Razširitve API zagotavljajo centralizirano upravljanje API, kar poenostavlja konfiguracijo za enostavno uporabo v aplikacijah Dify.',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'Dodajanje razširitve API-ja',
   },
   about: {

@@ -71,7 +71,7 @@ const translation = {
     run: 'Zaženi',
     firecrawlTitle: 'Izvleci spletno vsebino z 🔥Firecrawl',
     firecrawlDoc: 'Firecrawl dokumentacija',
-    firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+    firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
     jinaReaderTitle: 'Pretvori celotno stran v Markdown',
     jinaReaderDoc: 'Več o Jina Reader',
     jinaReaderDocLink: 'https://jina.ai/reader',

@@ -454,7 +454,7 @@ const translation = {
   apiBasedExtension: {
     title: 'ส่วนขยาย API ให้การจัดการ API แบบรวมศูนย์ ทําให้การกําหนดค่าง่ายขึ้นเพื่อให้ใช้งานได้ง่ายในแอปพลิเคชันของ Dify',
     link: 'เรียนรู้วิธีพัฒนาส่วนขยาย API ของคุณเอง',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
     add: 'เพิ่มส่วนขยาย API',
     selector: {
       title: 'ส่วนขยาย API',

@@ -71,7 +71,7 @@ const translation = {
     run: 'วิ่ง',
     firecrawlTitle: 'แยกเนื้อหาเว็บด้วย 🔥Firecrawl',
     firecrawlDoc: 'เอกสาร Firecrawl',
-    firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+    firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
     jinaReaderTitle: 'แปลงทั้งไซต์เป็น Markdown',
     jinaReaderDoc: 'เรียนรู้เพิ่มเติมเกี่ยวกับ Jina Reader',
     jinaReaderDocLink: 'https://jina.ai/reader',

@@ -459,7 +459,7 @@ const translation = {
   apiBasedExtension: {
     title: 'API uzantıları merkezi API yönetimi sağlar, Dify\'nin uygulamaları arasında kolay kullanım için yapılandırmayı basitleştirir.',
     link: 'Kendi API Uzantınızı nasıl geliştireceğinizi öğrenin.',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'API Uzantısı Ekle',
     selector: {
       title: 'API Uzantısı',

@@ -63,7 +63,7 @@ const translation = {
     run: 'Çalıştır',
     firecrawlTitle: '🔥Firecrawl ile web içeriğini çıkarın',
     firecrawlDoc: 'Firecrawl dokümanları',
-    firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+    firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
     options: 'Seçenekler',
     crawlSubPage: 'Alt sayfaları tarayın',
     limit: 'Sınır',

@@ -456,7 +456,7 @@ const translation = {
   apiBasedExtension: {
     title: 'API-розширення забезпечують централізоване керування API, спрощуючи конфігурацію для зручного використання в різних програмах Dify.',
     link: 'Дізнайтеся, як розробити власне розширення API.',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'Додати розширення API',
     selector: {
       title: 'Розширення API',

@@ -60,7 +60,7 @@ const translation = {
     unknownError: 'Невідома помилка',
     maxDepth: 'Максимальна глибина',
     crawlSubPage: 'Сканування підсторінок',
-    firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+    firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
     preview: 'Попередній перегляд',
     fireCrawlNotConfigured: 'Firecrawl не налаштовано',
     includeOnlyPaths: 'Включати лише контури',

@@ -455,7 +455,7 @@ const translation = {
   apiBasedExtension: {
     title: 'Các tiện ích API cung cấp quản lý API tập trung, giúp cấu hình dễ dàng sử dụng trên các ứng dụng của Dify.',
     link: 'Tìm hiểu cách phát triển Phần mở rộng API của riêng bạn.',
-    linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
     add: 'Thêm Phần mở rộng API',
     selector: {
       title: 'Phần mở rộng API',

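Every hunk above applies the same two mechanical URL substitutions across the locale modules: the API-extension guide moves from https://docs.dify.ai/features/extension/api_based_extension to https://docs.dify.ai/en/guides/extension/api-based-extension/README, and the sync-from-website guide moves from https://docs.dify.ai/guides/knowledge-base/sync-from-website to https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website. A repo-wide rewrite like this is usually scripted rather than hand-edited; the sketch below shows one way it could be done. It is an assumption for illustration — neither the script nor the web/i18n path convention is part of this commit.

// migrate-doc-links.ts — hypothetical codemod, not part of this commit.
// Applies the two URL substitutions shown above to every TypeScript
// locale module under an assumed web/i18n directory.
import * as fs from 'node:fs'
import * as path from 'node:path'

const URL_MAP: Record<string, string> = {
  'https://docs.dify.ai/features/extension/api_based_extension':
    'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
  'https://docs.dify.ai/guides/knowledge-base/sync-from-website':
    'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
}

// Recursively collect every file path below a directory.
function walk(dir: string): string[] {
  return fs.readdirSync(dir, { withFileTypes: true }).flatMap((entry) => {
    const full = path.join(dir, entry.name)
    return entry.isDirectory() ? walk(full) : [full]
  })
}

for (const file of walk('web/i18n')) {
  if (!file.endsWith('.ts')) continue
  const before = fs.readFileSync(file, 'utf8')
  let after = before
  for (const [oldUrl, newUrl] of Object.entries(URL_MAP)) {
    // split/join performs a global plain-string replace, so the URLs
    // need no regex escaping.
    after = after.split(oldUrl).join(newUrl)
  }
  if (after !== before) fs.writeFileSync(file, after)
}

One caveat a table-driven rewrite would surface: a single mapping cannot send the same source URL to two different targets, so any per-locale divergence (such as the Thai apiBasedExtension hunk above) would have to be a deliberate follow-up edit.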