diff --git a/api/app.py b/api/app.py
index 4f393f6c20..536a9a17b9 100644
--- a/api/app.py
+++ b/api/app.py
@@ -1,4 +1,39 @@
 import os
+
+
+def _setup_gevent():
+    """Do gevent monkey patching.
+
+    This function should be called as early as possible. Ideally
+    it should be the first statement in the entrypoint file.
+    """
+    # It seems that JetBrains Python debugger does not work well with gevent,
+    # so we need to disable gevent in debug mode.
+    # If you are using debugpy and set GEVENT_SUPPORT=True, you can debug with gevent.
+    if (flask_debug := os.environ.get("FLASK_DEBUG", "0")) and flask_debug.lower() not in {"false", "0", "no"}:
+        return
+    if os.environ.get("GEVENT_SUPPORT", "0") == "0":
+        return
+
+    from gevent import monkey
+
+    # gevent
+    monkey.patch_all()
+
+    from grpc.experimental import gevent as grpc_gevent  # type: ignore
+
+    # grpc gevent
+    grpc_gevent.init_gevent()
+
+    import psycogreen.gevent  # type: ignore
+
+    psycogreen.gevent.patch_psycopg()
+
+
+_setup_gevent()
+
 import sys
@@ -14,24 +49,6 @@ if is_db_command():
     app = create_migrations_app()
 else:
-    # It seems that JetBrains Python debugger does not work well with gevent,
-    # so we need to disable gevent in debug mode.
-    # If you are using debugpy and set GEVENT_SUPPORT=True, you can debug with gevent.
-    if (flask_debug := os.environ.get("FLASK_DEBUG", "0")) and flask_debug.lower() in {"false", "0", "no"}:
-        from gevent import monkey
-
-        # gevent
-        monkey.patch_all()
-
-        from grpc.experimental import gevent as grpc_gevent  # type: ignore
-
-        # grpc gevent
-        grpc_gevent.init_gevent()
-
-        import psycogreen.gevent  # type: ignore
-
-        psycogreen.gevent.patch_psycopg()
-
     from app_factory import create_app
 
     app = create_app()
diff --git a/api/configs/middleware/storage/amazon_s3_storage_config.py b/api/configs/middleware/storage/amazon_s3_storage_config.py
index f2d94b12ff..e14c210718 100644
--- a/api/configs/middleware/storage/amazon_s3_storage_config.py
+++ b/api/configs/middleware/storage/amazon_s3_storage_config.py
@@ -1,4 +1,4 @@
-from typing import Optional
+from typing import Literal, Optional
 
 from pydantic import Field
 from pydantic_settings import BaseSettings
@@ -34,7 +34,7 @@ class S3StorageConfig(BaseSettings):
         default=None,
     )
 
-    S3_ADDRESS_STYLE: str = Field(
+    S3_ADDRESS_STYLE: Literal["auto", "virtual", "path"] = Field(
         description="S3 addressing style: 'auto', 'path', or 'virtual'",
         default="auto",
     )
diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py
index c55d3fbb66..e9d11c27be 100644
--- a/api/controllers/console/__init__.py
+++ b/api/controllers/console/__init__.py
@@ -63,6 +63,7 @@ from .app import (
     statistic,
     workflow,
     workflow_app_log,
+    workflow_draft_variable,
     workflow_run,
     workflow_statistic,
 )
diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py
index cbbdd324ba..45741a6d5a 100644
--- a/api/controllers/console/app/workflow.py
+++ b/api/controllers/console/app/workflow.py
@@ -731,6 +731,27 @@ class WorkflowByIdApi(Resource):
         return None, 204
 
 
+class DraftWorkflowNodeLastRunApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
+    @marshal_with(workflow_run_node_execution_fields)
+    def get(self, app_model: App, node_id: str):
+        srv = WorkflowService()
+        workflow = srv.get_draft_workflow(app_model)
+        if not workflow:
+            raise NotFound("Workflow not found")
+        node_exec = srv.get_node_last_run(
+            app_model=app_model,
workflow=workflow, + node_id=node_id, + ) + if node_exec is None: + raise NotFound("last run not found") + return node_exec + + api.add_resource( DraftWorkflowApi, "/apps//workflows/draft", @@ -795,3 +816,7 @@ api.add_resource( WorkflowByIdApi, "/apps//workflows/", ) +api.add_resource( + DraftWorkflowNodeLastRunApi, + "/apps//workflows/draft/nodes//last-run", +) diff --git a/api/controllers/console/app/workflow_draft_variable.py b/api/controllers/console/app/workflow_draft_variable.py new file mode 100644 index 0000000000..028ea84114 --- /dev/null +++ b/api/controllers/console/app/workflow_draft_variable.py @@ -0,0 +1,319 @@ +import logging +from typing import NoReturn + +from flask import Response +from flask_restful import Resource, fields, inputs, marshal_with, reqparse +from sqlalchemy.orm import Session +from werkzeug.exceptions import Forbidden + +from controllers.console import api +from controllers.console.app.error import ( + DraftWorkflowNotExist, +) +from controllers.console.app.wraps import get_app_model +from controllers.console.wraps import account_initialization_required, setup_required +from controllers.web.error import InvalidArgumentError, NotFoundError +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID +from factories.variable_factory import build_segment +from libs.login import current_user, login_required +from models import App, AppMode, db +from models.workflow import WorkflowDraftVariable +from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService +from services.workflow_service import WorkflowService + +logger = logging.getLogger(__name__) + + +def _create_pagination_parser(): + parser = reqparse.RequestParser() + parser.add_argument( + "page", + type=inputs.int_range(1, 100_000), + required=False, + default=1, + location="args", + help="the page of data requested", + ) + parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args") + return parser + + +_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS = { + "id": fields.String, + "type": fields.String(attribute=lambda model: model.get_variable_type()), + "name": fields.String, + "description": fields.String, + "selector": fields.List(fields.String, attribute=lambda model: model.get_selector()), + "value_type": fields.String, + "edited": fields.Boolean(attribute=lambda model: model.edited), + "visible": fields.Boolean, +} + +_WORKFLOW_DRAFT_VARIABLE_FIELDS = dict( + _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS, + value=fields.Raw(attribute=lambda variable: variable.get_value().value), +) + +_WORKFLOW_DRAFT_ENV_VARIABLE_FIELDS = { + "id": fields.String, + "type": fields.String(attribute=lambda _: "env"), + "name": fields.String, + "description": fields.String, + "selector": fields.List(fields.String, attribute=lambda model: model.get_selector()), + "value_type": fields.String, + "edited": fields.Boolean(attribute=lambda model: model.edited), + "visible": fields.Boolean, +} + +_WORKFLOW_DRAFT_ENV_VARIABLE_LIST_FIELDS = { + "items": fields.List(fields.Nested(_WORKFLOW_DRAFT_ENV_VARIABLE_FIELDS)), +} + + +def _get_items(var_list: WorkflowDraftVariableList) -> list[WorkflowDraftVariable]: + return var_list.variables + + +_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS = { + "items": fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS), attribute=_get_items), + "total": fields.Raw(), +} + +_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS = { + "items": 
fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_FIELDS), attribute=_get_items),
+}
+
+
+def _api_prerequisite(f):
+    """Common prerequisites for all draft workflow variable APIs.
+
+    It ensures the following conditions are satisfied:
+
+    - Dify has been properly set up.
+    - The requesting user has logged in and completed account initialization.
+    - The requested app is a workflow or a chat flow.
+    - The requesting user has edit permission for the app.
+    """
+
+    @setup_required
+    @login_required
+    @account_initialization_required
+    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
+    def wrapper(*args, **kwargs):
+        if not current_user.is_editor:
+            raise Forbidden()
+        return f(*args, **kwargs)
+
+    return wrapper
+
+
+class WorkflowVariableCollectionApi(Resource):
+    @_api_prerequisite
+    @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
+    def get(self, app_model: App):
+        """
+        Get draft workflow variables.
+        """
+        parser = _create_pagination_parser()
+        args = parser.parse_args()
+
+        # ensure a draft workflow exists for this app_model
+        workflow_service = WorkflowService()
+        workflow_exist = workflow_service.is_workflow_exist(app_model=app_model)
+        if not workflow_exist:
+            raise DraftWorkflowNotExist()
+
+        # fetch the draft variables for this app_model
+        with Session(bind=db.engine, expire_on_commit=False) as session:
+            draft_var_srv = WorkflowDraftVariableService(
+                session=session,
+            )
+            workflow_vars = draft_var_srv.list_variables_without_values(
+                app_id=app_model.id,
+                page=args.page,
+                limit=args.limit,
+            )
+
+        return workflow_vars
+
+    @_api_prerequisite
+    def delete(self, app_model: App):
+        draft_var_srv = WorkflowDraftVariableService(
+            session=db.session,
+        )
+        draft_var_srv.delete_workflow_variables(app_model.id)
+        db.session.commit()
+        return Response("", 204)
+
+
+def validate_node_id(node_id: str) -> NoReturn | None:
+    if node_id in [
+        CONVERSATION_VARIABLE_NODE_ID,
+        SYSTEM_VARIABLE_NODE_ID,
+    ]:
+        # NOTE(QuantumGhost): While we store system and conversation variables as node variables
+        # with specific `node_id`s in the database, we still want to keep the APIs separate. By disallowing
+        # access to system and conversation variables in `NodeVariableCollectionApi`,
+        # we reduce the risk of API users depending on the implementation details of this API.
+ # + # ref: [Hyrum's Law](https://www.hyrumslaw.com/) + + raise InvalidArgumentError( + f"invalid node_id, please use correspond api for conversation and system variables, node_id={node_id}", + ) + return None + + +class NodeVariableCollectionApi(Resource): + @_api_prerequisite + @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + def get(self, app_model: App, node_id: str): + validate_node_id(node_id) + with Session(bind=db.engine, expire_on_commit=False) as session: + draft_var_srv = WorkflowDraftVariableService( + session=session, + ) + node_vars = draft_var_srv.list_node_variables(app_model.id, node_id) + + return node_vars + + @_api_prerequisite + def delete(self, app_model: App, node_id: str): + validate_node_id(node_id) + srv = WorkflowDraftVariableService(db.session) + srv.delete_node_variables(app_model.id, node_id) + db.session.commit() + return Response("", 204) + + +class VariableApi(Resource): + _PATCH_NAME_FIELD = "name" + _PATCH_VALUE_FIELD = "value" + + @_api_prerequisite + @marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS) + def get(self, app_model: App, variable_id: str): + draft_var_srv = WorkflowDraftVariableService( + session=db.session, + ) + variable = draft_var_srv.get_variable(variable_id=variable_id) + if variable is None: + raise NotFoundError(description=f"variable not found, id={variable_id}") + if variable.app_id != app_model.id: + raise NotFoundError(description=f"variable not found, id={variable_id}") + return variable + + @_api_prerequisite + @marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS) + def patch(self, app_model: App, variable_id: str): + parser = reqparse.RequestParser() + parser.add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json") + parser.add_argument(self._PATCH_VALUE_FIELD, type=build_segment, required=False, nullable=True, location="json") + + draft_var_srv = WorkflowDraftVariableService( + session=db.session, + ) + args = parser.parse_args(strict=True) + + variable = draft_var_srv.get_variable(variable_id=variable_id) + if variable is None: + raise NotFoundError(description=f"variable not found, id={variable_id}") + if variable.app_id != app_model.id: + raise NotFoundError(description=f"variable not found, id={variable_id}") + + new_name = args.get(self._PATCH_NAME_FIELD, None) + new_value = args.get(self._PATCH_VALUE_FIELD, None) + + if new_name is None and new_value is None: + return variable + draft_var_srv.update_variable(variable, name=new_name, value=new_value) + db.session.commit() + return variable + + @_api_prerequisite + def delete(self, app_model: App, variable_id: str): + draft_var_srv = WorkflowDraftVariableService( + session=db.session, + ) + variable = draft_var_srv.get_variable(variable_id=variable_id) + if variable is None: + raise NotFoundError(description=f"variable not found, id={variable_id}") + if variable.app_id != app_model.id: + raise NotFoundError(description=f"variable not found, id={variable_id}") + draft_var_srv.delete_variable(variable) + db.session.commit() + return Response("", 204) + + +def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList: + with Session(bind=db.engine, expire_on_commit=False) as session: + draft_var_srv = WorkflowDraftVariableService( + session=session, + ) + if node_id == CONVERSATION_VARIABLE_NODE_ID: + draft_vars = draft_var_srv.list_conversation_variables(app_model.id) + elif node_id == SYSTEM_VARIABLE_NODE_ID: + draft_vars = draft_var_srv.list_system_variables(app_model.id) + else: + draft_vars = 
draft_var_srv.list_node_variables(app_id=app_model.id, node_id=node_id) + return draft_vars + + +class ConversationVariableCollectionApi(Resource): + @_api_prerequisite + @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + def get(self, app_model: App): + return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID) + + +class SystemVariableCollectionApi(Resource): + @_api_prerequisite + @marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + def get(self, app_model: App): + return _get_variable_list(app_model, SYSTEM_VARIABLE_NODE_ID) + + +class EnvironmentVariableCollectionApi(Resource): + @_api_prerequisite + def get(self, app_model: App): + """ + Get draft workflow + """ + # fetch draft workflow by app_model + workflow_service = WorkflowService() + workflow = workflow_service.get_draft_workflow(app_model=app_model) + if workflow is None: + raise DraftWorkflowNotExist() + + env_vars = workflow.environment_variables + env_vars_list = [] + for v in env_vars: + env_vars_list.append( + { + "id": v.id, + "type": "env", + "name": v.name, + "description": v.description, + "selector": v.selector, + "value_type": v.value_type.value, + "value": v.value, + # Do not track edited for env vars. + "edited": False, + "visible": True, + "editable": True, + } + ) + + return {"items": env_vars_list} + + +api.add_resource( + WorkflowVariableCollectionApi, + "/apps//workflows/draft/variables", +) +api.add_resource(NodeVariableCollectionApi, "/apps//workflows/draft/nodes//variables") +api.add_resource(VariableApi, "/apps//workflows/draft/variables/") + +api.add_resource(ConversationVariableCollectionApi, "/apps//workflows/draft/conversation-variables") +api.add_resource(SystemVariableCollectionApi, "/apps//workflows/draft/system-variables") +api.add_resource(EnvironmentVariableCollectionApi, "/apps//workflows/draft/environment-variables") diff --git a/api/controllers/console/app/workflow_draft_variables_test.py b/api/controllers/console/app/workflow_draft_variables_test.py new file mode 100644 index 0000000000..ce6a447306 --- /dev/null +++ b/api/controllers/console/app/workflow_draft_variables_test.py @@ -0,0 +1,196 @@ +import datetime +import uuid +from collections import OrderedDict +from typing import NamedTuple + +from flask_restful import marshal + +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID +from factories.variable_factory import build_segment +from models.workflow import WorkflowDraftVariable +from services.workflow_draft_variable_service import WorkflowDraftVariableList + +from .workflow_draft_variable import ( + _WORKFLOW_DRAFT_VARIABLE_FIELDS, + _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS, + _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS, + _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS, +) + +_TEST_APP_ID = "test_app_id" + + +class TestWorkflowDraftVariableFields: + def test_conversation_variable(self): + conv_var = WorkflowDraftVariable.new_conversation_variable( + app_id=_TEST_APP_ID, name="conv_var", value=build_segment(1) + ) + + conv_var.id = str(uuid.uuid4()) + conv_var.visible = True + + expected_without_value = OrderedDict( + { + "id": str(conv_var.id), + "type": conv_var.get_variable_type().value, + "name": "conv_var", + "description": "", + "selector": [CONVERSATION_VARIABLE_NODE_ID, "conv_var"], + "value_type": "number", + "edited": False, + "visible": True, + } + ) + + assert marshal(conv_var, _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS) == expected_without_value + expected_with_value = expected_without_value.copy() + 
expected_with_value["value"] = 1 + assert marshal(conv_var, _WORKFLOW_DRAFT_VARIABLE_FIELDS) == expected_with_value + + def test_create_sys_variable(self): + sys_var = WorkflowDraftVariable.new_sys_variable( + app_id=_TEST_APP_ID, + name="sys_var", + value=build_segment("a"), + editable=True, + ) + + sys_var.id = str(uuid.uuid4()) + sys_var.last_edited_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + sys_var.visible = True + + expected_without_value = OrderedDict( + { + "id": str(sys_var.id), + "type": sys_var.get_variable_type().value, + "name": "sys_var", + "description": "", + "selector": [SYSTEM_VARIABLE_NODE_ID, "sys_var"], + "value_type": "string", + "edited": True, + "visible": True, + } + ) + assert marshal(sys_var, _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS) == expected_without_value + expected_with_value = expected_without_value.copy() + expected_with_value["value"] = "a" + assert marshal(sys_var, _WORKFLOW_DRAFT_VARIABLE_FIELDS) == expected_with_value + + def test_node_variable(self): + node_var = WorkflowDraftVariable.new_node_variable( + app_id=_TEST_APP_ID, + node_id="test_node", + name="node_var", + value=build_segment([1, "a"]), + visible=False, + ) + + node_var.id = str(uuid.uuid4()) + node_var.last_edited_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None) + + expected_without_value = OrderedDict( + { + "id": str(node_var.id), + "type": node_var.get_variable_type().value, + "name": "node_var", + "description": "", + "selector": ["test_node", "node_var"], + "value_type": "array[any]", + "edited": True, + "visible": False, + } + ) + + assert marshal(node_var, _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS) == expected_without_value + expected_with_value = expected_without_value.copy() + expected_with_value["value"] = [1, "a"] + assert marshal(node_var, _WORKFLOW_DRAFT_VARIABLE_FIELDS) == expected_with_value + + +class TestWorkflowDraftVariableList: + def test_workflow_draft_variable_list(self): + class TestCase(NamedTuple): + name: str + var_list: WorkflowDraftVariableList + expected: dict + + node_var = WorkflowDraftVariable.new_node_variable( + app_id=_TEST_APP_ID, + node_id="test_node", + name="test_var", + value=build_segment("a"), + visible=True, + ) + node_var.id = str(uuid.uuid4()) + node_var_dict = OrderedDict( + { + "id": str(node_var.id), + "type": node_var.get_variable_type().value, + "name": "test_var", + "description": "", + "selector": ["test_node", "test_var"], + "value_type": "string", + "edited": False, + "visible": True, + } + ) + + cases = [ + TestCase( + name="empty variable list", + var_list=WorkflowDraftVariableList(variables=[]), + expected=OrderedDict( + { + "items": [], + "total": None, + } + ), + ), + TestCase( + name="empty variable list with total", + var_list=WorkflowDraftVariableList(variables=[], total=10), + expected=OrderedDict( + { + "items": [], + "total": 10, + } + ), + ), + TestCase( + name="non-empty variable list", + var_list=WorkflowDraftVariableList(variables=[node_var], total=None), + expected=OrderedDict( + { + "items": [node_var_dict], + "total": None, + } + ), + ), + TestCase( + name="non-empty variable list with total", + var_list=WorkflowDraftVariableList(variables=[node_var], total=10), + expected=OrderedDict( + { + "items": [node_var_dict], + "total": 10, + } + ), + ), + ] + + for idx, case in enumerate(cases, 1): + assert marshal(case.var_list, _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS) == case.expected, ( + f"Test case {idx} failed, {case.name=}" + ) + + +def 
test_workflow_node_variables_fields(): + conv_var = WorkflowDraftVariable.new_conversation_variable( + app_id=_TEST_APP_ID, name="conv_var", value=build_segment(1) + ) + resp = marshal(WorkflowDraftVariableList(variables=[conv_var]), _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS) + assert isinstance(resp, dict) + assert len(resp["items"]) == 1 + item_dict = resp["items"][0] + assert item_dict["name"] == "conv_var" + assert item_dict["value"] == 1 diff --git a/api/controllers/console/app/wraps.py b/api/controllers/console/app/wraps.py index 9ad8c15847..03b60610aa 100644 --- a/api/controllers/console/app/wraps.py +++ b/api/controllers/console/app/wraps.py @@ -8,6 +8,15 @@ from libs.login import current_user from models import App, AppMode +def _load_app_model(app_id: str) -> Optional[App]: + app_model = ( + db.session.query(App) + .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") + .first() + ) + return app_model + + def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode], None] = None): def decorator(view_func): @wraps(view_func) @@ -20,11 +29,7 @@ def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[ del kwargs["app_id"] - app_model = ( - db.session.query(App) - .filter(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal") - .first() - ) + app_model = _load_app_model(app_id) if not app_model: raise AppNotFoundError() diff --git a/api/controllers/web/error.py b/api/controllers/web/error.py index 4371e679db..b0192d5494 100644 --- a/api/controllers/web/error.py +++ b/api/controllers/web/error.py @@ -139,3 +139,13 @@ class InvokeRateLimitError(BaseHTTPException): error_code = "rate_limit_error" description = "Rate Limit Error" code = 429 + + +class NotFoundError(BaseHTTPException): + error_code = "unknown" + code = 404 + + +class InvalidArgumentError(BaseHTTPException): + error_code = "invalid_param" + code = 400 diff --git a/api/core/app/apps/advanced_chat/app_runner.py b/api/core/app/apps/advanced_chat/app_runner.py index c83e06bf15..18e8310793 100644 --- a/api/core/app/apps/advanced_chat/app_runner.py +++ b/api/core/app/apps/advanced_chat/app_runner.py @@ -48,6 +48,9 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner): self.message = message self._dialogue_count = dialogue_count + def _get_app_id(self) -> str: + return self.application_generate_entity.app_config.app_id + def run(self) -> None: app_config = self.application_generate_entity.app_config app_config = cast(AdvancedChatAppConfig, app_config) diff --git a/api/core/app/apps/workflow/app_runner.py b/api/core/app/apps/workflow/app_runner.py index b38ee18ac4..13cf4581ce 100644 --- a/api/core/app/apps/workflow/app_runner.py +++ b/api/core/app/apps/workflow/app_runner.py @@ -41,6 +41,9 @@ class WorkflowAppRunner(WorkflowBasedAppRunner): self.queue_manager = queue_manager self.workflow_thread_pool_id = workflow_thread_pool_id + def _get_app_id(self) -> str: + return self.application_generate_entity.app_config.app_id + def run(self) -> None: """ Run application diff --git a/api/core/app/apps/workflow_app_runner.py b/api/core/app/apps/workflow_app_runner.py index 0884fac4a9..1e6c92d59b 100644 --- a/api/core/app/apps/workflow_app_runner.py +++ b/api/core/app/apps/workflow_app_runner.py @@ -1,6 +1,8 @@ from collections.abc import Mapping from typing import Any, Optional, cast +from sqlalchemy.orm import Session + from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom from 
core.app.apps.base_app_runner import AppRunner from core.app.entities.queue_entities import ( @@ -66,12 +68,19 @@ from core.workflow.workflow_entry import WorkflowEntry from extensions.ext_database import db from models.model import App from models.workflow import Workflow +from services.workflow_draft_variable_service import ( + WorkflowDraftVariableService, + should_save_output_variables_for_draft, +) class WorkflowBasedAppRunner(AppRunner): def __init__(self, queue_manager: AppQueueManager): self.queue_manager = queue_manager + def _get_app_id(self) -> str: + raise NotImplementedError("not implemented") + def _init_graph(self, graph_config: Mapping[str, Any]) -> Graph: """ Init graph @@ -376,6 +385,24 @@ class WorkflowBasedAppRunner(AppRunner): in_loop_id=event.in_loop_id, ) ) + + # FIXME(QuantumGhost): rely on private state of queue_manager is not ideal. + should_save = should_save_output_variables_for_draft( + self.queue_manager._invoke_from, + loop_id=event.in_loop_id, + iteration_id=event.in_iteration_id, + ) + if should_save and outputs is not None: + with Session(bind=db.engine) as session: + draft_var_srv = WorkflowDraftVariableService(session) + draft_var_srv.save_output_variables( + app_id=self._get_app_id(), + node_id=event.node_id, + node_type=event.node_type, + output=outputs, + ) + session.commit() + elif isinstance(event, NodeRunFailedEvent): self._publish_event( QueueNodeFailedEvent( diff --git a/api/core/app/entities/app_invoke_entities.py b/api/core/app/entities/app_invoke_entities.py index 56e6b46a60..23ba05293a 100644 --- a/api/core/app/entities/app_invoke_entities.py +++ b/api/core/app/entities/app_invoke_entities.py @@ -17,9 +17,24 @@ class InvokeFrom(Enum): Invoke From. """ + # SERVICE_API indicates that this invocation is from an API call to Dify app. + # + # Description of service api in Dify docs: + # https://docs.dify.ai/en/guides/application-publishing/developing-with-apis SERVICE_API = "service-api" + + # WEB_APP indicates that this invocation is from + # the web app of the workflow (or chatflow). + # + # Description of web app in Dify docs: + # https://docs.dify.ai/en/guides/application-publishing/launch-your-webapp-quickly/README WEB_APP = "web-app" + + # EXPLORE indicates that this invocation is from + # the workflow (or chatflow) explore page. EXPLORE = "explore" + # DEBUGGER indicates that this invocation is from + # the workflow (or chatflow) edit page. DEBUGGER = "debugger" @classmethod diff --git a/api/core/file/constants.py b/api/core/file/constants.py index ce1d238e93..81ad59f4c0 100644 --- a/api/core/file/constants.py +++ b/api/core/file/constants.py @@ -1 +1,21 @@ +from typing import Any + FILE_MODEL_IDENTITY = "__dify__file__" + +# DUMMY_OUTPUT_IDENTITY is a placeholder output for workflow nodes. +# Its sole possible value is `None`. +# +# This is used to signal the execution of a workflow node when it has no other outputs. 
+_DUMMY_OUTPUT_IDENTITY = "__dummy__" +_DUMMY_OUTPUT_VALUE: None = None + + +def add_dummy_output(original: dict[str, Any] | None) -> dict[str, Any]: + if original is None: + original = {} + original[_DUMMY_OUTPUT_IDENTITY] = _DUMMY_OUTPUT_VALUE + return original + + +def is_dummy_output_variable(name: str) -> bool: + return name == _DUMMY_OUTPUT_IDENTITY diff --git a/api/core/ops/opik_trace/opik_trace.py b/api/core/ops/opik_trace/opik_trace.py index c22df55357..6c159a4831 100644 --- a/api/core/ops/opik_trace/opik_trace.py +++ b/api/core/ops/opik_trace/opik_trace.py @@ -115,6 +115,7 @@ class OpikDataTrace(BaseTraceInstance): "metadata": workflow_metadata, "input": wrap_dict("input", trace_info.workflow_run_inputs), "output": wrap_dict("output", trace_info.workflow_run_outputs), + "thread_id": trace_info.conversation_id, "tags": ["message", "workflow"], "project_name": self.project, } @@ -144,6 +145,7 @@ class OpikDataTrace(BaseTraceInstance): "metadata": workflow_metadata, "input": wrap_dict("input", trace_info.workflow_run_inputs), "output": wrap_dict("output", trace_info.workflow_run_outputs), + "thread_id": trace_info.conversation_id, "tags": ["workflow"], "project_name": self.project, } @@ -306,6 +308,7 @@ class OpikDataTrace(BaseTraceInstance): "metadata": wrap_metadata(metadata), "input": trace_info.inputs, "output": message_data.answer, + "thread_id": message_data.conversation_id, "tags": ["message", str(trace_info.conversation_mode)], "project_name": self.project, } @@ -420,6 +423,7 @@ class OpikDataTrace(BaseTraceInstance): "metadata": wrap_metadata(trace_info.metadata), "input": trace_info.inputs, "output": trace_info.outputs, + "thread_id": trace_info.conversation_id, "tags": ["generate_name"], "project_name": self.project, } diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py index a3424c7421..32301e11e7 100644 --- a/api/core/ops/ops_trace_manager.py +++ b/api/core/ops/ops_trace_manager.py @@ -292,11 +292,14 @@ class OpsTraceManager: :return: """ # auth check - if tracing_provider is not None: + if enabled == True: try: provider_config_map[tracing_provider] except KeyError: raise ValueError(f"Invalid tracing provider: {tracing_provider}") + else: + if tracing_provider is not None: + raise ValueError(f"Invalid tracing provider: {tracing_provider}") app_config: Optional[App] = db.session.query(App).filter(App.id == app_id).first() if not app_config: diff --git a/api/core/workflow/entities/variable_pool.py b/api/core/workflow/entities/variable_pool.py index af26864c01..74540491e5 100644 --- a/api/core/workflow/entities/variable_pool.py +++ b/api/core/workflow/entities/variable_pool.py @@ -7,12 +7,12 @@ from pydantic import BaseModel, Field from core.file import File, FileAttribute, file_manager from core.variables import Segment, SegmentGroup, Variable +from core.variables.consts import MIN_SELECTORS_LENGTH from core.variables.segments import FileSegment, NoneSegment +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID +from core.workflow.enums import SystemVariableKey from factories import variable_factory -from ..constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID -from ..enums import SystemVariableKey - VariableValue = Union[str, int, float, dict, list, File] VARIABLE_PATTERN = re.compile(r"\{\{#([a-zA-Z0-9_]{1,50}(?:\.[a-zA-Z_][a-zA-Z0-9_]{0,29}){1,10})#\}\}") @@ -91,7 +91,7 @@ class VariablePool(BaseModel): Returns: None """ - 
if len(selector) < 2: + if len(selector) < MIN_SELECTORS_LENGTH: raise ValueError("Invalid selector") if isinstance(value, Variable): @@ -118,7 +118,7 @@ class VariablePool(BaseModel): Raises: ValueError: If the selector is invalid. """ - if len(selector) < 2: + if len(selector) < MIN_SELECTORS_LENGTH: return None hash_key = hash(tuple(selector[1:])) diff --git a/api/core/workflow/graph_engine/entities/event.py b/api/core/workflow/graph_engine/entities/event.py index 689a07c4f6..2dc105edfb 100644 --- a/api/core/workflow/graph_engine/entities/event.py +++ b/api/core/workflow/graph_engine/entities/event.py @@ -65,6 +65,8 @@ class BaseNodeEvent(GraphEngineEvent): """iteration id if node is in iteration""" in_loop_id: Optional[str] = None """loop id if node is in loop""" + # The version of the node, or "1" if not specified. + node_version: str = "1" class NodeRunStartedEvent(BaseNodeEvent): diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py index 36273d8ec1..45ccff0368 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -313,6 +313,7 @@ class GraphEngine: parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) raise e @@ -630,6 +631,7 @@ class GraphEngine: parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, agent_strategy=agent_strategy, + node_version=node_instance.version(), ) db.session.close() @@ -688,6 +690,7 @@ class GraphEngine: error=run_result.error or "Unknown error", retry_index=retries, start_at=retry_start_at, + node_version=node_instance.version(), ) time.sleep(retry_interval) break @@ -723,6 +726,7 @@ class GraphEngine: parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) should_continue_retry = False else: @@ -737,6 +741,7 @@ class GraphEngine: parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) should_continue_retry = False elif run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED: @@ -791,6 +796,7 @@ class GraphEngine: parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) should_continue_retry = False @@ -808,6 +814,7 @@ class GraphEngine: parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) elif isinstance(item, RunRetrieverResourceEvent): yield NodeRunRetrieverResourceEvent( @@ -822,6 +829,7 @@ class GraphEngine: parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) except GenerateTaskStoppedError: # trigger node run failed event @@ -838,6 +846,7 @@ class GraphEngine: parallel_start_node_id=parallel_start_node_id, parent_parallel_id=parent_parallel_id, parent_parallel_start_node_id=parent_parallel_start_node_id, + node_version=node_instance.version(), ) return except Exception as e: diff --git 
a/api/core/workflow/nodes/answer/answer_node.py b/api/core/workflow/nodes/answer/answer_node.py index 520cbdbb60..34b7e98710 100644 --- a/api/core/workflow/nodes/answer/answer_node.py +++ b/api/core/workflow/nodes/answer/answer_node.py @@ -18,7 +18,11 @@ from models.workflow import WorkflowNodeExecutionStatus class AnswerNode(BaseNode[AnswerNodeData]): _node_data_cls = AnswerNodeData - _node_type: NodeType = NodeType.ANSWER + _node_type = NodeType.ANSWER + + @classmethod + def version(cls) -> str: + return "1" def _run(self) -> NodeRunResult: """ diff --git a/api/core/workflow/nodes/answer/answer_stream_processor.py b/api/core/workflow/nodes/answer/answer_stream_processor.py index ba6ba16e36..f3e4a62ade 100644 --- a/api/core/workflow/nodes/answer/answer_stream_processor.py +++ b/api/core/workflow/nodes/answer/answer_stream_processor.py @@ -109,6 +109,7 @@ class AnswerStreamProcessor(StreamProcessor): parallel_id=event.parallel_id, parallel_start_node_id=event.parallel_start_node_id, from_variable_selector=[answer_node_id, "answer"], + node_version=event.node_version, ) else: route_chunk = cast(VarGenerateRouteChunk, route_chunk) @@ -134,6 +135,7 @@ class AnswerStreamProcessor(StreamProcessor): route_node_state=event.route_node_state, parallel_id=event.parallel_id, parallel_start_node_id=event.parallel_start_node_id, + node_version=event.node_version, ) self.route_position[answer_node_id] += 1 diff --git a/api/core/workflow/nodes/base/node.py b/api/core/workflow/nodes/base/node.py index e566770870..b2b4fe0cf1 100644 --- a/api/core/workflow/nodes/base/node.py +++ b/api/core/workflow/nodes/base/node.py @@ -1,7 +1,7 @@ import logging from abc import abstractmethod from collections.abc import Generator, Mapping, Sequence -from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar, Union, cast +from typing import TYPE_CHECKING, Any, ClassVar, Generic, Optional, TypeVar, Union, cast from core.workflow.entities.node_entities import NodeRunResult from core.workflow.nodes.enums import CONTINUE_ON_ERROR_NODE_TYPE, RETRY_ON_ERROR_NODE_TYPE, NodeType @@ -23,7 +23,7 @@ GenericNodeData = TypeVar("GenericNodeData", bound=BaseNodeData) class BaseNode(Generic[GenericNodeData]): _node_data_cls: type[GenericNodeData] - _node_type: NodeType + _node_type: ClassVar[NodeType] def __init__( self, @@ -101,9 +101,10 @@ class BaseNode(Generic[GenericNodeData]): raise ValueError("Node ID is required when extracting variable selector to variable mapping.") node_data = cls._node_data_cls(**config.get("data", {})) - return cls._extract_variable_selector_to_variable_mapping( + data = cls._extract_variable_selector_to_variable_mapping( graph_config=graph_config, node_id=node_id, node_data=cast(GenericNodeData, node_data) ) + return data @classmethod def _extract_variable_selector_to_variable_mapping( @@ -139,6 +140,16 @@ class BaseNode(Generic[GenericNodeData]): """ return self._node_type + @classmethod + @abstractmethod + def version(cls) -> str: + """`node_version` returns the version of current node type.""" + # NOTE(QuantumGhost): This should be in sync with `NODE_TYPE_CLASSES_MAPPING`. + # + # If you have introduced a new node type, please add it to `NODE_TYPE_CLASSES_MAPPING` + # in `api/core/workflow/nodes/__init__.py`. 
+ pass + @property def should_continue_on_error(self) -> bool: """judge if should continue on error diff --git a/api/core/workflow/nodes/code/code_node.py b/api/core/workflow/nodes/code/code_node.py index 804c05f9f4..983019c434 100644 --- a/api/core/workflow/nodes/code/code_node.py +++ b/api/core/workflow/nodes/code/code_node.py @@ -40,6 +40,10 @@ class CodeNode(BaseNode[CodeNodeData]): return code_provider.get_default_config() + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: # Get code language code_language = self.node_data.code_language diff --git a/api/core/workflow/nodes/document_extractor/node.py b/api/core/workflow/nodes/document_extractor/node.py index 8fb1baec89..1ce297a578 100644 --- a/api/core/workflow/nodes/document_extractor/node.py +++ b/api/core/workflow/nodes/document_extractor/node.py @@ -44,6 +44,10 @@ class DocumentExtractorNode(BaseNode[DocumentExtractorNodeData]): _node_data_cls = DocumentExtractorNodeData _node_type = NodeType.DOCUMENT_EXTRACTOR + @classmethod + def version(cls) -> str: + return "1" + def _run(self): variable_selector = self.node_data.variable_selector variable = self.graph_runtime_state.variable_pool.get(variable_selector) diff --git a/api/core/workflow/nodes/end/end_node.py b/api/core/workflow/nodes/end/end_node.py index 6acc915ab5..8afbdcbc0e 100644 --- a/api/core/workflow/nodes/end/end_node.py +++ b/api/core/workflow/nodes/end/end_node.py @@ -9,6 +9,10 @@ class EndNode(BaseNode[EndNodeData]): _node_data_cls = EndNodeData _node_type = NodeType.END + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: """ Run node diff --git a/api/core/workflow/nodes/end/end_stream_processor.py b/api/core/workflow/nodes/end/end_stream_processor.py index 3ae5af7137..a6fb2ffc18 100644 --- a/api/core/workflow/nodes/end/end_stream_processor.py +++ b/api/core/workflow/nodes/end/end_stream_processor.py @@ -139,6 +139,7 @@ class EndStreamProcessor(StreamProcessor): route_node_state=event.route_node_state, parallel_id=event.parallel_id, parallel_start_node_id=event.parallel_start_node_id, + node_version=event.node_version, ) self.route_position[end_node_id] += 1 diff --git a/api/core/workflow/nodes/http_request/node.py b/api/core/workflow/nodes/http_request/node.py index 1c82637974..05a0b57171 100644 --- a/api/core/workflow/nodes/http_request/node.py +++ b/api/core/workflow/nodes/http_request/node.py @@ -60,6 +60,10 @@ class HttpRequestNode(BaseNode[HttpRequestNodeData]): }, } + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: process_data = {} try: diff --git a/api/core/workflow/nodes/if_else/if_else_node.py b/api/core/workflow/nodes/if_else/if_else_node.py index cb51b1ddd5..57792ca09a 100644 --- a/api/core/workflow/nodes/if_else/if_else_node.py +++ b/api/core/workflow/nodes/if_else/if_else_node.py @@ -16,6 +16,10 @@ class IfElseNode(BaseNode[IfElseNodeData]): _node_data_cls = IfElseNodeData _node_type = NodeType.IF_ELSE + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: """ Run node diff --git a/api/core/workflow/nodes/iteration/iteration_node.py b/api/core/workflow/nodes/iteration/iteration_node.py index a061dfc354..2e76b5d5c9 100644 --- a/api/core/workflow/nodes/iteration/iteration_node.py +++ b/api/core/workflow/nodes/iteration/iteration_node.py @@ -72,6 +72,10 @@ class IterationNode(BaseNode[IterationNodeData]): }, } + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> Generator[NodeEvent | 
InNodeEvent, None, None]: """ Run the node. diff --git a/api/core/workflow/nodes/iteration/iteration_start_node.py b/api/core/workflow/nodes/iteration/iteration_start_node.py index fe955e47d1..365e8b9ab8 100644 --- a/api/core/workflow/nodes/iteration/iteration_start_node.py +++ b/api/core/workflow/nodes/iteration/iteration_start_node.py @@ -13,6 +13,10 @@ class IterationStartNode(BaseNode[IterationStartNodeData]): _node_data_cls = IterationStartNodeData _node_type = NodeType.ITERATION_START + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: """ Run the node. diff --git a/api/core/workflow/nodes/list_operator/node.py b/api/core/workflow/nodes/list_operator/node.py index 04ccfc5405..ed3a93e4cb 100644 --- a/api/core/workflow/nodes/list_operator/node.py +++ b/api/core/workflow/nodes/list_operator/node.py @@ -16,6 +16,10 @@ class ListOperatorNode(BaseNode[ListOperatorNodeData]): _node_data_cls = ListOperatorNodeData _node_type = NodeType.LIST_OPERATOR + @classmethod + def version(cls) -> str: + return "1" + def _run(self): inputs: dict[str, list] = {} process_data: dict[str, list] = {} diff --git a/api/core/workflow/nodes/llm/node.py b/api/core/workflow/nodes/llm/node.py index eeb44601ec..7f11ff86e1 100644 --- a/api/core/workflow/nodes/llm/node.py +++ b/api/core/workflow/nodes/llm/node.py @@ -148,6 +148,10 @@ class LLMNode(BaseNode[LLMNodeData]): ) self._llm_file_saver = llm_file_saver + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> Generator[NodeEvent | InNodeEvent, None, None]: def process_structured_output(text: str) -> Optional[dict[str, Any]]: """Process structured output if enabled""" diff --git a/api/core/workflow/nodes/loop/loop_end_node.py b/api/core/workflow/nodes/loop/loop_end_node.py index 5d4ce0ccbe..da1e817d75 100644 --- a/api/core/workflow/nodes/loop/loop_end_node.py +++ b/api/core/workflow/nodes/loop/loop_end_node.py @@ -13,6 +13,10 @@ class LoopEndNode(BaseNode[LoopEndNodeData]): _node_data_cls = LoopEndNodeData _node_type = NodeType.LOOP_END + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: """ Run the node. diff --git a/api/core/workflow/nodes/loop/loop_node.py b/api/core/workflow/nodes/loop/loop_node.py index bad3e2b928..420a8e0fc3 100644 --- a/api/core/workflow/nodes/loop/loop_node.py +++ b/api/core/workflow/nodes/loop/loop_node.py @@ -54,6 +54,10 @@ class LoopNode(BaseNode[LoopNodeData]): _node_data_cls = LoopNodeData _node_type = NodeType.LOOP + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> Generator[NodeEvent | InNodeEvent, None, None]: """Run the node.""" # Get inputs diff --git a/api/core/workflow/nodes/loop/loop_start_node.py b/api/core/workflow/nodes/loop/loop_start_node.py index 7cf145e4e5..e0e7f4fe6a 100644 --- a/api/core/workflow/nodes/loop/loop_start_node.py +++ b/api/core/workflow/nodes/loop/loop_start_node.py @@ -13,6 +13,10 @@ class LoopStartNode(BaseNode[LoopStartNodeData]): _node_data_cls = LoopStartNodeData _node_type = NodeType.LOOP_START + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: """ Run the node. 
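As an illustrative aside (not part of the patch): the hunks above add a `version()` classmethod to every built-in node, and the next hunk notes that `NODE_TYPE_CLASSES_MAPPING` must be kept in sync with those classes. Below is a minimal sketch of how a node class is resolved by type and declared version, mirroring the lookup that `WorkflowEntry.single_step_run` performs later in this diff; the helper name `resolve_node_class` and the example config are hypothetical.

from core.workflow.nodes.base import BaseNode
from core.workflow.nodes.enums import NodeType
from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING


def resolve_node_class(node_config: dict) -> type[BaseNode]:
    # The node type and version both live in the node's "data" block of the graph config.
    data = node_config.get("data", {})
    node_type = NodeType(data.get("type"))
    # Nodes saved before explicit versioning default to "1", which matches the value
    # returned by most of the version() implementations added in this patch.
    node_version = data.get("version", "1")
    return NODE_TYPE_CLASSES_MAPPING[node_type][node_version]


# Hypothetical usage, assuming an LLM node entry shaped like the graph data above:
# resolve_node_class({"data": {"type": "llm", "version": "1"}}).version() == "1"
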
diff --git a/api/core/workflow/nodes/node_mapping.py b/api/core/workflow/nodes/node_mapping.py
index 1f1be59542..67cc884f20 100644
--- a/api/core/workflow/nodes/node_mapping.py
+++ b/api/core/workflow/nodes/node_mapping.py
@@ -25,6 +25,11 @@ from core.workflow.nodes.variable_assigner.v2 import VariableAssignerNode as Var
 LATEST_VERSION = "latest"
 
+# NOTE(QuantumGhost): This should be in sync with subclasses of BaseNode.
+# Specifically, if you have introduced new node types, you should add them here.
+#
+# TODO(QuantumGhost): This could be automated with either a metaclass or an `__init_subclass__`
+# hook. Try to avoid duplication of node information.
 NODE_TYPE_CLASSES_MAPPING: Mapping[NodeType, Mapping[str, type[BaseNode]]] = {
     NodeType.START: {
         LATEST_VERSION: StartNode,
diff --git a/api/core/workflow/nodes/start/start_node.py b/api/core/workflow/nodes/start/start_node.py
index 1b47b81517..fa2592842e 100644
--- a/api/core/workflow/nodes/start/start_node.py
+++ b/api/core/workflow/nodes/start/start_node.py
@@ -1,3 +1,4 @@
+from core.file.constants import add_dummy_output
 from core.workflow.constants import SYSTEM_VARIABLE_NODE_ID
 from core.workflow.entities.node_entities import NodeRunResult
 from core.workflow.nodes.base import BaseNode
@@ -10,6 +11,10 @@ class StartNode(BaseNode[StartNodeData]):
     _node_data_cls = StartNodeData
     _node_type = NodeType.START
 
+    @classmethod
+    def version(cls) -> str:
+        return "1"
+
     def _run(self) -> NodeRunResult:
         node_inputs = dict(self.graph_runtime_state.variable_pool.user_inputs)
         system_inputs = self.graph_runtime_state.variable_pool.system_variables
@@ -18,5 +23,9 @@ class StartNode(BaseNode[StartNodeData]):
         # Set system variables as node outputs.
         for var in system_inputs:
             node_inputs[SYSTEM_VARIABLE_NODE_ID + "." + var] = system_inputs[var]
+        outputs = dict(node_inputs)
+        # Need special handling for the `Start` node, as all other output variables
+        # are treated as system variables.
+ add_dummy_output(outputs) - return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=node_inputs, outputs=node_inputs) + return NodeRunResult(status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=node_inputs, outputs=outputs) diff --git a/api/core/workflow/nodes/template_transform/template_transform_node.py b/api/core/workflow/nodes/template_transform/template_transform_node.py index 22a1b21888..6f89c32435 100644 --- a/api/core/workflow/nodes/template_transform/template_transform_node.py +++ b/api/core/workflow/nodes/template_transform/template_transform_node.py @@ -28,6 +28,10 @@ class TemplateTransformNode(BaseNode[TemplateTransformNodeData]): "config": {"variables": [{"variable": "arg1", "value_selector": []}], "template": "{{ arg1 }}"}, } + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: # Get variables variables = {} diff --git a/api/core/workflow/nodes/tool/tool_node.py b/api/core/workflow/nodes/tool/tool_node.py index c72ae5b69b..b0f62269b4 100644 --- a/api/core/workflow/nodes/tool/tool_node.py +++ b/api/core/workflow/nodes/tool/tool_node.py @@ -44,6 +44,10 @@ class ToolNode(BaseNode[ToolNodeData]): _node_data_cls = ToolNodeData _node_type = NodeType.TOOL + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> Generator: """ Run the tool node diff --git a/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py b/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py index 372496a8fa..5973558235 100644 --- a/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py +++ b/api/core/workflow/nodes/variable_aggregator/variable_aggregator_node.py @@ -9,6 +9,10 @@ class VariableAggregatorNode(BaseNode[VariableAssignerNodeData]): _node_data_cls = VariableAssignerNodeData _node_type = NodeType.VARIABLE_AGGREGATOR + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: # Get variables outputs = {} diff --git a/api/core/workflow/nodes/variable_assigner/common/helpers.py b/api/core/workflow/nodes/variable_assigner/common/helpers.py index 8031b57fa8..2455b69025 100644 --- a/api/core/workflow/nodes/variable_assigner/common/helpers.py +++ b/api/core/workflow/nodes/variable_assigner/common/helpers.py @@ -1,7 +1,11 @@ +from collections.abc import Sequence +from typing import Any, TypedDict + from sqlalchemy import select from sqlalchemy.orm import Session -from core.variables import Variable +from core.variables import Segment, SegmentType, Variable +from core.variables.consts import MIN_SELECTORS_LENGTH from core.workflow.nodes.variable_assigner.common.exc import VariableOperatorNodeError from extensions.ext_database import db from models import ConversationVariable @@ -17,3 +21,22 @@ def update_conversation_variable(conversation_id: str, variable: Variable): raise VariableOperatorNodeError("conversation variable not found in the database") row.data = variable.model_dump_json() session.commit() + + +class VariableOutput(TypedDict): + name: str + selector: Sequence[str] + new_value: Any + type: SegmentType + + +def variable_to_output_mapping(selector: Sequence[str], seg: Segment) -> VariableOutput: + if len(selector) < MIN_SELECTORS_LENGTH: + raise Exception("selector too short") + node_id, var_name = selector[:2] + return { + "name": var_name, + "selector": selector[:2], + "new_value": seg.value, + "type": seg.value_type, + } diff --git a/api/core/workflow/nodes/variable_assigner/v1/node.py 
b/api/core/workflow/nodes/variable_assigner/v1/node.py index 7c7f14c0b8..087cc0103f 100644 --- a/api/core/workflow/nodes/variable_assigner/v1/node.py +++ b/api/core/workflow/nodes/variable_assigner/v1/node.py @@ -14,9 +14,14 @@ class VariableAssignerNode(BaseNode[VariableAssignerData]): _node_data_cls = VariableAssignerData _node_type = NodeType.VARIABLE_ASSIGNER + @classmethod + def version(cls) -> str: + return "1" + def _run(self) -> NodeRunResult: + assigned_variable_selector = self.node_data.assigned_variable_selector # Should be String, Number, Object, ArrayString, ArrayNumber, ArrayObject - original_variable = self.graph_runtime_state.variable_pool.get(self.node_data.assigned_variable_selector) + original_variable = self.graph_runtime_state.variable_pool.get(assigned_variable_selector) if not isinstance(original_variable, Variable): raise VariableOperatorNodeError("assigned variable not found") @@ -44,7 +49,7 @@ class VariableAssignerNode(BaseNode[VariableAssignerData]): raise VariableOperatorNodeError(f"unsupported write mode: {self.node_data.write_mode}") # Over write the variable. - self.graph_runtime_state.variable_pool.add(self.node_data.assigned_variable_selector, updated_variable) + self.graph_runtime_state.variable_pool.add(assigned_variable_selector, updated_variable) # TODO: Move database operation to the pipeline. # Update conversation variable. @@ -58,6 +63,14 @@ class VariableAssignerNode(BaseNode[VariableAssignerData]): inputs={ "value": income_value.to_object(), }, + outputs={ + # NOTE(QuantumGhost): although only one variable is updated in `v1.VariableAssignerNode`, + # we still set `output_variables` as a list to ensure the schema of output is + # compatible with `v2.VariableAssignerNode`. + "updated_variables": [ + common_helpers.variable_to_output_mapping(assigned_variable_selector, updated_variable) + ] + }, ) diff --git a/api/core/workflow/nodes/variable_assigner/v2/node.py b/api/core/workflow/nodes/variable_assigner/v2/node.py index 6a7ad86b51..a32825cf8c 100644 --- a/api/core/workflow/nodes/variable_assigner/v2/node.py +++ b/api/core/workflow/nodes/variable_assigner/v2/node.py @@ -29,6 +29,10 @@ class VariableAssignerNode(BaseNode[VariableAssignerNodeData]): _node_data_cls = VariableAssignerNodeData _node_type = NodeType.VARIABLE_ASSIGNER + @classmethod + def version(cls) -> str: + return "2" + def _run(self) -> NodeRunResult: inputs = self.node_data.model_dump() process_data: dict[str, Any] = {} @@ -137,6 +141,13 @@ class VariableAssignerNode(BaseNode[VariableAssignerNodeData]): status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=inputs, process_data=process_data, + outputs={ + "updated_variables": [ + common_helpers.variable_to_output_mapping(selector, seg) + for selector in updated_variable_selectors + if (seg := self.graph_runtime_state.variable_pool.get(selector)) is not None + ], + }, ) def _handle_item( diff --git a/api/core/workflow/workflow_entry.py b/api/core/workflow/workflow_entry.py index 7648947fca..b2cfa23aa2 100644 --- a/api/core/workflow/workflow_entry.py +++ b/api/core/workflow/workflow_entry.py @@ -1,8 +1,8 @@ import logging import time import uuid -from collections.abc import Generator, Mapping, Sequence -from typing import Any, Optional, cast +from collections.abc import Callable, Generator, Mapping, Sequence +from typing import Any, Optional, TypeAlias, TypeVar, cast from configs import dify_config from core.app.apps.base_app_queue_manager import GenerateTaskStoppedError @@ -10,6 +10,7 @@ from core.app.entities.app_invoke_entities import 
InvokeFrom from core.file.models import File from core.workflow.callbacks import WorkflowCallback from core.workflow.constants import ENVIRONMENT_VARIABLE_NODE_ID +from core.workflow.entities.node_entities import NodeRunMetadataKey from core.workflow.entities.variable_pool import VariablePool from core.workflow.errors import WorkflowNodeRunFailedError from core.workflow.graph_engine.entities.event import GraphEngineEvent, GraphRunFailedEvent, InNodeEvent @@ -19,7 +20,7 @@ from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntime from core.workflow.graph_engine.graph_engine import GraphEngine from core.workflow.nodes import NodeType from core.workflow.nodes.base import BaseNode -from core.workflow.nodes.event import NodeEvent +from core.workflow.nodes.event import NodeEvent, RunCompletedEvent from core.workflow.nodes.node_mapping import NODE_TYPE_CLASSES_MAPPING from factories import file_factory from models.enums import UserFrom @@ -120,6 +121,7 @@ class WorkflowEntry: node_id: str, user_id: str, user_inputs: dict, + conversation_variables: dict | None = None, ) -> tuple[BaseNode, Generator[NodeEvent | InNodeEvent, None, None]]: """ Single step run workflow node @@ -144,13 +146,19 @@ class WorkflowEntry: except StopIteration: raise ValueError("node id not found in workflow graph") + node_config_data = node_config.get("data", {}) + # Get node class - node_type = NodeType(node_config.get("data", {}).get("type")) - node_version = node_config.get("data", {}).get("version", "1") + node_type = NodeType(node_config_data.get("type")) + node_version = node_config_data.get("version", "1") node_cls = NODE_TYPE_CLASSES_MAPPING[node_type][node_version] + metadata_attacher = _attach_execution_metadata_based_on_node_config(node_config_data) # init variable pool - variable_pool = VariablePool(environment_variables=workflow.environment_variables) + variable_pool = VariablePool( + environment_variables=workflow.environment_variables, + conversation_variable=conversation_variables or {}, + ) # init graph graph = Graph.init(graph_config=workflow.graph_dict) @@ -188,11 +196,15 @@ class WorkflowEntry: variable_pool=variable_pool, tenant_id=workflow.tenant_id, ) + cls._load_persisted_draft_var_and_populate_pool(app_id=workflow.app_id, variable_pool=variable_pool) + try: # run node generator = node_instance.run() except Exception as e: raise WorkflowNodeRunFailedError(node_instance=node_instance, error=str(e)) + if metadata_attacher: + generator = _wrap_generator(generator, metadata_attacher) return node_instance, generator @classmethod @@ -319,6 +331,16 @@ class WorkflowEntry: return value.to_dict() return value + @classmethod + def _load_persisted_draft_var_and_populate_pool(cls, app_id: str, variable_pool: VariablePool) -> None: + """ + Load persisted draft variables and populate the variable pool. + :param app_id: The application ID. + :param variable_pool: The variable pool to populate. 
+ """ + # TODO(QuantumGhost): + pass + @classmethod def mapping_user_inputs_to_variable_pool( cls, @@ -367,3 +389,61 @@ class WorkflowEntry: # append variable and value to variable pool if variable_node_id != ENVIRONMENT_VARIABLE_NODE_ID: variable_pool.add([variable_node_id] + variable_key_list, input_value) + + +_YieldT_co = TypeVar("_YieldT_co", covariant=True) +_YieldR_co = TypeVar("_YieldR_co", covariant=True) + + +def _wrap_generator( + gen: Generator[_YieldT_co, None, None], + mapper: Callable[[_YieldT_co], _YieldR_co], +) -> Generator[_YieldR_co, None, None]: + for item in gen: + yield mapper(item) + + +_NodeOrInNodeEvent: TypeAlias = NodeEvent | InNodeEvent + + +def _attach_execution_metadata( + extra_metadata: dict[NodeRunMetadataKey, Any], +) -> Callable[[_NodeOrInNodeEvent], _NodeOrInNodeEvent]: + def _execution_metadata_mapper(e: NodeEvent | InNodeEvent) -> NodeEvent | InNodeEvent: + if not isinstance(e, RunCompletedEvent): + return e + run_result = e.run_result + if run_result.metadata is None: + run_result.metadata = {} + for k, v in extra_metadata.items(): + run_result.metadata[k] = v + return e + + return _execution_metadata_mapper + + +def _attach_execution_metadata_based_on_node_config( + node_config: dict, +) -> Callable[[_NodeOrInNodeEvent], _NodeOrInNodeEvent] | None: + in_loop = node_config.get("isInLoop", False) + in_iteration = node_config.get("isInIteration", False) + if in_loop: + loop_id = node_config.get("loop_id") + if loop_id is None: + raise Exception("invalid graph") + return _attach_execution_metadata( + { + NodeRunMetadataKey.LOOP_ID: loop_id, + } + ) + elif in_iteration: + iteration_id = node_config.get("iteration_id") + if iteration_id is None: + raise Exception("invalid graph") + return _attach_execution_metadata( + { + NodeRunMetadataKey.ITERATION_ID: iteration_id, + } + ) + else: + return None diff --git a/api/factories/variable_factory.py b/api/factories/variable_factory.py index 0757496db7..dec6c37c32 100644 --- a/api/factories/variable_factory.py +++ b/api/factories/variable_factory.py @@ -122,6 +122,11 @@ def _build_variable_from_mapping(*, mapping: Mapping[str, Any], selector: Sequen result = result.model_copy(update={"selector": selector}) return cast(Variable, result) + +def infer_segment_type_from_value(value: Any, /) -> SegmentType: + return build_segment(value).value_type + + def build_segment(value: Any, /) -> Segment: if value is None: return NoneSegment() diff --git a/api/models/model.py b/api/models/model.py index ee79fbd6b5..f236981064 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -602,6 +602,14 @@ class InstalledApp(Base): return tenant +class ConversationSource(StrEnum): + """This enumeration is designed for use with `Conversation.from_source`.""" + + # NOTE(QuantumGhost): The enumeration members may not cover all possible cases. + API = "api" + CONSOLE = "console" + + class Conversation(Base): __tablename__ = "conversations" __table_args__ = ( @@ -623,7 +631,14 @@ class Conversation(Base): system_instruction = db.Column(db.Text) system_instruction_tokens = db.Column(db.Integer, nullable=False, server_default=db.text("0")) status = db.Column(db.String(255), nullable=False) + + # The `invoke_from` records how the conversation is created. + # + # Its value corresponds to the members of `InvokeFrom`. + # (api/core/app/entities/app_invoke_entities.py) invoke_from = db.Column(db.String(255), nullable=True) + + # ref: ConversationSource. 
from_source = db.Column(db.String(255), nullable=False) from_end_user_id = db.Column(StringUUID) from_account_id = db.Column(StringUUID) diff --git a/api/models/workflow.py b/api/models/workflow.py index a3855b20bf..0f58714aa5 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -139,6 +139,8 @@ class Workflow(Base): "rag_pipeline_variables", db.Text, nullable=False, server_default="{}" ) + VERSION_DRAFT = "draft" + @classmethod def new( cls, @@ -203,7 +205,9 @@ class Workflow(Base): features["file_upload"]["number_limits"] = image_number_limits features["file_upload"]["allowed_file_upload_methods"] = image_transfer_methods features["file_upload"]["allowed_file_types"] = features["file_upload"].get("allowed_file_types", ["image"]) - features["file_upload"]["allowed_file_extensions"] = [] + features["file_upload"]["allowed_file_extensions"] = features["file_upload"].get( + "allowed_file_extensions", [] + ) del features["file_upload"]["image"] self._features = json.dumps(features) return self._features @@ -376,6 +380,10 @@ class Workflow(Base): ensure_ascii=False, ) + @staticmethod + def version_from_datetime(d: datetime) -> str: + return str(d) + class WorkflowRunStatus(StrEnum): """ @@ -843,7 +851,7 @@ def _naive_utc_datetime(): class WorkflowDraftVariable(Base): @staticmethod - def unique_columns() -> list[str]: + def unique_app_id_node_id_name() -> list[str]: return [ "app_id", "node_id", @@ -851,7 +859,7 @@ class WorkflowDraftVariable(Base): ] __tablename__ = "workflow_draft_variables" - __table_args__ = (UniqueConstraint(*unique_columns()),) + __table_args__ = (UniqueConstraint(*unique_app_id_node_id_name()),) # id is the unique identifier of a draft variable. id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=db.text("uuid_generate_v4()")) @@ -1016,10 +1024,11 @@ class WorkflowDraftVariable(Base): name: str, value: Segment, visible: bool = True, + editable: bool = True, ) -> "WorkflowDraftVariable": variable = cls._new(app_id=app_id, node_id=node_id, name=name, value=value) variable.visible = visible - variable.editable = True + variable.editable = editable return variable @property diff --git a/api/pyproject.toml b/api/pyproject.toml index 26a0bdb11e..311ccdcfa9 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -39,7 +39,7 @@ dependencies = [ "oci~=2.135.1", "openai~=1.61.0", "openpyxl~=3.1.5", - "opik~=1.3.4", + "opik~=1.7.25", "opentelemetry-api==1.27.0", "opentelemetry-distro==0.48b0", "opentelemetry-exporter-otlp==1.27.0", @@ -148,6 +148,8 @@ dev = [ "types-tensorflow~=2.18.0", "types-tqdm~=4.67.0", "types-ujson~=5.10.0", + "boto3-stubs>=1.38.20", + "hypothesis>=6.131.15", ] ############################################################ diff --git a/api/services/errors/app.py b/api/services/errors/app.py index 87e9e9247d..5d348c61be 100644 --- a/api/services/errors/app.py +++ b/api/services/errors/app.py @@ -4,3 +4,7 @@ class MoreLikeThisDisabledError(Exception): class WorkflowHashNotEqualError(Exception): pass + + +class IsDraftWorkflowError(Exception): + pass diff --git a/api/services/workflow_draft_variable_service.py b/api/services/workflow_draft_variable_service.py new file mode 100644 index 0000000000..15ff43b6e5 --- /dev/null +++ b/api/services/workflow_draft_variable_service.py @@ -0,0 +1,388 @@ +import dataclasses +import logging +from collections.abc import Mapping, Sequence +from typing import Any + +from sqlalchemy import orm +from sqlalchemy.dialects.postgresql import insert +from sqlalchemy.orm import Session + +from 
core.app.entities.app_invoke_entities import InvokeFrom +from core.file.constants import is_dummy_output_variable +from core.variables import Segment +from core.variables.consts import MIN_SELECTORS_LENGTH +from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID +from core.workflow.nodes import NodeType +from factories import variable_factory +from models.workflow import WorkflowDraftVariable, is_system_variable_editable + +_logger = logging.getLogger(__name__) + + +@dataclasses.dataclass(frozen=True) +class WorkflowDraftVariableList: + variables: list[WorkflowDraftVariable] + total: int | None = None + + +class WorkflowDraftVariableService: + _session: Session + + def __init__(self, session: Session) -> None: + self._session = session + + def get_variable(self, variable_id: str) -> WorkflowDraftVariable | None: + return self._session.query(WorkflowDraftVariable).filter(WorkflowDraftVariable.id == variable_id).first() + + def save_output_variables(self, app_id: str, node_id: str, node_type: NodeType, output: Mapping[str, Any]): + variable_builder = _DraftVariableBuilder(app_id=app_id) + variable_builder.build(node_id=node_id, node_type=node_type, output=output) + draft_variables = variable_builder.get_variables() + # draft_variables = _build_variables_from_output_mapping(app_id, node_id, node_type, output) + if not draft_variables: + return + + # We may use SQLAlchemy ORM operation here. However, considering the fact that: + # + # 1. The variable saving process writes multiple rows into one table (`workflow_draft_variables`). + # Use batch insertion may increase performance dramatically. + # 2. If we use ORM operation, we need to either: + # + # a. Check the existence for each variable before insertion. + # b. Try insertion first, then do update if insertion fails due to unique index violation. + # + # Neither of the above is satisfactory. + # + # - For implementation "a", we need to issue `2n` sqls for `n` variables in output. + # Besides, it's still suffer from concurrency issues. + # - For implementation "b", we need to issue `n` - `2n` sqls (depending on the existence of + # specific variable), which is lesser than plan "a" but still far from ideal. + # + # 3. We do not need the value of SQL execution, nor do we need populate those values back to ORM model + # instances. + # 4. Batch insertion can be combined with `ON CONFLICT DO UPDATE`, allows us to insert or update + # all variables in one SQL statement, and avoid all problems above. + # + # Given reasons above, we use query builder instead of using ORM layer, + # and rely on dialect specific insert operations. + if node_type == NodeType.CODE: + # Clear existing variable for code node. 
+ self._session.query(WorkflowDraftVariable).filter( + WorkflowDraftVariable.app_id == app_id, + WorkflowDraftVariable.node_id == node_id, + ).delete(synchronize_session=False) + stmt = insert(WorkflowDraftVariable).values([_model_to_insertion_dict(v) for v in draft_variables]) + stmt = stmt.on_conflict_do_update( + index_elements=WorkflowDraftVariable.unique_app_id_node_id_name(), + set_={ + "updated_at": stmt.excluded.updated_at, + "last_edited_at": stmt.excluded.last_edited_at, + "description": stmt.excluded.description, + "value_type": stmt.excluded.value_type, + "value": stmt.excluded.value, + "visible": stmt.excluded.visible, + "editable": stmt.excluded.editable, + }, + ) + self._session.execute(stmt) + + def list_variables_without_values(self, app_id: str, page: int, limit: int) -> WorkflowDraftVariableList: + criteria = WorkflowDraftVariable.app_id == app_id + total = None + query = self._session.query(WorkflowDraftVariable).filter(criteria) + if page == 1: + total = query.count() + variables = ( + # Do not load the `value` field. + query.options(orm.defer(WorkflowDraftVariable.value)) + .order_by(WorkflowDraftVariable.id.desc()) + .limit(limit) + .offset((page - 1) * limit) + .all() + ) + + return WorkflowDraftVariableList(variables=variables, total=total) + + def _list_node_variables(self, app_id: str, node_id: str) -> WorkflowDraftVariableList: + criteria = ( + WorkflowDraftVariable.app_id == app_id, + WorkflowDraftVariable.node_id == node_id, + ) + query = self._session.query(WorkflowDraftVariable).filter(*criteria) + variables = query.order_by(WorkflowDraftVariable.id.desc()).all() + return WorkflowDraftVariableList(variables=variables) + + def list_node_variables(self, app_id: str, node_id: str) -> WorkflowDraftVariableList: + return self._list_node_variables(app_id, node_id) + + def list_conversation_variables(self, app_id: str) -> WorkflowDraftVariableList: + return self._list_node_variables(app_id, CONVERSATION_VARIABLE_NODE_ID) + + def list_system_variables(self, app_id: str) -> WorkflowDraftVariableList: + return self._list_node_variables(app_id, SYSTEM_VARIABLE_NODE_ID) + + def get_conversation_variable(self, app_id: str, name: str) -> WorkflowDraftVariable | None: + return self._get_variable(app_id=app_id, node_id=CONVERSATION_VARIABLE_NODE_ID, name=name) + + def get_system_variable(self, app_id: str, name: str) -> WorkflowDraftVariable | None: + return self._get_variable(app_id=app_id, node_id=SYSTEM_VARIABLE_NODE_ID, name=name) + + def get_node_variable(self, app_id: str, node_id: str, name: str) -> WorkflowDraftVariable | None: + return self._get_variable(app_id, node_id, name) + + def _get_variable(self, app_id: str, node_id: str, name: str) -> WorkflowDraftVariable | None: + variable = ( + self._session.query(WorkflowDraftVariable) + .where( + WorkflowDraftVariable.app_id == app_id, + WorkflowDraftVariable.node_id == node_id, + WorkflowDraftVariable.name == name, + ) + .first() + ) + return variable + + def update_variable( + self, + variable: WorkflowDraftVariable, + name: str | None = None, + value: Segment | None = None, + ) -> WorkflowDraftVariable: + if name is not None: + variable.set_name(name) + if value is not None: + variable.set_value(value) + self._session.flush() + return variable + + def delete_variable(self, variable: WorkflowDraftVariable): + self._session.delete(variable) + + def delete_workflow_variables(self, app_id: str): + ( + self._session.query(WorkflowDraftVariable) + .filter(WorkflowDraftVariable.app_id == app_id) + 
.delete(synchronize_session=False) + ) + + def delete_node_variables(self, app_id: str, node_id: str): + return self._delete_node_variables(app_id, node_id) + + def _delete_node_variables(self, app_id: str, node_id: str): + self._session.query(WorkflowDraftVariable).where( + WorkflowDraftVariable.app_id == app_id, + WorkflowDraftVariable.node_id == node_id, + ).delete() + + +def _model_to_insertion_dict(model: WorkflowDraftVariable) -> dict[str, Any]: + d: dict[str, Any] = { + "app_id": model.app_id, + "last_edited_at": None, + "node_id": model.node_id, + "name": model.name, + "selector": model.selector, + "value_type": model.value_type, + "value": model.value, + } + if model.visible is not None: + d["visible"] = model.visible + if model.editable is not None: + d["editable"] = model.editable + if model.created_at is not None: + d["created_at"] = model.created_at + if model.updated_at is not None: + d["updated_at"] = model.updated_at + if model.description is not None: + d["description"] = model.description + return d + + +def should_save_output_variables_for_draft( + invoke_from: InvokeFrom, loop_id: str | None, iteration_id: str | None +) -> bool: + # Only save output variables for debugging execution of workflow. + if invoke_from != InvokeFrom.DEBUGGER: + return False + + # Currently we do not save output variables for nodes inside loop or iteration. + if loop_id is not None: + return False + if iteration_id is not None: + return False + return True + + +# def should_save_output_variables_for_draft(invoke_from: InvokeFrom, node_exec: WorkflowNodeExecution) -> bool: +# # Only save output variables for debugging execution of workflow. +# if invoke_from != InvokeFrom.DEBUGGER: +# return False +# exec_metadata = node_exec.execution_metadata_dict +# if exec_metadata is None: +# # No execution metadata, assume the node is not in loop or iteration. +# return True +# +# # Currently we do not save output variables for nodes inside loop or iteration. +# loop_id = exec_metadata.get(NodeRunMetadataKey.LOOP_ID) +# if loop_id is not None: +# return False +# iteration_id = exec_metadata.get(NodeRunMetadataKey.ITERATION_ID) +# if iteration_id is not None: +# return False +# return True +# + + +class _DraftVariableBuilder: + _app_id: str + _draft_vars: list[WorkflowDraftVariable] + + def __init__(self, app_id: str): + self._app_id = app_id + self._draft_vars: list[WorkflowDraftVariable] = [] + + def _build_from_variable_assigner_mapping(self, node_id: str, output: Mapping[str, Any]): + updated_variables = output.get("updated_variables", []) + for item in updated_variables: + selector = item.get("selector") + if selector is None: + continue + if len(selector) < MIN_SELECTORS_LENGTH: + raise Exception("selector too short") + # NOTE(QuantumGhost): only the following two kinds of variable could be updated by + # VariableAssigner: ConversationVariable and iteration variable. + # We only save conversation variable here. 
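For reference, the `output` mapping consumed by `_build_from_variable_assigner_mapping` is expected to look roughly like the sketch below. The field names come from the accessors used in this method (`selector`, `name`, `new_value`, `type`); the concrete values, and the assumption that `CONVERSATION_VARIABLE_NODE_ID` is the string `"conversation"`, are illustrative only:

    # Hypothetical VariableAssigner output shape, for illustration.
    example_output = {
        "updated_variables": [
            {
                # [CONVERSATION_VARIABLE_NODE_ID, variable name]; selectors that do
                # not start with the conversation node id are skipped by the builder.
                "selector": ["conversation", "user_name"],
                "name": "user_name",
                "new_value": "Alice",
                # Assumed to equal build_segment("Alice").value_type; the builder
                # raises a value_type mismatch error otherwise.
                "type": "string",
            }
        ]
    }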
+ if selector[0] != CONVERSATION_VARIABLE_NODE_ID: + continue + name = item.get("name") + if name is None: + continue + new_value = item["new_value"] + value_type = item.get("type") + if value_type is None: + continue + var_seg = variable_factory.build_segment(new_value) + if var_seg.value_type != value_type: + raise Exception("value_type mismatch!") + self._draft_vars.append( + WorkflowDraftVariable.new_conversation_variable( + app_id=self._app_id, + name=name, + value=var_seg, + ) + ) + + def _build_variables_from_start_mapping( + self, + node_id: str, + output: Mapping[str, Any], + ): + original_node_id = node_id + for name, value in output.items(): + value_seg = variable_factory.build_segment(value) + if is_dummy_output_variable(name): + self._draft_vars.append( + WorkflowDraftVariable.new_node_variable( + app_id=self._app_id, + node_id=original_node_id, + name=name, + value=value_seg, + visible=False, + editable=False, + ) + ) + else: + self._draft_vars.append( + WorkflowDraftVariable.new_sys_variable( + app_id=self._app_id, + name=name, + value=value_seg, + editable=self._should_variable_be_editable(node_id, name), + ) + ) + + @staticmethod + def _normalize_variable_for_start_node(node_type: NodeType, node_id: str, name: str): + if node_type != NodeType.START: + return node_id, name + + # TODO(QuantumGhost): need special handling for dummy output variable in + # `Start` node. + if not name.startswith(f"{SYSTEM_VARIABLE_NODE_ID}."): + return node_id, name + _logger.debug( + "Normalizing variable: node_type=%s, node_id=%s, name=%s", + node_type, + node_id, + name, + ) + node_id, name_ = name.split(".", maxsplit=1) + return node_id, name_ + + def _build_variables_from_mapping( + self, + node_id: str, + node_type: NodeType, + output: Mapping[str, Any], + ): + for name, value in output.items(): + value_seg = variable_factory.build_segment(value) + self._draft_vars.append( + WorkflowDraftVariable.new_node_variable( + app_id=self._app_id, + node_id=node_id, + name=name, + value=value_seg, + visible=self._should_variable_be_visible(node_type, node_id, name), + ) + ) + + def build( + self, + node_id: str, + node_type: NodeType, + output: Mapping[str, Any], + ): + if node_type == NodeType.VARIABLE_ASSIGNER: + self._build_from_variable_assigner_mapping(node_id, output) + elif node_type == NodeType.START: + self._build_variables_from_start_mapping(node_id, output) + else: + self._build_variables_from_mapping(node_id, node_type, output) + + def get_variables(self) -> Sequence[WorkflowDraftVariable]: + return self._draft_vars + + @staticmethod + def _should_variable_be_editable(node_id: str, name: str) -> bool: + if node_id in (CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID): + return False + if node_id == SYSTEM_VARIABLE_NODE_ID and not is_system_variable_editable(name): + return False + return True + + @staticmethod + def _should_variable_be_visible(node_type: NodeType, node_id: str, name: str) -> bool: + if node_type in (NodeType.IF_ELSE, NodeType.START): + return False + if node_id == SYSTEM_VARIABLE_NODE_ID and not is_system_variable_editable(name): + return False + return True + + # @staticmethod + # def _normalize_variable(node_type: NodeType, node_id: str, name: str) -> tuple[str, str]: + # if node_type != NodeType.START: + # return node_id, name + # + # # TODO(QuantumGhost): need special handling for dummy output variable in + # # `Start` node. 
+ # if not name.startswith(f"{SYSTEM_VARIABLE_NODE_ID}."): + # return node_id, name + # logging.getLogger(__name__).info( + # "Normalizing variable: node_type=%s, node_id=%s, name=%s", + # node_type, + # node_id, + # name, + # ) + # node_id, name_ = name.split(".", maxsplit=1) + # return node_id, name_ diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py index 2f7defa2ad..82a215108b 100644 --- a/api/services/workflow_service.py +++ b/api/services/workflow_service.py @@ -1,7 +1,9 @@ import json +import logging import time from collections.abc import Callable, Generator, Sequence from datetime import UTC, datetime +from inspect import isgenerator from typing import Any, Optional from uuid import uuid4 @@ -10,9 +12,10 @@ from sqlalchemy.orm import Session from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager +from core.app.entities.app_invoke_entities import InvokeFrom from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository from core.variables import Variable -from core.workflow.entities.node_entities import NodeRunResult +from core.workflow.entities.node_entities import NodeRunMetadataKey, NodeRunResult from core.workflow.entities.node_execution_entities import NodeExecution, NodeExecutionStatus from core.workflow.errors import WorkflowNodeRunFailedError from core.workflow.graph_engine.entities.event import InNodeEvent @@ -35,10 +38,11 @@ from models.workflow import ( WorkflowNodeExecutionTriggeredFrom, WorkflowType, ) -from services.errors.app import WorkflowHashNotEqualError +from services.errors.app import IsDraftWorkflowError, WorkflowHashNotEqualError from services.workflow.workflow_converter import WorkflowConverter from .errors.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError +from .workflow_draft_variable_service import WorkflowDraftVariableService, should_save_output_variables_for_draft class WorkflowService: @@ -46,6 +50,33 @@ class WorkflowService: Workflow Service """ + def get_node_last_run(self, app_model: App, workflow: Workflow, node_id: str) -> WorkflowNodeExecution | None: + # TODO(QuantumGhost): This query is not fully covered by index. 
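One way to cover the query noted in the TODO above would be a composite index along these lines (hypothetical; no such index or migration is added in this diff, and the model is assumed to live in `models.workflow` as the surrounding imports suggest):

    from sqlalchemy import Index

    from models.workflow import WorkflowNodeExecution

    # Filter columns first, then created_at DESC to serve the ORDER BY ... LIMIT 1.
    workflow_node_execution_last_run_idx = Index(
        "workflow_node_execution_last_run_idx",
        WorkflowNodeExecution.tenant_id,
        WorkflowNodeExecution.app_id,
        WorkflowNodeExecution.workflow_id,
        WorkflowNodeExecution.node_id,
        WorkflowNodeExecution.created_at.desc(),
    )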
+ criteria = ( + WorkflowNodeExecution.tenant_id == app_model.tenant_id, + WorkflowNodeExecution.app_id == app_model.id, + WorkflowNodeExecution.workflow_id == workflow.id, + WorkflowNodeExecution.node_id == node_id, + ) + node_exec = ( + db.session.query(WorkflowNodeExecution) + .filter(*criteria) + .order_by(WorkflowNodeExecution.created_at.desc()) + .first() + ) + return node_exec + + def is_workflow_exist(self, app_model: App) -> bool: + return ( + db.session.query(Workflow) + .filter( + Workflow.tenant_id == app_model.tenant_id, + Workflow.app_id == app_model.id, + Workflow.version == Workflow.VERSION_DRAFT, + ) + .count() + ) > 0 + def get_draft_workflow(self, app_model: App) -> Optional[Workflow]: """ Get draft workflow @@ -62,6 +93,21 @@ class WorkflowService: # return draft workflow return workflow + def get_published_workflow_by_id(self, app_model: App, workflow_id: str) -> Optional[Workflow]: + # fetch published workflow by workflow_id + workflow = ( + db.session.query(Workflow) + .filter( + Workflow.tenant_id == app_model.tenant_id, + Workflow.app_id == app_model.id, + Workflow.id == workflow_id, + ) + .first() + ) + if workflow.version == Workflow.VERSION_DRAFT: + raise IsDraftWorkflowError(f"Workflow is draft version, id={workflow_id}") + return workflow + def get_published_workflow(self, app_model: App) -> Optional[Workflow]: """ Get published workflow @@ -200,7 +246,7 @@ class WorkflowService: tenant_id=app_model.tenant_id, app_id=app_model.id, type=draft_workflow.type, - version=str(datetime.now(UTC).replace(tzinfo=None)), + version=Workflow.version_from_datetime(datetime.now(UTC).replace(tzinfo=None)), graph=draft_workflow.graph, created_by=account.id, environment_variables=draft_workflow.environment_variables, @@ -263,8 +309,17 @@ class WorkflowService: if not draft_workflow: raise ValueError("Workflow not initialized") + # conv_vars = common_helpers.get_conversation_variables() + # run draft workflow node start_at = time.perf_counter() + with Session(bind=db.engine) as session: + # TODO(QunatumGhost): inject conversation variables + # to variable pool. + draft_var_srv = WorkflowDraftVariableService(session) + + conv_vars_list = draft_var_srv.list_conversation_variables(app_id=app_model.id) + conv_var_mapping = {v.name: v.get_value().value for v in conv_vars_list.variables} node_execution = self._handle_node_run_result( invoke_node_fn=lambda: WorkflowEntry.single_step_run( @@ -272,6 +327,7 @@ class WorkflowService: node_id=node_id, user_inputs=user_inputs, user_id=account.id, + conversation_variables=conv_var_mapping, ), start_at=start_at, node_id=node_id, @@ -291,6 +347,27 @@ class WorkflowService: # Convert node_execution to WorkflowNodeExecution after save workflow_node_execution = repository.to_db_model(node_execution) + output = workflow_node_execution.outputs_dict or {} + + exec_metadata = workflow_node_execution.execution_metadata_dict or {} + + should_save = should_save_output_variables_for_draft( + invoke_from=InvokeFrom.DEBUGGER, + loop_id=exec_metadata.get(NodeRunMetadataKey.LOOP_ID, None), + iteration_id=exec_metadata.get(NodeRunMetadataKey.ITERATION_ID, None), + ) + if not should_save: + return workflow_node_execution + # TODO(QuantumGhost): single step does not include loop_id or iteration_id in execution_metadata. 
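The gate used above is easiest to read as a small truth table. A quick illustration of `should_save_output_variables_for_draft` as defined earlier in this diff (`InvokeFrom.DEBUGGER` appears in the diff; the non-debugger member name is an assumption about the existing enum):

    from core.app.entities.app_invoke_entities import InvokeFrom
    from services.workflow_draft_variable_service import should_save_output_variables_for_draft

    # Draft variables are only persisted for debugger runs outside loops/iterations.
    assert should_save_output_variables_for_draft(InvokeFrom.DEBUGGER, None, None) is True
    assert should_save_output_variables_for_draft(InvokeFrom.DEBUGGER, "loop_1", None) is False
    assert should_save_output_variables_for_draft(InvokeFrom.DEBUGGER, None, "iter_1") is False
    assert should_save_output_variables_for_draft(InvokeFrom.SERVICE_API, None, None) is False  # assumed member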
+ with Session(bind=db.engine) as session: + draft_var_srv = WorkflowDraftVariableService(session) + draft_var_srv.save_output_variables( + app_id=app_model.id, + node_id=workflow_node_execution.node_id, + node_type=NodeType(workflow_node_execution.node_type), + output=output, + ) + session.commit() return workflow_node_execution @@ -325,6 +402,7 @@ class WorkflowService: ) -> NodeExecution: try: node_instance, generator = invoke_node_fn() + generator = _inspect_generator(generator) node_run_result: NodeRunResult | None = None for event in generator: @@ -531,3 +609,19 @@ class WorkflowService: session.delete(workflow) return True + + +def _inspect_generator(gen: Generator[Any] | Any) -> Any: + if not isgenerator(gen): + return gen + + def wrapper(): + for item in gen: + logging.getLogger(__name__).info( + "received generator item, type=%s, value=%s", + type(item), + item, + ) + yield item + + return wrapper() diff --git a/api/tests/integration_tests/conftest.py b/api/tests/integration_tests/conftest.py index 6e3ab4b74b..477a91440c 100644 --- a/api/tests/integration_tests/conftest.py +++ b/api/tests/integration_tests/conftest.py @@ -1,4 +1,16 @@ import os +import random +import secrets +from collections.abc import Generator + +import pytest +from flask import Flask +from flask.testing import FlaskClient +from sqlalchemy.orm import Session + +from app_factory import create_app +from models import Account, DifySetup, Tenant, TenantAccountJoin, db +from services.account_service import AccountService, RegisterService # Getting the absolute path of the current file's directory ABS_PATH = os.path.dirname(os.path.abspath(__file__)) @@ -17,3 +29,61 @@ def _load_env() -> None: _load_env() + +_CACHED_APP = create_app() + + +@pytest.fixture +def flask_app() -> Flask: + return _CACHED_APP + + +@pytest.fixture(scope="session") +def setup_account(request) -> Generator[Account, None, None]: + """`dify_setup` completes the setup process for the Dify application. + + It creates `Account` and `Tenant`, and inserts a `DifySetup` record into the database. + + Most tests in the `controllers` package may require dify has been successfully setup. 
+ """ + with _CACHED_APP.test_request_context(): + rand_suffix = random.randint(int(1e6), int(1e7)) + name = f"test-user-{rand_suffix}" + email = f"{name}@example.com" + RegisterService.setup( + email=email, + name=name, + password=secrets.token_hex(16), + ip_address="localhost", + ) + + with _CACHED_APP.test_request_context(): + with Session(bind=db.engine, expire_on_commit=False) as session: + account = session.query(Account).filter_by(email=email).one() + + yield account + + with _CACHED_APP.test_request_context(): + db.session.query(DifySetup).delete() + db.session.query(TenantAccountJoin).delete() + db.session.query(Account).delete() + db.session.query(Tenant).delete() + db.session.commit() + + +@pytest.fixture +def flask_req_ctx(): + with _CACHED_APP.test_request_context(): + yield + + +@pytest.fixture +def auth_header(setup_account) -> dict[str, str]: + token = AccountService.get_account_jwt_token(setup_account) + return {"Authorization": f"Bearer {token}"} + + +@pytest.fixture +def test_client() -> Generator[FlaskClient, None, None]: + with _CACHED_APP.test_client() as client: + yield client diff --git a/api/tests/integration_tests/controllers/console/__init__.py b/api/tests/integration_tests/controllers/console/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/controllers/console/app/__init__.py b/api/tests/integration_tests/controllers/console/app/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/controllers/console/app/test_workflow_draft_variable.py b/api/tests/integration_tests/controllers/console/app/test_workflow_draft_variable.py new file mode 100644 index 0000000000..1dcfa5a560 --- /dev/null +++ b/api/tests/integration_tests/controllers/console/app/test_workflow_draft_variable.py @@ -0,0 +1,46 @@ +import uuid +from unittest import mock + +from controllers.console.app import workflow_draft_variable as draft_variable_api +from controllers.console.app import wraps +from factories.variable_factory import build_segment +from models import App, AppMode +from models.workflow import WorkflowDraftVariable +from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService + + +def _get_mock_srv_class() -> type[WorkflowDraftVariableService]: + return mock.create_autospec(WorkflowDraftVariableService) + + +class TestWorkflowDraftNodeVariableListApi: + def test_get(self, test_client, auth_header, monkeypatch): + srv_class = _get_mock_srv_class() + mock_app_model: App = App() + mock_app_model.id = str(uuid.uuid4()) + test_node_id = "test_node_id" + mock_app_model.mode = AppMode.ADVANCED_CHAT + mock_load_app_model = mock.Mock(return_value=mock_app_model) + + monkeypatch.setattr(draft_variable_api, "WorkflowDraftVariableService", srv_class) + monkeypatch.setattr(wraps, "_load_app_model", mock_load_app_model) + + var1 = WorkflowDraftVariable.create_node_variable( + app_id="test_app_1", + node_id="test_node_1", + name="str_var", + value=build_segment("str_value"), + ) + srv_instance = mock.create_autospec(WorkflowDraftVariableService, instance=True) + srv_class.return_value = srv_instance + srv_instance.list_node_variables.return_value = WorkflowDraftVariableList(variables=[var1]) + + response = test_client.get( + f"/console/api/apps/{mock_app_model.id}/workflows/draft/nodes/{test_node_id}/variables", + headers=auth_header, + ) + assert response.status_code == 200 + response_dict = response.json + assert isinstance(response_dict, dict) + assert 
"items" in response_dict + assert len(response_dict["items"]) == 1 diff --git a/api/tests/integration_tests/services/__init__.py b/api/tests/integration_tests/services/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/tests/integration_tests/services/test_workflow_draft_variable_service.py b/api/tests/integration_tests/services/test_workflow_draft_variable_service.py new file mode 100644 index 0000000000..fbe7826b3a --- /dev/null +++ b/api/tests/integration_tests/services/test_workflow_draft_variable_service.py @@ -0,0 +1,142 @@ +import unittest +import uuid + +import pytest +from sqlalchemy.orm import Session + +from factories.variable_factory import build_segment +from models import db +from models.workflow import WorkflowDraftVariable +from services.workflow_draft_variable_service import WorkflowDraftVariableService + + +@pytest.mark.usefixtures("flask_req_ctx") +class TestWorkflowDraftVariableService(unittest.TestCase): + _test_app_id: str + _session: Session + _node2_id = "test_node_2" + + def setUp(self): + self._test_app_id = str(uuid.uuid4()) + self._session: Session = db.session + sys_var = WorkflowDraftVariable.create_sys_variable( + app_id=self._test_app_id, + name="sys_var", + value=build_segment("sys_value"), + ) + conv_var = WorkflowDraftVariable.create_conversation_variable( + app_id=self._test_app_id, + name="conv_var", + value=build_segment("conv_value"), + ) + node2_vars = [ + WorkflowDraftVariable.create_node_variable( + app_id=self._test_app_id, + node_id=self._node2_id, + name="int_var", + value=build_segment(1), + visible=False, + ), + WorkflowDraftVariable.create_node_variable( + app_id=self._test_app_id, + node_id=self._node2_id, + name="str_var", + value=build_segment("str_value"), + visible=True, + ), + ] + node1_var = WorkflowDraftVariable.create_node_variable( + app_id=self._test_app_id, + node_id="node_1", + name="str_var", + value=build_segment("str_value"), + visible=True, + ) + _variables = list(node2_vars) + _variables.extend( + [ + node1_var, + sys_var, + conv_var, + ] + ) + + db.session.add_all(_variables) + db.session.flush() + self._variable_ids = [v.id for v in _variables] + self._node1_str_var_id = node1_var.id + self._sys_var_id = sys_var.id + self._conv_var_id = conv_var.id + self._node2_var_ids = [v.id for v in node2_vars] + + def _get_test_srv(self) -> WorkflowDraftVariableService: + return WorkflowDraftVariableService(session=self._session) + + def tearDown(self): + self._session.rollback() + + def test_list_variables(self): + srv = self._get_test_srv() + var_list = srv.list_variables_without_values(self._test_app_id, page=1, limit=2) + assert var_list.total == 5 + assert len(var_list.variables) == 2 + page1_var_ids = {v.id for v in var_list.variables} + assert page1_var_ids.issubset(self._variable_ids) + + var_list_2 = srv.list_variables_without_values(self._test_app_id, page=2, limit=2) + assert var_list_2.total is None + assert len(var_list_2.variables) == 2 + page2_var_ids = {v.id for v in var_list_2.variables} + assert page2_var_ids.isdisjoint(page1_var_ids) + assert page2_var_ids.issubset(self._variable_ids) + + def test_get_node_variable(self): + srv = self._get_test_srv() + node_var = srv.get_node_variable(self._test_app_id, "node_1", "str_var") + assert node_var.id == self._node1_str_var_id + assert node_var.name == "str_var" + assert node_var.get_value() == build_segment("str_value") + + def test_get_system_variable(self): + srv = self._get_test_srv() + sys_var = srv.get_system_variable(self._test_app_id, 
"sys_var") + assert sys_var.id == self._sys_var_id + assert sys_var.name == "sys_var" + assert sys_var.get_value() == build_segment("sys_value") + + def test_get_conversation_variable(self): + srv = self._get_test_srv() + conv_var = srv.get_conversation_variable(self._test_app_id, "conv_var") + assert conv_var.id == self._conv_var_id + assert conv_var.name == "conv_var" + assert conv_var.get_value() == build_segment("conv_value") + + def test_delete_node_variables(self): + srv = self._get_test_srv() + srv.delete_node_variables(self._test_app_id, self._node2_id) + node2_var_count = ( + self._session.query(WorkflowDraftVariable) + .where( + WorkflowDraftVariable.app_id == self._test_app_id, + WorkflowDraftVariable.node_id == self._node2_id, + ) + .count() + ) + assert node2_var_count == 0 + + def test_delete_variable(self): + srv = self._get_test_srv() + node_1_var = ( + self._session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.id == self._node1_str_var_id).one() + ) + srv.delete_variable(node_1_var) + exists = bool( + self._session.query(WorkflowDraftVariable).where(WorkflowDraftVariable.id == self._node1_str_var_id).first() + ) + assert exists is False + + def test__list_node_variables(self): + srv = self._get_test_srv() + node_vars = srv._list_node_variables(self._test_app_id, self._node2_id) + assert len(node_vars) == 2 + assert {v.id for v in node_vars} == set(self._node2_var_ids) diff --git a/api/tests/unit_tests/core/app/segments/test_factory.py b/api/tests/unit_tests/core/app/segments/test_factory.py index e6e289c12a..68fc85aa17 100644 --- a/api/tests/unit_tests/core/app/segments/test_factory.py +++ b/api/tests/unit_tests/core/app/segments/test_factory.py @@ -1,7 +1,11 @@ +from dataclasses import dataclass from uuid import uuid4 import pytest +from hypothesis import given +from hypothesis import strategies as st +from core.file import File, FileTransferMethod, FileType from core.variables import ( ArrayNumberVariable, ArrayObjectVariable, @@ -10,6 +14,7 @@ from core.variables import ( IntegerVariable, ObjectSegment, SecretVariable, + SegmentType, StringVariable, ) from core.variables.exc import VariableError @@ -163,3 +168,103 @@ def test_array_none_variable(): var = variable_factory.build_segment([None, None, None, None]) assert isinstance(var, ArrayAnySegment) assert var.value == [None, None, None, None] + + +@st.composite +def _generate_file(draw) -> File: + file_id = draw(st.text(min_size=1, max_size=10)) + tenant_id = draw(st.text(min_size=1, max_size=10)) + file_type, mime_type, extension = draw( + st.sampled_from( + [ + (FileType.IMAGE, "image/png", ".png"), + (FileType.VIDEO, "video/mp4", ".mp4"), + (FileType.DOCUMENT, "text/plain", ".txt"), + (FileType.AUDIO, "audio/mpeg", ".mp3"), + ] + ) + ) + filename = "test-file" + size = draw(st.integers(min_value=0, max_value=1024 * 1024)) + + transfer_method = draw(st.sampled_from(list(FileTransferMethod))) + if transfer_method == FileTransferMethod.REMOTE_URL: + url = "https://test.example.com/test-file" + file = File( + id="test_file_id", + tenant_id="test_tenant_id", + type=file_type, + transfer_method=transfer_method, + remote_url=url, + related_id=None, + filename=filename, + extension=extension, + mime_type=mime_type, + size=size, + ) + else: + relation_id = draw(st.uuids(version=4)) + + file = File( + id="test_file_id", + tenant_id="test_tenant_id", + type=file_type, + transfer_method=transfer_method, + related_id=str(relation_id), + filename=filename, + extension=extension, + mime_type=mime_type, + size=size, + ) 
+ return file + + +def _scalar_value() -> st.SearchStrategy[int | float | str | File]: + return st.one_of( + st.none(), + st.integers(), + st.floats(), + st.text(), + _generate_file(), + ) + + +@given(_scalar_value()) +def test_build_segment_and_extract_values_for_scalar_types(value): + seg = variable_factory.build_segment(value) + assert seg.value == value + + +@given(st.lists(_scalar_value())) +def test_build_segment_and_extract_values_for_array_types(values): + seg = variable_factory.build_segment(values) + assert seg.value == values + + +def test_build_segment_type_for_scalar(): + @dataclass(frozen=True) + class TestCase: + value: int | float | str | File + expected_type: SegmentType + + file = File( + id="test_file_id", + tenant_id="test_tenant_id", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.REMOTE_URL, + remote_url="https://test.example.com/test-file.png", + filename="test-file", + extension=".png", + mime_type="image/png", + size=1000, + ) + cases = [ + TestCase(0, SegmentType.NUMBER), + TestCase(0.0, SegmentType.NUMBER), + TestCase("", SegmentType.STRING), + TestCase(file, SegmentType.FILE), + ] + + for idx, c in enumerate(cases, 1): + segment = variable_factory.build_segment(c.value) + assert segment.value_type == c.expected_type, f"test case {idx} failed." diff --git a/api/tests/unit_tests/core/file/test_models.py b/api/tests/unit_tests/core/file/test_models.py new file mode 100644 index 0000000000..3ada2087c6 --- /dev/null +++ b/api/tests/unit_tests/core/file/test_models.py @@ -0,0 +1,25 @@ +from core.file import File, FileTransferMethod, FileType + + +def test_file(): + file = File( + id="test-file", + tenant_id="test-tenant-id", + type=FileType.IMAGE, + transfer_method=FileTransferMethod.TOOL_FILE, + related_id="test-related-id", + filename="image.png", + extension=".png", + mime_type="image/png", + size=67, + storage_key="test-storage-key", + url="https://example.com/image.png", + ) + assert file.tenant_id == "test-tenant-id" + assert file.type == FileType.IMAGE + assert file.transfer_method == FileTransferMethod.TOOL_FILE + assert file.related_id == "test-related-id" + assert file.filename == "image.png" + assert file.extension == ".png" + assert file.mime_type == "image/png" + assert file.size == 67 diff --git a/api/tests/unit_tests/models/__init__.py b/api/tests/unit_tests/models/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/api/uv.lock b/api/uv.lock index 3a877762d3..9bf95fb2dd 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -540,6 +540,25 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/65/77/8bbca82f70b062181cf0ae53fd43f1ac6556f3078884bfef9da2269c06a3/boto3-1.35.99-py3-none-any.whl", hash = "sha256:83e560faaec38a956dfb3d62e05e1703ee50432b45b788c09e25107c5058bd71", size = 139178, upload-time = "2025-01-14T20:20:25.48Z" }, ] +[[package]] +name = "boto3-stubs" +version = "1.38.20" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore-stubs" }, + { name = "types-s3transfer" }, + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4e/89/824fb0a9bebf9f1d6df70bb145f8e9c24fc4d918d4050b5d4dca075cc292/boto3_stubs-1.38.20.tar.gz", hash = "sha256:7f1d7bfff7355eb4d17e7984fbf27f44709cd8484abb54bd6ba34ec73a552605", size = 99063, upload-time = "2025-05-20T23:30:19.84Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/57/69/cfc45dfce3b4ea417f9aec708ade1eda7f280fe8ae7feca796b036619587/boto3_stubs-1.38.20-py3-none-any.whl", hash = "sha256:5406da868980a3854cc9b57db150c6f2e39a4fe4a58f2872e61ac5a3d46f734e", size = 68667, upload-time = "2025-05-20T23:30:12.393Z" }, +] + +[package.optional-dependencies] +bedrock-runtime = [ + { name = "mypy-boto3-bedrock-runtime" }, +] + [[package]] name = "botocore" version = "1.35.99" @@ -554,6 +573,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fc/dd/d87e2a145fad9e08d0ec6edcf9d71f838ccc7acdd919acc4c0d4a93515f8/botocore-1.35.99-py3-none-any.whl", hash = "sha256:b22d27b6b617fc2d7342090d6129000af2efd20174215948c0d7ae2da0fab445", size = 13293216, upload-time = "2025-01-14T20:20:06.427Z" }, ] +[[package]] +name = "botocore-stubs" +version = "1.38.19" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-awscrt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/70/6204c97f8d8362364f11c16085566abdcaa114c264d3a4d709ff697b203b/botocore_stubs-1.38.19.tar.gz", hash = "sha256:84f67a42bb240a8ea0c5fe4f05d497cc411177db600bc7012182e499ac24bf19", size = 42269, upload-time = "2025-05-19T20:18:13.556Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/ce/28b143452c22b678678d832bf8b41218e3d319bf94062b48c28fe5d81163/botocore_stubs-1.38.19-py3-none-any.whl", hash = "sha256:66fd7d231c21134a12acbe313ef7a6b152cbf9bfd7bfa12a62f8c33e94737e26", size = 65603, upload-time = "2025-05-19T20:18:10.445Z" }, +] + [[package]] name = "bottleneck" version = "1.4.2" @@ -1260,9 +1291,11 @@ dependencies = [ [package.dev-dependencies] dev = [ + { name = "boto3-stubs" }, { name = "coverage" }, { name = "dotenv-linter" }, { name = "faker" }, + { name = "hypothesis" }, { name = "lxml-stubs" }, { name = "mypy" }, { name = "pytest" }, @@ -1399,7 +1432,7 @@ requires-dist = [ { name = "opentelemetry-sdk", specifier = "==1.27.0" }, { name = "opentelemetry-semantic-conventions", specifier = "==0.48b0" }, { name = "opentelemetry-util-http", specifier = "==0.48b0" }, - { name = "opik", specifier = "~=1.3.4" }, + { name = "opik", specifier = "~=1.7.25" }, { name = "pandas", extras = ["excel", "output-formatting", "performance"], specifier = "~=2.2.2" }, { name = "pandas-stubs", specifier = "~=2.2.3.241009" }, { name = "pandoc", specifier = "~=2.4" }, @@ -1430,9 +1463,11 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ + { name = "boto3-stubs", specifier = ">=1.38.20" }, { name = "coverage", specifier = "~=7.2.4" }, { name = "dotenv-linter", specifier = "~=0.5.0" }, { name = "faker", specifier = "~=32.1.0" }, + { name = "hypothesis", specifier = ">=6.131.15" }, { name = "lxml-stubs", specifier = "~=0.5.1" }, { name = "mypy", specifier = "~=1.15.0" }, { name = "pytest", specifier = "~=8.3.2" }, @@ -2529,6 +2564,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" }, ] +[[package]] +name = "hypothesis" +version = "6.131.15" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "sortedcontainers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/6f/1e291f80627f3e043b19a86f9f6b172b910e3575577917d3122a6558410d/hypothesis-6.131.15.tar.gz", hash = 
"sha256:11849998ae5eecc8c586c6c98e47677fcc02d97475065f62768cfffbcc15ef7a", size = 436596, upload_time = "2025-05-07T23:04:25.127Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/c7/78597bcec48e1585ea9029deb2bf2341516e90dd615a3db498413d68a4cc/hypothesis-6.131.15-py3-none-any.whl", hash = "sha256:e02e67e9f3cfd4cd4a67ccc03bf7431beccc1a084c5e90029799ddd36ce006d7", size = 501128, upload_time = "2025-05-07T23:04:22.045Z" }, +] + [[package]] name = "idna" version = "3.10" @@ -3201,6 +3249,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777, upload-time = "2025-02-05T03:50:08.348Z" }, ] +[[package]] +name = "mypy-boto3-bedrock-runtime" +version = "1.38.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/55/56ce6f23d7fb98ce5b8a4261f089890bc94250666ea7089539dab55f6c25/mypy_boto3_bedrock_runtime-1.38.4.tar.gz", hash = "sha256:315a5f84c014c54e5406fdb80b030aba5cc79eb27982aff3d09ef331fb2cdd4d", size = 26169, upload-time = "2025-04-28T19:26:13.437Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/eb/3015c6504540ca4888789ee14f47590c0340b748a33b059eeb6a48b406bb/mypy_boto3_bedrock_runtime-1.38.4-py3-none-any.whl", hash = "sha256:af14320532e9b798095129a3307f4b186ba80258917bb31410cda7f423592d72", size = 31858, upload-time = "2025-04-28T19:26:09.667Z" }, +] + [[package]] name = "mypy-extensions" version = "1.1.0" @@ -3692,11 +3752,13 @@ wheels = [ [[package]] name = "opik" -version = "1.3.6" +version = "1.7.25" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "boto3-stubs", extra = ["bedrock-runtime"] }, { name = "click" }, { name = "httpx" }, + { name = "jinja2" }, { name = "levenshtein" }, { name = "litellm" }, { name = "openai" }, @@ -3709,9 +3771,9 @@ dependencies = [ { name = "tqdm" }, { name = "uuid6" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d8/16/b37208d6a77f3cc74750cff4e0970e6f596aef0f491a675a40aa879157e6/opik-1.3.6.tar.gz", hash = "sha256:25d6fa8b7aa1ef23d10d598040e539440912c12b765eabfc75c8780bbbfc8ad3", size = 177174, upload-time = "2025-01-15T17:20:48.71Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5c/dd/313895410761ee3eb36c1141fa339254c093b3cdfceb79b111c80eb396be/opik-1.7.25.tar.gz", hash = "sha256:5fcdb05bbc98e995f3eea2f94096f98c5ff7a2aca2c895d50636c44d00a07d4b", size = 286950, upload-time = "2025-05-20T13:51:16.6Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/3f/e9d14a97f85d34505770b7c7715bd72bbfc40a778163818f0d3e871264bb/opik-1.3.6-py3-none-any.whl", hash = "sha256:888973c2a1276d68c9b3cf26d8078db8aa675d2c907edda328cdab4995a8e29b", size = 341630, upload-time = "2025-01-15T17:20:45.983Z" }, + { url = "https://files.pythonhosted.org/packages/63/0a/daee58db3cdd56681672dbc62e5a71200af6d41f34bac2425d1556d3e004/opik-1.7.25-py3-none-any.whl", hash = "sha256:595fc2e6794e35d87449f64dc5d6092705645575d2c34469d04dc2bbe44dd32f", size = 547198, upload-time = "2025-05-20T13:51:14.964Z" }, ] [[package]] @@ -5203,6 +5265,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/37/c3/6eeb6034408dac0fa653d126c9204ade96b819c936e136c5e8a6897eee9c/socksio-1.0.0-py3-none-any.whl", hash = 
"sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3", size = 12763, upload-time = "2020-04-17T15:50:31.878Z" }, ] +[[package]] +name = "sortedcontainers" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload_time = "2021-05-16T22:03:42.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload_time = "2021-05-16T22:03:41.177Z" }, +] + [[package]] name = "soupsieve" version = "2.7" @@ -5557,6 +5628,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0e/18/1016ffd4c7775f24371f6a0309483dc5597e8245b5add67924e54ea3b83a/types_aiofiles-24.1.0.20250326-py3-none-any.whl", hash = "sha256:dfb58c9aa18bd449e80fb5d7f49dc3dd20d31de920a46223a61798ee4a521a70", size = 14344, upload-time = "2025-03-26T02:53:31.856Z" }, ] +[[package]] +name = "types-awscrt" +version = "0.27.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/36/6c/583522cfb3c330e92e726af517a91c13247e555e021791a60f1b03c6ff16/types_awscrt-0.27.2.tar.gz", hash = "sha256:acd04f57119eb15626ab0ba9157fc24672421de56e7bd7b9f61681fedee44e91", size = 16304, upload-time = "2025-05-16T03:10:08.712Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/82/1ee2e5c9d28deac086ab3a6ff07c8bc393ef013a083f546c623699881715/types_awscrt-0.27.2-py3-none-any.whl", hash = "sha256:49a045f25bbd5ad2865f314512afced933aed35ddbafc252e2268efa8a787e4e", size = 37761, upload-time = "2025-05-16T03:10:07.466Z" }, +] + [[package]] name = "types-beautifulsoup4" version = "4.12.0.20250204" @@ -5854,6 +5934,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/b1/f4ba392a3341cd9d613f2dce855e82471073c5ec34996fe84ac3857956d0/types_requests_oauthlib-2.0.0.20250306-py3-none-any.whl", hash = "sha256:37707de81d9ce54894afcccd70d4a845dbe4c59e747908faaeba59a96453d993", size = 14446, upload-time = "2025-03-06T02:49:24.364Z" }, ] +[[package]] +name = "types-s3transfer" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/d5/830e9efe91a26601a2bebde6f299239d2d26e542f5d4b3bc7e8c23c81a3f/types_s3transfer-0.12.0.tar.gz", hash = "sha256:f8f59201481e904362873bf0be3267f259d60ad946ebdfcb847d092a1fa26f98", size = 14096, upload-time = "2025-04-23T00:38:19.131Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/43/6097275152463ac9bacf1e00aab30bc6682bf45f6a031be8bf029c030ba2/types_s3transfer-0.12.0-py3-none-any.whl", hash = "sha256:101bbc5b7f00b71512374df881f480fc6bf63c948b5098ab024bf3370fbfb0e8", size = 19553, upload-time = "2025-04-23T00:38:17.865Z" }, +] + [[package]] name = "types-shapely" version = "2.0.0.20250404" diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index ceb32e4aba..cff5efdb6c 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -444,6 +444,7 @@ services: OB_SYS_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} OB_TENANT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} OB_CLUSTER_NAME: 
${OCEANBASE_CLUSTER_NAME:-difyai} + OB_SERVER_IP: 127.0.0.1 MODE: MINI ports: - "${OCEANBASE_VECTOR_PORT:-2881}:2881" diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 371646eb9f..8fb3addf88 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -942,6 +942,7 @@ services: OB_SYS_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} OB_TENANT_PASSWORD: ${OCEANBASE_VECTOR_PASSWORD:-difyai123456} OB_CLUSTER_NAME: ${OCEANBASE_CLUSTER_NAME:-difyai} + OB_SERVER_IP: 127.0.0.1 MODE: MINI ports: - "${OCEANBASE_VECTOR_PORT:-2881}:2881" diff --git a/web/app/components/app/create-app-modal/index.tsx b/web/app/components/app/create-app-modal/index.tsx index 6e05bb0209..46cb495801 100644 --- a/web/app/components/app/create-app-modal/index.tsx +++ b/web/app/components/app/create-app-modal/index.tsx @@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next' import { useRouter, useSearchParams } from 'next/navigation' import { useContext, useContextSelector } from 'use-context-selector' -import { RiArrowRightLine, RiCommandLine, RiCornerDownLeftLine, RiExchange2Fill } from '@remixicon/react' +import { RiArrowRightLine, RiArrowRightSLine, RiCommandLine, RiCornerDownLeftLine, RiExchange2Fill } from '@remixicon/react' import Link from 'next/link' import { useDebounceFn, useKeyPress } from 'ahooks' import Image from 'next/image' @@ -43,11 +43,12 @@ function CreateApp({ onClose, onSuccess, onCreateFromTemplate }: CreateAppProps) const { notify } = useContext(ToastContext) const mutateApps = useContextSelector(AppsContext, state => state.mutateApps) - const [appMode, setAppMode] = useState('chat') + const [appMode, setAppMode] = useState('advanced-chat') const [appIcon, setAppIcon] = useState({ type: 'emoji', icon: '🤖', background: '#FFEAD5' }) const [showAppIconPicker, setShowAppIconPicker] = useState(false) const [name, setName] = useState('') const [description, setDescription] = useState('') + const [isAppTypeExpanded, setIsAppTypeExpanded] = useState(false) const { plan, enableBilling } = useProviderContext() const isAppsFull = (enableBilling && plan.usage.buildApps >= plan.total.buildApps) @@ -116,57 +117,7 @@ function CreateApp({ onClose, onSuccess, onCreateFromTemplate }: CreateAppProps)
-
- {t('app.newApp.forBeginners')} -
- - -
} - onClick={() => { - setAppMode('chat') - }} /> - - -
} - onClick={() => { - setAppMode('agent-chat') - }} /> - - -
} - onClick={() => { - setAppMode('completion') - }} /> - - -
-
- {t('app.newApp.forAdvanced')} -
-
- - -
} - onClick={() => { - setAppMode('advanced-chat') - }} /> { setAppMode('workflow') }} /> + + +
} + onClick={() => { + setAppMode('advanced-chat') + }} /> +
+
+ +
+ {isAppTypeExpanded && ( +
+ + +
} + onClick={() => { + setAppMode('chat') + }} /> + + +
} + onClick={() => { + setAppMode('agent-chat') + }} /> + + + } + onClick={() => { + setAppMode('completion') + }} /> + + )} +
diff --git a/web/app/components/base/markdown.tsx b/web/app/components/base/markdown.tsx index 4600543ac4..b28bf2bc8f 100644 --- a/web/app/components/base/markdown.tsx +++ b/web/app/components/base/markdown.tsx @@ -11,7 +11,7 @@ import { atelierHeathDark, atelierHeathLight, } from 'react-syntax-highlighter/dist/esm/styles/hljs' -import { Component, memo, useMemo, useRef, useState } from 'react' +import { Component, memo, useEffect, useMemo, useRef, useState } from 'react' import { flow } from 'lodash-es' import ActionButton from '@/app/components/base/action-button' import CopyIcon from '@/app/components/base/copy-icon' @@ -74,7 +74,7 @@ const preprocessLaTeX = (content: string) => { processedContent = flow([ (str: string) => str.replace(/\\\[(.*?)\\\]/g, (_, equation) => `$$${equation}$$`), - (str: string) => str.replace(/\\\[(.*?)\\\]/gs, (_, equation) => `$$${equation}$$`), + (str: string) => str.replace(/\\\[([\s\S]*?)\\\]/g, (_, equation) => `$$${equation}$$`), (str: string) => str.replace(/\\\((.*?)\\\)/g, (_, equation) => `$$${equation}$$`), (str: string) => str.replace(/(^|[^\\])\$(.+?)\$/g, (_, prefix, equation) => `${prefix}$${equation}$`), ])(processedContent) @@ -124,23 +124,143 @@ export function PreCode(props: { children: any }) { const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any) => { const { theme } = useTheme() const [isSVG, setIsSVG] = useState(true) + const [chartState, setChartState] = useState<'loading' | 'success' | 'error'>('loading') + const [finalChartOption, setFinalChartOption] = useState(null) + const echartsRef = useRef(null) + const contentRef = useRef('') + const processedRef = useRef(false) // Track if content was successfully processed const match = /language-(\w+)/.exec(className || '') const language = match?.[1] const languageShowName = getCorrectCapitalizationLanguageName(language || '') - const chartData = useMemo(() => { - const str = String(children).replace(/\n$/, '') - if (language === 'echarts') { - try { - return JSON.parse(str) - } - catch { } - try { - // eslint-disable-next-line no-new-func, sonarjs/code-eval - return new Function(`return ${str}`)() - } - catch { } + const isDarkMode = theme === Theme.dark + + // Handle container resize for echarts + useEffect(() => { + if (language !== 'echarts' || !echartsRef.current) return + + const handleResize = () => { + // This gets the echarts instance from the component + const instance = echartsRef.current?.getEchartsInstance?.() + if (instance) + instance.resize() + } + + window.addEventListener('resize', handleResize) + + // Also manually trigger resize after a short delay to ensure proper sizing + const resizeTimer = setTimeout(handleResize, 200) + + return () => { + window.removeEventListener('resize', handleResize) + clearTimeout(resizeTimer) + } + }, [language, echartsRef.current]) + + // Process chart data when content changes + useEffect(() => { + // Only process echarts content + if (language !== 'echarts') return + + // Reset state when new content is detected + if (!contentRef.current) { + setChartState('loading') + processedRef.current = false + } + + const newContent = String(children).replace(/\n$/, '') + + // Skip if content hasn't changed + if (contentRef.current === newContent) return + contentRef.current = newContent + + const trimmedContent = newContent.trim() + if (!trimmedContent) return + + // Detect if this is historical data (already complete) + // Historical data typically comes as a complete code block with complete JSON + const isCompleteJson 
+ = (trimmedContent.startsWith('{') && trimmedContent.endsWith('}') + && trimmedContent.split('{').length === trimmedContent.split('}').length) + || (trimmedContent.startsWith('[') && trimmedContent.endsWith(']') + && trimmedContent.split('[').length === trimmedContent.split(']').length) + + // If the JSON structure looks complete, try to parse it right away + if (isCompleteJson && !processedRef.current) { + try { + const parsed = JSON.parse(trimmedContent) + if (typeof parsed === 'object' && parsed !== null) { + setFinalChartOption(parsed) + setChartState('success') + processedRef.current = true + return + } + } + catch { + try { + // eslint-disable-next-line no-new-func, sonarjs/code-eval + const result = new Function(`return ${trimmedContent}`)() + if (typeof result === 'object' && result !== null) { + setFinalChartOption(result) + setChartState('success') + processedRef.current = true + return + } + } + catch { + // If we have a complete JSON structure but it doesn't parse, + // it's likely an error rather than incomplete data + setChartState('error') + processedRef.current = true + return + } + } + } + + // If we get here, either the JSON isn't complete yet, or we failed to parse it + // Check more conditions for streaming data + const isIncomplete + = trimmedContent.length < 5 + || (trimmedContent.startsWith('{') + && (!trimmedContent.endsWith('}') + || trimmedContent.split('{').length !== trimmedContent.split('}').length)) + || (trimmedContent.startsWith('[') + && (!trimmedContent.endsWith(']') + || trimmedContent.split('[').length !== trimmedContent.split('}').length)) + || (trimmedContent.split('"').length % 2 !== 1) + || (trimmedContent.includes('{"') && !trimmedContent.includes('"}')) + + // Only try to parse streaming data if it looks complete and hasn't been processed + if (!isIncomplete && !processedRef.current) { + let isValidOption = false + + try { + const parsed = JSON.parse(trimmedContent) + if (typeof parsed === 'object' && parsed !== null) { + setFinalChartOption(parsed) + isValidOption = true + } + } + catch { + try { + // eslint-disable-next-line no-new-func, sonarjs/code-eval + const result = new Function(`return ${trimmedContent}`)() + if (typeof result === 'object' && result !== null) { + setFinalChartOption(result) + isValidOption = true + } + } + catch { + // Both parsing methods failed, but content looks complete + setChartState('error') + processedRef.current = true + } + } + + if (isValidOption) { + setChartState('success') + processedRef.current = true + } } - return JSON.parse('{"title":{"text":"ECharts error - Wrong option."}}') }, [language, children]) const renderCodeContent = useMemo(() => { @@ -150,14 +270,125 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any if (isSVG) return break - case 'echarts': + case 'echarts': { + // Loading state: show loading indicator + if (chartState === 'loading') { + return ( +
+
+ {/* Rotating spinner that works in both light and dark modes */} + + + + + +
+
Chart loading...
+
+ ) + } + + // Success state: show the chart + if (chartState === 'success' && finalChartOption) { + return ( +
+ + { + const instance = echartsRef.current?.getEchartsInstance?.() + if (instance) + instance.resize() + }, + }} + /> + +
+ ) + } + + // Error state: show error message + const errorOption = { + title: { + text: 'ECharts error - Wrong option.', + }, + } + return ( -
+
- +
) + } case 'svg': if (isSVG) { return ( @@ -192,7 +423,7 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any ) } - }, [children, language, isSVG, chartData, props, theme, match]) + }, [children, language, isSVG, finalChartOption, props, theme, match]) if (inline || !match) return {children} diff --git a/web/app/components/base/tag-input/index.tsx b/web/app/components/base/tag-input/index.tsx index 2be9c5ffc7..4824b6f62d 100644 --- a/web/app/components/base/tag-input/index.tsx +++ b/web/app/components/base/tag-input/index.tsx @@ -93,7 +93,7 @@ const TagInput: FC = ({
{!isSpecialMode && !focused && } void; type?: 'upload' |
{t(`datasetDocuments.list.empty.${type}.tip`)}
- {type === 'upload' && canAdd && }
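A note on the `preprocessLaTeX` change in `web/app/components/base/markdown.tsx` earlier in this diff: the old pipeline ran the same `\\[...\\]` pattern twice, once with and once without the `s` flag, while the new single pass uses `[\s\S]*?`, which already matches across newlines. A minimal standalone sketch of that rewrite (the helper name and sample input are illustrative, not from the PR):

```ts
// Sketch of the display-math rewrite in preprocessLaTeX: \[ ... \] becomes $$ ... $$.
// [\s\S]*? matches any character, including newlines, so no `s` flag is needed.
const rewriteDisplayMath = (content: string): string =>
  content.replace(/\\\[([\s\S]*?)\\\]/g, (_, equation) => `$$${equation}$$`)

// A multi-line LaTeX block, e.g. arriving from streamed markdown:
const sample = 'Euler:\n\\[\ne^{i\\pi} + 1 = 0\n\\]'
// Logs the same text with the \[ \] delimiters rewritten to $$ ... $$.
console.log(rewriteDisplayMath(sample))
```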
@@ -267,7 +267,7 @@ const Documents: FC = ({ datasetId }) => { ? 'https://docs.dify.ai/zh-hans/guides/knowledge-base/integrate-knowledge-within-application' : 'https://docs.dify.ai/en/guides/knowledge-base/integrate-knowledge-within-application' } - > + > {t('datasetDocuments.list.learnMore')} diff --git a/web/app/components/datasets/documents/style.module.css b/web/app/components/datasets/documents/style.module.css index ececd3ad90..dd1ced5ad5 100644 --- a/web/app/components/datasets/documents/style.module.css +++ b/web/app/components/datasets/documents/style.module.css @@ -26,7 +26,7 @@ @apply text-text-secondary text-sm; } .addFileBtn { - @apply mt-4 w-fit !text-[13px] text-primary-600 font-medium bg-white border-[0.5px]; + @apply mt-4 w-fit !text-[13px] font-medium border-[0.5px]; } .plusIcon { @apply w-4 h-4 mr-2 stroke-current stroke-[1.5px]; @@ -35,16 +35,16 @@ @apply flex items-center justify-center h-full; } .emptyElement { - @apply bg-gray-50 w-[560px] h-fit box-border px-5 py-4 rounded-2xl; + @apply bg-components-panel-on-panel-item-bg border-divider-subtle w-[560px] h-fit box-border px-5 py-4 rounded-2xl; } .emptyTitle { - @apply text-gray-700 font-semibold; + @apply text-text-secondary font-semibold; } .emptyTip { - @apply mt-2 text-gray-500 text-sm font-normal; + @apply mt-2 text-text-primary text-sm font-normal; } .emptySymbolIconWrapper { - @apply w-[44px] h-[44px] border border-solid border-gray-100 rounded-lg flex items-center justify-center mb-2; + @apply w-[44px] h-[44px] border border-solid border-components-button-secondary-border rounded-lg flex items-center justify-center mb-2; } .commonIcon { @apply w-4 h-4 inline-block align-middle; diff --git a/web/app/components/datasets/settings/permission-selector/index.tsx b/web/app/components/datasets/settings/permission-selector/index.tsx index 9bb6f812d4..18b0908956 100644 --- a/web/app/components/datasets/settings/permission-selector/index.tsx +++ b/web/app/components/datasets/settings/permission-selector/index.tsx @@ -151,7 +151,7 @@ const PermissionSelector = ({ disabled, permission, value, memberList, onChange,
{isPartialMembers && (
-
+
-
{value.url}
+
{value.url}
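On the echarts handling added to `markdown.tsx` above: the new effect only parses a streamed code block once it looks complete, using bracket and quote balance as the heuristic, and falls back to `new Function(...)` for relaxed JS object literals. One detail worth flagging: in the `isIncomplete` check, the array branch compares `split('[').length` against `split('}').length`, which reads like a copy-and-paste slip and presumably should balance `[` against `]`. A simplified standalone sketch of the heuristic (hypothetical helper names, `JSON.parse` only):

```ts
// Heuristic sketch: does a streamed echarts option string look complete enough to parse?
// Mirrors the checks in the diff: minimum length, balanced braces/brackets, closed quotes.
function looksComplete(raw: string): boolean {
  const s = raw.trim()
  if (s.length < 5)
    return false
  const balanced = (open: string, close: string) =>
    s.startsWith(open)
    && s.endsWith(close)
    && s.split(open).length === s.split(close).length
  const quotesClosed = s.split('"').length % 2 === 1
  return (balanced('{', '}') || balanced('[', ']')) && quotesClosed
}

// Parse only when the snapshot looks complete; otherwise keep showing a loading state.
function tryParseOption(raw: string): object | null {
  if (!looksComplete(raw))
    return null
  try {
    const parsed = JSON.parse(raw.trim())
    return typeof parsed === 'object' && parsed !== null ? parsed : null
  }
  catch {
    return null
  }
}
```

The component itself additionally tracks a `processedRef` so that an option which has already parsed successfully is not re-parsed on every subsequent streamed chunk.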
diff --git a/web/app/components/header/github-star/index.tsx b/web/app/components/header/github-star/index.tsx index b087b9e41a..e825dcdd14 100644 --- a/web/app/components/header/github-star/index.tsx +++ b/web/app/components/header/github-star/index.tsx @@ -2,6 +2,11 @@ import { useQuery } from '@tanstack/react-query' import type { FC } from 'react' import type { GithubRepo } from '@/models/common' +import { RiLoader2Line } from '@remixicon/react' + +const defaultData = { + stargazers_count: 98570, +} const getStar = async () => { const res = await fetch('https://api.github.com/repos/langgenius/dify') @@ -13,15 +18,21 @@ const getStar = async () => { } const GithubStar: FC<{ className: string }> = (props) => { - const { isFetching, data } = useQuery({ + const { isFetching, isError, data } = useQuery({ queryKey: ['github-star'], queryFn: getStar, enabled: process.env.NODE_ENV !== 'development', - initialData: { stargazers_count: 81204 }, + retry: false, + placeholderData: defaultData, }) + if (isFetching) - return null - return {data.stargazers_count.toLocaleString()} + return + + if (isError) + return {defaultData.stargazers_count.toLocaleString()} + + return {data?.stargazers_count.toLocaleString()} } export default GithubStar diff --git a/web/i18n/de-DE/app.ts b/web/i18n/de-DE/app.ts index 7f4b7162e3..5ae5c39b51 100644 --- a/web/i18n/de-DE/app.ts +++ b/web/i18n/de-DE/app.ts @@ -77,20 +77,20 @@ const translation = { learnMore: 'Weitere Informationen', optional: 'Wahlfrei', noTemplateFound: 'Keine Vorlagen gefunden', - workflowUserDescription: 'Workflow-Orchestrierung für Aufgaben in einer einzigen Runde wie Automatisierung und Stapelverarbeitung.', + workflowUserDescription: 'Autonome KI-Arbeitsabläufe visuell per Drag-and-Drop erstellen.', foundResults: '{{Anzahl}} Befund', chatbotShortDescription: 'LLM-basierter Chatbot mit einfacher Einrichtung', completionUserDescription: 'Erstellen Sie schnell einen KI-Assistenten für Textgenerierungsaufgaben mit einfacher Konfiguration.', noAppsFound: 'Keine Apps gefunden', - advancedShortDescription: 'Workflow für komplexe Dialoge mit mehreren Durchläufen mit Speicher', + advancedShortDescription: 'Workflow optimiert für mehrstufige Chats', forAdvanced: 'FÜR FORTGESCHRITTENE', chooseAppType: 'App-Typ auswählen', completionShortDescription: 'KI-Assistent für Textgenerierungsaufgaben', - forBeginners: 'FÜR ANFÄNGER', + forBeginners: 'Einfachere App-Typen', noIdeaTip: 'Keine Ideen? Schauen Sie sich unsere Vorlagen an', - workflowShortDescription: 'Orchestrierung für Single-Turn-Automatisierungsaufgaben', + workflowShortDescription: 'Agentischer Ablauf für intelligente Automatisierungen', noTemplateFoundTip: 'Versuchen Sie, mit verschiedenen Schlüsselwörtern zu suchen.', - advancedUserDescription: 'Workflow-Orchestrierung für komplexe Dialogaufgaben mit mehreren Runden und Speicherkapazitäten.', + advancedUserDescription: 'Workflow mit Speicherfunktionen und Chatbot-Oberfläche.', chatbotUserDescription: 'Erstellen Sie schnell einen LLM-basierten Chatbot mit einfacher Konfiguration. 
Sie können später zu Chatflow wechseln.', foundResult: '{{Anzahl}} Ergebnis', agentUserDescription: 'Ein intelligenter Agent, der in der Lage ist, iteratives Denken zu führen und autonome Werkzeuge zu verwenden, um Aufgabenziele zu erreichen.', diff --git a/web/i18n/en-US/app.ts b/web/i18n/en-US/app.ts index 4cbc8df0ae..8b53129b2c 100644 --- a/web/i18n/en-US/app.ts +++ b/web/i18n/en-US/app.ts @@ -47,13 +47,13 @@ const translation = { completionUserDescription: 'Quickly build an AI assistant for text generation tasks with simple configuration.', agentShortDescription: 'Intelligent agent with reasoning and autonomous tool use', agentUserDescription: 'An intelligent agent capable of iterative reasoning and autonomous tool use to achieve task goals.', - workflowShortDescription: 'Orchestration for single-turn automation tasks', - workflowUserDescription: 'Workflow orchestration for single-round tasks like automation and batch processing.', + workflowShortDescription: 'Agentic flow for intelligent automations', + workflowUserDescription: 'Visually build autonomous AI workflows with drag-and-drop simplicity.', workflowWarning: 'Currently in beta', - advancedShortDescription: 'Workflow for complex multi-turn dialogues with memory', - advancedUserDescription: 'Workflow orchestration for multi-round complex dialogue tasks with memory capabilities.', - chooseAppType: 'Choose App Type', - forBeginners: 'FOR BEGINNERS', + advancedShortDescription: 'Workflow enhanced for multi-turn chats', + advancedUserDescription: 'Workflow with additional memory features and a chatbot interface.', + chooseAppType: 'Choose an App Type', + forBeginners: 'More basic app types', forAdvanced: 'FOR ADVANCED USERS', noIdeaTip: 'No ideas? Check out our templates', captionName: 'App Name & Icon', diff --git a/web/i18n/en-US/dataset-documents.ts b/web/i18n/en-US/dataset-documents.ts index d7fd70c089..2a79324477 100644 --- a/web/i18n/en-US/dataset-documents.ts +++ b/web/i18n/en-US/dataset-documents.ts @@ -51,7 +51,7 @@ const translation = { empty: { title: 'There is no documentation yet', upload: { - tip: 'You can upload files, sync from the website, or from webb apps like Notion, GitHub, etc.', + tip: 'You can upload files, sync from the website, or from web apps like Notion, GitHub, etc.', }, sync: { tip: 'Dify will periodically download files from your Notion and complete processing.', diff --git a/web/i18n/es-ES/app.ts b/web/i18n/es-ES/app.ts index 3d2a39db97..de3a458d2b 100644 --- a/web/i18n/es-ES/app.ts +++ b/web/i18n/es-ES/app.ts @@ -72,21 +72,21 @@ const translation = { appCreateDSLErrorPart1: 'Se ha detectado una diferencia significativa en las versiones de DSL. 
Forzar la importación puede hacer que la aplicación no funcione correctamente.', appCreateDSLWarning: 'Precaución: La diferencia de versión de DSL puede afectar a determinadas funciones', appCreateDSLErrorPart3: 'Versión actual de DSL de la aplicación:', - forBeginners: 'PARA PRINCIPIANTES', + forBeginners: 'Tipos de aplicación más básicos', learnMore: 'Aprende más', noTemplateFoundTip: 'Intente buscar usando diferentes palabras clave.', chatbotShortDescription: 'Chatbot basado en LLM con una configuración sencilla', - chooseAppType: 'Elija el tipo de aplicación', + chooseAppType: 'Elija un tipo de aplicación', noAppsFound: 'No se han encontrado aplicaciones', - workflowUserDescription: 'Orquestación del flujo de trabajo para tareas de una sola ronda, como la automatización y el procesamiento por lotes.', - advancedShortDescription: 'Flujo de trabajo para diálogos complejos de varios turnos con memoria', + workflowUserDescription: 'Construya flujos de trabajo autónomos de IA con la simplicidad de arrastrar y soltar.', + advancedShortDescription: 'Flujo de trabajo mejorado para chats de múltiples turnos', forAdvanced: 'PARA USUARIOS AVANZADOS', completionShortDescription: 'Asistente de IA para tareas de generación de texto', optional: 'Opcional', noIdeaTip: '¿No tienes ideas? Echa un vistazo a nuestras plantillas', agentUserDescription: 'Un agente inteligente capaz de realizar un razonamiento iterativo y un uso autónomo de las herramientas para alcanzar los objetivos de las tareas.', - workflowShortDescription: 'Orquestación para tareas de automatización de un solo turno', - advancedUserDescription: 'Orquestación de flujos de trabajo para tareas de diálogo complejas de varias rondas con capacidades de memoria.', + workflowShortDescription: 'Flujo agéntico para automatizaciones inteligentes', + advancedUserDescription: 'Flujo de trabajo con funciones de memoria y una interfaz de chatbot.', agentShortDescription: 'Agente inteligente con razonamiento y uso autónomo de herramientas', foundResults: '{{conteo}} Resultados', noTemplateFound: 'No se han encontrado plantillas', diff --git a/web/i18n/fa-IR/app.ts b/web/i18n/fa-IR/app.ts index d12206b485..f048dfca1f 100644 --- a/web/i18n/fa-IR/app.ts +++ b/web/i18n/fa-IR/app.ts @@ -79,10 +79,10 @@ const translation = { completionShortDescription: 'دستیار هوش مصنوعی برای تسک های تولید متن', foundResult: '{{تعداد}} نتیجه', chatbotUserDescription: 'به سرعت یک چت بات مبتنی بر LLM با پیکربندی ساده بسازید. 
بعدا می توانید به Chatflow بروید.', - chooseAppType: 'نوع برنامه را انتخاب کنید', + chooseAppType: 'انتخاب نوع برنامه', foundResults: '{{تعداد}} نتیجه', noIdeaTip: 'ایده ای ندارید؟ قالب های ما را بررسی کنید', - forBeginners: 'برای مبتدیان', + forBeginners: 'انواع برنامه‌های پایه‌تر', noAppsFound: 'هیچ برنامه ای یافت نشد', chatbotShortDescription: 'چت بات مبتنی بر LLM با راه اندازی ساده', optional: 'اختیاری', @@ -91,11 +91,11 @@ const translation = { noTemplateFoundTip: 'سعی کنید با استفاده از کلمات کلیدی مختلف جستجو کنید.', noTemplateFound: 'هیچ الگویی یافت نشد', forAdvanced: 'برای کاربران پیشرفته', - workflowShortDescription: 'ارکستراسیون برای تسک های اتوماسیون تک نوبت', - workflowUserDescription: 'ارکستراسیون گردش کار برای کارهای تک مرحله ای مانند اتوماسیون و پردازش دسته ای.', - advancedUserDescription: 'ارکستراسیون گردش کار برای کارهای گفتگوی پیچیده چند مرحله ای با قابلیت های حافظه.', + workflowShortDescription: 'جریان عاملی برای اتوماسیون‌های هوشمند', + workflowUserDescription: 'ساخت بصری گردش‌کارهای خودکار هوش مصنوعی با سادگی کشیدن و رها کردن', + advancedUserDescription: 'گردش‌کار با ویژگی‌های حافظه اضافی و رابط چت‌بات', completionUserDescription: 'به سرعت یک دستیار هوش مصنوعی برای وظایف تولید متن با پیکربندی ساده بسازید.', - advancedShortDescription: 'گردش کار برای دیالوگ های پیچیده چند چرخشی با حافظه', + advancedShortDescription: 'گردش‌کار پیشرفته برای گفتگوهای چند مرحله‌ای', agentUserDescription: 'یک عامل هوشمند که قادر به استدلال تکراری و استفاده از ابزار مستقل برای دستیابی به اهداف وظیفه است.', }, editApp: 'ویرایش اطلاعات', diff --git a/web/i18n/fr-FR/app.ts b/web/i18n/fr-FR/app.ts index dc10abe91b..beea355ffc 100644 --- a/web/i18n/fr-FR/app.ts +++ b/web/i18n/fr-FR/app.ts @@ -73,26 +73,26 @@ const translation = { appCreateDSLErrorPart3: 'Version actuelle de l’application DSL :', appCreateDSLErrorPart2: 'Voulez-vous continuer ?', foundResults: '{{compte}} Résultats', - workflowShortDescription: 'Orchestration pour les tâches d’automatisation à tour unique', + workflowShortDescription: 'Flux agentique pour automatisations intelligentes', agentShortDescription: 'Agent intelligent avec raisonnement et utilisation autonome de l’outil', learnMore: 'Pour en savoir plus', noTemplateFound: 'Aucun modèle trouvé', completionShortDescription: 'Assistant IA pour les tâches de génération de texte', chatbotShortDescription: 'Chatbot basé sur LLM avec configuration simple', - advancedUserDescription: 'Orchestration du flux de travail pour les tâches de dialogue complexes à plusieurs tours avec des capacités de mémoire.', + advancedUserDescription: 'Workflow avec fonctionnalités de mémoire et interface de chatbot.', noTemplateFoundTip: 'Essayez d’effectuer une recherche à l’aide de mots-clés différents.', noAppsFound: 'Aucune application trouvée', - chooseAppType: 'Choisissez le type d’application', + chooseAppType: 'Choisissez un type d’application', forAdvanced: 'POUR LES UTILISATEURS AVANCÉS', chatbotUserDescription: 'Créez rapidement un chatbot basé sur LLM avec une configuration simple. 
Vous pouvez passer à Chatflow plus tard.', - workflowUserDescription: 'Orchestration du flux de travail pour les tâches ponctuelles telles que l’automatisation et le traitement par lots.', + workflowUserDescription: 'Créez visuellement des flux IA autonomes avec la simplicité du glisser-déposer.', completionUserDescription: 'Créez rapidement un assistant IA pour les tâches de génération de texte avec une configuration simple.', agentUserDescription: 'Un agent intelligent capable d’un raisonnement itératif et d’une utilisation autonome d’outils pour atteindre les objectifs de la tâche.', - forBeginners: 'POUR LES DÉBUTANTS', + forBeginners: 'Types d’applications plus basiques', foundResult: '{{compte}} Résultat', noIdeaTip: 'Pas d’idées ? Consultez nos modèles', optional: 'Optionnel', - advancedShortDescription: 'Flux de travail pour des dialogues complexes à plusieurs tours avec mémoire', + advancedShortDescription: 'Workflow amélioré pour conversations multi-tours', }, editApp: 'Modifier les informations', editAppTitle: 'Modifier les informations de l\'application', diff --git a/web/i18n/hi-IN/app.ts b/web/i18n/hi-IN/app.ts index ee5d77bc09..e5db983f45 100644 --- a/web/i18n/hi-IN/app.ts +++ b/web/i18n/hi-IN/app.ts @@ -73,7 +73,7 @@ const translation = { appCreateDSLWarning: 'सावधानी: DSL संस्करण अंतर कुछ सुविधाओं को प्रभावित कर सकता है', appCreateDSLErrorPart2: 'क्या आप जारी रखना चाहते हैं?', learnMore: 'और जानो', - forBeginners: 'नौसिखियों के लिए', + forBeginners: 'नए उपयोगकर्ताओं के लिए बुनियादी ऐप प्रकार', foundResults: '{{गिनती}} परिणाम', forAdvanced: 'उन्नत उपयोगकर्ताओं के लिए', agentUserDescription: 'पुनरावृत्त तर्क और स्वायत्त उपकरण में सक्षम एक बुद्धिमान एजेंट कार्य लक्ष्यों को प्राप्त करने के लिए उपयोग करता है।', @@ -87,12 +87,12 @@ const translation = { noAppsFound: 'कोई ऐप्लिकेशन नहीं मिला', chooseAppType: 'ऐप प्रकार चुनें', agentShortDescription: 'तर्क और स्वायत्त उपकरण उपयोग के साथ बुद्धिमान एजेंट', - workflowShortDescription: 'सिंगल-टर्न ऑटोमेशन कार्यों के लिए ऑर्केस्ट्रेशन', + workflowShortDescription: 'बुद्धिमान स्वचालन के लिए एजेंटिक प्रवाह', chatbotUserDescription: 'सरल कॉन्फ़िगरेशन के साथ जल्दी से एलएलएम-आधारित चैटबॉट बनाएं। आप बाद में चैटफ्लो पर स्विच कर सकते हैं।', - advancedUserDescription: 'स्मृति क्षमताओं के साथ बहु-दौर जटिल संवाद कार्यों के लिए वर्कफ़्लो ऑर्केस्ट्रेशन।', - advancedShortDescription: 'स्मृति के साथ जटिल बहु-मोड़ संवादों के लिए वर्कफ़्लो', + advancedUserDescription: 'अतिरिक्त मेमोरी सुविधाओं और चैटबॉट इंटरफेस के साथ वर्कफ़्लो।', + advancedShortDescription: 'बहु-चरण वार्तालाप के लिए उन्नत वर्कफ़्लो', noTemplateFoundTip: 'विभिन्न कीवर्ड का उपयोग करके खोजने का प्रयास करें।', - workflowUserDescription: 'स्वचालन और बैच प्रसंस्करण जैसे एकल-दौर कार्यों के लिए वर्कफ़्लो ऑर्केस्ट्रेशन।', + workflowUserDescription: 'ड्रैग-एंड-ड्रॉप सरलता के साथ स्वायत्त AI वर्कफ़्लो का दृश्य निर्माण करें।', }, editApp: 'जानकारी संपादित करें', editAppTitle: 'ऐप जानकारी संपादित करें', diff --git a/web/i18n/it-IT/app.ts b/web/i18n/it-IT/app.ts index ae811571f6..a1762bdea2 100644 --- a/web/i18n/it-IT/app.ts +++ b/web/i18n/it-IT/app.ts @@ -78,13 +78,13 @@ const translation = { appCreateDSLErrorTitle: 'Incompatibilità di versione', appCreateDSLWarning: 'Attenzione: la differenza di versione DSL può influire su alcune funzionalità', appCreateDSLErrorPart4: 'Versione DSL supportata dal sistema:', - forBeginners: 'PER I PRINCIPIANTI', + forBeginners: 'Tipi di app più semplici', noAppsFound: 'Nessuna app trovata', noTemplateFoundTip: 'Prova a cercare utilizzando parole chiave diverse.', 
foundResults: '{{conteggio}} Risultati',
chatbotShortDescription: 'Chatbot basato su LLM con configurazione semplice',
forAdvanced: 'PER UTENTI AVANZATI',
- workflowShortDescription: 'Orchestrazione per attività di automazione a turno singolo',
+ workflowShortDescription: 'Flusso agentico per automazioni intelligenti',
foundResult: '{{conteggio}} Risultato',
noIdeaTip: 'Non hai idee? Dai un\'occhiata ai nostri modelli',
completionShortDescription: 'Assistente AI per le attività di generazione del testo',
@@ -94,11 +94,11 @@
chatbotUserDescription: 'Crea rapidamente un chatbot basato su LLM con una configurazione semplice. Puoi passare a Chatflow in un secondo momento.',
agentShortDescription: 'Agente intelligente con ragionamento e uso autonomo degli strumenti',
completionUserDescription: 'Crea rapidamente un assistente AI per le attività di generazione di testo con una configurazione semplice.',
- advancedUserDescription: 'Orchestrazione del flusso di lavoro per attività di dialogo complesse a più round con funzionalità di memoria.',
- workflowUserDescription: 'Orchestrazione del flusso di lavoro per attività a ciclo singolo come l\'automazione e l\'elaborazione batch.',
+ advancedUserDescription: 'Flusso di lavoro con funzioni di memoria e interfaccia di chatbot.',
+ workflowUserDescription: 'Crea flussi di lavoro AI autonomi visivamente con la semplicità del drag-and-drop.',
agentUserDescription: 'Un agente intelligente in grado di ragionare in modo iterativo e di utilizzare autonomamente gli strumenti per raggiungere gli obiettivi del compito.',
- advancedShortDescription: 'Flusso di lavoro per dialoghi complessi a più turni con memoria',
- chooseAppType: 'Scegli il tipo di app',
+ advancedShortDescription: 'Flusso di lavoro migliorato per conversazioni multiple',
+ chooseAppType: 'Scegli un tipo di app',
},
editApp: 'Modifica Info',
editAppTitle: 'Modifica Info App',
diff --git a/web/i18n/ja-JP/app.ts b/web/i18n/ja-JP/app.ts
index 3d9c9cb620..a1b5090b1f 100644
--- a/web/i18n/ja-JP/app.ts
+++ b/web/i18n/ja-JP/app.ts
@@ -80,25 +80,25 @@ const translation = {
appCreateDSLWarning: '注意:DSLのバージョンの違いは、特定の機能に影響を与える可能性があります',
appCreateDSLErrorPart1: 'DSL バージョンに大きな違いが検出されました。インポートを強制すると、アプリケーションが誤動作する可能性があります。',
optional: '随意',
- forBeginners: '初心者向け',
+ forBeginners: '初心者向けの基本的なアプリタイプ',
noTemplateFoundTip: '別のキーワードを使用して検索してみてください。',
agentShortDescription: '推論と自律的なツールの使用を備えたインテリジェントエージェント',
foundResults: '{{カウント}}業績',
noTemplateFound: 'テンプレートが見つかりません',
noAppsFound: 'アプリが見つかりませんでした',
- workflowShortDescription: 'シングルターンの自動化タスクのオーケストレーション',
+ workflowShortDescription: 'インテリジェントな自動化のためのエージェントフロー',
completionShortDescription: 'テキスト生成タスクのためのAIアシスタント',
- advancedUserDescription: 'メモリ機能を備えたマルチラウンドの複雑な対話タスクのワークフローオーケストレーション。',
- advancedShortDescription: 'メモリを使用した複雑なマルチターン対話のワークフロー',
+ advancedUserDescription: '追加のメモリ機能とチャットボットインターフェースを備えたワークフロー',
+ advancedShortDescription: '複数ターンチャット向けに強化されたワークフロー',
agentUserDescription: 'タスクの目標を達成するために反復的な推論と自律的なツールを使用できるインテリジェントエージェント。',
foundResult: '{{カウント}}結果',
forAdvanced: '上級ユーザー向け',
- chooseAppType: 'アプリの種類を選択',
+ chooseAppType: 'アプリタイプを選択',
learnMore: '詳細情報',
noIdeaTip: 'アイデアがありませんか?テンプレートをご覧ください',
chatbotShortDescription: '簡単なセットアップのLLMベースのチャットボット',
chatbotUserDescription: '簡単な設定でLLMベースのチャットボットを迅速に構築します。Chatflowは後で切り替えることができます。',
- workflowUserDescription: '自動化やバッチ処理などの単一ラウンドのタスクのためのワークフローオーケストレーション。',
+ workflowUserDescription: 'ドラッグ&ドロップの簡易性で自律型AIワークフローを視覚的に構築',
completionUserDescription: '簡単な構成でテキスト生成タスク用のAIアシスタントをすばやく構築します。',
},

editApp: '情報を編集する', diff --git a/web/i18n/ko-KR/app.ts b/web/i18n/ko-KR/app.ts index 2ec2e4294c..b4ee4cf6ac 100644 --- a/web/i18n/ko-KR/app.ts +++ b/web/i18n/ko-KR/app.ts @@ -69,10 +69,10 @@ const translation = { appCreateDSLWarning: '주의: DSL 버전 차이는 특정 기능에 영향을 미칠 수 있습니다.', appCreateDSLErrorPart1: 'DSL 버전에서 상당한 차이가 감지되었습니다. 강제로 가져오면 응용 프로그램이 오작동할 수 있습니다.', chooseAppType: '앱 유형 선택', - forBeginners: '초보자용', + forBeginners: '초보자용 기본 앱 유형', forAdvanced: '고급 사용자용', chatbotShortDescription: '간단한 설정으로 LLM 기반 챗봇', - workflowUserDescription: '자동화 및 배치 처리와 같은 단일 라운드 작업을 위한 워크플로우 오케스트레이션.', + workflowUserDescription: '드래그 앤 드롭으로 자율 AI 워크플로우를 시각적으로 구축', noTemplateFoundTip: '다른 키워드를 사용하여 검색해 보십시오.', noIdeaTip: '아이디어가 없으신가요? 템플릿을 확인해 보세요', optional: '선택적', @@ -81,14 +81,14 @@ const translation = { learnMore: '더 알아보세요', foundResults: '{{개수}} 결과', agentShortDescription: '추론 및 자율적인 도구 사용 기능이 있는 지능형 에이전트', - advancedShortDescription: '메모리를 사용한 복잡한 다중 턴 대화를 위한 워크플로우', + advancedShortDescription: '다중 대화를 위해 강화된 워크플로우', noAppsFound: '앱을 찾을 수 없습니다.', foundResult: '{{개수}} 결과', completionUserDescription: '간단한 구성으로 텍스트 생성 작업을 위한 AI 도우미를 빠르게 구축합니다.', chatbotUserDescription: '간단한 구성으로 LLM 기반 챗봇을 빠르게 구축할 수 있습니다. 나중에 Chatflow로 전환할 수 있습니다.', - workflowShortDescription: '단일 턴 자동화 작업을 위한 오케스트레이션', + workflowShortDescription: '지능형 자동화를 위한 에이전트 플로우', agentUserDescription: '작업 목표를 달성하기 위해 반복적인 추론과 자율적인 도구를 사용할 수 있는 지능형 에이전트입니다.', - advancedUserDescription: '메모리 기능이 있는 다라운드의 복잡한 대화 작업을 위한 워크플로우 조정.', + advancedUserDescription: '메모리 기능과 챗봇 인터페이스를 갖춘 워크플로우', }, editApp: '정보 편집하기', editAppTitle: '앱 정보 편집하기', diff --git a/web/i18n/pl-PL/app.ts b/web/i18n/pl-PL/app.ts index d00bf02de1..c60b34f860 100644 --- a/web/i18n/pl-PL/app.ts +++ b/web/i18n/pl-PL/app.ts @@ -84,21 +84,21 @@ const translation = { noTemplateFound: 'Nie znaleziono szablonów', chatbotUserDescription: 'Szybko zbuduj chatbota opartego na LLM z prostą konfiguracją. Możesz przełączyć się na Chatflow później.', optional: 'Fakultatywny', - workflowUserDescription: 'Orkiestracja przepływu pracy dla zadań jednoetapowych, takich jak automatyzacja i przetwarzanie wsadowe.', + workflowUserDescription: 'Twórz autonomiczne przepływy AI wizualnie, z prostotą przeciągnij i upuść.', completionUserDescription: 'Szybko zbuduj asystenta AI do zadań generowania tekstu za pomocą prostej konfiguracji.', - forBeginners: 'DLA POCZĄTKUJĄCYCH', + forBeginners: 'Prostsze typy aplikacji', agentShortDescription: 'Inteligentny agent z rozumowaniem i autonomicznym wykorzystaniem narzędzi', completionShortDescription: 'Asystent AI do zadań generowania tekstu', noIdeaTip: 'Nie masz pomysłów? 
Sprawdź nasze szablony', forAdvanced: 'DLA ZAAWANSOWANYCH UŻYTKOWNIKÓW', foundResult: '{{liczba}} Wynik', - advancedShortDescription: 'Przepływ pracy dla złożonych, wieloetapowych dialogów z pamięcią', + advancedShortDescription: 'Przepływ ulepszony dla wieloturowych czatów', learnMore: 'Dowiedz się więcej', chatbotShortDescription: 'Chatbot oparty na LLM z prostą konfiguracją', chooseAppType: 'Wybierz typ aplikacji', agentUserDescription: 'Inteligentny agent zdolny do iteracyjnego wnioskowania i autonomicznego wykorzystania narzędzi do osiągania celów zadań.', - workflowShortDescription: 'Orkiestracja dla jednoetapowych zadań automatyzacji', - advancedUserDescription: 'Orkiestracja przepływu pracy dla wielorundowych, złożonych zadań dialogowych z funkcjami pamięci.', + workflowShortDescription: 'Agentowy przepływ dla inteligentnych automatyzacji', + advancedUserDescription: 'Przepływ z dodatkowymi funkcjami pamięci i interfejsem chatbota.', }, editApp: 'Edytuj informacje', editAppTitle: 'Edytuj informacje o aplikacji', diff --git a/web/i18n/pt-BR/app.ts b/web/i18n/pt-BR/app.ts index 4670ea41fa..9e48b72895 100644 --- a/web/i18n/pt-BR/app.ts +++ b/web/i18n/pt-BR/app.ts @@ -74,25 +74,25 @@ const translation = { appCreateDSLErrorPart2: 'Você quer continuar?', learnMore: 'Saiba Mais', optional: 'Opcional', - chooseAppType: 'Escolha o tipo de aplicativo', - forBeginners: 'PARA INICIANTES', + chooseAppType: 'Escolha um tipo de aplicativo', + forBeginners: 'Tipos de aplicativos mais básicos', noTemplateFound: 'Nenhum modelo encontrado', foundResults: '{{contagem}} Resultados', foundResult: '{{contagem}} Resultado', completionUserDescription: 'Crie rapidamente um assistente de IA para tarefas de geração de texto com configuração simples.', noIdeaTip: 'Sem ideias? Confira nossos modelos', - workflowUserDescription: 'Orquestração de fluxo de trabalho para tarefas de rodada única, como automação e processamento em lote.', + workflowUserDescription: 'Construa fluxos autônomos de IA visualmente com simplicidade de arrastar e soltar.', chatbotUserDescription: 'Crie rapidamente um chatbot baseado em LLM com configuração simples. 
Você pode alternar para o fluxo de chat mais tarde.', agentShortDescription: 'Agente inteligente com raciocínio e uso de ferramenta autônoma', forAdvanced: 'PARA USUÁRIOS AVANÇADOS', chatbotShortDescription: 'Chatbot baseado em LLM com configuração simples', - advancedUserDescription: 'Orquestração de fluxo de trabalho para tarefas de diálogo complexas de várias rodadas com recursos de memória.', + advancedUserDescription: 'Fluxo com recursos adicionais de memória e interface de chatbot.', noTemplateFoundTip: 'Tente pesquisar usando palavras-chave diferentes.', agentUserDescription: 'Um agente inteligente capaz de raciocínio iterativo e uso autônomo de ferramentas para atingir os objetivos da tarefa.', completionShortDescription: 'Assistente de IA para tarefas de geração de texto', - workflowShortDescription: 'Orquestração para tarefas de automação de turno único', + workflowShortDescription: 'Fluxo agêntico para automações inteligentes', noAppsFound: 'Nenhum aplicativo encontrado', - advancedShortDescription: 'Fluxo de trabalho para diálogos complexos de vários turnos com memória', + advancedShortDescription: 'Fluxo aprimorado para conversas de múltiplos turnos', }, editApp: 'Editar Informações', editAppTitle: 'Editar Informações do Aplicativo', diff --git a/web/i18n/ro-RO/app.ts b/web/i18n/ro-RO/app.ts index 1eccd0831b..a96c94d02f 100644 --- a/web/i18n/ro-RO/app.ts +++ b/web/i18n/ro-RO/app.ts @@ -73,26 +73,26 @@ const translation = { appCreateDSLErrorPart1: 'A fost detectată o diferență semnificativă în versiunile DSL. Forțarea importului poate cauza funcționarea defectuoasă a aplicației.', appCreateDSLErrorPart4: 'Versiune DSL suportată de sistem:', chatbotShortDescription: 'Chatbot bazat pe LLM cu configurare simplă', - forBeginners: 'PENTRU ÎNCEPĂTORI', + forBeginners: 'Tipuri de aplicații mai simple', completionShortDescription: 'Asistent AI pentru sarcini de generare de text', agentUserDescription: 'Un agent inteligent capabil de raționament iterativ și utilizare autonomă a instrumentelor pentru a atinge obiectivele sarcinii.', - workflowUserDescription: 'Orchestrarea fluxului de lucru pentru sarcini cu o singură rundă, cum ar fi automatizarea și procesarea în loturi.', + workflowUserDescription: 'Construiește vizual fluxuri AI autonome cu simplitatea drag-and-drop.', optional: 'Facultativ', learnMore: 'Află mai multe', completionUserDescription: 'Construiește rapid un asistent AI pentru sarcinile de generare a textului cu o configurare simplă.', chatbotUserDescription: 'Construiți rapid un chatbot bazat pe LLM cu o configurare simplă. Puteți trece la Chatflow mai târziu.', - advancedShortDescription: 'Flux de lucru pentru dialoguri complexe cu mai multe rotații cu memorie', - advancedUserDescription: 'Orchestrarea fluxului de lucru pentru sarcini complexe de dialog cu mai multe runde cu capacități de memorie.', + advancedShortDescription: 'Flux de lucru îmbunătățit pentru conversații multi-tur', + advancedUserDescription: 'Flux de lucru cu funcții suplimentare de memorie și interfață de chatbot.', noTemplateFoundTip: 'Încercați să căutați folosind cuvinte cheie diferite.', foundResults: '{{număr}} Rezultatele', foundResult: '{{număr}} Rezultat', noIdeaTip: 'Nicio idee? 
Consultați șabloanele noastre', noAppsFound: 'Nu s-au găsit aplicații', - workflowShortDescription: 'Orchestrare pentru sarcini de automatizare cu o singură tură', + workflowShortDescription: 'Flux agentic pentru automatizări inteligente', agentShortDescription: 'Agent inteligent cu raționament și utilizare autonomă a uneltelor', noTemplateFound: 'Nu s-au găsit șabloane', forAdvanced: 'PENTRU UTILIZATORII AVANSAȚI', - chooseAppType: 'Alegeți tipul de aplicație', + chooseAppType: 'Alegeți un tip de aplicație', }, editApp: 'Editează Info', editAppTitle: 'Editează Info Aplicație', diff --git a/web/i18n/ru-RU/app.ts b/web/i18n/ru-RU/app.ts index 300cbd36ba..609b891c5c 100644 --- a/web/i18n/ru-RU/app.ts +++ b/web/i18n/ru-RU/app.ts @@ -81,13 +81,13 @@ const translation = { foundResults: '{{Количество}} Результаты', optional: 'Необязательный', chatbotShortDescription: 'Чат-бот на основе LLM с простой настройкой', - advancedShortDescription: 'Рабочий процесс для сложных диалогов с несколькими ходами с памятью', + advancedShortDescription: 'Рабочий процесс, улучшенный для многоходовых чатов', foundResult: '{{Количество}} Результат', - workflowShortDescription: 'Оркестровка для задач автоматизации за один оборот', - advancedUserDescription: 'Оркестрация рабочих процессов для многораундовых сложных диалоговых задач с возможностями памяти.', + workflowShortDescription: 'Агентный поток для интеллектуальных автоматизаций', + advancedUserDescription: 'Рабочий процесс с дополнительными функциями памяти и интерфейсом чат-бота.', noAppsFound: 'Приложения не найдены', agentUserDescription: 'Интеллектуальный агент, способный к итеративным рассуждениям и автономному использованию инструментов для достижения целей задачи.', - forBeginners: 'ДЛЯ НАЧИНАЮЩИХ', + forBeginners: 'Более простые типы приложений', chatbotUserDescription: 'Быстро создайте чат-бота на основе LLM с простой настройкой. 
Вы можете переключиться на Chatflow позже.', noTemplateFound: 'Шаблоны не найдены', completionShortDescription: 'AI-помощник для задач генерации текста', @@ -96,7 +96,7 @@ const translation = { agentShortDescription: 'Интеллектуальный агент с рассуждениями и автономным использованием инструментов', noTemplateFoundTip: 'Попробуйте искать по разным ключевым словам.', completionUserDescription: 'Быстро создайте помощника с искусственным интеллектом для задач генерации текста с простой настройкой.', - workflowUserDescription: 'Оркестрация рабочих процессов для однораундовых задач, таких как автоматизация и пакетная обработка.', + workflowUserDescription: 'Визуально создавайте автономные ИИ-процессы простым перетаскиванием.', }, editApp: 'Редактировать информацию', editAppTitle: 'Редактировать информацию о приложении', diff --git a/web/i18n/sl-SI/app.ts b/web/i18n/sl-SI/app.ts index b700f39f53..f988114acd 100644 --- a/web/i18n/sl-SI/app.ts +++ b/web/i18n/sl-SI/app.ts @@ -76,24 +76,24 @@ const translation = { appCreateDSLErrorPart4: 'Sistemsko podprta različica DSL:', appCreateDSLWarning: 'Pozor: Razlika v različici DSL lahko vpliva na nekatere funkcije', appCreateDSLErrorPart2: 'Želite nadaljevati?', - advancedShortDescription: 'Potek dela za zapletene dialoge z več obrati s pomnilnikom', + advancedShortDescription: 'Potek dela izboljšan za večkratne pogovore', noAppsFound: 'Ni bilo najdenih aplikacij', agentShortDescription: 'Inteligentni agent z razmišljanjem in avtonomno uporabo orodij', foundResult: '{{štetje}} Rezultat', foundResults: '{{štetje}} Rezultati', noTemplateFoundTip: 'Poskusite iskati z različnimi ključnimi besedami.', optional: 'Neobvezno', - forBeginners: 'ZA ZAČETNIKE', + forBeginners: 'Bolj osnovne vrste aplikacij', forAdvanced: 'ZA NAPREDNE UPORABNIKE', noIdeaTip: 'Nimate idej? Oglejte si naše predloge', agentUserDescription: 'Inteligentni agent, ki je sposoben iterativnega sklepanja in avtonomne uporabe orodij za doseganje ciljev nalog.', completionShortDescription: 'Pomočnik AI za naloge generiranja besedila', chatbotUserDescription: 'Hitro zgradite chatbota, ki temelji na LLM, s preprosto konfiguracijo. 
Na Chatflow lahko preklopite pozneje.', completionUserDescription: 'Hitro ustvarite pomočnika AI za naloge ustvarjanja besedila s preprosto konfiguracijo.', - advancedUserDescription: 'Orkestracija poteka dela za večkrožne zapletene dialogske naloge s pomnilniškimi zmogljivostmi.', - workflowUserDescription: 'Orkestracija poteka dela za enojna opravila, kot sta avtomatizacija in paketna obdelava.', + advancedUserDescription: 'Potek dela z dodatnimi funkcijami spomina in vmesnikom za klepetanje.', + workflowUserDescription: 'Vizualno ustvarjajte avtonomne AI poteke s preprostim vlečenjem in spuščanjem.', noTemplateFound: 'Predloge niso bile najdene', - workflowShortDescription: 'Orkestracija za opravila avtomatizacije z enim obratom', + workflowShortDescription: 'Agentni tok za inteligentne avtomatizacije', chatbotShortDescription: 'Chatbot, ki temelji na LLM, s preprosto nastavitvijo', chooseAppType: 'Izberite vrsto aplikacije', learnMore: 'Izvedi več', diff --git a/web/i18n/th-TH/app.ts b/web/i18n/th-TH/app.ts index f7ddbc41eb..f4999e9aff 100644 --- a/web/i18n/th-TH/app.ts +++ b/web/i18n/th-TH/app.ts @@ -77,22 +77,22 @@ const translation = { noTemplateFoundTip: 'ลองค้นหาโดยใช้คีย์เวิร์ดอื่น', chatbotShortDescription: 'แชทบอทที่ใช้ LLM พร้อมการตั้งค่าที่ง่ายดาย', optional: 'เสริม', - workflowUserDescription: 'การประสานเวิร์กโฟลว์สําหรับงานรอบเดียว เช่น ระบบอัตโนมัติและการประมวลผลแบบแบตช์', + workflowUserDescription: 'สร้างโฟลว์ AI อัตโนมัติด้วยระบบลากและวางอย่างง่าย', agentShortDescription: 'ตัวแทนอัจฉริยะพร้อมการใช้เหตุผลและเครื่องมืออัตโนมัติ', - forBeginners: 'สําหรับผู้เริ่มต้น', + forBeginners: 'ประเภทแอปพื้นฐาน', completionShortDescription: 'ผู้ช่วย AI สําหรับงานสร้างข้อความ', agentUserDescription: 'ตัวแทนอัจฉริยะที่สามารถให้เหตุผลซ้ําๆ และใช้เครื่องมืออัตโนมัติเพื่อให้บรรลุเป้าหมายของงาน', noIdeaTip: 'ไม่มีความคิด? 
ดูเทมเพลตของเรา', foundResult: '{{นับ}} ผล', noAppsFound: 'ไม่พบแอป', - workflowShortDescription: 'การประสานงานสําหรับงานอัตโนมัติแบบเทิร์นเดียว', + workflowShortDescription: 'โฟลว์อัตโนมัติสำหรับระบบอัจฉริยะ', forAdvanced: 'สําหรับผู้ใช้ขั้นสูง', chatbotUserDescription: 'สร้างแชทบอทที่ใช้ LLM ได้อย่างรวดเร็วด้วยการกําหนดค่าที่ง่าย คุณสามารถเปลี่ยนไปใช้ Chatflow ได้ในภายหลัง', noTemplateFound: 'ไม่พบเทมเพลต', completionUserDescription: 'สร้างผู้ช่วย AI สําหรับงานสร้างข้อความอย่างรวดเร็วด้วยการกําหนดค่าที่ง่าย', - advancedUserDescription: 'การประสานเวิร์กโฟลว์สําหรับงานบทสนทนาที่ซับซ้อนหลายรอบพร้อมความสามารถของหน่วยความจํา', + advancedUserDescription: 'โฟลว์พร้อมคุณสมบัติหน่วยความจำเพิ่มเติมและอินเตอร์เฟซแชทบอท', chooseAppType: 'เลือกประเภทแอป', - advancedShortDescription: 'เวิร์กโฟลว์สําหรับบทสนทนาหลายรอบที่ซับซ้อนพร้อมหน่วยความจํา', + advancedShortDescription: 'โฟลว์ที่เสริมประสิทธิภาพสำหรับการสนทนาหลายรอบ', }, editApp: 'แก้ไขข้อมูล', editAppTitle: 'แก้ไขข้อมูลโปรเจกต์', diff --git a/web/i18n/th-TH/dataset-documents.ts b/web/i18n/th-TH/dataset-documents.ts index 2f4c6d5c9c..91d04d6bc1 100644 --- a/web/i18n/th-TH/dataset-documents.ts +++ b/web/i18n/th-TH/dataset-documents.ts @@ -50,7 +50,7 @@ const translation = { empty: { title: 'ยังไม่มีเอกสาร', upload: { - tip: 'คุณสามารถอัปโหลดไฟล์ ซิงค์จากเว็บไซต์ หรือจากแอป webb เช่น Notion, GitHub เป็นต้น', + tip: 'คุณสามารถอัปโหลดไฟล์ ซิงค์จากเว็บไซต์ หรือจากแอป web เช่น Notion, GitHub เป็นต้น', }, sync: { tip: 'Dify จะดาวน์โหลดไฟล์จาก Notion ของคุณเป็นระยะและดําเนินการให้เสร็จสมบูรณ์', diff --git a/web/i18n/tr-TR/app.ts b/web/i18n/tr-TR/app.ts index f963044dea..0dbc52bf36 100644 --- a/web/i18n/tr-TR/app.ts +++ b/web/i18n/tr-TR/app.ts @@ -78,19 +78,19 @@ const translation = { optional: 'Opsiyonel', foundResult: '{{sayı}} Sonuç', noTemplateFound: 'Şablon bulunamadı', - workflowUserDescription: 'Otomasyon ve toplu işleme gibi tek turlu görevler için iş akışı düzenlemesi.', - advancedUserDescription: 'Bellek özelliklerine sahip çok yönlü karmaşık diyalog görevleri için iş akışı orkestrasyonu.', + workflowUserDescription: 'Sürükle-bırak kolaylığıyla görsel olarak otonom yapay zeka iş akışları oluşturun.', + advancedUserDescription: 'Ek bellek özellikleri ve sohbet robotu arayüzü ile iş akışı.', completionShortDescription: 'Metin oluşturma görevleri için yapay zeka asistanı', noTemplateFoundTip: 'Farklı anahtar kelimeler kullanarak arama yapmayı deneyin.', learnMore: 'Daha fazla bilgi edinin', agentShortDescription: 'Akıl yürütme ve otonom araç kullanımına sahip akıllı ajan', - forBeginners: 'YENI BAŞLAYANLAR IÇIN', - workflowShortDescription: 'Tek dönüşlü otomasyon görevleri için orkestrasyon', + forBeginners: 'Daha temel uygulama türleri', + workflowShortDescription: 'Akıllı otomasyonlar için ajantik akış', agentUserDescription: 'Görev hedeflerine ulaşmak için yinelemeli akıl yürütme ve otonom araç kullanımı yeteneğine sahip akıllı bir ajan.', chooseAppType: 'Uygulama Türünü Seçin', completionUserDescription: 'Basit yapılandırmayla metin oluşturma görevleri için hızlı bir şekilde bir yapay zeka asistanı oluşturun.', chatbotShortDescription: 'Basit kurulumlu LLM tabanlı sohbet robotu', - advancedShortDescription: 'Hafızalı karmaşık çok dönüşlü diyaloglar için iş akışı', + advancedShortDescription: 'Çok turlu sohbetler için geliştirilmiş iş akışı', noIdeaTip: 'Fikriniz yok mu? 
Şablonlarımıza göz atın', forAdvanced: 'İLERI DÜZEY KULLANICILAR IÇIN', }, diff --git a/web/i18n/uk-UA/app.ts b/web/i18n/uk-UA/app.ts index a90fcd9a3a..a3834aa32b 100644 --- a/web/i18n/uk-UA/app.ts +++ b/web/i18n/uk-UA/app.ts @@ -72,7 +72,7 @@ const translation = { appCreateDSLErrorTitle: 'Несумісність версій', appCreateDSLErrorPart1: 'Виявлено суттєву різницю у версіях DSL. Примусовий імпорт може призвести до неправильної роботи програми.', appCreateDSLWarning: 'Увага: різниця у версіях DSL може вплинути на певні функції', - chooseAppType: 'Виберіть тип програми', + chooseAppType: 'Оберіть тип додатку', noIdeaTip: 'Немає ідей? Перегляньте наші шаблони', noTemplateFoundTip: 'Спробуйте шукати за різними ключовими словами.', foundResult: '{{count}} Результат', @@ -82,17 +82,17 @@ const translation = { forAdvanced: 'ДЛЯ ДОСВІДЧЕНИХ КОРИСТУВАЧІВ', noTemplateFound: 'Не знайдено шаблонів', agentUserDescription: 'Інтелектуальний агент, здатний до ітеративного міркування і автономного використання інструменту для досягнення поставлених цілей.', - advancedUserDescription: 'Оркестрація робочих процесів для багатораундових складних діалогових завдань з можливостями пам\'яті.', + advancedUserDescription: 'Робочий процес з функціями пам\'яті та інтерфейсом чат-бота.', agentShortDescription: 'Інтелектуальний агент з міркуваннями та автономним використанням інструментів', noAppsFound: 'Не знайдено додатків', - forBeginners: 'ДЛЯ ПОЧАТКІВЦІВ', - workflowShortDescription: 'Оркестрування для однотактних завдань автоматизації', + forBeginners: 'Простіші типи додатків', + workflowShortDescription: 'Агентський потік для інтелектуальних автоматизацій', learnMore: 'Дізнатися більше', chatbotUserDescription: 'Швидко створюйте чат-бота на базі LLM за допомогою простої конфігурації. 
Ви можете переключитися на Chatflow пізніше.',
chatbotShortDescription: 'Чат-бот на базі LLM з простим налаштуванням',
- advancedShortDescription: 'Робочий процес для складних багатоходових діалогів з пам\'яттю',
+ advancedShortDescription: 'Робочий процес, вдосконалений для багатоетапних чатів',
completionUserDescription: 'Швидко створюйте помічника зі штучним інтелектом для завдань із генерації тексту за допомогою простої конфігурації.',
- workflowUserDescription: 'Оркестрація робочих процесів для однокомпонентних завдань, таких як автоматизація та пакетна обробка.',
+ workflowUserDescription: 'Візуально створюйте автономні ШІ-процеси з простотою перетягування.',
},
editApp: 'Редагувати інформацію',
editAppTitle: 'Редагувати інформацію про додаток',
diff --git a/web/i18n/vi-VN/app.ts b/web/i18n/vi-VN/app.ts
index 142bf8bb89..c01c00c45c 100644
--- a/web/i18n/vi-VN/app.ts
+++ b/web/i18n/vi-VN/app.ts
@@ -72,22 +72,22 @@ const translation = {
appCreateDSLErrorPart3: 'Phiên bản DSL ứng dụng hiện tại:',
appCreateDSLWarning: 'Phạt cảnh cáo: Sự khác biệt về phiên bản DSL có thể ảnh hưởng đến một số tính năng nhất định',
appCreateDSLErrorPart4: 'Phiên bản DSL được hệ thống hỗ trợ:',
- forBeginners: 'DÀNH CHO NGƯỜI MỚI BẮT ĐẦU',
+ forBeginners: 'Các loại ứng dụng cơ bản hơn',
chooseAppType: 'Chọn loại ứng dụng',
chatbotShortDescription: 'Chatbot dựa trên LLM với thiết lập đơn giản',
noTemplateFoundTip: 'Hãy thử tìm kiếm bằng các từ khóa khác nhau.',
- workflowShortDescription: 'Điều phối cho các tác vụ tự động hóa một lượt',
+ workflowShortDescription: 'Luồng tác nhân cho tự động hóa thông minh',
optional: 'Tùy chọn',
- advancedShortDescription: 'Quy trình làm việc cho các cuộc đối thoại nhiều lượt phức tạp với bộ nhớ',
- workflowUserDescription: 'Điều phối quy trình làm việc cho các tác vụ một vòng như tự động hóa và xử lý hàng loạt.',
+ advancedShortDescription: 'Quy trình nâng cao cho hội thoại nhiều lượt',
+ workflowUserDescription: 'Xây dựng trực quan quy trình AI tự động bằng kéo thả đơn giản.',
foundResults: '{{đếm}} Kết quả',
chatbotUserDescription: 'Nhanh chóng xây dựng chatbot dựa trên LLM với cấu hình đơn giản. Bạn có thể chuyển sang Chatflow sau.',
agentUserDescription: 'Một tác nhân thông minh có khả năng suy luận lặp đi lặp lại và sử dụng công cụ tự động để đạt được mục tiêu nhiệm vụ.',
noIdeaTip: 'Không có ý tưởng? Kiểm tra các mẫu của chúng tôi',
- advancedUserDescription: 'Điều phối quy trình làm việc cho các tác vụ đối thoại phức tạp nhiều vòng với khả năng bộ nhớ.',
+ advancedUserDescription: 'Quy trình với tính năng bộ nhớ bổ sung và giao diện chatbot.',
forAdvanced: 'DÀNH CHO NGƯỜI DÙNG NÂNG CAO',
foundResult: '{{đếm}} Kết quả',
agentShortDescription: 'Tác nhân thông minh với lý luận và sử dụng công cụ tự động',
noTemplateFound: 'Không tìm thấy mẫu',
noAppsFound: 'Không tìm thấy ứng dụng nào',
learnMore: 'Tìm hiểu thêm',
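Returning to the `github-star/index.tsx` change above: it swaps `initialData` for `placeholderData`, disables retries, imports `RiLoader2Line` for the fetching state, and falls back to a hard-coded star count when the GitHub API call fails. A minimal sketch of the same React Query pattern as a reusable hook (the hook name is illustrative and the fallback number simply mirrors the diff's `defaultData`):

```ts
import { useQuery } from '@tanstack/react-query'

const FALLBACK_STARS = 98570 // shown when the GitHub API is unreachable

const fetchStars = async (): Promise<number> => {
  const res = await fetch('https://api.github.com/repos/langgenius/dify')
  if (!res.ok)
    throw new Error('Failed to fetch GitHub repo info')
  const repo = await res.json()
  return repo.stargazers_count as number
}

export const useGithubStars = () => {
  const { data, isFetching, isError } = useQuery({
    queryKey: ['github-star'],
    queryFn: fetchStars,
    retry: false, // fail fast instead of retrying a public, rate-limited endpoint
    placeholderData: FALLBACK_STARS, // placeholder data, unlike initialData, is not written to the cache
  })
  // Always resolve to a number the UI can render.
  return {
    isFetching,
    stars: isError ? FALLBACK_STARS : (data ?? FALLBACK_STARS),
  }
}
```

Because `placeholderData` is not persisted to the query cache, a later successful fetch still replaces the fallback count with the live value.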