From 1abf00e443ce2c273c2716272e1f2d7470e40fd3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E6=AD=A5=E6=B5=AA?= <1107842278@qq.com>
Date: Tue, 6 May 2025 15:11:08 +0800
Subject: [PATCH 1/6] Fix doc bug workflow (#19269)
Co-authored-by: liuwangwang
---
.../components/develop/template/template_workflow.en.mdx | 9 ++++++++-
.../components/develop/template/template_workflow.ja.mdx | 9 ++++++++-
.../components/develop/template/template_workflow.zh.mdx | 9 ++++++++-
3 files changed, 24 insertions(+), 3 deletions(-)
diff --git a/web/app/components/develop/template/template_workflow.en.mdx b/web/app/components/develop/template/template_workflow.en.mdx
index 0f15c4406b..556b306a64 100644
--- a/web/app/components/develop/template/template_workflow.en.mdx
+++ b/web/app/components/develop/template/template_workflow.en.mdx
@@ -90,7 +90,7 @@ Workflow applications offers non-session support and is ideal for translation, a
Each streaming chunk starts with `data:`, separated by two newline characters `\n\n`, as shown below:
```streaming {{ title: 'Response' }}
- data: {"event": "message", "task_id": "900bbd43-dc0b-4383-a372-aa6e6c414227", "id": "663c5084-a254-4040-8ad3-51f2a3c1a77c", "answer": "Hi", "created_at": 1705398420}\n\n
+ data: {"event": "text_chunk", "workflow_run_id": "b85e5fc5-751b-454d-b14e-dc5f240b0a31", "task_id": "bd029338-b068-4d34-a331-fc85478922c2", "data": {"text": "\u4e3a\u4e86", "from_variable_selector": ["1745912968134", "text"]}}\n\n
```
The structure of the streaming chunks varies depending on the `event`:
@@ -116,6 +116,13 @@ Workflow applications offers non-session support and is ideal for translation, a
- `predecessor_node_id` (string) optional Prefix node ID, used for canvas display execution path
- `inputs` (object) Contents of all preceding node variables used in the node
- `created_at` (timestamp) timestamp of start, e.g., 1705395332
+ - `event: text_chunk` Text fragment
+ - `task_id` (string) Task ID, used for request tracking and the below Stop Generate API
+ - `workflow_run_id` (string) Unique ID of workflow execution
+ - `event` (string) fixed to `text_chunk`
+ - `data` (object) detail
+ - `text` (string) Text content
+ - `from_variable_selector` (array) Text source path, helping developers understand which node and variable generated the text
- `event: node_finished` node execution ends, success or failure in different states in the same event
- `task_id` (string) Task ID, used for request tracking and the below Stop Generate API
- `workflow_run_id` (string) Unique ID of workflow execution
diff --git a/web/app/components/develop/template/template_workflow.ja.mdx b/web/app/components/develop/template/template_workflow.ja.mdx
index 0239b40224..3f33be58b9 100644
--- a/web/app/components/develop/template/template_workflow.ja.mdx
+++ b/web/app/components/develop/template/template_workflow.ja.mdx
@@ -93,7 +93,7 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
各ストリーミングチャンクは`data:`で始まり、2つの改行文字`\n\n`で区切られます。以下のように表示されます:
```streaming {{ title: '応答' }}
- data: {"event": "message", "task_id": "900bbd43-dc0b-4383-a372-aa6e6c414227", "id": "663c5084-a254-4040-8ad3-51f2a3c1a77c", "answer": "Hi", "created_at": 1705398420}\n\n
+ data: {"event": "text_chunk", "workflow_run_id": "b85e5fc5-751b-454d-b14e-dc5f240b0a31", "task_id": "bd029338-b068-4d34-a331-fc85478922c2", "data": {"text": "\u4e3a\u4e86", "from_variable_selector": ["1745912968134", "text"]}}\n\n
```
ストリーミングチャンクの構造は`event`に応じて異なります:
@@ -119,6 +119,13 @@ import { Row, Col, Properties, Property, Heading, SubProperty, Paragraph } from
- `predecessor_node_id` (string) オプションのプレフィックスノードID、キャンバス表示実行パスに使用
- `inputs` (object) ノードで使用されるすべての前のノード変数の内容
- `created_at` (timestamp) 開始のタイムスタンプ、例:1705395332
+ - `event: text_chunk` テキストフラグメント
+ - `task_id` (string) タスクID、リクエスト追跡と以下のStop Generate APIに使用
+ - `workflow_run_id` (string) ワークフロー実行の一意のID
+ - `event` (string) `text_chunk`に固定
+ - `data` (object) 詳細
+ - `text` (string) テキスト内容
+ - `from_variable_selector` (array) テキスト生成元パス(開発者がどのノードのどの変数から生成されたかを理解するための情報)
- `event: node_finished` ノード実行終了、同じイベントで異なる状態で成功または失敗
- `task_id` (string) タスクID、リクエスト追跡と以下のStop Generate APIに使用
- `workflow_run_id` (string) ワークフロー実行の一意のID
diff --git a/web/app/components/develop/template/template_workflow.zh.mdx b/web/app/components/develop/template/template_workflow.zh.mdx
index 939df2703d..b032407ee9 100644
--- a/web/app/components/develop/template/template_workflow.zh.mdx
+++ b/web/app/components/develop/template/template_workflow.zh.mdx
@@ -87,7 +87,7 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
每个流式块均为 data: 开头,块之间以 `\n\n` 即两个换行符分隔,如下所示:
```streaming {{ title: 'Response' }}
- data: {"event": "message", "task_id": "900bbd43-dc0b-4383-a372-aa6e6c414227", "id": "663c5084-a254-4040-8ad3-51f2a3c1a77c", "answer": "Hi", "created_at": 1705398420}\n\n
+ data: {"event": "text_chunk", "workflow_run_id": "b85e5fc5-751b-454d-b14e-dc5f240b0a31", "task_id": "bd029338-b068-4d34-a331-fc85478922c2", "data": {"text": "\u4e3a\u4e86", "from_variable_selector": ["1745912968134", "text"]}}\n\n
```
流式块中根据 `event` 不同,结构也不同,包含以下类型:
@@ -113,6 +113,13 @@ Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等
- `predecessor_node_id` (string) 前置节点 ID,用于画布展示执行路径
- `inputs` (object) 节点中所有使用到的前置节点变量内容
- `created_at` (timestamp) 开始时间
+ - `event: text_chunk` 文本片段
+ - `task_id` (string) 任务 ID,用于请求跟踪和下方的停止响应接口
+ - `workflow_run_id` (string) workflow 执行 ID
+ - `event` (string) 固定为 `text_chunk`
+ - `data` (object) 详细内容
+ - `text` (string) 文本内容
+ - `from_variable_selector` (array) 文本来源路径,帮助开发者了解文本是由哪个节点的哪个变量生成的
- `event: node_finished` node 执行结束,成功失败同一事件中不同状态
- `task_id` (string) 任务 ID,用于请求跟踪和下方的停止响应接口
- `workflow_run_id` (string) workflow 执行 ID
From 3ecc1e0228594ba8fcc21e5148ed9708877af59d Mon Sep 17 00:00:00 2001
From: AllenWriter
Date: Tue, 6 May 2025 17:02:01 +0800
Subject: [PATCH 2/6] Fix: update docs link (#19278)
---
docker/docker-compose-template.yaml | 2 +-
docker/docker-compose.middleware.yaml | 2 +-
docker/docker-compose.yaml | 2 +-
.../(datasetDetailLayout)/[datasetId]/layout-main.tsx | 2 +-
.../config-prompt/conversation-history/history-panel.tsx | 4 ++--
.../configuration/prompt-mode/advanced-mode-waring.tsx | 2 +-
web/app/components/app/create-app-modal/index.tsx | 8 ++++----
web/app/components/app/overview/customize/index.tsx | 2 +-
web/app/components/app/overview/settings/index.tsx | 2 +-
web/app/components/datasets/documents/index.tsx | 4 ++--
.../datasets/external-knowledge-base/create/InfoPanel.tsx | 4 ++--
.../datasets/external-knowledge-base/create/index.tsx | 2 +-
web/app/components/tools/provider/custom-create-card.tsx | 2 +-
.../workflow/nodes/_base/components/agent-strategy.tsx | 2 +-
.../nodes/_base/components/error-handle/default-value.tsx | 2 +-
.../workflow/nodes/_base/hooks/use-node-help-link.ts | 2 +-
.../json-schema-config-modal/json-schema-config.tsx | 2 +-
web/app/signin/oneMoreStep.tsx | 2 +-
web/i18n/de-DE/common.ts | 2 +-
web/i18n/de-DE/dataset-creation.ts | 2 +-
web/i18n/en-US/common.ts | 2 +-
web/i18n/en-US/dataset-creation.ts | 4 ++--
web/i18n/es-ES/common.ts | 2 +-
web/i18n/es-ES/dataset-creation.ts | 2 +-
web/i18n/fa-IR/common.ts | 2 +-
web/i18n/fa-IR/dataset-creation.ts | 2 +-
web/i18n/fr-FR/dataset-creation.ts | 2 +-
web/i18n/hi-IN/common.ts | 2 +-
web/i18n/it-IT/common.ts | 2 +-
web/i18n/ja-JP/common.ts | 2 +-
web/i18n/ja-JP/dataset-creation.ts | 2 +-
web/i18n/ko-KR/common.ts | 2 +-
web/i18n/ko-KR/dataset-creation.ts | 2 +-
web/i18n/pl-PL/common.ts | 2 +-
web/i18n/pl-PL/dataset-creation.ts | 2 +-
web/i18n/pt-BR/common.ts | 2 +-
web/i18n/pt-BR/dataset-creation.ts | 2 +-
web/i18n/ro-RO/common.ts | 2 +-
web/i18n/ro-RO/dataset-creation.ts | 2 +-
web/i18n/ru-RU/common.ts | 2 +-
web/i18n/ru-RU/dataset-creation.ts | 2 +-
web/i18n/sl-SI/common.ts | 4 ++--
web/i18n/sl-SI/dataset-creation.ts | 2 +-
web/i18n/th-TH/common.ts | 2 +-
web/i18n/th-TH/dataset-creation.ts | 2 +-
web/i18n/tr-TR/common.ts | 2 +-
web/i18n/tr-TR/dataset-creation.ts | 2 +-
web/i18n/uk-UA/common.ts | 2 +-
web/i18n/uk-UA/dataset-creation.ts | 2 +-
web/i18n/vi-VN/common.ts | 2 +-
web/i18n/vi-VN/dataset-creation.ts | 2 +-
web/i18n/zh-Hans/common.ts | 2 +-
web/i18n/zh-Hans/dataset-creation.ts | 2 +-
web/i18n/zh-Hant/common.ts | 2 +-
web/i18n/zh-Hant/dataset-creation.ts | 2 +-
55 files changed, 63 insertions(+), 63 deletions(-)
diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml
index bfbfe6c19a..487d358b24 100644
--- a/docker/docker-compose-template.yaml
+++ b/docker/docker-compose-template.yaml
@@ -188,7 +188,7 @@ services:
# ssrf_proxy server
# for more information, please refer to
- # https://docs.dify.ai/learn-more/faq/install-faq#id-18.-why-is-ssrf_proxy-needed
+ # https://docs.dify.ai/learn-more/faq/install-faq#18-why-is-ssrf-proxy-needed%3F
ssrf_proxy:
image: ubuntu/squid:latest
restart: always
diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml
index 01c7573a95..498390b708 100644
--- a/docker/docker-compose.middleware.yaml
+++ b/docker/docker-compose.middleware.yaml
@@ -123,7 +123,7 @@ services:
# ssrf_proxy server
# for more information, please refer to
- # https://docs.dify.ai/learn-more/faq/install-faq#id-18.-why-is-ssrf_proxy-needed
+ # https://docs.dify.ai/learn-more/faq/install-faq#18-why-is-ssrf-proxy-needed%3F
ssrf_proxy:
image: ubuntu/squid:latest
restart: always
diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml
index 3ed0f60e96..aee36b3986 100644
--- a/docker/docker-compose.yaml
+++ b/docker/docker-compose.yaml
@@ -677,7 +677,7 @@ services:
# ssrf_proxy server
# for more information, please refer to
- # https://docs.dify.ai/learn-more/faq/install-faq#id-18.-why-is-ssrf_proxy-needed
+ # https://docs.dify.ai/learn-more/faq/install-faq#18-why-is-ssrf-proxy-needed%3F
ssrf_proxy:
image: ubuntu/squid:latest
restart: always
diff --git a/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/layout-main.tsx b/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/layout-main.tsx
index 078c7ebd8c..5619b1e445 100644
--- a/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/layout-main.tsx
+++ b/web/app/(commonLayout)/datasets/(datasetDetailLayout)/[datasetId]/layout-main.tsx
@@ -98,7 +98,7 @@ const ExtraInfo = ({ isMobile, relatedApps, expand }: IExtraInfoProps) => {
className='mt-2 inline-flex cursor-pointer items-center text-xs text-text-accent'
href={
locale === LanguagesSupported[1]
- ? 'https://docs.dify.ai/v/zh-hans/guides/knowledge-base/integrate-knowledge-within-application'
+ ? 'https://docs.dify.ai/zh-hans/guides/knowledge-base/integrate-knowledge-within-application'
: 'https://docs.dify.ai/guides/knowledge-base/integrate-knowledge-within-application'
}
target='_blank' rel='noopener noreferrer'
diff --git a/web/app/components/app/configuration/config-prompt/conversation-history/history-panel.tsx b/web/app/components/app/configuration/config-prompt/conversation-history/history-panel.tsx
index 78e8d8f506..592c95261c 100644
--- a/web/app/components/app/configuration/config-prompt/conversation-history/history-panel.tsx
+++ b/web/app/components/app/configuration/config-prompt/conversation-history/history-panel.tsx
@@ -46,8 +46,8 @@ const HistoryPanel: FC = ({
{t('appDebug.feature.conversationHistory.tip')}
{t('appDebug.feature.conversationHistory.learnMore')}
diff --git a/web/app/components/app/configuration/prompt-mode/advanced-mode-waring.tsx b/web/app/components/app/configuration/prompt-mode/advanced-mode-waring.tsx
index d1cd25e80a..cca775c86e 100644
--- a/web/app/components/app/configuration/prompt-mode/advanced-mode-waring.tsx
+++ b/web/app/components/app/configuration/prompt-mode/advanced-mode-waring.tsx
@@ -25,7 +25,7 @@ const AdvancedModeWarning: FC
= ({
{t('appDebug.promptMode.advancedWarning.description')}
{t('appDebug.promptMode.advancedWarning.learnMore')}
diff --git a/web/app/components/app/create-app-modal/index.tsx b/web/app/components/app/create-app-modal/index.tsx
index 88bccc95af..6e05bb0209 100644
--- a/web/app/components/app/create-app-modal/index.tsx
+++ b/web/app/components/app/create-app-modal/index.tsx
@@ -310,17 +310,17 @@ function AppPreview({ mode }: { mode: AppMode }) {
'chat': {
title: t('app.types.chatbot'),
description: t('app.newApp.chatbotUserDescription'),
- link: 'https://docs.dify.ai/guides/application-orchestrate#application_type',
+ link: 'https://docs.dify.ai/guides/application-orchestrate/readme',
},
'advanced-chat': {
title: t('app.types.advanced'),
description: t('app.newApp.advancedUserDescription'),
- link: 'https://docs.dify.ai/guides/workflow',
+ link: 'https://docs.dify.ai/en/guides/workflow/README',
},
'agent-chat': {
title: t('app.types.agent'),
description: t('app.newApp.agentUserDescription'),
- link: 'https://docs.dify.ai/guides/application-orchestrate/agent',
+ link: 'https://docs.dify.ai/en/guides/application-orchestrate/agent',
},
'completion': {
title: t('app.newApp.completeApp'),
@@ -330,7 +330,7 @@ function AppPreview({ mode }: { mode: AppMode }) {
'workflow': {
title: t('app.types.workflow'),
description: t('app.newApp.workflowUserDescription'),
- link: 'https://docs.dify.ai/guides/workflow',
+ link: 'https://docs.dify.ai/en/guides/workflow/README',
},
}
const previewInfo = modeToPreviewInfoMap[mode]
diff --git a/web/app/components/app/overview/customize/index.tsx b/web/app/components/app/overview/customize/index.tsx
index 9e0fadd87d..4e84dd8b1f 100644
--- a/web/app/components/app/overview/customize/index.tsx
+++ b/web/app/components/app/overview/customize/index.tsx
@@ -103,7 +103,7 @@ const CustomizeModal: FC = ({
window.open(
`https://docs.dify.ai/${locale !== LanguagesSupported[1]
? 'user-guide/launching-dify-apps/developing-with-apis'
- : `v/${locale.toLowerCase()}/guides/application-publishing/developing-with-apis`
+ : `${locale.toLowerCase()}/guides/application-publishing/developing-with-apis`
}`,
'_blank',
)
diff --git a/web/app/components/app/overview/settings/index.tsx b/web/app/components/app/overview/settings/index.tsx
index 679d616e54..c0a5fdf757 100644
--- a/web/app/components/app/overview/settings/index.tsx
+++ b/web/app/components/app/overview/settings/index.tsx
@@ -241,7 +241,7 @@ const SettingsModal: FC = ({
{t(`${prefixSettings}.modalTip`)}
- {t('common.operation.learnMore')}
+ {t('common.operation.learnMore')}
{/* form body */}
diff --git a/web/app/components/datasets/documents/index.tsx b/web/app/components/datasets/documents/index.tsx
index 854c984559..32980eea1e 100644
--- a/web/app/components/datasets/documents/index.tsx
+++ b/web/app/components/datasets/documents/index.tsx
@@ -264,8 +264,8 @@ const Documents: FC = ({ datasetId }) => {
target='_blank'
href={
locale === LanguagesSupported[1]
- ? 'https://docs.dify.ai/v/zh-hans/guides/knowledge-base/integrate-knowledge-within-application'
- : 'https://docs.dify.ai/guides/knowledge-base/integrate-knowledge-within-application'
+ ? 'https://docs.dify.ai/zh-hans/guides/knowledge-base/integrate-knowledge-within-application'
+ : 'https://docs.dify.ai/en/guides/knowledge-base/integrate-knowledge-within-application'
}
>
{t('datasetDocuments.list.learnMore')}
diff --git a/web/app/components/datasets/external-knowledge-base/create/InfoPanel.tsx b/web/app/components/datasets/external-knowledge-base/create/InfoPanel.tsx
index 33386940c4..5c3c1261b6 100644
--- a/web/app/components/datasets/external-knowledge-base/create/InfoPanel.tsx
+++ b/web/app/components/datasets/external-knowledge-base/create/InfoPanel.tsx
@@ -16,12 +16,12 @@ const InfoPanel = () => {
{t('dataset.connectDatasetIntro.content.front')}
-
+
{t('dataset.connectDatasetIntro.content.link')}
{t('dataset.connectDatasetIntro.content.end')}
-
+
{t('dataset.connectDatasetIntro.learnMore')}
diff --git a/web/app/components/datasets/external-knowledge-base/create/index.tsx b/web/app/components/datasets/external-knowledge-base/create/index.tsx
index 6cbfc05ca7..5fbddea06b 100644
--- a/web/app/components/datasets/external-knowledge-base/create/index.tsx
+++ b/web/app/components/datasets/external-knowledge-base/create/index.tsx
@@ -59,7 +59,7 @@ const ExternalKnowledgeBaseCreate: React.FC =
{t('dataset.connectHelper.helper1')}
{t('dataset.connectHelper.helper2')}
{t('dataset.connectHelper.helper3')}
-
+
{t('dataset.connectHelper.helper4')}
{t('dataset.connectHelper.helper5')}
diff --git a/web/app/components/tools/provider/custom-create-card.tsx b/web/app/components/tools/provider/custom-create-card.tsx
index 87c30b9ddb..6dd268cb3a 100644
--- a/web/app/components/tools/provider/custom-create-card.tsx
+++ b/web/app/components/tools/provider/custom-create-card.tsx
@@ -28,7 +28,7 @@ const Contribute = ({ onRefreshData }: Props) => {
const linkUrl = useMemo(() => {
if (language.startsWith('zh_'))
return 'https://docs.dify.ai/zh-hans/guides/tools#ru-he-chuang-jian-zi-ding-yi-gong-ju'
- return 'https://docs.dify.ai/guides/tools#how-to-create-custom-tools'
+ return 'https://docs.dify.ai/en/guides/tools#how-to-create-custom-tools'
}, [language])
const [isShowEditCollectionToolModal, setIsShowEditCustomCollectionModal] = useState(false)
diff --git a/web/app/components/workflow/nodes/_base/components/agent-strategy.tsx b/web/app/components/workflow/nodes/_base/components/agent-strategy.tsx
index d67b7af1a4..1e9612b7c7 100644
--- a/web/app/components/workflow/nodes/_base/components/agent-strategy.tsx
+++ b/web/app/components/workflow/nodes/_base/components/agent-strategy.tsx
@@ -223,7 +223,7 @@ export const AgentStrategy = memo((props: AgentStrategyProps) => {
{t('workflow.nodes.agent.learnMore')}
diff --git a/web/app/components/workflow/nodes/_base/components/error-handle/default-value.tsx b/web/app/components/workflow/nodes/_base/components/error-handle/default-value.tsx
index fa2d50f2c2..51969f8510 100644
--- a/web/app/components/workflow/nodes/_base/components/error-handle/default-value.tsx
+++ b/web/app/components/workflow/nodes/_base/components/error-handle/default-value.tsx
@@ -34,7 +34,7 @@ const DefaultValue = ({
{t('workflow.nodes.common.errorHandle.defaultValue.desc')}
diff --git a/web/app/components/workflow/nodes/_base/hooks/use-node-help-link.ts b/web/app/components/workflow/nodes/_base/hooks/use-node-help-link.ts
index 3c68fbd1fd..daad6ffcc0 100644
--- a/web/app/components/workflow/nodes/_base/hooks/use-node-help-link.ts
+++ b/web/app/components/workflow/nodes/_base/hooks/use-node-help-link.ts
@@ -8,7 +8,7 @@ export const useNodeHelpLink = (nodeType: BlockEnum) => {
if (language === 'zh_Hans')
return 'https://docs.dify.ai/zh-hans/guides/workflow/node/'
- return 'https://docs.dify.ai/guides/workflow/node/'
+ return 'https://docs.dify.ai/en/guides/workflow/node/'
}, [language])
const linkMap = useMemo(() => {
if (language === 'zh_Hans') {
diff --git a/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/json-schema-config.tsx b/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/json-schema-config.tsx
index d125e31dae..344d02c011 100644
--- a/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/json-schema-config.tsx
+++ b/web/app/components/workflow/nodes/llm/components/json-schema-config-modal/json-schema-config.tsx
@@ -49,7 +49,7 @@ const DEFAULT_SCHEMA: SchemaRoot = {
const HELP_DOC_URL = {
zh_Hans: 'https://docs.dify.ai/zh-hans/guides/workflow/structured-outputs',
- en_US: 'https://docs.dify.ai/guides/workflow/structured-outputs',
+ en_US: 'https://docs.dify.ai/en/guides/workflow/structured-outputs',
ja_JP: 'https://docs.dify.ai/ja-jp/guides/workflow/structured-outputs',
}
diff --git a/web/app/signin/oneMoreStep.tsx b/web/app/signin/oneMoreStep.tsx
index a78a3ab3b1..7a326a13de 100644
--- a/web/app/signin/oneMoreStep.tsx
+++ b/web/app/signin/oneMoreStep.tsx
@@ -164,7 +164,7 @@ const OneMoreStep = () => {
{t('login.license.link')}
diff --git a/web/i18n/de-DE/common.ts b/web/i18n/de-DE/common.ts
index d4ea088571..509f44708d 100644
--- a/web/i18n/de-DE/common.ts
+++ b/web/i18n/de-DE/common.ts
@@ -455,7 +455,7 @@ const translation = {
apiBasedExtension: {
title: 'API-Erweiterungen bieten zentralisiertes API-Management und vereinfachen die Konfiguration für eine einfache Verwendung in Difys Anwendungen.',
link: 'Erfahren Sie, wie Sie Ihre eigene API-Erweiterung entwickeln.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'API-Erweiterung hinzufügen',
selector: {
title: 'API-Erweiterung',
diff --git a/web/i18n/de-DE/dataset-creation.ts b/web/i18n/de-DE/dataset-creation.ts
index 60102a2a7d..6e532794aa 100644
--- a/web/i18n/de-DE/dataset-creation.ts
+++ b/web/i18n/de-DE/dataset-creation.ts
@@ -69,7 +69,7 @@ const translation = {
unknownError: 'Unbekannter Fehler',
resetAll: 'Alles zurücksetzen',
extractOnlyMainContent: 'Extrahieren Sie nur den Hauptinhalt (keine Kopf-, Navigations- und Fußzeilen usw.)',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
firecrawlTitle: 'Extrahieren von Webinhalten mit 🔥Firecrawl',
maxDepthTooltip: 'Maximale Tiefe für das Crawlen relativ zur eingegebenen URL. Tiefe 0 kratzt nur die Seite der eingegebenen URL, Tiefe 1 kratzt die URL und alles nach der eingegebenen URL + ein / und so weiter.',
crawlSubPage: 'Unterseiten crawlen',
diff --git a/web/i18n/en-US/common.ts b/web/i18n/en-US/common.ts
index bf2bf83f68..0491ff57a8 100644
--- a/web/i18n/en-US/common.ts
+++ b/web/i18n/en-US/common.ts
@@ -475,7 +475,7 @@ const translation = {
apiBasedExtension: {
title: 'API extensions provide centralized API management, simplifying configuration for easy use across Dify\'s applications.',
link: 'Learn how to develop your own API Extension.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'Add API Extension',
selector: {
title: 'API Extension',
diff --git a/web/i18n/en-US/dataset-creation.ts b/web/i18n/en-US/dataset-creation.ts
index fe48e076fa..cf2d454f06 100644
--- a/web/i18n/en-US/dataset-creation.ts
+++ b/web/i18n/en-US/dataset-creation.ts
@@ -80,10 +80,10 @@ const translation = {
run: 'Run',
firecrawlTitle: 'Extract web content with 🔥Firecrawl',
firecrawlDoc: 'Firecrawl docs',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
watercrawlTitle: 'Extract web content with Watercrawl',
watercrawlDoc: 'Watercrawl docs',
- watercrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ watercrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
jinaReaderTitle: 'Convert the entire site to Markdown',
jinaReaderDoc: 'Learn more about Jina Reader',
jinaReaderDocLink: 'https://jina.ai/reader',
diff --git a/web/i18n/es-ES/common.ts b/web/i18n/es-ES/common.ts
index 5933105ffd..483949553e 100644
--- a/web/i18n/es-ES/common.ts
+++ b/web/i18n/es-ES/common.ts
@@ -459,7 +459,7 @@ const translation = {
apiBasedExtension: {
title: 'Las extensiones basadas en API proporcionan una gestión centralizada de API, simplificando la configuración para su fácil uso en las aplicaciones de Dify.',
link: 'Aprende cómo desarrollar tu propia Extensión API.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'Agregar Extensión API',
selector: {
title: 'Extensión API',
diff --git a/web/i18n/es-ES/dataset-creation.ts b/web/i18n/es-ES/dataset-creation.ts
index 7047ff9e9a..4fcccb0633 100644
--- a/web/i18n/es-ES/dataset-creation.ts
+++ b/web/i18n/es-ES/dataset-creation.ts
@@ -63,7 +63,7 @@ const translation = {
run: 'Ejecutar',
firecrawlTitle: 'Extraer contenido web con 🔥Firecrawl',
firecrawlDoc: 'Documentación de Firecrawl',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
options: 'Opciones',
crawlSubPage: 'Rastrear subpáginas',
limit: 'Límite',
diff --git a/web/i18n/fa-IR/common.ts b/web/i18n/fa-IR/common.ts
index 44d6bb006b..5ca55da25b 100644
--- a/web/i18n/fa-IR/common.ts
+++ b/web/i18n/fa-IR/common.ts
@@ -459,7 +459,7 @@ const translation = {
apiBasedExtension: {
title: 'افزونههای مبتنی بر API مدیریت متمرکز API را فراهم میکنند و پیکربندی را برای استفاده آسان در برنامههای Dify ساده میکنند.',
link: 'نحوه توسعه افزونه API خود را بیاموزید.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'افزودن افزونه API',
selector: {
title: 'افزونه API',
diff --git a/web/i18n/fa-IR/dataset-creation.ts b/web/i18n/fa-IR/dataset-creation.ts
index 0ca51ef534..55eae67875 100644
--- a/web/i18n/fa-IR/dataset-creation.ts
+++ b/web/i18n/fa-IR/dataset-creation.ts
@@ -63,7 +63,7 @@ const translation = {
run: 'اجرا',
firecrawlTitle: 'استخراج محتوای وب با fireFirecrawl',
firecrawlDoc: 'مستندات Firecrawl',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
options: 'گزینهها',
crawlSubPage: 'خزش صفحات فرعی',
limit: 'محدودیت',
diff --git a/web/i18n/fr-FR/dataset-creation.ts b/web/i18n/fr-FR/dataset-creation.ts
index e7357749c4..50d750aa44 100644
--- a/web/i18n/fr-FR/dataset-creation.ts
+++ b/web/i18n/fr-FR/dataset-creation.ts
@@ -61,7 +61,7 @@ const translation = {
preview: 'Aperçu',
crawlSubPage: 'Explorer les sous-pages',
configure: 'Configurer',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
maxDepth: 'Profondeur maximale',
fireCrawlNotConfigured: 'Firecrawl n’est pas configuré',
firecrawlTitle: 'Extraire du contenu web avec 🔥Firecrawl',
diff --git a/web/i18n/hi-IN/common.ts b/web/i18n/hi-IN/common.ts
index c5cecc1052..dab3229fe2 100644
--- a/web/i18n/hi-IN/common.ts
+++ b/web/i18n/hi-IN/common.ts
@@ -476,7 +476,7 @@ const translation = {
title:
'एपीआई एक्सटेंशन केंद्रीकृत एपीआई प्रबंधन प्रदान करते हैं, जो Dify के अनुप्रयोगों में आसान उपयोग के लिए कॉन्फ़िगरेशन को सरल बनाते हैं।',
link: 'अपना खुद का एपीआई एक्सटेंशन कैसे विकसित करें, यह जानें।',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'एपीआई एक्सटेंशन जोड़ें',
selector: {
title: 'एपीआई एक्सटेंशन',
diff --git a/web/i18n/it-IT/common.ts b/web/i18n/it-IT/common.ts
index cc8129625a..a413a9ef68 100644
--- a/web/i18n/it-IT/common.ts
+++ b/web/i18n/it-IT/common.ts
@@ -483,7 +483,7 @@ const translation = {
title:
'Le estensioni API forniscono una gestione centralizzata delle API, semplificando la configurazione per un facile utilizzo nelle applicazioni di Dify.',
link: 'Scopri come sviluppare la tua estensione API.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'Aggiungi Estensione API',
selector: {
title: 'Estensione API',
diff --git a/web/i18n/ja-JP/common.ts b/web/i18n/ja-JP/common.ts
index 9f480c5af6..c6163ec5cd 100644
--- a/web/i18n/ja-JP/common.ts
+++ b/web/i18n/ja-JP/common.ts
@@ -475,7 +475,7 @@ const translation = {
apiBasedExtension: {
title: 'API拡張機能は、Difyのアプリケーション全体での簡単な使用のための設定を簡素化し、集中的なAPI管理を提供します。',
link: '独自のAPI拡張機能を開発する方法について学ぶ。',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'API拡張機能を追加',
selector: {
title: 'API拡張機能',
diff --git a/web/i18n/ja-JP/dataset-creation.ts b/web/i18n/ja-JP/dataset-creation.ts
index 3de5b4c615..3cd856d134 100644
--- a/web/i18n/ja-JP/dataset-creation.ts
+++ b/web/i18n/ja-JP/dataset-creation.ts
@@ -72,7 +72,7 @@ const translation = {
run: '実行',
firecrawlTitle: '🔥Firecrawlを使っでウエブコンテンツを抽出',
firecrawlDoc: 'Firecrawlドキュメント',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
jinaReaderTitle: 'サイト全体をMarkdownに変換する',
jinaReaderDoc: 'Jina Readerの詳細',
jinaReaderDocLink: 'https://jina.ai/reader',
diff --git a/web/i18n/ko-KR/common.ts b/web/i18n/ko-KR/common.ts
index 8068a76d8e..aff00be97a 100644
--- a/web/i18n/ko-KR/common.ts
+++ b/web/i18n/ko-KR/common.ts
@@ -451,7 +451,7 @@ const translation = {
apiBasedExtension: {
title: 'API 기반 확장은 Dify 애플리케이션 전체에서 간편한 사용을 위한 설정을 단순화하고 집중적인 API 관리를 제공합니다.',
link: '사용자 정의 API 기반 확장을 개발하는 방법 배우기',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'API 기반 확장 추가',
selector: {
title: 'API 기반 확장',
diff --git a/web/i18n/ko-KR/dataset-creation.ts b/web/i18n/ko-KR/dataset-creation.ts
index 33ca624307..10385bbb71 100644
--- a/web/i18n/ko-KR/dataset-creation.ts
+++ b/web/i18n/ko-KR/dataset-creation.ts
@@ -52,7 +52,7 @@ const translation = {
failed: '생성에 실패했습니다',
},
website: {
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
limit: '한계',
options: '옵션',
firecrawlDoc: 'Firecrawl 문서',
diff --git a/web/i18n/pl-PL/common.ts b/web/i18n/pl-PL/common.ts
index c8b0b79257..b0ef026f5e 100644
--- a/web/i18n/pl-PL/common.ts
+++ b/web/i18n/pl-PL/common.ts
@@ -469,7 +469,7 @@ const translation = {
title:
'Rozszerzenia oparte na interfejsie API zapewniają scentralizowane zarządzanie interfejsami API, upraszczając konfigurację dla łatwego użytkowania w aplikacjach Dify.',
link: 'Dowiedz się, jak opracować własne rozszerzenie interfejsu API.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'Dodaj rozszerzenie interfejsu API',
selector: {
title: 'Rozszerzenie interfejsu API',
diff --git a/web/i18n/pl-PL/dataset-creation.ts b/web/i18n/pl-PL/dataset-creation.ts
index 98c0613a30..4073fdd681 100644
--- a/web/i18n/pl-PL/dataset-creation.ts
+++ b/web/i18n/pl-PL/dataset-creation.ts
@@ -54,7 +54,7 @@ const translation = {
},
website: {
limit: 'Ograniczać',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
firecrawlDoc: 'Dokumentacja Firecrawl',
unknownError: 'Nieznany błąd',
fireCrawlNotConfiguredDescription: 'Skonfiguruj Firecrawl z kluczem API, aby z niego korzystać.',
diff --git a/web/i18n/pt-BR/common.ts b/web/i18n/pt-BR/common.ts
index 180bcbb4da..eb92d9ab1d 100644
--- a/web/i18n/pt-BR/common.ts
+++ b/web/i18n/pt-BR/common.ts
@@ -455,7 +455,7 @@ const translation = {
apiBasedExtension: {
title: 'As extensões de API fornecem gerenciamento centralizado de API, simplificando a configuração para uso fácil em todos os aplicativos da Dify.',
link: 'Saiba como desenvolver sua própria Extensão de API.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'Adicionar Extensão de API',
selector: {
title: 'Extensão de API',
diff --git a/web/i18n/pt-BR/dataset-creation.ts b/web/i18n/pt-BR/dataset-creation.ts
index a3949f484b..d4315e6866 100644
--- a/web/i18n/pt-BR/dataset-creation.ts
+++ b/web/i18n/pt-BR/dataset-creation.ts
@@ -58,7 +58,7 @@ const translation = {
crawlSubPage: 'Rastrear subpáginas',
selectAll: 'Selecionar tudo',
resetAll: 'Redefinir tudo',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
includeOnlyPaths: 'Incluir apenas caminhos',
configure: 'Configurar',
limit: 'Limite',
diff --git a/web/i18n/ro-RO/common.ts b/web/i18n/ro-RO/common.ts
index ad000e26c4..e755d59354 100644
--- a/web/i18n/ro-RO/common.ts
+++ b/web/i18n/ro-RO/common.ts
@@ -455,7 +455,7 @@ const translation = {
apiBasedExtension: {
title: 'Extensiile bazate pe API oferă o gestionare centralizată a API-urilor, simplificând configurația pentru o utilizare ușoară în aplicațiile Dify.',
link: 'Aflați cum să dezvoltați propria extensie bazată pe API.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'Adăugați extensie API',
selector: {
title: 'Extensie API',
diff --git a/web/i18n/ro-RO/dataset-creation.ts b/web/i18n/ro-RO/dataset-creation.ts
index 3587070999..2c92d8417c 100644
--- a/web/i18n/ro-RO/dataset-creation.ts
+++ b/web/i18n/ro-RO/dataset-creation.ts
@@ -65,7 +65,7 @@ const translation = {
firecrawlTitle: 'Extrageți conținut web cu 🔥Firecrawl',
unknownError: 'Eroare necunoscută',
scrapTimeInfo: 'Pagini răzuite {{total}} în total în {{timp}}s',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
excludePaths: 'Excluderea căilor',
resetAll: 'Resetați toate',
extractOnlyMainContent: 'Extrageți doar conținutul principal (fără anteturi, navigări, subsoluri etc.)',
diff --git a/web/i18n/ru-RU/common.ts b/web/i18n/ru-RU/common.ts
index d419bcc97e..de925d1742 100644
--- a/web/i18n/ru-RU/common.ts
+++ b/web/i18n/ru-RU/common.ts
@@ -459,7 +459,7 @@ const translation = {
apiBasedExtension: {
title: 'API-расширения обеспечивают централизованное управление API, упрощая настройку для удобного использования в приложениях Dify.',
link: 'Узнайте, как разработать собственное API-расширение.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'Добавить API Extension',
selector: {
title: 'API Extension',
diff --git a/web/i18n/ru-RU/dataset-creation.ts b/web/i18n/ru-RU/dataset-creation.ts
index 765bb88497..0b7b203a28 100644
--- a/web/i18n/ru-RU/dataset-creation.ts
+++ b/web/i18n/ru-RU/dataset-creation.ts
@@ -63,7 +63,7 @@ const translation = {
run: 'Запустить',
firecrawlTitle: 'Извлечь веб-контент с помощью 🔥Firecrawl',
firecrawlDoc: 'Документация Firecrawl',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
options: 'Опции',
crawlSubPage: 'Сканировать подстраницы',
limit: 'Лимит',
diff --git a/web/i18n/sl-SI/common.ts b/web/i18n/sl-SI/common.ts
index 1167f33697..ce80c8a086 100644
--- a/web/i18n/sl-SI/common.ts
+++ b/web/i18n/sl-SI/common.ts
@@ -452,7 +452,7 @@ const translation = {
apiBasedExtension: {
title: 'Razširitve API omogočajo centralizirano upravljanje API, kar poenostavi konfiguracijo za enostavno uporabo v aplikacijah Dify.',
link: 'Naučite se, kako razviti svojo API razširitev.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'Dodaj API razširitev',
selector: {
title: 'API razširitev',
@@ -681,7 +681,7 @@ const translation = {
type: 'Vrsta',
link: 'Preberite, kako razvijete lastno razširitev API-ja.',
title: 'Razširitve API zagotavljajo centralizirano upravljanje API, kar poenostavlja konfiguracijo za enostavno uporabo v aplikacijah Dify.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'Dodajanje razširitve API-ja',
},
about: {
diff --git a/web/i18n/sl-SI/dataset-creation.ts b/web/i18n/sl-SI/dataset-creation.ts
index e675c61813..8f3a67d7d1 100644
--- a/web/i18n/sl-SI/dataset-creation.ts
+++ b/web/i18n/sl-SI/dataset-creation.ts
@@ -71,7 +71,7 @@ const translation = {
run: 'Zaženi',
firecrawlTitle: 'Izvleci spletno vsebino z 🔥Firecrawl',
firecrawlDoc: 'Firecrawl dokumentacija',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
jinaReaderTitle: 'Pretvori celotno stran v Markdown',
jinaReaderDoc: 'Več o Jina Reader',
jinaReaderDocLink: 'https://jina.ai/reader',
diff --git a/web/i18n/th-TH/common.ts b/web/i18n/th-TH/common.ts
index be1f62cdd7..fca1dc428c 100644
--- a/web/i18n/th-TH/common.ts
+++ b/web/i18n/th-TH/common.ts
@@ -454,7 +454,7 @@ const translation = {
apiBasedExtension: {
title: 'ส่วนขยาย API ให้การจัดการ API แบบรวมศูนย์ ทําให้การกําหนดค่าง่ายขึ้นเพื่อให้ใช้งานได้ง่ายในแอปพลิเคชันของ Dify',
link: 'เรียนรู้วิธีพัฒนาส่วนขยาย API ของคุณเอง',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+    linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'เพิ่มส่วนขยาย API',
selector: {
title: 'ส่วนขยาย API',
diff --git a/web/i18n/th-TH/dataset-creation.ts b/web/i18n/th-TH/dataset-creation.ts
index 4be2250184..dd33e65e47 100644
--- a/web/i18n/th-TH/dataset-creation.ts
+++ b/web/i18n/th-TH/dataset-creation.ts
@@ -71,7 +71,7 @@ const translation = {
run: 'วิ่ง',
firecrawlTitle: 'แยกเนื้อหาเว็บด้วย 🔥Firecrawl',
firecrawlDoc: 'เอกสาร Firecrawl',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
jinaReaderTitle: 'แปลงทั้งไซต์เป็น Markdown',
jinaReaderDoc: 'เรียนรู้เพิ่มเติมเกี่ยวกับ Jina Reader',
jinaReaderDocLink: 'https://jina.ai/reader',
diff --git a/web/i18n/tr-TR/common.ts b/web/i18n/tr-TR/common.ts
index 9dd2f2dd7e..d66c226aa6 100644
--- a/web/i18n/tr-TR/common.ts
+++ b/web/i18n/tr-TR/common.ts
@@ -459,7 +459,7 @@ const translation = {
apiBasedExtension: {
title: 'API uzantıları merkezi API yönetimi sağlar, Dify\'nin uygulamaları arasında kolay kullanım için yapılandırmayı basitleştirir.',
link: 'Kendi API Uzantınızı nasıl geliştireceğinizi öğrenin.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'API Uzantısı Ekle',
selector: {
title: 'API Uzantısı',
diff --git a/web/i18n/tr-TR/dataset-creation.ts b/web/i18n/tr-TR/dataset-creation.ts
index 0394a4816a..e672433786 100644
--- a/web/i18n/tr-TR/dataset-creation.ts
+++ b/web/i18n/tr-TR/dataset-creation.ts
@@ -63,7 +63,7 @@ const translation = {
run: 'Çalıştır',
firecrawlTitle: '🔥Firecrawl ile web içeriğini çıkarın',
firecrawlDoc: 'Firecrawl dokümanları',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
options: 'Seçenekler',
crawlSubPage: 'Alt sayfaları tarayın',
limit: 'Sınır',
diff --git a/web/i18n/uk-UA/common.ts b/web/i18n/uk-UA/common.ts
index dd6dab61cc..7416e30594 100644
--- a/web/i18n/uk-UA/common.ts
+++ b/web/i18n/uk-UA/common.ts
@@ -456,7 +456,7 @@ const translation = {
apiBasedExtension: {
title: 'API-розширення забезпечують централізоване керування API, спрощуючи конфігурацію для зручного використання в різних програмах Dify.',
link: 'Дізнайтеся, як розробити власне розширення API.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'Додати розширення API',
selector: {
title: 'Розширення API',
diff --git a/web/i18n/uk-UA/dataset-creation.ts b/web/i18n/uk-UA/dataset-creation.ts
index 96af3bbd1e..9d924629e6 100644
--- a/web/i18n/uk-UA/dataset-creation.ts
+++ b/web/i18n/uk-UA/dataset-creation.ts
@@ -60,7 +60,7 @@ const translation = {
unknownError: 'Невідома помилка',
maxDepth: 'Максимальна глибина',
crawlSubPage: 'Сканування підсторінок',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
preview: 'Попередній перегляд',
fireCrawlNotConfigured: 'Firecrawl не налаштовано',
includeOnlyPaths: 'Включати лише контури',
diff --git a/web/i18n/vi-VN/common.ts b/web/i18n/vi-VN/common.ts
index a047fb6c9f..2ce9c3c41a 100644
--- a/web/i18n/vi-VN/common.ts
+++ b/web/i18n/vi-VN/common.ts
@@ -455,7 +455,7 @@ const translation = {
apiBasedExtension: {
title: 'Các tiện ích API cung cấp quản lý API tập trung, giúp cấu hình dễ dàng sử dụng trên các ứng dụng của Dify.',
link: 'Tìm hiểu cách phát triển Phần mở rộng API của riêng bạn.',
- linkUrl: 'https://docs.dify.ai/features/extension/api_based_extension',
+ linkUrl: 'https://docs.dify.ai/en/guides/extension/api-based-extension/README',
add: 'Thêm Phần mở rộng API',
selector: {
title: 'Phần mở rộng API',
diff --git a/web/i18n/vi-VN/dataset-creation.ts b/web/i18n/vi-VN/dataset-creation.ts
index 8acaf329b2..071c1e3d13 100644
--- a/web/i18n/vi-VN/dataset-creation.ts
+++ b/web/i18n/vi-VN/dataset-creation.ts
@@ -63,7 +63,7 @@ const translation = {
unknownError: 'Lỗi không xác định',
extractOnlyMainContent: 'Chỉ trích xuất nội dung chính (không có đầu trang, điều hướng, chân trang, v.v.)',
exceptionErrorTitle: 'Một ngoại lệ xảy ra trong khi chạy tác vụ Firecrawl:',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
selectAll: 'Chọn tất cả',
firecrawlTitle: 'Trích xuất nội dung web bằng 🔥Firecrawl',
totalPageScraped: 'Tổng số trang được cạo:',
diff --git a/web/i18n/zh-Hans/common.ts b/web/i18n/zh-Hans/common.ts
index 8ed1e28fd8..327f41ea05 100644
--- a/web/i18n/zh-Hans/common.ts
+++ b/web/i18n/zh-Hans/common.ts
@@ -475,7 +475,7 @@ const translation = {
apiBasedExtension: {
title: 'API 扩展提供了一个集中式的 API 管理,在此统一添加 API 配置后,方便在 Dify 上的各类应用中直接使用。',
link: '了解如何开发您自己的 API 扩展。',
- linkUrl: 'https://docs.dify.ai/v/zh-hans/guides/extension/api-based-extension',
+ linkUrl: 'https://docs.dify.ai/zh-hans/guides/extension/api-based-extension',
add: '新增 API 扩展',
selector: {
title: 'API 扩展',
diff --git a/web/i18n/zh-Hans/dataset-creation.ts b/web/i18n/zh-Hans/dataset-creation.ts
index 6a91b1b996..e3ca478f40 100644
--- a/web/i18n/zh-Hans/dataset-creation.ts
+++ b/web/i18n/zh-Hans/dataset-creation.ts
@@ -79,7 +79,7 @@ const translation = {
run: '运行',
firecrawlTitle: '使用 🔥Firecrawl 提取网页内容',
firecrawlDoc: 'Firecrawl 文档',
- firecrawlDocLink: 'https://docs.dify.ai/v/zh-hans/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/zh-hans/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
jinaReaderTitle: '将整个站点内容转换为 Markdown 格式',
jinaReaderDoc: '了解更多关于 Jina Reader',
jinaReaderDocLink: 'https://jina.ai/reader',
diff --git a/web/i18n/zh-Hant/common.ts b/web/i18n/zh-Hant/common.ts
index 18f42a3b93..cf15a00d61 100644
--- a/web/i18n/zh-Hant/common.ts
+++ b/web/i18n/zh-Hant/common.ts
@@ -455,7 +455,7 @@ const translation = {
apiBasedExtension: {
title: 'API 擴充套件提供了一個集中式的 API 管理,在此統一新增 API 配置後,方便在 Dify 上的各類應用中直接使用。',
link: '瞭解如何開發您自己的 API 擴充套件。',
- linkUrl: 'https://docs.dify.ai/v/zh-hans/guides/extension/api-based-extension',
+    linkUrl: 'https://docs.dify.ai/zh-hans/guides/extension/api-based-extension',
add: '新增 API 擴充套件',
selector: {
title: 'API 擴充套件',
diff --git a/web/i18n/zh-Hant/dataset-creation.ts b/web/i18n/zh-Hant/dataset-creation.ts
index 9f5186d024..61737775dc 100644
--- a/web/i18n/zh-Hant/dataset-creation.ts
+++ b/web/i18n/zh-Hant/dataset-creation.ts
@@ -61,7 +61,7 @@ const translation = {
fireCrawlNotConfiguredDescription: '使用 API 金鑰配置 Firecrawl 以使用它。',
limit: '限制',
crawlSubPage: '抓取子頁面',
- firecrawlDocLink: 'https://docs.dify.ai/guides/knowledge-base/sync-from-website',
+ firecrawlDocLink: 'https://docs.dify.ai/en/guides/knowledge-base/create-knowledge-and-upload-documents/import-content-data/sync-from-website',
preview: '預覽',
configure: '配置',
excludePaths: '排除路徑',
From 8de24bc16e9deff497132760a88937ea4d9f19ba Mon Sep 17 00:00:00 2001
From: Rajhans Jadhao
Date: Tue, 6 May 2025 14:32:40 +0530
Subject: [PATCH 3/6] chore: enhance dev script robustness by determining the
script directory (#19209)
---
dev/mypy-check | 3 +++
dev/pytest/pytest_all_tests.sh | 3 +++
dev/pytest/pytest_artifacts.sh | 3 +++
dev/pytest/pytest_model_runtime.sh | 3 +++
dev/pytest/pytest_tools.sh | 3 +++
dev/pytest/pytest_unit_tests.sh | 3 +++
dev/pytest/pytest_vdb.sh | 3 +++
dev/pytest/pytest_workflow.sh | 3 +++
dev/reformat | 3 +++
dev/sync-uv | 3 +++
10 files changed, 30 insertions(+)
diff --git a/dev/mypy-check b/dev/mypy-check
index 24cfb06824..c043faffe6 100755
--- a/dev/mypy-check
+++ b/dev/mypy-check
@@ -2,6 +2,9 @@
set -x
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/.."
+
# run mypy checks
uv run --directory api --dev --with pip \
python -m mypy --install-types --non-interactive --cache-fine-grained --sqlite-cache .
diff --git a/dev/pytest/pytest_all_tests.sh b/dev/pytest/pytest_all_tests.sh
index f0c8a78548..30898b4fcf 100755
--- a/dev/pytest/pytest_all_tests.sh
+++ b/dev/pytest/pytest_all_tests.sh
@@ -1,6 +1,9 @@
#!/bin/bash
set -x
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
# ModelRuntime
dev/pytest/pytest_model_runtime.sh
diff --git a/dev/pytest/pytest_artifacts.sh b/dev/pytest/pytest_artifacts.sh
index d52acb2273..3086ef5cc4 100755
--- a/dev/pytest/pytest_artifacts.sh
+++ b/dev/pytest/pytest_artifacts.sh
@@ -1,4 +1,7 @@
#!/bin/bash
set -x
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
pytest api/tests/artifact_tests/
diff --git a/dev/pytest/pytest_model_runtime.sh b/dev/pytest/pytest_model_runtime.sh
index dc6c6ac627..2cbbbbfd81 100755
--- a/dev/pytest/pytest_model_runtime.sh
+++ b/dev/pytest/pytest_model_runtime.sh
@@ -1,6 +1,9 @@
#!/bin/bash
set -x
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
pytest api/tests/integration_tests/model_runtime/anthropic \
api/tests/integration_tests/model_runtime/azure_openai \
api/tests/integration_tests/model_runtime/openai api/tests/integration_tests/model_runtime/chatglm \
diff --git a/dev/pytest/pytest_tools.sh b/dev/pytest/pytest_tools.sh
index 0b1a8c9877..d10934626f 100755
--- a/dev/pytest/pytest_tools.sh
+++ b/dev/pytest/pytest_tools.sh
@@ -1,4 +1,7 @@
#!/bin/bash
set -x
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
pytest api/tests/integration_tests/tools
diff --git a/dev/pytest/pytest_unit_tests.sh b/dev/pytest/pytest_unit_tests.sh
index 2075596b7f..1a1819ca28 100755
--- a/dev/pytest/pytest_unit_tests.sh
+++ b/dev/pytest/pytest_unit_tests.sh
@@ -1,5 +1,8 @@
#!/bin/bash
set -x
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
# libs
pytest api/tests/unit_tests
diff --git a/dev/pytest/pytest_vdb.sh b/dev/pytest/pytest_vdb.sh
index dd03ca3514..7f617a9c05 100755
--- a/dev/pytest/pytest_vdb.sh
+++ b/dev/pytest/pytest_vdb.sh
@@ -1,6 +1,9 @@
#!/bin/bash
set -x
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
pytest api/tests/integration_tests/vdb/chroma \
api/tests/integration_tests/vdb/milvus \
api/tests/integration_tests/vdb/pgvecto_rs \
diff --git a/dev/pytest/pytest_workflow.sh b/dev/pytest/pytest_workflow.sh
index db8fdb2fb9..b63d49069f 100755
--- a/dev/pytest/pytest_workflow.sh
+++ b/dev/pytest/pytest_workflow.sh
@@ -1,4 +1,7 @@
#!/bin/bash
set -x
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/../.."
+
pytest api/tests/integration_tests/workflow
diff --git a/dev/reformat b/dev/reformat
index 53d7703fce..71cb6abb1e 100755
--- a/dev/reformat
+++ b/dev/reformat
@@ -2,6 +2,9 @@
set -x
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/.."
+
# run ruff linter
uv run --directory api --dev ruff check --fix ./
diff --git a/dev/sync-uv b/dev/sync-uv
index 7bc3bb22be..67a8133f5a 100755
--- a/dev/sync-uv
+++ b/dev/sync-uv
@@ -6,5 +6,8 @@ if ! command -v uv &> /dev/null; then
pip install uv
fi
+SCRIPT_DIR="$(dirname "$(realpath "$0")")"
+cd "$SCRIPT_DIR/.."
+
# check uv.lock in sync with pyproject.toml
uv lock --project api
From 9565fe9b1b4246a3b0d49ed8f4cd2f6298adc406 Mon Sep 17 00:00:00 2001
From: QuantumGhost
Date: Tue, 6 May 2025 18:05:19 +0800
Subject: [PATCH 4/6] fix(api): fix alembic offline mode (#19285)
Alembic's offline mode generates SQL from SQLAlchemy migration operations,
providing developers with a clear view of database schema changes without
requiring an active database connection.
However, some migration versions (specifically bbadea11becb and d7999dfa4aae)
were performing database schema introspection, which fails in offline mode
since it requires an actual database connection.
This commit:
- Adds offline mode support by detecting context.is_offline_mode()
- Skips introspection steps when in offline mode
- Adds warning messages in SQL output to inform users that assumptions were made
- Prompts users to review the generated SQL for accuracy
These changes ensure migrations work consistently in both online and offline modes.
Close #19284.
---
...a11becb_add_name_and_size_to_tool_files.py | 56 ++++++++++++-------
..._remove_workflow_node_executions_retry_.py | 32 +++++++----
api/models/tools.py | 36 ++----------
3 files changed, 61 insertions(+), 63 deletions(-)
diff --git a/api/migrations/versions/2024_10_10_0516-bbadea11becb_add_name_and_size_to_tool_files.py b/api/migrations/versions/2024_10_10_0516-bbadea11becb_add_name_and_size_to_tool_files.py
index 5b5656e7ed..00f2b15802 100644
--- a/api/migrations/versions/2024_10_10_0516-bbadea11becb_add_name_and_size_to_tool_files.py
+++ b/api/migrations/versions/2024_10_10_0516-bbadea11becb_add_name_and_size_to_tool_files.py
@@ -5,45 +5,61 @@ Revises: 33f5fac87f29
Create Date: 2024-10-10 05:16:14.764268
"""
-from alembic import op
-import models as models
+
import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
+from alembic import op, context
# revision identifiers, used by Alembic.
-revision = 'bbadea11becb'
-down_revision = 'd8e744d88ed6'
+revision = "bbadea11becb"
+down_revision = "d8e744d88ed6"
branch_labels = None
depends_on = None
def upgrade():
+ def _has_name_or_size_column() -> bool:
+ # We cannot access the database in offline mode, so assume
+ # the "name" and "size" columns do not exist.
+ if context.is_offline_mode():
+ # Log a warning message to inform the user that the database schema cannot be inspected
+ # in offline mode, and the generated SQL may not accurately reflect the actual execution.
+ op.execute(
+ "-- Executing in offline mode, assuming the name and size columns do not exist.\n"
+ "-- The generated SQL may differ from what will actually be executed.\n"
+ "-- Please review the migration script carefully!"
+ )
+
+ return False
+ # Use SQLAlchemy inspector to get the columns of the 'tool_files' table
+ inspector = sa.inspect(conn)
+ columns = [col["name"] for col in inspector.get_columns("tool_files")]
+
+ # If 'name' or 'size' columns already exist, exit the upgrade function
+ if "name" in columns or "size" in columns:
+ return True
+ return False
+
# ### commands auto generated by Alembic - please adjust! ###
# Get the database connection
conn = op.get_bind()
- # Use SQLAlchemy inspector to get the columns of the 'tool_files' table
- inspector = sa.inspect(conn)
- columns = [col['name'] for col in inspector.get_columns('tool_files')]
-
- # If 'name' or 'size' columns already exist, exit the upgrade function
- if 'name' in columns or 'size' in columns:
+ if _has_name_or_size_column():
return
- with op.batch_alter_table('tool_files', schema=None) as batch_op:
- batch_op.add_column(sa.Column('name', sa.String(), nullable=True))
- batch_op.add_column(sa.Column('size', sa.Integer(), nullable=True))
+ with op.batch_alter_table("tool_files", schema=None) as batch_op:
+ batch_op.add_column(sa.Column("name", sa.String(), nullable=True))
+ batch_op.add_column(sa.Column("size", sa.Integer(), nullable=True))
op.execute("UPDATE tool_files SET name = '' WHERE name IS NULL")
op.execute("UPDATE tool_files SET size = -1 WHERE size IS NULL")
- with op.batch_alter_table('tool_files', schema=None) as batch_op:
- batch_op.alter_column('name', existing_type=sa.String(), nullable=False)
- batch_op.alter_column('size', existing_type=sa.Integer(), nullable=False)
+ with op.batch_alter_table("tool_files", schema=None) as batch_op:
+ batch_op.alter_column("name", existing_type=sa.String(), nullable=False)
+ batch_op.alter_column("size", existing_type=sa.Integer(), nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
- with op.batch_alter_table('tool_files', schema=None) as batch_op:
- batch_op.drop_column('size')
- batch_op.drop_column('name')
+ with op.batch_alter_table("tool_files", schema=None) as batch_op:
+ batch_op.drop_column("size")
+ batch_op.drop_column("name")
# ### end Alembic commands ###
diff --git a/api/migrations/versions/2024_12_23_1154-d7999dfa4aae_remove_workflow_node_executions_retry_.py b/api/migrations/versions/2024_12_23_1154-d7999dfa4aae_remove_workflow_node_executions_retry_.py
index 07454b0917..adf6421e57 100644
--- a/api/migrations/versions/2024_12_23_1154-d7999dfa4aae_remove_workflow_node_executions_retry_.py
+++ b/api/migrations/versions/2024_12_23_1154-d7999dfa4aae_remove_workflow_node_executions_retry_.py
@@ -5,28 +5,38 @@ Revises: e1944c35e15e
Create Date: 2024-12-23 11:54:15.344543
"""
-from alembic import op
-import models as models
-import sqlalchemy as sa
+
+from alembic import op, context
from sqlalchemy import inspect
# revision identifiers, used by Alembic.
-revision = 'd7999dfa4aae'
-down_revision = 'e1944c35e15e'
+revision = "d7999dfa4aae"
+down_revision = "e1944c35e15e"
branch_labels = None
depends_on = None
def upgrade():
- # Check if column exists before attempting to remove it
- conn = op.get_bind()
- inspector = inspect(conn)
- has_column = 'retry_index' in [col['name'] for col in inspector.get_columns('workflow_node_executions')]
+ def _has_retry_index_column() -> bool:
+ if context.is_offline_mode():
+ # Log a warning message to inform the user that the database schema cannot be inspected
+ # in offline mode, and the generated SQL may not accurately reflect the actual execution.
+ op.execute(
+ '-- Executing in offline mode: assuming the "retry_index" column does not exist.\n'
+ "-- The generated SQL may differ from what will actually be executed.\n"
+ "-- Please review the migration script carefully!"
+ )
+ return False
+ conn = op.get_bind()
+ inspector = inspect(conn)
+ return "retry_index" in [col["name"] for col in inspector.get_columns("workflow_node_executions")]
+
+ has_column = _has_retry_index_column()
if has_column:
- with op.batch_alter_table('workflow_node_executions', schema=None) as batch_op:
- batch_op.drop_column('retry_index')
+ with op.batch_alter_table("workflow_node_executions", schema=None) as batch_op:
+ batch_op.drop_column("retry_index")
def downgrade():
diff --git a/api/models/tools.py b/api/models/tools.py
index 05604b9330..e027475e38 100644
--- a/api/models/tools.py
+++ b/api/models/tools.py
@@ -1,6 +1,6 @@
import json
from datetime import datetime
-from typing import Any, Optional, cast
+from typing import Any, cast
import sqlalchemy as sa
from deprecated import deprecated
@@ -304,8 +304,11 @@ class DeprecatedPublishedAppTool(Base):
db.UniqueConstraint("app_id", "user_id", name="unique_published_app_tool"),
)
+ id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
# id of the app
app_id = db.Column(StringUUID, ForeignKey("apps.id"), nullable=False)
+
+ user_id: Mapped[str] = db.Column(StringUUID, nullable=False)
# who published this tool
description = db.Column(db.Text, nullable=False)
# llm_description of the tool, for LLM
@@ -325,34 +328,3 @@ class DeprecatedPublishedAppTool(Base):
@property
def description_i18n(self) -> I18nObject:
return I18nObject(**json.loads(self.description))
-
- id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()"))
- user_id: Mapped[str] = db.Column(StringUUID, nullable=False)
- tenant_id: Mapped[str] = db.Column(StringUUID, nullable=False)
- conversation_id: Mapped[Optional[str]] = db.Column(StringUUID, nullable=True)
- file_key: Mapped[str] = db.Column(db.String(255), nullable=False)
- mimetype: Mapped[str] = db.Column(db.String(255), nullable=False)
- original_url: Mapped[Optional[str]] = db.Column(db.String(2048), nullable=True)
- name: Mapped[str] = mapped_column(default="")
- size: Mapped[int] = mapped_column(default=-1)
-
- def __init__(
- self,
- *,
- user_id: str,
- tenant_id: str,
- conversation_id: Optional[str] = None,
- file_key: str,
- mimetype: str,
- original_url: Optional[str] = None,
- name: str,
- size: int,
- ):
- self.user_id = user_id
- self.tenant_id = tenant_id
- self.conversation_id = conversation_id
- self.file_key = file_key
- self.mimetype = mimetype
- self.original_url = original_url
- self.name = name
- self.size = size
From a6827493f0e9a4a2f8470c717e7f13f7b15c6f67 Mon Sep 17 00:00:00 2001
From: zxhlyh
Date: Tue, 6 May 2025 18:24:10 +0800
Subject: [PATCH 5/6] chore: slice workflow refresh draft hook (#19292)
---
.../workflow-app/components/workflow-main.tsx | 4 +++
.../components/workflow-app/hooks/index.ts | 1 +
.../hooks/use-nodes-sync-draft.ts | 4 +--
.../hooks/use-workflow-refresh-draft.ts | 36 +++++++++++++++++++
.../components/workflow/hooks-store/store.ts | 3 ++
web/app/components/workflow/hooks/index.ts | 1 +
.../hooks/use-workflow-interactions.ts | 25 -------------
.../hooks/use-workflow-refresh-draft.ts | 9 +++++
web/app/components/workflow/index.tsx | 4 +--
9 files changed, 58 insertions(+), 29 deletions(-)
create mode 100644 web/app/components/workflow-app/hooks/use-workflow-refresh-draft.ts
create mode 100644 web/app/components/workflow/hooks/use-workflow-refresh-draft.ts
diff --git a/web/app/components/workflow-app/components/workflow-main.tsx b/web/app/components/workflow-app/components/workflow-main.tsx
index 4ff1f4c624..2f2295cb59 100644
--- a/web/app/components/workflow-app/components/workflow-main.tsx
+++ b/web/app/components/workflow-app/components/workflow-main.tsx
@@ -8,6 +8,7 @@ import type { WorkflowProps } from '@/app/components/workflow'
import WorkflowChildren from './workflow-children'
import {
useNodesSyncDraft,
+ useWorkflowRefreshDraft,
useWorkflowRun,
useWorkflowStartRun,
} from '../hooks'
@@ -32,6 +33,7 @@ const WorkflowMain = ({
doSyncWorkflowDraft,
syncWorkflowDraftWhenPageClose,
} = useNodesSyncDraft()
+ const { handleRefreshWorkflowDraft } = useWorkflowRefreshDraft()
const {
handleBackupDraft,
handleLoadBackupDraft,
@@ -49,6 +51,7 @@ const WorkflowMain = ({
return {
syncWorkflowDraftWhenPageClose,
doSyncWorkflowDraft,
+ handleRefreshWorkflowDraft,
handleBackupDraft,
handleLoadBackupDraft,
handleRestoreFromPublishedWorkflow,
@@ -61,6 +64,7 @@ const WorkflowMain = ({
}, [
syncWorkflowDraftWhenPageClose,
doSyncWorkflowDraft,
+ handleRefreshWorkflowDraft,
handleBackupDraft,
handleLoadBackupDraft,
handleRestoreFromPublishedWorkflow,
diff --git a/web/app/components/workflow-app/hooks/index.ts b/web/app/components/workflow-app/hooks/index.ts
index 1517eb9a16..6373a8591c 100644
--- a/web/app/components/workflow-app/hooks/index.ts
+++ b/web/app/components/workflow-app/hooks/index.ts
@@ -4,3 +4,4 @@ export * from './use-nodes-sync-draft'
export * from './use-workflow-run'
export * from './use-workflow-start-run'
export * from './use-is-chat-mode'
+export * from './use-workflow-refresh-draft'
diff --git a/web/app/components/workflow-app/hooks/use-nodes-sync-draft.ts b/web/app/components/workflow-app/hooks/use-nodes-sync-draft.ts
index 7c6eb6a5be..db21cfb05e 100644
--- a/web/app/components/workflow-app/hooks/use-nodes-sync-draft.ts
+++ b/web/app/components/workflow-app/hooks/use-nodes-sync-draft.ts
@@ -6,20 +6,20 @@ import {
useWorkflowStore,
} from '@/app/components/workflow/store'
import { BlockEnum } from '@/app/components/workflow/types'
-import { useWorkflowUpdate } from '@/app/components/workflow/hooks'
import {
useNodesReadOnly,
} from '@/app/components/workflow/hooks/use-workflow'
import { syncWorkflowDraft } from '@/service/workflow'
import { useFeaturesStore } from '@/app/components/base/features/hooks'
import { API_PREFIX } from '@/config'
+import { useWorkflowRefreshDraft } from '.'
export const useNodesSyncDraft = () => {
const store = useStoreApi()
const workflowStore = useWorkflowStore()
const featuresStore = useFeaturesStore()
const { getNodesReadOnly } = useNodesReadOnly()
- const { handleRefreshWorkflowDraft } = useWorkflowUpdate()
+ const { handleRefreshWorkflowDraft } = useWorkflowRefreshDraft()
const params = useParams()
const getPostParams = useCallback(() => {
diff --git a/web/app/components/workflow-app/hooks/use-workflow-refresh-draft.ts b/web/app/components/workflow-app/hooks/use-workflow-refresh-draft.ts
new file mode 100644
index 0000000000..c944e10c4c
--- /dev/null
+++ b/web/app/components/workflow-app/hooks/use-workflow-refresh-draft.ts
@@ -0,0 +1,36 @@
+import { useCallback } from 'react'
+import { useWorkflowStore } from '@/app/components/workflow/store'
+import { fetchWorkflowDraft } from '@/service/workflow'
+import type { WorkflowDataUpdater } from '@/app/components/workflow/types'
+import { useWorkflowUpdate } from '@/app/components/workflow/hooks'
+
+export const useWorkflowRefreshDraft = () => {
+ const workflowStore = useWorkflowStore()
+ const { handleUpdateWorkflowCanvas } = useWorkflowUpdate()
+
+ const handleRefreshWorkflowDraft = useCallback(() => {
+ const {
+ appId,
+ setSyncWorkflowDraftHash,
+ setIsSyncingWorkflowDraft,
+ setEnvironmentVariables,
+ setEnvSecrets,
+ setConversationVariables,
+ } = workflowStore.getState()
+ setIsSyncingWorkflowDraft(true)
+ fetchWorkflowDraft(`/apps/${appId}/workflows/draft`).then((response) => {
+ handleUpdateWorkflowCanvas(response.graph as WorkflowDataUpdater)
+ setSyncWorkflowDraftHash(response.hash)
+ setEnvSecrets((response.environment_variables || []).filter(env => env.value_type === 'secret').reduce((acc, env) => {
+ acc[env.id] = env.value
+ return acc
+      }, {} as Record<string, string>))
+ setEnvironmentVariables(response.environment_variables?.map(env => env.value_type === 'secret' ? { ...env, value: '[__HIDDEN__]' } : env) || [])
+ setConversationVariables(response.conversation_variables || [])
+ }).finally(() => setIsSyncingWorkflowDraft(false))
+ }, [handleUpdateWorkflowCanvas, workflowStore])
+
+ return {
+ handleRefreshWorkflowDraft,
+ }
+}
diff --git a/web/app/components/workflow/hooks-store/store.ts b/web/app/components/workflow/hooks-store/store.ts
index 2e40cbfbc9..9f5e1a6650 100644
--- a/web/app/components/workflow/hooks-store/store.ts
+++ b/web/app/components/workflow/hooks-store/store.ts
@@ -18,6 +18,7 @@ type CommonHooksFnMap = {
}
) => Promise<void>
syncWorkflowDraftWhenPageClose: () => void
+ handleRefreshWorkflowDraft: () => void
handleBackupDraft: () => void
handleLoadBackupDraft: () => void
handleRestoreFromPublishedWorkflow: (...args: any[]) => void
@@ -35,6 +36,7 @@ export type Shape = {
export const createHooksStore = ({
doSyncWorkflowDraft = async () => noop(),
syncWorkflowDraftWhenPageClose = noop,
+ handleRefreshWorkflowDraft = noop,
handleBackupDraft = noop,
handleLoadBackupDraft = noop,
handleRestoreFromPublishedWorkflow = noop,
@@ -48,6 +50,7 @@ export const createHooksStore = ({
refreshAll: props => set(state => ({ ...state, ...props })),
doSyncWorkflowDraft,
syncWorkflowDraftWhenPageClose,
+ handleRefreshWorkflowDraft,
handleBackupDraft,
handleLoadBackupDraft,
handleRestoreFromPublishedWorkflow,
diff --git a/web/app/components/workflow/hooks/index.ts b/web/app/components/workflow/hooks/index.ts
index 20a34c69e3..fda0f50aa6 100644
--- a/web/app/components/workflow/hooks/index.ts
+++ b/web/app/components/workflow/hooks/index.ts
@@ -16,3 +16,4 @@ export * from './use-shortcuts'
export * from './use-workflow-interactions'
export * from './use-workflow-mode'
export * from './use-format-time-from-now'
+export * from './use-workflow-refresh-draft'
diff --git a/web/app/components/workflow/hooks/use-workflow-interactions.ts b/web/app/components/workflow/hooks/use-workflow-interactions.ts
index 740868c594..636d3b94f9 100644
--- a/web/app/components/workflow/hooks/use-workflow-interactions.ts
+++ b/web/app/components/workflow/hooks/use-workflow-interactions.ts
@@ -313,7 +313,6 @@ export const useWorkflowZoom = () => {
export const useWorkflowUpdate = () => {
const reactflow = useReactFlow()
- const workflowStore = useWorkflowStore()
const { eventEmitter } = useEventEmitterContextContext()
const handleUpdateWorkflowCanvas = useCallback((payload: WorkflowDataUpdater) => {
@@ -333,32 +332,8 @@ export const useWorkflowUpdate = () => {
setViewport(viewport)
}, [eventEmitter, reactflow])
- const handleRefreshWorkflowDraft = useCallback(() => {
- const {
- appId,
- setSyncWorkflowDraftHash,
- setIsSyncingWorkflowDraft,
- setEnvironmentVariables,
- setEnvSecrets,
- setConversationVariables,
- } = workflowStore.getState()
- setIsSyncingWorkflowDraft(true)
- fetchWorkflowDraft(`/apps/${appId}/workflows/draft`).then((response) => {
- handleUpdateWorkflowCanvas(response.graph as WorkflowDataUpdater)
- setSyncWorkflowDraftHash(response.hash)
- setEnvSecrets((response.environment_variables || []).filter(env => env.value_type === 'secret').reduce((acc, env) => {
- acc[env.id] = env.value
- return acc
- }, {} as Record<string, string>))
- setEnvironmentVariables(response.environment_variables?.map(env => env.value_type === 'secret' ? { ...env, value: '[__HIDDEN__]' } : env) || [])
- // #TODO chatVar sync#
- setConversationVariables(response.conversation_variables || [])
- }).finally(() => setIsSyncingWorkflowDraft(false))
- }, [handleUpdateWorkflowCanvas, workflowStore])
-
return {
handleUpdateWorkflowCanvas,
- handleRefreshWorkflowDraft,
}
}
diff --git a/web/app/components/workflow/hooks/use-workflow-refresh-draft.ts b/web/app/components/workflow/hooks/use-workflow-refresh-draft.ts
new file mode 100644
index 0000000000..1948bd471d
--- /dev/null
+++ b/web/app/components/workflow/hooks/use-workflow-refresh-draft.ts
@@ -0,0 +1,9 @@
+import { useHooksStore } from '@/app/components/workflow/hooks-store'
+
+export const useWorkflowRefreshDraft = () => {
+ const handleRefreshWorkflowDraft = useHooksStore(s => s.handleRefreshWorkflowDraft)
+
+ return {
+ handleRefreshWorkflowDraft,
+ }
+}
diff --git a/web/app/components/workflow/index.tsx b/web/app/components/workflow/index.tsx
index 3d7692ed7b..549117faf7 100644
--- a/web/app/components/workflow/index.tsx
+++ b/web/app/components/workflow/index.tsx
@@ -44,7 +44,7 @@ import {
useShortcuts,
useWorkflow,
useWorkflowReadOnly,
- useWorkflowUpdate,
+ useWorkflowRefreshDraft,
} from './hooks'
import CustomNode from './nodes'
import CustomNoteNode from './note-node'
@@ -160,7 +160,7 @@ export const Workflow: FC = memo(({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [])
- const { handleRefreshWorkflowDraft } = useWorkflowUpdate()
+ const { handleRefreshWorkflowDraft } = useWorkflowRefreshDraft()
const handleSyncWorkflowDraftWhenPageClose = useCallback(() => {
if (document.visibilityState === 'hidden')
syncWorkflowDraftWhenPageClose()
From f23cf983173b5768185035f72f7119b5cf11a473 Mon Sep 17 00:00:00 2001
From: -LAN-
Date: Tue, 6 May 2025 21:14:51 +0800
Subject: [PATCH 6/6] refactor: Remove RepositoryFactory (#19176)
Signed-off-by: -LAN-
---
api/app_factory.py | 2 -
.../app/apps/advanced_chat/app_generator.py | 32 +-
.../advanced_chat/generate_task_pipeline.py | 6 +-
api/core/app/apps/workflow/app_generator.py | 34 +-
.../easy_ui_based_generate_task_pipeline.py | 2 +-
api/core/base/__init__.py | 1 +
api/core/base/tts/__init__.py | 6 +
.../tts}/app_generator_tts_publisher.py | 0
api/core/ops/langfuse_trace/langfuse_trace.py | 6 +-
.../ops/langsmith_trace/langsmith_trace.py | 10 +-
api/core/ops/opik_trace/opik_trace.py | 10 +-
api/core/repositories/__init__.py | 6 +
api/core/repositories/repository_registry.py | 87 -----
...emy_workflow_node_execution_repository.py} | 4 +-
.../workflow_node_execution/__init__.py | 9 -
api/core/workflow/repository/__init__.py | 5 +-
.../workflow/repository/repository_factory.py | 97 -----
.../workflow_app_generate_task_pipeline.py} | 6 +-
.../workflow_cycle_manager.py} | 2 +-
api/extensions/ext_repositories.py | 18 -
api/services/workflow_run_service.py | 10 +-
api/services/workflow_service.py | 10 +-
api/tasks/remove_app_and_related_data_task.py | 10 +-
.../workflow/test_workflow_cycle_manager.py | 348 ++++++++++++++++++
.../test_sqlalchemy_repository.py | 10 +-
25 files changed, 423 insertions(+), 308 deletions(-)
create mode 100644 api/core/base/__init__.py
create mode 100644 api/core/base/tts/__init__.py
rename api/core/{app/apps/advanced_chat => base/tts}/app_generator_tts_publisher.py (100%)
delete mode 100644 api/core/repositories/repository_registry.py
rename api/core/repositories/{workflow_node_execution/sqlalchemy_repository.py => sqlalchemy_workflow_node_execution_repository.py} (98%)
delete mode 100644 api/core/repositories/workflow_node_execution/__init__.py
delete mode 100644 api/core/workflow/repository/repository_factory.py
rename api/core/{app/apps/workflow/generate_task_pipeline.py => workflow/workflow_app_generate_task_pipeline.py} (99%)
rename api/core/{app/task_pipeline/workflow_cycle_manage.py => workflow/workflow_cycle_manager.py} (99%)
delete mode 100644 api/extensions/ext_repositories.py
create mode 100644 api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py
diff --git a/api/app_factory.py b/api/app_factory.py
index 586f2ded9e..1c886ac5c7 100644
--- a/api/app_factory.py
+++ b/api/app_factory.py
@@ -54,7 +54,6 @@ def initialize_extensions(app: DifyApp):
ext_otel,
ext_proxy_fix,
ext_redis,
- ext_repositories,
ext_sentry,
ext_set_secretkey,
ext_storage,
@@ -75,7 +74,6 @@ def initialize_extensions(app: DifyApp):
ext_migrate,
ext_redis,
ext_storage,
- ext_repositories,
ext_celery,
ext_login,
ext_mail,
diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py
index fd0d7fafbd..4b0e64130b 100644
--- a/api/core/app/apps/advanced_chat/app_generator.py
+++ b/api/core/app/apps/advanced_chat/app_generator.py
@@ -25,7 +25,7 @@ from core.app.entities.task_entities import ChatbotAppBlockingResponse, ChatbotA
from core.model_runtime.errors.invoke import InvokeAuthorizationError
from core.ops.ops_trace_manager import TraceQueueManager
from core.prompt.utils.get_thread_messages_length import get_thread_messages_length
-from core.workflow.repository import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
from extensions.ext_database import db
from factories import file_factory
@@ -163,12 +163,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
# Create workflow node execution repository
session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
- workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
- params={
- "tenant_id": application_generate_entity.app_config.tenant_id,
- "app_id": application_generate_entity.app_config.app_id,
- "session_factory": session_factory,
- }
+ workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+ session_factory=session_factory,
+ tenant_id=application_generate_entity.app_config.tenant_id,
+ app_id=application_generate_entity.app_config.app_id,
)
return self._generate(
@@ -231,12 +229,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
# Create workflow node execution repository
session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
- workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
- params={
- "tenant_id": application_generate_entity.app_config.tenant_id,
- "app_id": application_generate_entity.app_config.app_id,
- "session_factory": session_factory,
- }
+ workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+ session_factory=session_factory,
+ tenant_id=application_generate_entity.app_config.tenant_id,
+ app_id=application_generate_entity.app_config.app_id,
)
return self._generate(
@@ -297,12 +293,10 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
# Create workflow node execution repository
session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
- workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
- params={
- "tenant_id": application_generate_entity.app_config.tenant_id,
- "app_id": application_generate_entity.app_config.app_id,
- "session_factory": session_factory,
- }
+ workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+ session_factory=session_factory,
+ tenant_id=application_generate_entity.app_config.tenant_id,
+ app_id=application_generate_entity.app_config.app_id,
)
return self._generate(
diff --git a/api/core/app/apps/advanced_chat/generate_task_pipeline.py b/api/core/app/apps/advanced_chat/generate_task_pipeline.py
index 1f4db54a9c..f71c49d112 100644
--- a/api/core/app/apps/advanced_chat/generate_task_pipeline.py
+++ b/api/core/app/apps/advanced_chat/generate_task_pipeline.py
@@ -9,7 +9,6 @@ from sqlalchemy import select
from sqlalchemy.orm import Session
from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME
-from core.app.apps.advanced_chat.app_generator_tts_publisher import AppGeneratorTTSPublisher, AudioTrunk
from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
from core.app.entities.app_invoke_entities import (
AdvancedChatAppGenerateEntity,
@@ -58,7 +57,7 @@ from core.app.entities.task_entities import (
)
from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
from core.app.task_pipeline.message_cycle_manage import MessageCycleManage
-from core.app.task_pipeline.workflow_cycle_manage import WorkflowCycleManage
+from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
from core.model_runtime.entities.llm_entities import LLMUsage
from core.model_runtime.utils.encoders import jsonable_encoder
from core.ops.ops_trace_manager import TraceQueueManager
@@ -66,6 +65,7 @@ from core.workflow.enums import SystemVariableKey
from core.workflow.graph_engine.entities.graph_runtime_state import GraphRuntimeState
from core.workflow.nodes import NodeType
from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
+from core.workflow.workflow_cycle_manager import WorkflowCycleManager
from events.message_event import message_was_created
from extensions.ext_database import db
from models import Conversation, EndUser, Message, MessageFile
@@ -113,7 +113,7 @@ class AdvancedChatAppGenerateTaskPipeline:
else:
raise NotImplementedError(f"User type not supported: {type(user)}")
- self._workflow_cycle_manager = WorkflowCycleManage(
+ self._workflow_cycle_manager = WorkflowCycleManager(
application_generate_entity=application_generate_entity,
workflow_system_variables={
SystemVariableKey.QUERY: message.query,
diff --git a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py
index 9c3d78a338..1d67671974 100644
--- a/api/core/app/apps/workflow/app_generator.py
+++ b/api/core/app/apps/workflow/app_generator.py
@@ -18,13 +18,13 @@ from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
from core.app.apps.workflow.app_queue_manager import WorkflowAppQueueManager
from core.app.apps.workflow.app_runner import WorkflowAppRunner
from core.app.apps.workflow.generate_response_converter import WorkflowAppGenerateResponseConverter
-from core.app.apps.workflow.generate_task_pipeline import WorkflowAppGenerateTaskPipeline
from core.app.entities.app_invoke_entities import InvokeFrom, WorkflowAppGenerateEntity
from core.app.entities.task_entities import WorkflowAppBlockingResponse, WorkflowAppStreamResponse
from core.model_runtime.errors.invoke import InvokeAuthorizationError
from core.ops.ops_trace_manager import TraceQueueManager
-from core.workflow.repository import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
+from core.workflow.workflow_app_generate_task_pipeline import WorkflowAppGenerateTaskPipeline
from extensions.ext_database import db
from factories import file_factory
from models import Account, App, EndUser, Workflow
@@ -138,12 +138,10 @@ class WorkflowAppGenerator(BaseAppGenerator):
# Create workflow node execution repository
session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
- workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
- params={
- "tenant_id": application_generate_entity.app_config.tenant_id,
- "app_id": application_generate_entity.app_config.app_id,
- "session_factory": session_factory,
- }
+ workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+ session_factory=session_factory,
+ tenant_id=application_generate_entity.app_config.tenant_id,
+ app_id=application_generate_entity.app_config.app_id,
)
return self._generate(
@@ -264,12 +262,10 @@ class WorkflowAppGenerator(BaseAppGenerator):
# Create workflow node execution repository
session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
- workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
- params={
- "tenant_id": application_generate_entity.app_config.tenant_id,
- "app_id": application_generate_entity.app_config.app_id,
- "session_factory": session_factory,
- }
+ workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+ session_factory=session_factory,
+ tenant_id=application_generate_entity.app_config.tenant_id,
+ app_id=application_generate_entity.app_config.app_id,
)
return self._generate(
@@ -329,12 +325,10 @@ class WorkflowAppGenerator(BaseAppGenerator):
# Create workflow node execution repository
session_factory = sessionmaker(bind=db.engine, expire_on_commit=False)
- workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
- params={
- "tenant_id": application_generate_entity.app_config.tenant_id,
- "app_id": application_generate_entity.app_config.app_id,
- "session_factory": session_factory,
- }
+ workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+ session_factory=session_factory,
+ tenant_id=application_generate_entity.app_config.tenant_id,
+ app_id=application_generate_entity.app_config.app_id,
)
return self._generate(
diff --git a/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py b/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py
index 8c9c26d36e..a98a42f5df 100644
--- a/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py
+++ b/api/core/app/task_pipeline/easy_ui_based_generate_task_pipeline.py
@@ -9,7 +9,6 @@ from sqlalchemy import select
from sqlalchemy.orm import Session
from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME
-from core.app.apps.advanced_chat.app_generator_tts_publisher import AppGeneratorTTSPublisher, AudioTrunk
from core.app.apps.base_app_queue_manager import AppQueueManager, PublishFrom
from core.app.entities.app_invoke_entities import (
AgentChatAppGenerateEntity,
@@ -45,6 +44,7 @@ from core.app.entities.task_entities import (
)
from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
from core.app.task_pipeline.message_cycle_manage import MessageCycleManage
+from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
from core.model_manager import ModelInstance
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
from core.model_runtime.entities.message_entities import (
diff --git a/api/core/base/__init__.py b/api/core/base/__init__.py
new file mode 100644
index 0000000000..3f4bd3b771
--- /dev/null
+++ b/api/core/base/__init__.py
@@ -0,0 +1 @@
+# Core base package
diff --git a/api/core/base/tts/__init__.py b/api/core/base/tts/__init__.py
new file mode 100644
index 0000000000..37b6eeebb0
--- /dev/null
+++ b/api/core/base/tts/__init__.py
@@ -0,0 +1,6 @@
+from core.base.tts.app_generator_tts_publisher import AppGeneratorTTSPublisher, AudioTrunk
+
+__all__ = [
+ "AppGeneratorTTSPublisher",
+ "AudioTrunk",
+]
diff --git a/api/core/app/apps/advanced_chat/app_generator_tts_publisher.py b/api/core/base/tts/app_generator_tts_publisher.py
similarity index 100%
rename from api/core/app/apps/advanced_chat/app_generator_tts_publisher.py
rename to api/core/base/tts/app_generator_tts_publisher.py
diff --git a/api/core/ops/langfuse_trace/langfuse_trace.py b/api/core/ops/langfuse_trace/langfuse_trace.py
index b229d244f7..c74617e558 100644
--- a/api/core/ops/langfuse_trace/langfuse_trace.py
+++ b/api/core/ops/langfuse_trace/langfuse_trace.py
@@ -29,7 +29,7 @@ from core.ops.langfuse_trace.entities.langfuse_trace_entity import (
UnitEnum,
)
from core.ops.utils import filter_none_values
-from core.workflow.repository.repository_factory import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from extensions.ext_database import db
from models.model import EndUser
@@ -113,8 +113,8 @@ class LangFuseDataTrace(BaseTraceInstance):
# through workflow_run_id get all_nodes_execution using repository
session_factory = sessionmaker(bind=db.engine)
- workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
- params={"tenant_id": trace_info.tenant_id, "session_factory": session_factory},
+ workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+ session_factory=session_factory, tenant_id=trace_info.tenant_id
)
# Get all executions for this workflow run
diff --git a/api/core/ops/langsmith_trace/langsmith_trace.py b/api/core/ops/langsmith_trace/langsmith_trace.py
index 78a51ff36e..d1e16d3152 100644
--- a/api/core/ops/langsmith_trace/langsmith_trace.py
+++ b/api/core/ops/langsmith_trace/langsmith_trace.py
@@ -28,7 +28,7 @@ from core.ops.langsmith_trace.entities.langsmith_trace_entity import (
LangSmithRunUpdateModel,
)
from core.ops.utils import filter_none_values, generate_dotted_order
-from core.workflow.repository.repository_factory import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from extensions.ext_database import db
from models.model import EndUser, MessageFile
@@ -137,12 +137,8 @@ class LangSmithDataTrace(BaseTraceInstance):
# through workflow_run_id get all_nodes_execution using repository
session_factory = sessionmaker(bind=db.engine)
- workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
- params={
- "tenant_id": trace_info.tenant_id,
- "app_id": trace_info.metadata.get("app_id"),
- "session_factory": session_factory,
- },
+ workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+ session_factory=session_factory, tenant_id=trace_info.tenant_id, app_id=trace_info.metadata.get("app_id")
)
# Get all executions for this workflow run
diff --git a/api/core/ops/opik_trace/opik_trace.py b/api/core/ops/opik_trace/opik_trace.py
index a14b5afb8e..1484041447 100644
--- a/api/core/ops/opik_trace/opik_trace.py
+++ b/api/core/ops/opik_trace/opik_trace.py
@@ -22,7 +22,7 @@ from core.ops.entities.trace_entity import (
TraceTaskName,
WorkflowTraceInfo,
)
-from core.workflow.repository.repository_factory import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from extensions.ext_database import db
from models.model import EndUser, MessageFile
@@ -150,12 +150,8 @@ class OpikDataTrace(BaseTraceInstance):
# through workflow_run_id get all_nodes_execution using repository
session_factory = sessionmaker(bind=db.engine)
- workflow_node_execution_repository = RepositoryFactory.create_workflow_node_execution_repository(
- params={
- "tenant_id": trace_info.tenant_id,
- "app_id": trace_info.metadata.get("app_id"),
- "session_factory": session_factory,
- },
+ workflow_node_execution_repository = SQLAlchemyWorkflowNodeExecutionRepository(
+ session_factory=session_factory, tenant_id=trace_info.tenant_id, app_id=trace_info.metadata.get("app_id")
)
# Get all executions for this workflow run
diff --git a/api/core/repositories/__init__.py b/api/core/repositories/__init__.py
index 5c70d50cde..6452317120 100644
--- a/api/core/repositories/__init__.py
+++ b/api/core/repositories/__init__.py
@@ -4,3 +4,9 @@ Repository implementations for data access.
This package contains concrete implementations of the repository interfaces
defined in the core.workflow.repository package.
"""
+
+from core.repositories.sqlalchemy_workflow_node_execution_repository import SQLAlchemyWorkflowNodeExecutionRepository
+
+__all__ = [
+ "SQLAlchemyWorkflowNodeExecutionRepository",
+]
diff --git a/api/core/repositories/repository_registry.py b/api/core/repositories/repository_registry.py
deleted file mode 100644
index b66f3ba8e6..0000000000
--- a/api/core/repositories/repository_registry.py
+++ /dev/null
@@ -1,87 +0,0 @@
-"""
-Registry for repository implementations.
-
-This module is responsible for registering factory functions with the repository factory.
-"""
-
-import logging
-from collections.abc import Mapping
-from typing import Any
-
-from sqlalchemy.orm import sessionmaker
-
-from configs import dify_config
-from core.repositories.workflow_node_execution import SQLAlchemyWorkflowNodeExecutionRepository
-from core.workflow.repository.repository_factory import RepositoryFactory
-from extensions.ext_database import db
-
-logger = logging.getLogger(__name__)
-
-# Storage type constants
-STORAGE_TYPE_RDBMS = "rdbms"
-STORAGE_TYPE_HYBRID = "hybrid"
-
-
-def register_repositories() -> None:
- """
- Register repository factory functions with the RepositoryFactory.
-
- This function reads configuration settings to determine which repository
- implementations to register.
- """
- # Configure WorkflowNodeExecutionRepository factory based on configuration
- workflow_node_execution_storage = dify_config.WORKFLOW_NODE_EXECUTION_STORAGE
-
- # Check storage type and register appropriate implementation
- if workflow_node_execution_storage == STORAGE_TYPE_RDBMS:
- # Register SQLAlchemy implementation for RDBMS storage
- logger.info("Registering WorkflowNodeExecution repository with RDBMS storage")
- RepositoryFactory.register_workflow_node_execution_factory(create_workflow_node_execution_repository)
- elif workflow_node_execution_storage == STORAGE_TYPE_HYBRID:
- # Hybrid storage is not yet implemented
- raise NotImplementedError("Hybrid storage for WorkflowNodeExecution repository is not yet implemented")
- else:
- # Unknown storage type
- raise ValueError(
- f"Unknown storage type '{workflow_node_execution_storage}' for WorkflowNodeExecution repository. "
- f"Supported types: {STORAGE_TYPE_RDBMS}"
- )
-
-
-def create_workflow_node_execution_repository(params: Mapping[str, Any]) -> SQLAlchemyWorkflowNodeExecutionRepository:
- """
- Create a WorkflowNodeExecutionRepository instance using SQLAlchemy implementation.
-
- This factory function creates a repository for the RDBMS storage type.
-
- Args:
- params: Parameters for creating the repository, including:
- - tenant_id: Required. The tenant ID for multi-tenancy.
- - app_id: Optional. The application ID for filtering.
- - session_factory: Optional. A SQLAlchemy sessionmaker instance. If not provided,
- a new sessionmaker will be created using the global database engine.
-
- Returns:
- A WorkflowNodeExecutionRepository instance
-
- Raises:
- ValueError: If required parameters are missing
- """
- # Extract required parameters
- tenant_id = params.get("tenant_id")
- if tenant_id is None:
- raise ValueError("tenant_id is required for WorkflowNodeExecution repository with RDBMS storage")
-
- # Extract optional parameters
- app_id = params.get("app_id")
-
- # Use the session_factory from params if provided, otherwise create one using the global db engine
- session_factory = params.get("session_factory")
- if session_factory is None:
- # Create a sessionmaker using the same engine as the global db session
- session_factory = sessionmaker(bind=db.engine)
-
- # Create and return the repository
- return SQLAlchemyWorkflowNodeExecutionRepository(
- session_factory=session_factory, tenant_id=tenant_id, app_id=app_id
- )
diff --git a/api/core/repositories/workflow_node_execution/sqlalchemy_repository.py b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py
similarity index 98%
rename from api/core/repositories/workflow_node_execution/sqlalchemy_repository.py
rename to api/core/repositories/sqlalchemy_workflow_node_execution_repository.py
index b1d37163a4..8bf2ab8761 100644
--- a/api/core/repositories/workflow_node_execution/sqlalchemy_repository.py
+++ b/api/core/repositories/sqlalchemy_workflow_node_execution_repository.py
@@ -10,13 +10,13 @@ from sqlalchemy import UnaryExpression, asc, delete, desc, select
from sqlalchemy.engine import Engine
from sqlalchemy.orm import sessionmaker
-from core.workflow.repository.workflow_node_execution_repository import OrderConfig
+from core.workflow.repository.workflow_node_execution_repository import OrderConfig, WorkflowNodeExecutionRepository
from models.workflow import WorkflowNodeExecution, WorkflowNodeExecutionStatus, WorkflowNodeExecutionTriggeredFrom
logger = logging.getLogger(__name__)
-class SQLAlchemyWorkflowNodeExecutionRepository:
+class SQLAlchemyWorkflowNodeExecutionRepository(WorkflowNodeExecutionRepository):
"""
SQLAlchemy implementation of the WorkflowNodeExecutionRepository interface.
diff --git a/api/core/repositories/workflow_node_execution/__init__.py b/api/core/repositories/workflow_node_execution/__init__.py
deleted file mode 100644
index 76e8282b7d..0000000000
--- a/api/core/repositories/workflow_node_execution/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-"""
-WorkflowNodeExecution repository implementations.
-"""
-
-from core.repositories.workflow_node_execution.sqlalchemy_repository import SQLAlchemyWorkflowNodeExecutionRepository
-
-__all__ = [
- "SQLAlchemyWorkflowNodeExecutionRepository",
-]
diff --git a/api/core/workflow/repository/__init__.py b/api/core/workflow/repository/__init__.py
index d91506e72f..672abb6583 100644
--- a/api/core/workflow/repository/__init__.py
+++ b/api/core/workflow/repository/__init__.py
@@ -6,10 +6,9 @@ for accessing and manipulating data, regardless of the underlying
storage mechanism.
"""
-from core.workflow.repository.repository_factory import RepositoryFactory
-from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
+from core.workflow.repository.workflow_node_execution_repository import OrderConfig, WorkflowNodeExecutionRepository
__all__ = [
- "RepositoryFactory",
+ "OrderConfig",
"WorkflowNodeExecutionRepository",
]
diff --git a/api/core/workflow/repository/repository_factory.py b/api/core/workflow/repository/repository_factory.py
deleted file mode 100644
index 45d6f5d842..0000000000
--- a/api/core/workflow/repository/repository_factory.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
-Repository factory for creating repository instances.
-
-This module provides a simple factory interface for creating repository instances.
-It does not contain any implementation details or dependencies on specific repositories.
-"""
-
-from collections.abc import Callable, Mapping
-from typing import Any, Literal, Optional, cast
-
-from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
-
-# Type for factory functions - takes a dict of parameters and returns any repository type
-RepositoryFactoryFunc = Callable[[Mapping[str, Any]], Any]
-
-# Type for workflow node execution factory function
-WorkflowNodeExecutionFactoryFunc = Callable[[Mapping[str, Any]], WorkflowNodeExecutionRepository]
-
-# Repository type literals
-_RepositoryType = Literal["workflow_node_execution"]
-
-
-class RepositoryFactory:
- """
- Factory class for creating repository instances.
-
- This factory delegates the actual repository creation to implementation-specific
- factory functions that are registered with the factory at runtime.
- """
-
- # Dictionary to store factory functions
- _factory_functions: dict[str, RepositoryFactoryFunc] = {}
-
- @classmethod
- def _register_factory(cls, repository_type: _RepositoryType, factory_func: RepositoryFactoryFunc) -> None:
- """
- Register a factory function for a specific repository type.
- This is a private method and should not be called directly.
-
- Args:
- repository_type: The type of repository (e.g., 'workflow_node_execution')
- factory_func: A function that takes parameters and returns a repository instance
- """
- cls._factory_functions[repository_type] = factory_func
-
- @classmethod
- def _create_repository(cls, repository_type: _RepositoryType, params: Optional[Mapping[str, Any]] = None) -> Any:
- """
- Create a new repository instance with the provided parameters.
- This is a private method and should not be called directly.
-
- Args:
- repository_type: The type of repository to create
- params: A dictionary of parameters to pass to the factory function
-
- Returns:
- A new instance of the requested repository
-
- Raises:
- ValueError: If no factory function is registered for the repository type
- """
- if repository_type not in cls._factory_functions:
- raise ValueError(f"No factory function registered for repository type '{repository_type}'")
-
- # Use empty dict if params is None
- params = params or {}
-
- return cls._factory_functions[repository_type](params)
-
- @classmethod
- def register_workflow_node_execution_factory(cls, factory_func: WorkflowNodeExecutionFactoryFunc) -> None:
- """
- Register a factory function for the workflow node execution repository.
-
- Args:
- factory_func: A function that takes parameters and returns a WorkflowNodeExecutionRepository instance
- """
- cls._register_factory("workflow_node_execution", factory_func)
-
- @classmethod
- def create_workflow_node_execution_repository(
- cls, params: Optional[Mapping[str, Any]] = None
- ) -> WorkflowNodeExecutionRepository:
- """
- Create a new WorkflowNodeExecutionRepository instance with the provided parameters.
-
- Args:
- params: A dictionary of parameters to pass to the factory function
-
- Returns:
- A new instance of the WorkflowNodeExecutionRepository
-
- Raises:
- ValueError: If no factory function is registered for the workflow_node_execution repository type
- """
- # We can safely cast here because we've registered a WorkflowNodeExecutionFactoryFunc
- return cast(WorkflowNodeExecutionRepository, cls._create_repository("workflow_node_execution", params))
diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/workflow/workflow_app_generate_task_pipeline.py
similarity index 99%
rename from api/core/app/apps/workflow/generate_task_pipeline.py
rename to api/core/workflow/workflow_app_generate_task_pipeline.py
index 67cad9c998..10a2d8b38b 100644
--- a/api/core/app/apps/workflow/generate_task_pipeline.py
+++ b/api/core/workflow/workflow_app_generate_task_pipeline.py
@@ -6,7 +6,6 @@ from typing import Optional, Union
from sqlalchemy.orm import Session
from constants.tts_auto_play_timeout import TTS_AUTO_PLAY_TIMEOUT, TTS_AUTO_PLAY_YIELD_CPU_TIME
-from core.app.apps.advanced_chat.app_generator_tts_publisher import AppGeneratorTTSPublisher, AudioTrunk
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import (
InvokeFrom,
@@ -52,10 +51,11 @@ from core.app.entities.task_entities import (
WorkflowTaskState,
)
from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
-from core.app.task_pipeline.workflow_cycle_manage import WorkflowCycleManage
+from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
from core.ops.ops_trace_manager import TraceQueueManager
from core.workflow.enums import SystemVariableKey
from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
+from core.workflow.workflow_cycle_manager import WorkflowCycleManager
from extensions.ext_database import db
from models.account import Account
from models.enums import CreatedByRole
@@ -102,7 +102,7 @@ class WorkflowAppGenerateTaskPipeline:
else:
raise ValueError(f"Invalid user type: {type(user)}")
- self._workflow_cycle_manager = WorkflowCycleManage(
+ self._workflow_cycle_manager = WorkflowCycleManager(
application_generate_entity=application_generate_entity,
workflow_system_variables={
SystemVariableKey.FILES: application_generate_entity.files,
diff --git a/api/core/app/task_pipeline/workflow_cycle_manage.py b/api/core/workflow/workflow_cycle_manager.py
similarity index 99%
rename from api/core/app/task_pipeline/workflow_cycle_manage.py
rename to api/core/workflow/workflow_cycle_manager.py
index 09e2ee74e6..01d5db4303 100644
--- a/api/core/app/task_pipeline/workflow_cycle_manage.py
+++ b/api/core/workflow/workflow_cycle_manager.py
@@ -69,7 +69,7 @@ from models.workflow import (
)
-class WorkflowCycleManage:
+class WorkflowCycleManager:
def __init__(
self,
*,
diff --git a/api/extensions/ext_repositories.py b/api/extensions/ext_repositories.py
deleted file mode 100644
index b8cfea121b..0000000000
--- a/api/extensions/ext_repositories.py
+++ /dev/null
@@ -1,18 +0,0 @@
-"""
-Extension for initializing repositories.
-
-This extension registers repository implementations with the RepositoryFactory.
-"""
-
-from core.repositories.repository_registry import register_repositories
-from dify_app import DifyApp
-
-
-def init_app(_app: DifyApp) -> None:
- """
- Initialize repository implementations.
-
- Args:
- _app: The Flask application instance (unused)
- """
- register_repositories()
diff --git a/api/services/workflow_run_service.py b/api/services/workflow_run_service.py
index f7c4f500a8..6d5b737962 100644
--- a/api/services/workflow_run_service.py
+++ b/api/services/workflow_run_service.py
@@ -2,7 +2,7 @@ import threading
from typing import Optional
import contexts
-from core.workflow.repository import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.repository.workflow_node_execution_repository import OrderConfig
from extensions.ext_database import db
from libs.infinite_scroll_pagination import InfiniteScrollPagination
@@ -129,12 +129,8 @@ class WorkflowRunService:
return []
# Use the repository to get the node executions
- repository = RepositoryFactory.create_workflow_node_execution_repository(
- params={
- "tenant_id": app_model.tenant_id,
- "app_id": app_model.id,
- "session_factory": db.session.get_bind(),
- }
+ repository = SQLAlchemyWorkflowNodeExecutionRepository(
+ session_factory=db.engine, tenant_id=app_model.tenant_id, app_id=app_model.id
)
# Use the repository to get the node executions with ordering
diff --git a/api/services/workflow_service.py b/api/services/workflow_service.py
index ebe65e5d5f..331dba8bf1 100644
--- a/api/services/workflow_service.py
+++ b/api/services/workflow_service.py
@@ -11,6 +11,7 @@ from sqlalchemy.orm import Session
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
from core.app.apps.workflow.app_config_manager import WorkflowAppConfigManager
from core.model_runtime.utils.encoders import jsonable_encoder
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.variables import Variable
from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.errors import WorkflowNodeRunFailedError
@@ -21,7 +22,6 @@ from core.workflow.nodes.enums import ErrorStrategy
from core.workflow.nodes.event import RunCompletedEvent
from core.workflow.nodes.event.types import NodeEvent
from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING
-from core.workflow.repository import RepositoryFactory
from core.workflow.workflow_entry import WorkflowEntry
from events.app_event import app_draft_workflow_was_synced, app_published_workflow_was_updated
from extensions.ext_database import db
@@ -285,12 +285,8 @@ class WorkflowService:
workflow_node_execution.workflow_id = draft_workflow.id
# Use the repository to save the workflow node execution
- repository = RepositoryFactory.create_workflow_node_execution_repository(
- params={
- "tenant_id": app_model.tenant_id,
- "app_id": app_model.id,
- "session_factory": db.session.get_bind(),
- }
+ repository = SQLAlchemyWorkflowNodeExecutionRepository(
+ session_factory=db.engine, tenant_id=app_model.tenant_id, app_id=app_model.id
)
repository.save(workflow_node_execution)
diff --git a/api/tasks/remove_app_and_related_data_task.py b/api/tasks/remove_app_and_related_data_task.py
index dedf1c5334..d5a783396a 100644
--- a/api/tasks/remove_app_and_related_data_task.py
+++ b/api/tasks/remove_app_and_related_data_task.py
@@ -7,7 +7,7 @@ from celery import shared_task # type: ignore
from sqlalchemy import delete
from sqlalchemy.exc import SQLAlchemyError
-from core.workflow.repository import RepositoryFactory
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from extensions.ext_database import db
from models.dataset import AppDatasetJoin
from models.model import (
@@ -189,12 +189,8 @@ def _delete_app_workflow_runs(tenant_id: str, app_id: str):
def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
# Create a repository instance for WorkflowNodeExecution
- repository = RepositoryFactory.create_workflow_node_execution_repository(
- params={
- "tenant_id": tenant_id,
- "app_id": app_id,
- "session_factory": db.session.get_bind(),
- }
+ repository = SQLAlchemyWorkflowNodeExecutionRepository(
+ session_factory=db.engine, tenant_id=tenant_id, app_id=app_id
)
# Use the clear method to delete all records for this tenant_id and app_id
diff --git a/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py b/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py
new file mode 100644
index 0000000000..6b00b203c4
--- /dev/null
+++ b/api/tests/unit_tests/core/workflow/test_workflow_cycle_manager.py
@@ -0,0 +1,348 @@
+import json
+import time
+from datetime import UTC, datetime
+from unittest.mock import MagicMock, patch
+
+import pytest
+from sqlalchemy.orm import Session
+
+from core.app.entities.app_invoke_entities import AdvancedChatAppGenerateEntity, InvokeFrom
+from core.app.entities.queue_entities import (
+ QueueNodeFailedEvent,
+ QueueNodeStartedEvent,
+ QueueNodeSucceededEvent,
+)
+from core.workflow.enums import SystemVariableKey
+from core.workflow.nodes import NodeType
+from core.workflow.repository.workflow_node_execution_repository import WorkflowNodeExecutionRepository
+from core.workflow.workflow_cycle_manager import WorkflowCycleManager
+from models.enums import CreatedByRole
+from models.workflow import (
+ Workflow,
+ WorkflowNodeExecution,
+ WorkflowNodeExecutionStatus,
+ WorkflowRun,
+ WorkflowRunStatus,
+)
+
+
+@pytest.fixture
+def mock_app_generate_entity():
+ entity = MagicMock(spec=AdvancedChatAppGenerateEntity)
+ entity.inputs = {"query": "test query"}
+ entity.invoke_from = InvokeFrom.WEB_APP
+ # Create app_config as a separate mock
+ app_config = MagicMock()
+ app_config.tenant_id = "test-tenant-id"
+ app_config.app_id = "test-app-id"
+ entity.app_config = app_config
+ return entity
+
+
+@pytest.fixture
+def mock_workflow_system_variables():
+ return {
+ SystemVariableKey.QUERY: "test query",
+ SystemVariableKey.CONVERSATION_ID: "test-conversation-id",
+ SystemVariableKey.USER_ID: "test-user-id",
+ SystemVariableKey.APP_ID: "test-app-id",
+ SystemVariableKey.WORKFLOW_ID: "test-workflow-id",
+ SystemVariableKey.WORKFLOW_RUN_ID: "test-workflow-run-id",
+ }
+
+
+@pytest.fixture
+def mock_node_execution_repository():
+ repo = MagicMock(spec=WorkflowNodeExecutionRepository)
+ repo.get_by_node_execution_id.return_value = None
+ repo.get_running_executions.return_value = []
+ return repo
+
+
+@pytest.fixture
+def workflow_cycle_manager(mock_app_generate_entity, mock_workflow_system_variables, mock_node_execution_repository):
+ return WorkflowCycleManager(
+ application_generate_entity=mock_app_generate_entity,
+ workflow_system_variables=mock_workflow_system_variables,
+ workflow_node_execution_repository=mock_node_execution_repository,
+ )
+
+
+@pytest.fixture
+def mock_session():
+ session = MagicMock(spec=Session)
+ return session
+
+
+@pytest.fixture
+def mock_workflow():
+ workflow = MagicMock(spec=Workflow)
+ workflow.id = "test-workflow-id"
+ workflow.tenant_id = "test-tenant-id"
+ workflow.app_id = "test-app-id"
+ workflow.type = "chat"
+ workflow.version = "1.0"
+ workflow.graph = json.dumps({"nodes": [], "edges": []})
+ return workflow
+
+
+@pytest.fixture
+def mock_workflow_run():
+ workflow_run = MagicMock(spec=WorkflowRun)
+ workflow_run.id = "test-workflow-run-id"
+ workflow_run.tenant_id = "test-tenant-id"
+ workflow_run.app_id = "test-app-id"
+ workflow_run.workflow_id = "test-workflow-id"
+ workflow_run.status = WorkflowRunStatus.RUNNING
+ workflow_run.created_by_role = CreatedByRole.ACCOUNT
+ workflow_run.created_by = "test-user-id"
+ workflow_run.created_at = datetime.now(UTC).replace(tzinfo=None)
+ workflow_run.inputs_dict = {"query": "test query"}
+ workflow_run.outputs_dict = {"answer": "test answer"}
+ return workflow_run
+
+
+def test_init(
+ workflow_cycle_manager, mock_app_generate_entity, mock_workflow_system_variables, mock_node_execution_repository
+):
+ """Test initialization of WorkflowCycleManager"""
+ assert workflow_cycle_manager._workflow_run is None
+ assert workflow_cycle_manager._workflow_node_executions == {}
+ assert workflow_cycle_manager._application_generate_entity == mock_app_generate_entity
+ assert workflow_cycle_manager._workflow_system_variables == mock_workflow_system_variables
+ assert workflow_cycle_manager._workflow_node_execution_repository == mock_node_execution_repository
+
+
+def test_handle_workflow_run_start(workflow_cycle_manager, mock_session, mock_workflow):
+ """Test _handle_workflow_run_start method"""
+ # Mock session.scalar to return the workflow and max sequence
+ mock_session.scalar.side_effect = [mock_workflow, 5]
+
+ # Call the method
+ workflow_run = workflow_cycle_manager._handle_workflow_run_start(
+ session=mock_session,
+ workflow_id="test-workflow-id",
+ user_id="test-user-id",
+ created_by_role=CreatedByRole.ACCOUNT,
+ )
+
+ # Verify the result
+ assert workflow_run.tenant_id == mock_workflow.tenant_id
+ assert workflow_run.app_id == mock_workflow.app_id
+ assert workflow_run.workflow_id == mock_workflow.id
+ assert workflow_run.sequence_number == 6 # max_sequence + 1
+ assert workflow_run.status == WorkflowRunStatus.RUNNING
+ assert workflow_run.created_by_role == CreatedByRole.ACCOUNT
+ assert workflow_run.created_by == "test-user-id"
+
+ # Verify session.add was called
+ mock_session.add.assert_called_once_with(workflow_run)
+
+
+def test_handle_workflow_run_success(workflow_cycle_manager, mock_session, mock_workflow_run):
+ """Test _handle_workflow_run_success method"""
+ # Mock _get_workflow_run to return the mock_workflow_run
+ with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run):
+ # Call the method
+ result = workflow_cycle_manager._handle_workflow_run_success(
+ session=mock_session,
+ workflow_run_id="test-workflow-run-id",
+ start_at=time.perf_counter() - 10, # 10 seconds ago
+ total_tokens=100,
+ total_steps=5,
+ outputs={"answer": "test answer"},
+ )
+
+ # Verify the result
+ assert result == mock_workflow_run
+ assert result.status == WorkflowRunStatus.SUCCEEDED
+ assert result.outputs == json.dumps({"answer": "test answer"})
+ assert result.total_tokens == 100
+ assert result.total_steps == 5
+ assert result.finished_at is not None
+
+
+def test_handle_workflow_run_failed(workflow_cycle_manager, mock_session, mock_workflow_run):
+ """Test _handle_workflow_run_failed method"""
+ # Mock _get_workflow_run to return the mock_workflow_run
+ with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run):
+ # Mock get_running_executions to return an empty list
+ workflow_cycle_manager._workflow_node_execution_repository.get_running_executions.return_value = []
+
+ # Call the method
+ result = workflow_cycle_manager._handle_workflow_run_failed(
+ session=mock_session,
+ workflow_run_id="test-workflow-run-id",
+ start_at=time.perf_counter() - 10, # 10 seconds ago
+ total_tokens=50,
+ total_steps=3,
+ status=WorkflowRunStatus.FAILED,
+ error="Test error message",
+ )
+
+ # Verify the result
+ assert result == mock_workflow_run
+ assert result.status == WorkflowRunStatus.FAILED.value
+ assert result.error == "Test error message"
+ assert result.total_tokens == 50
+ assert result.total_steps == 3
+ assert result.finished_at is not None
+
+
+def test_handle_node_execution_start(workflow_cycle_manager, mock_workflow_run):
+ """Test _handle_node_execution_start method"""
+ # Create a mock event
+ event = MagicMock(spec=QueueNodeStartedEvent)
+ event.node_execution_id = "test-node-execution-id"
+ event.node_id = "test-node-id"
+ event.node_type = NodeType.LLM
+
+ # Create node_data as a separate mock
+ node_data = MagicMock()
+ node_data.title = "Test Node"
+ event.node_data = node_data
+
+ event.predecessor_node_id = "test-predecessor-node-id"
+ event.node_run_index = 1
+ event.parallel_mode_run_id = "test-parallel-mode-run-id"
+ event.in_iteration_id = "test-iteration-id"
+ event.in_loop_id = "test-loop-id"
+
+ # Call the method
+ result = workflow_cycle_manager._handle_node_execution_start(
+ workflow_run=mock_workflow_run,
+ event=event,
+ )
+
+ # Verify the result
+ assert result.tenant_id == mock_workflow_run.tenant_id
+ assert result.app_id == mock_workflow_run.app_id
+ assert result.workflow_id == mock_workflow_run.workflow_id
+ assert result.workflow_run_id == mock_workflow_run.id
+ assert result.node_execution_id == event.node_execution_id
+ assert result.node_id == event.node_id
+ assert result.node_type == event.node_type.value
+ assert result.title == event.node_data.title
+ assert result.status == WorkflowNodeExecutionStatus.RUNNING.value
+ assert result.created_by_role == mock_workflow_run.created_by_role
+ assert result.created_by == mock_workflow_run.created_by
+
+ # Verify save was called
+ workflow_cycle_manager._workflow_node_execution_repository.save.assert_called_once_with(result)
+
+ # Verify the node execution was added to the cache
+ assert workflow_cycle_manager._workflow_node_executions[event.node_execution_id] == result
+
+
+def test_get_workflow_run(workflow_cycle_manager, mock_session, mock_workflow_run):
+ """Test _get_workflow_run method"""
+ # Mock session.scalar to return the workflow run
+ mock_session.scalar.return_value = mock_workflow_run
+
+ # Call the method
+ result = workflow_cycle_manager._get_workflow_run(
+ session=mock_session,
+ workflow_run_id="test-workflow-run-id",
+ )
+
+ # Verify the result
+ assert result == mock_workflow_run
+ assert workflow_cycle_manager._workflow_run == mock_workflow_run
+
+
+def test_handle_workflow_node_execution_success(workflow_cycle_manager):
+ """Test _handle_workflow_node_execution_success method"""
+ # Create a mock event
+ event = MagicMock(spec=QueueNodeSucceededEvent)
+ event.node_execution_id = "test-node-execution-id"
+ event.inputs = {"input": "test input"}
+ event.process_data = {"process": "test process"}
+ event.outputs = {"output": "test output"}
+ event.execution_metadata = {"metadata": "test metadata"}
+ event.start_at = datetime.now(UTC).replace(tzinfo=None)
+
+ # Create a mock workflow node execution
+ node_execution = MagicMock(spec=WorkflowNodeExecution)
+ node_execution.node_execution_id = "test-node-execution-id"
+
+ # Mock _get_workflow_node_execution to return the mock node execution
+ with patch.object(workflow_cycle_manager, "_get_workflow_node_execution", return_value=node_execution):
+ # Call the method
+ result = workflow_cycle_manager._handle_workflow_node_execution_success(
+ event=event,
+ )
+
+ # Verify the result
+ assert result == node_execution
+ assert result.status == WorkflowNodeExecutionStatus.SUCCEEDED.value
+ assert result.inputs == json.dumps(event.inputs)
+ assert result.process_data == json.dumps(event.process_data)
+ assert result.outputs == json.dumps(event.outputs)
+ assert result.finished_at is not None
+ assert result.elapsed_time is not None
+
+ # Verify update was called
+ workflow_cycle_manager._workflow_node_execution_repository.update.assert_called_once_with(node_execution)
+
+
+def test_handle_workflow_run_partial_success(workflow_cycle_manager, mock_session, mock_workflow_run):
+ """Test _handle_workflow_run_partial_success method"""
+ # Mock _get_workflow_run to return the mock_workflow_run
+ with patch.object(workflow_cycle_manager, "_get_workflow_run", return_value=mock_workflow_run):
+ # Call the method
+ result = workflow_cycle_manager._handle_workflow_run_partial_success(
+ session=mock_session,
+ workflow_run_id="test-workflow-run-id",
+ start_at=time.perf_counter() - 10, # 10 seconds ago
+ total_tokens=75,
+ total_steps=4,
+ outputs={"partial_answer": "test partial answer"},
+ exceptions_count=2,
+ )
+
+ # Verify the result
+ assert result == mock_workflow_run
+ assert result.status == WorkflowRunStatus.PARTIAL_SUCCEEDED.value
+ assert result.outputs == json.dumps({"partial_answer": "test partial answer"})
+ assert result.total_tokens == 75
+ assert result.total_steps == 4
+ assert result.exceptions_count == 2
+ assert result.finished_at is not None
+
+
+def test_handle_workflow_node_execution_failed(workflow_cycle_manager):
+ """Test _handle_workflow_node_execution_failed method"""
+ # Create a mock event
+ event = MagicMock(spec=QueueNodeFailedEvent)
+ event.node_execution_id = "test-node-execution-id"
+ event.inputs = {"input": "test input"}
+ event.process_data = {"process": "test process"}
+ event.outputs = {"output": "test output"}
+ event.execution_metadata = {"metadata": "test metadata"}
+ event.start_at = datetime.now(UTC).replace(tzinfo=None)
+ event.error = "Test error message"
+
+ # Create a mock workflow node execution
+ node_execution = MagicMock(spec=WorkflowNodeExecution)
+ node_execution.node_execution_id = "test-node-execution-id"
+
+ # Mock _get_workflow_node_execution to return the mock node execution
+ with patch.object(workflow_cycle_manager, "_get_workflow_node_execution", return_value=node_execution):
+ # Call the method
+ result = workflow_cycle_manager._handle_workflow_node_execution_failed(
+ event=event,
+ )
+
+ # Verify the result
+ assert result == node_execution
+ assert result.status == WorkflowNodeExecutionStatus.FAILED.value
+ assert result.error == "Test error message"
+ assert result.inputs == json.dumps(event.inputs)
+ assert result.process_data == json.dumps(event.process_data)
+ assert result.outputs == json.dumps(event.outputs)
+ assert result.finished_at is not None
+ assert result.elapsed_time is not None
+ assert result.execution_metadata == json.dumps(event.execution_metadata)
+
+ # Verify update was called
+ workflow_cycle_manager._workflow_node_execution_repository.update.assert_called_once_with(node_execution)
diff --git a/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py b/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py
index c16b453cba..9cda873e90 100644
--- a/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py
+++ b/api/tests/unit_tests/repositories/workflow_node_execution/test_sqlalchemy_repository.py
@@ -8,7 +8,7 @@ import pytest
from pytest_mock import MockerFixture
from sqlalchemy.orm import Session, sessionmaker
-from core.repositories.workflow_node_execution.sqlalchemy_repository import SQLAlchemyWorkflowNodeExecutionRepository
+from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from core.workflow.repository.workflow_node_execution_repository import OrderConfig
from models.workflow import WorkflowNodeExecution
@@ -80,7 +80,7 @@ def test_get_by_node_execution_id(repository, session, mocker: MockerFixture):
"""Test get_by_node_execution_id method."""
session_obj, _ = session
# Set up mock
- mock_select = mocker.patch("core.repositories.workflow_node_execution.sqlalchemy_repository.select")
+ mock_select = mocker.patch("core.repositories.sqlalchemy_workflow_node_execution_repository.select")
mock_stmt = mocker.MagicMock()
mock_select.return_value = mock_stmt
mock_stmt.where.return_value = mock_stmt
@@ -99,7 +99,7 @@ def test_get_by_workflow_run(repository, session, mocker: MockerFixture):
"""Test get_by_workflow_run method."""
session_obj, _ = session
# Set up mock
- mock_select = mocker.patch("core.repositories.workflow_node_execution.sqlalchemy_repository.select")
+ mock_select = mocker.patch("core.repositories.sqlalchemy_workflow_node_execution_repository.select")
mock_stmt = mocker.MagicMock()
mock_select.return_value = mock_stmt
mock_stmt.where.return_value = mock_stmt
@@ -120,7 +120,7 @@ def test_get_running_executions(repository, session, mocker: MockerFixture):
"""Test get_running_executions method."""
session_obj, _ = session
# Set up mock
- mock_select = mocker.patch("core.repositories.workflow_node_execution.sqlalchemy_repository.select")
+ mock_select = mocker.patch("core.repositories.sqlalchemy_workflow_node_execution_repository.select")
mock_stmt = mocker.MagicMock()
mock_select.return_value = mock_stmt
mock_stmt.where.return_value = mock_stmt
@@ -158,7 +158,7 @@ def test_clear(repository, session, mocker: MockerFixture):
"""Test clear method."""
session_obj, _ = session
# Set up mock
- mock_delete = mocker.patch("core.repositories.workflow_node_execution.sqlalchemy_repository.delete")
+ mock_delete = mocker.patch("core.repositories.sqlalchemy_workflow_node_execution_repository.delete")
mock_stmt = mocker.MagicMock()
mock_delete.return_value = mock_stmt
mock_stmt.where.return_value = mock_stmt