Merge remote-tracking branch 'origin/feat/r2' into feat/r2

jyong 2025-04-28 16:19:29 +08:00
commit 3c386c63a6
874 changed files with 31114 additions and 19811 deletions

View File

@@ -2,10 +2,10 @@
 npm add -g pnpm@10.8.0
 cd web && pnpm install
-pipx install poetry
+pipx install uv
-echo 'alias start-api="cd /workspaces/dify/api && poetry run python -m flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
+echo 'alias start-api="cd /workspaces/dify/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
-echo 'alias start-worker="cd /workspaces/dify/api && poetry run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc
+echo 'alias start-worker="cd /workspaces/dify/api && uv run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc
 echo 'alias start-web="cd /workspaces/dify/web && pnpm dev"' >> ~/.bashrc
 echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d"' >> ~/.bashrc
 echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down"' >> ~/.bashrc

View File

@@ -1,3 +1,3 @@
 #!/bin/bash
-cd api && poetry install
+cd api && uv sync

View File

.github/actions/setup-poetry/action.yml vendored
@@ -1,36 +0,0 @@
name: Setup Poetry and Python
inputs:
  python-version:
    description: Python version to use and the Poetry installed with
    required: true
    default: '3.11'
  poetry-version:
    description: Poetry version to set up
    required: true
    default: '2.0.1'
  poetry-lockfile:
    description: Path to the Poetry lockfile to restore cache from
    required: true
    default: ''
runs:
  using: composite
  steps:
    - name: Set up Python ${{ inputs.python-version }}
      uses: actions/setup-python@v5
      with:
        python-version: ${{ inputs.python-version }}
        cache: pip
    - name: Install Poetry
      shell: bash
      run: pip install poetry==${{ inputs.poetry-version }}
    - name: Restore Poetry cache
      if: ${{ inputs.poetry-lockfile != '' }}
      uses: actions/setup-python@v5
      with:
        python-version: ${{ inputs.python-version }}
        cache: poetry
        cache-dependency-path: ${{ inputs.poetry-lockfile }}

.github/actions/setup-uv/action.yml vendored Normal file
View File

@@ -0,0 +1,34 @@
name: Setup UV and Python
inputs:
  python-version:
    description: Python version to use and the UV installed with
    required: true
    default: '3.12'
  uv-version:
    description: UV version to set up
    required: true
    default: '0.6.14'
  uv-lockfile:
    description: Path to the UV lockfile to restore cache from
    required: true
    default: ''
  enable-cache:
    required: true
    default: true
runs:
  using: composite
  steps:
    - name: Set up Python ${{ inputs.python-version }}
      uses: actions/setup-python@v5
      with:
        python-version: ${{ inputs.python-version }}
    - name: Install uv
      uses: astral-sh/setup-uv@v5
      with:
        version: ${{ inputs.uv-version }}
        python-version: ${{ inputs.python-version }}
        enable-cache: ${{ inputs.enable-cache }}
        cache-dependency-glob: ${{ inputs.uv-lockfile }}

View File

@@ -17,6 +17,9 @@ jobs:
   test:
     name: API Tests
     runs-on: ubuntu-latest
+    defaults:
+      run:
+        shell: bash
     strategy:
       matrix:
         python-version:
@@ -27,40 +30,44 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          fetch-depth: 0
           persist-credentials: false
-      - name: Setup Poetry and Python ${{ matrix.python-version }}
-        uses: ./.github/actions/setup-poetry
+      - name: Setup UV and Python
+        uses: ./.github/actions/setup-uv
         with:
           python-version: ${{ matrix.python-version }}
-          poetry-lockfile: api/poetry.lock
+          uv-lockfile: api/uv.lock
-      - name: Check Poetry lockfile
-        run: |
-          poetry check -C api --lock
-          poetry show -C api
+      - name: Check UV lockfile
+        run: uv lock --project api --check
       - name: Install dependencies
-        run: poetry install -C api --with dev
+        run: uv sync --project api --dev
-      - name: Check dependencies in pyproject.toml
-        run: poetry run -P api bash dev/pytest/pytest_artifacts.sh
       - name: Run Unit tests
-        run: poetry run -P api bash dev/pytest/pytest_unit_tests.sh
+        run: |
+          uv run --project api bash dev/pytest/pytest_unit_tests.sh
+          # Extract coverage percentage and create a summary
+          TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])')
+          # Create a detailed coverage summary
+          echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY
+          echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY
+          echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
+          uv run --project api coverage report >> $GITHUB_STEP_SUMMARY
+          echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
       - name: Run dify config tests
-        run: poetry run -P api python dev/pytest/pytest_config_tests.py
+        run: uv run --project api dev/pytest/pytest_config_tests.py
-      - name: Cache MyPy
+      - name: MyPy Cache
         uses: actions/cache@v4
         with:
           path: api/.mypy_cache
-          key: mypy-${{ matrix.python-version }}-${{ runner.os }}-${{ hashFiles('api/poetry.lock') }}
+          key: mypy-${{ matrix.python-version }}-${{ runner.os }}-${{ hashFiles('api/uv.lock') }}
-      - name: Run mypy
-        run: dev/run-mypy
+      - name: Run MyPy Checks
+        run: dev/mypy-check
       - name: Set up dotenvs
         run: |
@@ -80,4 +87,4 @@ jobs:
           ssrf_proxy
       - name: Run Workflow
-        run: poetry run -P api bash dev/pytest/pytest_workflow.sh
+        run: uv run --project api bash dev/pytest/pytest_workflow.sh

View File

@@ -24,13 +24,13 @@ jobs:
           fetch-depth: 0
           persist-credentials: false
-      - name: Setup Poetry and Python
-        uses: ./.github/actions/setup-poetry
+      - name: Setup UV and Python
+        uses: ./.github/actions/setup-uv
         with:
-          poetry-lockfile: api/poetry.lock
+          uv-lockfile: api/uv.lock
       - name: Install dependencies
-        run: poetry install -C api
+        run: uv sync --project api
       - name: Prepare middleware env
         run: |
@@ -54,6 +54,4 @@ jobs:
       - name: Run DB Migration
         env:
           DEBUG: true
-        run: |
-          cd api
-          poetry run python -m flask upgrade-db
+        run: uv run --directory api flask upgrade-db

View File

@@ -42,6 +42,7 @@ jobs:
         with:
           push: false
           context: "{{defaultContext}}:${{ matrix.context }}"
+          file: "${{ matrix.file }}"
           platforms: ${{ matrix.platform }}
           cache-from: type=gha
           cache-to: type=gha,mode=max

View File

@@ -18,7 +18,6 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          fetch-depth: 0
           persist-credentials: false
       - name: Check changed files
@@ -29,24 +28,27 @@ jobs:
             api/**
             .github/workflows/style.yml
-      - name: Setup Poetry and Python
+      - name: Setup UV and Python
         if: steps.changed-files.outputs.any_changed == 'true'
-        uses: ./.github/actions/setup-poetry
+        uses: ./.github/actions/setup-uv
+        with:
+          uv-lockfile: api/uv.lock
+          enable-cache: false
       - name: Install dependencies
         if: steps.changed-files.outputs.any_changed == 'true'
-        run: poetry install -C api --only lint
+        run: uv sync --project api --dev
       - name: Ruff check
         if: steps.changed-files.outputs.any_changed == 'true'
         run: |
-          poetry run -C api ruff --version
-          poetry run -C api ruff check ./
-          poetry run -C api ruff format --check ./
+          uv run --directory api ruff --version
+          uv run --directory api ruff check ./
+          uv run --directory api ruff format --check ./
       - name: Dotenv check
         if: steps.changed-files.outputs.any_changed == 'true'
-        run: poetry run -P api dotenv-linter ./api/.env.example ./web/.env.example
+        run: uv run --project api dotenv-linter ./api/.env.example ./web/.env.example
       - name: Lint hints
         if: failure()
@@ -63,7 +65,6 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          fetch-depth: 0
           persist-credentials: false
       - name: Check changed files
@@ -102,7 +103,6 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          fetch-depth: 0
           persist-credentials: false
       - name: Check changed files
@@ -133,7 +133,6 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          fetch-depth: 0
           persist-credentials: false
       - name: Check changed files

View File

@@ -27,7 +27,6 @@ jobs:
     steps:
       - uses: actions/checkout@v4
         with:
-          fetch-depth: 0
           persist-credentials: false
       - name: Use Node.js ${{ matrix.node-version }}

View File

@@ -8,7 +8,7 @@ on:
       - api/core/rag/datasource/**
       - docker/**
       - .github/workflows/vdb-tests.yml
-      - api/poetry.lock
+      - api/uv.lock
       - api/pyproject.toml
 concurrency:
@@ -29,22 +29,19 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          fetch-depth: 0
           persist-credentials: false
-      - name: Setup Poetry and Python ${{ matrix.python-version }}
-        uses: ./.github/actions/setup-poetry
+      - name: Setup UV and Python
+        uses: ./.github/actions/setup-uv
         with:
           python-version: ${{ matrix.python-version }}
-          poetry-lockfile: api/poetry.lock
+          uv-lockfile: api/uv.lock
-      - name: Check Poetry lockfile
-        run: |
-          poetry check -C api --lock
-          poetry show -C api
+      - name: Check UV lockfile
+        run: uv lock --project api --check
       - name: Install dependencies
-        run: poetry install -C api --with dev
+        run: uv sync --project api --dev
       - name: Set up dotenvs
         run: |
@@ -80,7 +77,7 @@ jobs:
           elasticsearch
       - name: Check TiDB Ready
-        run: poetry run -P api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
+        run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
       - name: Test Vector Stores
-        run: poetry run -P api bash dev/pytest/pytest_vdb.sh
+        run: uv run --project api bash dev/pytest/pytest_vdb.sh

View File

@@ -23,7 +23,6 @@ jobs:
       - name: Checkout code
         uses: actions/checkout@v4
         with:
-          fetch-depth: 0
           persist-credentials: false
       - name: Check changed files

.gitignore vendored
View File

@@ -46,6 +46,7 @@ htmlcov/
 .cache
 nosetests.xml
 coverage.xml
+coverage.json
 *.cover
 *.py,cover
 .hypothesis/

View File

@@ -6,7 +6,7 @@
 本指南和 Dify 一样在不断完善中。如果有任何滞后于项目实际情况的地方,恳请谅解,我们也欢迎任何改进建议。
-关于许可证,请花一分钟阅读我们简短的[许可和贡献者协议](./LICENSE)。社区同时也遵循[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。
+关于许可证,请花一分钟阅读我们简短的[许可和贡献者协议](./LICENSE)。同时也遵循社区[行为准则](https://github.com/langgenius/.github/blob/main/CODE_OF_CONDUCT.md)。
 ## 开始之前

View File

@@ -8,7 +8,7 @@
 <a href="https://cloud.dify.ai">Dify Cloud</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">Self-hosting</a> ·
 <a href="https://docs.dify.ai">Documentation</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Enterprise inquiry</a>
+<a href="https://dify.ai/pricing">Dify edition overview</a>
 </p>
 <p align="center">
@@ -54,7 +54,7 @@
 <a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
 </p>
-Dify is an open-source LLM app development platform. Its intuitive interface combines agentic AI workflow, RAG pipeline, agent capabilities, model management, observability features and more, letting you quickly go from prototype to production.
+Dify is an open-source LLM app development platform. Its intuitive interface combines agentic AI workflow, RAG pipeline, agent capabilities, model management, observability features, and more, allowing you to quickly move from prototype to production.
 ## Quick start
@@ -188,7 +188,7 @@ All of Dify's offerings come with corresponding APIs, so you could effortlessly
 - **Dify for enterprise / organizations</br>**
   We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=[GitHub]Business%20License%20Inquiry) to discuss enterprise needs. </br>
-  > For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one-click. It's an affordable AMI offering with the option to create apps with custom logo and branding.
+  > For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.
 ## Staying ahead
@@ -233,7 +233,7 @@ Deploy Dify to AWS with [CDK](https://aws.amazon.com/cdk/)
 For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
 At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.
-> We are looking for contributors to help with translating Dify to languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).
+> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).
 ## Community & contact

View File

@@ -4,7 +4,7 @@
 <a href="https://cloud.dify.ai">Dify Cloud</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">الاستضافة الذاتية</a> ·
 <a href="https://docs.dify.ai">التوثيق</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">استفسار الشركات (للإنجليزية فقط)</a>
+<a href="https://dify.ai/pricing">نظرة عامة على منتجات Dify</a>
 </p>
 <p align="center">

View File

@@ -8,7 +8,7 @@
 <a href="https://cloud.dify.ai">ডিফাই ক্লাউড</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">সেল্ফ-হোস্টিং</a> ·
 <a href="https://docs.dify.ai">ডকুমেন্টেশন</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">ব্যাবসায়িক অনুসন্ধান</a>
+<a href="https://dify.ai/pricing">Dify পণ্যের রূপভেদ</a>
 </p>
 <p align="center">

View File

@@ -4,7 +4,7 @@
 <a href="https://cloud.dify.ai">Dify 云服务</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">自托管</a> ·
 <a href="https://docs.dify.ai">文档</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">(需用英文)常见问题解答 / 联系团队</a>
+<a href="https://dify.ai/pricing">Dify 产品形态总览</a>
 </div>
 <p align="center">

View File

@@ -8,7 +8,7 @@
 <a href="https://cloud.dify.ai">Dify Cloud</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">Selbstgehostetes</a> ·
 <a href="https://docs.dify.ai">Dokumentation</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Anfrage an Unternehmen</a>
+<a href="https://dify.ai/pricing">Überblick über die Dify-Produkte</a>
 </p>
 <p align="center">

View File

@@ -4,7 +4,7 @@
 <a href="https://cloud.dify.ai">Dify Cloud</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">Auto-alojamiento</a> ·
 <a href="https://docs.dify.ai">Documentación</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Consultas empresariales (en inglés)</a>
+<a href="https://dify.ai/pricing">Resumen de las ediciones de Dify</a>
 </p>
 <p align="center">

View File

@@ -4,7 +4,7 @@
 <a href="https://cloud.dify.ai">Dify Cloud</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">Auto-hébergement</a> ·
 <a href="https://docs.dify.ai">Documentation</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Demande d'entreprise (en anglais seulement)</a>
+<a href="https://dify.ai/pricing">Présentation des différentes offres Dify</a>
 </p>
 <p align="center">

View File

@@ -4,7 +4,7 @@
 <a href="https://cloud.dify.ai">Dify Cloud</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">セルフホスティング</a> ·
 <a href="https://docs.dify.ai">ドキュメント</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">企業のお問い合わせ(英語のみ)</a>
+<a href="https://dify.ai/pricing">Difyの各種エディションについて</a>
 </p>
 <p align="center">

View File

@@ -4,7 +4,7 @@
 <a href="https://cloud.dify.ai">Dify Cloud</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">Self-hosting</a> ·
 <a href="https://docs.dify.ai">Documentation</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Commercial enquiries</a>
+<a href="https://dify.ai/pricing">Dify product editions</a>
 </p>
 <p align="center">

View File

@@ -4,7 +4,7 @@
 <a href="https://cloud.dify.ai">Dify 클라우드</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">셀프-호스팅</a> ·
 <a href="https://docs.dify.ai">문서</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">기업 문의 (영어만 가능)</a>
+<a href="https://dify.ai/pricing">Dify 제품 에디션 안내</a>
 </p>
 <p align="center">

View File

@@ -8,7 +8,7 @@
 <a href="https://cloud.dify.ai">Dify Cloud</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">Auto-hospedagem</a> ·
 <a href="https://docs.dify.ai">Documentação</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Consultas empresariais</a>
+<a href="https://dify.ai/pricing">Visão geral das edições do Dify</a>
 </p>
 <p align="center">

View File

@@ -8,7 +8,7 @@
 <a href="https://cloud.dify.ai">Dify Cloud</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">Samostojno gostovanje</a> ·
 <a href="https://docs.dify.ai">Dokumentacija</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Povpraševanje za podjetja</a>
+<a href="https://dify.ai/pricing">Pregled ponudb izdelkov Dify</a>
 </p>
 <p align="center">

View File

@@ -4,7 +4,7 @@
 <a href="https://cloud.dify.ai">Dify Bulut</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">Kendi Sunucunuzda Barındırma</a> ·
 <a href="https://docs.dify.ai">Dokümantasyon</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Yalnızca İngilizce: Kurumsal Sorgulama</a>
+<a href="https://dify.ai/pricing">Dify ürün seçeneklerine genel bakış</a>
 </p>
 <p align="center">

View File

@@ -8,7 +8,7 @@
 <a href="https://cloud.dify.ai">Dify 雲端服務</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">自行託管</a> ·
 <a href="https://docs.dify.ai">說明文件</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">企業諮詢</a>
+<a href="https://dify.ai/pricing">產品方案概覽</a>
 </p>
 <p align="center">

View File

@@ -4,7 +4,7 @@
 <a href="https://cloud.dify.ai">Dify Cloud</a> ·
 <a href="https://docs.dify.ai/getting-started/install-self-hosted">Tự triển khai</a> ·
 <a href="https://docs.dify.ai">Tài liệu</a> ·
-<a href="https://udify.app/chat/22L1zSxg6yW1cWQg">Yêu cầu doanh nghiệp</a>
+<a href="https://dify.ai/pricing">Tổng quan các lựa chọn sản phẩm Dify</a>
 </p>
 <p align="center">

View File

@@ -165,6 +165,7 @@ MILVUS_URI=http://127.0.0.1:19530
 MILVUS_TOKEN=
 MILVUS_USER=root
 MILVUS_PASSWORD=Milvus
+MILVUS_ANALYZER_PARAMS=
 # MyScale configuration
 MYSCALE_HOST=127.0.0.1
@@ -296,6 +297,7 @@ LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:
 LINDORM_USERNAME=admin
 LINDORM_PASSWORD=admin
 USING_UGC_INDEX=False
+LINDORM_QUERY_TIMEOUT=1
 # OceanBase Vector configuration
 OCEANBASE_VECTOR_HOST=127.0.0.1
@@ -423,6 +425,12 @@ WORKFLOW_CALL_MAX_DEPTH=5
 WORKFLOW_PARALLEL_DEPTH_LIMIT=3
 MAX_VARIABLE_SIZE=204800
+# Workflow storage configuration
+# Options: rdbms, hybrid
+# rdbms: Use only the relational database (default)
+# hybrid: Save new data to object storage, read from both object storage and RDBMS
+WORKFLOW_NODE_EXECUTION_STORAGE=rdbms
 # App configuration
 APP_MAX_EXECUTION_TIME=1200
 APP_MAX_ACTIVE_REQUESTS=0
@@ -463,3 +471,19 @@ CREATE_TIDB_SERVICE_JOB_ENABLED=false
 MAX_SUBMIT_COUNT=100
 # Lockout duration in seconds
 LOGIN_LOCKOUT_DURATION=86400
+# Enable OpenTelemetry
+ENABLE_OTEL=false
+OTLP_BASE_ENDPOINT=http://localhost:4318
+OTLP_API_KEY=
+OTEL_EXPORTER_TYPE=otlp
+OTEL_SAMPLING_RATE=0.1
+OTEL_BATCH_EXPORT_SCHEDULE_DELAY=5000
+OTEL_MAX_QUEUE_SIZE=2048
+OTEL_MAX_EXPORT_BATCH_SIZE=512
+OTEL_METRIC_EXPORT_INTERVAL=60000
+OTEL_BATCH_EXPORT_TIMEOUT=10000
+OTEL_METRIC_EXPORT_TIMEOUT=30000
+# Prevent Clickjacking
+ALLOW_EMBED=false
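
For reference, a minimal sketch of how the OpenTelemetry variables above could be wired into the standard OpenTelemetry Python SDK. This is an illustration only, not Dify's actual `ext_otel` extension; the `/v1/traces` path suffix and the `Authorization` header format are assumptions.

```python
# Illustrative sketch: maps the OTel env vars above onto the opentelemetry-sdk API.
# Dify's real ext_otel extension may differ; the /v1/traces suffix and the
# Authorization header format are assumptions.
import os

from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.sdk.trace.sampling import TraceIdRatioBased

if os.getenv("ENABLE_OTEL", "false").lower() == "true":
    exporter = OTLPSpanExporter(
        endpoint=os.getenv("OTLP_BASE_ENDPOINT", "http://localhost:4318") + "/v1/traces",
        headers={"Authorization": f"Bearer {os.getenv('OTLP_API_KEY', '')}"},
    )
    provider = TracerProvider(
        # Sample only the configured fraction of traces (default 10%).
        sampler=TraceIdRatioBased(float(os.getenv("OTEL_SAMPLING_RATE", "0.1"))),
    )
    provider.add_span_processor(
        BatchSpanProcessor(
            exporter,
            max_queue_size=int(os.getenv("OTEL_MAX_QUEUE_SIZE", "2048")),
            schedule_delay_millis=int(os.getenv("OTEL_BATCH_EXPORT_SCHEDULE_DELAY", "5000")),
            max_export_batch_size=int(os.getenv("OTEL_MAX_EXPORT_BATCH_SIZE", "512")),
            export_timeout_millis=int(os.getenv("OTEL_BATCH_EXPORT_TIMEOUT", "10000")),
        )
    )
    trace.set_tracer_provider(provider)
```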

View File

@@ -3,20 +3,11 @@ FROM python:3.12-slim-bookworm AS base
 WORKDIR /app/api
-# Install Poetry
-ENV POETRY_VERSION=2.0.1
-# if you located in China, you can use aliyun mirror to speed up
-# RUN pip install --no-cache-dir poetry==${POETRY_VERSION} -i https://mirrors.aliyun.com/pypi/simple/
-RUN pip install --no-cache-dir poetry==${POETRY_VERSION}
-# Configure Poetry
-ENV POETRY_CACHE_DIR=/tmp/poetry_cache
-ENV POETRY_NO_INTERACTION=1
-ENV POETRY_VIRTUALENVS_IN_PROJECT=true
-ENV POETRY_VIRTUALENVS_CREATE=true
-ENV POETRY_REQUESTS_TIMEOUT=15
+# Install uv
+ENV UV_VERSION=0.6.14
+RUN pip install --no-cache-dir uv==${UV_VERSION}
 FROM base AS packages
@@ -27,8 +18,8 @@ RUN apt-get update \
     && apt-get install -y --no-install-recommends gcc g++ libc-dev libffi-dev libgmp-dev libmpfr-dev libmpc-dev
 # Install Python dependencies
-COPY pyproject.toml poetry.lock ./
-RUN poetry install --sync --no-cache --no-root
+COPY pyproject.toml uv.lock ./
+RUN uv sync --locked
 # production stage
 FROM base AS production

View File

@@ -3,7 +3,10 @@
 ## Usage
 > [!IMPORTANT]
-> In the v0.6.12 release, we deprecated `pip` as the package management tool for Dify API Backend service and replaced it with `poetry`.
+>
+> In the v1.3.0 release, `poetry` has been replaced with
+> [`uv`](https://docs.astral.sh/uv/) as the package manager
+> for Dify API backend service.
 1. Start the docker-compose stack
@@ -37,19 +40,19 @@
 4. Create environment.
-   Dify API service uses [Poetry](https://python-poetry.org/docs/) to manage dependencies. First, you need to add the poetry shell plugin, if you don't have it already, in order to run in a virtual environment. [Note: Poetry shell is no longer a native command so you need to install the poetry plugin beforehand]
+   Dify API service uses [UV](https://docs.astral.sh/uv/) to manage dependencies.
+   First, you need to add the uv package manager, if you don't have it already.
    ```bash
-   poetry self add poetry-plugin-shell
+   pip install uv
+   # Or on macOS
+   brew install uv
    ```
-   Then, You can execute `poetry shell` to activate the environment.
 5. Install dependencies
    ```bash
-   poetry env use 3.12
-   poetry install
+   uv sync --dev
    ```
 6. Run migrate
@@ -57,21 +60,21 @@
    Before the first launch, migrate the database to the latest version.
    ```bash
-   poetry run python -m flask db upgrade
+   uv run flask db upgrade
    ```
 7. Start backend
    ```bash
-   poetry run python -m flask run --host 0.0.0.0 --port=5001 --debug
+   uv run flask run --host 0.0.0.0 --port=5001 --debug
    ```
 8. Start Dify [web](../web) service.
-9. Setup your application by visiting `http://localhost:3000`...
+9. Setup your application by visiting `http://localhost:3000`.
 10. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service.
    ```bash
-   poetry run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion
+   uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion
    ```
 ## Testing
@@ -79,11 +82,11 @@
 1. Install dependencies for both the backend and the test environment
    ```bash
-   poetry install -C api --with dev
+   uv sync --dev
    ```
 2. Run the tests locally with mocked system environment variables in `tool.pytest_env` section in `pyproject.toml`
    ```bash
-   poetry run -P api bash dev/pytest/pytest_all_tests.sh
+   uv run -P api bash dev/pytest/pytest_all_tests.sh
    ```

View File

@@ -51,8 +51,11 @@ def initialize_extensions(app: DifyApp):
         ext_login,
         ext_mail,
         ext_migrate,
+        ext_otel,
+        ext_otel_patch,
         ext_proxy_fix,
         ext_redis,
+        ext_repositories,
         ext_sentry,
         ext_set_secretkey,
         ext_storage,
@@ -73,6 +76,7 @@ def initialize_extensions(app: DifyApp):
         ext_migrate,
         ext_redis,
         ext_storage,
+        ext_repositories,
         ext_celery,
         ext_login,
         ext_mail,
@@ -81,6 +85,8 @@ def initialize_extensions(app: DifyApp):
         ext_proxy_fix,
         ext_blueprints,
         ext_commands,
+        ext_otel_patch,  # Apply patch before initializing OpenTelemetry
+        ext_otel,
     ]
     for ext in extensions:
         short_name = ext.__name__.split(".")[-1]

View File

@@ -17,6 +17,7 @@ from core.rag.models.document import Document
 from events.app_event import app_was_created
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
+from extensions.ext_storage import storage
 from libs.helper import email as email_validate
 from libs.password import hash_password, password_pattern, valid_password
 from libs.rsa import generate_key_pair
@@ -271,6 +272,7 @@ def migrate_knowledge_vector_database():
     upper_collection_vector_types = {
         VectorType.MILVUS,
         VectorType.PGVECTOR,
+        VectorType.VASTBASE,
         VectorType.RELYT,
         VectorType.WEAVIATE,
         VectorType.ORACLE,
@@ -814,3 +816,274 @@ def clear_free_plan_tenant_expired_logs(days: int, batch: int, tenant_ids: list[
     ClearFreePlanTenantExpiredLogs.process(days, batch, tenant_ids)
     click.echo(click.style("Clear free plan tenant expired logs completed.", fg="green"))
@click.command("clear-orphaned-file-records", help="Clear orphaned file records.")
def clear_orphaned_file_records():
"""
Clear orphaned file records in the database.
"""
# define tables and columns to process
files_tables = [
{"table": "upload_files", "id_column": "id", "key_column": "key"},
{"table": "tool_files", "id_column": "id", "key_column": "file_key"},
]
ids_tables = [
{"type": "uuid", "table": "message_files", "column": "upload_file_id"},
{"type": "text", "table": "documents", "column": "data_source_info"},
{"type": "text", "table": "document_segments", "column": "content"},
{"type": "text", "table": "messages", "column": "answer"},
{"type": "text", "table": "workflow_node_executions", "column": "inputs"},
{"type": "text", "table": "workflow_node_executions", "column": "process_data"},
{"type": "text", "table": "workflow_node_executions", "column": "outputs"},
{"type": "text", "table": "conversations", "column": "introduction"},
{"type": "text", "table": "conversations", "column": "system_instruction"},
{"type": "json", "table": "messages", "column": "inputs"},
{"type": "json", "table": "messages", "column": "message"},
]
# notify user and ask for confirmation
click.echo(
click.style("This command will find and delete orphaned file records in the following tables:", fg="yellow")
)
for files_table in files_tables:
click.echo(click.style(f"- {files_table['table']}", fg="yellow"))
click.echo(
click.style("The following tables and columns will be scanned to find orphaned file records:", fg="yellow")
)
for ids_table in ids_tables:
click.echo(click.style(f"- {ids_table['table']} ({ids_table['column']})", fg="yellow"))
click.echo("")
click.echo(click.style("!!! USE WITH CAUTION !!!", fg="red"))
click.echo(
click.style(
(
"Since not all patterns have been fully tested, "
"please note that this command may delete unintended file records."
),
fg="yellow",
)
)
click.echo(
click.style("This cannot be undone. Please make sure to back up your database before proceeding.", fg="yellow")
)
click.echo(
click.style(
(
"It is also recommended to run this during the maintenance window, "
"as this may cause high load on your instance."
),
fg="yellow",
)
)
click.confirm("Do you want to proceed?", abort=True)
# start the cleanup process
click.echo(click.style("Starting orphaned file records cleanup.", fg="white"))
try:
# fetch file id and keys from each table
all_files_in_tables = []
for files_table in files_tables:
click.echo(click.style(f"- Listing file records in table {files_table['table']}", fg="white"))
query = f"SELECT {files_table['id_column']}, {files_table['key_column']} FROM {files_table['table']}"
with db.engine.begin() as conn:
rs = conn.execute(db.text(query))
for i in rs:
all_files_in_tables.append({"table": files_table["table"], "id": str(i[0]), "key": i[1]})
click.echo(click.style(f"Found {len(all_files_in_tables)} files in tables.", fg="white"))
# fetch referred table and columns
guid_regexp = "[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}"
all_ids_in_tables = []
for ids_table in ids_tables:
query = ""
if ids_table["type"] == "uuid":
click.echo(
click.style(
f"- Listing file ids in column {ids_table['column']} in table {ids_table['table']}", fg="white"
)
)
query = (
f"SELECT {ids_table['column']} FROM {ids_table['table']} WHERE {ids_table['column']} IS NOT NULL"
)
with db.engine.begin() as conn:
rs = conn.execute(db.text(query))
for i in rs:
all_ids_in_tables.append({"table": ids_table["table"], "id": str(i[0])})
elif ids_table["type"] == "text":
click.echo(
click.style(
f"- Listing file-id-like strings in column {ids_table['column']} in table {ids_table['table']}",
fg="white",
)
)
query = (
f"SELECT regexp_matches({ids_table['column']}, '{guid_regexp}', 'g') AS extracted_id "
f"FROM {ids_table['table']}"
)
with db.engine.begin() as conn:
rs = conn.execute(db.text(query))
for i in rs:
for j in i[0]:
all_ids_in_tables.append({"table": ids_table["table"], "id": j})
elif ids_table["type"] == "json":
click.echo(
click.style(
(
f"- Listing file-id-like JSON string in column {ids_table['column']} "
f"in table {ids_table['table']}"
),
fg="white",
)
)
query = (
f"SELECT regexp_matches({ids_table['column']}::text, '{guid_regexp}', 'g') AS extracted_id "
f"FROM {ids_table['table']}"
)
with db.engine.begin() as conn:
rs = conn.execute(db.text(query))
for i in rs:
for j in i[0]:
all_ids_in_tables.append({"table": ids_table["table"], "id": j})
click.echo(click.style(f"Found {len(all_ids_in_tables)} file ids in tables.", fg="white"))
except Exception as e:
click.echo(click.style(f"Error fetching keys: {str(e)}", fg="red"))
return
# find orphaned files
all_files = [file["id"] for file in all_files_in_tables]
all_ids = [file["id"] for file in all_ids_in_tables]
orphaned_files = list(set(all_files) - set(all_ids))
if not orphaned_files:
click.echo(click.style("No orphaned file records found. There is nothing to delete.", fg="green"))
return
click.echo(click.style(f"Found {len(orphaned_files)} orphaned file records.", fg="white"))
for file in orphaned_files:
click.echo(click.style(f"- orphaned file id: {file}", fg="black"))
click.confirm(f"Do you want to proceed to delete all {len(orphaned_files)} orphaned file records?", abort=True)
# delete orphaned records for each file
try:
for files_table in files_tables:
click.echo(click.style(f"- Deleting orphaned file records in table {files_table['table']}", fg="white"))
query = f"DELETE FROM {files_table['table']} WHERE {files_table['id_column']} IN :ids"
with db.engine.begin() as conn:
conn.execute(db.text(query), {"ids": tuple(orphaned_files)})
except Exception as e:
click.echo(click.style(f"Error deleting orphaned file records: {str(e)}", fg="red"))
return
click.echo(click.style(f"Removed {len(orphaned_files)} orphaned file records.", fg="green"))
@click.command("remove-orphaned-files-on-storage", help="Remove orphaned files on the storage.")
def remove_orphaned_files_on_storage():
"""
Remove orphaned files on the storage.
"""
# define tables and columns to process
files_tables = [
{"table": "upload_files", "key_column": "key"},
{"table": "tool_files", "key_column": "file_key"},
]
storage_paths = ["image_files", "tools", "upload_files"]
# notify user and ask for confirmation
click.echo(click.style("This command will find and remove orphaned files on the storage,", fg="yellow"))
click.echo(
click.style("by comparing the files on the storage with the records in the following tables:", fg="yellow")
)
for files_table in files_tables:
click.echo(click.style(f"- {files_table['table']}", fg="yellow"))
click.echo(click.style("The following paths on the storage will be scanned to find orphaned files:", fg="yellow"))
for storage_path in storage_paths:
click.echo(click.style(f"- {storage_path}", fg="yellow"))
click.echo("")
click.echo(click.style("!!! USE WITH CAUTION !!!", fg="red"))
click.echo(
click.style(
"Currently, this command will work only for opendal based storage (STORAGE_TYPE=opendal).", fg="yellow"
)
)
click.echo(
click.style(
"Since not all patterns have been fully tested, please note that this command may delete unintended files.",
fg="yellow",
)
)
click.echo(
click.style("This cannot be undone. Please make sure to back up your storage before proceeding.", fg="yellow")
)
click.echo(
click.style(
(
"It is also recommended to run this during the maintenance window, "
"as this may cause high load on your instance."
),
fg="yellow",
)
)
click.confirm("Do you want to proceed?", abort=True)
# start the cleanup process
click.echo(click.style("Starting orphaned files cleanup.", fg="white"))
# fetch file id and keys from each table
all_files_in_tables = []
try:
for files_table in files_tables:
click.echo(click.style(f"- Listing files from table {files_table['table']}", fg="white"))
query = f"SELECT {files_table['key_column']} FROM {files_table['table']}"
with db.engine.begin() as conn:
rs = conn.execute(db.text(query))
for i in rs:
all_files_in_tables.append(str(i[0]))
click.echo(click.style(f"Found {len(all_files_in_tables)} files in tables.", fg="white"))
except Exception as e:
click.echo(click.style(f"Error fetching keys: {str(e)}", fg="red"))
all_files_on_storage = []
for storage_path in storage_paths:
try:
click.echo(click.style(f"- Scanning files on storage path {storage_path}", fg="white"))
files = storage.scan(path=storage_path, files=True, directories=False)
all_files_on_storage.extend(files)
except FileNotFoundError as e:
click.echo(click.style(f" -> Skipping path {storage_path} as it does not exist.", fg="yellow"))
continue
except Exception as e:
click.echo(click.style(f" -> Error scanning files on storage path {storage_path}: {str(e)}", fg="red"))
continue
click.echo(click.style(f"Found {len(all_files_on_storage)} files on storage.", fg="white"))
# find orphaned files
orphaned_files = list(set(all_files_on_storage) - set(all_files_in_tables))
if not orphaned_files:
click.echo(click.style("No orphaned files found. There is nothing to remove.", fg="green"))
return
click.echo(click.style(f"Found {len(orphaned_files)} orphaned files.", fg="white"))
for file in orphaned_files:
click.echo(click.style(f"- orphaned file: {file}", fg="black"))
click.confirm(f"Do you want to proceed to remove all {len(orphaned_files)} orphaned files?", abort=True)
# delete orphaned files
removed_files = 0
error_files = 0
for file in orphaned_files:
try:
storage.delete(file)
removed_files += 1
click.echo(click.style(f"- Removing orphaned file: {file}", fg="white"))
except Exception as e:
error_files += 1
click.echo(click.style(f"- Error deleting orphaned file {file}: {str(e)}", fg="red"))
continue
if error_files == 0:
click.echo(click.style(f"Removed {removed_files} orphaned files without errors.", fg="green"))
else:
click.echo(click.style(f"Removed {removed_files} orphaned files, with {error_files} errors.", fg="yellow"))

View File

@@ -9,9 +9,11 @@ from .enterprise import EnterpriseFeatureConfig
 from .extra import ExtraServiceConfig
 from .feature import FeatureConfig
 from .middleware import MiddlewareConfig
+from .observability import ObservabilityConfig
 from .packaging import PackagingInfo
 from .remote_settings_sources import RemoteSettingsSource, RemoteSettingsSourceConfig, RemoteSettingsSourceName
 from .remote_settings_sources.apollo import ApolloSettingsSource
+from .remote_settings_sources.nacos import NacosSettingsSource
 logger = logging.getLogger(__name__)
@@ -33,6 +35,8 @@ class RemoteSettingsSourceFactory(PydanticBaseSettingsSource):
         match remote_source_name:
             case RemoteSettingsSourceName.APOLLO:
                 remote_source = ApolloSettingsSource(current_state)
+            case RemoteSettingsSourceName.NACOS:
+                remote_source = NacosSettingsSource(current_state)
             case _:
                 logger.warning(f"Unsupported remote source: {remote_source_name}")
                 return {}
@@ -59,6 +63,8 @@ class DifyConfig(
     MiddlewareConfig,
     # Extra service configs
     ExtraServiceConfig,
+    # Observability configs
+    ObservabilityConfig,
     # Remote source configs
     RemoteSettingsSourceConfig,
     # Enterprise feature configs

View File

@@ -12,7 +12,7 @@ from pydantic import (
 )
 from pydantic_settings import BaseSettings
-from configs.feature.hosted_service import HostedServiceConfig
+from .hosted_service import HostedServiceConfig
 class SecurityConfig(BaseSettings):
@@ -519,6 +519,11 @@
         default=100,
     )
+    WORKFLOW_NODE_EXECUTION_STORAGE: str = Field(
+        default="rdbms",
+        description="Storage backend for WorkflowNodeExecution. Options: 'rdbms', 'hybrid'",
+    )
 class AuthConfig(BaseSettings):
     """

View File

@@ -22,6 +22,7 @@ from .vdb.baidu_vector_config import BaiduVectorDBConfig
 from .vdb.chroma_config import ChromaConfig
 from .vdb.couchbase_config import CouchbaseConfig
 from .vdb.elasticsearch_config import ElasticsearchConfig
+from .vdb.huawei_cloud_config import HuaweiCloudConfig
 from .vdb.lindorm_config import LindormConfig
 from .vdb.milvus_config import MilvusConfig
 from .vdb.myscale_config import MyScaleConfig
@@ -38,6 +39,7 @@ from .vdb.tencent_vector_config import TencentVectorDBConfig
 from .vdb.tidb_on_qdrant_config import TidbOnQdrantConfig
 from .vdb.tidb_vector_config import TiDBVectorConfig
 from .vdb.upstash_config import UpstashConfig
+from .vdb.vastbase_vector_config import VastbaseVectorConfig
 from .vdb.vikingdb_config import VikingDBConfig
 from .vdb.weaviate_config import WeaviateConfig
@@ -263,11 +265,13 @@ class MiddlewareConfig(
     VectorStoreConfig,
     AnalyticdbConfig,
     ChromaConfig,
+    HuaweiCloudConfig,
     MilvusConfig,
     MyScaleConfig,
     OpenSearchConfig,
     OracleConfig,
     PGVectorConfig,
+    VastbaseVectorConfig,
     PGVectoRSConfig,
     QdrantConfig,
     RelytConfig,

View File

@@ -0,0 +1,25 @@
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings


class HuaweiCloudConfig(BaseSettings):
    """
    Configuration settings for Huawei cloud search service
    """

    HUAWEI_CLOUD_HOSTS: Optional[str] = Field(
        description="Hostname or IP address of the Huawei cloud search service instance",
        default=None,
    )

    HUAWEI_CLOUD_USER: Optional[str] = Field(
        description="Username for authenticating with Huawei cloud search service",
        default=None,
    )

    HUAWEI_CLOUD_PASSWORD: Optional[str] = Field(
        description="Password for authenticating with Huawei cloud search service",
        default=None,
    )

View File

@@ -32,3 +32,4 @@ class LindormConfig(BaseSettings):
         description="Using UGC index will store the same type of Index in a single index but can retrieve separately.",
         default=False,
     )
+    LINDORM_QUERY_TIMEOUT: Optional[float] = Field(description="The lindorm search request timeout (s)", default=2.0)

View File

@@ -39,3 +39,8 @@ class MilvusConfig(BaseSettings):
         "older versions",
         default=True,
     )
+
+    MILVUS_ANALYZER_PARAMS: Optional[str] = Field(
+        description='Milvus text analyzer parameters, e.g., {"type": "chinese"} for Chinese segmentation support.',
+        default=None,
+    )

View File

@@ -0,0 +1,45 @@
from typing import Optional

from pydantic import Field, PositiveInt
from pydantic_settings import BaseSettings


class VastbaseVectorConfig(BaseSettings):
    """
    Configuration settings for Vector (Vastbase with vector extension)
    """

    VASTBASE_HOST: Optional[str] = Field(
        description="Hostname or IP address of the Vastbase server with Vector extension (e.g., 'localhost')",
        default=None,
    )

    VASTBASE_PORT: PositiveInt = Field(
        description="Port number on which the Vastbase server is listening (default is 5432)",
        default=5432,
    )

    VASTBASE_USER: Optional[str] = Field(
        description="Username for authenticating with the Vastbase database",
        default=None,
    )

    VASTBASE_PASSWORD: Optional[str] = Field(
        description="Password for authenticating with the Vastbase database",
        default=None,
    )

    VASTBASE_DATABASE: Optional[str] = Field(
        description="Name of the Vastbase database to connect to",
        default=None,
    )

    VASTBASE_MIN_CONNECTION: PositiveInt = Field(
        description="Min connection of the Vastbase database",
        default=1,
    )

    VASTBASE_MAX_CONNECTION: PositiveInt = Field(
        description="Max connection of the Vastbase database",
        default=5,
    )
View File

@@ -0,0 +1,9 @@
from configs.observability.otel.otel_config import OTelConfig


class ObservabilityConfig(OTelConfig):
    """
    Observability configuration settings
    """

    pass

View File

@@ -0,0 +1,44 @@
from pydantic import Field
from pydantic_settings import BaseSettings


class OTelConfig(BaseSettings):
    """
    OpenTelemetry configuration settings
    """

    ENABLE_OTEL: bool = Field(
        description="Whether to enable OpenTelemetry",
        default=False,
    )

    OTLP_BASE_ENDPOINT: str = Field(
        description="OTLP base endpoint",
        default="http://localhost:4318",
    )

    OTLP_API_KEY: str = Field(
        description="OTLP API key",
        default="",
    )

    OTEL_EXPORTER_TYPE: str = Field(
        description="OTEL exporter type",
        default="otlp",
    )

    OTEL_SAMPLING_RATE: float = Field(default=0.1, description="Sampling rate for traces (0.0 to 1.0)")

    OTEL_BATCH_EXPORT_SCHEDULE_DELAY: int = Field(
        default=5000, description="Batch export schedule delay in milliseconds"
    )

    OTEL_MAX_QUEUE_SIZE: int = Field(default=2048, description="Maximum queue size for the batch span processor")

    OTEL_MAX_EXPORT_BATCH_SIZE: int = Field(default=512, description="Maximum export batch size")

    OTEL_METRIC_EXPORT_INTERVAL: int = Field(default=60000, description="Metric export interval in milliseconds")

    OTEL_BATCH_EXPORT_TIMEOUT: int = Field(default=10000, description="Batch export timeout in milliseconds")

    OTEL_METRIC_EXPORT_TIMEOUT: int = Field(default=30000, description="Metric export timeout in milliseconds")

View File

@@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):
     CURRENT_VERSION: str = Field(
         description="Dify version",
-        default="1.2.0",
+        default="1.3.0",
     )
     COMMIT_SHA: str = Field(

View File

@@ -270,7 +270,7 @@ class ApolloClient:
         while not self._stopping:
             for namespace in self._notification_map:
                 self._do_heart_beat(namespace)
-            time.sleep(60 * 10)  # 10分钟
+            time.sleep(60 * 10)  # 10 minutes
     def _do_heart_beat(self, namespace):
         url = "{}/configs/{}/{}/{}?ip={}".format(self.config_url, self.app_id, self.cluster, namespace, self.ip)

View File

@@ -3,3 +3,4 @@ from enum import StrEnum
 class RemoteSettingsSourceName(StrEnum):
     APOLLO = "apollo"
+    NACOS = "nacos"

View File

@@ -0,0 +1,52 @@
import logging
import os
from collections.abc import Mapping
from typing import Any

from pydantic.fields import FieldInfo

from .http_request import NacosHttpClient

logger = logging.getLogger(__name__)

from configs.remote_settings_sources.base import RemoteSettingsSource

from .utils import _parse_config


class NacosSettingsSource(RemoteSettingsSource):
    def __init__(self, configs: Mapping[str, Any]):
        self.configs = configs
        self.remote_configs: dict[str, Any] = {}
        self.async_init()

    def async_init(self):
        data_id = os.getenv("DIFY_ENV_NACOS_DATA_ID", "dify-api-env.properties")
        group = os.getenv("DIFY_ENV_NACOS_GROUP", "nacos-dify")
        tenant = os.getenv("DIFY_ENV_NACOS_NAMESPACE", "")

        params = {"dataId": data_id, "group": group, "tenant": tenant}
        try:
            content = NacosHttpClient().http_request("/nacos/v1/cs/configs", method="GET", headers={}, params=params)
            self.remote_configs = self._parse_config(content)
        except Exception as e:
            logger.exception("[get-access-token] exception occurred")
            raise

    def _parse_config(self, content: str) -> dict:
        if not content:
            return {}
        try:
            return _parse_config(self, content)
        except Exception as e:
            raise RuntimeError(f"Failed to parse config: {e}")

    def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
        if not isinstance(self.remote_configs, dict):
            raise ValueError(f"remote configs is not dict, but {type(self.remote_configs)}")
        field_value = self.remote_configs.get(field_name)
        if field_value is None:
            return None, field_name, False

        return field_value, field_name, False

View File

@@ -0,0 +1,83 @@
import base64
import hashlib
import hmac
import logging
import os
import time

import requests

logger = logging.getLogger(__name__)


class NacosHttpClient:
    def __init__(self):
        self.username = os.getenv("DIFY_ENV_NACOS_USERNAME")
        self.password = os.getenv("DIFY_ENV_NACOS_PASSWORD")
        self.ak = os.getenv("DIFY_ENV_NACOS_ACCESS_KEY")
        self.sk = os.getenv("DIFY_ENV_NACOS_SECRET_KEY")
        self.server = os.getenv("DIFY_ENV_NACOS_SERVER_ADDR", "localhost:8848")
        self.token = None
        self.token_ttl = 18000
        self.token_expire_time: float = 0

    def http_request(self, url, method="GET", headers=None, params=None):
        try:
            self._inject_auth_info(headers, params)
            response = requests.request(method, url="http://" + self.server + url, headers=headers, params=params)
            response.raise_for_status()
            return response.text
        except requests.exceptions.RequestException as e:
            return f"Request to Nacos failed: {e}"

    def _inject_auth_info(self, headers, params, module="config"):
        headers.update({"User-Agent": "Nacos-Http-Client-In-Dify:v0.0.1"})

        if module == "login":
            return

        ts = str(int(time.time() * 1000))

        if self.ak and self.sk:
            sign_str = self.get_sign_str(params["group"], params["tenant"], ts)
            headers["Spas-AccessKey"] = self.ak
            headers["Spas-Signature"] = self.__do_sign(sign_str, self.sk)
            headers["timeStamp"] = ts
        if self.username and self.password:
            self.get_access_token(force_refresh=False)
            params["accessToken"] = self.token

    def __do_sign(self, sign_str, sk):
        return (
            base64.encodebytes(hmac.new(sk.encode(), sign_str.encode(), digestmod=hashlib.sha1).digest())
            .decode()
            .strip()
        )

    def get_sign_str(self, group, tenant, ts):
        sign_str = ""
        if tenant:
            sign_str = tenant + "+"
        if group:
            sign_str = sign_str + group + "+"
        if sign_str:
            sign_str += ts
        return sign_str

    def get_access_token(self, force_refresh=False):
        current_time = time.time()
        if self.token and not force_refresh and self.token_expire_time > current_time:
            return self.token

        params = {"username": self.username, "password": self.password}
        url = "http://" + self.server + "/nacos/v1/auth/login"
        try:
            resp = requests.request("POST", url, headers=None, params=params)
            resp.raise_for_status()
            response_data = resp.json()
            self.token = response_data.get("accessToken")
            self.token_ttl = response_data.get("tokenTtl", 18000)
            self.token_expire_time = current_time + self.token_ttl - 10
        except Exception as e:
            logger.exception("[get-access-token] exception occur")
            raise
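
For context, a usage sketch mirroring the call that `NacosSettingsSource.async_init` (above) makes into this client. The config coordinates are the documented defaults; a Nacos server reachable at the default address is assumed:

```python
# Usage sketch for NacosHttpClient, mirroring NacosSettingsSource.async_init;
# assumes a Nacos server reachable at the default localhost:8848 address.
import os

os.environ.setdefault("DIFY_ENV_NACOS_SERVER_ADDR", "localhost:8848")

client = NacosHttpClient()
content = client.http_request(
    "/nacos/v1/cs/configs",
    method="GET",
    headers={},
    params={"dataId": "dify-api-env.properties", "group": "nacos-dify", "tenant": ""},
)
print(content)  # raw .properties payload, or an error string on request failure
```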

View File

@@ -0,0 +1,31 @@
def _parse_config(self, content: str) -> dict[str, str]:
    config: dict[str, str] = {}
    if not content:
        return config

    for line in content.splitlines():
        cleaned_line = line.strip()
        if not cleaned_line or cleaned_line.startswith(("#", "!")):
            continue

        separator_index = -1
        for i, c in enumerate(cleaned_line):
            if c in ("=", ":") and (i == 0 or cleaned_line[i - 1] != "\\"):
                separator_index = i
                break
        if separator_index == -1:
            continue

        key = cleaned_line[:separator_index].strip()
        raw_value = cleaned_line[separator_index + 1 :].strip()

        try:
            decoded_value = bytes(raw_value, "utf-8").decode("unicode_escape")
            decoded_value = decoded_value.replace(r"\=", "=").replace(r"\:", ":")
        except UnicodeDecodeError:
            decoded_value = raw_value

        config[key] = decoded_value
    return config
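
A quick illustration of what this parser accepts (the sample payload below is made up): lines starting with `#` or `!` are comments, the first unescaped `=` or `:` splits key and value, and values go through unicode-escape decoding.

sample = "\n".join([
    "# a comment",
    "! also a comment",
    "app.name=dify",
    "endpoint: http://localhost:8848",
    "motd=hello\\u0020world",  # \u0020 decodes to a space
])
# _parse_config(None, sample) would return:
# {"app.name": "dify", "endpoint": "http://localhost:8848", "motd": "hello world"}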

View File

@@ -3,6 +3,8 @@ from configs import dify_config
 HIDDEN_VALUE = "[__HIDDEN__]"
 UUID_NIL = "00000000-0000-0000-0000-000000000000"
+DEFAULT_FILE_NUMBER_LIMITS = 3

 IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"]
 IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS])

View File

@@ -39,8 +39,8 @@ plugin_model_schemas: RecyclableContextVar[dict[str, "AIModelEntity"]] = Recycla
     ContextVar("plugin_model_schemas")
 )

-datasource_plugin_providers: RecyclableContextVar[dict[str, "DatasourcePluginProviderController"]] = RecyclableContextVar(
-    ContextVar("datasource_plugin_providers")
+datasource_plugin_providers: RecyclableContextVar[dict[str, "DatasourcePluginProviderController"]] = (
+    RecyclableContextVar(ContextVar("datasource_plugin_providers"))
 )

 datasource_plugin_providers_lock: RecyclableContextVar[Lock] = RecyclableContextVar(

View File

@@ -4,8 +4,6 @@ import platform
 import re
 import urllib.parse
 import warnings
-from collections.abc import Mapping
-from typing import Any
 from uuid import uuid4

 import httpx

@@ -29,8 +27,6 @@ except ImportError:
 from pydantic import BaseModel

-from configs import dify_config
-
 class FileInfo(BaseModel):
     filename: str

@@ -87,38 +83,3 @@ def guess_file_info_from_response(response: httpx.Response):
         mimetype=mimetype,
         size=int(response.headers.get("Content-Length", -1)),
     )
-
-
-def get_parameters_from_feature_dict(*, features_dict: Mapping[str, Any], user_input_form: list[dict[str, Any]]):
-    return {
-        "opening_statement": features_dict.get("opening_statement"),
-        "suggested_questions": features_dict.get("suggested_questions", []),
-        "suggested_questions_after_answer": features_dict.get("suggested_questions_after_answer", {"enabled": False}),
-        "speech_to_text": features_dict.get("speech_to_text", {"enabled": False}),
-        "text_to_speech": features_dict.get("text_to_speech", {"enabled": False}),
-        "retriever_resource": features_dict.get("retriever_resource", {"enabled": False}),
-        "annotation_reply": features_dict.get("annotation_reply", {"enabled": False}),
-        "more_like_this": features_dict.get("more_like_this", {"enabled": False}),
-        "user_input_form": user_input_form,
-        "sensitive_word_avoidance": features_dict.get(
-            "sensitive_word_avoidance", {"enabled": False, "type": "", "configs": []}
-        ),
-        "file_upload": features_dict.get(
-            "file_upload",
-            {
-                "image": {
-                    "enabled": False,
-                    "number_limits": 3,
-                    "detail": "high",
-                    "transfer_methods": ["remote_url", "local_file"],
-                }
-            },
-        ),
-        "system_parameters": {
-            "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT,
-            "video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT,
-            "audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT,
-            "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT,
-            "workflow_file_upload_limit": dify_config.WORKFLOW_FILE_UPLOAD_LIMIT,
-        },
-    }

View File

@@ -1,9 +1,9 @@
 from flask import Blueprint

-from .datasets.rag_pipeline import data_source
 from libs.external_api import ExternalApi

 from .app.app_import import AppImportApi, AppImportCheckDependenciesApi, AppImportConfirmApi
+from .datasets.rag_pipeline import data_source
 from .explore.audio import ChatAudioApi, ChatTextApi
 from .explore.completion import ChatApi, ChatStopApi, CompletionApi, CompletionStopApi
 from .explore.conversation import (

View File

@@ -89,7 +89,7 @@ class AnnotationReplyActionStatusApi(Resource):
         app_annotation_job_key = "{}_app_annotation_job_{}".format(action, str(job_id))
         cache_result = redis_client.get(app_annotation_job_key)
         if cache_result is None:
-            raise ValueError("The job is not exist.")
+            raise ValueError("The job does not exist.")

         job_status = cache_result.decode()
         error_msg = ""

@@ -186,7 +186,7 @@ class AnnotationUpdateDeleteApi(Resource):
         app_id = str(app_id)
         annotation_id = str(annotation_id)
         AppAnnotationService.delete_app_annotation(app_id, annotation_id)
-        return {"result": "success"}, 200
+        return {"result": "success"}, 204

 class AnnotationBatchImportApi(Resource):

@@ -226,7 +226,7 @@ class AnnotationBatchImportStatusApi(Resource):
         indexing_cache_key = "app_annotation_batch_import_{}".format(str(job_id))
         cache_result = redis_client.get(indexing_cache_key)
         if cache_result is None:
-            raise ValueError("The job is not exist.")
+            raise ValueError("The job does not exist.")
         job_status = cache_result.decode()
         error_msg = ""
         if job_status == "error":

View File

@@ -80,8 +80,6 @@ class ChatMessageTextApi(Resource):
     @account_initialization_required
     @get_app_model
     def post(self, app_model: App):
-        from werkzeug.exceptions import InternalServerError
-
         try:
             parser = reqparse.RequestParser()
             parser.add_argument("message_id", type=str, location="json")

View File

@@ -85,5 +85,35 @@ class RuleCodeGenerateApi(Resource):
         return code_result

+class RuleStructuredOutputGenerateApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    def post(self):
+        parser = reqparse.RequestParser()
+        parser.add_argument("instruction", type=str, required=True, nullable=False, location="json")
+        parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json")
+        args = parser.parse_args()
+
+        account = current_user
+        try:
+            structured_output = LLMGenerator.generate_structured_output(
+                tenant_id=account.current_tenant_id,
+                instruction=args["instruction"],
+                model_config=args["model_config"],
+            )
+        except ProviderTokenNotInitError as ex:
+            raise ProviderNotInitializeError(ex.description)
+        except QuotaExceededError:
+            raise ProviderQuotaExceededError()
+        except ModelCurrentlyNotSupportError:
+            raise ProviderModelCurrentlyNotSupportError()
+        except InvokeError as e:
+            raise CompletionRequestError(e.description)
+
+        return structured_output
+
 api.add_resource(RuleGenerateApi, "/rule-generate")
 api.add_resource(RuleCodeGenerateApi, "/rule-code-generate")
+api.add_resource(RuleStructuredOutputGenerateApi, "/rule-structured-output-generate")
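
A hypothetical client-side sketch of calling the new endpoint (the host, URL prefix, auth header, and model_config shape are assumptions, not taken from this diff):

import requests

resp = requests.post(
    "http://localhost:5001/console/api/rule-structured-output-generate",  # assumed console prefix
    headers={"Authorization": "Bearer <console-token>"},                  # placeholder auth
    json={
        "instruction": "Return the user's name and age as a JSON schema",
        "model_config": {"provider": "openai", "name": "gpt-4o", "completion_params": {}},
    },
)
print(resp.json())  # the generated structured-output definition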

View File

@@ -84,7 +84,7 @@ class TraceAppConfigApi(Resource):
             result = OpsService.delete_tracing_app_config(app_id=app_id, tracing_provider=args["tracing_provider"])
             if not result:
                 raise TracingConfigNotExist()
-            return {"result": "success"}
+            return {"result": "success"}, 204
         except Exception as e:
             raise BadRequest(str(e))

View File

@@ -1,5 +1,4 @@
-from datetime import datetime
+from dateutil.parser import isoparse
 from flask_restful import Resource, marshal_with, reqparse  # type: ignore
 from flask_restful.inputs import int_range  # type: ignore
 from sqlalchemy.orm import Session

@@ -41,10 +40,10 @@ class WorkflowAppLogApi(Resource):
         args.status = WorkflowRunStatus(args.status) if args.status else None
         if args.created_at__before:
-            args.created_at__before = datetime.fromisoformat(args.created_at__before.replace("Z", "+00:00"))
+            args.created_at__before = isoparse(args.created_at__before)

         if args.created_at__after:
-            args.created_at__after = datetime.fromisoformat(args.created_at__after.replace("Z", "+00:00"))
+            args.created_at__after = isoparse(args.created_at__after)

         # get paginate workflow app logs
         workflow_app_service = WorkflowAppService()
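
The switch to dateutil is behavior-preserving for the old inputs but more tolerant; a minimal sketch (assuming python-dateutil is installed) of why: datetime.fromisoformat() on Python < 3.11 rejects a trailing "Z", which is what the old replace("Z", "+00:00") worked around, while isoparse() accepts it directly.

from dateutil.parser import isoparse

print(isoparse("2024-05-01T12:30:00Z"))       # 2024-05-01 12:30:00+00:00
print(isoparse("2024-05-01T12:30:00+08:00"))  # explicit offsets still work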

View File

@@ -65,7 +65,7 @@ class ApiKeyAuthDataSourceBindingDelete(Resource):
         ApiKeyAuthService.delete_provider_auth(current_user.current_tenant_id, binding_id)

-        return {"result": "success"}, 200
+        return {"result": "success"}, 204

 api.add_resource(ApiKeyAuthDataSource, "/api-key-auth/data-source")

View File

@@ -74,7 +74,9 @@ class OAuthDataSourceBinding(Resource):
         if not oauth_provider:
             return {"error": "Invalid provider"}, 400
         if "code" in request.args:
-            code = request.args.get("code")
+            code = request.args.get("code", "")
+            if not code:
+                return {"error": "Invalid code"}, 400
             try:
                 oauth_provider.get_access_token(code)
             except requests.exceptions.HTTPError as e:

View File

@@ -16,7 +16,7 @@ from controllers.console.auth.error import (
     PasswordMismatchError,
 )
 from controllers.console.error import AccountInFreezeError, AccountNotFound, EmailSendIpLimitError
-from controllers.console.wraps import setup_required
+from controllers.console.wraps import email_password_login_enabled, setup_required
 from events.tenant_event import tenant_was_created
 from extensions.ext_database import db
 from libs.helper import email, extract_remote_ip

@@ -30,6 +30,7 @@ from services.feature_service import FeatureService
 class ForgotPasswordSendEmailApi(Resource):
     @setup_required
+    @email_password_login_enabled
     def post(self):
         parser = reqparse.RequestParser()
         parser.add_argument("email", type=email, required=True, location="json")

@@ -62,6 +63,7 @@ class ForgotPasswordSendEmailApi(Resource):
 class ForgotPasswordCheckApi(Resource):
     @setup_required
+    @email_password_login_enabled
     def post(self):
         parser = reqparse.RequestParser()
         parser.add_argument("email", type=str, required=True, location="json")

@@ -86,12 +88,21 @@ class ForgotPasswordCheckApi(Resource):
             AccountService.add_forgot_password_error_rate_limit(args["email"])
             raise EmailCodeError()

+        # Verified, revoke the first token
+        AccountService.revoke_reset_password_token(args["token"])
+
+        # Refresh token data by generating a new token
+        _, new_token = AccountService.generate_reset_password_token(
+            user_email, code=args["code"], additional_data={"phase": "reset"}
+        )
+
         AccountService.reset_forgot_password_error_rate_limit(args["email"])
-        return {"is_valid": True, "email": token_data.get("email")}
+        return {"is_valid": True, "email": token_data.get("email"), "token": new_token}

 class ForgotPasswordResetApi(Resource):
     @setup_required
+    @email_password_login_enabled
     def post(self):
         parser = reqparse.RequestParser()
         parser.add_argument("token", type=str, required=True, nullable=False, location="json")

@@ -107,6 +118,9 @@ class ForgotPasswordResetApi(Resource):
         reset_data = AccountService.get_reset_password_data(args["token"])
         if not reset_data:
             raise InvalidTokenError()
+        # Must use token in reset phase
+        if reset_data.get("phase", "") != "reset":
+            raise InvalidTokenError()

         # Revoke token to prevent reuse
         AccountService.revoke_reset_password_token(args["token"])
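
The net effect is a two-phase token: the token mailed to the user is consumed by the check step, which issues a fresh token tagged phase="reset", and only that token passes the reset endpoint. A minimal sketch of the invariant (a plain dict stands in for the Redis-backed token store used by AccountService):

tokens = {}

def issue(email, phase):
    token = f"tok-{len(tokens)}"
    tokens[token] = {"email": email, "phase": phase}
    return token

def reset_password(token):
    data = tokens.pop(token, None)  # revoke on use, as the real code does
    if not data or data.get("phase") != "reset":
        raise ValueError("invalid token")
    return f"password reset for {data['email']}"

mailed = issue("user@example.com", phase="verify")   # token from the email link
# reset_password(mailed) -> ValueError: wrong phase, code verification cannot be skipped
verified = issue("user@example.com", phase="reset")  # issued only after the code check
print(reset_password(verified))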

View File

@@ -22,7 +22,7 @@ from controllers.console.error import (
     EmailSendIpLimitError,
     NotAllowedCreateWorkspace,
 )
-from controllers.console.wraps import setup_required
+from controllers.console.wraps import email_password_login_enabled, setup_required
 from events.tenant_event import tenant_was_created
 from libs.helper import email, extract_remote_ip
 from libs.password import valid_password

@@ -38,6 +38,7 @@ class LoginApi(Resource):
     """Resource for user login."""

     @setup_required
+    @email_password_login_enabled
     def post(self):
         """Authenticate user and login."""
         parser = reqparse.RequestParser()

@@ -110,6 +111,7 @@ class LogoutApi(Resource):
 class ResetPasswordSendEmailApi(Resource):
     @setup_required
+    @email_password_login_enabled
     def post(self):
         parser = reqparse.RequestParser()
         parser.add_argument("email", type=email, required=True, location="json")

View File

@@ -657,6 +657,7 @@ class DatasetRetrievalSettingApi(Resource):
                 | VectorType.ELASTICSEARCH
                 | VectorType.ELASTICSEARCH_JA
                 | VectorType.PGVECTOR
+                | VectorType.VASTBASE
                 | VectorType.TIDB_ON_QDRANT
                 | VectorType.LINDORM
                 | VectorType.COUCHBASE

@@ -664,6 +665,7 @@ class DatasetRetrievalSettingApi(Resource):
                 | VectorType.OPENGAUSS
                 | VectorType.OCEANBASE
                 | VectorType.TABLESTORE
+                | VectorType.HUAWEI_CLOUD
                 | VectorType.TENCENT
             ):
                 return {

@@ -705,11 +707,13 @@ class DatasetRetrievalSettingMockApi(Resource):
                 | VectorType.ELASTICSEARCH_JA
                 | VectorType.COUCHBASE
                 | VectorType.PGVECTOR
+                | VectorType.VASTBASE
                 | VectorType.LINDORM
                 | VectorType.OPENGAUSS
                 | VectorType.OCEANBASE
                 | VectorType.TABLESTORE
                 | VectorType.TENCENT
+                | VectorType.HUAWEI_CLOUD
             ):
                 return {
                     "retrieval_method": [

View File

@@ -40,7 +40,7 @@ from core.indexing_runner import IndexingRunner
 from core.model_manager import ModelManager
 from core.model_runtime.entities.model_entities import ModelType
 from core.model_runtime.errors.invoke import InvokeAuthorizationError
-from core.plugin.manager.exc import PluginDaemonClientSideError
+from core.plugin.impl.exc import PluginDaemonClientSideError
 from core.rag.extractor.entity.extract_setting import ExtractSetting
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client

View File

@@ -131,7 +131,7 @@ class DatasetDocumentSegmentListApi(Resource):
         except services.errors.account.NoPermissionError as e:
             raise Forbidden(str(e))
         SegmentService.delete_segments(segment_ids, document, dataset)
-        return {"result": "success"}, 200
+        return {"result": "success"}, 204

 class DatasetDocumentSegmentApi(Resource):

@@ -333,7 +333,7 @@ class DatasetDocumentSegmentUpdateApi(Resource):
         except services.errors.account.NoPermissionError as e:
             raise Forbidden(str(e))
         SegmentService.delete_segment(segment, document, dataset)
-        return {"result": "success"}, 200
+        return {"result": "success"}, 204

 class DatasetDocumentSegmentBatchImportApi(Resource):

@@ -398,7 +398,7 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
         indexing_cache_key = "segment_batch_import_{}".format(job_id)
         cache_result = redis_client.get(indexing_cache_key)
         if cache_result is None:
-            raise ValueError("The job is not exist.")
+            raise ValueError("The job does not exist.")
         return {"job_id": job_id, "job_status": cache_result.decode()}, 200

@@ -590,7 +590,7 @@ class ChildChunkUpdateApi(Resource):
             SegmentService.delete_child_chunk(child_chunk, dataset)
         except ChildChunkDeleteIndexServiceError as e:
             raise ChildChunkDeleteIndexError(str(e))
-        return {"result": "success"}, 200
+        return {"result": "success"}, 204

     @setup_required
     @login_required

View File

@@ -106,4 +106,4 @@ class ChildChunkDeleteIndexError(BaseHTTPException):
 class PipelineNotFoundError(BaseHTTPException):
     error_code = "pipeline_not_found"
     description = "Pipeline not found."
     code = 404

View File

@@ -21,12 +21,6 @@ def _validate_name(name):
     return name

-def _validate_description_length(description):
-    if description and len(description) > 400:
-        raise ValueError("Description cannot exceed 400 characters.")
-    return description
-
 class ExternalApiTemplateListApi(Resource):
     @setup_required
     @login_required

@@ -141,7 +135,7 @@ class ExternalApiTemplateApi(Resource):
             raise Forbidden()

         ExternalDatasetService.delete_external_knowledge_api(current_user.current_tenant_id, external_knowledge_api_id)
-        return {"result": "success"}, 200
+        return {"result": "success"}, 204

 class ExternalApiUseCheckApi(Resource):

View File

@@ -14,18 +14,6 @@ from services.entities.knowledge_entities.knowledge_entities import (
 from services.metadata_service import MetadataService

-def _validate_name(name):
-    if not name or len(name) < 1 or len(name) > 40:
-        raise ValueError("Name must be between 1 to 40 characters.")
-    return name
-
-def _validate_description_length(description):
-    if len(description) > 400:
-        raise ValueError("Description cannot exceed 400 characters.")
-    return description
-
 class DatasetMetadataCreateApi(Resource):
     @setup_required
     @login_required

@@ -94,7 +82,7 @@ class DatasetMetadataApi(Resource):
         DatasetService.check_dataset_permission(dataset, current_user)
         MetadataService.delete_metadata(dataset_id_str, metadata_id_str)
-        return 200
+        return {"result": "success"}, 204

 class DatasetMetadataBuiltInFieldApi(Resource):

View File

@@ -1,47 +1,17 @@
-import json
 import logging
-from typing import cast

-from flask import abort, request
+from flask import request
-from flask_restful import Resource, inputs, marshal_with, reqparse  # type: ignore
+from flask_restful import Resource, reqparse  # type: ignore
-from sqlalchemy.orm import Session
-from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
-
-import services
-from configs import dify_config
 from controllers.console import api
-from controllers.console.app.error import (
-    ConversationCompletedError,
-    DraftWorkflowNotExist,
-    DraftWorkflowNotSync,
-)
-from controllers.console.app.wraps import get_app_model
-from controllers.console.datasets.wraps import get_rag_pipeline
 from controllers.console.wraps import (
     account_initialization_required,
     enterprise_license_required,
     setup_required,
 )
-from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
+from libs.login import login_required
-from core.app.apps.base_app_queue_manager import AppQueueManager
-from core.app.entities.app_invoke_entities import InvokeFrom
-from extensions.ext_database import db
-from factories import variable_factory
-from fields.workflow_fields import workflow_fields, workflow_pagination_fields
-from fields.workflow_run_fields import workflow_run_node_execution_fields
-from libs import helper
-from libs.helper import TimestampField
-from libs.login import current_user, login_required
-from models import App
-from models.account import Account
-from models.dataset import Pipeline
-from models.model import AppMode
-from services.app_generate_service import AppGenerateService
 from services.entities.knowledge_entities.rag_pipeline_entities import PipelineTemplateInfoEntity
-from services.errors.app import WorkflowHashNotEqualError
-from services.errors.llm import InvokeRateLimitError
 from services.rag_pipeline.rag_pipeline import RagPipelineService
-from services.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError, WorkflowService

 logger = logging.getLogger(__name__)

View File

@@ -147,6 +147,7 @@ class DraftRagPipelineApi(Resource):
             "updated_at": TimestampField().format(workflow.updated_at or workflow.created_at),
         }

+
 class RagPipelineDraftRunIterationNodeApi(Resource):
     @setup_required
     @login_required

@@ -255,6 +256,7 @@ class DraftRagPipelineRunApi(Resource):
         except InvokeRateLimitError as ex:
             raise InvokeRateLimitHttpError(ex.description)

+
 class RagPipelineDatasourceNodeRunApi(Resource):
     @setup_required
     @login_required

@@ -319,6 +321,7 @@ class RagPipelineDraftNodeRunApi(Resource):
         return workflow_node_execution

+
 class RagPipelineTaskStopApi(Resource):
     @setup_required
     @login_required

@@ -336,6 +339,7 @@ class RagPipelineTaskStopApi(Resource):
         return {"result": "success"}

+
 class PublishedRagPipelineApi(Resource):
     @setup_required
     @login_required

@@ -649,6 +653,7 @@ class RagPipelineByIdApi(Resource):
         return None, 204

+
 class RagPipelineSecondStepApi(Resource):
     @setup_required
     @login_required

@@ -662,11 +667,11 @@ class RagPipelineSecondStepApi(Resource):
         if not current_user.is_editor:
             raise Forbidden()

         datasource_provider = request.args.get("datasource_provider", required=True, type=str)
         rag_pipeline_service = RagPipelineService()
-        return rag_pipeline_service.get_second_step_parameters(pipeline=pipeline,
-                                                               datasource_provider=datasource_provider
+        return rag_pipeline_service.get_second_step_parameters(
+            pipeline=pipeline, datasource_provider=datasource_provider
         )

 api.add_resource(

@@ -727,4 +732,3 @@ api.add_resource(
     RagPipelineByIdApi,
     "/rag/pipelines/<uuid:pipeline_id>/workflows/<string:workflow_id>",
 )

View File

@@ -8,7 +8,9 @@ from libs.login import current_user
 from models.dataset import Pipeline

-def get_rag_pipeline(view: Optional[Callable] = None,):
+def get_rag_pipeline(
+    view: Optional[Callable] = None,
+):
     def decorator(view_func):
         @wraps(view_func)
         def decorated_view(*args, **kwargs):

View File

@@ -113,7 +113,7 @@ class InstalledAppApi(InstalledAppResource):
         db.session.delete(installed_app)
         db.session.commit()

-        return {"result": "success", "message": "App uninstalled successfully"}
+        return {"result": "success", "message": "App uninstalled successfully"}, 204

     def patch(self, installed_app):
         parser = reqparse.RequestParser()

View File

@@ -1,10 +1,10 @@
 from flask_restful import marshal_with  # type: ignore

 from controllers.common import fields
-from controllers.common import helpers as controller_helpers
 from controllers.console import api
 from controllers.console.app.error import AppUnavailableError
 from controllers.console.explore.wraps import InstalledAppResource
+from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict
 from models.model import AppMode, InstalledApp
 from services.app_service import AppService

@@ -36,9 +36,7 @@ class AppParameterApi(InstalledAppResource):
         user_input_form = features_dict.get("user_input_form", [])

-        return controller_helpers.get_parameters_from_feature_dict(
-            features_dict=features_dict, user_input_form=user_input_form
-        )
+        return get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form)

 class ExploreAppMetaApi(InstalledAppResource):

View File

@@ -72,7 +72,7 @@ class SavedMessageApi(InstalledAppResource):
         SavedMessageService.delete(app_model, current_user, message_id)

-        return {"result": "success"}
+        return {"result": "success"}, 204

 api.add_resource(

View File

@@ -99,7 +99,7 @@ class APIBasedExtensionDetailAPI(Resource):
         APIBasedExtensionService.delete(extension_data_from_db)

-        return {"result": "success"}
+        return {"result": "success"}, 204

 api.add_resource(CodeBasedExtensionAPI, "/code-based-extension")

View File

@@ -86,7 +86,7 @@ class TagUpdateDeleteApi(Resource):
         TagService.delete_tag(tag_id)

-        return 200
+        return 204

 class TagBindingCreateApi(Resource):

View File

@@ -286,8 +286,6 @@ class AccountDeleteApi(Resource):
 class AccountDeleteUpdateFeedbackApi(Resource):
     @setup_required
     def post(self):
-        account = current_user
-
         parser = reqparse.RequestParser()
         parser.add_argument("email", type=str, required=True, location="json")
         parser.add_argument("feedback", type=str, required=True, location="json")

View File

@@ -5,6 +5,7 @@ from werkzeug.exceptions import Forbidden
 from controllers.console import api
 from controllers.console.wraps import account_initialization_required, setup_required
 from core.model_runtime.utils.encoders import jsonable_encoder
+from core.plugin.impl.exc import PluginPermissionDeniedError
 from libs.login import login_required
 from services.plugin.endpoint_service import EndpointService

@@ -28,15 +29,18 @@ class EndpointCreateApi(Resource):
         settings = args["settings"]
         name = args["name"]

-        return {
-            "success": EndpointService.create_endpoint(
-                tenant_id=user.current_tenant_id,
-                user_id=user.id,
-                plugin_unique_identifier=plugin_unique_identifier,
-                name=name,
-                settings=settings,
-            )
-        }
+        try:
+            return {
+                "success": EndpointService.create_endpoint(
+                    tenant_id=user.current_tenant_id,
+                    user_id=user.id,
+                    plugin_unique_identifier=plugin_unique_identifier,
+                    name=name,
+                    settings=settings,
+                )
+            }
+        except PluginPermissionDeniedError as e:
+            raise ValueError(e.description) from e

 class EndpointListApi(Resource):

View File

@@ -10,7 +10,7 @@ from controllers.console import api
 from controllers.console.workspace import plugin_permission_required
 from controllers.console.wraps import account_initialization_required, setup_required
 from core.model_runtime.utils.encoders import jsonable_encoder
-from core.plugin.manager.exc import PluginDaemonClientSideError
+from core.plugin.impl.exc import PluginDaemonClientSideError
 from libs.login import login_required
 from models.account import TenantPluginPermission
 from services.plugin.plugin_permission_service import PluginPermissionService

@@ -249,6 +249,31 @@ class PluginInstallFromMarketplaceApi(Resource):
         return jsonable_encoder(response)

+class PluginFetchMarketplacePkgApi(Resource):
+    @setup_required
+    @login_required
+    @account_initialization_required
+    @plugin_permission_required(install_required=True)
+    def get(self):
+        tenant_id = current_user.current_tenant_id
+
+        parser = reqparse.RequestParser()
+        parser.add_argument("plugin_unique_identifier", type=str, required=True, location="args")
+        args = parser.parse_args()
+
+        try:
+            return jsonable_encoder(
+                {
+                    "manifest": PluginService.fetch_marketplace_pkg(
+                        tenant_id,
+                        args["plugin_unique_identifier"],
+                    )
+                }
+            )
+        except PluginDaemonClientSideError as e:
+            raise ValueError(e)
+
 class PluginFetchManifestApi(Resource):
     @setup_required
     @login_required

@@ -488,6 +513,7 @@ api.add_resource(PluginDeleteInstallTaskApi, "/workspaces/current/plugin/tasks/<
 api.add_resource(PluginDeleteAllInstallTaskItemsApi, "/workspaces/current/plugin/tasks/delete_all")
 api.add_resource(PluginDeleteInstallTaskItemApi, "/workspaces/current/plugin/tasks/<task_id>/delete/<path:identifier>")
 api.add_resource(PluginUninstallApi, "/workspaces/current/plugin/uninstall")
+api.add_resource(PluginFetchMarketplacePkgApi, "/workspaces/current/plugin/marketplace/pkg")
 api.add_resource(PluginChangePermissionApi, "/workspaces/current/plugin/permission/change")
 api.add_resource(PluginFetchPermissionApi, "/workspaces/current/plugin/permission/fetch")

View File

@@ -10,6 +10,7 @@ from configs import dify_config
 from controllers.console.workspace.error import AccountNotInitializedError
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
+from models.account import AccountStatus
 from models.dataset import RateLimitLog
 from models.model import DifySetup
 from services.feature_service import FeatureService, LicenseStatus

@@ -24,7 +25,7 @@ def account_initialization_required(view):
         # check account initialization
         account = current_user

-        if account.status == "uninitialized":
+        if account.status == AccountStatus.UNINITIALIZED:
             raise AccountNotInitializedError()

         return view(*args, **kwargs)

@@ -210,3 +211,16 @@ def enterprise_license_required(view):
         return view(*args, **kwargs)

     return decorated

+
+def email_password_login_enabled(view):
+    @wraps(view)
+    def decorated(*args, **kwargs):
+        features = FeatureService.get_system_features()
+        if features.enable_email_password_login:
+            return view(*args, **kwargs)
+
+        # otherwise, return 403
+        abort(403)
+
+    return decorated
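
For reference, a small self-contained sketch of the same feature-flag-gating pattern (the _Features class below is a stand-in for what FeatureService.get_system_features() returns):

from functools import wraps

class _Features:  # stand-in for the real system features object
    enable_email_password_login = False

def flag_required(flag_name):
    def decorator(view):
        @wraps(view)
        def decorated(*args, **kwargs):
            if getattr(_Features, flag_name, False):
                return view(*args, **kwargs)
            raise PermissionError("403: feature disabled")  # abort(403) in Flask
        return decorated
    return decorator

@flag_required("enable_email_password_login")
def login():
    return "ok"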

View File

@@ -1,3 +1,5 @@
+from mimetypes import guess_extension
+
 from flask import request
 from flask_restful import Resource, marshal_with  # type: ignore
 from werkzeug.exceptions import Forbidden

@@ -9,8 +11,8 @@ from controllers.files.error import UnsupportedFileTypeError
 from controllers.inner_api.plugin.wraps import get_user
 from controllers.service_api.app.error import FileTooLargeError
 from core.file.helpers import verify_plugin_file_signature
+from core.tools.tool_file_manager import ToolFileManager
 from fields.file_fields import file_fields
-from services.file_service import FileService

 class PluginUploadFileApi(Resource):

@@ -51,19 +53,26 @@ class PluginUploadFileApi(Resource):
             raise Forbidden("Invalid request.")

         try:
-            upload_file = FileService.upload_file(
-                filename=filename,
-                content=file.read(),
+            tool_file = ToolFileManager.create_file_by_raw(
+                user_id=user.id,
+                tenant_id=tenant_id,
+                file_binary=file.read(),
                 mimetype=mimetype,
-                user=user,
-                source=None,
+                filename=filename,
+                conversation_id=None,
             )
+            extension = guess_extension(tool_file.mimetype) or ".bin"
+            preview_url = ToolFileManager.sign_file(tool_file_id=tool_file.id, extension=extension)
+            tool_file.mime_type = mimetype
+            tool_file.extension = extension
+            tool_file.preview_url = preview_url
         except services.errors.file.FileTooLargeError as file_too_large_error:
             raise FileTooLargeError(file_too_large_error.description)
         except services.errors.file.UnsupportedFileTypeError:
             raise UnsupportedFileTypeError()

-        return upload_file, 201
+        return tool_file, 201

 api.add_resource(PluginUploadFileApi, "/files/upload/for-plugin")

View File

@@ -13,6 +13,7 @@ from core.plugin.backwards_invocation.model import PluginModelBackwardsInvocatio
 from core.plugin.backwards_invocation.node import PluginNodeBackwardsInvocation
 from core.plugin.backwards_invocation.tool import PluginToolBackwardsInvocation
 from core.plugin.entities.request import (
+    RequestFetchAppInfo,
     RequestInvokeApp,
     RequestInvokeEncrypt,
     RequestInvokeLLM,

@@ -278,6 +279,17 @@ class PluginUploadFileRequestApi(Resource):
         return BaseBackwardsInvocationResponse(data={"url": url}).model_dump()

+class PluginFetchAppInfoApi(Resource):
+    @setup_required
+    @plugin_inner_api_only
+    @get_user_tenant
+    @plugin_data(payload_type=RequestFetchAppInfo)
+    def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestFetchAppInfo):
+        return BaseBackwardsInvocationResponse(
+            data=PluginAppBackwardsInvocation.fetch_app_info(payload.app_id, tenant_model.id)
+        ).model_dump()
+
 api.add_resource(PluginInvokeLLMApi, "/invoke/llm")
 api.add_resource(PluginInvokeTextEmbeddingApi, "/invoke/text-embedding")
 api.add_resource(PluginInvokeRerankApi, "/invoke/rerank")

@@ -291,3 +303,4 @@ api.add_resource(PluginInvokeAppApi, "/invoke/app")
 api.add_resource(PluginInvokeEncryptApi, "/invoke/encrypt")
 api.add_resource(PluginInvokeSummaryApi, "/invoke/summary")
 api.add_resource(PluginUploadFileRequestApi, "/upload/file/request")
+api.add_resource(PluginFetchAppInfoApi, "/fetch/app/info")

View File

@@ -98,7 +98,7 @@ class AnnotationUpdateDeleteApi(Resource):
         annotation_id = str(annotation_id)
         AppAnnotationService.delete_app_annotation(app_model.id, annotation_id)

-        return {"result": "success"}, 200
+        return {"result": "success"}, 204

 api.add_resource(AnnotationReplyActionApi, "/apps/annotation-reply/<string:action>")

View File

@@ -1,10 +1,10 @@
 from flask_restful import Resource, marshal_with  # type: ignore

 from controllers.common import fields
-from controllers.common import helpers as controller_helpers
 from controllers.service_api import api
 from controllers.service_api.app.error import AppUnavailableError
 from controllers.service_api.wraps import validate_app_token
+from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict
 from models.model import App, AppMode
 from services.app_service import AppService

@@ -32,9 +32,7 @@ class AppParameterApi(Resource):
         user_input_form = features_dict.get("user_input_form", [])

-        return controller_helpers.get_parameters_from_feature_dict(
-            features_dict=features_dict, user_input_form=user_input_form
-        )
+        return get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form)

 class AppMetaApi(Resource):

View File

@@ -14,6 +14,9 @@ from fields.conversation_fields import (
     conversation_infinite_scroll_pagination_fields,
     simple_conversation_fields,
 )
+from fields.conversation_variable_fields import (
+    conversation_variable_infinite_scroll_pagination_fields,
+)
 from libs.helper import uuid_value
 from models.model import App, AppMode, EndUser
 from services.conversation_service import ConversationService

@@ -69,7 +72,7 @@ class ConversationDetailApi(Resource):
             ConversationService.delete(app_model, conversation_id, end_user)
         except services.errors.conversation.ConversationNotExistsError:
             raise NotFound("Conversation Not Exists.")
-        return {"result": "success"}, 200
+        return {"result": "success"}, 204

 class ConversationRenameApi(Resource):

@@ -93,6 +96,31 @@ class ConversationRenameApi(Resource):
             raise NotFound("Conversation Not Exists.")

+class ConversationVariablesApi(Resource):
+    @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY))
+    @marshal_with(conversation_variable_infinite_scroll_pagination_fields)
+    def get(self, app_model: App, end_user: EndUser, c_id):
+        # conversational variable only for chat app
+        app_mode = AppMode.value_of(app_model.mode)
+        if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
+            raise NotChatAppError()
+
+        conversation_id = str(c_id)
+
+        parser = reqparse.RequestParser()
+        parser.add_argument("last_id", type=uuid_value, location="args")
+        parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
+        args = parser.parse_args()
+
+        try:
+            return ConversationService.get_conversational_variable(
+                app_model, conversation_id, end_user, args["limit"], args["last_id"]
+            )
+        except services.errors.conversation.ConversationNotExistsError:
+            raise NotFound("Conversation Not Exists.")
+
 api.add_resource(ConversationRenameApi, "/conversations/<uuid:c_id>/name", endpoint="conversation_name")
 api.add_resource(ConversationApi, "/conversations")
 api.add_resource(ConversationDetailApi, "/conversations/<uuid:c_id>", endpoint="conversation_detail")
+api.add_resource(ConversationVariablesApi, "/conversations/<uuid:c_id>/variables", endpoint="conversation_variables")
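
A hypothetical call sketch for the new route (base URL, API key, and IDs are placeholders; the end user goes in the query string because the resource fetches it via WhereisUserArg.QUERY):

import requests

resp = requests.get(
    "https://api.example.com/v1/conversations/<conversation-id>/variables",
    headers={"Authorization": "Bearer app-<api-key>"},
    params={"user": "end-user-123", "limit": 20},  # optionally add last_id to scroll
)
print(resp.json())  # paginated conversation variables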

View File

@@ -1,6 +1,6 @@
 import logging

-from datetime import datetime
+from dateutil.parser import isoparse
 from flask_restful import Resource, fields, marshal_with, reqparse  # type: ignore
 from flask_restful.inputs import int_range  # type: ignore
 from sqlalchemy.orm import Session

@@ -59,7 +59,7 @@ class WorkflowRunDetailApi(Resource):
         Get a workflow task running detail
         """
         app_mode = AppMode.value_of(app_model.mode)
-        if app_mode != AppMode.WORKFLOW:
+        if app_mode not in [AppMode.WORKFLOW, AppMode.ADVANCED_CHAT]:
             raise NotWorkflowAppError()

         workflow_run = db.session.query(WorkflowRun).filter(WorkflowRun.id == workflow_run_id).first()

@@ -140,10 +140,10 @@ class WorkflowAppLogApi(Resource):
         args.status = WorkflowRunStatus(args.status) if args.status else None
         if args.created_at__before:
-            args.created_at__before = datetime.fromisoformat(args.created_at__before.replace("Z", "+00:00"))
+            args.created_at__before = isoparse(args.created_at__before)

         if args.created_at__after:
-            args.created_at__after = datetime.fromisoformat(args.created_at__after.replace("Z", "+00:00"))
+            args.created_at__after = isoparse(args.created_at__after)

         # get paginate workflow app logs
         workflow_app_service = WorkflowAppService()

View File

@@ -13,6 +13,7 @@ from fields.dataset_fields import dataset_detail_fields
 from libs.login import current_user
 from models.dataset import Dataset, DatasetPermissionEnum
 from services.dataset_service import DatasetPermissionService, DatasetService
+from services.entities.knowledge_entities.knowledge_entities import RetrievalModel

 def _validate_name(name):

@@ -120,8 +121,11 @@ class DatasetListApi(DatasetApiResource):
             nullable=True,
             required=False,
         )
-        args = parser.parse_args()
+        parser.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json")
+        parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
+        parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
+        args = parser.parse_args()

         try:
             dataset = DatasetService.create_empty_dataset(
                 tenant_id=tenant_id,

@@ -133,6 +137,11 @@ class DatasetListApi(DatasetApiResource):
                 provider=args["provider"],
                 external_knowledge_api_id=args["external_knowledge_api_id"],
                 external_knowledge_id=args["external_knowledge_id"],
+                embedding_model_provider=args["embedding_model_provider"],
+                embedding_model_name=args["embedding_model"],
+                retrieval_model=RetrievalModel(**args["retrieval_model"])
+                if args["retrieval_model"] is not None
+                else None,
             )
         except services.errors.dataset.DatasetNameDuplicateError:
             raise DatasetNameDuplicateError()
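
A hypothetical request-body sketch for the extended dataset-creation endpoint (field names come from the parser above; the RetrievalModel sub-fields shown are assumptions, not taken from this diff):

import json

payload = {
    "name": "my-dataset",
    "indexing_technique": "high_quality",
    "embedding_model": "text-embedding-3-large",  # placeholder model name
    "embedding_model_provider": "openai",         # placeholder provider
    "retrieval_model": {                          # parsed into RetrievalModel(**...)
        "search_method": "semantic_search",
        "reranking_enable": False,
        "top_k": 3,
        "score_threshold_enabled": False,
    },
}
print(json.dumps(payload, indent=2))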

View File

@@ -49,7 +49,9 @@ class DocumentAddByTextApi(DatasetApiResource):
         parser.add_argument(
             "indexing_technique", type=str, choices=Dataset.INDEXING_TECHNIQUE_LIST, nullable=False, location="json"
         )
-        parser.add_argument("retrieval_model", type=dict, required=False, nullable=False, location="json")
+        parser.add_argument("retrieval_model", type=dict, required=False, nullable=True, location="json")
+        parser.add_argument("embedding_model", type=str, required=False, nullable=True, location="json")
+        parser.add_argument("embedding_model_provider", type=str, required=False, nullable=True, location="json")
         args = parser.parse_args()
         dataset_id = str(dataset_id)

@@ -57,7 +59,7 @@ class DocumentAddByTextApi(DatasetApiResource):
         dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()

         if not dataset:
-            raise ValueError("Dataset is not exist.")
+            raise ValueError("Dataset does not exist.")

         if not dataset.indexing_technique and not args["indexing_technique"]:
             raise ValueError("indexing_technique is required.")

@@ -114,7 +116,7 @@ class DocumentUpdateByTextApi(DatasetApiResource):
         dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()

         if not dataset:
-            raise ValueError("Dataset is not exist.")
+            raise ValueError("Dataset does not exist.")

         # indexing_technique is already set in dataset since this is an update
         args["indexing_technique"] = dataset.indexing_technique

@@ -172,7 +174,7 @@ class DocumentAddByFileApi(DatasetApiResource):
         dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
         if not dataset:
-            raise ValueError("Dataset is not exist.")
+            raise ValueError("Dataset does not exist.")
         if not dataset.indexing_technique and not args.get("indexing_technique"):
             raise ValueError("indexing_technique is required.")

@@ -239,7 +241,7 @@ class DocumentUpdateByFileApi(DatasetApiResource):
         dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
         if not dataset:
-            raise ValueError("Dataset is not exist.")
+            raise ValueError("Dataset does not exist.")

         # indexing_technique is already set in dataset since this is an update
         args["indexing_technique"] = dataset.indexing_technique

@@ -303,7 +305,7 @@ class DocumentDeleteApi(DatasetApiResource):
         dataset = db.session.query(Dataset).filter(Dataset.tenant_id == tenant_id, Dataset.id == dataset_id).first()
         if not dataset:
-            raise ValueError("Dataset is not exist.")
+            raise ValueError("Dataset does not exist.")

         document = DocumentService.get_document(dataset.id, document_id)

@@ -321,7 +323,7 @@ class DocumentDeleteApi(DatasetApiResource):
         except services.errors.document.DocumentIndexingError:
             raise DocumentIndexingError("Cannot delete document during indexing.")

-        return {"result": "success"}, 200
+        return {"result": "success"}, 204

 class DocumentListApi(DatasetApiResource):

View File

@@ -13,18 +13,6 @@ from services.entities.knowledge_entities.knowledge_entities import (
 from services.metadata_service import MetadataService

-def _validate_name(name):
-    if not name or len(name) < 1 or len(name) > 40:
-        raise ValueError("Name must be between 1 to 40 characters.")
-    return name
-
-def _validate_description_length(description):
-    if len(description) > 400:
-        raise ValueError("Description cannot exceed 400 characters.")
-    return description
-
 class DatasetMetadataCreateServiceApi(DatasetApiResource):
     def post(self, tenant_id, dataset_id):
         parser = reqparse.RequestParser()

@@ -75,7 +63,7 @@ class DatasetMetadataServiceApi(DatasetApiResource):
         DatasetService.check_dataset_permission(dataset, current_user)
         MetadataService.delete_metadata(dataset_id_str, metadata_id_str)
-        return 200
+        return 204

 class DatasetMetadataBuiltInFieldServiceApi(DatasetApiResource):

View File

@@ -117,14 +117,13 @@ class SegmentApi(DatasetApiResource):
         parser.add_argument("keyword", type=str, default=None, location="args")
         args = parser.parse_args()

-        status_list = args["status"]
-        keyword = args["keyword"]
-
         segments, total = SegmentService.get_segments(
             document_id=document_id,
             tenant_id=current_user.current_tenant_id,
             status_list=args["status"],
             keyword=args["keyword"],
+            page=page,
+            limit=limit,
         )

         response = {

@@ -160,7 +159,7 @@ class DatasetSegmentApi(DatasetApiResource):
         if not segment:
             raise NotFound("Segment not found.")
         SegmentService.delete_segment(segment, document, dataset)
-        return {"result": "success"}, 200
+        return {"result": "success"}, 204

     @cloud_edition_billing_resource_check("vector_space", "dataset")
     def post(self, tenant_id, dataset_id, document_id, segment_id):

@@ -345,7 +344,7 @@ class DatasetChildChunkApi(DatasetApiResource):
         except ChildChunkDeleteIndexServiceError as e:
             raise ChildChunkDeleteIndexError(str(e))

-        return {"result": "success"}, 200
+        return {"result": "success"}, 204

     @cloud_edition_billing_resource_check("vector_space", "dataset")
     @cloud_edition_billing_knowledge_limit_check("add_segment", "dataset")

View File

@@ -1,10 +1,10 @@
 from flask_restful import marshal_with  # type: ignore

 from controllers.common import fields
-from controllers.common import helpers as controller_helpers
 from controllers.web import api
 from controllers.web.error import AppUnavailableError
 from controllers.web.wraps import WebApiResource
+from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict
 from models.model import App, AppMode
 from services.app_service import AppService

@@ -31,9 +31,7 @@ class AppParameterApi(WebApiResource):
         user_input_form = features_dict.get("user_input_form", [])

-        return controller_helpers.get_parameters_from_feature_dict(
-            features_dict=features_dict, user_input_form=user_input_form
-        )
+        return get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form)

 class AppMeta(WebApiResource):

View File

@@ -46,6 +46,7 @@ class MessageListApi(WebApiResource):
         "retriever_resources": fields.List(fields.Nested(retriever_resource_fields)),
         "created_at": TimestampField,
         "agent_thoughts": fields.List(fields.Nested(agent_thought_fields)),
+        "metadata": fields.Raw(attribute="message_metadata_dict"),
         "status": fields.String,
         "error": fields.String,
     }
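The added "metadata" entry relies on flask_restful's attribute= mapping: the serializer reads message_metadata_dict off the model but exposes it to clients under the key metadata, and fields.Raw passes the value through without coercion. A self-contained illustration; the stand-in class below is not the real Message model:

from flask_restful import fields, marshal

message_fields = {
    # read obj.message_metadata_dict, emit it under the key "metadata"
    "metadata": fields.Raw(attribute="message_metadata_dict"),
}

class FakeMessage:  # stand-in for the real model, for illustration only
    message_metadata_dict = {"usage": {"total_tokens": 42}}

print(dict(marshal(FakeMessage(), message_fields)))
# {'metadata': {'usage': {'total_tokens': 42}}}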

View File

@@ -67,7 +67,7 @@ class SavedMessageApi(WebApiResource):
         SavedMessageService.delete(app_model, end_user, message_id)

-        return {"result": "success"}
+        return {"result": "success"}, 204


 api.add_resource(SavedMessageListApi, "/saved-messages")

View File

@@ -21,14 +21,13 @@ from core.model_runtime.entities import (
     AssistantPromptMessage,
     LLMUsage,
     PromptMessage,
-    PromptMessageContent,
     PromptMessageTool,
     SystemPromptMessage,
     TextPromptMessageContent,
     ToolPromptMessage,
     UserPromptMessage,
 )
-from core.model_runtime.entities.message_entities import ImagePromptMessageContent
+from core.model_runtime.entities.message_entities import ImagePromptMessageContent, PromptMessageContentUnionTypes
 from core.model_runtime.entities.model_entities import ModelFeature
 from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
 from core.prompt.utils.extract_thread_messages import extract_thread_messages
@@ -501,7 +500,7 @@ class BaseAgentRunner(AppRunner):
         )
         if not file_objs:
             return UserPromptMessage(content=message.query)
-        prompt_message_contents: list[PromptMessageContent] = []
+        prompt_message_contents: list[PromptMessageContentUnionTypes] = []
         prompt_message_contents.append(TextPromptMessageContent(data=message.query))
         for file in file_objs:
             prompt_message_contents.append(
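Swapping the annotation from the PromptMessageContent base class to PromptMessageContentUnionTypes suggests the content variants are now modeled as a closed, tagged union, which lets pydantic and type checkers dispatch on the concrete type. A generic sketch of that pattern; the member types and discriminator below are assumptions, not Dify's actual definitions:

from typing import Annotated, Literal, Union

from pydantic import BaseModel, Field

class TextContent(BaseModel):
    type: Literal["text"] = "text"
    data: str

class ImageContent(BaseModel):
    type: Literal["image"] = "image"
    url: str

# a closed union discriminated by the "type" tag: validation picks the
# matching variant instead of accepting any subclass of a common base
ContentUnionTypes = Annotated[Union[TextContent, ImageContent], Field(discriminator="type")]

contents: list[ContentUnionTypes] = [TextContent(data="hello")]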

View File

@@ -191,7 +191,7 @@ class CotAgentRunner(BaseAgentRunner, ABC):
             # action is final answer, return final answer directly
             try:
                 if isinstance(scratchpad.action.action_input, dict):
-                    final_answer = json.dumps(scratchpad.action.action_input)
+                    final_answer = json.dumps(scratchpad.action.action_input, ensure_ascii=False)
                 elif isinstance(scratchpad.action.action_input, str):
                     final_answer = scratchpad.action.action_input
                 else:
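Adding ensure_ascii=False keeps non-ASCII characters (CJK text in particular) readable in the serialized final answer instead of escaping them to \uXXXX sequences. The difference in two lines:

import json

payload = {"answer": "你好"}
print(json.dumps(payload))                      # {"answer": "\u4f60\u597d"}
print(json.dumps(payload, ensure_ascii=False))  # {"answer": "你好"}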

View File

@@ -5,12 +5,11 @@ from core.file import file_manager
 from core.model_runtime.entities import (
     AssistantPromptMessage,
     PromptMessage,
-    PromptMessageContent,
     SystemPromptMessage,
     TextPromptMessageContent,
     UserPromptMessage,
 )
-from core.model_runtime.entities.message_entities import ImagePromptMessageContent
+from core.model_runtime.entities.message_entities import ImagePromptMessageContent, PromptMessageContentUnionTypes
 from core.model_runtime.utils.encoders import jsonable_encoder
@@ -40,7 +39,7 @@ class CotChatAgentRunner(CotAgentRunner):
         Organize user query
         """
         if self.files:
-            prompt_message_contents: list[PromptMessageContent] = []
+            prompt_message_contents: list[PromptMessageContentUnionTypes] = []
             prompt_message_contents.append(TextPromptMessageContent(data=query))
             # get image detail config

View File

@@ -15,14 +15,13 @@ from core.model_runtime.entities import (
     LLMResultChunkDelta,
     LLMUsage,
     PromptMessage,
-    PromptMessageContent,
     PromptMessageContentType,
     SystemPromptMessage,
     TextPromptMessageContent,
     ToolPromptMessage,
     UserPromptMessage,
 )
-from core.model_runtime.entities.message_entities import ImagePromptMessageContent
+from core.model_runtime.entities.message_entities import ImagePromptMessageContent, PromptMessageContentUnionTypes
 from core.prompt.agent_history_prompt_transform import AgentHistoryPromptTransform
 from core.tools.entities.tool_entities import ToolInvokeMeta
 from core.tools.tool_engine import ToolEngine
@@ -395,7 +394,7 @@ class FunctionCallAgentRunner(BaseAgentRunner):
         Organize user query
         """
         if self.files:
-            prompt_message_contents: list[PromptMessageContent] = []
+            prompt_message_contents: list[PromptMessageContentUnionTypes] = []
             prompt_message_contents.append(TextPromptMessageContent(data=query))
             # get image detail config

View File

@@ -52,6 +52,7 @@ class AgentStrategyParameter(PluginParameter):
         return cast_parameter_value(self, value)

     type: AgentStrategyParameterType = Field(..., description="The type of the parameter")
+    help: Optional[I18nObject] = None

     def init_frontend_parameter(self, value: Any):
         return init_frontend_parameter(self, self.type, value)

View File

@@ -4,7 +4,7 @@ from typing import Any, Optional
 from core.agent.entities import AgentInvokeMessage
 from core.agent.plugin_entities import AgentStrategyEntity, AgentStrategyParameter
 from core.agent.strategy.base import BaseAgentStrategy
-from core.plugin.manager.agent import PluginAgentManager
+from core.plugin.impl.agent import PluginAgentClient
 from core.plugin.utils.converter import convert_parameters_to_plugin_format
@@ -42,7 +42,7 @@ class PluginAgentStrategy(BaseAgentStrategy):
         """
         Invoke the agent strategy.
         """
-        manager = PluginAgentManager()
+        manager = PluginAgentClient()
         initialized_params = self.initialize_parameters(params)
         params = convert_parameters_to_plugin_format(initialized_params)

View File

@@ -0,0 +1,45 @@
+from collections.abc import Mapping
+from typing import Any
+
+from configs import dify_config
+from constants import DEFAULT_FILE_NUMBER_LIMITS
+
+
+def get_parameters_from_feature_dict(
+    *, features_dict: Mapping[str, Any], user_input_form: list[dict[str, Any]]
+) -> Mapping[str, Any]:
+    """
+    Mapping from feature dict to webapp parameters
+    """
+    return {
+        "opening_statement": features_dict.get("opening_statement"),
+        "suggested_questions": features_dict.get("suggested_questions", []),
+        "suggested_questions_after_answer": features_dict.get("suggested_questions_after_answer", {"enabled": False}),
+        "speech_to_text": features_dict.get("speech_to_text", {"enabled": False}),
+        "text_to_speech": features_dict.get("text_to_speech", {"enabled": False}),
+        "retriever_resource": features_dict.get("retriever_resource", {"enabled": False}),
+        "annotation_reply": features_dict.get("annotation_reply", {"enabled": False}),
+        "more_like_this": features_dict.get("more_like_this", {"enabled": False}),
+        "user_input_form": user_input_form,
+        "sensitive_word_avoidance": features_dict.get(
+            "sensitive_word_avoidance", {"enabled": False, "type": "", "configs": []}
+        ),
+        "file_upload": features_dict.get(
+            "file_upload",
+            {
+                "image": {
+                    "enabled": False,
+                    "number_limits": DEFAULT_FILE_NUMBER_LIMITS,
+                    "detail": "high",
+                    "transfer_methods": ["remote_url", "local_file"],
+                }
+            },
+        ),
+        "system_parameters": {
+            "image_file_size_limit": dify_config.UPLOAD_IMAGE_FILE_SIZE_LIMIT,
+            "video_file_size_limit": dify_config.UPLOAD_VIDEO_FILE_SIZE_LIMIT,
+            "audio_file_size_limit": dify_config.UPLOAD_AUDIO_FILE_SIZE_LIMIT,
+            "file_size_limit": dify_config.UPLOAD_FILE_SIZE_LIMIT,
+            "workflow_file_upload_limit": dify_config.WORKFLOW_FILE_UPLOAD_LIMIT,
+        },
+    }
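Moving this mapping into core lets the web and service-API controllers share one definition, as the AppParameterApi hunk above shows. A brief usage sketch; the feature dict contents below are illustrative, and the snippet assumes the import from the new module:

from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict

# hypothetical call site; features_dict would normally come from the app's config
features_dict = {"opening_statement": "Hi!", "suggested_questions": ["What can you do?"]}
params = get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=[])
assert params["speech_to_text"] == {"enabled": False}  # unset features fall back to defaults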

View File

@@ -1,6 +1,7 @@
 from collections.abc import Mapping
 from typing import Any

+from constants import DEFAULT_FILE_NUMBER_LIMITS
 from core.file import FileUploadConfig
@@ -18,7 +19,7 @@ class FileUploadConfigManager:
         if file_upload_dict.get("enabled"):
             transform_methods = file_upload_dict.get("allowed_file_upload_methods", [])
             file_upload_dict["image_config"] = {
-                "number_limits": file_upload_dict.get("number_limits", DEFAULT_FILE_NUMBER_LIMITS),
+                "number_limits": file_upload_dict.get("number_limits", DEFAULT_FILE_NUMBER_LIMITS),
                 "transfer_methods": transform_methods,
             }
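Replacing the hard-coded 1 with DEFAULT_FILE_NUMBER_LIMITS moves the fallback into a shared constant; dict.get applies it only when the key is absent. A two-line illustration; the constant's real value is not shown in this diff, so the 3 below is an assumption:

DEFAULT_FILE_NUMBER_LIMITS = 3  # assumed value, for illustration only
print({}.get("number_limits", DEFAULT_FILE_NUMBER_LIMITS))  # 3: default used when key is missing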

Some files were not shown because too many files have changed in this diff.