Merge branch 'feat/mcp' into deploy/dev

This commit is contained in:
Novice 2025-05-28 13:56:26 +08:00
commit 946d39af74
130 changed files with 1263 additions and 817 deletions

View File

@ -7,6 +7,7 @@ pipx install uv
echo 'alias start-api="cd /workspaces/dify/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc echo 'alias start-api="cd /workspaces/dify/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
echo 'alias start-worker="cd /workspaces/dify/api && uv run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc echo 'alias start-worker="cd /workspaces/dify/api && uv run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc
echo 'alias start-web="cd /workspaces/dify/web && pnpm dev"' >> ~/.bashrc echo 'alias start-web="cd /workspaces/dify/web && pnpm dev"' >> ~/.bashrc
echo 'alias start-web-prod="cd /workspaces/dify/web && pnpm build && pnpm start"' >> ~/.bashrc
echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d"' >> ~/.bashrc echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d"' >> ~/.bashrc
echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down"' >> ~/.bashrc echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down"' >> ~/.bashrc

View File

@ -31,11 +31,19 @@ jobs:
echo "FILES_CHANGED=false" >> $GITHUB_ENV echo "FILES_CHANGED=false" >> $GITHUB_ENV
fi fi
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
run_install: false
- name: Set up Node.js - name: Set up Node.js
if: env.FILES_CHANGED == 'true' if: env.FILES_CHANGED == 'true'
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
node-version: 'lts/*' node-version: 'lts/*'
cache: pnpm
cache-dependency-path: ./web/package.json
- name: Install dependencies - name: Install dependencies
if: env.FILES_CHANGED == 'true' if: env.FILES_CHANGED == 'true'

View File

@ -235,7 +235,7 @@ At the same time, please consider supporting Dify by sharing it on social media
## Community & contact ## Community & contact
- [Github Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions. - [GitHub Discussion](https://github.com/langgenius/dify/discussions). Best for: sharing feedback and asking questions.
- [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). - [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
- [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community. - [Discord](https://discord.gg/FngNHpbcY7). Best for: sharing your applications and hanging out with the community.
- [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community. - [X(Twitter)](https://twitter.com/dify_ai). Best for: sharing your applications and hanging out with the community.

View File

@ -223,7 +223,7 @@ docker compose up -d
</a> </a>
## المجتمع والاتصال ## المجتمع والاتصال
- [مناقشة Github](https://github.com/langgenius/dify/discussions). الأفضل لـ: مشاركة التعليقات وطرح الأسئلة. - [مناقشة GitHub](https://github.com/langgenius/dify/discussions). الأفضل لـ: مشاركة التعليقات وطرح الأسئلة.
- [المشكلات على GitHub](https://github.com/langgenius/dify/issues). الأفضل لـ: الأخطاء التي تواجهها في استخدام Dify.AI، واقتراحات الميزات. انظر [دليل المساهمة](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). - [المشكلات على GitHub](https://github.com/langgenius/dify/issues). الأفضل لـ: الأخطاء التي تواجهها في استخدام Dify.AI، واقتراحات الميزات. انظر [دليل المساهمة](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
- [Discord](https://discord.gg/FngNHpbcY7). الأفضل لـ: مشاركة تطبيقاتك والترفيه مع المجتمع. - [Discord](https://discord.gg/FngNHpbcY7). الأفضل لـ: مشاركة تطبيقاتك والترفيه مع المجتمع.
- [تويتر](https://twitter.com/dify_ai). الأفضل لـ: مشاركة تطبيقاتك والترفيه مع المجتمع. - [تويتر](https://twitter.com/dify_ai). الأفضل لـ: مشاركة تطبيقاتك والترفيه مع المجتمع.

View File

@ -234,7 +234,7 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন
## কমিউনিটি এবং যোগাযোগ ## কমিউনিটি এবং যোগাযোগ
- [Github Discussion](https://github.com/langgenius/dify/discussions) ফিডব্যাক এবং প্রতিক্রিয়া জানানোর মাধ্যম। - [GitHub Discussion](https://github.com/langgenius/dify/discussions) ফিডব্যাক এবং প্রতিক্রিয়া জানানোর মাধ্যম।
- [GitHub Issues](https://github.com/langgenius/dify/issues). Dify.AI ব্যবহার করে আপনি যেসব বাগের সম্মুখীন হন এবং ফিচার প্রস্তাবনা। আমাদের [অবদান নির্দেশিকা](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) দেখুন। - [GitHub Issues](https://github.com/langgenius/dify/issues). Dify.AI ব্যবহার করে আপনি যেসব বাগের সম্মুখীন হন এবং ফিচার প্রস্তাবনা। আমাদের [অবদান নির্দেশিকা](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) দেখুন।
- [Discord](https://discord.gg/FngNHpbcY7) আপনার এপ্লিকেশন শেয়ার এবং কমিউনিটি আড্ডার মাধ্যম। - [Discord](https://discord.gg/FngNHpbcY7) আপনার এপ্লিকেশন শেয়ার এবং কমিউনিটি আড্ডার মাধ্যম।
- [X(Twitter)](https://twitter.com/dify_ai) আপনার এপ্লিকেশন শেয়ার এবং কমিউনিটি আড্ডার মাধ্যম। - [X(Twitter)](https://twitter.com/dify_ai) আপনার এপ্লিকেশন শেয়ার এবং কমিউনিটি আড্ডার মাধ্যম।

View File

@ -243,7 +243,7 @@ docker compose up -d
我们欢迎您为 Dify 做出贡献,以帮助改善 Dify。包括提交代码、问题、新想法或分享您基于 Dify 创建的有趣且有用的 AI 应用程序。同时,我们也欢迎您在不同的活动、会议和社交媒体上分享 Dify。 我们欢迎您为 Dify 做出贡献,以帮助改善 Dify。包括提交代码、问题、新想法或分享您基于 Dify 创建的有趣且有用的 AI 应用程序。同时,我们也欢迎您在不同的活动、会议和社交媒体上分享 Dify。
- [Github Discussion](https://github.com/langgenius/dify/discussions). 👉:分享您的应用程序并与社区交流。 - [GitHub Discussion](https://github.com/langgenius/dify/discussions). 👉:分享您的应用程序并与社区交流。
- [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](CONTRIBUTING.md)。 - [GitHub Issues](https://github.com/langgenius/dify/issues)。👉:使用 Dify.AI 时遇到的错误和问题,请参阅[贡献指南](CONTRIBUTING.md)。
- [电子邮件支持](mailto:hello@dify.ai?subject=[GitHub]Questions%20About%20Dify)。👉:关于使用 Dify.AI 的问题。 - [电子邮件支持](mailto:hello@dify.ai?subject=[GitHub]Questions%20About%20Dify)。👉:关于使用 Dify.AI 的问题。
- [Discord](https://discord.gg/FngNHpbcY7)。👉:分享您的应用程序并与社区交流。 - [Discord](https://discord.gg/FngNHpbcY7)。👉:分享您的应用程序并与社区交流。

View File

@ -230,7 +230,7 @@ Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](
## Gemeinschaft & Kontakt ## Gemeinschaft & Kontakt
* [Github Discussion](https://github.com/langgenius/dify/discussions). Am besten geeignet für: den Austausch von Feedback und das Stellen von Fragen. * [GitHub Discussion](https://github.com/langgenius/dify/discussions). Am besten geeignet für: den Austausch von Feedback und das Stellen von Fragen.
* [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). * [GitHub Issues](https://github.com/langgenius/dify/issues). Am besten für: Fehler, auf die Sie bei der Verwendung von Dify.AI stoßen, und Funktionsvorschläge. Siehe unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [Discord](https://discord.gg/FngNHpbcY7). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community. * [Discord](https://discord.gg/FngNHpbcY7). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community.
* [X(Twitter)](https://twitter.com/dify_ai). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community. * [X(Twitter)](https://twitter.com/dify_ai). Am besten geeignet für: den Austausch von Bewerbungen und den Austausch mit der Community.

View File

@ -236,7 +236,7 @@ docker compose up -d
## コミュニティ & お問い合わせ ## コミュニティ & お問い合わせ
* [Github Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。 * [GitHub Discussion](https://github.com/langgenius/dify/discussions). 主に: フィードバックの共有や質問。
* [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](CONTRIBUTING_JA.md)を参照してください * [GitHub Issues](https://github.com/langgenius/dify/issues). 主に: Dify.AIを使用する際に発生するエラーや問題については、[貢献ガイド](CONTRIBUTING_JA.md)を参照してください
* [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。 * [Discord](https://discord.gg/FngNHpbcY7). 主に: アプリケーションの共有やコミュニティとの交流。
* [X(Twitter)](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。 * [X(Twitter)](https://twitter.com/dify_ai). 主に: アプリケーションの共有やコミュニティとの交流。

View File

@ -235,7 +235,7 @@ At the same time, please consider supporting Dify by sharing it on social media
## Community & Contact ## Community & Contact
* [Github Discussion](https://github.com/langgenius/dify/discussions * [GitHub Discussion](https://github.com/langgenius/dify/discussions
). Best for: sharing feedback and asking questions. ). Best for: sharing feedback and asking questions.
* [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). * [GitHub Issues](https://github.com/langgenius/dify/issues). Best for: bugs you encounter using Dify.AI, and feature proposals. See our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).

View File

@ -229,7 +229,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
## 커뮤니티 & 연락처 ## 커뮤니티 & 연락처
* [Github 토론](https://github.com/langgenius/dify/discussions). 피드백 공유 및 질문하기에 적합합니다. * [GitHub 토론](https://github.com/langgenius/dify/discussions). 피드백 공유 및 질문하기에 적합합니다.
* [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요. * [GitHub 이슈](https://github.com/langgenius/dify/issues). Dify.AI 사용 중 발견한 버그와 기능 제안에 적합합니다. [기여 가이드](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)를 참조하세요.
* [디스코드](https://discord.gg/FngNHpbcY7). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다. * [디스코드](https://discord.gg/FngNHpbcY7). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.
* [트위터](https://twitter.com/dify_ai). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다. * [트위터](https://twitter.com/dify_ai). 애플리케이션 공유 및 커뮤니티와 소통하기에 적합합니다.

View File

@ -229,7 +229,7 @@ Za tiste, ki bi radi prispevali kodo, si oglejte naš vodnik za prispevke . Hkra
## Skupnost in stik ## Skupnost in stik
* [Github Discussion](https://github.com/langgenius/dify/discussions). Najboljše za: izmenjavo povratnih informacij in postavljanje vprašanj. * [GitHub Discussion](https://github.com/langgenius/dify/discussions). Najboljše za: izmenjavo povratnih informacij in postavljanje vprašanj.
* [GitHub Issues](https://github.com/langgenius/dify/issues). Najboljše za: hrošče, na katere naletite pri uporabi Dify.AI, in predloge funkcij. Oglejte si naš [vodnik za prispevke](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). * [GitHub Issues](https://github.com/langgenius/dify/issues). Najboljše za: hrošče, na katere naletite pri uporabi Dify.AI, in predloge funkcij. Oglejte si naš [vodnik za prispevke](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
* [Discord](https://discord.gg/FngNHpbcY7). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo. * [Discord](https://discord.gg/FngNHpbcY7). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo.
* [X(Twitter)](https://twitter.com/dify_ai). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo. * [X(Twitter)](https://twitter.com/dify_ai). Najboljše za: deljenje vaših aplikacij in druženje s skupnostjo.

View File

@ -227,7 +227,7 @@ Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda p
## Topluluk & iletişim ## Topluluk & iletişim
* [Github Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için. * [GitHub Tartışmaları](https://github.com/langgenius/dify/discussions). En uygun: geri bildirim paylaşmak ve soru sormak için.
* [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakın. * [GitHub Sorunları](https://github.com/langgenius/dify/issues). En uygun: Dify.AI kullanırken karşılaştığınız hatalar ve özellik önerileri için. [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakın.
* [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. * [Discord](https://discord.gg/FngNHpbcY7). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.
* [X(Twitter)](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için. * [X(Twitter)](https://twitter.com/dify_ai). En uygun: uygulamalarınızı paylaşmak ve toplulukla vakit geçirmek için.

View File

@ -233,7 +233,7 @@ Dify 的所有功能都提供相應的 API因此您可以輕鬆地將 Dify
## 社群與聯絡方式 ## 社群與聯絡方式
- [Github Discussion](https://github.com/langgenius/dify/discussions):最適合分享反饋和提問。 - [GitHub Discussion](https://github.com/langgenius/dify/discussions):最適合分享反饋和提問。
- [GitHub Issues](https://github.com/langgenius/dify/issues):最適合報告使用 Dify.AI 時遇到的問題和提出功能建議。請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。 - [GitHub Issues](https://github.com/langgenius/dify/issues):最適合報告使用 Dify.AI 時遇到的問題和提出功能建議。請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。
- [Discord](https://discord.gg/FngNHpbcY7):最適合分享您的應用程式並與社群互動。 - [Discord](https://discord.gg/FngNHpbcY7):最適合分享您的應用程式並與社群互動。
- [X(Twitter)](https://twitter.com/dify_ai):最適合分享您的應用程式並與社群互動。 - [X(Twitter)](https://twitter.com/dify_ai):最適合分享您的應用程式並與社群互動。

View File

@ -152,6 +152,7 @@ QDRANT_API_KEY=difyai123456
QDRANT_CLIENT_TIMEOUT=20 QDRANT_CLIENT_TIMEOUT=20
QDRANT_GRPC_ENABLED=false QDRANT_GRPC_ENABLED=false
QDRANT_GRPC_PORT=6334 QDRANT_GRPC_PORT=6334
QDRANT_REPLICATION_FACTOR=1
#Couchbase configuration #Couchbase configuration
COUCHBASE_CONNECTION_STRING=127.0.0.1 COUCHBASE_CONNECTION_STRING=127.0.0.1

View File

@ -33,3 +33,8 @@ class QdrantConfig(BaseSettings):
description="Port number for gRPC connection to Qdrant server (default is 6334)", description="Port number for gRPC connection to Qdrant server (default is 6334)",
default=6334, default=6334,
) )
QDRANT_REPLICATION_FACTOR: PositiveInt = Field(
description="Replication factor for Qdrant collections (default is 1)",
default=1,
)

View File

@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings):
CURRENT_VERSION: str = Field( CURRENT_VERSION: str = Field(
description="Dify version", description="Dify version",
default="1.4.0", default="1.4.1",
) )
COMMIT_SHA: str = Field( COMMIT_SHA: str = Field(

View File

@ -12,10 +12,6 @@ if TYPE_CHECKING:
from core.workflow.entities.variable_pool import VariablePool from core.workflow.entities.variable_pool import VariablePool
tenant_id: ContextVar[str] = ContextVar("tenant_id")
workflow_variable_pool: ContextVar["VariablePool"] = ContextVar("workflow_variable_pool")
""" """
To avoid race-conditions caused by gunicorn thread recycling, using RecyclableContextVar to replace with To avoid race-conditions caused by gunicorn thread recycling, using RecyclableContextVar to replace with
""" """

View File

@ -53,7 +53,6 @@ class AppMCPServerController(Resource):
) )
db.session.add(server) db.session.add(server)
db.session.commit() db.session.commit()
return server return server
@setup_required @setup_required
@ -68,12 +67,17 @@ class AppMCPServerController(Resource):
parser.add_argument("id", type=str, required=True, location="json") parser.add_argument("id", type=str, required=True, location="json")
parser.add_argument("description", type=str, required=True, location="json") parser.add_argument("description", type=str, required=True, location="json")
parser.add_argument("parameters", type=dict, required=True, location="json") parser.add_argument("parameters", type=dict, required=True, location="json")
parser.add_argument("status", type=str, required=False, location="json")
args = parser.parse_args() args = parser.parse_args()
server = db.session.query(AppMCPServer).filter(AppMCPServer.id == args["id"]).first() server = db.session.query(AppMCPServer).filter(AppMCPServer.id == args["id"]).first()
if not server: if not server:
raise Forbidden() raise Forbidden()
server.description = args["description"] server.description = args["description"]
server.parameters = json.dumps(args["parameters"], ensure_ascii=False) server.parameters = json.dumps(args["parameters"], ensure_ascii=False)
if args["status"]:
if args["status"] not in [status.value for status in AppMCPServerStatus]:
raise ValueError("Invalid status")
server.status = args["status"]
db.session.commit() db.session.commit()
return server return server

View File

@ -41,12 +41,16 @@ class PluginListApi(Resource):
@account_initialization_required @account_initialization_required
def get(self): def get(self):
tenant_id = current_user.current_tenant_id tenant_id = current_user.current_tenant_id
parser = reqparse.RequestParser()
parser.add_argument("page", type=int, required=False, location="args", default=1)
parser.add_argument("page_size", type=int, required=False, location="args", default=256)
args = parser.parse_args()
try: try:
plugins = PluginService.list(tenant_id) plugins_with_total = PluginService.list_with_total(tenant_id, args["page"], args["page_size"])
except PluginDaemonClientSideError as e: except PluginDaemonClientSideError as e:
raise ValueError(e) raise ValueError(e)
return jsonable_encoder({"plugins": plugins}) return jsonable_encoder({"plugins": plugins_with_total.list, "total": plugins_with_total.total})
class PluginListLatestVersionsApi(Resource): class PluginListLatestVersionsApi(Resource):

View File

@ -3,7 +3,7 @@ from flask_restful import Resource, marshal, marshal_with, reqparse
from werkzeug.exceptions import Forbidden from werkzeug.exceptions import Forbidden
from controllers.service_api import api from controllers.service_api import api
from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate_app_token from controllers.service_api.wraps import validate_app_token
from extensions.ext_redis import redis_client from extensions.ext_redis import redis_client
from fields.annotation_fields import ( from fields.annotation_fields import (
annotation_fields, annotation_fields,
@ -14,7 +14,7 @@ from services.annotation_service import AppAnnotationService
class AnnotationReplyActionApi(Resource): class AnnotationReplyActionApi(Resource):
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON)) @validate_app_token
def post(self, app_model: App, end_user: EndUser, action): def post(self, app_model: App, end_user: EndUser, action):
parser = reqparse.RequestParser() parser = reqparse.RequestParser()
parser.add_argument("score_threshold", required=True, type=float, location="json") parser.add_argument("score_threshold", required=True, type=float, location="json")
@ -31,7 +31,7 @@ class AnnotationReplyActionApi(Resource):
class AnnotationReplyActionStatusApi(Resource): class AnnotationReplyActionStatusApi(Resource):
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY)) @validate_app_token
def get(self, app_model: App, end_user: EndUser, job_id, action): def get(self, app_model: App, end_user: EndUser, job_id, action):
job_id = str(job_id) job_id = str(job_id)
app_annotation_job_key = "{}_app_annotation_job_{}".format(action, str(job_id)) app_annotation_job_key = "{}_app_annotation_job_{}".format(action, str(job_id))
@ -49,7 +49,7 @@ class AnnotationReplyActionStatusApi(Resource):
class AnnotationListApi(Resource): class AnnotationListApi(Resource):
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY)) @validate_app_token
def get(self, app_model: App, end_user: EndUser): def get(self, app_model: App, end_user: EndUser):
page = request.args.get("page", default=1, type=int) page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int) limit = request.args.get("limit", default=20, type=int)
@ -65,7 +65,7 @@ class AnnotationListApi(Resource):
} }
return response, 200 return response, 200
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON)) @validate_app_token
@marshal_with(annotation_fields) @marshal_with(annotation_fields)
def post(self, app_model: App, end_user: EndUser): def post(self, app_model: App, end_user: EndUser):
parser = reqparse.RequestParser() parser = reqparse.RequestParser()
@ -77,7 +77,7 @@ class AnnotationListApi(Resource):
class AnnotationUpdateDeleteApi(Resource): class AnnotationUpdateDeleteApi(Resource):
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.JSON)) @validate_app_token
@marshal_with(annotation_fields) @marshal_with(annotation_fields)
def put(self, app_model: App, end_user: EndUser, annotation_id): def put(self, app_model: App, end_user: EndUser, annotation_id):
if not current_user.is_editor: if not current_user.is_editor:
@ -91,7 +91,7 @@ class AnnotationUpdateDeleteApi(Resource):
annotation = AppAnnotationService.update_app_annotation_directly(args, app_model.id, annotation_id) annotation = AppAnnotationService.update_app_annotation_directly(args, app_model.id, annotation_id)
return annotation return annotation
@validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.QUERY)) @validate_app_token
def delete(self, app_model: App, end_user: EndUser, annotation_id): def delete(self, app_model: App, end_user: EndUser, annotation_id):
if not current_user.is_editor: if not current_user.is_editor:
raise Forbidden() raise Forbidden()

View File

@ -99,7 +99,12 @@ def validate_app_token(view: Optional[Callable] = None, *, fetch_user_arg: Optio
if user_id: if user_id:
user_id = str(user_id) user_id = str(user_id)
kwargs["end_user"] = create_or_update_end_user_for_user_id(app_model, user_id) end_user = create_or_update_end_user_for_user_id(app_model, user_id)
kwargs["end_user"] = end_user
# Set EndUser as current logged-in user for flask_login.current_user
current_app.login_manager._update_request_context_with_user(end_user) # type: ignore
user_logged_in.send(current_app._get_current_object(), user=end_user) # type: ignore
return view_func(*args, **kwargs) return view_func(*args, **kwargs)

View File

@ -5,7 +5,7 @@ import uuid
from collections.abc import Generator, Mapping from collections.abc import Generator, Mapping
from typing import Any, Literal, Optional, Union, overload from typing import Any, Literal, Optional, Union, overload
from flask import Flask, current_app from flask import Flask, copy_current_request_context, current_app, has_request_context
from pydantic import ValidationError from pydantic import ValidationError
from sqlalchemy.orm import sessionmaker from sqlalchemy.orm import sessionmaker
@ -158,7 +158,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
trace_manager=trace_manager, trace_manager=trace_manager,
workflow_run_id=workflow_run_id, workflow_run_id=workflow_run_id,
) )
contexts.tenant_id.set(application_generate_entity.app_config.tenant_id)
contexts.plugin_tool_providers.set({}) contexts.plugin_tool_providers.set({})
contexts.plugin_tool_providers_lock.set(threading.Lock()) contexts.plugin_tool_providers_lock.set(threading.Lock())
@ -240,7 +239,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
node_id=node_id, inputs=args["inputs"] node_id=node_id, inputs=args["inputs"]
), ),
) )
contexts.tenant_id.set(application_generate_entity.app_config.tenant_id)
contexts.plugin_tool_providers.set({}) contexts.plugin_tool_providers.set({})
contexts.plugin_tool_providers_lock.set(threading.Lock()) contexts.plugin_tool_providers_lock.set(threading.Lock())
@ -316,7 +314,6 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
extras={"auto_generate_conversation_name": False}, extras={"auto_generate_conversation_name": False},
single_loop_run=AdvancedChatAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args["inputs"]), single_loop_run=AdvancedChatAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args["inputs"]),
) )
contexts.tenant_id.set(application_generate_entity.app_config.tenant_id)
contexts.plugin_tool_providers.set({}) contexts.plugin_tool_providers.set({})
contexts.plugin_tool_providers_lock.set(threading.Lock()) contexts.plugin_tool_providers_lock.set(threading.Lock())
@ -399,18 +396,23 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
message_id=message.id, message_id=message.id,
) )
# new thread # new thread with request context and contextvars
worker_thread = threading.Thread( context = contextvars.copy_context()
target=self._generate_worker,
kwargs={ @copy_current_request_context
"flask_app": current_app._get_current_object(), # type: ignore def worker_with_context():
"application_generate_entity": application_generate_entity, # Run the worker within the copied context
"queue_manager": queue_manager, return context.run(
"conversation_id": conversation.id, self._generate_worker,
"message_id": message.id, flask_app=current_app._get_current_object(), # type: ignore
"context": contextvars.copy_context(), application_generate_entity=application_generate_entity,
}, queue_manager=queue_manager,
) conversation_id=conversation.id,
message_id=message.id,
context=context,
)
worker_thread = threading.Thread(target=worker_with_context)
worker_thread.start() worker_thread.start()
@ -449,8 +451,22 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
""" """
for var, val in context.items(): for var, val in context.items():
var.set(val) var.set(val)
# FIXME(-LAN-): Save current user before entering new app context
from flask import g
saved_user = None
if has_request_context() and hasattr(g, "_login_user"):
saved_user = g._login_user
with flask_app.app_context(): with flask_app.app_context():
try: try:
# Restore user in new app context
if saved_user is not None:
from flask import g
g._login_user = saved_user
# get conversation and message # get conversation and message
conversation = self._get_conversation(conversation_id) conversation = self._get_conversation(conversation_id)
message = self._get_message(message_id) message = self._get_message(message_id)

View File

@ -315,6 +315,7 @@ class AdvancedChatAppGenerateTaskPipeline:
task_id=self._application_generate_entity.task_id, task_id=self._application_generate_entity.task_id,
workflow_execution=workflow_execution, workflow_execution=workflow_execution,
) )
session.commit()
yield workflow_start_resp yield workflow_start_resp
elif isinstance( elif isinstance(

View File

@ -5,7 +5,7 @@ import uuid
from collections.abc import Generator, Mapping from collections.abc import Generator, Mapping
from typing import Any, Literal, Union, overload from typing import Any, Literal, Union, overload
from flask import Flask, current_app from flask import Flask, copy_current_request_context, current_app, has_request_context
from pydantic import ValidationError from pydantic import ValidationError
from configs import dify_config from configs import dify_config
@ -179,18 +179,23 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
message_id=message.id, message_id=message.id,
) )
# new thread # new thread with request context and contextvars
worker_thread = threading.Thread( context = contextvars.copy_context()
target=self._generate_worker,
kwargs={ @copy_current_request_context
"flask_app": current_app._get_current_object(), # type: ignore def worker_with_context():
"context": contextvars.copy_context(), # Run the worker within the copied context
"application_generate_entity": application_generate_entity, return context.run(
"queue_manager": queue_manager, self._generate_worker,
"conversation_id": conversation.id, flask_app=current_app._get_current_object(), # type: ignore
"message_id": message.id, context=context,
}, application_generate_entity=application_generate_entity,
) queue_manager=queue_manager,
conversation_id=conversation.id,
message_id=message.id,
)
worker_thread = threading.Thread(target=worker_with_context)
worker_thread.start() worker_thread.start()
@ -227,8 +232,21 @@ class AgentChatAppGenerator(MessageBasedAppGenerator):
for var, val in context.items(): for var, val in context.items():
var.set(val) var.set(val)
# FIXME(-LAN-): Save current user before entering new app context
from flask import g
saved_user = None
if has_request_context() and hasattr(g, "_login_user"):
saved_user = g._login_user
with flask_app.app_context(): with flask_app.app_context():
try: try:
# Restore user in new app context
if saved_user is not None:
from flask import g
g._login_user = saved_user
# get conversation and message # get conversation and message
conversation = self._get_conversation(conversation_id) conversation = self._get_conversation(conversation_id)
message = self._get_message(message_id) message = self._get_message(message_id)

View File

@ -4,7 +4,7 @@ import uuid
from collections.abc import Generator, Mapping from collections.abc import Generator, Mapping
from typing import Any, Literal, Union, overload from typing import Any, Literal, Union, overload
from flask import Flask, current_app from flask import Flask, copy_current_request_context, current_app
from pydantic import ValidationError from pydantic import ValidationError
from configs import dify_config from configs import dify_config
@ -170,17 +170,18 @@ class ChatAppGenerator(MessageBasedAppGenerator):
message_id=message.id, message_id=message.id,
) )
# new thread # new thread with request context
worker_thread = threading.Thread( @copy_current_request_context
target=self._generate_worker, def worker_with_context():
kwargs={ return self._generate_worker(
"flask_app": current_app._get_current_object(), # type: ignore flask_app=current_app._get_current_object(), # type: ignore
"application_generate_entity": application_generate_entity, application_generate_entity=application_generate_entity,
"queue_manager": queue_manager, queue_manager=queue_manager,
"conversation_id": conversation.id, conversation_id=conversation.id,
"message_id": message.id, message_id=message.id,
}, )
)
worker_thread = threading.Thread(target=worker_with_context)
worker_thread.start() worker_thread.start()

View File

@ -4,7 +4,7 @@ import uuid
from collections.abc import Generator, Mapping from collections.abc import Generator, Mapping
from typing import Any, Literal, Union, overload from typing import Any, Literal, Union, overload
from flask import Flask, current_app from flask import Flask, copy_current_request_context, current_app
from pydantic import ValidationError from pydantic import ValidationError
from configs import dify_config from configs import dify_config
@ -151,16 +151,17 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
message_id=message.id, message_id=message.id,
) )
# new thread # new thread with request context
worker_thread = threading.Thread( @copy_current_request_context
target=self._generate_worker, def worker_with_context():
kwargs={ return self._generate_worker(
"flask_app": current_app._get_current_object(), # type: ignore flask_app=current_app._get_current_object(), # type: ignore
"application_generate_entity": application_generate_entity, application_generate_entity=application_generate_entity,
"queue_manager": queue_manager, queue_manager=queue_manager,
"message_id": message.id, message_id=message.id,
}, )
)
worker_thread = threading.Thread(target=worker_with_context)
worker_thread.start() worker_thread.start()
@ -313,16 +314,17 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
message_id=message.id, message_id=message.id,
) )
# new thread # new thread with request context
worker_thread = threading.Thread( @copy_current_request_context
target=self._generate_worker, def worker_with_context():
kwargs={ return self._generate_worker(
"flask_app": current_app._get_current_object(), # type: ignore flask_app=current_app._get_current_object(), # type: ignore
"application_generate_entity": application_generate_entity, application_generate_entity=application_generate_entity,
"queue_manager": queue_manager, queue_manager=queue_manager,
"message_id": message.id, message_id=message.id,
}, )
)
worker_thread = threading.Thread(target=worker_with_context)
worker_thread.start() worker_thread.start()

View File

@ -5,7 +5,7 @@ import uuid
from collections.abc import Generator, Mapping, Sequence from collections.abc import Generator, Mapping, Sequence
from typing import Any, Literal, Optional, Union, overload from typing import Any, Literal, Optional, Union, overload
from flask import Flask, current_app from flask import Flask, copy_current_request_context, current_app, has_request_context
from pydantic import ValidationError from pydantic import ValidationError
from sqlalchemy.orm import sessionmaker from sqlalchemy.orm import sessionmaker
@ -135,7 +135,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
workflow_run_id=workflow_run_id, workflow_run_id=workflow_run_id,
) )
contexts.tenant_id.set(application_generate_entity.app_config.tenant_id)
contexts.plugin_tool_providers.set({}) contexts.plugin_tool_providers.set({})
contexts.plugin_tool_providers_lock.set(threading.Lock()) contexts.plugin_tool_providers_lock.set(threading.Lock())
@ -207,17 +206,22 @@ class WorkflowAppGenerator(BaseAppGenerator):
app_mode=app_model.mode, app_mode=app_model.mode,
) )
# new thread # new thread with request context and contextvars
worker_thread = threading.Thread( context = contextvars.copy_context()
target=self._generate_worker,
kwargs={ @copy_current_request_context
"flask_app": current_app._get_current_object(), # type: ignore def worker_with_context():
"application_generate_entity": application_generate_entity, # Run the worker within the copied context
"queue_manager": queue_manager, return context.run(
"context": contextvars.copy_context(), self._generate_worker,
"workflow_thread_pool_id": workflow_thread_pool_id, flask_app=current_app._get_current_object(), # type: ignore
}, application_generate_entity=application_generate_entity,
) queue_manager=queue_manager,
context=context,
workflow_thread_pool_id=workflow_thread_pool_id,
)
worker_thread = threading.Thread(target=worker_with_context)
worker_thread.start() worker_thread.start()
@ -277,7 +281,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
), ),
workflow_run_id=str(uuid.uuid4()), workflow_run_id=str(uuid.uuid4()),
) )
contexts.tenant_id.set(application_generate_entity.app_config.tenant_id)
contexts.plugin_tool_providers.set({}) contexts.plugin_tool_providers.set({})
contexts.plugin_tool_providers_lock.set(threading.Lock()) contexts.plugin_tool_providers_lock.set(threading.Lock())
@ -354,7 +357,6 @@ class WorkflowAppGenerator(BaseAppGenerator):
single_loop_run=WorkflowAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args["inputs"]), single_loop_run=WorkflowAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args["inputs"]),
workflow_run_id=str(uuid.uuid4()), workflow_run_id=str(uuid.uuid4()),
) )
contexts.tenant_id.set(application_generate_entity.app_config.tenant_id)
contexts.plugin_tool_providers.set({}) contexts.plugin_tool_providers.set({})
contexts.plugin_tool_providers_lock.set(threading.Lock()) contexts.plugin_tool_providers_lock.set(threading.Lock())
@ -408,8 +410,22 @@ class WorkflowAppGenerator(BaseAppGenerator):
""" """
for var, val in context.items(): for var, val in context.items():
var.set(val) var.set(val)
# FIXME(-LAN-): Save current user before entering new app context
from flask import g
saved_user = None
if has_request_context() and hasattr(g, "_login_user"):
saved_user = g._login_user
with flask_app.app_context(): with flask_app.app_context():
try: try:
# Restore user in new app context
if saved_user is not None:
from flask import g
g._login_user = saved_user
# workflow app # workflow app
runner = WorkflowAppRunner( runner = WorkflowAppRunner(
application_generate_entity=application_generate_entity, application_generate_entity=application_generate_entity,

View File

@ -51,15 +51,19 @@ class LLMGenerator:
response = cast( response = cast(
LLMResult, LLMResult,
model_instance.invoke_llm( model_instance.invoke_llm(
prompt_messages=list(prompts), model_parameters={"max_tokens": 100, "temperature": 1}, stream=False prompt_messages=list(prompts), model_parameters={"max_tokens": 500, "temperature": 1}, stream=False
), ),
) )
answer = cast(str, response.message.content) answer = cast(str, response.message.content)
cleaned_answer = re.sub(r"^.*(\{.*\}).*$", r"\1", answer, flags=re.DOTALL) cleaned_answer = re.sub(r"^.*(\{.*\}).*$", r"\1", answer, flags=re.DOTALL)
if cleaned_answer is None: if cleaned_answer is None:
return "" return ""
result_dict = json.loads(cleaned_answer) try:
answer = result_dict["Your Output"] result_dict = json.loads(cleaned_answer)
answer = result_dict["Your Output"]
except json.JSONDecodeError as e:
logging.exception("Failed to generate name after answer, use query instead")
answer = query
name = answer.strip() name = answer.strip()
if len(name) > 75: if len(name) > 75:

View File

@ -59,6 +59,7 @@ def start_authorization(
metadata: Optional[OAuthMetadata], metadata: Optional[OAuthMetadata],
client_information: OAuthClientInformation, client_information: OAuthClientInformation,
redirect_url: str, redirect_url: str,
provider_id: str,
) -> tuple[str, str]: ) -> tuple[str, str]:
"""Begins the authorization flow.""" """Begins the authorization flow."""
response_type = "code" response_type = "code"
@ -84,7 +85,7 @@ def start_authorization(
"code_challenge": code_challenge, "code_challenge": code_challenge,
"code_challenge_method": code_challenge_method, "code_challenge_method": code_challenge_method,
"redirect_uri": redirect_url, "redirect_uri": redirect_url,
"state": "/tools?provider_id=" + client_information.client_id, "state": provider_id,
} }
authorization_url = f"{authorization_url}?{urllib.parse.urlencode(params)}" authorization_url = f"{authorization_url}?{urllib.parse.urlencode(params)}"
@ -229,6 +230,7 @@ def auth(
metadata, metadata,
client_information, client_information,
provider.redirect_url, provider.redirect_url,
provider.provider_id,
) )
provider.save_code_verifier(code_verifier) provider.save_code_verifier(code_verifier)

View File

@ -10,7 +10,7 @@ from core.datasource.entities.datasource_entities import DatasourceProviderEntit
from core.model_runtime.entities.model_entities import AIModelEntity from core.model_runtime.entities.model_entities import AIModelEntity
from core.model_runtime.entities.provider_entities import ProviderEntity from core.model_runtime.entities.provider_entities import ProviderEntity
from core.plugin.entities.base import BasePluginEntity from core.plugin.entities.base import BasePluginEntity
from core.plugin.entities.plugin import PluginDeclaration from core.plugin.entities.plugin import PluginDeclaration, PluginEntity
from core.tools.entities.common_entities import I18nObject from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_entities import ToolProviderEntityWithPlugin from core.tools.entities.tool_entities import ToolProviderEntityWithPlugin
@ -175,3 +175,8 @@ class PluginOAuthAuthorizationUrlResponse(BaseModel):
class PluginOAuthCredentialsResponse(BaseModel): class PluginOAuthCredentialsResponse(BaseModel):
credentials: Mapping[str, Any] = Field(description="The credentials of the OAuth.") credentials: Mapping[str, Any] = Field(description="The credentials of the OAuth.")
class PluginListResponse(BaseModel):
list: list[PluginEntity]
total: int

View File

@ -32,7 +32,7 @@ class RequestInvokeTool(BaseModel):
Request to invoke a tool Request to invoke a tool
""" """
tool_type: Literal["builtin", "workflow", "api"] tool_type: Literal["builtin", "workflow", "api", "mcp"]
provider: str provider: str
tool: str tool: str
tool_parameters: dict tool_parameters: dict

View File

@ -9,7 +9,12 @@ from core.plugin.entities.plugin import (
PluginInstallation, PluginInstallation,
PluginInstallationSource, PluginInstallationSource,
) )
from core.plugin.entities.plugin_daemon import PluginInstallTask, PluginInstallTaskStartResponse, PluginUploadResponse from core.plugin.entities.plugin_daemon import (
PluginInstallTask,
PluginInstallTaskStartResponse,
PluginListResponse,
PluginUploadResponse,
)
from core.plugin.impl.base import BasePluginClient from core.plugin.impl.base import BasePluginClient
@ -27,11 +32,20 @@ class PluginInstaller(BasePluginClient):
) )
def list_plugins(self, tenant_id: str) -> list[PluginEntity]: def list_plugins(self, tenant_id: str) -> list[PluginEntity]:
result = self._request_with_plugin_daemon_response(
"GET",
f"plugin/{tenant_id}/management/list",
PluginListResponse,
params={"page": 1, "page_size": 256},
)
return result.list
def list_plugins_with_total(self, tenant_id: str, page: int, page_size: int) -> PluginListResponse:
return self._request_with_plugin_daemon_response( return self._request_with_plugin_daemon_response(
"GET", "GET",
f"plugin/{tenant_id}/management/list", f"plugin/{tenant_id}/management/list",
list[PluginEntity], PluginListResponse,
params={"page": 1, "page_size": 256}, params={"page": page, "page_size": page_size},
) )
def upload_pkg( def upload_pkg(

View File

@ -46,6 +46,7 @@ class QdrantConfig(BaseModel):
root_path: Optional[str] = None root_path: Optional[str] = None
grpc_port: int = 6334 grpc_port: int = 6334
prefer_grpc: bool = False prefer_grpc: bool = False
replication_factor: int = 1
def to_qdrant_params(self): def to_qdrant_params(self):
if self.endpoint and self.endpoint.startswith("path:"): if self.endpoint and self.endpoint.startswith("path:"):
@ -119,11 +120,13 @@ class QdrantVector(BaseVector):
max_indexing_threads=0, max_indexing_threads=0,
on_disk=False, on_disk=False,
) )
self._client.create_collection( self._client.create_collection(
collection_name=collection_name, collection_name=collection_name,
vectors_config=vectors_config, vectors_config=vectors_config,
hnsw_config=hnsw_config, hnsw_config=hnsw_config,
timeout=int(self._client_config.timeout), timeout=int(self._client_config.timeout),
replication_factor=self._client_config.replication_factor,
) )
# create group_id payload index # create group_id payload index
@ -466,5 +469,6 @@ class QdrantVectorFactory(AbstractVectorFactory):
timeout=dify_config.QDRANT_CLIENT_TIMEOUT, timeout=dify_config.QDRANT_CLIENT_TIMEOUT,
grpc_port=dify_config.QDRANT_GRPC_PORT, grpc_port=dify_config.QDRANT_GRPC_PORT,
prefer_grpc=dify_config.QDRANT_GRPC_ENABLED, prefer_grpc=dify_config.QDRANT_GRPC_ENABLED,
replication_factor=dify_config.QDRANT_REPLICATION_FACTOR,
), ),
) )

View File

@ -49,6 +49,7 @@ class TidbOnQdrantConfig(BaseModel):
root_path: Optional[str] = None root_path: Optional[str] = None
grpc_port: int = 6334 grpc_port: int = 6334
prefer_grpc: bool = False prefer_grpc: bool = False
replication_factor: int = 1
def to_qdrant_params(self): def to_qdrant_params(self):
if self.endpoint and self.endpoint.startswith("path:"): if self.endpoint and self.endpoint.startswith("path:"):
@ -134,6 +135,7 @@ class TidbOnQdrantVector(BaseVector):
vectors_config=vectors_config, vectors_config=vectors_config,
hnsw_config=hnsw_config, hnsw_config=hnsw_config,
timeout=int(self._client_config.timeout), timeout=int(self._client_config.timeout),
replication_factor=self._client_config.replication_factor,
) )
# create group_id payload index # create group_id payload index
@ -484,6 +486,7 @@ class TidbOnQdrantVectorFactory(AbstractVectorFactory):
timeout=dify_config.TIDB_ON_QDRANT_CLIENT_TIMEOUT, timeout=dify_config.TIDB_ON_QDRANT_CLIENT_TIMEOUT,
grpc_port=dify_config.TIDB_ON_QDRANT_GRPC_PORT, grpc_port=dify_config.TIDB_ON_QDRANT_GRPC_PORT,
prefer_grpc=dify_config.TIDB_ON_QDRANT_GRPC_ENABLED, prefer_grpc=dify_config.TIDB_ON_QDRANT_GRPC_ENABLED,
replication_factor=dify_config.QDRANT_REPLICATION_FACTOR,
), ),
) )

View File

@ -53,7 +53,7 @@ class MCPToolProviderController(ToolProviderController):
author=db_provider.user.name if db_provider.user else "Anonymous", author=db_provider.user.name if db_provider.user else "Anonymous",
name=remote_mcp_tool.name, name=remote_mcp_tool.name,
label=I18nObject(en_US=remote_mcp_tool.name, zh_Hans=remote_mcp_tool.name), label=I18nObject(en_US=remote_mcp_tool.name, zh_Hans=remote_mcp_tool.name),
provider=db_provider.name, provider=db_provider.id,
icon=db_provider.icon, icon=db_provider.icon,
), ),
parameters=ToolTransformService.convert_mcp_schema_to_parameter(remote_mcp_tool.inputSchema), parameters=ToolTransformService.convert_mcp_schema_to_parameter(remote_mcp_tool.inputSchema),

View File

@ -746,7 +746,7 @@ class ToolManager:
) )
if provider is None: if provider is None:
raise ToolProviderNotFoundError(f"api provider {provider_id} not found") raise ToolProviderNotFoundError(f"mcp provider {provider_id} not found")
controller = MCPToolProviderController._from_db(provider) controller = MCPToolProviderController._from_db(provider)

View File

@ -1,21 +1,13 @@
import hashlib
import json
import mimetypes import mimetypes
import os
import re import re
import site from collections.abc import Sequence
import subprocess from dataclasses import dataclass
import tempfile from typing import Any, Optional, cast
import unicodedata
from contextlib import contextmanager
from pathlib import Path
from typing import Any, Literal, Optional, cast
from urllib.parse import unquote from urllib.parse import unquote
import chardet import chardet
import cloudscraper # type: ignore import cloudscraper # type: ignore
from bs4 import BeautifulSoup, CData, Comment, NavigableString # type: ignore from readabilipy import simple_json_from_html_string # type: ignore
from regex import regex # type: ignore
from core.helper import ssrf_proxy from core.helper import ssrf_proxy
from core.rag.extractor import extract_processor from core.rag.extractor import extract_processor
@ -23,9 +15,7 @@ from core.rag.extractor.extract_processor import ExtractProcessor
FULL_TEMPLATE = """ FULL_TEMPLATE = """
TITLE: {title} TITLE: {title}
AUTHORS: {authors} AUTHOR: {author}
PUBLISH DATE: {publish_date}
TOP_IMAGE_URL: {top_image}
TEXT: TEXT:
{text} {text}
@ -73,8 +63,8 @@ def get_url(url: str, user_agent: Optional[str] = None) -> str:
response = ssrf_proxy.get(url, headers=headers, follow_redirects=True, timeout=(120, 300)) response = ssrf_proxy.get(url, headers=headers, follow_redirects=True, timeout=(120, 300))
elif response.status_code == 403: elif response.status_code == 403:
scraper = cloudscraper.create_scraper() scraper = cloudscraper.create_scraper()
scraper.perform_request = ssrf_proxy.make_request scraper.perform_request = ssrf_proxy.make_request # type: ignore
response = scraper.get(url, headers=headers, follow_redirects=True, timeout=(120, 300)) response = scraper.get(url, headers=headers, follow_redirects=True, timeout=(120, 300)) # type: ignore
if response.status_code != 200: if response.status_code != 200:
return "URL returned status code {}.".format(response.status_code) return "URL returned status code {}.".format(response.status_code)
@ -90,273 +80,36 @@ def get_url(url: str, user_agent: Optional[str] = None) -> str:
else: else:
content = response.text content = response.text
a = extract_using_readabilipy(content) article = extract_using_readabilipy(content)
if not a["plain_text"] or not a["plain_text"].strip(): if not article.text:
return "" return ""
res = FULL_TEMPLATE.format( res = FULL_TEMPLATE.format(
title=a["title"], title=article.title,
authors=a["byline"], author=article.auther,
publish_date=a["date"], text=article.text,
top_image="",
text=a["plain_text"] or "",
) )
return res return res
def extract_using_readabilipy(html): @dataclass
with tempfile.NamedTemporaryFile(delete=False, mode="w+") as f_html: class Article:
f_html.write(html) title: str
f_html.close() auther: str
html_path = f_html.name text: Sequence[dict]
# Call Mozilla's Readability.js Readability.parse() function via node, writing output to a temporary file
article_json_path = html_path + ".json"
jsdir = os.path.join(find_module_path("readabilipy"), "javascript")
with chdir(jsdir):
subprocess.check_call(["node", "ExtractArticle.js", "-i", html_path, "-o", article_json_path])
# Read output of call to Readability.parse() from JSON file and return as Python dictionary
input_json = json.loads(Path(article_json_path).read_text(encoding="utf-8"))
# Deleting files after processing
os.unlink(article_json_path)
os.unlink(html_path)
article_json: dict[str, Any] = {
"title": None,
"byline": None,
"date": None,
"content": None,
"plain_content": None,
"plain_text": None,
}
# Populate article fields from readability fields where present
if input_json:
if input_json.get("title"):
article_json["title"] = input_json["title"]
if input_json.get("byline"):
article_json["byline"] = input_json["byline"]
if input_json.get("date"):
article_json["date"] = input_json["date"]
if input_json.get("content"):
article_json["content"] = input_json["content"]
article_json["plain_content"] = plain_content(article_json["content"], False, False)
article_json["plain_text"] = extract_text_blocks_as_plain_text(article_json["plain_content"])
if input_json.get("textContent"):
article_json["plain_text"] = input_json["textContent"]
article_json["plain_text"] = re.sub(r"\n\s*\n", "\n", article_json["plain_text"])
return article_json
def find_module_path(module_name): def extract_using_readabilipy(html: str):
for package_path in site.getsitepackages(): json_article: dict[str, Any] = simple_json_from_html_string(html, use_readability=True)
potential_path = os.path.join(package_path, module_name) article = Article(
if os.path.exists(potential_path): title=json_article.get("title") or "",
return potential_path auther=json_article.get("byline") or "",
text=json_article.get("plain_text") or [],
return None
@contextmanager
def chdir(path):
"""Change directory in context and return to original on exit"""
# From https://stackoverflow.com/a/37996581, couldn't find a built-in
original_path = os.getcwd()
os.chdir(path)
try:
yield
finally:
os.chdir(original_path)
def extract_text_blocks_as_plain_text(paragraph_html):
# Load article as DOM
soup = BeautifulSoup(paragraph_html, "html.parser")
# Select all lists
list_elements = soup.find_all(["ul", "ol"])
# Prefix text in all list items with "* " and make lists paragraphs
for list_element in list_elements:
plain_items = "".join(
list(filter(None, [plain_text_leaf_node(li)["text"] for li in list_element.find_all("li")]))
)
list_element.string = plain_items
list_element.name = "p"
# Select all text blocks
text_blocks = [s.parent for s in soup.find_all(string=True)]
text_blocks = [plain_text_leaf_node(block) for block in text_blocks]
# Drop empty paragraphs
text_blocks = list(filter(lambda p: p["text"] is not None, text_blocks))
return text_blocks
def plain_text_leaf_node(element):
# Extract all text, stripped of any child HTML elements and normalize it
plain_text = normalize_text(element.get_text())
if plain_text != "" and element.name == "li":
plain_text = "* {}, ".format(plain_text)
if plain_text == "":
plain_text = None
if "data-node-index" in element.attrs:
plain = {"node_index": element["data-node-index"], "text": plain_text}
else:
plain = {"text": plain_text}
return plain
def plain_content(readability_content, content_digests, node_indexes):
# Load article as DOM
soup = BeautifulSoup(readability_content, "html.parser")
# Make all elements plain
elements = plain_elements(soup.contents, content_digests, node_indexes)
if node_indexes:
# Add node index attributes to nodes
elements = [add_node_indexes(element) for element in elements]
# Replace article contents with plain elements
soup.contents = elements
return str(soup)
def plain_elements(elements, content_digests, node_indexes):
# Get plain content versions of all elements
elements = [plain_element(element, content_digests, node_indexes) for element in elements]
if content_digests:
# Add content digest attribute to nodes
elements = [add_content_digest(element) for element in elements]
return elements
def plain_element(element, content_digests, node_indexes):
# For lists, we make each item plain text
if is_leaf(element):
# For leaf node elements, extract the text content, discarding any HTML tags
# 1. Get element contents as text
plain_text = element.get_text()
# 2. Normalize the extracted text string to a canonical representation
plain_text = normalize_text(plain_text)
# 3. Update element content to be plain text
element.string = plain_text
elif is_text(element):
if is_non_printing(element):
# The simplified HTML may have come from Readability.js so might
# have non-printing text (e.g. Comment or CData). In this case, we
# keep the structure, but ensure that the string is empty.
element = type(element)("")
else:
plain_text = element.string
plain_text = normalize_text(plain_text)
element = type(element)(plain_text)
else:
# If not a leaf node or leaf type call recursively on child nodes, replacing
element.contents = plain_elements(element.contents, content_digests, node_indexes)
return element
def add_node_indexes(element, node_index="0"):
# Can't add attributes to string types
if is_text(element):
return element
# Add index to current element
element["data-node-index"] = node_index
# Add index to child elements
for local_idx, child in enumerate([c for c in element.contents if not is_text(c)], start=1):
# Can't add attributes to leaf string types
child_index = "{stem}.{local}".format(stem=node_index, local=local_idx)
add_node_indexes(child, node_index=child_index)
return element
def normalize_text(text):
"""Normalize unicode and whitespace."""
# Normalize unicode first to try and standardize whitespace characters as much as possible before normalizing them
text = strip_control_characters(text)
text = normalize_unicode(text)
text = normalize_whitespace(text)
return text
def strip_control_characters(text):
"""Strip out unicode control characters which might break the parsing."""
# Unicode control characters
# [Cc]: Other, Control [includes new lines]
# [Cf]: Other, Format
# [Cn]: Other, Not Assigned
# [Co]: Other, Private Use
# [Cs]: Other, Surrogate
control_chars = {"Cc", "Cf", "Cn", "Co", "Cs"}
retained_chars = ["\t", "\n", "\r", "\f"]
# Remove non-printing control characters
return "".join(
[
"" if (unicodedata.category(char) in control_chars) and (char not in retained_chars) else char
for char in text
]
) )
return article
def normalize_unicode(text):
"""Normalize unicode such that things that are visually equivalent map to the same unicode string where possible."""
normal_form: Literal["NFC", "NFD", "NFKC", "NFKD"] = "NFKC"
text = unicodedata.normalize(normal_form, text)
return text
def normalize_whitespace(text):
"""Replace runs of whitespace characters with a single space as this is what happens when HTML text is displayed."""
text = regex.sub(r"\s+", " ", text)
# Remove leading and trailing whitespace
text = text.strip()
return text
def is_leaf(element):
return element.name in {"p", "li"}
def is_text(element):
return isinstance(element, NavigableString)
def is_non_printing(element):
return any(isinstance(element, _e) for _e in [Comment, CData])
def add_content_digest(element):
if not is_text(element):
element["data-content-digest"] = content_digest(element)
return element
def content_digest(element):
digest: Any
if is_text(element):
# Hash
trimmed_string = element.string.strip()
if trimmed_string == "":
digest = ""
else:
digest = hashlib.sha256(trimmed_string.encode("utf-8")).hexdigest()
else:
contents = element.contents
num_contents = len(contents)
if num_contents == 0:
# No hash when no child elements exist
digest = ""
elif num_contents == 1:
# If single child, use digest of child
digest = content_digest(contents[0])
else:
# Build content digest from the "non-empty" digests of child nodes
digest = hashlib.sha256()
child_digests = list(filter(lambda x: x != "", [content_digest(content) for content in contents]))
for child in child_digests:
digest.update(child.encode("utf-8"))
digest = digest.hexdigest()
return digest
def get_image_upload_file_ids(content): def get_image_upload_file_ids(content):

View File

@ -9,7 +9,7 @@ from copy import copy, deepcopy
from datetime import UTC, datetime from datetime import UTC, datetime
from typing import Any, Optional, cast from typing import Any, Optional, cast
from flask import Flask, current_app from flask import Flask, current_app, has_request_context
from configs import dify_config from configs import dify_config
from core.app.apps.base_app_queue_manager import GenerateTaskStoppedError from core.app.apps.base_app_queue_manager import GenerateTaskStoppedError
@ -541,8 +541,21 @@ class GraphEngine:
for var, val in context.items(): for var, val in context.items():
var.set(val) var.set(val)
# FIXME(-LAN-): Save current user before entering new app context
from flask import g
saved_user = None
if has_request_context() and hasattr(g, "_login_user"):
saved_user = g._login_user
with flask_app.app_context(): with flask_app.app_context():
try: try:
# Restore user in new app context
if saved_user is not None:
from flask import g
g._login_user = saved_user
q.put( q.put(
ParallelBranchRunStartedEvent( ParallelBranchRunStartedEvent(
parallel_id=parallel_id, parallel_id=parallel_id,

View File

@ -7,6 +7,7 @@ import tempfile
from collections.abc import Mapping, Sequence from collections.abc import Mapping, Sequence
from typing import Any, cast from typing import Any, cast
import chardet
import docx import docx
import pandas as pd import pandas as pd
import pypandoc # type: ignore import pypandoc # type: ignore
@ -184,26 +185,64 @@ def _extract_text_by_file_extension(*, file_content: bytes, file_extension: str)
def _extract_text_from_plain_text(file_content: bytes) -> str: def _extract_text_from_plain_text(file_content: bytes) -> str:
try: try:
return file_content.decode("utf-8", "ignore") # Detect encoding using chardet
except UnicodeDecodeError as e: result = chardet.detect(file_content)
raise TextExtractionError("Failed to decode plain text file") from e encoding = result["encoding"]
# Fallback to utf-8 if detection fails
if not encoding:
encoding = "utf-8"
return file_content.decode(encoding, errors="ignore")
except (UnicodeDecodeError, LookupError) as e:
# If decoding fails, try with utf-8 as last resort
try:
return file_content.decode("utf-8", errors="ignore")
except UnicodeDecodeError:
raise TextExtractionError(f"Failed to decode plain text file: {e}") from e
def _extract_text_from_json(file_content: bytes) -> str: def _extract_text_from_json(file_content: bytes) -> str:
try: try:
json_data = json.loads(file_content.decode("utf-8", "ignore")) # Detect encoding using chardet
result = chardet.detect(file_content)
encoding = result["encoding"]
# Fallback to utf-8 if detection fails
if not encoding:
encoding = "utf-8"
json_data = json.loads(file_content.decode(encoding, errors="ignore"))
return json.dumps(json_data, indent=2, ensure_ascii=False) return json.dumps(json_data, indent=2, ensure_ascii=False)
except (UnicodeDecodeError, json.JSONDecodeError) as e: except (UnicodeDecodeError, LookupError, json.JSONDecodeError) as e:
raise TextExtractionError(f"Failed to decode or parse JSON file: {e}") from e # If decoding fails, try with utf-8 as last resort
try:
json_data = json.loads(file_content.decode("utf-8", errors="ignore"))
return json.dumps(json_data, indent=2, ensure_ascii=False)
except (UnicodeDecodeError, json.JSONDecodeError):
raise TextExtractionError(f"Failed to decode or parse JSON file: {e}") from e
def _extract_text_from_yaml(file_content: bytes) -> str: def _extract_text_from_yaml(file_content: bytes) -> str:
"""Extract the content from yaml file""" """Extract the content from yaml file"""
try: try:
yaml_data = yaml.safe_load_all(file_content.decode("utf-8", "ignore")) # Detect encoding using chardet
result = chardet.detect(file_content)
encoding = result["encoding"]
# Fallback to utf-8 if detection fails
if not encoding:
encoding = "utf-8"
yaml_data = yaml.safe_load_all(file_content.decode(encoding, errors="ignore"))
return cast(str, yaml.dump_all(yaml_data, allow_unicode=True, sort_keys=False)) return cast(str, yaml.dump_all(yaml_data, allow_unicode=True, sort_keys=False))
except (UnicodeDecodeError, yaml.YAMLError) as e: except (UnicodeDecodeError, LookupError, yaml.YAMLError) as e:
raise TextExtractionError(f"Failed to decode or parse YAML file: {e}") from e # If decoding fails, try with utf-8 as last resort
try:
yaml_data = yaml.safe_load_all(file_content.decode("utf-8", errors="ignore"))
return cast(str, yaml.dump_all(yaml_data, allow_unicode=True, sort_keys=False))
except (UnicodeDecodeError, yaml.YAMLError):
raise TextExtractionError(f"Failed to decode or parse YAML file: {e}") from e
def _extract_text_from_pdf(file_content: bytes) -> str: def _extract_text_from_pdf(file_content: bytes) -> str:
@ -342,7 +381,20 @@ def _extract_text_from_file(file: File):
def _extract_text_from_csv(file_content: bytes) -> str: def _extract_text_from_csv(file_content: bytes) -> str:
try: try:
csv_file = io.StringIO(file_content.decode("utf-8", "ignore")) # Detect encoding using chardet
result = chardet.detect(file_content)
encoding = result["encoding"]
# Fallback to utf-8 if detection fails
if not encoding:
encoding = "utf-8"
try:
csv_file = io.StringIO(file_content.decode(encoding, errors="ignore"))
except (UnicodeDecodeError, LookupError):
# If decoding fails, try with utf-8 as last resort
csv_file = io.StringIO(file_content.decode("utf-8", errors="ignore"))
csv_reader = csv.reader(csv_file) csv_reader = csv.reader(csv_file)
rows = list(csv_reader) rows = list(csv_reader)
@ -370,7 +422,7 @@ def _extract_text_from_excel(file_content: bytes) -> str:
df = excel_file.parse(sheet_name=sheet_name) df = excel_file.parse(sheet_name=sheet_name)
df.dropna(how="all", inplace=True) df.dropna(how="all", inplace=True)
# Create Markdown table two times to separate tables with a newline # Create Markdown table two times to separate tables with a newline
markdown_table += df.to_markdown(index=False) + "\n\n" markdown_table += df.to_markdown(index=False, floatfmt="") + "\n\n"
except Exception as e: except Exception as e:
continue continue
return markdown_table return markdown_table

View File

@ -7,7 +7,7 @@ from datetime import UTC, datetime
from queue import Empty, Queue from queue import Empty, Queue
from typing import TYPE_CHECKING, Any, Optional, cast from typing import TYPE_CHECKING, Any, Optional, cast
from flask import Flask, current_app from flask import Flask, current_app, has_request_context
from configs import dify_config from configs import dify_config
from core.variables import ArrayVariable, IntegerVariable, NoneVariable from core.variables import ArrayVariable, IntegerVariable, NoneVariable
@ -590,7 +590,21 @@ class IterationNode(BaseNode[IterationNodeData]):
""" """
for var, val in context.items(): for var, val in context.items():
var.set(val) var.set(val)
# FIXME(-LAN-): Save current user before entering new app context
from flask import g
saved_user = None
if has_request_context() and hasattr(g, "_login_user"):
saved_user = g._login_user
with flask_app.app_context(): with flask_app.app_context():
# Restore user in new app context
if saved_user is not None:
from flask import g
g._login_user = saved_user
parallel_mode_run_id = uuid.uuid4().hex parallel_mode_run_id = uuid.uuid4().hex
graph_engine_copy = graph_engine.create_copy() graph_engine_copy = graph_engine.create_copy()
variable_pool_copy = graph_engine_copy.graph_runtime_state.variable_pool variable_pool_copy = graph_engine_copy.graph_runtime_state.variable_pool

View File

@ -125,6 +125,7 @@ class WorkflowCycleManager:
) )
) )
self._workflow_execution_repository.save(workflow_execution)
return workflow_execution return workflow_execution
def handle_workflow_run_partial_success( def handle_workflow_run_partial_success(
@ -158,6 +159,7 @@ class WorkflowCycleManager:
) )
) )
self._workflow_execution_repository.save(execution)
return execution return execution
def handle_workflow_run_failed( def handle_workflow_run_failed(
@ -172,44 +174,45 @@ class WorkflowCycleManager:
trace_manager: Optional[TraceQueueManager] = None, trace_manager: Optional[TraceQueueManager] = None,
exceptions_count: int = 0, exceptions_count: int = 0,
) -> WorkflowExecution: ) -> WorkflowExecution:
execution = self._get_workflow_execution_or_raise_error(workflow_run_id) workflow_execution = self._get_workflow_execution_or_raise_error(workflow_run_id)
execution.status = WorkflowExecutionStatus(status.value) workflow_execution.status = WorkflowExecutionStatus(status.value)
execution.error_message = error_message workflow_execution.error_message = error_message
execution.total_tokens = total_tokens workflow_execution.total_tokens = total_tokens
execution.total_steps = total_steps workflow_execution.total_steps = total_steps
execution.finished_at = datetime.now(UTC).replace(tzinfo=None) workflow_execution.finished_at = datetime.now(UTC).replace(tzinfo=None)
execution.exceptions_count = exceptions_count workflow_execution.exceptions_count = exceptions_count
# Use the instance repository to find running executions for a workflow run # Use the instance repository to find running executions for a workflow run
running_domain_executions = self._workflow_node_execution_repository.get_running_executions( running_node_executions = self._workflow_node_execution_repository.get_running_executions(
workflow_run_id=execution.id workflow_run_id=workflow_execution.id
) )
# Update the domain models # Update the domain models
now = datetime.now(UTC).replace(tzinfo=None) now = datetime.now(UTC).replace(tzinfo=None)
for domain_execution in running_domain_executions: for node_execution in running_node_executions:
if domain_execution.node_execution_id: if node_execution.node_execution_id:
# Update the domain model # Update the domain model
domain_execution.status = NodeExecutionStatus.FAILED node_execution.status = NodeExecutionStatus.FAILED
domain_execution.error = error_message node_execution.error = error_message
domain_execution.finished_at = now node_execution.finished_at = now
domain_execution.elapsed_time = (now - domain_execution.created_at).total_seconds() node_execution.elapsed_time = (now - node_execution.created_at).total_seconds()
# Update the repository with the domain model # Update the repository with the domain model
self._workflow_node_execution_repository.save(domain_execution) self._workflow_node_execution_repository.save(node_execution)
if trace_manager: if trace_manager:
trace_manager.add_trace_task( trace_manager.add_trace_task(
TraceTask( TraceTask(
TraceTaskName.WORKFLOW_TRACE, TraceTaskName.WORKFLOW_TRACE,
workflow_execution=execution, workflow_execution=workflow_execution,
conversation_id=conversation_id, conversation_id=conversation_id,
user_id=trace_manager.user_id, user_id=trace_manager.user_id,
) )
) )
return execution self._workflow_execution_repository.save(workflow_execution)
return workflow_execution
def handle_node_execution_start( def handle_node_execution_start(
self, self,

View File

@ -5,11 +5,11 @@ from flask import Response, request
from flask_login import user_loaded_from_request, user_logged_in from flask_login import user_loaded_from_request, user_logged_in
from werkzeug.exceptions import NotFound, Unauthorized from werkzeug.exceptions import NotFound, Unauthorized
import contexts from configs import dify_config
from dify_app import DifyApp from dify_app import DifyApp
from extensions.ext_database import db from extensions.ext_database import db
from libs.passport import PassportService from libs.passport import PassportService
from models.account import Account from models.account import Account, Tenant, TenantAccountJoin
from models.model import EndUser from models.model import EndUser
from services.account_service import AccountService from services.account_service import AccountService
@ -32,6 +32,26 @@ def load_user_from_request(request_from_flask_login):
else: else:
auth_token = request.args.get("_token") auth_token = request.args.get("_token")
# Check for admin API key authentication first
if dify_config.ADMIN_API_KEY_ENABLE and auth_header:
admin_api_key = dify_config.ADMIN_API_KEY
if admin_api_key and admin_api_key == auth_token:
workspace_id = request.headers.get("X-WORKSPACE-ID")
if workspace_id:
tenant_account_join = (
db.session.query(Tenant, TenantAccountJoin)
.filter(Tenant.id == workspace_id)
.filter(TenantAccountJoin.tenant_id == Tenant.id)
.filter(TenantAccountJoin.role == "owner")
.one_or_none()
)
if tenant_account_join:
tenant, ta = tenant_account_join
account = db.session.query(Account).filter_by(id=ta.account_id).first()
if account:
account.current_tenant = tenant
return account
if request.blueprint in {"console", "inner_api"}: if request.blueprint in {"console", "inner_api"}:
if not auth_token: if not auth_token:
raise Unauthorized("Invalid Authorization token.") raise Unauthorized("Invalid Authorization token.")
@ -61,8 +81,8 @@ def on_user_logged_in(_sender, user):
Note: AccountService.load_logged_in_account will populate user.current_tenant_id Note: AccountService.load_logged_in_account will populate user.current_tenant_id
through the load_user method, which calls account.set_tenant_id(). through the load_user method, which calls account.set_tenant_id().
""" """
if user and isinstance(user, Account) and user.current_tenant_id: # tenant_id context variable removed - using current_user.current_tenant_id directly
contexts.tenant_id.set(user.current_tenant_id) pass
@login_manager.unauthorized_handler @login_manager.unauthorized_handler

View File

@ -12,19 +12,30 @@ from flask_login import user_loaded_from_request, user_logged_in # type: ignore
from configs import dify_config from configs import dify_config
from dify_app import DifyApp from dify_app import DifyApp
from models import Account, EndUser
@user_logged_in.connect @user_logged_in.connect
@user_loaded_from_request.connect @user_loaded_from_request.connect
def on_user_loaded(_sender, user): def on_user_loaded(_sender, user: Union["Account", "EndUser"]):
if dify_config.ENABLE_OTEL: if dify_config.ENABLE_OTEL:
from opentelemetry.trace import get_current_span from opentelemetry.trace import get_current_span
if user: if user:
current_span = get_current_span() try:
if current_span: current_span = get_current_span()
current_span.set_attribute("service.tenant.id", user.current_tenant_id) if isinstance(user, Account) and user.current_tenant_id:
current_span.set_attribute("service.user.id", user.id) tenant_id = user.current_tenant_id
elif isinstance(user, EndUser):
tenant_id = user.tenant_id
else:
return
if current_span:
current_span.set_attribute("service.tenant.id", tenant_id)
current_span.set_attribute("service.user.id", user.id)
except Exception:
logging.exception("Error setting tenant and user attributes")
pass
def init_app(app: DifyApp): def init_app(app: DifyApp):
@ -47,21 +58,25 @@ def init_app(app: DifyApp):
def response_hook(span: Span, status: str, response_headers: list): def response_hook(span: Span, status: str, response_headers: list):
if span and span.is_recording(): if span and span.is_recording():
if status.startswith("2"): try:
span.set_status(StatusCode.OK) if status.startswith("2"):
else: span.set_status(StatusCode.OK)
span.set_status(StatusCode.ERROR, status) else:
span.set_status(StatusCode.ERROR, status)
status = status.split(" ")[0] status = status.split(" ")[0]
status_code = int(status) status_code = int(status)
status_class = f"{status_code // 100}xx" status_class = f"{status_code // 100}xx"
attributes: dict[str, str | int] = {"status_code": status_code, "status_class": status_class} attributes: dict[str, str | int] = {"status_code": status_code, "status_class": status_class}
request = flask.request request = flask.request
if request and request.url_rule: if request and request.url_rule:
attributes[SpanAttributes.HTTP_TARGET] = str(request.url_rule.rule) attributes[SpanAttributes.HTTP_TARGET] = str(request.url_rule.rule)
if request and request.method: if request and request.method:
attributes[SpanAttributes.HTTP_METHOD] = str(request.method) attributes[SpanAttributes.HTTP_METHOD] = str(request.method)
_http_response_counter.add(1, attributes) _http_response_counter.add(1, attributes)
except Exception:
logging.exception("Error setting status and attributes")
pass
instrumentor = FlaskInstrumentor() instrumentor = FlaskInstrumentor()
if dify_config.DEBUG: if dify_config.DEBUG:
@ -92,7 +107,7 @@ def init_app(app: DifyApp):
class ExceptionLoggingHandler(logging.Handler): class ExceptionLoggingHandler(logging.Handler):
"""Custom logging handler that creates spans for logging.exception() calls""" """Custom logging handler that creates spans for logging.exception() calls"""
def emit(self, record): def emit(self, record: logging.LogRecord):
try: try:
if record.exc_info: if record.exc_info:
tracer = get_tracer_provider().get_tracer("dify.exception.logging") tracer = get_tracer_provider().get_tracer("dify.exception.logging")
@ -107,9 +122,12 @@ def init_app(app: DifyApp):
}, },
) as span: ) as span:
span.set_status(StatusCode.ERROR) span.set_status(StatusCode.ERROR)
span.record_exception(record.exc_info[1]) if record.exc_info[1]:
span.set_attribute("exception.type", record.exc_info[0].__name__) span.record_exception(record.exc_info[1])
span.set_attribute("exception.message", str(record.exc_info[1])) span.set_attribute("exception.message", str(record.exc_info[1]))
if record.exc_info[0]:
span.set_attribute("exception.type", record.exc_info[0].__name__)
except Exception: except Exception:
pass pass

View File

@ -1,8 +1,21 @@
import json
from flask_restful import fields from flask_restful import fields
from fields.workflow_fields import workflow_partial_fields from fields.workflow_fields import workflow_partial_fields
from libs.helper import AppIconUrlField, TimestampField from libs.helper import AppIconUrlField, TimestampField
class JsonStringField(fields.Raw):
def format(self, value):
if isinstance(value, str):
try:
return json.loads(value)
except (json.JSONDecodeError, TypeError):
return value
return value
app_detail_kernel_fields = { app_detail_kernel_fields = {
"id": fields.String, "id": fields.String,
"name": fields.String, "name": fields.String,
@ -100,6 +113,8 @@ app_partial_fields = {
"updated_at": TimestampField, "updated_at": TimestampField,
"tags": fields.List(fields.Nested(tag_fields)), "tags": fields.List(fields.Nested(tag_fields)),
"access_mode": fields.String, "access_mode": fields.String,
"create_user_name": fields.String,
"author_name": fields.String,
} }
@ -223,7 +238,7 @@ app_server_fields = {
"server_code": fields.String, "server_code": fields.String,
"description": fields.String, "description": fields.String,
"status": fields.String, "status": fields.String,
"parameters": fields.Raw, "parameters": JsonStringField,
"created_at": TimestampField, "created_at": TimestampField,
"updated_at": TimestampField, "updated_at": TimestampField,
} }

View File

@ -2,14 +2,11 @@ from functools import wraps
from typing import Any from typing import Any
from flask import current_app, g, has_request_context, request from flask import current_app, g, has_request_context, request
from flask_login import user_logged_in # type: ignore
from flask_login.config import EXEMPT_METHODS # type: ignore from flask_login.config import EXEMPT_METHODS # type: ignore
from werkzeug.exceptions import Unauthorized
from werkzeug.local import LocalProxy from werkzeug.local import LocalProxy
from configs import dify_config from configs import dify_config
from extensions.ext_database import db from models.account import Account
from models.account import Account, Tenant, TenantAccountJoin
from models.model import EndUser from models.model import EndUser
#: A proxy for the current user. If no user is logged in, this will be an #: A proxy for the current user. If no user is logged in, this will be an
@ -53,36 +50,6 @@ def login_required(func):
@wraps(func) @wraps(func)
def decorated_view(*args, **kwargs): def decorated_view(*args, **kwargs):
auth_header = request.headers.get("Authorization")
if dify_config.ADMIN_API_KEY_ENABLE:
if auth_header:
if " " not in auth_header:
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
auth_scheme, auth_token = auth_header.split(None, 1)
auth_scheme = auth_scheme.lower()
if auth_scheme != "bearer":
raise Unauthorized("Invalid Authorization header format. Expected 'Bearer <api-key>' format.")
admin_api_key = dify_config.ADMIN_API_KEY
if admin_api_key:
if admin_api_key == auth_token:
workspace_id = request.headers.get("X-WORKSPACE-ID")
if workspace_id:
tenant_account_join = (
db.session.query(Tenant, TenantAccountJoin)
.filter(Tenant.id == workspace_id)
.filter(TenantAccountJoin.tenant_id == Tenant.id)
.filter(TenantAccountJoin.role == "owner")
.one_or_none()
)
if tenant_account_join:
tenant, ta = tenant_account_join
account = db.session.query(Account).filter_by(id=ta.account_id).first()
# Login admin
if account:
account.current_tenant = tenant
current_app.login_manager._update_request_context_with_user(account) # type: ignore
user_logged_in.send(current_app._get_current_object(), user=_get_user()) # type: ignore
if request.method in EXEMPT_METHODS or dify_config.LOGIN_DISABLED: if request.method in EXEMPT_METHODS or dify_config.LOGIN_DISABLED:
pass pass
elif not current_user.is_authenticated: elif not current_user.is_authenticated:

View File

@ -298,6 +298,15 @@ class App(Base):
def mcp_server(self): def mcp_server(self):
return db.session.query(AppMCPServer).filter(AppMCPServer.app_id == self.id).first() return db.session.query(AppMCPServer).filter(AppMCPServer.app_id == self.id).first()
@property
def author_name(self):
if self.created_by:
account = db.session.query(Account).filter(Account.id == self.created_by).first()
if account:
return account.name
return None
class AppModelConfig(Base): class AppModelConfig(Base):
__tablename__ = "app_model_configs" __tablename__ = "app_model_configs"

View File

@ -233,6 +233,7 @@ class MCPToolProvider(Base):
__table_args__ = ( __table_args__ = (
db.PrimaryKeyConstraint("id", name="tool_mcp_provider_pkey"), db.PrimaryKeyConstraint("id", name="tool_mcp_provider_pkey"),
db.UniqueConstraint("name", "tenant_id", name="unique_mcp_tool_provider"), db.UniqueConstraint("name", "tenant_id", name="unique_mcp_tool_provider"),
db.UniqueConstraint("server_url", name="unique_mcp_tool_provider_server_url"),
) )
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()")) id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuid_generate_v4()"))

View File

@ -6,6 +6,8 @@ from enum import Enum, StrEnum
from typing import TYPE_CHECKING, Any, Optional, Union from typing import TYPE_CHECKING, Any, Optional, Union
from uuid import uuid4 from uuid import uuid4
from flask_login import current_user
from core.variables import utils as variable_utils from core.variables import utils as variable_utils
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from factories.variable_factory import build_segment from factories.variable_factory import build_segment
@ -17,7 +19,6 @@ import sqlalchemy as sa
from sqlalchemy import UniqueConstraint, func from sqlalchemy import UniqueConstraint, func
from sqlalchemy.orm import Mapped, mapped_column from sqlalchemy.orm import Mapped, mapped_column
import contexts
from constants import DEFAULT_FILE_NUMBER_LIMITS, HIDDEN_VALUE from constants import DEFAULT_FILE_NUMBER_LIMITS, HIDDEN_VALUE
from core.helper import encrypter from core.helper import encrypter
from core.variables import SecretVariable, Segment, SegmentType, Variable from core.variables import SecretVariable, Segment, SegmentType, Variable
@ -280,7 +281,16 @@ class Workflow(Base):
if self._environment_variables is None: if self._environment_variables is None:
self._environment_variables = "{}" self._environment_variables = "{}"
tenant_id = contexts.tenant_id.get() # Get tenant_id from current_user (Account or EndUser)
if isinstance(current_user, Account):
# Account user
tenant_id = current_user.current_tenant_id
else:
# EndUser
tenant_id = current_user.tenant_id
if not tenant_id:
return []
environment_variables_dict: dict[str, Any] = json.loads(self._environment_variables) environment_variables_dict: dict[str, Any] = json.loads(self._environment_variables)
results = [ results = [
@ -303,7 +313,17 @@ class Workflow(Base):
self._environment_variables = "{}" self._environment_variables = "{}"
return return
tenant_id = contexts.tenant_id.get() # Get tenant_id from current_user (Account or EndUser)
if isinstance(current_user, Account):
# Account user
tenant_id = current_user.current_tenant_id
else:
# EndUser
tenant_id = current_user.tenant_id
if not tenant_id:
self._environment_variables = "{}"
return
value = list(value) value = list(value)
if any(var for var in value if not var.id): if any(var for var in value if not var.id):

View File

@ -193,7 +193,7 @@ vdb = [
"pymilvus~=2.5.0", "pymilvus~=2.5.0",
"pymochow==1.3.1", "pymochow==1.3.1",
"pyobvector~=0.1.6", "pyobvector~=0.1.6",
"qdrant-client==1.7.3", "qdrant-client==1.9.0",
"tablestore==6.1.0", "tablestore==6.1.0",
"tcvectordb~=1.6.4", "tcvectordb~=1.6.4",
"tidb-vector==0.0.9", "tidb-vector==0.0.9",

View File

@ -560,17 +560,15 @@ class DatasetService:
if dataset.permission == DatasetPermissionEnum.ONLY_ME and dataset.created_by != user.id: if dataset.permission == DatasetPermissionEnum.ONLY_ME and dataset.created_by != user.id:
logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}") logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
raise NoPermissionError("You do not have permission to access this dataset.") raise NoPermissionError("You do not have permission to access this dataset.")
if dataset.permission == "partial_members": if dataset.permission == DatasetPermissionEnum.PARTIAL_TEAM:
user_permission = ( # For partial team permission, user needs explicit permission or be the creator
db.session.query(DatasetPermission).filter_by(dataset_id=dataset.id, account_id=user.id).first() if dataset.created_by != user.id:
) user_permission = (
if ( db.session.query(DatasetPermission).filter_by(dataset_id=dataset.id, account_id=user.id).first()
not user_permission )
and dataset.tenant_id != user.current_tenant_id if not user_permission:
and dataset.created_by != user.id logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
): raise NoPermissionError("You do not have permission to access this dataset.")
logging.debug(f"User {user.id} does not have permission to access dataset {dataset.id}")
raise NoPermissionError("You do not have permission to access this dataset.")
@staticmethod @staticmethod
def check_dataset_operator_permission(user: Optional[Account] = None, dataset: Optional[Dataset] = None): def check_dataset_operator_permission(user: Optional[Account] = None, dataset: Optional[Dataset] = None):

View File

@ -17,7 +17,7 @@ from core.plugin.entities.plugin import (
PluginInstallation, PluginInstallation,
PluginInstallationSource, PluginInstallationSource,
) )
from core.plugin.entities.plugin_daemon import PluginInstallTask, PluginUploadResponse from core.plugin.entities.plugin_daemon import PluginInstallTask, PluginListResponse, PluginUploadResponse
from core.plugin.impl.asset import PluginAssetManager from core.plugin.impl.asset import PluginAssetManager
from core.plugin.impl.debugging import PluginDebuggingClient from core.plugin.impl.debugging import PluginDebuggingClient
from core.plugin.impl.plugin import PluginInstaller from core.plugin.impl.plugin import PluginInstaller
@ -110,6 +110,15 @@ class PluginService:
plugins = manager.list_plugins(tenant_id) plugins = manager.list_plugins(tenant_id)
return plugins return plugins
@staticmethod
def list_with_total(tenant_id: str, page: int, page_size: int) -> PluginListResponse:
"""
list all plugins of the tenant
"""
manager = PluginInstaller()
plugins = manager.list_plugins_with_total(tenant_id, page, page_size)
return plugins
@staticmethod @staticmethod
def list_installations_from_ids(tenant_id: str, ids: Sequence[str]) -> Sequence[PluginInstallation]: def list_installations_from_ids(tenant_id: str, ids: Sequence[str]) -> Sequence[PluginInstallation]:
""" """

View File

@ -1,5 +1,7 @@
import json import json
from sqlalchemy import or_
from core.mcp.error import MCPAuthError, MCPConnectionError from core.mcp.error import MCPAuthError, MCPConnectionError
from core.mcp.mcp_client import MCPClient from core.mcp.mcp_client import MCPClient
from core.tools.entities.api_entities import ToolProviderApiEntity from core.tools.entities.api_entities import ToolProviderApiEntity
@ -29,13 +31,22 @@ class MCPToolManageService:
@staticmethod @staticmethod
def create_mcp_provider( def create_mcp_provider(
tenant_id: str, name: str, server_url: str, user_id: str, icon: str, icon_type: str, icon_background: str tenant_id: str, name: str, server_url: str, user_id: str, icon: str, icon_type: str, icon_background: str
) -> dict: ) -> ToolProviderApiEntity:
if ( existing_provider = (
db.session.query(MCPToolProvider) db.session.query(MCPToolProvider)
.filter(MCPToolProvider.tenant_id == tenant_id, MCPToolProvider.name == name) .filter(
MCPToolProvider.tenant_id == tenant_id,
or_(MCPToolProvider.name == name, MCPToolProvider.server_url == server_url),
MCPToolProvider.tenant_id == tenant_id,
)
.first() .first()
): )
raise ValueError(f"MCP tool {name} already exists") if existing_provider:
if existing_provider.name == name:
raise ValueError(f"MCP tool {name} already exists")
else:
raise ValueError(f"MCP tool {server_url} already exists")
mcp_tool = MCPToolProvider( mcp_tool = MCPToolProvider(
tenant_id=tenant_id, tenant_id=tenant_id,
name=name, name=name,
@ -47,7 +58,7 @@ class MCPToolManageService:
) )
db.session.add(mcp_tool) db.session.add(mcp_tool)
db.session.commit() db.session.commit()
return {"result": "success"} return ToolTransformService.mcp_provider_to_user_provider(mcp_tool)
@staticmethod @staticmethod
def retrieve_mcp_tools(tenant_id: str) -> list[ToolProviderApiEntity]: def retrieve_mcp_tools(tenant_id: str) -> list[ToolProviderApiEntity]:

View File

@ -4,16 +4,12 @@ from collections.abc import Callable
import click import click
from celery import shared_task # type: ignore from celery import shared_task # type: ignore
from sqlalchemy import delete, select from sqlalchemy import delete
from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session
from core.repositories import SQLAlchemyWorkflowNodeExecutionRepository
from extensions.ext_database import db from extensions.ext_database import db
from models import ( from models import (
Account,
ApiToken, ApiToken,
App,
AppAnnotationHitHistory, AppAnnotationHitHistory,
AppAnnotationSetting, AppAnnotationSetting,
AppDatasetJoin, AppDatasetJoin,
@ -35,7 +31,7 @@ from models import (
) )
from models.tools import WorkflowToolProvider from models.tools import WorkflowToolProvider
from models.web import PinnedConversation, SavedMessage from models.web import PinnedConversation, SavedMessage
from models.workflow import ConversationVariable, Workflow, WorkflowAppLog, WorkflowRun from models.workflow import ConversationVariable, Workflow, WorkflowAppLog, WorkflowNodeExecution, WorkflowRun
@shared_task(queue="app_deletion", bind=True, max_retries=3) @shared_task(queue="app_deletion", bind=True, max_retries=3)
@ -205,31 +201,18 @@ def _delete_app_workflow_runs(tenant_id: str, app_id: str):
def _delete_app_workflow_node_executions(tenant_id: str, app_id: str): def _delete_app_workflow_node_executions(tenant_id: str, app_id: str):
# Get app's owner def del_workflow_node_execution(workflow_node_execution_id: str):
with Session(db.engine, expire_on_commit=False) as session: db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution_id).delete(
stmt = select(Account).where(Account.id == App.created_by).where(App.id == app_id) synchronize_session=False
user = session.scalar(stmt)
if user is None:
errmsg = (
f"Failed to delete workflow node executions for tenant {tenant_id} and app {app_id}, app's owner not found"
) )
logging.error(errmsg)
raise ValueError(errmsg)
# Create a repository instance for WorkflowNodeExecution _delete_records(
repository = SQLAlchemyWorkflowNodeExecutionRepository( """select id from workflow_node_executions where tenant_id=:tenant_id and app_id=:app_id limit 1000""",
session_factory=db.engine, {"tenant_id": tenant_id, "app_id": app_id},
user=user, del_workflow_node_execution,
app_id=app_id, "workflow node execution",
triggered_from=None,
) )
# Use the clear method to delete all records for this tenant_id and app_id
repository.clear()
logging.info(click.style(f"Deleted workflow node executions for tenant {tenant_id} and app {app_id}", fg="green"))
def _delete_app_workflow_app_logs(tenant_id: str, app_id: str): def _delete_app_workflow_app_logs(tenant_id: str, app_id: str):
def del_workflow_app_log(workflow_app_log_id: str): def del_workflow_app_log(workflow_app_log_id: str):

View File

@ -10,6 +10,7 @@ from core.workflow.entities.node_entities import NodeRunResult
from core.workflow.nodes.document_extractor import DocumentExtractorNode, DocumentExtractorNodeData from core.workflow.nodes.document_extractor import DocumentExtractorNode, DocumentExtractorNodeData
from core.workflow.nodes.document_extractor.node import ( from core.workflow.nodes.document_extractor.node import (
_extract_text_from_docx, _extract_text_from_docx,
_extract_text_from_excel,
_extract_text_from_pdf, _extract_text_from_pdf,
_extract_text_from_plain_text, _extract_text_from_plain_text,
) )
@ -149,7 +150,7 @@ def test_extract_text_from_plain_text_non_utf8():
temp_file.write(non_utf8_content) temp_file.write(non_utf8_content)
temp_file.seek(0) temp_file.seek(0)
text = _extract_text_from_plain_text(temp_file.read()) text = _extract_text_from_plain_text(temp_file.read())
assert text == "Hello, world." assert text == "Hello, world©."
@patch("pypdfium2.PdfDocument") @patch("pypdfium2.PdfDocument")
@ -182,3 +183,181 @@ def test_extract_text_from_docx(mock_document):
def test_node_type(document_extractor_node): def test_node_type(document_extractor_node):
assert document_extractor_node._node_type == NodeType.DOCUMENT_EXTRACTOR assert document_extractor_node._node_type == NodeType.DOCUMENT_EXTRACTOR
@patch("pandas.ExcelFile")
def test_extract_text_from_excel_single_sheet(mock_excel_file):
"""Test extracting text from Excel file with single sheet."""
# Mock DataFrame
mock_df = Mock()
mock_df.dropna = Mock()
mock_df.to_markdown.return_value = "| Name | Age |\n|------|-----|\n| John | 25 |"
# Mock ExcelFile
mock_excel_instance = Mock()
mock_excel_instance.sheet_names = ["Sheet1"]
mock_excel_instance.parse.return_value = mock_df
mock_excel_file.return_value = mock_excel_instance
file_content = b"fake_excel_content"
result = _extract_text_from_excel(file_content)
expected = "| Name | Age |\n|------|-----|\n| John | 25 |\n\n"
assert result == expected
mock_excel_file.assert_called_once()
mock_df.dropna.assert_called_once_with(how="all", inplace=True)
mock_df.to_markdown.assert_called_once_with(index=False, floatfmt="")
@patch("pandas.ExcelFile")
def test_extract_text_from_excel_multiple_sheets(mock_excel_file):
"""Test extracting text from Excel file with multiple sheets."""
# Mock DataFrames for different sheets
mock_df1 = Mock()
mock_df1.dropna = Mock()
mock_df1.to_markdown.return_value = "| Product | Price |\n|---------|-------|\n| Apple | 1.50 |"
mock_df2 = Mock()
mock_df2.dropna = Mock()
mock_df2.to_markdown.return_value = "| City | Population |\n|------|------------|\n| NYC | 8000000 |"
# Mock ExcelFile
mock_excel_instance = Mock()
mock_excel_instance.sheet_names = ["Products", "Cities"]
mock_excel_instance.parse.side_effect = [mock_df1, mock_df2]
mock_excel_file.return_value = mock_excel_instance
file_content = b"fake_excel_content_multiple_sheets"
result = _extract_text_from_excel(file_content)
expected = (
"| Product | Price |\n|---------|-------|\n| Apple | 1.50 |\n\n"
"| City | Population |\n|------|------------|\n| NYC | 8000000 |\n\n"
)
assert result == expected
assert mock_excel_instance.parse.call_count == 2
@patch("pandas.ExcelFile")
def test_extract_text_from_excel_empty_sheets(mock_excel_file):
"""Test extracting text from Excel file with empty sheets."""
# Mock empty DataFrame
mock_df = Mock()
mock_df.dropna = Mock()
mock_df.to_markdown.return_value = ""
# Mock ExcelFile
mock_excel_instance = Mock()
mock_excel_instance.sheet_names = ["EmptySheet"]
mock_excel_instance.parse.return_value = mock_df
mock_excel_file.return_value = mock_excel_instance
file_content = b"fake_excel_empty_content"
result = _extract_text_from_excel(file_content)
expected = "\n\n"
assert result == expected
@patch("pandas.ExcelFile")
def test_extract_text_from_excel_sheet_parse_error(mock_excel_file):
"""Test handling of sheet parsing errors - should continue with other sheets."""
# Mock DataFrames - one successful, one that raises exception
mock_df_success = Mock()
mock_df_success.dropna = Mock()
mock_df_success.to_markdown.return_value = "| Data | Value |\n|------|-------|\n| Test | 123 |"
# Mock ExcelFile
mock_excel_instance = Mock()
mock_excel_instance.sheet_names = ["GoodSheet", "BadSheet"]
mock_excel_instance.parse.side_effect = [mock_df_success, Exception("Parse error")]
mock_excel_file.return_value = mock_excel_instance
file_content = b"fake_excel_mixed_content"
result = _extract_text_from_excel(file_content)
expected = "| Data | Value |\n|------|-------|\n| Test | 123 |\n\n"
assert result == expected
@patch("pandas.ExcelFile")
def test_extract_text_from_excel_file_error(mock_excel_file):
"""Test handling of Excel file reading errors."""
mock_excel_file.side_effect = Exception("Invalid Excel file")
file_content = b"invalid_excel_content"
with pytest.raises(Exception) as exc_info:
_extract_text_from_excel(file_content)
# Note: The function should raise TextExtractionError, but since it's not imported in the test,
# we check for the general Exception pattern
assert "Failed to extract text from Excel file" in str(exc_info.value)
@patch("pandas.ExcelFile")
def test_extract_text_from_excel_io_bytesio_usage(mock_excel_file):
"""Test that BytesIO is properly used with the file content."""
import io
# Mock DataFrame
mock_df = Mock()
mock_df.dropna = Mock()
mock_df.to_markdown.return_value = "| Test | Data |\n|------|------|\n| 1 | A |"
# Mock ExcelFile
mock_excel_instance = Mock()
mock_excel_instance.sheet_names = ["TestSheet"]
mock_excel_instance.parse.return_value = mock_df
mock_excel_file.return_value = mock_excel_instance
file_content = b"test_excel_bytes"
result = _extract_text_from_excel(file_content)
# Verify that ExcelFile was called with a BytesIO object
mock_excel_file.assert_called_once()
call_args = mock_excel_file.call_args[0][0]
assert isinstance(call_args, io.BytesIO)
expected = "| Test | Data |\n|------|------|\n| 1 | A |\n\n"
assert result == expected
@patch("pandas.ExcelFile")
def test_extract_text_from_excel_all_sheets_fail(mock_excel_file):
"""Test when all sheets fail to parse - should return empty string."""
# Mock ExcelFile
mock_excel_instance = Mock()
mock_excel_instance.sheet_names = ["BadSheet1", "BadSheet2"]
mock_excel_instance.parse.side_effect = [Exception("Error 1"), Exception("Error 2")]
mock_excel_file.return_value = mock_excel_instance
file_content = b"fake_excel_all_bad_sheets"
result = _extract_text_from_excel(file_content)
# Should return empty string when all sheets fail
assert result == ""
@patch("pandas.ExcelFile")
def test_extract_text_from_excel_markdown_formatting(mock_excel_file):
    """The sheet must be rendered via to_markdown(index=False, floatfmt="")."""
    frame = Mock()
    frame.dropna = Mock()
    frame.to_markdown.return_value = "| Float | Int |\n|-------|-----|\n| 123456.78 | 42 |"

    workbook = Mock()
    workbook.sheet_names = ["NumberSheet"]
    workbook.parse.return_value = frame
    mock_excel_file.return_value = workbook

    result = _extract_text_from_excel(b"fake_excel_numbers")

    # Index column suppressed and float formatting left raw.
    frame.to_markdown.assert_called_once_with(index=False, floatfmt="")

    assert result == "| Float | Int |\n|-------|-----|\n| 123456.78 | 42 |\n\n"

View File

@ -2,14 +2,13 @@ import json
from unittest import mock from unittest import mock
from uuid import uuid4 from uuid import uuid4
import contexts
from constants import HIDDEN_VALUE from constants import HIDDEN_VALUE
from core.variables import FloatVariable, IntegerVariable, SecretVariable, StringVariable from core.variables import FloatVariable, IntegerVariable, SecretVariable, StringVariable
from models.workflow import Workflow, WorkflowNodeExecution, is_system_variable_editable from models.workflow import Workflow, WorkflowNodeExecution, is_system_variable_editable
def test_environment_variables(): def test_environment_variables():
contexts.tenant_id.set("tenant_id") # tenant_id context variable removed - using current_user.current_tenant_id directly
# Create a Workflow instance # Create a Workflow instance
workflow = Workflow( workflow = Workflow(
@ -38,9 +37,14 @@ def test_environment_variables():
{"name": "var4", "value": 3.14, "id": str(uuid4()), "selector": ["env", "var4"]} {"name": "var4", "value": 3.14, "id": str(uuid4()), "selector": ["env", "var4"]}
) )
# Mock current_user as an EndUser
mock_user = mock.Mock()
mock_user.tenant_id = "tenant_id"
with ( with (
mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"), mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"),
mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"), mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"),
mock.patch("models.workflow.current_user", mock_user),
): ):
# Set the environment_variables property of the Workflow instance # Set the environment_variables property of the Workflow instance
variables = [variable1, variable2, variable3, variable4] variables = [variable1, variable2, variable3, variable4]
@ -51,7 +55,7 @@ def test_environment_variables():
def test_update_environment_variables(): def test_update_environment_variables():
contexts.tenant_id.set("tenant_id") # tenant_id context variable removed - using current_user.current_tenant_id directly
# Create a Workflow instance # Create a Workflow instance
workflow = Workflow( workflow = Workflow(
@ -80,9 +84,14 @@ def test_update_environment_variables():
{"name": "var4", "value": 3.14, "id": str(uuid4()), "selector": ["env", "var4"]} {"name": "var4", "value": 3.14, "id": str(uuid4()), "selector": ["env", "var4"]}
) )
# Mock current_user as an EndUser
mock_user = mock.Mock()
mock_user.tenant_id = "tenant_id"
with ( with (
mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"), mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"),
mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"), mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"),
mock.patch("models.workflow.current_user", mock_user),
): ):
variables = [variable1, variable2, variable3, variable4] variables = [variable1, variable2, variable3, variable4]
@ -104,7 +113,7 @@ def test_update_environment_variables():
def test_to_dict(): def test_to_dict():
contexts.tenant_id.set("tenant_id") # tenant_id context variable removed - using current_user.current_tenant_id directly
# Create a Workflow instance # Create a Workflow instance
workflow = Workflow( workflow = Workflow(
@ -121,9 +130,14 @@ def test_to_dict():
# Create some EnvironmentVariable instances # Create some EnvironmentVariable instances
# Mock current_user as an EndUser
mock_user = mock.Mock()
mock_user.tenant_id = "tenant_id"
with ( with (
mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"), mock.patch("core.helper.encrypter.encrypt_token", return_value="encrypted_token"),
mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"), mock.patch("core.helper.encrypter.decrypt_token", return_value="secret"),
mock.patch("models.workflow.current_user", mock_user),
): ):
# Set the environment_variables property of the Workflow instance # Set the environment_variables property of the Workflow instance
workflow.environment_variables = [ workflow.environment_variables = [

View File

@ -0,0 +1,158 @@
from unittest.mock import Mock, patch
import pytest
from models.account import Account, TenantAccountRole
from models.dataset import Dataset, DatasetPermission, DatasetPermissionEnum
from services.dataset_service import DatasetService
from services.errors.account import NoPermissionError
class TestDatasetPermissionService:
    """Test cases for dataset permission checking functionality"""

    def setup_method(self):
        """Set up test fixtures"""
        # Mock tenant and user
        # Fixed IDs so assertions below can reference them directly.
        self.tenant_id = "test-tenant-123"
        self.creator_id = "creator-456"
        self.normal_user_id = "normal-789"
        self.owner_user_id = "owner-999"

        # Mock dataset
        # Dataset belongs to tenant_id and was created by creator_id.
        self.dataset = Mock(spec=Dataset)
        self.dataset.id = "dataset-123"
        self.dataset.tenant_id = self.tenant_id
        self.dataset.created_by = self.creator_id

        # Mock users
        # creator: EDITOR role, same tenant, authored the dataset.
        self.creator_user = Mock(spec=Account)
        self.creator_user.id = self.creator_id
        self.creator_user.current_tenant_id = self.tenant_id
        self.creator_user.current_role = TenantAccountRole.EDITOR

        # normal: NORMAL role, same tenant, did not create the dataset.
        self.normal_user = Mock(spec=Account)
        self.normal_user.id = self.normal_user_id
        self.normal_user.current_tenant_id = self.tenant_id
        self.normal_user.current_role = TenantAccountRole.NORMAL

        # owner: OWNER role, same tenant — expected to bypass permission checks.
        self.owner_user = Mock(spec=Account)
        self.owner_user.id = self.owner_user_id
        self.owner_user.current_tenant_id = self.tenant_id
        self.owner_user.current_role = TenantAccountRole.OWNER

    def test_permission_check_different_tenant_should_fail(self):
        """Test that users from different tenants cannot access dataset"""
        self.normal_user.current_tenant_id = "different-tenant"

        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."):
            DatasetService.check_dataset_permission(self.dataset, self.normal_user)

    def test_owner_can_access_any_dataset(self):
        """Test that tenant owners can access any dataset regardless of permission"""
        # Even the most restrictive permission should not block an OWNER.
        self.dataset.permission = DatasetPermissionEnum.ONLY_ME

        # Should not raise any exception
        DatasetService.check_dataset_permission(self.dataset, self.owner_user)

    def test_only_me_permission_creator_can_access(self):
        """Test ONLY_ME permission allows only creator to access"""
        self.dataset.permission = DatasetPermissionEnum.ONLY_ME

        # Creator should be able to access
        DatasetService.check_dataset_permission(self.dataset, self.creator_user)

    def test_only_me_permission_others_cannot_access(self):
        """Test ONLY_ME permission denies access to non-creators"""
        self.dataset.permission = DatasetPermissionEnum.ONLY_ME

        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."):
            DatasetService.check_dataset_permission(self.dataset, self.normal_user)

    def test_all_team_permission_allows_access(self):
        """Test ALL_TEAM permission allows any team member to access"""
        self.dataset.permission = DatasetPermissionEnum.ALL_TEAM

        # Should not raise any exception for team members
        DatasetService.check_dataset_permission(self.dataset, self.normal_user)
        DatasetService.check_dataset_permission(self.dataset, self.creator_user)

    @patch("services.dataset_service.db.session")
    def test_partial_team_permission_creator_can_access(self, mock_session):
        """Test PARTIAL_TEAM permission allows creator to access"""
        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM

        # Should not raise any exception for creator
        DatasetService.check_dataset_permission(self.dataset, self.creator_user)

        # Should not query database for creator
        # (creator short-circuits before any DatasetPermission lookup)
        mock_session.query.assert_not_called()

    @patch("services.dataset_service.db.session")
    def test_partial_team_permission_with_explicit_permission(self, mock_session):
        """Test PARTIAL_TEAM permission allows users with explicit permission"""
        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM

        # Mock database query to return a permission record
        mock_permission = Mock(spec=DatasetPermission)
        mock_session.query().filter_by().first.return_value = mock_permission

        # Should not raise any exception
        DatasetService.check_dataset_permission(self.dataset, self.normal_user)

        # Verify database was queried correctly
        mock_session.query().filter_by.assert_called_with(dataset_id=self.dataset.id, account_id=self.normal_user.id)

    @patch("services.dataset_service.db.session")
    def test_partial_team_permission_without_explicit_permission(self, mock_session):
        """Test PARTIAL_TEAM permission denies users without explicit permission"""
        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM

        # Mock database query to return None (no permission record)
        mock_session.query().filter_by().first.return_value = None

        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."):
            DatasetService.check_dataset_permission(self.dataset, self.normal_user)

        # Verify database was queried correctly
        mock_session.query().filter_by.assert_called_with(dataset_id=self.dataset.id, account_id=self.normal_user.id)

    @patch("services.dataset_service.db.session")
    def test_partial_team_permission_non_creator_without_permission_fails(self, mock_session):
        """Test that non-creators without explicit permission are denied access"""
        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM

        # Create a different user (not the creator)
        other_user = Mock(spec=Account)
        other_user.id = "other-user-123"
        other_user.current_tenant_id = self.tenant_id
        other_user.current_role = TenantAccountRole.NORMAL

        # Mock database query to return None (no permission record)
        mock_session.query().filter_by().first.return_value = None

        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset."):
            DatasetService.check_dataset_permission(self.dataset, other_user)

    def test_partial_team_permission_uses_correct_enum(self):
        """Test that the method correctly uses DatasetPermissionEnum.PARTIAL_TEAM"""
        # This test ensures we're using the enum instead of string literals
        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM

        # Creator should always have access
        DatasetService.check_dataset_permission(self.dataset, self.creator_user)

    @patch("services.dataset_service.logging")
    @patch("services.dataset_service.db.session")
    def test_permission_denied_logs_debug_message(self, mock_session, mock_logging):
        """Test that permission denied events are logged"""
        self.dataset.permission = DatasetPermissionEnum.PARTIAL_TEAM
        mock_session.query().filter_by().first.return_value = None

        with pytest.raises(NoPermissionError):
            DatasetService.check_dataset_permission(self.dataset, self.normal_user)

        # Verify debug message was logged
        mock_logging.debug.assert_called_with(
            f"User {self.normal_user.id} does not have permission to access dataset {self.dataset.id}"
        )

301
api/uv.lock generated
View File

@ -36,7 +36,7 @@ wheels = [
[[package]] [[package]]
name = "aiohttp" name = "aiohttp"
version = "3.11.18" version = "3.12.2"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "aiohappyeyeballs" }, { name = "aiohappyeyeballs" },
@ -47,40 +47,42 @@ dependencies = [
{ name = "propcache" }, { name = "propcache" },
{ name = "yarl" }, { name = "yarl" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/63/e7/fa1a8c00e2c54b05dc8cb5d1439f627f7c267874e3f7bb047146116020f9/aiohttp-3.11.18.tar.gz", hash = "sha256:ae856e1138612b7e412db63b7708735cff4d38d0399f6a5435d3dac2669f558a", size = 7678653, upload_time = "2025-04-21T09:43:09.191Z" } sdist = { url = "https://files.pythonhosted.org/packages/06/a2/a946c4f5c54233c97788c2278ea50beaadf45211f452f932b36ce322f660/aiohttp-3.12.2.tar.gz", hash = "sha256:0018956472ee535d2cad761a5bb88eb4ad80f94cd86472cee26a244799f7c79f", size = 7780423, upload_time = "2025-05-27T00:04:27.79Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/2f/10/fd9ee4f9e042818c3c2390054c08ccd34556a3cb209d83285616434cf93e/aiohttp-3.11.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:427fdc56ccb6901ff8088544bde47084845ea81591deb16f957897f0f0ba1be9", size = 712088, upload_time = "2025-04-21T09:40:55.776Z" }, { url = "https://files.pythonhosted.org/packages/08/39/9866f5996a7db870464e1f153b9f6a3412167ee79293f138bad09de783cd/aiohttp-3.12.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:536a37af26ed50bd4f3cf7d989955e5a987e9343f1a55f5393e7950a6ac93fce", size = 701750, upload_time = "2025-05-27T00:01:47.574Z" },
{ url = "https://files.pythonhosted.org/packages/22/eb/6a77f055ca56f7aae2cd2a5607a3c9e7b9554f1497a069dcfcb52bfc9540/aiohttp-3.11.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c828b6d23b984255b85b9b04a5b963a74278b7356a7de84fda5e3b76866597b", size = 471450, upload_time = "2025-04-21T09:40:57.301Z" }, { url = "https://files.pythonhosted.org/packages/81/eb/187fba5f1c210bed03c4e4fe50b6cc64d18c6776e6d17887b527ee2fb806/aiohttp-3.12.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6f8fbb48953238e7ba8ab9dee6757a4f6b72cd6242eb7fe1cb004b24f91effee", size = 474015, upload_time = "2025-05-27T00:01:49.53Z" },
{ url = "https://files.pythonhosted.org/packages/78/dc/5f3c0d27c91abf0bb5d103e9c9b0ff059f60cf6031a5f06f456c90731f42/aiohttp-3.11.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c2eaa145bb36b33af1ff2860820ba0589e165be4ab63a49aebfd0981c173b66", size = 457836, upload_time = "2025-04-21T09:40:59.322Z" }, { url = "https://files.pythonhosted.org/packages/c5/e5/8f203120a8a932739face58614f8c93912ccd26c0b18da3f476b7372158b/aiohttp-3.12.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74190229bd54bc3df7090f634b0b7fe53c45fb41aae5fbfae462093ced35c950", size = 462282, upload_time = "2025-05-27T00:01:51.315Z" },
{ url = "https://files.pythonhosted.org/packages/49/7b/55b65af9ef48b9b811c91ff8b5b9de9650c71147f10523e278d297750bc8/aiohttp-3.11.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d518ce32179f7e2096bf4e3e8438cf445f05fedd597f252de9f54c728574756", size = 1690978, upload_time = "2025-04-21T09:41:00.795Z" }, { url = "https://files.pythonhosted.org/packages/7d/0f/9c33853f4f1c6c75a0f1b3e7b6d955f5883bd14a189232115e2e0c8633f2/aiohttp-3.12.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7af4737ab145fb1ac6e2db24ee206ee9e9f3abb1f7c6b74bd75c9ce0d36fe286", size = 1732474, upload_time = "2025-05-27T00:01:53.155Z" },
{ url = "https://files.pythonhosted.org/packages/a2/5a/3f8938c4f68ae400152b42742653477fc625d6bfe02e764f3521321c8442/aiohttp-3.11.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0700055a6e05c2f4711011a44364020d7a10fbbcd02fbf3e30e8f7e7fddc8717", size = 1745307, upload_time = "2025-04-21T09:41:02.89Z" }, { url = "https://files.pythonhosted.org/packages/e6/72/2dee9dd29a6ce5abbfa5ee7b75db00ce9c213aaea588476464285a3aee57/aiohttp-3.12.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2711392a2afe1dcf4a93b05a94ee25efa966971fa0bf3944f2ce101da182ce91", size = 1681143, upload_time = "2025-05-27T00:01:55.238Z" },
{ url = "https://files.pythonhosted.org/packages/b4/42/89b694a293333ef6f771c62da022163bcf44fb03d4824372d88e3dc12530/aiohttp-3.11.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8bd1cde83e4684324e6ee19adfc25fd649d04078179890be7b29f76b501de8e4", size = 1780692, upload_time = "2025-04-21T09:41:04.461Z" }, { url = "https://files.pythonhosted.org/packages/fc/2d/4eb92b7e42f7efb8ab22d0eca89e73b96653d6fbfb9847435ad29dee385d/aiohttp-3.12.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5169898d17a2ac30e31ea814832ad4cf6bb652459a031af40ed56c9d05894c80", size = 1779934, upload_time = "2025-05-27T00:01:57.212Z" },
{ url = "https://files.pythonhosted.org/packages/e2/ce/1a75384e01dd1bf546898b6062b1b5f7a59b6692ef802e4dd6db64fed264/aiohttp-3.11.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73b8870fe1c9a201b8c0d12c94fe781b918664766728783241a79e0468427e4f", size = 1676934, upload_time = "2025-04-21T09:41:06.728Z" }, { url = "https://files.pythonhosted.org/packages/41/df/c9dc8dd89e40e469386cfb0adbdf63be04e52a85562bae271c1a863de5b0/aiohttp-3.12.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a590566c5c139edfbeeb69de62c6868e6ef667322b0080489607acc39e92add", size = 1818982, upload_time = "2025-05-27T00:01:59.786Z" },
{ url = "https://files.pythonhosted.org/packages/a5/31/442483276e6c368ab5169797d9873b5875213cbcf7e74b95ad1c5003098a/aiohttp-3.11.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25557982dd36b9e32c0a3357f30804e80790ec2c4d20ac6bcc598533e04c6361", size = 1621190, upload_time = "2025-04-21T09:41:08.293Z" }, { url = "https://files.pythonhosted.org/packages/6f/b6/84fd20aca84651e373fd90187abe1daf7596ab5e79b6045b294496b73bea/aiohttp-3.12.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4be1c1adb604591a607abb9c4474eedc6add6739656ee91a9daddf35f7f9fa", size = 1721441, upload_time = "2025-05-27T00:02:01.729Z" },
{ url = "https://files.pythonhosted.org/packages/7b/83/90274bf12c079457966008a58831a99675265b6a34b505243e004b408934/aiohttp-3.11.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e889c9df381a2433802991288a61e5a19ceb4f61bd14f5c9fa165655dcb1fd1", size = 1658947, upload_time = "2025-04-21T09:41:11.054Z" }, { url = "https://files.pythonhosted.org/packages/86/9c/412603ca6e3be2656bc3b662828087f8b3a21f82fe20f94219ba7769a6dd/aiohttp-3.12.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0cf15667ecf20bfe545adb02882d895e10c8d5c821e46b1a62f22d5170c4803e", size = 1658517, upload_time = "2025-05-27T00:02:03.636Z" },
{ url = "https://files.pythonhosted.org/packages/91/c1/da9cee47a0350b78fdc93670ebe7ad74103011d7778ab4c382ca4883098d/aiohttp-3.11.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9ea345fda05bae217b6cce2acf3682ce3b13d0d16dd47d0de7080e5e21362421", size = 1654443, upload_time = "2025-04-21T09:41:13.213Z" }, { url = "https://files.pythonhosted.org/packages/c1/70/d1735c170aebdc4eda456768bb8714529a90743fd1de1bff075e33292ee9/aiohttp-3.12.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:875df9e4ed4f24af643f4e35bf267be3cb25b9461d25da4a0d181877a2b401e4", size = 1706789, upload_time = "2025-05-27T00:02:05.603Z" },
{ url = "https://files.pythonhosted.org/packages/c9/f2/73cbe18dc25d624f79a09448adfc4972f82ed6088759ddcf783cd201956c/aiohttp-3.11.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9f26545b9940c4b46f0a9388fd04ee3ad7064c4017b5a334dd450f616396590e", size = 1644169, upload_time = "2025-04-21T09:41:14.827Z" }, { url = "https://files.pythonhosted.org/packages/61/80/c0f85511b8f315cab5a86615d155d9584cd5d6f1d48c94f92dc3dffd4a7f/aiohttp-3.12.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:722fe14a899ee049562417449a449dfc7c616fdb5409f8a0a2c459815473767f", size = 1701950, upload_time = "2025-05-27T00:02:07.618Z" },
{ url = "https://files.pythonhosted.org/packages/5b/32/970b0a196c4dccb1b0cfa5b4dc3b20f63d76f1c608f41001a84b2fd23c3d/aiohttp-3.11.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3a621d85e85dccabd700294494d7179ed1590b6d07a35709bb9bd608c7f5dd1d", size = 1728532, upload_time = "2025-04-21T09:41:17.168Z" }, { url = "https://files.pythonhosted.org/packages/5e/f5/95a835814bd34378ad18d05e3351e6bd1035263ec20480f69f3688fa04ec/aiohttp-3.12.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:59668d843c91bd22abc1f70674270ce38e1dad3020284cccecc60f492d6f88ae", size = 1682137, upload_time = "2025-05-27T00:02:10.062Z" },
{ url = "https://files.pythonhosted.org/packages/0b/50/b1dc810a41918d2ea9574e74125eb053063bc5e14aba2d98966f7d734da0/aiohttp-3.11.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9c23fd8d08eb9c2af3faeedc8c56e134acdaf36e2117ee059d7defa655130e5f", size = 1750310, upload_time = "2025-04-21T09:41:19.353Z" }, { url = "https://files.pythonhosted.org/packages/89/35/ccf684cd9d343b1401be07f0c43793d8475fed2b2418e01f885bcdcd972b/aiohttp-3.12.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:64e48ed61d5c74b5a4a68fdb3fde664034e59788625ebf3fcae87fb5a2dbde7b", size = 1775737, upload_time = "2025-05-27T00:02:12.121Z" },
{ url = "https://files.pythonhosted.org/packages/95/24/39271f5990b35ff32179cc95537e92499d3791ae82af7dcf562be785cd15/aiohttp-3.11.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9e6b0e519067caa4fd7fb72e3e8002d16a68e84e62e7291092a5433763dc0dd", size = 1691580, upload_time = "2025-04-21T09:41:21.868Z" }, { url = "https://files.pythonhosted.org/packages/22/a8/0075064d24f4d4987ba8e73a67fc8c0c0075134abb087000316147d2bc77/aiohttp-3.12.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7061bce1accdfce6e02c80ac10efcdfcae95718f97f77fc5fbe3273b16b8d4bf", size = 1796197, upload_time = "2025-05-27T00:02:14.723Z" },
{ url = "https://files.pythonhosted.org/packages/6b/78/75d0353feb77f041460564f12fe58e456436bbc00cbbf5d676dbf0038cc2/aiohttp-3.11.18-cp311-cp311-win32.whl", hash = "sha256:122f3e739f6607e5e4c6a2f8562a6f476192a682a52bda8b4c6d4254e1138f4d", size = 417565, upload_time = "2025-04-21T09:41:24.78Z" }, { url = "https://files.pythonhosted.org/packages/4b/bc/de6c5969285b309a11582d0009cea97384f2cac9b2c88e3a35b642cd6d17/aiohttp-3.12.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ef392a613f53fc4c3e6ebba2c3b90729266139a3f534e7eba9bf04e2eac40287", size = 1709254, upload_time = "2025-05-27T00:02:17.949Z" },
{ url = "https://files.pythonhosted.org/packages/ed/97/b912dcb654634a813f8518de359364dfc45976f822116e725dc80a688eee/aiohttp-3.11.18-cp311-cp311-win_amd64.whl", hash = "sha256:e6f3c0a3a1e73e88af384b2e8a0b9f4fb73245afd47589df2afcab6b638fa0e6", size = 443652, upload_time = "2025-04-21T09:41:26.48Z" }, { url = "https://files.pythonhosted.org/packages/c1/12/bf9ce81a2954b421cd6acb90a41777075baec3a3a21fb0dd10b483ed3652/aiohttp-3.12.2-cp311-cp311-win32.whl", hash = "sha256:e405ccdd3cada578e5bc4000b7d35b80a345c832089d23b04be30c0e7606fb80", size = 419267, upload_time = "2025-05-27T00:02:19.796Z" },
{ url = "https://files.pythonhosted.org/packages/b5/d2/5bc436f42bf4745c55f33e1e6a2d69e77075d3e768e3d1a34f96ee5298aa/aiohttp-3.11.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:63d71eceb9cad35d47d71f78edac41fcd01ff10cacaa64e473d1aec13fa02df2", size = 706671, upload_time = "2025-04-21T09:41:28.021Z" }, { url = "https://files.pythonhosted.org/packages/97/6c/db68994b49a2c50a4a8943ba3696f66906ab09d206243f91ea8ede7b7d87/aiohttp-3.12.2-cp311-cp311-win_amd64.whl", hash = "sha256:a84cf5db31efc14e811ef830288614bf40093befd445efe743dc015d01e6e92c", size = 443663, upload_time = "2025-05-27T00:02:22.037Z" },
{ url = "https://files.pythonhosted.org/packages/fe/d0/2dbabecc4e078c0474abb40536bbde717fb2e39962f41c5fc7a216b18ea7/aiohttp-3.11.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d1929da615840969929e8878d7951b31afe0bac883d84418f92e5755d7b49508", size = 466169, upload_time = "2025-04-21T09:41:29.783Z" }, { url = "https://files.pythonhosted.org/packages/84/25/17af725b3855ad54eb1cb8e45962b05856a7e4986b64fbc6158331d7b64e/aiohttp-3.12.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:7679b2af5a1d43d8470672079baedc1a843e4f27a47b630fbe092833f9bc4e73", size = 692835, upload_time = "2025-05-27T00:02:24.514Z" },
{ url = "https://files.pythonhosted.org/packages/70/84/19edcf0b22933932faa6e0be0d933a27bd173da02dc125b7354dff4d8da4/aiohttp-3.11.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d0aebeb2392f19b184e3fdd9e651b0e39cd0f195cdb93328bd124a1d455cd0e", size = 457554, upload_time = "2025-04-21T09:41:31.327Z" }, { url = "https://files.pythonhosted.org/packages/cc/5d/770e9f17f0efeb1c40109535561ea7b0a3e9b654bd7853c27f3d62763086/aiohttp-3.12.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4d6941dd4d8f6dfd9292f391bc2e321c9583a9532b4e9b571b84f163bb3f8135", size = 467456, upload_time = "2025-05-27T00:02:26.861Z" },
{ url = "https://files.pythonhosted.org/packages/32/d0/e8d1f034ae5624a0f21e4fb3feff79342ce631f3a4d26bd3e58b31ef033b/aiohttp-3.11.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3849ead845e8444f7331c284132ab314b4dac43bfae1e3cf350906d4fff4620f", size = 1690154, upload_time = "2025-04-21T09:41:33.541Z" }, { url = "https://files.pythonhosted.org/packages/d8/ef/48eda5cd949b8af818d892b5ddf07cb15f0cf133e14c4ac9734ff32ba0a6/aiohttp-3.12.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8345cea33295cc28945c8365ac44ba383ebb757a599b384d752347f40671e984", size = 460294, upload_time = "2025-05-27T00:02:28.876Z" },
{ url = "https://files.pythonhosted.org/packages/16/de/2f9dbe2ac6f38f8495562077131888e0d2897e3798a0ff3adda766b04a34/aiohttp-3.11.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e8452ad6b2863709f8b3d615955aa0807bc093c34b8e25b3b52097fe421cb7f", size = 1733402, upload_time = "2025-04-21T09:41:35.634Z" }, { url = "https://files.pythonhosted.org/packages/94/1e/9724a45cb932b0c01c558493fac5f706a1a53740a77efc22c2f6764ce611/aiohttp-3.12.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8259a311666becf7049ae43c984208ac20eda5ea16aa5f26ea5d24b863f9afcd", size = 1707089, upload_time = "2025-05-27T00:02:30.889Z" },
{ url = "https://files.pythonhosted.org/packages/e0/04/bd2870e1e9aef990d14b6df2a695f17807baf5c85a4c187a492bda569571/aiohttp-3.11.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b8d2b42073611c860a37f718b3d61ae8b4c2b124b2e776e2c10619d920350ec", size = 1783958, upload_time = "2025-04-21T09:41:37.456Z" }, { url = "https://files.pythonhosted.org/packages/8a/a1/3b267d691a79472e6a0d9909363c2dc6cad44e60deb99385ce41e7926b40/aiohttp-3.12.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a6f09589cb5928ee793210806d35d69fffc78d46eca9acaa2d38cc30b3f194e", size = 1689737, upload_time = "2025-05-27T00:02:33.129Z" },
{ url = "https://files.pythonhosted.org/packages/23/06/4203ffa2beb5bedb07f0da0f79b7d9039d1c33f522e0d1a2d5b6218e6f2e/aiohttp-3.11.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fbf91f6a0ac317c0a07eb328a1384941872f6761f2e6f7208b63c4cc0a7ff6", size = 1695288, upload_time = "2025-04-21T09:41:39.756Z" }, { url = "https://files.pythonhosted.org/packages/d9/71/f04d5c86cfa5227ec2a54dd72b8de5b1930eb5c9ea75bd1c987b463cbb36/aiohttp-3.12.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c32972b485828f2b9326a95851520e9a92cdd97efe0a04ae62c7315e8d1098", size = 1744835, upload_time = "2025-05-27T00:02:35.553Z" },
{ url = "https://files.pythonhosted.org/packages/30/b2/e2285dda065d9f29ab4b23d8bcc81eb881db512afb38a3f5247b191be36c/aiohttp-3.11.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ff5625413fec55216da5eaa011cf6b0a2ed67a565914a212a51aa3755b0009", size = 1618871, upload_time = "2025-04-21T09:41:41.972Z" }, { url = "https://files.pythonhosted.org/packages/eb/88/deab2324c7468d6405cf9bae287276edef14a00fd00d084b3010e194e8d9/aiohttp-3.12.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:851d226ecaf30ec7f12d9e9793081ecd0e66fea7f6345bcb5283b39e9ea79c71", size = 1790987, upload_time = "2025-05-27T00:02:37.957Z" },
{ url = "https://files.pythonhosted.org/packages/57/e0/88f2987885d4b646de2036f7296ebea9268fdbf27476da551c1a7c158bc0/aiohttp-3.11.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7f33a92a2fde08e8c6b0c61815521324fc1612f397abf96eed86b8e31618fdb4", size = 1646262, upload_time = "2025-04-21T09:41:44.192Z" }, { url = "https://files.pythonhosted.org/packages/17/49/0b7c3fb319c4a9c75c41ec066e578bfd3ee847a550ef579d9fb6d65af3fa/aiohttp-3.12.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7127241e62621eabe437cce249a4858e79896abcdafed4c6f7a90d14d449066", size = 1710431, upload_time = "2025-05-27T00:02:40.05Z" },
{ url = "https://files.pythonhosted.org/packages/e0/19/4d2da508b4c587e7472a032290b2981f7caeca82b4354e19ab3df2f51d56/aiohttp-3.11.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:11d5391946605f445ddafda5eab11caf310f90cdda1fd99865564e3164f5cff9", size = 1677431, upload_time = "2025-04-21T09:41:46.049Z" }, { url = "https://files.pythonhosted.org/packages/eb/23/3c366db7343384cd81b0ec9609019dc34e14d25b7099d9390cfa561bb49f/aiohttp-3.12.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bca43af1c77f83e88641e74d1bd24b6089bb518fa0e6be97805a048bdac6bbc3", size = 1626229, upload_time = "2025-05-27T00:02:42.16Z" },
{ url = "https://files.pythonhosted.org/packages/eb/ae/047473ea50150a41440f3265f53db1738870b5a1e5406ece561ca61a3bf4/aiohttp-3.11.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3cc314245deb311364884e44242e00c18b5896e4fe6d5f942e7ad7e4cb640adb", size = 1637430, upload_time = "2025-04-21T09:41:47.973Z" }, { url = "https://files.pythonhosted.org/packages/99/3e/a6b7b55a38a6372b7e00f751778fc653cdd14770f1c20c5ed309f1b87768/aiohttp-3.12.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9d913623c7e3be188fe5c718bce186e0bbc5977e74c12e4832d540c3637b9f47", size = 1687290, upload_time = "2025-05-27T00:02:44.398Z" },
{ url = "https://files.pythonhosted.org/packages/11/32/c6d1e3748077ce7ee13745fae33e5cb1dac3e3b8f8787bf738a93c94a7d2/aiohttp-3.11.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f421843b0f70740772228b9e8093289924359d306530bcd3926f39acbe1adda", size = 1703342, upload_time = "2025-04-21T09:41:50.323Z" }, { url = "https://files.pythonhosted.org/packages/3c/69/0542c4c125e40c47e26bab47d8aff50f831c5626a4d4ab9da7018ee2d15c/aiohttp-3.12.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b4924ca6bc74cb630e47edaf111f1d05e13dfe3c1e580c35277dc998965913d3", size = 1708743, upload_time = "2025-05-27T00:02:47.116Z" },
{ url = "https://files.pythonhosted.org/packages/c5/1d/a3b57bfdbe285f0d45572d6d8f534fd58761da3e9cbc3098372565005606/aiohttp-3.11.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e220e7562467dc8d589e31c1acd13438d82c03d7f385c9cd41a3f6d1d15807c1", size = 1740600, upload_time = "2025-04-21T09:41:52.111Z" }, { url = "https://files.pythonhosted.org/packages/1d/0e/73a16e4008f78fa3538a1e564d0ecf026c7fd422f522e87af48337942f48/aiohttp-3.12.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a38e144942d4f0740dcb5be2ceb932cc45fc29e404fe64ffd5eef5bc62eafe39", size = 1649396, upload_time = "2025-05-27T00:02:49.23Z" },
{ url = "https://files.pythonhosted.org/packages/a5/71/f9cd2fed33fa2b7ce4d412fb7876547abb821d5b5520787d159d0748321d/aiohttp-3.11.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ab2ef72f8605046115bc9aa8e9d14fd49086d405855f40b79ed9e5c1f9f4faea", size = 1695131, upload_time = "2025-04-21T09:41:53.94Z" }, { url = "https://files.pythonhosted.org/packages/98/23/0a9b1547260d554f2c484c2f5e9d3800eca31a387146e5e0a0cfb6bfe17e/aiohttp-3.12.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6c31782dae093a507b94792d9f32978bf154d051d5237fdedbb9e74d9464d5dd", size = 1728959, upload_time = "2025-05-27T00:02:51.832Z" },
{ url = "https://files.pythonhosted.org/packages/97/97/d1248cd6d02b9de6aa514793d0dcb20099f0ec47ae71a933290116c070c5/aiohttp-3.11.18-cp312-cp312-win32.whl", hash = "sha256:12a62691eb5aac58d65200c7ae94d73e8a65c331c3a86a2e9670927e94339ee8", size = 412442, upload_time = "2025-04-21T09:41:55.689Z" }, { url = "https://files.pythonhosted.org/packages/31/d0/39b2b1111b81952015e7390ea07b404f417577e6ed4df1a683dc3d1a0a2f/aiohttp-3.12.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7f10d664b638f85acdeb7622f7b16773aaf7d67214a7c3b6075735f171d2f021", size = 1756933, upload_time = "2025-05-27T00:02:54.292Z" },
{ url = "https://files.pythonhosted.org/packages/33/9a/e34e65506e06427b111e19218a99abf627638a9703f4b8bcc3e3021277ed/aiohttp-3.11.18-cp312-cp312-win_amd64.whl", hash = "sha256:364329f319c499128fd5cd2d1c31c44f234c58f9b96cc57f743d16ec4f3238c8", size = 439444, upload_time = "2025-04-21T09:41:57.977Z" }, { url = "https://files.pythonhosted.org/packages/28/77/faf662e3b87e3d5a1ca3092c5cbeaa4349abdff3388bdc3c3c057302b380/aiohttp-3.12.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7181b4ebd70ad9731f4f7af03e3ed0ff003e49cefbf0b6846b5decb32abc30b7", size = 1716543, upload_time = "2025-05-27T00:02:56.621Z" },
{ url = "https://files.pythonhosted.org/packages/d3/1a/c79bae467e39439d5ef044f121b280aa0398cb23ecd77ee49f8a1759dde7/aiohttp-3.12.2-cp312-cp312-win32.whl", hash = "sha256:d602fc26cb307993965e5f5dacb2aaa7fea4f01c6658250658bef51e48dd454e", size = 414002, upload_time = "2025-05-27T00:02:59.144Z" },
{ url = "https://files.pythonhosted.org/packages/e2/dd/0d5f6aef062433cf88ea08ab2c8c3c1466e812311c9e0657370835ed27c9/aiohttp-3.12.2-cp312-cp312-win_amd64.whl", hash = "sha256:35df44dde19fcd146ed13e8847c70f8e138e91138f7615df2bd68b478ac04f99", size = 440104, upload_time = "2025-05-27T00:03:01.115Z" },
] ]
[[package]] [[package]]
@ -542,16 +544,16 @@ wheels = [
[[package]] [[package]]
name = "boto3-stubs" name = "boto3-stubs"
version = "1.38.22" version = "1.38.24"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "botocore-stubs" }, { name = "botocore-stubs" },
{ name = "types-s3transfer" }, { name = "types-s3transfer" },
{ name = "typing-extensions", marker = "python_full_version < '3.12'" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/6a/4b/3c2be7f6b42e6a955c62d863bf3ed8ff721ccae077f647cae602b03d0bb0/boto3_stubs-1.38.22.tar.gz", hash = "sha256:f43ecba814d2c649db3a22451e833635050cc240954ab3406bd84f52067657ac", size = 99045, upload_time = "2025-05-22T19:27:23.368Z" } sdist = { url = "https://files.pythonhosted.org/packages/15/db/357512e220359c1240ac2e8de6e358e181cdfb113397f71879d94193e89a/boto3_stubs-1.38.24.tar.gz", hash = "sha256:1607784fd379458e55ee92f4fd1d76bf328781a27ddbc322c3dd61b7ce4aa1e3", size = 99072, upload_time = "2025-05-27T21:31:32.857Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/7b/e1/ac4a0bd90d60d95fe3bc079db04c16938d7536c9b04587ae64f0d5e45617/boto3_stubs-1.38.22-py3-none-any.whl", hash = "sha256:af1dd47e087892878e78e2afb003e1c24b41ed7973876c0faf4da04d78c7f1f0", size = 68664, upload_time = "2025-05-22T19:27:09.004Z" }, { url = "https://files.pythonhosted.org/packages/f7/f4/09306180c2fd5e5f742dbe0f105dd5a245ed4f42e4a274b29a3a09ac7f97/boto3_stubs-1.38.24-py3-none-any.whl", hash = "sha256:b62c43115013d53bb65816b85ad71786de854d0f98c463d5d3d8d228f1c2c334", size = 68669, upload_time = "2025-05-27T21:31:27.343Z" },
] ]
[package.optional-dependencies] [package.optional-dependencies]
@ -575,14 +577,14 @@ wheels = [
[[package]] [[package]]
name = "botocore-stubs" name = "botocore-stubs"
version = "1.38.19" version = "1.38.24"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "types-awscrt" }, { name = "types-awscrt" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/43/70/6204c97f8d8362364f11c16085566abdcaa114c264d3a4d709ff697b203b/botocore_stubs-1.38.19.tar.gz", hash = "sha256:84f67a42bb240a8ea0c5fe4f05d497cc411177db600bc7012182e499ac24bf19", size = 42269, upload_time = "2025-05-19T20:18:13.556Z" } sdist = { url = "https://files.pythonhosted.org/packages/d7/23/e482bb36372f58ea3751344e285d456c44a91220bcf0384e48e1f7895a67/botocore_stubs-1.38.24.tar.gz", hash = "sha256:ed346917591d1b74992eadc8bd020a09d55228cbdf9d3273a4db2b88f0bdb8f9", size = 42292, upload_time = "2025-05-28T02:23:31.576Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/b4/ce/28b143452c22b678678d832bf8b41218e3d319bf94062b48c28fe5d81163/botocore_stubs-1.38.19-py3-none-any.whl", hash = "sha256:66fd7d231c21134a12acbe313ef7a6b152cbf9bfd7bfa12a62f8c33e94737e26", size = 65603, upload_time = "2025-05-19T20:18:10.445Z" }, { url = "https://files.pythonhosted.org/packages/29/77/c9b8f3943b27718bce7277d9bad72fab7c80f41130880c593faf2511ef95/botocore_stubs-1.38.24-py3-none-any.whl", hash = "sha256:15fcbd07541b7f82271637cfbc88e60e7ec8bccdbe9b33d6611b63b5d22ea7eb", size = 65629, upload_time = "2025-05-28T02:23:28.962Z" },
] ]
[[package]] [[package]]
@ -880,14 +882,14 @@ wheels = [
[[package]] [[package]]
name = "click" name = "click"
version = "8.1.8" version = "8.2.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" }, { name = "colorama", marker = "sys_platform == 'win32'" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload_time = "2024-12-21T18:38:44.339Z" } sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload_time = "2025-05-20T23:19:49.832Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload_time = "2024-12-21T18:38:41.666Z" }, { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload_time = "2025-05-20T23:19:47.796Z" },
] ]
[[package]] [[package]]
@ -1546,7 +1548,7 @@ vdb = [
{ name = "pymilvus", specifier = "~=2.5.0" }, { name = "pymilvus", specifier = "~=2.5.0" },
{ name = "pymochow", specifier = "==1.3.1" }, { name = "pymochow", specifier = "==1.3.1" },
{ name = "pyobvector", specifier = "~=0.1.6" }, { name = "pyobvector", specifier = "~=0.1.6" },
{ name = "qdrant-client", specifier = "==1.7.3" }, { name = "qdrant-client", specifier = "==1.9.0" },
{ name = "tablestore", specifier = "==6.1.0" }, { name = "tablestore", specifier = "==6.1.0" },
{ name = "tcvectordb", specifier = "~=1.6.4" }, { name = "tcvectordb", specifier = "~=1.6.4" },
{ name = "tidb-vector", specifier = "==0.0.9" }, { name = "tidb-vector", specifier = "==0.0.9" },
@ -1881,11 +1883,11 @@ wheels = [
[[package]] [[package]]
name = "fsspec" name = "fsspec"
version = "2025.5.0" version = "2025.5.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f2/77/deb99b97981e2e191913454da82d406702405178631c31cd623caebaf1b1/fsspec-2025.5.0.tar.gz", hash = "sha256:e4f4623bb6221f7407fd695cc535d1f857a077eb247580f4ada34f5dc25fd5c8", size = 300989, upload_time = "2025-05-20T15:46:22.484Z" } sdist = { url = "https://files.pythonhosted.org/packages/00/f7/27f15d41f0ed38e8fcc488584b57e902b331da7f7c6dcda53721b15838fc/fsspec-2025.5.1.tar.gz", hash = "sha256:2e55e47a540b91843b755e83ded97c6e897fa0942b11490113f09e9c443c2475", size = 303033, upload_time = "2025-05-24T12:03:23.792Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/a9/a7022f58e081149ec0184c31ea81dcee605e1d46380b48122e1ef94ac24e/fsspec-2025.5.0-py3-none-any.whl", hash = "sha256:0ca253eca6b5333d8a2b8bd98c7326fe821f1f0fdbd34e1b445bddde8e804c95", size = 196164, upload_time = "2025-05-20T15:46:20.89Z" }, { url = "https://files.pythonhosted.org/packages/bb/61/78c7b3851add1481b048b5fdc29067397a1784e2910592bc81bb3f608635/fsspec-2025.5.1-py3-none-any.whl", hash = "sha256:24d3a2e663d5fc735ab256263c4075f374a174c3410c0b25e5bd1970bceaa462", size = 199052, upload_time = "2025-05-24T12:03:21.66Z" },
] ]
[[package]] [[package]]
@ -2375,41 +2377,56 @@ wheels = [
] ]
[[package]] [[package]]
name = "hiredis" name = "hf-xet"
version = "3.2.0" version = "1.1.2"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/24/c9/5085131f1739143fad09708fc229866f6f45a1b8c137e8104ac4be2330db/hiredis-3.2.0.tar.gz", hash = "sha256:889e8820e27aa3c8e92aa62fcce0e050985cf19a27bb0d5fabbbed1cf73aca8e", size = 89051, upload_time = "2025-05-22T09:49:20.133Z" } sdist = { url = "https://files.pythonhosted.org/packages/95/be/58f20728a5b445f8b064e74f0618897b3439f5ef90934da1916b9dfac76f/hf_xet-1.1.2.tar.gz", hash = "sha256:3712d6d4819d3976a1c18e36db9f503e296283f9363af818f50703506ed63da3", size = 467009, upload_time = "2025-05-16T20:44:34.944Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/41/99/0857c3382efed5ff51f1510a71a41ea9380a8b2eb1997c9cdd9c95e5c004/hiredis-3.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:e003033c1ee4ca79bec7cc7c7c416622e501cf62c07500c5801f526950adbe91", size = 82423, upload_time = "2025-05-22T09:47:34.281Z" }, { url = "https://files.pythonhosted.org/packages/45/ae/f1a63f75d9886f18a80220ba31a1c7b9c4752f03aae452f358f538c6a991/hf_xet-1.1.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:dfd1873fd648488c70735cb60f7728512bca0e459e61fcd107069143cd798469", size = 2642559, upload_time = "2025-05-16T20:44:30.217Z" },
{ url = "https://files.pythonhosted.org/packages/33/e7/be65070f494d68aae88abef803164aa128e8013022cefb4b5dea8d5217f9/hiredis-3.2.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:de4fdd884031ebc2810249a2dbcd3444c49fe21ec5c3cf4aa154b144e5d51cb4", size = 45226, upload_time = "2025-05-22T09:47:35.111Z" }, { url = "https://files.pythonhosted.org/packages/50/ab/d2c83ae18f1015d926defd5bfbe94c62d15e93f900e6a192e318ee947105/hf_xet-1.1.2-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:29b584983b2d977c44157d9241dcf0fd50acde0b7bff8897fe4386912330090d", size = 2541360, upload_time = "2025-05-16T20:44:29.056Z" },
{ url = "https://files.pythonhosted.org/packages/98/23/e6f24c7f27a3fcdae75b31098571205622e469beab1219f4f29b6b3b1e35/hiredis-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6d9f7e16bc92d203ea5a45eb9b0c074682d86a6aff78014c7cee3e8ae72addda", size = 43238, upload_time = "2025-05-22T09:47:35.931Z" }, { url = "https://files.pythonhosted.org/packages/9f/a7/693dc9f34f979e30a378125e2150a0b2d8d166e6d83ce3950eeb81e560aa/hf_xet-1.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b29ac84298147fe9164cc55ad994ba47399f90b5d045b0b803b99cf5f06d8ec", size = 5183081, upload_time = "2025-05-16T20:44:27.505Z" },
{ url = "https://files.pythonhosted.org/packages/c7/35/b4d8faf91ec96b8374aaef53f4f2819cd9e083db7d50cc24727355b2b4d2/hiredis-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a793b79034dc3aabdde2f2ec52cefdd702e2c7d7493329a15504a20ed24f19e", size = 169625, upload_time = "2025-05-22T09:47:36.821Z" }, { url = "https://files.pythonhosted.org/packages/3d/23/c48607883f692a36c0a7735f47f98bad32dbe459a32d1568c0f21576985d/hf_xet-1.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d921ba32615676e436a0d15e162331abc9ed43d440916b1d836dc27ce1546173", size = 5356100, upload_time = "2025-05-16T20:44:25.681Z" },
{ url = "https://files.pythonhosted.org/packages/97/25/08e641bba33c0ea8be7113d090c9d3b7037a7f0abcbed1e10f3f1777f38b/hiredis-3.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6672e215e06f01f8459f772dd8930b8a8ccbb13f6a2508a0e5bf548eac5c61f3", size = 165801, upload_time = "2025-05-22T09:47:37.825Z" }, { url = "https://files.pythonhosted.org/packages/eb/5b/b2316c7f1076da0582b52ea228f68bea95e243c388440d1dc80297c9d813/hf_xet-1.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d9b03c34e13c44893ab6e8fea18ee8d2a6878c15328dd3aabedbdd83ee9f2ed3", size = 5647688, upload_time = "2025-05-16T20:44:31.867Z" },
{ url = "https://files.pythonhosted.org/packages/f8/25/8a13735d9dbfb496227ad536573f099c2747faacf7cc48d7332ff4bfcf78/hiredis-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c36dcc3f9e3819c479350b53ceccc2b7e06cd2c2e8b0d40132529b59575071c", size = 180613, upload_time = "2025-05-22T09:47:38.841Z" }, { url = "https://files.pythonhosted.org/packages/2c/98/e6995f0fa579929da7795c961f403f4ee84af36c625963f52741d56f242c/hf_xet-1.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:01b18608955b3d826307d37da8bd38b28a46cd2d9908b3a3655d1363274f941a", size = 5322627, upload_time = "2025-05-16T20:44:33.677Z" },
{ url = "https://files.pythonhosted.org/packages/d4/84/939dd100f98a4be6d8867a06acd825b9a1463b59bca103c3bdcd8e09d883/hiredis-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3373dc51507633484392ce3b7ecd906b4a755ede54ab6cec070922385156d526", size = 169939, upload_time = "2025-05-22T09:47:39.901Z" }, { url = "https://files.pythonhosted.org/packages/59/40/8f1d5a44a64d8bf9e3c19576e789f716af54875b46daae65426714e75db1/hf_xet-1.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:3562902c81299b09f3582ddfb324400c6a901a2f3bc854f83556495755f4954c", size = 2739542, upload_time = "2025-05-16T20:44:36.287Z" },
{ url = "https://files.pythonhosted.org/packages/00/43/628fed2bbf059fd766d696abb99f45ebdc84b4dfbc984e51ffd2f15f38ab/hiredis-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fa00f001e4ce27157d7d0a2124dbb7f17de280bb0aa9e65eda46a94f951abc1", size = 170077, upload_time = "2025-05-22T09:47:40.876Z" }, ]
{ url = "https://files.pythonhosted.org/packages/2b/d5/ba53cf889d594d8213371c8882ac49b4d4f94629d63e81e3543c07adff9f/hiredis-3.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c177179c3a2dd492729e6ba3d39f5d76e6b456d6657db7fc44639e9aa08029d2", size = 164161, upload_time = "2025-05-22T09:47:41.81Z" },
{ url = "https://files.pythonhosted.org/packages/24/26/f81f88a71cc082e2b20d0458d396d5cb3d0080dd5791bfec23f0ad54a74f/hiredis-3.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fe39bab2d7f2b4d2578f2c9d7c835c41cbfdf0a5e52b581400433f583818de1f", size = 162593, upload_time = "2025-05-22T09:47:43.165Z" }, [[package]]
{ url = "https://files.pythonhosted.org/packages/aa/cb/49f4d37aff34c6eaf2c14a34160e8be175587e7036f350dadf1b67e656e8/hiredis-3.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f862aa704f73e9df5ec948cb7d9434f19f8245e56372a672f5af56dc72cbff3a", size = 174810, upload_time = "2025-05-22T09:47:44.164Z" }, name = "hiredis"
{ url = "https://files.pythonhosted.org/packages/8b/f0/d2e51cafd41cceb1a3aa864f7485bc98ed157d1379ea9d4c562a89616b72/hiredis-3.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:38bf4d4faa127a240ea94c049c87064d010f9778cd6f48c84902b2247f62c363", size = 167061, upload_time = "2025-05-22T09:47:45.286Z" }, version = "3.2.1"
{ url = "https://files.pythonhosted.org/packages/cf/92/9318d10282e0bf4d098b253c3c4a94ca1a9086a3c83e391efff338745129/hiredis-3.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d8555c33bd1a79f516ca263934dfc069a7881a08ba3b423ffe446bf2904da937", size = 164833, upload_time = "2025-05-22T09:47:46.231Z" }, source = { registry = "https://pypi.org/simple" }
{ url = "https://files.pythonhosted.org/packages/d2/09/b4efa1db0ccce98b557c4a517cf41376f766250f5f24becded74edee773b/hiredis-3.2.0-cp311-cp311-win32.whl", hash = "sha256:8eae352e54076bce53dc781f6c59b539d91fb51070f6acc71300ab97367fcf4e", size = 20402, upload_time = "2025-05-22T09:47:47.124Z" }, sdist = { url = "https://files.pythonhosted.org/packages/f7/08/24b72f425b75e1de7442fb1740f69ca66d5820b9f9c0e2511ff9aadab3b7/hiredis-3.2.1.tar.gz", hash = "sha256:5a5f64479bf04dd829fe7029fad0ea043eac4023abc6e946668cbbec3493a78d", size = 89096, upload_time = "2025-05-23T11:41:57.227Z" }
{ url = "https://files.pythonhosted.org/packages/2c/22/1ae810ccefb72e6166423ef8be05a42b09db1c0f45c7a536ec7c85b7c7bd/hiredis-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:d79d14390025608ab413dcc7c54a0e5d6d4141d6ddfabb0003eb3ff9c58da29b", size = 22085, upload_time = "2025-05-22T09:47:48.264Z" }, wheels = [
{ url = "https://files.pythonhosted.org/packages/a6/a9/6ca80fdc9f221889499f458c0dc4d547554957f4c7dc546ae8c694d1256a/hiredis-3.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:33cbedebd6f8a395a030e90fb11c7fc2e94677b4e969deb455e1ffd47b84462a", size = 82627, upload_time = "2025-05-22T09:47:49.078Z" }, { url = "https://files.pythonhosted.org/packages/48/84/2ea9636f2ba0811d9eb3bebbbfa84f488238180ddab70c9cb7fa13419d78/hiredis-3.2.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:e4ae0be44cab5e74e6e4c4a93d04784629a45e781ff483b136cc9e1b9c23975c", size = 82425, upload_time = "2025-05-23T11:39:54.135Z" },
{ url = "https://files.pythonhosted.org/packages/a0/80/ed747e94dac50909bba10da77918ce84c95e41a65221d38760b06977e168/hiredis-3.2.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:459df81a1ab6bfcb6d687c650c9f8a73d35aae612cef649ecb1af286982c1e59", size = 45406, upload_time = "2025-05-22T09:47:49.932Z" }, { url = "https://files.pythonhosted.org/packages/fc/24/b9ebf766a99998fda3975937afa4912e98de9d7f8d0b83f48096bdd961c1/hiredis-3.2.1-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:24647e84c9f552934eb60b7f3d2116f8b64a7020361da9369e558935ca45914d", size = 45231, upload_time = "2025-05-23T11:39:55.455Z" },
{ url = "https://files.pythonhosted.org/packages/2f/d2/f78988e2c131839f47933a461b35a864166472fd2d40c277c9b179042735/hiredis-3.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4627bae1ef33d99188be0583f8a61a9a1680c3402ec5aa34ef435c19be3ca99e", size = 43300, upload_time = "2025-05-22T09:47:50.802Z" }, { url = "https://files.pythonhosted.org/packages/68/4c/c009b4d9abeb964d607f0987561892d1589907f770b9e5617552b34a4a4d/hiredis-3.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6fb3e92d1172da8decc5f836bf8b528c0fc9b6d449f1353e79ceeb9dc1801132", size = 43240, upload_time = "2025-05-23T11:39:57.8Z" },
{ url = "https://files.pythonhosted.org/packages/56/e6/bcc1d3a62ec7387cc4b06706edad9e33225229faf57c1bbc00cd29d7b539/hiredis-3.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3093a32bbbc220715c6688df0aafb73b814b9369cfa573b19b8b0d1d521089a", size = 172196, upload_time = "2025-05-22T09:47:51.687Z" }, { url = "https://files.pythonhosted.org/packages/e9/83/d53f3ae9e4ac51b8a35afb7ccd68db871396ed1d7c8ba02ce2c30de0cf17/hiredis-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38ba7a32e51e518b6b3e470142e52ed2674558e04d7d73d86eb19ebcb37d7d40", size = 169624, upload_time = "2025-05-23T11:40:00.055Z" },
{ url = "https://files.pythonhosted.org/packages/9f/ea/a26db2c12e4d328f0473c907c7943fcb7b5fb92017999ed02f39e8fbb6d3/hiredis-3.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e32c2db41ce48e7e21256a6ab53d28e2073fd814a90fc3b5f7cc9d788e448e8", size = 168431, upload_time = "2025-05-22T09:47:53.097Z" }, { url = "https://files.pythonhosted.org/packages/91/2f/f9f091526e22a45385d45f3870204dc78aee365b6fe32e679e65674da6a7/hiredis-3.2.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4fc632be73174891d6bb71480247e57b2fd8f572059f0a1153e4d0339e919779", size = 165799, upload_time = "2025-05-23T11:40:01.194Z" },
{ url = "https://files.pythonhosted.org/packages/eb/5f/40b184b84a5add1f646227d35d27671b5bd6ef74cf5369bc19e3318a652e/hiredis-3.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50a3b82066dd25065075df2147b5dcd86d51b4cd6fe7ed294878952ffc1f545d", size = 182969, upload_time = "2025-05-22T09:47:54.202Z" }, { url = "https://files.pythonhosted.org/packages/1c/cc/e561274438cdb19794f0638136a5a99a9ca19affcb42679b12a78016b8ad/hiredis-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f03e6839ff21379ad3c195e0700fc9c209e7f344946dea0f8a6d7b5137a2a141", size = 180612, upload_time = "2025-05-23T11:40:02.385Z" },
{ url = "https://files.pythonhosted.org/packages/dc/a9/b57e2ef3c6ca3ef96ebf0a243d3cc64f42e483f1b2214cbe767ca56dd511/hiredis-3.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d81b41308b61ac2fa5fc1eb284a402f5aab58ec164a1cabbe4f21b36c2963132", size = 172497, upload_time = "2025-05-22T09:47:55.205Z" }, { url = "https://files.pythonhosted.org/packages/83/ba/a8a989f465191d55672e57aea2a331bfa3a74b5cbc6f590031c9e11f7491/hiredis-3.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99983873e37c71bb71deb544670ff4f9d6920dab272aaf52365606d87a4d6c73", size = 169934, upload_time = "2025-05-23T11:40:03.524Z" },
{ url = "https://files.pythonhosted.org/packages/59/40/ec8fdffb8b29c37a48a34e924439e0689596bc3dad485e1bbda82cecd5d7/hiredis-3.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6678a9dd43e3edfe9bd8014cbe3a2abe1a34698d5f70f5b59522a6f5b1c1475f", size = 173144, upload_time = "2025-05-22T09:47:57.128Z" }, { url = "https://files.pythonhosted.org/packages/52/5f/1148e965df1c67b17bdcaef199f54aec3def0955d19660a39c6ee10a6f55/hiredis-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffd982c419f48e3a57f592678c72474429465bb4bfc96472ec805f5d836523f0", size = 170074, upload_time = "2025-05-23T11:40:04.618Z" },
{ url = "https://files.pythonhosted.org/packages/93/1a/e03e62123a5a43d904f797bbc52c830f64d0942c144caf121b4b55d23a9f/hiredis-3.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3e8af33a1154022129a8c3ad34eb8ef186812398d221b0c0f427eeda1921cdc4", size = 166435, upload_time = "2025-05-22T09:47:58.178Z" }, { url = "https://files.pythonhosted.org/packages/43/5e/e6846ad159a938b539fb8d472e2e68cb6758d7c9454ea0520211f335ea72/hiredis-3.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc993f4aa4abc029347f309e722f122e05a3b8a0c279ae612849b5cc9dc69f2d", size = 164158, upload_time = "2025-05-23T11:40:05.653Z" },
{ url = "https://files.pythonhosted.org/packages/96/45/bef85c736bc8ad1ea32c6c4b6d0b7238b75d1241c7cfcf2ada2ed6d0b4ca/hiredis-3.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19d8957a74731b06a4f44be030acc32efdd4cdd7143644fa898419fe0a0140b8", size = 164888, upload_time = "2025-05-22T09:47:59.206Z" }, { url = "https://files.pythonhosted.org/packages/0a/a1/5891e0615f0993f194c1b51a65aaac063b0db318a70df001b28e49f0579d/hiredis-3.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dde790d420081f18b5949227649ccb3ed991459df33279419a25fcae7f97cd92", size = 162591, upload_time = "2025-05-23T11:40:07.041Z" },
{ url = "https://files.pythonhosted.org/packages/81/e3/ec801e020e90bd88f3e3f6bee40f146098004e11045835d3001b340f0bc6/hiredis-3.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a9a9af7173bb60e00e82262035d52920b071066cb8f2e79616b0fa4e3b1937a1", size = 177264, upload_time = "2025-05-22T09:48:00.385Z" }, { url = "https://files.pythonhosted.org/packages/d4/da/8bce52ca81716f53c1014f689aea4c170ba6411e6848f81a1bed1fc375eb/hiredis-3.2.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b0c8cae7edbef860afcf3177b705aef43e10b5628f14d5baf0ec69668247d08d", size = 174808, upload_time = "2025-05-23T11:40:09.146Z" },
{ url = "https://files.pythonhosted.org/packages/b4/7b/820bbf1bc747cd773a213fd9fa96907e6aa0ba0258d7f420a816312eda0e/hiredis-3.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5e11f8f9107ce15b6f0cd513217b576cc47fed3000929dfbde0892c098a8c6dd", size = 169621, upload_time = "2025-05-22T09:48:01.431Z" }, { url = "https://files.pythonhosted.org/packages/84/91/fc1ef444ed4dc432b5da9b48e9bd23266c703528db7be19e2b608d67ba06/hiredis-3.2.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e8a90eaca7e1ce7f175584f07a2cdbbcab13f4863f9f355d7895c4d28805f65b", size = 167060, upload_time = "2025-05-23T11:40:10.757Z" },
{ url = "https://files.pythonhosted.org/packages/68/cc/32d1883f001c28771c1baf951e1a31d6e4b6be7cac52c863412e8e397c95/hiredis-3.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7a7229e229ce2e77625518c7cfbc78cedb5d862575b4e566ca3b35c4e2200b99", size = 167304, upload_time = "2025-05-22T09:48:02.519Z" }, { url = "https://files.pythonhosted.org/packages/66/ad/beebf73a5455f232b97e00564d1e8ad095d4c6e18858c60c6cfdd893ac1e/hiredis-3.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:476031958fa44e245e803827e0787d49740daa4de708fe514370293ce519893a", size = 164833, upload_time = "2025-05-23T11:40:12.001Z" },
{ url = "https://files.pythonhosted.org/packages/4e/c5/af8448fe3abbeb31e1d97cf6a0618d7186f19e46f2179ffba34e1f917043/hiredis-3.2.0-cp312-cp312-win32.whl", hash = "sha256:998c21b58f02cb9d4dd61054e77b54070ed54e660ca17ec92ea2753e8cf5059d", size = 20551, upload_time = "2025-05-22T09:48:03.484Z" }, { url = "https://files.pythonhosted.org/packages/75/79/a9591bdc0148c0fbdf54cf6f3d449932d3b3b8779e87f33fa100a5a8088f/hiredis-3.2.1-cp311-cp311-win32.whl", hash = "sha256:eb3f5df2a9593b4b4b676dce3cea53b9c6969fc372875188589ddf2bafc7f624", size = 20402, upload_time = "2025-05-23T11:40:13.216Z" },
{ url = "https://files.pythonhosted.org/packages/32/a6/973b63e3861338132e7f455b823fd032122a7cd313d579f2c81da738e2c0/hiredis-3.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:9640ac0fa667548bc02a3c3629a7d3de7bf2c85b0c14990aa7d318ea2db8b3d6", size = 22127, upload_time = "2025-05-22T09:48:04.274Z" }, { url = "https://files.pythonhosted.org/packages/9f/05/c93cc6fab31e3c01b671126c82f44372fb211facb8bd4571fd372f50898d/hiredis-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:1402e763d8a9fdfcc103bbf8b2913971c0a3f7b8a73deacbda3dfe5f3a9d1e0b", size = 22085, upload_time = "2025-05-23T11:40:14.19Z" },
{ url = "https://files.pythonhosted.org/packages/60/a1/6da1578a22df1926497f7a3f6a3d2408fe1d1559f762c1640af5762a8eb6/hiredis-3.2.1-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:3742d8b17e73c198cabeab11da35f2e2a81999d406f52c6275234592256bf8e8", size = 82627, upload_time = "2025-05-23T11:40:15.362Z" },
{ url = "https://files.pythonhosted.org/packages/6c/b1/1056558ca8dc330be5bb25162fe5f268fee71571c9a535153df9f871a073/hiredis-3.2.1-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:9c2f3176fb617a79f6cccf22cb7d2715e590acb534af6a82b41f8196ad59375d", size = 45404, upload_time = "2025-05-23T11:40:16.72Z" },
{ url = "https://files.pythonhosted.org/packages/58/4f/13d1fa1a6b02a99e9fed8f546396f2d598c3613c98e6c399a3284fa65361/hiredis-3.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a8bd46189c7fa46174e02670dc44dfecb60f5bd4b67ed88cb050d8f1fd842f09", size = 43299, upload_time = "2025-05-23T11:40:17.697Z" },
{ url = "https://files.pythonhosted.org/packages/c0/25/ddfac123ba5a32eb1f0b40ba1b2ec98a599287f7439def8856c3c7e5dd0d/hiredis-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f86ee4488c8575b58139cdfdddeae17f91e9a893ffee20260822add443592e2f", size = 172194, upload_time = "2025-05-23T11:40:19.143Z" },
{ url = "https://files.pythonhosted.org/packages/2c/1e/443a3703ce570b631ca43494094fbaeb051578a0ebe4bfcefde351e1ba25/hiredis-3.2.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3717832f4a557b2fe7060b9d4a7900e5de287a15595e398c3f04df69019ca69d", size = 168429, upload_time = "2025-05-23T11:40:20.329Z" },
{ url = "https://files.pythonhosted.org/packages/3b/d6/0d8c6c706ed79b2298c001b5458c055615e3166533dcee3900e821a18a3e/hiredis-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5cb12c21fb9e2403d28c4e6a38120164973342d34d08120f2d7009b66785644", size = 182967, upload_time = "2025-05-23T11:40:21.921Z" },
{ url = "https://files.pythonhosted.org/packages/da/68/da8dd231fbce858b5a20ab7d7bf558912cd125f08bac4c778865ef5fe2c2/hiredis-3.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:080fda1510bbd389af91f919c11a4f2aa4d92f0684afa4709236faa084a42cac", size = 172495, upload_time = "2025-05-23T11:40:23.105Z" },
{ url = "https://files.pythonhosted.org/packages/65/25/83a31420535e2778662caa95533d5c997011fa6a88331f0cdb22afea9ec3/hiredis-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1252e10a1f3273d1c6bf2021e461652c2e11b05b83e0915d6eb540ec7539afe2", size = 173142, upload_time = "2025-05-23T11:40:24.24Z" },
{ url = "https://files.pythonhosted.org/packages/41/d7/cb907348889eb75e2aa2e6b63e065b611459e0f21fe1e371a968e13f0d55/hiredis-3.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d9e320e99ab7d2a30dc91ff6f745ba38d39b23f43d345cdee9881329d7b511d6", size = 166433, upload_time = "2025-05-23T11:40:25.287Z" },
{ url = "https://files.pythonhosted.org/packages/01/5d/7cbc69d82af7b29a95723d50f5261555ba3d024bfbdc414bdc3d23c0defb/hiredis-3.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:641668f385f16550fdd6fdc109b0af6988b94ba2acc06770a5e06a16e88f320c", size = 164883, upload_time = "2025-05-23T11:40:26.454Z" },
{ url = "https://files.pythonhosted.org/packages/f9/00/f995b1296b1d7e0247651347aa230f3225a9800e504fdf553cf7cd001cf7/hiredis-3.2.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1e1f44208c39d6c345ff451f82f21e9eeda6fe9af4ac65972cc3eeb58d41f7cb", size = 177262, upload_time = "2025-05-23T11:40:27.576Z" },
{ url = "https://files.pythonhosted.org/packages/c5/f3/723a67d729e94764ce9e0d73fa5f72a0f87d3ce3c98c9a0b27cbf001cc79/hiredis-3.2.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f882a0d6415fffe1ffcb09e6281d0ba8b1ece470e866612bbb24425bf76cf397", size = 169619, upload_time = "2025-05-23T11:40:29.671Z" },
{ url = "https://files.pythonhosted.org/packages/45/58/f69028df00fb1b223e221403f3be2059ae86031e7885f955d26236bdfc17/hiredis-3.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b4e78719a0730ebffe335528531d154bc8867a246418f74ecd88adbc4d938c49", size = 167303, upload_time = "2025-05-23T11:40:30.902Z" },
{ url = "https://files.pythonhosted.org/packages/2b/7d/567411e65cce76cf265a9a4f837fd2ebc564bef6368dd42ac03f7a517c0a/hiredis-3.2.1-cp312-cp312-win32.whl", hash = "sha256:33c4604d9f79a13b84da79950a8255433fca7edaf292bbd3364fd620864ed7b2", size = 20551, upload_time = "2025-05-23T11:40:32.69Z" },
{ url = "https://files.pythonhosted.org/packages/90/74/b4c291eb4a4a874b3690ff9fc311a65d5292072556421b11b1d786e3e1d0/hiredis-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7b9749375bf9d171aab8813694f379f2cff0330d7424000f5e92890ad4932dc9", size = 22128, upload_time = "2025-05-23T11:40:33.686Z" },
] ]
[[package]] [[package]]
@ -2516,20 +2533,21 @@ wheels = [
[[package]] [[package]]
name = "huggingface-hub" name = "huggingface-hub"
version = "0.31.4" version = "0.32.2"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "filelock" }, { name = "filelock" },
{ name = "fsspec" }, { name = "fsspec" },
{ name = "hf-xet", marker = "platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" },
{ name = "packaging" }, { name = "packaging" },
{ name = "pyyaml" }, { name = "pyyaml" },
{ name = "requests" }, { name = "requests" },
{ name = "tqdm" }, { name = "tqdm" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/05/a0/7445e07427a917399db619e3c7383de3cd723c20d3b3a8a527a096c49a44/huggingface_hub-0.31.4.tar.gz", hash = "sha256:5a7bc710b9f9c028aee5b1476867b4ec5c1b92f043cb364d5fdc54354757e4ce", size = 407736, upload_time = "2025-05-19T09:37:13.73Z" } sdist = { url = "https://files.pythonhosted.org/packages/d0/76/44f7025d1b3f29336aeb7324a57dd7c19f7c69f6612b7637b39ac7c17302/huggingface_hub-0.32.2.tar.gz", hash = "sha256:64a288b1eadad6b60bbfd50f0e52fd6cfa2ef77ab13c3e8a834a038ae929de54", size = 422847, upload_time = "2025-05-27T09:23:00.306Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/33/c7/852d4473788cfd7d79b73951244b87a6d75fdac296c90aeb5e85dbb2fb5e/huggingface_hub-0.31.4-py3-none-any.whl", hash = "sha256:4f70704760296cc69b612916056e9845f5490a33782b924fc531767967acc15d", size = 489319, upload_time = "2025-05-19T09:37:11.506Z" }, { url = "https://files.pythonhosted.org/packages/32/30/532fe57467a6cc7ff2e39f088db1cb6d6bf522f724a4a5c7beda1282d5a6/huggingface_hub-0.32.2-py3-none-any.whl", hash = "sha256:f8fcf14603237eadf96dbe577d30b330f8c27b4a0a31e8f6c94fdc25e021fdb8", size = 509968, upload_time = "2025-05-27T09:22:57.967Z" },
] ]
[[package]] [[package]]
@ -2555,15 +2573,15 @@ wheels = [
[[package]] [[package]]
name = "hypothesis" name = "hypothesis"
version = "6.131.23" version = "6.131.30"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "attrs" }, { name = "attrs" },
{ name = "sortedcontainers" }, { name = "sortedcontainers" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/4b/08/4abb1d608a5a4c3a16bc9dc05a7e9f2a0664b0d6aaa787c1a09574306302/hypothesis-6.131.23.tar.gz", hash = "sha256:7723d82885b6cddc39d0ce5b2d8c5b890b3ebc2263a3707d9430ceaf368de402", size = 438101, upload_time = "2025-05-23T05:18:49.585Z" } sdist = { url = "https://files.pythonhosted.org/packages/49/7f/e1d7a5ee9f96ca73e0fe51d226e2ad15029ff1ff16b6096ced2837c4af2f/hypothesis-6.131.30.tar.gz", hash = "sha256:c04f748c9cb6c3e3d134699258c2d076afebf40e2752572b6f05f86bd3f23fe5", size = 442221, upload_time = "2025-05-27T18:05:40.098Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/af/5f/1bbfed2cc039eed31d4389bf6429ebbf0304b27466ad4e04f3e35fc5a506/hypothesis-6.131.23-py3-none-any.whl", hash = "sha256:5c1212d000aec42c949a1e41415aef2bba297cde807cea50058d162520da7d3f", size = 502615, upload_time = "2025-05-23T05:18:45.87Z" }, { url = "https://files.pythonhosted.org/packages/e3/a5/59fd76d3445e54cfb3982ffbca627aa58cca127e05d6552a6c4302926a6f/hypothesis-6.131.30-py3-none-any.whl", hash = "sha256:1a04a43f282a32bffb21dc4b1ab7e68c9b34db0298b9b91933484eca4682d6b4", size = 506833, upload_time = "2025-05-27T18:05:35.867Z" },
] ]
[[package]] [[package]]
@ -2684,11 +2702,11 @@ wheels = [
[[package]] [[package]]
name = "joblib" name = "joblib"
version = "1.5.0" version = "1.5.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/30/08/8bd4a0250247861420a040b33ccf42f43c426ac91d99405374ef117e5872/joblib-1.5.0.tar.gz", hash = "sha256:d8757f955389a3dd7a23152e43bc297c2e0c2d3060056dad0feefc88a06939b5", size = 330234, upload_time = "2025-05-03T21:09:39.553Z" } sdist = { url = "https://files.pythonhosted.org/packages/dc/fe/0f5a938c54105553436dbff7a61dc4fed4b1b2c98852f8833beaf4d5968f/joblib-1.5.1.tar.gz", hash = "sha256:f4f86e351f39fe3d0d32a9f2c3d8af1ee4cec285aafcb27003dda5205576b444", size = 330475, upload_time = "2025-05-23T12:04:37.097Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/da/d3/13ee227a148af1c693654932b8b0b02ed64af5e1f7406d56b088b57574cd/joblib-1.5.0-py3-none-any.whl", hash = "sha256:206144b320246485b712fc8cc51f017de58225fa8b414a1fe1764a7231aca491", size = 307682, upload_time = "2025-05-03T21:09:37.892Z" }, { url = "https://files.pythonhosted.org/packages/7d/4f/1195bbac8e0c2acc5f740661631d8d750dc38d4a32b23ee5df3cde6f4e0d/joblib-1.5.1-py3-none-any.whl", hash = "sha256:4719a31f054c7d766948dcd83e9613686b27114f190f717cec7eaa2084f8a74a", size = 307746, upload_time = "2025-05-23T12:04:35.124Z" },
] ]
[[package]] [[package]]
@ -2702,7 +2720,7 @@ wheels = [
[[package]] [[package]]
name = "jsonschema" name = "jsonschema"
version = "4.23.0" version = "4.24.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "attrs" }, { name = "attrs" },
@ -2710,9 +2728,9 @@ dependencies = [
{ name = "referencing" }, { name = "referencing" },
{ name = "rpds-py" }, { name = "rpds-py" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/38/2e/03362ee4034a4c917f697890ccd4aec0800ccf9ded7f511971c75451deec/jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4", size = 325778, upload_time = "2024-07-08T18:40:05.546Z" } sdist = { url = "https://files.pythonhosted.org/packages/bf/d3/1cf5326b923a53515d8f3a2cd442e6d7e94fcc444716e879ea70a0ce3177/jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196", size = 353480, upload_time = "2025-05-26T18:48:10.459Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/69/4a/4f9dbeb84e8850557c02365a0eee0649abe5eb1d84af92a25731c6c0f922/jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566", size = 88462, upload_time = "2024-07-08T18:40:00.165Z" }, { url = "https://files.pythonhosted.org/packages/a2/3d/023389198f69c722d039351050738d6755376c8fd343e91dc493ea485905/jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d", size = 88709, upload_time = "2025-05-26T18:48:08.417Z" },
] ]
[[package]] [[package]]
@ -3442,18 +3460,18 @@ wheels = [
[[package]] [[package]]
name = "opendal" name = "opendal"
version = "0.45.19" version = "0.45.20"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/24/3d/11177fe2a107ea131a4216c7406839a99b5a867c83d07832136263dba66a/opendal-0.45.19.tar.gz", hash = "sha256:29018eb029eda28fd9709b579c8ddacd89a201b524fe8a80496afb05ea2fd29c", size = 987067, upload_time = "2025-05-13T09:48:25.143Z" } sdist = { url = "https://files.pythonhosted.org/packages/2f/3f/927dfe1349ae58b9238b8eafba747af648d660a9425f486dda01a10f0b78/opendal-0.45.20.tar.gz", hash = "sha256:9f6f90d9e9f9d6e9e5a34aa7729169ef34d2f1869ad1e01ddc39b1c0ce0c9405", size = 990267, upload_time = "2025-05-26T07:02:11.819Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/42/3c/703c99e72426b9e4dd85452f7b759693c11a98c74b68003f03a50b389c56/opendal-0.45.19-cp311-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:b6d3bb0ffc6dc453c896fdbe6833ad0f1dc295a8a75502907593684fbede0e0c", size = 27064893, upload_time = "2025-05-13T09:47:42.818Z" }, { url = "https://files.pythonhosted.org/packages/84/77/6427e16b8630f0cc71f4a1b01648ed3264f1e04f1f6d9b5d09e5c6a4dd2f/opendal-0.45.20-cp311-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:35acdd8001e4a741532834fdbff3020ffb10b40028bb49fbe93c4f8197d66d8c", size = 26910966, upload_time = "2025-05-26T07:01:24.987Z" },
{ url = "https://files.pythonhosted.org/packages/f2/2c/e67e46480e57e3743d4184671161f84f9cfc0cefd7436549e309ebc00310/opendal-0.45.19-cp311-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a2d6673abf0fbca491543a783e07a314932578da226228cbb0cbfdfd860e0b58", size = 13014262, upload_time = "2025-05-13T09:47:45.358Z" }, { url = "https://files.pythonhosted.org/packages/12/1f/83e415334739f1ab4dba55cdd349abf0b66612249055afb422a354b96ac8/opendal-0.45.20-cp311-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:629bfe8d384364bced6cbeb01f49b99779fa5151c68048a1869ff645ddcfcb25", size = 13002770, upload_time = "2025-05-26T07:01:30.385Z" },
{ url = "https://files.pythonhosted.org/packages/d3/1b/6f6e84b16d60e317ee38f4bfa7a7205a398c87e1b23535c00db1de547d72/opendal-0.45.19-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51edc429f3af65c9e6aad8d66872456f217c2dd4b5b735e7ccda337fea13f1d0", size = 14515103, upload_time = "2025-05-13T09:47:47.518Z" }, { url = "https://files.pythonhosted.org/packages/49/94/c5de6ed54a02d7413636c2ccefa71d8dd09c2ada1cd6ecab202feb1fdeda/opendal-0.45.20-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12cc5ac7e441fb93d86d1673112d9fb08580fc3226f864434f4a56a72efec53", size = 14387218, upload_time = "2025-05-26T07:01:33.017Z" },
{ url = "https://files.pythonhosted.org/packages/63/4a/8ca7b5f5e94b1808e5c6064ccada98ccdd2c4ede7be2eb0ed7b87e5ea824/opendal-0.45.19-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d923c19871cc7504e3096e067dd9e855a0e518557460b39b4bedf87b56dcba6b", size = 13598898, upload_time = "2025-05-13T09:47:49.798Z" }, { url = "https://files.pythonhosted.org/packages/c6/83/713a1e1de8cbbd69af50e26644bbdeef3c1068b89f442417376fa3c0f591/opendal-0.45.20-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:45a3adae1f473052234fc4054a6f210df3ded9aff10db8d545d0a37eff3b13cc", size = 13424302, upload_time = "2025-05-26T07:01:36.417Z" },
{ url = "https://files.pythonhosted.org/packages/5e/15/89ddd83849c79d684f270ff3c23abef7b68a7b9272d61b352fff3884c726/opendal-0.45.19-cp311-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b37786c13ac9370017b071744eec3b1cabd54408049e34e7317c76cd3867f8a7", size = 13766229, upload_time = "2025-05-13T09:47:52.374Z" }, { url = "https://files.pythonhosted.org/packages/c7/78/c9651e753aaf6eb61887ca372a3f9c2ae57dae03c3159d24deaf018c26dc/opendal-0.45.20-cp311-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d8947857052c85a4b0e251d50e23f5f68f0cdd9e509e32e614a5e4b2fc7424c4", size = 13622483, upload_time = "2025-05-26T07:01:38.886Z" },
{ url = "https://files.pythonhosted.org/packages/2f/16/434cbd6a0e0136315e813755d8db62029aece9dd5fb7b49e13af8f5ed5bf/opendal-0.45.19-cp311-abi3-musllinux_1_1_armv7l.whl", hash = "sha256:c6182b5f47057bf9160242f6a0ad9e6351d2e78d32fdea0b99137bbcd1678006", size = 13311226, upload_time = "2025-05-13T09:47:54.672Z" }, { url = "https://files.pythonhosted.org/packages/3c/9d/5d8c20c0fc93df5e349e5694167de30afdc54c5755704cc64764a6cbb309/opendal-0.45.20-cp311-abi3-musllinux_1_1_armv7l.whl", hash = "sha256:891d2f9114efeef648973049ed15e56477e8feb9e48b540bd8d6105ea22a253c", size = 13320229, upload_time = "2025-05-26T07:01:41.965Z" },
{ url = "https://files.pythonhosted.org/packages/e3/3c/41ee777f15b3d6fe6ccd2b6bb28d20e703c39f7826355ce2588bee4c875f/opendal-0.45.19-cp311-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a20e956745bd8d1c9ecddbfdb4bc1fb86dd82fe1e1b84ccaaec1551d80396713", size = 14699416, upload_time = "2025-05-13T09:47:56.959Z" }, { url = "https://files.pythonhosted.org/packages/21/39/05262f748a2085522e0c85f03eab945589313dc9caedc002872c39162776/opendal-0.45.20-cp311-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:539de9b825f6783d6289d88c0c9ac5415daa4d892d761e3540c565bda51e8997", size = 14574280, upload_time = "2025-05-26T07:01:44.413Z" },
{ url = "https://files.pythonhosted.org/packages/38/03/d445d30ad32bef12f3399644c9259c7798520d65f273406b65ba0fcf52dd/opendal-0.45.19-cp311-abi3-win_amd64.whl", hash = "sha256:4b72fc14aa55ff7ff409f67b96f12c271b4373091743be6f213cb980531f1bfe", size = 15223706, upload_time = "2025-05-13T09:48:03.819Z" }, { url = "https://files.pythonhosted.org/packages/74/83/cc7c6de29b0a7585cd445258d174ca204d37729c3874ad08e515b0bf331c/opendal-0.45.20-cp311-abi3-win_amd64.whl", hash = "sha256:145efd56aa33b493d5b652c3e4f5ae5097ab69d38c132d80f108e9f5c1e4d863", size = 14929888, upload_time = "2025-05-26T07:01:46.929Z" },
] ]
[[package]] [[package]]
@ -3744,7 +3762,7 @@ wheels = [
[[package]] [[package]]
name = "opik" name = "opik"
version = "1.7.26" version = "1.7.27"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "boto3-stubs", extra = ["bedrock-runtime"] }, { name = "boto3-stubs", extra = ["bedrock-runtime"] },
@ -3763,9 +3781,9 @@ dependencies = [
{ name = "tqdm" }, { name = "tqdm" },
{ name = "uuid6" }, { name = "uuid6" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/2a/cf/78e3d4debedb44dd04b22aa93f9c6d58903a340b76adbd979e6ab005feed/opik-1.7.26.tar.gz", hash = "sha256:da58175f1da5e48b17b138910b2eb5e9a57bf8b446b6d2eec4eaffe65db864e0", size = 295024, upload_time = "2025-05-22T12:21:37.564Z" } sdist = { url = "https://files.pythonhosted.org/packages/60/b7/f560a761339cbee977cf1264f1af2e9924caff695086233c2ceee085ab79/opik-1.7.27.tar.gz", hash = "sha256:7a296d97e0c33be41dea7ceece865860db1f33608e4381693af812ab8ebf21da", size = 297457, upload_time = "2025-05-26T13:05:27.19Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/8c/cb/58395a4e7bcc30ac2de6be30993488ffb2f2e0a76a77dbef3e05c9a2ab41/opik-1.7.26-py3-none-any.whl", hash = "sha256:a27a75d730db8fcc5576bf6bbd37f3ee96c3ca06286ca344295df37d96d4f757", size = 553037, upload_time = "2025-05-22T12:21:36.131Z" }, { url = "https://files.pythonhosted.org/packages/0d/92/1b31e77b942ad821ca4a5ada09e93286740d0337b1c9af45b849d16fee6f/opik-1.7.27-py3-none-any.whl", hash = "sha256:9ec1417a16d6a4ebf5e72749a2298c7c47b6b0c64cdf5da0d521d20666ede2a6", size = 558716, upload_time = "2025-05-26T13:05:25.279Z" },
] ]
[[package]] [[package]]
@ -3908,15 +3926,15 @@ performance = [
[[package]] [[package]]
name = "pandas-stubs" name = "pandas-stubs"
version = "2.2.3.250308" version = "2.2.3.250527"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "numpy" }, { name = "numpy" },
{ name = "types-pytz" }, { name = "types-pytz" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/2e/5a/261f5c67a73e46df2d5984fe7129d66a3ed4864fd7aa9d8721abb3fc802e/pandas_stubs-2.2.3.250308.tar.gz", hash = "sha256:3a6e9daf161f00b85c83772ed3d5cff9522028f07a94817472c07b91f46710fd", size = 103986, upload_time = "2025-03-08T20:51:04.999Z" } sdist = { url = "https://files.pythonhosted.org/packages/5f/0d/5fe7f7f3596eb1c2526fea151e9470f86b379183d8b9debe44b2098651ca/pandas_stubs-2.2.3.250527.tar.gz", hash = "sha256:e2d694c4e72106055295ad143664e5c99e5815b07190d1ff85b73b13ff019e63", size = 106312, upload_time = "2025-05-27T15:24:29.716Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/ba/64/ab61d9ca06ff66c07eb804ec27dec1a2be1978b3c3767caaa91e363438cc/pandas_stubs-2.2.3.250308-py3-none-any.whl", hash = "sha256:a377edff3b61f8b268c82499fdbe7c00fdeed13235b8b71d6a1dc347aeddc74d", size = 158053, upload_time = "2025-03-08T20:51:03.411Z" }, { url = "https://files.pythonhosted.org/packages/ec/f8/46141ba8c9d7064dc5008bfb4a6ae5bd3c30e4c61c28b5c5ed485bf358ba/pandas_stubs-2.2.3.250527-py3-none-any.whl", hash = "sha256:cd0a49a95b8c5f944e605be711042a4dd8550e2c559b43d70ba2c4b524b66163", size = 159683, upload_time = "2025-05-27T15:24:28.4Z" },
] ]
[[package]] [[package]]
@ -4062,7 +4080,7 @@ wheels = [
[[package]] [[package]]
name = "posthog" name = "posthog"
version = "4.1.0" version = "4.2.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "backoff" }, { name = "backoff" },
@ -4071,9 +4089,9 @@ dependencies = [
{ name = "requests" }, { name = "requests" },
{ name = "six" }, { name = "six" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/89/ed/6c6bc0f9e20673fa43f2938de2f401395a65e94b74acce3271cc10192ee3/posthog-4.1.0.tar.gz", hash = "sha256:d9c7a7ed72ee2b20e76fca04180f18e00a23cb7270c1404b6212bdb2cc651b2d", size = 78509, upload_time = "2025-05-23T00:34:27.668Z" } sdist = { url = "https://files.pythonhosted.org/packages/ce/5b/2e9890700b7b55a370edbfbe5948eae780d48af9b46ad06ea2e7970576f4/posthog-4.2.0.tar.gz", hash = "sha256:c4abc95de03294be005b3b7e8735e9d7abab88583da26262112bacce64b0c3b5", size = 80727, upload_time = "2025-05-23T23:23:55.943Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/24/ba/71cf69842a0d60342975a957ff321ec30be7b8a71f281cb6d70895c1bf41/posthog-4.1.0-py2.py3-none-any.whl", hash = "sha256:942460323a1dc85e9111248a4946276f99268883182aaa7f6c9db135c84cf613", size = 93147, upload_time = "2025-05-23T00:34:26.101Z" }, { url = "https://files.pythonhosted.org/packages/51/16/7b6c5844acee2d343d463ee0e3143cd8c7c48a6c0d079a2f7daf0c80b95c/posthog-4.2.0-py2.py3-none-any.whl", hash = "sha256:60c7066caac43e43e326e9196d8c1aadeafc8b0be9e5c108446e352711fa456b", size = 96692, upload_time = "2025-05-23T23:23:54.384Z" },
] ]
[[package]] [[package]]
@ -4389,7 +4407,7 @@ crypto = [
[[package]] [[package]]
name = "pymilvus" name = "pymilvus"
version = "2.5.9" version = "2.5.10"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "grpcio" }, { name = "grpcio" },
@ -4400,9 +4418,9 @@ dependencies = [
{ name = "setuptools" }, { name = "setuptools" },
{ name = "ujson" }, { name = "ujson" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/1c/1e/80a5e39c3343aff39e350820a611acfb5b0f91d64af64977d9d42f417c68/pymilvus-2.5.9.tar.gz", hash = "sha256:5f524233450494f64d4db6549d0f56e7eb147e96da6183486c737c71a3f7dc61", size = 1260401, upload_time = "2025-05-19T03:57:01.024Z" } sdist = { url = "https://files.pythonhosted.org/packages/da/e2/88f126a08d8eefba7341e3eb323406a227146094aab7137a2b91d882e98d/pymilvus-2.5.10.tar.gz", hash = "sha256:cc44ad776aeab781ee4c4a4d334b73e746066ab2fb6722c5311f02efa6fc54a2", size = 1260364, upload_time = "2025-05-23T06:08:06.992Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/9d/3c/f28eca6a607009f6a3f955efef3e0d6bf4584b2d41431a0e062bbc1d0969/pymilvus-2.5.9-py3-none-any.whl", hash = "sha256:e905a38429c3d8682a314e133a2dcc6e97f7b3cd28f07ef41c679e0cf92b0e0a", size = 227607, upload_time = "2025-05-19T03:56:58.745Z" }, { url = "https://files.pythonhosted.org/packages/b0/4b/847704930ad8ddd0d0975e9a3a5e3fe704f642debe97454135c2b9ee7081/pymilvus-2.5.10-py3-none-any.whl", hash = "sha256:7da540f93068871cda3941602c55227aeaafb66f2f0d9c05e8f9db783716b100", size = 227635, upload_time = "2025-05-23T06:08:05.397Z" },
] ]
[[package]] [[package]]
@ -4583,14 +4601,14 @@ wheels = [
[[package]] [[package]]
name = "pytest-mock" name = "pytest-mock"
version = "3.14.0" version = "3.14.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "pytest" }, { name = "pytest" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/c6/90/a955c3ab35ccd41ad4de556596fa86685bf4fc5ffcc62d22d856cfd4e29a/pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0", size = 32814, upload_time = "2024-03-21T22:14:04.964Z" } sdist = { url = "https://files.pythonhosted.org/packages/71/28/67172c96ba684058a4d24ffe144d64783d2a270d0af0d9e792737bddc75c/pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e", size = 33241, upload_time = "2025-05-26T13:58:45.167Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/f2/3b/b26f90f74e2986a82df6e7ac7e319b8ea7ccece1caec9f8ab6104dc70603/pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f", size = 9863, upload_time = "2024-03-21T22:14:02.694Z" }, { url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923, upload_time = "2025-05-26T13:58:43.487Z" },
] ]
[[package]] [[package]]
@ -4770,7 +4788,7 @@ wheels = [
[[package]] [[package]]
name = "qdrant-client" name = "qdrant-client"
version = "1.7.3" version = "1.9.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "grpcio" }, { name = "grpcio" },
@ -4781,9 +4799,9 @@ dependencies = [
{ name = "pydantic" }, { name = "pydantic" },
{ name = "urllib3" }, { name = "urllib3" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/71/f0/76b2583fe09d134ac659a308e6cf7a5f48443200d7c79c1963af042377a2/qdrant_client-1.7.3.tar.gz", hash = "sha256:7b809be892cdc5137ae80ea3335da40c06499ad0b0072b5abc6bad79da1d29fc", size = 180484, upload_time = "2024-02-08T17:53:05.89Z" } sdist = { url = "https://files.pythonhosted.org/packages/86/cf/db06a74694bf8f126ed4a869c70ef576f01ee691ef20799fba3d561d3565/qdrant_client-1.9.0.tar.gz", hash = "sha256:7b1792f616651a6f0a76312f945c13d088e9451726795b82ce0350f7df3b7981", size = 199999, upload_time = "2024-04-22T13:35:49.444Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/98/c3/8b2cebb07e0258b478f770dc2e9d246be17318c3858673ace0b345d147b0/qdrant_client-1.7.3-py3-none-any.whl", hash = "sha256:b062420ba55eb847652c7d2a26404fb1986bea13aa785763024013f96a7a915c", size = 206318, upload_time = "2024-02-08T17:53:03.025Z" }, { url = "https://files.pythonhosted.org/packages/3a/fa/5abd82cde353f1009c068cca820195efd94e403d261b787e78ea7a9c8318/qdrant_client-1.9.0-py3-none-any.whl", hash = "sha256:ee02893eab1f642481b1ac1e38eb68ec30bab0f673bef7cc05c19fa5d2cbf43e", size = 229258, upload_time = "2024-04-22T13:35:46.81Z" },
] ]
[[package]] [[package]]
@ -5180,11 +5198,11 @@ wheels = [
[[package]] [[package]]
name = "setuptools" name = "setuptools"
version = "80.8.0" version = "80.9.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/8d/d2/ec1acaaff45caed5c2dedb33b67055ba9d4e96b091094df90762e60135fe/setuptools-80.8.0.tar.gz", hash = "sha256:49f7af965996f26d43c8ae34539c8d99c5042fbff34302ea151eaa9c207cd257", size = 1319720, upload_time = "2025-05-20T14:02:53.503Z" } sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload_time = "2025-05-27T00:56:51.443Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/58/29/93c53c098d301132196c3238c312825324740851d77a8500a2462c0fd888/setuptools-80.8.0-py3-none-any.whl", hash = "sha256:95a60484590d24103af13b686121328cc2736bee85de8936383111e421b9edc0", size = 1201470, upload_time = "2025-05-20T14:02:51.348Z" }, { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload_time = "2025-05-27T00:56:49.664Z" },
] ]
[[package]] [[package]]
@ -5308,11 +5326,11 @@ wheels = [
[[package]] [[package]]
name = "sqlglot" name = "sqlglot"
version = "26.19.0" version = "26.22.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/27/93/64691ea1fee6c96ce95663bb4e4b940a99a891d7954628aedd4f4ced21d1/sqlglot-26.19.0.tar.gz", hash = "sha256:042589f9fae232deb227a7a5c977a827c71da75caa2c0c45a2bcb4064dc4a622", size = 5366769, upload_time = "2025-05-22T10:41:04.35Z" } sdist = { url = "https://files.pythonhosted.org/packages/6b/ac/7813c14ca194161fc8b1a7132cb2e9b30904ff2b81953998de3ee5217ca0/sqlglot-26.22.0.tar.gz", hash = "sha256:30f71a771ea306b14581dc546f0faa9ebfcd745ea3c882336b33131399ae4989", size = 5369050, upload_time = "2025-05-27T22:15:03.682Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/1c/13/9f3088fc1f58b44f8561b97c6141e515b4efd5815ba2e1a2b98a18c7b104/sqlglot-26.19.0-py3-none-any.whl", hash = "sha256:f8d2193f0e618ce3ce636290d33c94f444304ca9853b546046fb74983261d249", size = 462578, upload_time = "2025-05-22T10:41:01.721Z" }, { url = "https://files.pythonhosted.org/packages/43/9c/56df221770c1a45ea3d9a9e17a78baf88c6a62601844c2d2c39cb8720581/sqlglot-26.22.0-py3-none-any.whl", hash = "sha256:53f9e486562cbdcb22236d19f160474193ca9b2dc36881c0e5b2af83b9a4a139", size = 463434, upload_time = "2025-05-27T22:15:00.865Z" },
] ]
[[package]] [[package]]
@ -5609,7 +5627,7 @@ wheels = [
[[package]] [[package]]
name = "typer" name = "typer"
version = "0.15.4" version = "0.16.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "click" }, { name = "click" },
@ -5617,9 +5635,9 @@ dependencies = [
{ name = "shellingham" }, { name = "shellingham" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/6c/89/c527e6c848739be8ceb5c44eb8208c52ea3515c6cf6406aa61932887bf58/typer-0.15.4.tar.gz", hash = "sha256:89507b104f9b6a0730354f27c39fae5b63ccd0c95b1ce1f1a6ba0cfd329997c3", size = 101559, upload_time = "2025-05-14T16:34:57.704Z" } sdist = { url = "https://files.pythonhosted.org/packages/c5/8c/7d682431efca5fd290017663ea4588bf6f2c6aad085c7f108c5dbc316e70/typer-0.16.0.tar.gz", hash = "sha256:af377ffaee1dbe37ae9440cb4e8f11686ea5ce4e9bae01b84ae7c63b87f1dd3b", size = 102625, upload_time = "2025-05-26T14:30:31.824Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/c9/62/d4ba7afe2096d5659ec3db8b15d8665bdcb92a3c6ff0b95e99895b335a9c/typer-0.15.4-py3-none-any.whl", hash = "sha256:eb0651654dcdea706780c466cf06d8f174405a659ffff8f163cfbfee98c0e173", size = 45258, upload_time = "2025-05-14T16:34:55.583Z" }, { url = "https://files.pythonhosted.org/packages/76/42/3efaf858001d2c2913de7f354563e3a3a2f0decae3efe98427125a8f441e/typer-0.16.0-py3-none-any.whl", hash = "sha256:1f79bed11d4d02d4310e3c1b7ba594183bcedb0ac73b27a9e5f28f6fb5b98855", size = 46317, upload_time = "2025-05-26T14:30:30.523Z" },
] ]
[[package]] [[package]]
@ -5690,11 +5708,11 @@ wheels = [
[[package]] [[package]]
name = "types-docutils" name = "types-docutils"
version = "0.21.0.20250523" version = "0.21.0.20250526"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ef/30/c7f2c002f0d2bb8ec02c5ecdbf0715f57a0c37829b8b69f9902301c61a2a/types_docutils-0.21.0.20250523.tar.gz", hash = "sha256:ce32a9022199df2600570b3d5f4f440c006ff487839ef09dcbbce26f55a4e469", size = 38089, upload_time = "2025-05-23T03:05:32.578Z" } sdist = { url = "https://files.pythonhosted.org/packages/34/bf/bb5695f7a9660f79a9cd999ea13ff7331b8f2d03aec3d2fd7c38be4bc8aa/types_docutils-0.21.0.20250526.tar.gz", hash = "sha256:6c7ba387716315df0d86a796baec9d5a71825ed2746cb7763193aafbb70ac86c", size = 38140, upload_time = "2025-05-26T03:10:49.242Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/92/1f/3defcd0397b46560e4a089333b3553e35be325fd8be6ae6b08419a950da3/types_docutils-0.21.0.20250523-py3-none-any.whl", hash = "sha256:ec76a4e379e7430a632e19bc7595ce6a013a465fa3fb88deb1bddcc78195f071", size = 61930, upload_time = "2025-05-23T03:05:30.652Z" }, { url = "https://files.pythonhosted.org/packages/35/84/73bca8d1364f6685bd6e00eaa15e653ef96163231fbd7a612f3a845497fb/types_docutils-0.21.0.20250526-py3-none-any.whl", hash = "sha256:44d9f9ed19bb75071deb6804947c123f30bbc617a656420f044e09b9f16b72d1", size = 62000, upload_time = "2025-05-26T03:10:48.101Z" },
] ]
[[package]] [[package]]
@ -5939,11 +5957,11 @@ wheels = [
[[package]] [[package]]
name = "types-s3transfer" name = "types-s3transfer"
version = "0.12.0" version = "0.13.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/fb/d5/830e9efe91a26601a2bebde6f299239d2d26e542f5d4b3bc7e8c23c81a3f/types_s3transfer-0.12.0.tar.gz", hash = "sha256:f8f59201481e904362873bf0be3267f259d60ad946ebdfcb847d092a1fa26f98", size = 14096, upload_time = "2025-04-23T00:38:19.131Z" } sdist = { url = "https://files.pythonhosted.org/packages/42/c1/45038f259d6741c252801044e184fec4dbaeff939a58f6160d7c32bf4975/types_s3transfer-0.13.0.tar.gz", hash = "sha256:203dadcb9865c2f68fb44bc0440e1dc05b79197ba4a641c0976c26c9af75ef52", size = 14175, upload_time = "2025-05-28T02:16:07.614Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/fc/43/6097275152463ac9bacf1e00aab30bc6682bf45f6a031be8bf029c030ba2/types_s3transfer-0.12.0-py3-none-any.whl", hash = "sha256:101bbc5b7f00b71512374df881f480fc6bf63c948b5098ab024bf3370fbfb0e8", size = 19553, upload_time = "2025-04-23T00:38:17.865Z" }, { url = "https://files.pythonhosted.org/packages/c8/5d/6bbe4bf6a79fb727945291aef88b5ecbdba857a603f1bbcf1a6be0d3f442/types_s3transfer-0.13.0-py3-none-any.whl", hash = "sha256:79c8375cbf48a64bff7654c02df1ec4b20d74f8c5672fc13e382f593ca5565b3", size = 19588, upload_time = "2025-05-28T02:16:06.709Z" },
] ]
[[package]] [[package]]
@ -6382,9 +6400,10 @@ wheels = [
[[package]] [[package]]
name = "weave" name = "weave"
version = "0.51.47" version = "0.51.48"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "click" },
{ name = "diskcache" }, { name = "diskcache" },
{ name = "emoji" }, { name = "emoji" },
{ name = "gql", extra = ["aiohttp", "requests"] }, { name = "gql", extra = ["aiohttp", "requests"] },
@ -6398,9 +6417,9 @@ dependencies = [
{ name = "uuid-utils" }, { name = "uuid-utils" },
{ name = "wandb" }, { name = "wandb" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/ec/0e/30f4323aa146698b9bf688f8a3d3d981937863632fe4eb7b71de5de7e1fc/weave-0.51.47.tar.gz", hash = "sha256:9cbec8e7bccbb7aab4cb815179af88f979be92d2a96633b0aa4a89a5342d24a3", size = 401833, upload_time = "2025-05-15T22:01:47.41Z" } sdist = { url = "https://files.pythonhosted.org/packages/0a/9f/b1da2e1ca2033db77fa6b76ad73f98458f50e38712511977cfa702aac041/weave-0.51.48.tar.gz", hash = "sha256:3c689ea41e1fbd9e40306f33c1e43f8e88adbe60ee092910f0b39dda44125bd4", size = 405037, upload_time = "2025-05-23T19:04:10.911Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/ca/7d/c3637aa7cbe66e5166fd253d47b27675114693c6940e6cf7691893c6c7b6/weave-0.51.47-py3-none-any.whl", hash = "sha256:0408470a17371a686d701d03fa252c2af98ad667b201b35d200fb2078583fe7a", size = 512410, upload_time = "2025-05-15T22:01:44.823Z" }, { url = "https://files.pythonhosted.org/packages/1b/d8/64233b745ca005643978e427277be6900e45138cc3818d59f98740b225cd/weave-0.51.48-py3-none-any.whl", hash = "sha256:edfde3c523d91000033ce8e4a3dbe84d9b71df7abf6a72fbd3765f84fe00900d", size = 516942, upload_time = "2025-05-23T19:04:08.641Z" },
] ]
[[package]] [[package]]
@ -6607,11 +6626,11 @@ wheels = [
[[package]] [[package]]
name = "zipp" name = "zipp"
version = "3.21.0" version = "3.22.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/3f/50/bad581df71744867e9468ebd0bcd6505de3b275e06f202c2cb016e3ff56f/zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4", size = 24545, upload_time = "2024-11-10T15:05:20.202Z" } sdist = { url = "https://files.pythonhosted.org/packages/12/b6/7b3d16792fdf94f146bed92be90b4eb4563569eca91513c8609aebf0c167/zipp-3.22.0.tar.gz", hash = "sha256:dd2f28c3ce4bc67507bfd3781d21b7bb2be31103b51a4553ad7d90b84e57ace5", size = 25257, upload_time = "2025-05-26T14:46:32.217Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/1a/7e4798e9339adc931158c9d69ecc34f5e6791489d469f5e50ec15e35f458/zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931", size = 9630, upload_time = "2024-11-10T15:05:19.275Z" }, { url = "https://files.pythonhosted.org/packages/ad/da/f64669af4cae46f17b90798a827519ce3737d31dbafad65d391e49643dc4/zipp-3.22.0-py3-none-any.whl", hash = "sha256:fe208f65f2aca48b81f9e6fd8cf7b8b32c26375266b009b413d45306b6148343", size = 9796, upload_time = "2025-05-26T14:46:30.775Z" },
] ]
[[package]] [[package]]

View File

@ -412,6 +412,7 @@ QDRANT_API_KEY=difyai123456
QDRANT_CLIENT_TIMEOUT=20 QDRANT_CLIENT_TIMEOUT=20
QDRANT_GRPC_ENABLED=false QDRANT_GRPC_ENABLED=false
QDRANT_GRPC_PORT=6334 QDRANT_GRPC_PORT=6334
QDRANT_REPLICATION_FACTOR=1
# Milvus configuration. Only available when VECTOR_STORE is `milvus`. # Milvus configuration. Only available when VECTOR_STORE is `milvus`.
# The milvus uri. # The milvus uri.

View File

@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env
services: services:
# API service # API service
api: api:
image: langgenius/dify-api:1.4.0 image: langgenius/dify-api:1.4.1
restart: always restart: always
environment: environment:
# Use the shared environment variables. # Use the shared environment variables.
@ -31,7 +31,7 @@ services:
# worker service # worker service
# The Celery worker for processing the queue. # The Celery worker for processing the queue.
worker: worker:
image: langgenius/dify-api:1.4.0 image: langgenius/dify-api:1.4.1
restart: always restart: always
environment: environment:
# Use the shared environment variables. # Use the shared environment variables.
@ -57,7 +57,7 @@ services:
# Frontend web application. # Frontend web application.
web: web:
image: langgenius/dify-web:1.4.0 image: langgenius/dify-web:1.4.1
restart: always restart: always
environment: environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-} CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@ -142,7 +142,7 @@ services:
# plugin daemon # plugin daemon
plugin_daemon: plugin_daemon:
image: langgenius/dify-plugin-daemon:0.0.10-local image: langgenius/dify-plugin-daemon:0.1.1-local
restart: always restart: always
environment: environment:
# Use the shared environment variables. # Use the shared environment variables.

View File

@ -71,7 +71,7 @@ services:
# plugin daemon # plugin daemon
plugin_daemon: plugin_daemon:
image: langgenius/dify-plugin-daemon:0.0.10-local image: langgenius/dify-plugin-daemon:0.1.1-local
restart: always restart: always
env_file: env_file:
- ./middleware.env - ./middleware.env

View File

@ -138,6 +138,7 @@ x-shared-env: &shared-api-worker-env
QDRANT_CLIENT_TIMEOUT: ${QDRANT_CLIENT_TIMEOUT:-20} QDRANT_CLIENT_TIMEOUT: ${QDRANT_CLIENT_TIMEOUT:-20}
QDRANT_GRPC_ENABLED: ${QDRANT_GRPC_ENABLED:-false} QDRANT_GRPC_ENABLED: ${QDRANT_GRPC_ENABLED:-false}
QDRANT_GRPC_PORT: ${QDRANT_GRPC_PORT:-6334} QDRANT_GRPC_PORT: ${QDRANT_GRPC_PORT:-6334}
QDRANT_REPLICATION_FACTOR: ${QDRANT_REPLICATION_FACTOR:-1}
MILVUS_URI: ${MILVUS_URI:-http://host.docker.internal:19530} MILVUS_URI: ${MILVUS_URI:-http://host.docker.internal:19530}
MILVUS_DATABASE: ${MILVUS_DATABASE:-} MILVUS_DATABASE: ${MILVUS_DATABASE:-}
MILVUS_TOKEN: ${MILVUS_TOKEN:-} MILVUS_TOKEN: ${MILVUS_TOKEN:-}
@ -500,7 +501,7 @@ x-shared-env: &shared-api-worker-env
services: services:
# API service # API service
api: api:
image: langgenius/dify-api:1.4.0 image: langgenius/dify-api:1.4.1
restart: always restart: always
environment: environment:
# Use the shared environment variables. # Use the shared environment variables.
@ -529,7 +530,7 @@ services:
# worker service # worker service
# The Celery worker for processing the queue. # The Celery worker for processing the queue.
worker: worker:
image: langgenius/dify-api:1.4.0 image: langgenius/dify-api:1.4.1
restart: always restart: always
environment: environment:
# Use the shared environment variables. # Use the shared environment variables.
@ -555,7 +556,7 @@ services:
# Frontend web application. # Frontend web application.
web: web:
image: langgenius/dify-web:1.4.0 image: langgenius/dify-web:1.4.1
restart: always restart: always
environment: environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-} CONSOLE_API_URL: ${CONSOLE_API_URL:-}
@ -640,7 +641,7 @@ services:
# plugin daemon # plugin daemon
plugin_daemon: plugin_daemon:
image: langgenius/dify-plugin-daemon:0.0.10-local image: langgenius/dify-plugin-daemon:0.1.1-local
restart: always restart: always
environment: environment:
# Use the shared environment variables. # Use the shared environment variables.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 53 KiB

After

Width:  |  Height:  |  Size: 60 KiB

View File

@ -2,7 +2,7 @@
import { useContext, useContextSelector } from 'use-context-selector' import { useContext, useContextSelector } from 'use-context-selector'
import { useRouter } from 'next/navigation' import { useRouter } from 'next/navigation'
import { useCallback, useEffect, useState } from 'react' import { useCallback, useEffect, useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next' import { useTranslation } from 'react-i18next'
import { RiBuildingLine, RiGlobalLine, RiLockLine, RiMoreFill, RiVerifiedBadgeLine } from '@remixicon/react' import { RiBuildingLine, RiGlobalLine, RiLockLine, RiMoreFill, RiVerifiedBadgeLine } from '@remixicon/react'
import cn from '@/utils/classnames' import cn from '@/utils/classnames'
@ -35,6 +35,7 @@ import Tooltip from '@/app/components/base/tooltip'
import AccessControl from '@/app/components/app/app-access-control' import AccessControl from '@/app/components/app/app-access-control'
import { AccessMode } from '@/models/access-control' import { AccessMode } from '@/models/access-control'
import { useGlobalPublicStore } from '@/context/global-public-context' import { useGlobalPublicStore } from '@/context/global-public-context'
import { formatTime } from '@/utils/time'
export type AppCardProps = { export type AppCardProps = {
app: App app: App
@ -296,6 +297,15 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
setTags(app.tags) setTags(app.tags)
}, [app.tags]) }, [app.tags])
const EditTimeText = useMemo(() => {
const timeText = formatTime({
date: (app.updated_at || app.created_at) * 1000,
dateFormat: 'MM/DD/YYYY h:mm',
})
return `${t('datasetDocuments.segment.editedAt')} ${timeText}`
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [app.updated_at, app.created_at])
return ( return (
<> <>
<div <div
@ -320,12 +330,10 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
<div className='flex items-center text-sm font-semibold leading-5 text-text-secondary'> <div className='flex items-center text-sm font-semibold leading-5 text-text-secondary'>
<div className='truncate' title={app.name}>{app.name}</div> <div className='truncate' title={app.name}>{app.name}</div>
</div> </div>
<div className='flex items-center text-[10px] font-medium leading-[18px] text-text-tertiary'> <div className='flex items-center gap-1 text-[10px] font-medium leading-[18px] text-text-tertiary'>
{app.mode === 'advanced-chat' && <div className='truncate'>{t('app.types.advanced').toUpperCase()}</div>} <div className='truncate' title={app.author_name}>{app.author_name}</div>
{app.mode === 'chat' && <div className='truncate'>{t('app.types.chatbot').toUpperCase()}</div>} <div>·</div>
{app.mode === 'agent-chat' && <div className='truncate'>{t('app.types.agent').toUpperCase()}</div>} <div className='truncate'>{EditTimeText}</div>
{app.mode === 'workflow' && <div className='truncate'>{t('app.types.workflow').toUpperCase()}</div>}
{app.mode === 'completion' && <div className='truncate'>{t('app.types.completion').toUpperCase()}</div>}
</div> </div>
</div> </div>
<div className='flex h-5 w-5 shrink-0 items-center justify-center'> <div className='flex h-5 w-5 shrink-0 items-center justify-center'>

View File

@ -111,7 +111,7 @@ const DatasetCard = ({
return ( return (
<> <>
<div <div
className='group relative col-span-1 flex min-h-[160px] cursor-pointer flex-col rounded-xl border-[0.5px] border-solid border-components-card-border bg-components-card-bg shadow-sm transition-all duration-200 ease-in-out hover:shadow-lg' className='group relative col-span-1 flex min-h-[171px] cursor-pointer flex-col rounded-xl border-[0.5px] border-solid border-components-card-border bg-components-card-bg shadow-sm transition-all duration-200 ease-in-out hover:shadow-lg'
data-disable-nprogress={true} data-disable-nprogress={true}
onClick={(e) => { onClick={(e) => {
e.preventDefault() e.preventDefault()

View File

@ -32,7 +32,6 @@ import useBreakpoints, { MediaType } from '@/hooks/use-breakpoints'
import TextGeneration from '@/app/components/app/text-generate/item' import TextGeneration from '@/app/components/app/text-generate/item'
import { addFileInfos, sortAgentSorts } from '@/app/components/tools/utils' import { addFileInfos, sortAgentSorts } from '@/app/components/tools/utils'
import MessageLogModal from '@/app/components/base/message-log-modal' import MessageLogModal from '@/app/components/base/message-log-modal'
import PromptLogModal from '@/app/components/base/prompt-log-modal'
import { useStore as useAppStore } from '@/app/components/app/store' import { useStore as useAppStore } from '@/app/components/app/store'
import { useAppContext } from '@/context/app-context' import { useAppContext } from '@/context/app-context'
import useTimestamp from '@/hooks/use-timestamp' import useTimestamp from '@/hooks/use-timestamp'
@ -191,13 +190,11 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
const { userProfile: { timezone } } = useAppContext() const { userProfile: { timezone } } = useAppContext()
const { formatTime } = useTimestamp() const { formatTime } = useTimestamp()
const { onClose, appDetail } = useContext(DrawerContext) const { onClose, appDetail } = useContext(DrawerContext)
const { currentLogItem, setCurrentLogItem, showMessageLogModal, setShowMessageLogModal, showPromptLogModal, setShowPromptLogModal, currentLogModalActiveTab } = useAppStore(useShallow(state => ({ const { currentLogItem, setCurrentLogItem, showMessageLogModal, setShowMessageLogModal, currentLogModalActiveTab } = useAppStore(useShallow(state => ({
currentLogItem: state.currentLogItem, currentLogItem: state.currentLogItem,
setCurrentLogItem: state.setCurrentLogItem, setCurrentLogItem: state.setCurrentLogItem,
showMessageLogModal: state.showMessageLogModal, showMessageLogModal: state.showMessageLogModal,
setShowMessageLogModal: state.setShowMessageLogModal, setShowMessageLogModal: state.setShowMessageLogModal,
showPromptLogModal: state.showPromptLogModal,
setShowPromptLogModal: state.setShowPromptLogModal,
currentLogModalActiveTab: state.currentLogModalActiveTab, currentLogModalActiveTab: state.currentLogModalActiveTab,
}))) })))
const { t } = useTranslation() const { t } = useTranslation()
@ -518,16 +515,6 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
defaultTab={currentLogModalActiveTab} defaultTab={currentLogModalActiveTab}
/> />
)} )}
{showPromptLogModal && (
<PromptLogModal
width={width}
currentLogItem={currentLogItem}
onCancel={() => {
setCurrentLogItem()
setShowPromptLogModal(false)
}}
/>
)}
</div> </div>
) )
} }

View File

@ -234,6 +234,4 @@ const Answer: FC<AnswerProps> = ({
) )
} }
export default memo(Answer, (prevProps, nextProps) => export default memo(Answer)
prevProps.responding === false && nextProps.responding === false,
)

View File

@ -134,6 +134,24 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
const languageShowName = getCorrectCapitalizationLanguageName(language || '') const languageShowName = getCorrectCapitalizationLanguageName(language || '')
const isDarkMode = theme === Theme.dark const isDarkMode = theme === Theme.dark
const echartsStyle = useMemo(() => ({
height: '350px',
width: '100%',
}), [])
const echartsOpts = useMemo(() => ({
renderer: 'canvas',
width: 'auto',
}) as any, [])
const echartsOnEvents = useMemo(() => ({
finished: () => {
const instance = echartsRef.current?.getEchartsInstance?.()
if (instance)
instance.resize()
},
}), [echartsRef]) // echartsRef is stable, so this effectively runs once.
// Handle container resize for echarts // Handle container resize for echarts
useEffect(() => { useEffect(() => {
if (language !== 'echarts' || !echartsRef.current) return if (language !== 'echarts' || !echartsRef.current) return
@ -329,24 +347,11 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
<ReactEcharts <ReactEcharts
ref={echartsRef} ref={echartsRef}
option={finalChartOption} option={finalChartOption}
style={{ style={echartsStyle}
height: '350px',
width: '100%',
}}
theme={isDarkMode ? 'dark' : undefined} theme={isDarkMode ? 'dark' : undefined}
opts={{ opts={echartsOpts}
renderer: 'canvas',
width: 'auto',
}}
notMerge={true} notMerge={true}
onEvents={{ onEvents={echartsOnEvents}
// Force resize when chart is finished rendering
finished: () => {
const instance = echartsRef.current?.getEchartsInstance?.()
if (instance)
instance.resize()
},
}}
/> />
</ErrorBoundary> </ErrorBoundary>
</div> </div>
@ -374,15 +379,9 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
<ReactEcharts <ReactEcharts
ref={echartsRef} ref={echartsRef}
option={errorOption} option={errorOption}
style={{ style={echartsStyle}
height: '350px',
width: '100%',
}}
theme={isDarkMode ? 'dark' : undefined} theme={isDarkMode ? 'dark' : undefined}
opts={{ opts={echartsOpts}
renderer: 'canvas',
width: 'auto',
}}
notMerge={true} notMerge={true}
/> />
</ErrorBoundary> </ErrorBoundary>
@ -423,7 +422,7 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
</SyntaxHighlighter> </SyntaxHighlighter>
) )
} }
}, [children, language, isSVG, finalChartOption, props, theme, match]) }, [children, language, isSVG, finalChartOption, props, theme, match, chartState, isDarkMode, echartsStyle, echartsOpts, echartsOnEvents])
if (inline || !match) if (inline || !match)
return <code {...props} className={className}>{children}</code> return <code {...props} className={className}>{children}</code>

View File

@ -91,6 +91,11 @@ const initMermaid = () => {
numberSectionStyles: 4, numberSectionStyles: 4,
axisFormat: '%Y-%m-%d', axisFormat: '%Y-%m-%d',
}, },
mindmap: {
useMaxWidth: true,
padding: 10,
diagramPadding: 20,
},
maxTextSize: 50000, maxTextSize: 50000,
}) })
isMermaidInitialized = true isMermaidInitialized = true
@ -289,11 +294,12 @@ const Flowchart = React.forwardRef((props: {
try { try {
let finalCode: string let finalCode: string
// Check if it's a gantt chart // Check if it's a gantt chart or mindmap
const isGanttChart = primitiveCode.trim().startsWith('gantt') const isGanttChart = primitiveCode.trim().startsWith('gantt')
const isMindMap = primitiveCode.trim().startsWith('mindmap')
if (isGanttChart) { if (isGanttChart || isMindMap) {
// For gantt charts, ensure each task is on its own line // For gantt charts and mindmaps, ensure each task is on its own line
// and preserve exact whitespace/format // and preserve exact whitespace/format
finalCode = primitiveCode.trim() finalCode = primitiveCode.trim()
} }
@ -352,6 +358,11 @@ const Flowchart = React.forwardRef((props: {
numberSectionStyles: 4, numberSectionStyles: 4,
axisFormat: '%Y-%m-%d', axisFormat: '%Y-%m-%d',
}, },
mindmap: {
useMaxWidth: true,
padding: 10,
diagramPadding: 20,
},
} }
if (look === 'classic') { if (look === 'classic') {

View File

@ -22,6 +22,10 @@ export function preprocessMermaidCode(code: string): string {
.replace(/section\s+([^:]+):/g, (match, sectionName) => `section ${sectionName}`) .replace(/section\s+([^:]+):/g, (match, sectionName) => `section ${sectionName}`)
// Fix common syntax issues // Fix common syntax issues
.replace(/fifopacket/g, 'rect') .replace(/fifopacket/g, 'rect')
// Ensure graph has direction
.replace(/^graph\s+((?:TB|BT|RL|LR)*)/, (match, direction) => {
return direction ? match : 'graph TD'
})
// Clean up empty lines and extra spaces // Clean up empty lines and extra spaces
.trim() .trim()
} }
@ -32,9 +36,9 @@ export function preprocessMermaidCode(code: string): string {
export function prepareMermaidCode(code: string, style: 'classic' | 'handDrawn'): string { export function prepareMermaidCode(code: string, style: 'classic' | 'handDrawn'): string {
let finalCode = preprocessMermaidCode(code) let finalCode = preprocessMermaidCode(code)
// Special handling for gantt charts // Special handling for gantt charts and mindmaps
if (finalCode.trim().startsWith('gantt')) { if (finalCode.trim().startsWith('gantt') || finalCode.trim().startsWith('mindmap')) {
// For gantt charts, preserve the structure exactly as is // For gantt charts and mindmaps, preserve the structure exactly as is
return finalCode return finalCode
} }
@ -173,8 +177,15 @@ export function isMermaidCodeComplete(code: string): boolean {
return lines.length >= 3 return lines.length >= 3
} }
// Special handling for mindmaps
if (trimmedCode.startsWith('mindmap')) {
// For mindmaps, check if it has at least a root node
const lines = trimmedCode.split('\n').filter(line => line.trim().length > 0)
return lines.length >= 2
}
// Check for basic syntax structure // Check for basic syntax structure
const hasValidStart = /^(graph|flowchart|sequenceDiagram|classDiagram|classDef|class|stateDiagram|gantt|pie|er|journey|requirementDiagram)/.test(trimmedCode) const hasValidStart = /^(graph|flowchart|sequenceDiagram|classDiagram|classDef|class|stateDiagram|gantt|pie|er|journey|requirementDiagram|mindmap)/.test(trimmedCode)
// Check for balanced brackets and parentheses // Check for balanced brackets and parentheses
const isBalanced = (() => { const isBalanced = (() => {

View File

@ -91,6 +91,7 @@ const NewChildSegmentModal: FC<NewChildSegmentModalProps> = ({
customComponent: isFullDocMode && CustomButton, customComponent: isFullDocMode && CustomButton,
}) })
handleCancel('add') handleCancel('add')
setContent('')
if (isFullDocMode) { if (isFullDocMode) {
refreshTimer.current = setTimeout(() => { refreshTimer.current = setTimeout(() => {
onSave() onSave()

View File

@ -118,6 +118,9 @@ const NewSegmentModal: FC<NewSegmentModalProps> = ({
customComponent: CustomButton, customComponent: CustomButton,
}) })
handleCancel('add') handleCancel('add')
setQuestion('')
setAnswer('')
setKeywords([])
refreshTimer.current = setTimeout(() => { refreshTimer.current = setTimeout(() => {
onSave() onSave()
}, 3000) }, 3000)

View File

@ -1,6 +1,5 @@
import type { FC } from 'react' import type { FC } from 'react'
import type { ModelProvider } from '../declarations' import type { ModelProvider } from '../declarations'
import { basePath } from '@/utils/var'
import { useLanguage } from '../hooks' import { useLanguage } from '../hooks'
import { Openai } from '@/app/components/base/icons/src/vender/other' import { Openai } from '@/app/components/base/icons/src/vender/other'
import { AnthropicDark, AnthropicLight } from '@/app/components/base/icons/src/public/llm' import { AnthropicDark, AnthropicLight } from '@/app/components/base/icons/src/public/llm'
@ -41,7 +40,7 @@ const ProviderIcon: FC<ProviderIconProps> = ({
<div className={cn('inline-flex items-center gap-2', className)}> <div className={cn('inline-flex items-center gap-2', className)}>
<img <img
alt='provider-icon' alt='provider-icon'
src={basePath + renderI18nObject(provider.icon_small, language)} src={renderI18nObject(provider.icon_small, language)}
className='h-6 w-6' className='h-6 w-6'
/> />
<div className='system-md-semibold text-text-primary'> <div className='system-md-semibold text-text-primary'>

View File

@ -1,17 +1,39 @@
'use client'
import useTheme from '@/hooks/use-theme'
type LineProps = { type LineProps = {
className?: string className?: string
} }
const Line = ({ const Line = ({
className, className,
}: LineProps) => { }: LineProps) => {
const { theme } = useTheme()
const isDarkMode = theme === 'dark'
if (isDarkMode) {
return (
<svg xmlns='http://www.w3.org/2000/svg' width='2' height='240' viewBox='0 0 2 240' fill='none' className={className}>
<path d='M1 0L1 240' stroke='url(#paint0_linear_6295_52176)' />
<defs>
<linearGradient id='paint0_linear_6295_52176' x1='-7.99584' y1='240' x2='-7.88094' y2='3.95539e-05' gradientUnits='userSpaceOnUse'>
<stop stopOpacity='0.01' />
<stop offset='0.503965' stopColor='#C8CEDA' stopOpacity='0.14' />
<stop offset='1' stopOpacity='0.01' />
</linearGradient>
</defs>
</svg>
)
}
return ( return (
<svg xmlns="http://www.w3.org/2000/svg" width="2" height="241" viewBox="0 0 2 241" fill="none" className={className}> <svg xmlns='http://www.w3.org/2000/svg' width='2' height='241' viewBox='0 0 2 241' fill='none' className={className}>
<path d="M1 0.5L1 240.5" stroke="url(#paint0_linear_1989_74474)"/> <path d='M1 0.5L1 240.5' stroke='url(#paint0_linear_1989_74474)' />
<defs> <defs>
<linearGradient id="paint0_linear_1989_74474" x1="-7.99584" y1="240.5" x2="-7.88094" y2="0.50004" gradientUnits="userSpaceOnUse"> <linearGradient id='paint0_linear_1989_74474' x1='-7.99584' y1='240.5' x2='-7.88094' y2='0.50004' gradientUnits='userSpaceOnUse'>
<stop stopColor="white" stopOpacity="0.01"/> <stop stopColor='white' stopOpacity='0.01' />
<stop offset="0.503965" stopColor="#101828" stopOpacity="0.08"/> <stop offset='0.503965' stopColor='#101828' stopOpacity='0.08' />
<stop offset="1" stopColor="white" stopOpacity="0.01"/> <stop offset='1' stopColor='white' stopOpacity='0.01' />
</linearGradient> </linearGradient>
</defs> </defs>
</svg> </svg>

View File

@ -78,7 +78,7 @@ const ActionList = ({
className='w-full' className='w-full'
onClick={() => setShowSettingAuth(true)} onClick={() => setShowSettingAuth(true)}
disabled={!isCurrentWorkspaceManager} disabled={!isCurrentWorkspaceManager}
>{t('tools.auth.unauthorized')}</Button> >{t('workflow.nodes.tool.authorize')}</Button>
)} )}
</div> </div>
<div className='flex flex-col gap-2'> <div className='flex flex-col gap-2'>

View File

@ -141,7 +141,7 @@ const MultipleToolSelector = ({
} }
panelShowState={panelShowState} panelShowState={panelShowState}
onPanelShowStateChange={setPanelShowState} onPanelShowStateChange={setPanelShowState}
isEdit={false}
/> />
{value.length === 0 && ( {value.length === 0 && (
<div className='system-xs-regular flex justify-center rounded-[10px] bg-background-section p-3 text-text-tertiary'>{t('plugin.detailPanel.toolSelector.empty')}</div> <div className='system-xs-regular flex justify-center rounded-[10px] bg-background-section p-3 text-text-tertiary'>{t('plugin.detailPanel.toolSelector.empty')}</div>
@ -158,6 +158,7 @@ const MultipleToolSelector = ({
onSelect={item => handleConfigure(item, index)} onSelect={item => handleConfigure(item, index)}
onDelete={() => handleDelete(index)} onDelete={() => handleDelete(index)}
supportEnableSwitch supportEnableSwitch
isEdit
/> />
</div> </div>
))} ))}

View File

@ -54,6 +54,7 @@ type Props = {
scope?: string scope?: string
value?: ToolValue value?: ToolValue
selectedTools?: ToolValue[] selectedTools?: ToolValue[]
isEdit?: boolean
onSelect: (tool: { onSelect: (tool: {
provider_name: string provider_name: string
tool_name: string tool_name: string
@ -77,6 +78,7 @@ type Props = {
const ToolSelector: FC<Props> = ({ const ToolSelector: FC<Props> = ({
value, value,
selectedTools, selectedTools,
isEdit,
disabled, disabled,
placement = 'left', placement = 'left',
offset = 4, offset = 4,
@ -277,7 +279,7 @@ const ToolSelector: FC<Props> = ({
<div className={cn('relative max-h-[642px] min-h-20 w-[361px] rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur pb-4 shadow-lg backdrop-blur-sm', !isShowSettingAuth && 'overflow-y-auto pb-2')}> <div className={cn('relative max-h-[642px] min-h-20 w-[361px] rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur pb-4 shadow-lg backdrop-blur-sm', !isShowSettingAuth && 'overflow-y-auto pb-2')}>
{!isShowSettingAuth && ( {!isShowSettingAuth && (
<> <>
<div className='system-xl-semibold px-4 pb-1 pt-3.5 text-text-primary'>{t('plugin.detailPanel.toolSelector.title')}</div> <div className='system-xl-semibold px-4 pb-1 pt-3.5 text-text-primary'>{t(`plugin.detailPanel.toolSelector.${isEdit ? 'toolSetting' : 'title'}`)}</div>
{/* base form */} {/* base form */}
<div className='flex flex-col gap-3 px-4 py-2'> <div className='flex flex-col gap-3 px-4 py-2'>
<div className='flex flex-col gap-1'> <div className='flex flex-col gap-1'>

View File

@ -12,6 +12,7 @@ import { useTranslation } from 'react-i18next'
import { SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS } from '@/config' import { SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS } from '@/config'
import { noop } from 'lodash-es' import { noop } from 'lodash-es'
import { useGlobalPublicStore } from '@/context/global-public-context' import { useGlobalPublicStore } from '@/context/global-public-context'
import Button from '@/app/components/base/button'
const Empty = () => { const Empty = () => {
const { t } = useTranslation() const { t } = useTranslation()
@ -43,14 +44,14 @@ const Empty = () => {
{/* skeleton */} {/* skeleton */}
<div className='absolute top-0 z-10 grid h-full w-full grid-cols-2 gap-2 overflow-hidden px-12'> <div className='absolute top-0 z-10 grid h-full w-full grid-cols-2 gap-2 overflow-hidden px-12'>
{Array.from({ length: 20 }).fill(0).map((_, i) => ( {Array.from({ length: 20 }).fill(0).map((_, i) => (
<div key={i} className='h-[100px] rounded-xl bg-components-card-bg' /> <div key={i} className='h-24 rounded-xl bg-components-card-bg' />
))} ))}
</div> </div>
{/* mask */} {/* mask */}
<div className='absolute z-20 h-full w-full bg-gradient-to-b from-components-panel-bg-transparent to-components-panel-bg' /> <div className='absolute z-20 h-full w-full bg-gradient-to-b from-components-panel-bg-transparent to-components-panel-bg' />
<div className='relative z-30 flex h-full items-center justify-center'> <div className='relative z-30 flex h-full items-center justify-center'>
<div className='flex flex-col items-center gap-y-3'> <div className='flex flex-col items-center gap-y-3'>
<div className='relative -z-10 flex h-[52px] w-[52px] items-center justify-center rounded-xl <div className='relative -z-10 flex size-14 items-center justify-center rounded-xl
border-[1px] border-dashed border-divider-deep bg-components-card-bg shadow-xl shadow-shadow-shadow-5'> border-[1px] border-dashed border-divider-deep bg-components-card-bg shadow-xl shadow-shadow-shadow-5'>
<Group className='h-5 w-5 text-text-tertiary' /> <Group className='h-5 w-5 text-text-tertiary' />
<Line className='absolute right-[-1px] top-1/2 -translate-y-1/2' /> <Line className='absolute right-[-1px] top-1/2 -translate-y-1/2' />
@ -58,10 +59,10 @@ const Empty = () => {
<Line className='absolute left-1/2 top-0 -translate-x-1/2 -translate-y-1/2 rotate-90' /> <Line className='absolute left-1/2 top-0 -translate-x-1/2 -translate-y-1/2 rotate-90' />
<Line className='absolute left-1/2 top-full -translate-x-1/2 -translate-y-1/2 rotate-90' /> <Line className='absolute left-1/2 top-full -translate-x-1/2 -translate-y-1/2 rotate-90' />
</div> </div>
<div className='text-sm font-normal text-text-tertiary'> <div className='system-md-regular text-text-tertiary'>
{text} {text}
</div> </div>
<div className='flex w-[240px] flex-col'> <div className='flex w-[236px] flex-col'>
<input <input
type='file' type='file'
ref={fileInputRef} ref={fileInputRef}
@ -79,10 +80,9 @@ const Empty = () => {
{ icon: Github, text: t('plugin.list.source.github'), action: 'github' }, { icon: Github, text: t('plugin.list.source.github'), action: 'github' },
{ icon: FileZip, text: t('plugin.list.source.local'), action: 'local' }, { icon: FileZip, text: t('plugin.list.source.local'), action: 'local' },
].map(({ icon: Icon, text, action }) => ( ].map(({ icon: Icon, text, action }) => (
<div <Button
key={action} key={action}
className='flex cursor-pointer items-center gap-x-1 rounded-lg border-[0.5px] bg-components-button-secondary-bg className='justify-start gap-x-0.5 px-3'
px-3 py-2 shadow-xs shadow-shadow-shadow-3 hover:bg-state-base-hover'
onClick={() => { onClick={() => {
if (action === 'local') if (action === 'local')
fileInputRef.current?.click() fileInputRef.current?.click()
@ -92,9 +92,9 @@ const Empty = () => {
setSelectedAction(action) setSelectedAction(action)
}} }}
> >
<Icon className="h-4 w-4 text-text-tertiary" /> <Icon className='size-4' />
<span className='system-md-regular text-text-secondary'>{text}</span> <span className='px-0.5'>{text}</span>
</div> </Button>
))} ))}
</div> </div>
</div> </div>

View File

@ -1,20 +1,23 @@
'use client' 'use client'
import { useMemo } from 'react' import { useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import type { FilterState } from './filter-management' import type { FilterState } from './filter-management'
import FilterManagement from './filter-management' import FilterManagement from './filter-management'
import List from './list' import List from './list'
import { useInstalledLatestVersion, useInstalledPluginList, useInvalidateInstalledPluginList } from '@/service/use-plugins' import { useInstalledLatestVersion, useInstalledPluginListWithPagination, useInvalidateInstalledPluginList } from '@/service/use-plugins'
import PluginDetailPanel from '@/app/components/plugins/plugin-detail-panel' import PluginDetailPanel from '@/app/components/plugins/plugin-detail-panel'
import { usePluginPageContext } from './context' import { usePluginPageContext } from './context'
import { useDebounceFn } from 'ahooks' import { useDebounceFn } from 'ahooks'
import Button from '@/app/components/base/button'
import Empty from './empty' import Empty from './empty'
import Loading from '../../base/loading' import Loading from '../../base/loading'
import { PluginSource } from '../types' import { PluginSource } from '../types'
const PluginsPanel = () => { const PluginsPanel = () => {
const { t } = useTranslation()
const filters = usePluginPageContext(v => v.filters) as FilterState const filters = usePluginPageContext(v => v.filters) as FilterState
const setFilters = usePluginPageContext(v => v.setFilters) const setFilters = usePluginPageContext(v => v.setFilters)
const { data: pluginList, isLoading: isPluginListLoading } = useInstalledPluginList() const { data: pluginList, isLoading: isPluginListLoading, isFetching, isLastPage, loadNextPage } = useInstalledPluginListWithPagination()
const { data: installedLatestVersion } = useInstalledLatestVersion( const { data: installedLatestVersion } = useInstalledLatestVersion(
pluginList?.plugins pluginList?.plugins
.filter(plugin => plugin.source === PluginSource.marketplace) .filter(plugin => plugin.source === PluginSource.marketplace)
@ -64,10 +67,16 @@ const PluginsPanel = () => {
/> />
</div> </div>
{isPluginListLoading ? <Loading type='app' /> : (filteredList?.length ?? 0) > 0 ? ( {isPluginListLoading ? <Loading type='app' /> : (filteredList?.length ?? 0) > 0 ? (
<div className='flex grow flex-wrap content-start items-start gap-2 self-stretch px-12'> <div className='flex grow flex-wrap content-start items-start justify-center gap-2 self-stretch px-12'>
<div className='w-full'> <div className='w-full'>
<List pluginList={filteredList || []} /> <List pluginList={filteredList || []} />
</div> </div>
{!isLastPage && !isFetching && (
<Button onClick={loadNextPage}>
{t('workflow.common.loadMore')}
</Button>
)}
{isFetching && <div className='system-md-semibold text-text-secondary'>{t('appLog.detail.loading')}</div>}
</div> </div>
) : ( ) : (
<Empty /> <Empty />

View File

@ -325,6 +325,11 @@ export type InstalledPluginListResponse = {
plugins: PluginDetail[] plugins: PluginDetail[]
} }
export type InstalledPluginListWithTotalResponse = {
plugins: PluginDetail[]
total: number
}
export type InstalledLatestVersionResponse = { export type InstalledLatestVersionResponse = {
versions: { versions: {
[plugin_id: string]: { [plugin_id: string]: {

View File

@ -126,7 +126,7 @@ const nodeDefault: NodeDefault<AgentNodeType> = {
} }
} }
// common params // common params
if (param.required && !payload.agent_parameters?.[param.name]?.value) { if (param.required && !(payload.agent_parameters?.[param.name]?.value || param.default)) {
return { return {
isValid: false, isValid: false,
errorMessage: t('workflow.errorMsg.fieldRequired', { field: renderI18nObject(param.label, language) }), errorMessage: t('workflow.errorMsg.fieldRequired', { field: renderI18nObject(param.label, language) }),

View File

@ -42,6 +42,12 @@ const useConfig = (id: string, payload: HttpNodeType) => {
data: transformToBodyPayload(bodyData, [BodyType.formData, BodyType.xWwwFormUrlencoded].includes(newInputs.body.type)), data: transformToBodyPayload(bodyData, [BodyType.formData, BodyType.xWwwFormUrlencoded].includes(newInputs.body.type)),
} }
} }
else if (!bodyData) {
newInputs.body = {
...newInputs.body,
data: [],
}
}
setInputs(newInputs) setInputs(newInputs)
setIsDataReady(true) setIsDataReady(true)
@ -151,7 +157,7 @@ const useConfig = (id: string, payload: HttpNodeType) => {
inputs.url, inputs.url,
inputs.headers, inputs.headers,
inputs.params, inputs.params,
typeof inputs.body.data === 'string' ? inputs.body.data : inputs.body.data.map(item => item.value).join(''), typeof inputs.body.data === 'string' ? inputs.body.data : inputs.body.data?.map(item => item.value).join(''),
fileVarInputs, fileVarInputs,
]) ])

View File

@ -79,7 +79,7 @@ const Panel: FC<NodePanelProps<ToolNodeType>> = ({
className='w-full' className='w-full'
onClick={showSetAuthModal} onClick={showSetAuthModal}
> >
{t(`${i18nPrefix}.toAuthorize`)} {t(`${i18nPrefix}.authorize`)}
</Button> </Button>
</div> </div>
</> </>

View File

@ -28,7 +28,7 @@ const IterationLogTrigger = ({
if (parallelNodes.length > 0) if (parallelNodes.length > 0)
return parallelNodes return parallelNodes
const serialIndex = parseInt(key, 10) const serialIndex = Number.parseInt(key, 10)
if (!isNaN(serialIndex)) { if (!isNaN(serialIndex)) {
const serialNodes = allExecutions.filter(exec => const serialNodes = allExecutions.filter(exec =>
exec.execution_metadata?.iteration_id === nodeInfo.node_id exec.execution_metadata?.iteration_id === nodeInfo.node_id
@ -49,7 +49,6 @@ const IterationLogTrigger = ({
const iterDurationMap = nodeInfo?.iterDurationMap || iterationNodeMeta?.iteration_duration_map || {} const iterDurationMap = nodeInfo?.iterDurationMap || iterationNodeMeta?.iteration_duration_map || {}
let structuredList: NodeTracing[][] = [] let structuredList: NodeTracing[][] = []
if (iterationNodeMeta?.iteration_duration_map) { if (iterationNodeMeta?.iteration_duration_map) {
const instanceKeys = Object.keys(iterationNodeMeta.iteration_duration_map) const instanceKeys = Object.keys(iterationNodeMeta.iteration_duration_map)
structuredList = instanceKeys structuredList = instanceKeys

View File

@ -29,7 +29,7 @@ const LoopLogTrigger = ({
if (parallelNodes.length > 0) if (parallelNodes.length > 0)
return parallelNodes return parallelNodes
const serialIndex = parseInt(key, 10) const serialIndex = Number.parseInt(key, 10)
if (!isNaN(serialIndex)) { if (!isNaN(serialIndex)) {
const serialNodes = allExecutions.filter(exec => const serialNodes = allExecutions.filter(exec =>
exec.execution_metadata?.loop_id === nodeInfo.node_id exec.execution_metadata?.loop_id === nodeInfo.node_id
@ -51,16 +51,15 @@ const LoopLogTrigger = ({
const loopVarMap = loopNodeMeta?.loop_variable_map || {} const loopVarMap = loopNodeMeta?.loop_variable_map || {}
let structuredList: NodeTracing[][] = [] let structuredList: NodeTracing[][] = []
if (nodeInfo.details?.length) {
if (loopNodeMeta?.loop_duration_map) { structuredList = nodeInfo.details
}
else if (loopNodeMeta?.loop_duration_map) {
const instanceKeys = Object.keys(loopNodeMeta.loop_duration_map) const instanceKeys = Object.keys(loopNodeMeta.loop_duration_map)
structuredList = instanceKeys structuredList = instanceKeys
.map(key => filterNodesForInstance(key)) .map(key => filterNodesForInstance(key))
.filter(branchNodes => branchNodes.length > 0) .filter(branchNodes => branchNodes.length > 0)
} }
else if (nodeInfo.details?.length) {
structuredList = nodeInfo.details
}
onShowLoopResultList( onShowLoopResultList(
structuredList, structuredList,

View File

@ -10,7 +10,7 @@ export default function RoutePrefixHandle() {
const addPrefixToImg = (e: HTMLImageElement) => { const addPrefixToImg = (e: HTMLImageElement) => {
const url = new URL(e.src) const url = new URL(e.src)
const prefix = url.pathname.substr(0, basePath.length) const prefix = url.pathname.substr(0, basePath.length)
if (prefix !== basePath) { if (prefix !== basePath && !url.href.startsWith('blob:') && !url.href.startsWith('data:')) {
url.pathname = basePath + url.pathname url.pathname = basePath + url.pathname
e.src = url.toString() e.src = url.toString()
} }

View File

@ -14,7 +14,6 @@ const translation = {
}, },
author: 'Von', author: 'Von',
auth: { auth: {
unauthorized: 'Zur Autorisierung',
authorized: 'Autorisiert', authorized: 'Autorisiert',
setup: 'Autorisierung einrichten, um zu nutzen', setup: 'Autorisierung einrichten, um zu nutzen',
setupModalTitle: 'Autorisierung einrichten', setupModalTitle: 'Autorisierung einrichten',

View File

@ -648,7 +648,6 @@ const translation = {
'assignedVarsDescription': 'Zugewiesene Variablen müssen beschreibbare Variablen sein, z. B. Konversationsvariablen.', 'assignedVarsDescription': 'Zugewiesene Variablen müssen beschreibbare Variablen sein, z. B. Konversationsvariablen.',
}, },
tool: { tool: {
toAuthorize: 'Autorisieren',
inputVars: 'Eingabevariablen', inputVars: 'Eingabevariablen',
outputVars: { outputVars: {
text: 'durch das Tool generierter Inhalt', text: 'durch das Tool generierter Inhalt',

View File

@ -77,6 +77,7 @@ const translation = {
modelNum: '{{num}} MODELS INCLUDED', modelNum: '{{num}} MODELS INCLUDED',
toolSelector: { toolSelector: {
title: 'Add tool', title: 'Add tool',
toolSetting: 'Tool Settings',
toolLabel: 'Tool', toolLabel: 'Tool',
descriptionLabel: 'Tool description', descriptionLabel: 'Tool description',
descriptionPlaceholder: 'Brief description of the tool\'s purpose, e.g., get the temperature for a specific location.', descriptionPlaceholder: 'Brief description of the tool\'s purpose, e.g., get the temperature for a specific location.',

View File

@ -15,7 +15,6 @@ const translation = {
}, },
author: 'By', author: 'By',
auth: { auth: {
unauthorized: 'To Authorize',
authorized: 'Authorized', authorized: 'Authorized',
setup: 'Set up authorization to use', setup: 'Set up authorization to use',
setupModalTitle: 'Set Up Authorization', setupModalTitle: 'Set Up Authorization',

View File

@ -651,7 +651,7 @@ const translation = {
'assignedVarsDescription': 'Assigned variables must be writable variables, such as conversation variables.', 'assignedVarsDescription': 'Assigned variables must be writable variables, such as conversation variables.',
}, },
tool: { tool: {
toAuthorize: 'To authorize', authorize: 'Authorize',
inputVars: 'Input Variables', inputVars: 'Input Variables',
outputVars: { outputVars: {
text: 'tool generated content', text: 'tool generated content',

View File

@ -15,7 +15,6 @@ const translation = {
}, },
author: 'Por', author: 'Por',
auth: { auth: {
unauthorized: 'Para Autorizar',
authorized: 'Autorizado', authorized: 'Autorizado',
setup: 'Configurar la autorización para usar', setup: 'Configurar la autorización para usar',
setupModalTitle: 'Configurar Autorización', setupModalTitle: 'Configurar Autorización',

View File

@ -646,7 +646,6 @@ const translation = {
'assignedVarsDescription': 'Las variables asignadas deben ser variables grabables, como las variables de conversación.', 'assignedVarsDescription': 'Las variables asignadas deben ser variables grabables, como las variables de conversación.',
}, },
tool: { tool: {
toAuthorize: 'Para autorizar',
inputVars: 'Variables de entrada', inputVars: 'Variables de entrada',
outputVars: { outputVars: {
text: 'Contenido generado por la herramienta', text: 'Contenido generado por la herramienta',

View File

@ -15,7 +15,6 @@ const translation = {
}, },
author: 'توسط', author: 'توسط',
auth: { auth: {
unauthorized: 'برای مجوز دادن',
authorized: 'مجوز داده شده', authorized: 'مجوز داده شده',
setup: 'تنظیم مجوز برای استفاده', setup: 'تنظیم مجوز برای استفاده',
setupModalTitle: 'تنظیم مجوز', setupModalTitle: 'تنظیم مجوز',

View File

@ -648,7 +648,6 @@ const translation = {
'varNotSet': 'متغیر NOT Set', 'varNotSet': 'متغیر NOT Set',
}, },
tool: { tool: {
toAuthorize: 'برای مجوز دادن',
inputVars: 'متغیرهای ورودی', inputVars: 'متغیرهای ورودی',
outputVars: { outputVars: {
text: 'محتوای تولید شده توسط ابزار', text: 'محتوای تولید شده توسط ابزار',

View File

@ -14,7 +14,6 @@ const translation = {
}, },
author: 'Par', author: 'Par',
auth: { auth: {
unauthorized: 'Pour Autoriser',
authorized: 'Autorisé', authorized: 'Autorisé',
setup: 'Mettez en place l\'autorisation à utiliser', setup: 'Mettez en place l\'autorisation à utiliser',
setupModalTitle: 'Configurer l\'Autorisation', setupModalTitle: 'Configurer l\'Autorisation',

View File

@ -647,7 +647,6 @@ const translation = {
'selectAssignedVariable': 'Sélectionner la variable affectée...', 'selectAssignedVariable': 'Sélectionner la variable affectée...',
}, },
tool: { tool: {
toAuthorize: 'Autoriser',
inputVars: 'Variables de saisie', inputVars: 'Variables de saisie',
outputVars: { outputVars: {
text: 'contenu généré par l\'outil', text: 'contenu généré par l\'outil',

View File

@ -15,7 +15,6 @@ const translation = {
}, },
author: 'द्वारा', author: 'द्वारा',
auth: { auth: {
unauthorized: 'अधिकृत करने के लिए',
authorized: 'अधिकृत', authorized: 'अधिकृत',
setup: 'उपयोग करने के लिए अधिकृति सेटअप करें', setup: 'उपयोग करने के लिए अधिकृति सेटअप करें',
setupModalTitle: 'अधिकृति सेटअप करें', setupModalTitle: 'अधिकृति सेटअप करें',

View File

@ -664,7 +664,6 @@ const translation = {
'noAssignedVars': 'कोई उपलब्ध असाइन किए गए चर नहीं', 'noAssignedVars': 'कोई उपलब्ध असाइन किए गए चर नहीं',
}, },
tool: { tool: {
toAuthorize: 'अधिकृत करने के लिए',
inputVars: 'इनपुट वेरिएबल्स', inputVars: 'इनपुट वेरिएबल्स',
outputVars: { outputVars: {
text: 'उपकरण द्वारा उत्पन्न सामग्री', text: 'उपकरण द्वारा उत्पन्न सामग्री',

View File

@ -15,7 +15,6 @@ const translation = {
}, },
author: 'Di', author: 'Di',
auth: { auth: {
unauthorized: 'Per Autorizzare',
authorized: 'Autorizzato', authorized: 'Autorizzato',
setup: 'Configura l\'autorizzazione per utilizzare', setup: 'Configura l\'autorizzazione per utilizzare',
setupModalTitle: 'Configura Autorizzazione', setupModalTitle: 'Configura Autorizzazione',

View File

@ -666,7 +666,6 @@ const translation = {
'noVarTip': 'Fare clic sul pulsante "+" per aggiungere variabili', 'noVarTip': 'Fare clic sul pulsante "+" per aggiungere variabili',
}, },
tool: { tool: {
toAuthorize: 'Per autorizzare',
inputVars: 'Variabili di Input', inputVars: 'Variabili di Input',
outputVars: { outputVars: {
text: 'contenuto generato dallo strumento', text: 'contenuto generato dallo strumento',

Some files were not shown because too many files have changed in this diff Show More