From 4124e804a0f9d0c32ca31873b9cf7696f2dbc046 Mon Sep 17 00:00:00 2001 From: IAOTW Date: Tue, 8 Apr 2025 16:04:50 +0800 Subject: [PATCH 01/15] fix(transport): add missing verify parameter to httpx.HTTPTransport (#17612) --- api/core/helper/ssrf_proxy.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/api/core/helper/ssrf_proxy.py b/api/core/helper/ssrf_proxy.py index 6367e45638..969cd112ee 100644 --- a/api/core/helper/ssrf_proxy.py +++ b/api/core/helper/ssrf_proxy.py @@ -56,8 +56,12 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs): response = client.request(method=method, url=url, **kwargs) elif dify_config.SSRF_PROXY_HTTP_URL and dify_config.SSRF_PROXY_HTTPS_URL: proxy_mounts = { - "http://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTP_URL), - "https://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTPS_URL), + "http://": httpx.HTTPTransport( + proxy=dify_config.SSRF_PROXY_HTTP_URL, verify=HTTP_REQUEST_NODE_SSL_VERIFY + ), + "https://": httpx.HTTPTransport( + proxy=dify_config.SSRF_PROXY_HTTPS_URL, verify=HTTP_REQUEST_NODE_SSL_VERIFY + ), } with httpx.Client(mounts=proxy_mounts, verify=HTTP_REQUEST_NODE_SSL_VERIFY) as client: response = client.request(method=method, url=url, **kwargs) From 5a6219c726a6fec0c4ced717e35cee5ec7b4a7d7 Mon Sep 17 00:00:00 2001 From: Joel Date: Tue, 8 Apr 2025 16:39:11 +0800 Subject: [PATCH 02/15] chore: add unit test to high frequency hooks (#17617) --- .../model-provider-page/hooks.spec.ts | 90 ++++++++++++++++++ web/hooks/use-breakpoints.spec.ts | 93 +++++++++++++++++++ web/jest.config.ts | 4 +- 3 files changed, 185 insertions(+), 2 deletions(-) create mode 100644 web/app/components/header/account-setting/model-provider-page/hooks.spec.ts create mode 100644 web/hooks/use-breakpoints.spec.ts diff --git a/web/app/components/header/account-setting/model-provider-page/hooks.spec.ts b/web/app/components/header/account-setting/model-provider-page/hooks.spec.ts new 
file mode 100644 index 0000000000..4d6941ddc6 --- /dev/null +++ b/web/app/components/header/account-setting/model-provider-page/hooks.spec.ts @@ -0,0 +1,90 @@ +import { renderHook } from '@testing-library/react' +import { useLanguage } from './hooks' +import { useContext } from 'use-context-selector' +import { after } from 'node:test' + +jest.mock('swr', () => ({ + __esModule: true, + default: jest.fn(), // mock useSWR + useSWRConfig: jest.fn(), +})) + +// mock use-context-selector +jest.mock('use-context-selector', () => ({ + useContext: jest.fn(), +})) + +// mock service/common functions +jest.mock('@/service/common', () => ({ + fetchDefaultModal: jest.fn(), + fetchModelList: jest.fn(), + fetchModelProviderCredentials: jest.fn(), + fetchModelProviders: jest.fn(), + getPayUrl: jest.fn(), +})) + +// mock context hooks +jest.mock('@/context/i18n', () => ({ + __esModule: true, + default: jest.fn(), +})) + +jest.mock('@/context/provider-context', () => ({ + useProviderContext: jest.fn(), +})) + +jest.mock('@/context/modal-context', () => ({ + useModalContextSelector: jest.fn(), +})) + +jest.mock('@/context/event-emitter', () => ({ + useEventEmitterContextContext: jest.fn(), +})) + +// mock plugins +jest.mock('@/app/components/plugins/marketplace/hooks', () => ({ + useMarketplacePlugins: jest.fn(), +})) + +jest.mock('@/app/components/plugins/marketplace/utils', () => ({ + getMarketplacePluginsByCollectionId: jest.fn(), +})) + +jest.mock('./provider-added-card', () => { + // eslint-disable-next-line no-labels, ts/no-unused-expressions + UPDATE_MODEL_PROVIDER_CUSTOM_MODEL_LIST: [] +}) + +after(() => { + jest.resetModules() + jest.clearAllMocks() +}) + +describe('useLanguage', () => { + it('should replace hyphen with underscore in locale', () => { + (useContext as jest.Mock).mockReturnValue({ + locale: 'en-US', + }) + const { result } = renderHook(() => useLanguage()) + expect(result.current).toBe('en_US') + }) + + it('should return locale as is if no hyphen exists', () 
=> { + (useContext as jest.Mock).mockReturnValue({ + locale: 'enUS', + }) + + const { result } = renderHook(() => useLanguage()) + expect(result.current).toBe('enUS') + }) + + it('should handle multiple hyphens', () => { + // Mock the I18n context return value + (useContext as jest.Mock).mockReturnValue({ + locale: 'zh-Hans-CN', + }) + + const { result } = renderHook(() => useLanguage()) + expect(result.current).toBe('zh_Hans-CN') + }) +}) diff --git a/web/hooks/use-breakpoints.spec.ts b/web/hooks/use-breakpoints.spec.ts new file mode 100644 index 0000000000..315e514f0f --- /dev/null +++ b/web/hooks/use-breakpoints.spec.ts @@ -0,0 +1,93 @@ +import { act, renderHook } from '@testing-library/react' +import useBreakpoints, { MediaType } from './use-breakpoints' + +describe('useBreakpoints', () => { + const originalInnerWidth = window.innerWidth + + // Mock the window resize event + const fireResize = (width: number) => { + window.innerWidth = width + act(() => { + window.dispatchEvent(new Event('resize')) + }) + } + + // Restore the original innerWidth after tests + afterAll(() => { + window.innerWidth = originalInnerWidth + }) + + it('should return mobile for width <= 640px', () => { + // Mock window.innerWidth for mobile + Object.defineProperty(window, 'innerWidth', { + writable: true, + configurable: true, + value: 640, + }) + + const { result } = renderHook(() => useBreakpoints()) + expect(result.current).toBe(MediaType.mobile) + }) + + it('should return tablet for width > 640px and <= 768px', () => { + // Mock window.innerWidth for tablet + Object.defineProperty(window, 'innerWidth', { + writable: true, + configurable: true, + value: 768, + }) + + const { result } = renderHook(() => useBreakpoints()) + expect(result.current).toBe(MediaType.tablet) + }) + + it('should return pc for width > 768px', () => { + // Mock window.innerWidth for pc + Object.defineProperty(window, 'innerWidth', { + writable: true, + configurable: true, + value: 1024, + }) + + const { result 
} = renderHook(() => useBreakpoints()) + expect(result.current).toBe(MediaType.pc) + }) + + it('should update media type when window resizes', () => { + // Start with desktop + Object.defineProperty(window, 'innerWidth', { + writable: true, + configurable: true, + value: 1024, + }) + + const { result } = renderHook(() => useBreakpoints()) + expect(result.current).toBe(MediaType.pc) + + // Resize to tablet + fireResize(768) + expect(result.current).toBe(MediaType.tablet) + + // Resize to mobile + fireResize(600) + expect(result.current).toBe(MediaType.mobile) + }) + + it('should clean up event listeners on unmount', () => { + // Spy on addEventListener and removeEventListener + const addEventListenerSpy = jest.spyOn(window, 'addEventListener') + const removeEventListenerSpy = jest.spyOn(window, 'removeEventListener') + + const { unmount } = renderHook(() => useBreakpoints()) + + // Unmount should trigger cleanup + unmount() + + expect(addEventListenerSpy).toHaveBeenCalledWith('resize', expect.any(Function)) + expect(removeEventListenerSpy).toHaveBeenCalledWith('resize', expect.any(Function)) + + // Clean up spies + addEventListenerSpy.mockRestore() + removeEventListenerSpy.mockRestore() + }) +}) diff --git a/web/jest.config.ts b/web/jest.config.ts index aa2f22bf82..9164734d64 100644 --- a/web/jest.config.ts +++ b/web/jest.config.ts @@ -98,7 +98,7 @@ const config: Config = { // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module moduleNameMapper: { - '^@/components/(.*)$': '/components/$1', + '^@/(.*)$': '/$1', '^lodash-es$': 'lodash', }, @@ -133,7 +133,7 @@ const config: Config = { // restoreMocks: false, // The root directory that Jest should scan for tests and modules within - // rootDir: undefined, + rootDir: './', // A list of paths to directories that Jest should use to search for files in // roots: [ From be3ebea45b0e86ed3d62a2869ecbb36b0ee1d388 Mon Sep 17 00:00:00 2001 From: Bowen 
Liang Date: Tue, 8 Apr 2025 17:12:25 +0800 Subject: [PATCH 03/15] chore: bump pnpm to v10 in web dockerfile (#17611) --- .devcontainer/post_create_command.sh | 2 +- .github/workflows/web-tests.yml | 2 ++ web/Dockerfile | 2 +- web/README.md | 4 +++- 4 files changed, 7 insertions(+), 3 deletions(-) diff --git a/.devcontainer/post_create_command.sh b/.devcontainer/post_create_command.sh index 5e76bdc2a3..c53c26bb9a 100755 --- a/.devcontainer/post_create_command.sh +++ b/.devcontainer/post_create_command.sh @@ -1,6 +1,6 @@ #!/bin/bash -npm add -g pnpm@9.12.2 +npm add -g pnpm@10.8.0 cd web && pnpm install pipx install poetry diff --git a/.github/workflows/web-tests.yml b/.github/workflows/web-tests.yml index acee26af2f..7fe3f45a8a 100644 --- a/.github/workflows/web-tests.yml +++ b/.github/workflows/web-tests.yml @@ -31,7 +31,9 @@ jobs: uses: tj-actions/changed-files@v45 with: files: web/** + - name: Install pnpm + if: steps.changed-files.outputs.any_changed == 'true' uses: pnpm/action-setup@v4 with: version: 10 diff --git a/web/Dockerfile b/web/Dockerfile index 8d50154873..80ec6d652c 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -6,7 +6,7 @@ LABEL maintainer="takatost@gmail.com" # RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories RUN apk add --no-cache tzdata -RUN npm install -g pnpm@9.12.2 +RUN npm install -g pnpm@10.8.0 ENV PNPM_HOME="/pnpm" ENV PATH="$PNPM_HOME:$PATH" diff --git a/web/README.md b/web/README.md index 900924f348..3236347e80 100644 --- a/web/README.md +++ b/web/README.md @@ -6,7 +6,9 @@ This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next ### Run by source code -To start the web frontend service, you will need [Node.js v18.x (LTS)](https://nodejs.org/en) and [pnpm version 9.12.2](https://pnpm.io). +Before starting the web frontend service, please make sure the following environment is ready. 
+- [Node.js](https://nodejs.org) >= v18.x +- [pnpm](https://pnpm.io) v10.x First, install the dependencies: From cd7ac20d809a8cb3c5484624d7c8a105a7c9856d Mon Sep 17 00:00:00 2001 From: Wu Tianwei <30284043+WTW0313@users.noreply.github.com> Date: Tue, 8 Apr 2025 18:01:43 +0800 Subject: [PATCH 04/15] feat: enhance index type handling and add error notification for missing embedding model (#16836) --- .../datasets/create/step-two/index.tsx | 29 ++++++++++--------- web/i18n/en-US/app-debug.ts | 1 + web/i18n/zh-Hans/app-debug.ts | 1 + 3 files changed, 18 insertions(+), 13 deletions(-) diff --git a/web/app/components/datasets/create/step-two/index.tsx b/web/app/components/datasets/create/step-two/index.tsx index 9b485e1bde..6bef25ee9f 100644 --- a/web/app/components/datasets/create/step-two/index.tsx +++ b/web/app/components/datasets/create/step-two/index.tsx @@ -169,12 +169,11 @@ const StepTwo = ({ const [rules, setRules] = useState([]) const [defaultConfig, setDefaultConfig] = useState() const hasSetIndexType = !!indexingType - const [indexType, setIndexType] = useState( - (indexingType - || isAPIKeySet) - ? IndexingType.QUALIFIED - : IndexingType.ECONOMICAL, - ) + const [indexType, setIndexType] = useState(() => { + if (hasSetIndexType) + return indexingType + return isAPIKeySet ? IndexingType.QUALIFIED : IndexingType.ECONOMICAL + }) const [previewFile, setPreviewFile] = useState( (datasetId && documentDetail) @@ -421,6 +420,13 @@ const StepTwo = ({ } else { // create const indexMethod = getIndexing_technique() + if (indexMethod === IndexingType.QUALIFIED && (!embeddingModel.model || !embeddingModel.provider)) { + Toast.notify({ + type: 'error', + message: t('appDebug.datasetConfig.embeddingModelRequired'), + }) + return + } if ( !isReRankModelSelected({ rerankModelList, @@ -568,7 +574,6 @@ const StepTwo = ({ // get indexing type by props if (indexingType) setIndexType(indexingType as IndexingType) - else setIndexType(isAPIKeySet ? 
IndexingType.QUALIFIED : IndexingType.ECONOMICAL) }, [isAPIKeySet, indexingType, datasetId]) @@ -848,10 +853,9 @@ const StepTwo = ({ description={t('datasetCreation.stepTwo.qualifiedTip')} icon={} isActive={!hasSetIndexType && indexType === IndexingType.QUALIFIED} - disabled={!isAPIKeySet || hasSetIndexType} + disabled={hasSetIndexType} onSwitched={() => { - if (isAPIKeySet) - setIndexType(IndexingType.QUALIFIED) + setIndexType(IndexingType.QUALIFIED) }} /> )} @@ -894,11 +898,10 @@ const StepTwo = ({ description={t('datasetCreation.stepTwo.economicalTip')} icon={} isActive={!hasSetIndexType && indexType === IndexingType.ECONOMICAL} - disabled={!isAPIKeySet || hasSetIndexType || docForm !== ChunkingMode.text} + disabled={hasSetIndexType || docForm !== ChunkingMode.text} ref={economyDomRef} onSwitched={() => { - if (isAPIKeySet && docForm === ChunkingMode.text) - setIndexType(IndexingType.ECONOMICAL) + setIndexType(IndexingType.ECONOMICAL) }} /> diff --git a/web/i18n/en-US/app-debug.ts b/web/i18n/en-US/app-debug.ts index 00f681f843..3ee5fd3e1d 100644 --- a/web/i18n/en-US/app-debug.ts +++ b/web/i18n/en-US/app-debug.ts @@ -483,6 +483,7 @@ const translation = { title: 'Multi-path retrieval', description: 'Based on user intent, queries across all Knowledge, retrieves relevant text from multi-sources, and selects the best results matching the user query after reranking.', }, + embeddingModelRequired: 'A configured Embedding Model is required', rerankModelRequired: 'A configured Rerank Model is required', params: 'Params', top_k: 'Top K', diff --git a/web/i18n/zh-Hans/app-debug.ts b/web/i18n/zh-Hans/app-debug.ts index 781ee39671..c2c659b41f 100644 --- a/web/i18n/zh-Hans/app-debug.ts +++ b/web/i18n/zh-Hans/app-debug.ts @@ -475,6 +475,7 @@ const translation = { title: '多路召回', description: '根据用户意图同时匹配所有知识库,从多路知识库查询相关文本片段,经过重排序步骤,从多路查询结果中选择匹配用户问题的最佳结果。', }, + embeddingModelRequired: '未配置 Embedding 模型', rerankModelRequired: '未配置 Rerank 模型', params: '参数设置', top_k: 'Top K', From 
106604682ab062a120562051c953b8402a747f6a Mon Sep 17 00:00:00 2001 From: Lao Date: Tue, 8 Apr 2025 21:00:00 +0800 Subject: [PATCH 05/15] Fixed the model-modal titles not being clearly distinguished between "Add" and "Setup" (#17634) --- .../account-setting/model-provider-page/model-modal/index.tsx | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx b/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx index 4adab6d2e0..bd1bb6ced9 100644 --- a/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx +++ b/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx @@ -270,8 +270,7 @@ const ModelModal: FC = ({ } const renderTitlePrefix = () => { - const prefix = configurateMethod === ConfigurationMethodEnum.customizableModel ? t('common.operation.add') : t('common.operation.setup') - + const prefix = isEditMode ? 
t('common.operation.setup') : t('common.operation.add') return `${prefix} ${provider.label[language] || provider.label.en_US}` } From b73607da8061a2738fe64b1cc19186c1489d23a4 Mon Sep 17 00:00:00 2001 From: Bowen Liang Date: Wed, 9 Apr 2025 09:40:11 +0800 Subject: [PATCH 06/15] chore: bump Nodejs in web image from 20 to 22 LTS (#13341) --- .github/workflows/style.yml | 3 ++- .github/workflows/tool-test-sdks.yaml | 2 +- .github/workflows/translate-i18n-base-on-english.yml | 2 +- .github/workflows/web-tests.yml | 2 +- web/Dockerfile | 2 +- 5 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index d73a782c93..625930b5f5 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -82,7 +82,7 @@ jobs: uses: actions/setup-node@v4 if: steps.changed-files.outputs.any_changed == 'true' with: - node-version: 20 + node-version: 22 cache: pnpm cache-dependency-path: ./web/package.json @@ -153,6 +153,7 @@ jobs: env: BASH_SEVERITY: warning DEFAULT_BRANCH: main + FILTER_REGEX_INCLUDE: pnpm-lock.yaml GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} IGNORE_GENERATED_FILES: true IGNORE_GITIGNORED_FILES: true diff --git a/.github/workflows/tool-test-sdks.yaml b/.github/workflows/tool-test-sdks.yaml index 93edb2737a..a6e48d1359 100644 --- a/.github/workflows/tool-test-sdks.yaml +++ b/.github/workflows/tool-test-sdks.yaml @@ -18,7 +18,7 @@ jobs: strategy: matrix: - node-version: [16, 18, 20] + node-version: [16, 18, 20, 22] defaults: run: diff --git a/.github/workflows/translate-i18n-base-on-english.yml b/.github/workflows/translate-i18n-base-on-english.yml index 80b78a1311..3f8082eb69 100644 --- a/.github/workflows/translate-i18n-base-on-english.yml +++ b/.github/workflows/translate-i18n-base-on-english.yml @@ -33,7 +33,7 @@ jobs: - name: Set up Node.js if: env.FILES_CHANGED == 'true' - uses: actions/setup-node@v2 + uses: actions/setup-node@v4 with: node-version: 'lts/*' diff --git 
a/.github/workflows/web-tests.yml b/.github/workflows/web-tests.yml index 7fe3f45a8a..85e8b99473 100644 --- a/.github/workflows/web-tests.yml +++ b/.github/workflows/web-tests.yml @@ -43,7 +43,7 @@ jobs: uses: actions/setup-node@v4 if: steps.changed-files.outputs.any_changed == 'true' with: - node-version: 20 + node-version: 22 cache: pnpm cache-dependency-path: ./web/package.json diff --git a/web/Dockerfile b/web/Dockerfile index 80ec6d652c..dfc5ba8b46 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -1,5 +1,5 @@ # base image -FROM node:20-alpine3.20 AS base +FROM node:22-alpine3.21 AS base LABEL maintainer="takatost@gmail.com" # if you located in China, you can use aliyun mirror to speed up From b5498a373a4c05bb8ddd2f4617e3bb120fa87646 Mon Sep 17 00:00:00 2001 From: Han <109904848+wanghan5@users.noreply.github.com> Date: Wed, 9 Apr 2025 10:12:16 +0800 Subject: [PATCH 07/15] Accelerate migration (#17088) Co-authored-by: Wang Han --- api/services/plugin/data_migration.py | 52 ++++++++++++++++++--------- 1 file changed, 35 insertions(+), 17 deletions(-) diff --git a/api/services/plugin/data_migration.py b/api/services/plugin/data_migration.py index 7228a16632..597585588b 100644 --- a/api/services/plugin/data_migration.py +++ b/api/services/plugin/data_migration.py @@ -127,18 +127,32 @@ limit 1000""" processed_count = 0 failed_ids = [] + last_id = "00000000-0000-0000-0000-000000000000" + while True: - sql = f"""select id, {provider_column_name} as provider_name from {table_name} -where {provider_column_name} not like '%/%' and {provider_column_name} is not null and {provider_column_name} != '' -limit 1000""" + sql = f""" + SELECT id, {provider_column_name} AS provider_name + FROM {table_name} + WHERE {provider_column_name} NOT LIKE '%/%' + AND {provider_column_name} IS NOT NULL + AND {provider_column_name} != '' + AND id > :last_id + ORDER BY id ASC + LIMIT 5000 + """ + params = {"last_id": last_id or ""} + with db.engine.begin() as conn: - rs = 
conn.execute(db.text(sql)) + rs = conn.execute(db.text(sql), params) current_iter_count = 0 + batch_updates = [] + for i in rs: current_iter_count += 1 processed_count += 1 record_id = str(i.id) + last_id = record_id provider_name = str(i.provider_name) if record_id in failed_ids: @@ -152,19 +166,9 @@ limit 1000""" ) try: - # update provider name append with "langgenius/{provider_name}/{provider_name}" - sql = f"""update {table_name} - set {provider_column_name} = - concat('{DEFAULT_PLUGIN_ID}/', {provider_column_name}, '/', {provider_column_name}) - where id = :record_id""" - conn.execute(db.text(sql), {"record_id": record_id}) - click.echo( - click.style( - f"[{processed_count}] Migrated [{table_name}] {record_id} ({provider_name})", - fg="green", - ) - ) - except Exception: + updated_value = f"{DEFAULT_PLUGIN_ID}/{provider_name}/{provider_name}" + batch_updates.append((updated_value, record_id)) + except Exception as e: failed_ids.append(record_id) click.echo( click.style( @@ -177,6 +181,20 @@ limit 1000""" ) continue + if batch_updates: + update_sql = f""" + UPDATE {table_name} + SET {provider_column_name} = :updated_value + WHERE id = :record_id + """ + conn.execute(db.text(update_sql), [{"updated_value": u, "record_id": r} for u, r in batch_updates]) + click.echo( + click.style( + f"[{processed_count}] Batch migrated [{len(batch_updates)}] records from [{table_name}]", + fg="green", + ) + ) + if not current_iter_count: break From f1e4d5ed6c7f3f569e0ff0b191884dd50e63618b Mon Sep 17 00:00:00 2001 From: Han <109904848+wanghan5@users.noreply.github.com> Date: Wed, 9 Apr 2025 11:22:53 +0800 Subject: [PATCH 08/15] Fix Performance Issues: (#17083) Co-authored-by: Wang Han --- api/core/rag/datasource/retrieval_service.py | 189 ++++++++++-------- ..._change_documentsegment_and_childchunk_.py | 43 ++++ api/models/dataset.py | 4 +- api/services/hit_testing_service.py | 9 - 4 files changed, 151 insertions(+), 94 deletions(-) create mode 100644 
api/migrations/versions/2025_03_29_2227-6a9f914f656c_change_documentsegment_and_childchunk_.py diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py index fea4d0edf7..c4a1e9f059 100644 --- a/api/core/rag/datasource/retrieval_service.py +++ b/api/core/rag/datasource/retrieval_service.py @@ -1,4 +1,6 @@ import concurrent.futures +import logging +import time from concurrent.futures import ThreadPoolExecutor from typing import Optional @@ -46,7 +48,7 @@ class RetrievalService: if not query: return [] dataset = cls._get_dataset(dataset_id) - if not dataset or dataset.available_document_count == 0 or dataset.available_segment_count == 0: + if not dataset: return [] all_documents: list[Document] = [] @@ -178,6 +180,7 @@ class RetrievalService: if not dataset: raise ValueError("dataset not found") + start = time.time() vector = Vector(dataset=dataset) documents = vector.search_by_vector( query, @@ -187,6 +190,7 @@ class RetrievalService: filter={"group_id": [dataset.id]}, document_ids_filter=document_ids_filter, ) + logging.debug(f"embedding_search ends at {time.time() - start:.2f} seconds") if documents: if ( @@ -270,7 +274,8 @@ class RetrievalService: return [] try: - # Collect document IDs + start_time = time.time() + # Collect document IDs with existence check document_ids = {doc.metadata.get("document_id") for doc in documents if "document_id" in doc.metadata} if not document_ids: return [] @@ -288,43 +293,102 @@ class RetrievalService: include_segment_ids = set() segment_child_map = {} - # Process documents + # Precompute doc_forms to avoid redundant checks + doc_forms = {} + for doc in documents: + document_id = doc.metadata.get("document_id") + dataset_doc = dataset_documents.get(document_id) + if dataset_doc: + doc_forms[document_id] = dataset_doc.doc_form + + # Batch collect index node IDs with type safety + child_index_node_ids = [] + index_node_ids = [] + for doc in documents: + document_id = 
doc.metadata.get("document_id") + if doc_forms.get(document_id) == IndexType.PARENT_CHILD_INDEX: + child_index_node_ids.append(doc.metadata.get("doc_id")) + else: + index_node_ids.append(doc.metadata.get("doc_id")) + + # Batch query ChildChunk + child_chunks = db.session.query(ChildChunk).filter(ChildChunk.index_node_id.in_(child_index_node_ids)).all() + child_chunk_map = {chunk.index_node_id: chunk for chunk in child_chunks} + + # Batch query DocumentSegment with unified conditions + segment_map = { + segment.id: segment + for segment in db.session.query(DocumentSegment) + .filter( + ( + DocumentSegment.index_node_id.in_(index_node_ids) + | DocumentSegment.id.in_([chunk.segment_id for chunk in child_chunks]) + ), + DocumentSegment.enabled == True, + DocumentSegment.status == "completed", + ) + .options( + load_only( + DocumentSegment.id, + DocumentSegment.content, + DocumentSegment.answer, + ) + ) + .all() + } + for document in documents: document_id = document.metadata.get("document_id") - if document_id not in dataset_documents: - continue - - dataset_document = dataset_documents[document_id] + dataset_document = dataset_documents.get(document_id) if not dataset_document: continue - if dataset_document.doc_form == IndexType.PARENT_CHILD_INDEX: - # Handle parent-child documents + doc_form = doc_forms.get(document_id) + if doc_form == IndexType.PARENT_CHILD_INDEX: + # Handle parent-child documents using preloaded data child_index_node_id = document.metadata.get("doc_id") + if not child_index_node_id: + continue - child_chunk = ( - db.session.query(ChildChunk).filter(ChildChunk.index_node_id == child_index_node_id).first() - ) - + child_chunk = child_chunk_map.get(child_index_node_id) if not child_chunk: continue - segment = ( - db.session.query(DocumentSegment) - .filter( - DocumentSegment.dataset_id == dataset_document.dataset_id, - DocumentSegment.enabled == True, - DocumentSegment.status == "completed", - DocumentSegment.id == child_chunk.segment_id, - ) - 
.options( - load_only( - DocumentSegment.id, - DocumentSegment.content, - DocumentSegment.answer, - ) - ) - .first() + segment = segment_map.get(child_chunk.segment_id) + if not segment: + continue + + if segment.id not in include_segment_ids: + include_segment_ids.add(segment.id) + map_detail = {"max_score": document.metadata.get("score", 0.0), "child_chunks": []} + segment_child_map[segment.id] = map_detail + records.append({"segment": segment}) + + # Append child chunk details + child_chunk_detail = { + "id": child_chunk.id, + "content": child_chunk.content, + "position": child_chunk.position, + "score": document.metadata.get("score", 0.0), + } + segment_child_map[segment.id]["child_chunks"].append(child_chunk_detail) + segment_child_map[segment.id]["max_score"] = max( + segment_child_map[segment.id]["max_score"], document.metadata.get("score", 0.0) + ) + + else: + # Handle normal documents + index_node_id = document.metadata.get("doc_id") + if not index_node_id: + continue + + segment = next( + ( + s + for s in segment_map.values() + if s.index_node_id == index_node_id and s.dataset_id == dataset_document.dataset_id + ), + None, ) if not segment: @@ -332,66 +396,23 @@ class RetrievalService: if segment.id not in include_segment_ids: include_segment_ids.add(segment.id) - child_chunk_detail = { - "id": child_chunk.id, - "content": child_chunk.content, - "position": child_chunk.position, - "score": document.metadata.get("score", 0.0), - } - map_detail = { - "max_score": document.metadata.get("score", 0.0), - "child_chunks": [child_chunk_detail], - } - segment_child_map[segment.id] = map_detail - record = { - "segment": segment, - } - records.append(record) - else: - child_chunk_detail = { - "id": child_chunk.id, - "content": child_chunk.content, - "position": child_chunk.position, - "score": document.metadata.get("score", 0.0), - } - segment_child_map[segment.id]["child_chunks"].append(child_chunk_detail) - segment_child_map[segment.id]["max_score"] = max( - 
segment_child_map[segment.id]["max_score"], document.metadata.get("score", 0.0) + records.append( + { + "segment": segment, + "score": document.metadata.get("score", 0.0), + } ) - else: - # Handle normal documents - index_node_id = document.metadata.get("doc_id") - if not index_node_id: - continue - segment = ( - db.session.query(DocumentSegment) - .filter( - DocumentSegment.dataset_id == dataset_document.dataset_id, - DocumentSegment.enabled == True, - DocumentSegment.status == "completed", - DocumentSegment.index_node_id == index_node_id, - ) - .first() - ) - - if not segment: - continue - - include_segment_ids.add(segment.id) - record = { - "segment": segment, - "score": document.metadata.get("score"), # type: ignore - } - records.append(record) - - # Add child chunks information to records + # Merge child chunks information for record in records: - if record["segment"].id in segment_child_map: - record["child_chunks"] = segment_child_map[record["segment"].id].get("child_chunks") # type: ignore - record["score"] = segment_child_map[record["segment"].id]["max_score"] + segment_id = record["segment"].id + if segment_id in segment_child_map: + record["child_chunks"] = segment_child_map[segment_id]["child_chunks"] + record["score"] = segment_child_map[segment_id]["max_score"] + logging.debug(f"Formatting retrieval documents took {time.time() - start_time:.2f} seconds") return [RetrievalSegments(**record) for record in records] except Exception as e: + # Only rollback if there were write operations db.session.rollback() raise e diff --git a/api/migrations/versions/2025_03_29_2227-6a9f914f656c_change_documentsegment_and_childchunk_.py b/api/migrations/versions/2025_03_29_2227-6a9f914f656c_change_documentsegment_and_childchunk_.py new file mode 100644 index 0000000000..45904f0c80 --- /dev/null +++ b/api/migrations/versions/2025_03_29_2227-6a9f914f656c_change_documentsegment_and_childchunk_.py @@ -0,0 +1,43 @@ +"""change documentsegment and childchunk indexes + 
+Revision ID: 6a9f914f656c +Revises: d20049ed0af6 +Create Date: 2025-03-29 22:27:24.789481 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '6a9f914f656c' +down_revision = 'd20049ed0af6' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('child_chunks', schema=None) as batch_op: + batch_op.create_index('child_chunks_node_idx', ['index_node_id', 'dataset_id'], unique=False) + batch_op.create_index('child_chunks_segment_idx', ['segment_id'], unique=False) + + with op.batch_alter_table('document_segments', schema=None) as batch_op: + batch_op.drop_index('document_segment_dataset_node_idx') + batch_op.create_index('document_segment_node_dataset_idx', ['index_node_id', 'dataset_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('document_segments', schema=None) as batch_op: + batch_op.drop_index('document_segment_node_dataset_idx') + batch_op.create_index('document_segment_dataset_node_idx', ['dataset_id', 'index_node_id'], unique=False) + + with op.batch_alter_table('child_chunks', schema=None) as batch_op: + batch_op.drop_index('child_chunks_segment_idx') + batch_op.drop_index('child_chunks_node_idx') + + # ### end Alembic commands ### diff --git a/api/models/dataset.py b/api/models/dataset.py index 47f96c669e..d6708ac88b 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -643,7 +643,7 @@ class DocumentSegment(db.Model): # type: ignore[name-defined] db.Index("document_segment_document_id_idx", "document_id"), db.Index("document_segment_tenant_dataset_idx", "dataset_id", "tenant_id"), db.Index("document_segment_tenant_document_idx", "document_id", "tenant_id"), - db.Index("document_segment_dataset_node_idx", "dataset_id", "index_node_id"), + db.Index("document_segment_node_dataset_idx", "index_node_id", "dataset_id"), db.Index("document_segment_tenant_idx", "tenant_id"), ) @@ -791,6 +791,8 @@ class ChildChunk(db.Model): # type: ignore[name-defined] __table_args__ = ( db.PrimaryKeyConstraint("id", name="child_chunk_pkey"), db.Index("child_chunk_dataset_id_idx", "tenant_id", "dataset_id", "document_id", "segment_id", "index_node_id"), + db.Index("child_chunks_node_idx", "index_node_id", "dataset_id"), + db.Index("child_chunks_segment_idx", "segment_id"), ) # initial fields diff --git a/api/services/hit_testing_service.py b/api/services/hit_testing_service.py index f8c1c1d297..0b98065f5d 100644 --- a/api/services/hit_testing_service.py +++ b/api/services/hit_testing_service.py @@ -29,15 +29,6 @@ class HitTestingService: external_retrieval_model: dict, limit: int = 10, ) -> dict: - if dataset.available_document_count == 0 or dataset.available_segment_count == 0: - return { - "query": { - "content": query, - "tsne_position": {"x": 0, "y": 0}, - 
}, - "records": [], - } - start = time.perf_counter() # get retrieval model , if the model is not setting , using default From f633d1ee9220bd80cca83674764151efcad7d101 Mon Sep 17 00:00:00 2001 From: yusheng chen Date: Wed, 9 Apr 2025 12:10:17 +0800 Subject: [PATCH 09/15] chore: add `'no-empty-function': 'error'` to `eslint.config.mjs` (#17656) --- .../dataset-config/params-config/config-content.tsx | 5 +++-- web/app/components/app/configuration/index.tsx | 3 --- .../components/base/audio-btn/audio.player.manager.ts | 5 +---- web/app/components/base/audio-btn/audio.ts | 10 +++------- web/app/components/base/chat/chat/hooks.ts | 3 ++- web/app/components/base/pagination/pagination.tsx | 3 ++- web/app/components/datasets/create/step-two/index.tsx | 3 ++- web/app/components/datasets/documents/list.tsx | 5 +++-- web/app/components/explore/create-app-modal/index.tsx | 3 ++- .../data-source-page/panel/config-item.tsx | 3 ++- .../block-selector/market-place-plugin/list.tsx | 3 ++- web/app/components/workflow/hooks/use-workflow-run.ts | 3 ++- .../workflow/nodes/_base/hooks/use-one-step-run.ts | 8 +++----- .../workflow/run/utils/format-log/agent/data.ts | 3 --- web/context/i18n.ts | 3 ++- web/eslint.config.mjs | 3 +++ 16 files changed, 32 insertions(+), 34 deletions(-) diff --git a/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx b/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx index 14f0c3d865..3b9078f1be 100644 --- a/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx +++ b/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx @@ -25,6 +25,7 @@ import { useSelectedDatasetsMode } from '@/app/components/workflow/nodes/knowled import Switch from '@/app/components/base/switch' import Toast from '@/app/components/base/toast' import Divider from '@/app/components/base/divider' +import { noop } from 'lodash-es' type Props = { datasetConfigs: 
DatasetConfigs @@ -41,8 +42,8 @@ const ConfigContent: FC = ({ onChange, isInWorkflow, singleRetrievalModelConfig: singleRetrievalConfig = {} as ModelConfig, - onSingleRetrievalModelChange = () => { }, - onSingleRetrievalModelParamsChange = () => { }, + onSingleRetrievalModelChange = noop, + onSingleRetrievalModelParamsChange = noop, selectedDatasets = [], }) => { const { t } = useTranslation() diff --git a/web/app/components/app/configuration/index.tsx b/web/app/components/app/configuration/index.tsx index cc6909d151..249624a294 100644 --- a/web/app/components/app/configuration/index.tsx +++ b/web/app/components/app/configuration/index.tsx @@ -197,9 +197,6 @@ const Configuration: FC = () => { const isOpenAI = modelConfig.provider === 'langgenius/openai/openai' const [collectionList, setCollectionList] = useState([]) - useEffect(() => { - - }, []) const [datasetConfigs, doSetDatasetConfigs] = useState({ retrieval_model: RETRIEVE_TYPE.multiWay, reranking_model: { diff --git a/web/app/components/base/audio-btn/audio.player.manager.ts b/web/app/components/base/audio-btn/audio.player.manager.ts index 848aef6cba..15be7a3d8c 100644 --- a/web/app/components/base/audio-btn/audio.player.manager.ts +++ b/web/app/components/base/audio-btn/audio.player.manager.ts @@ -12,9 +12,6 @@ export class AudioPlayerManager { private audioPlayers: AudioPlayer | null = null private msgId: string | undefined - private constructor() { - } - public static getInstance(): AudioPlayerManager { if (!AudioPlayerManager.instance) { AudioPlayerManager.instance = new AudioPlayerManager() @@ -24,7 +21,7 @@ export class AudioPlayerManager { return AudioPlayerManager.instance } - public getAudioPlayer(url: string, isPublic: boolean, id: string | undefined, msgContent: string | null | undefined, voice: string | undefined, callback: ((event: string) => {}) | null): AudioPlayer { + public getAudioPlayer(url: string, isPublic: boolean, id: string | undefined, msgContent: string | null | undefined, voice: 
string | undefined, callback: ((event: string) => void) | null): AudioPlayer { if (this.msgId && this.msgId === id && this.audioPlayers) { this.audioPlayers.setCallback(callback) return this.audioPlayers diff --git a/web/app/components/base/audio-btn/audio.ts b/web/app/components/base/audio-btn/audio.ts index d7fae02f82..cd40930f43 100644 --- a/web/app/components/base/audio-btn/audio.ts +++ b/web/app/components/base/audio-btn/audio.ts @@ -21,9 +21,9 @@ export default class AudioPlayer { isLoadData = false url: string isPublic: boolean - callback: ((event: string) => {}) | null + callback: ((event: string) => void) | null - constructor(streamUrl: string, isPublic: boolean, msgId: string | undefined, msgContent: string | null | undefined, voice: string | undefined, callback: ((event: string) => {}) | null) { + constructor(streamUrl: string, isPublic: boolean, msgId: string | undefined, msgContent: string | null | undefined, voice: string | undefined, callback: ((event: string) => void) | null) { this.audioContext = new AudioContext() this.msgId = msgId this.msgContent = msgContent @@ -68,7 +68,7 @@ export default class AudioPlayer { }) } - public setCallback(callback: ((event: string) => {}) | null) { + public setCallback(callback: ((event: string) => void) | null) { this.callback = callback if (callback) { this.audio.addEventListener('ended', () => { @@ -211,10 +211,6 @@ export default class AudioPlayer { this.audioContext.suspend() } - private cancer() { - - } - private receiveAudioData(unit8Array: Uint8Array) { if (!unit8Array) { this.finishStream() diff --git a/web/app/components/base/chat/chat/hooks.ts b/web/app/components/base/chat/chat/hooks.ts index eb48f9515b..aad17ccc52 100644 --- a/web/app/components/base/chat/chat/hooks.ts +++ b/web/app/components/base/chat/chat/hooks.ts @@ -34,6 +34,7 @@ import { getProcessedFiles, getProcessedFilesFromResponse, } from '@/app/components/base/file-uploader/utils' +import { noop } from 'lodash-es' type GetAbortController = 
(abortController: AbortController) => void type SendCallback = { @@ -308,7 +309,7 @@ export const useChat = ( else ttsUrl = `/apps/${params.appId}/text-to-audio` } - const player = AudioPlayerManager.getInstance().getAudioPlayer(ttsUrl, ttsIsPublic, uuidV4(), 'none', 'none', (_: any): any => { }) + const player = AudioPlayerManager.getInstance().getAudioPlayer(ttsUrl, ttsIsPublic, uuidV4(), 'none', 'none', noop) ssePost( url, { diff --git a/web/app/components/base/pagination/pagination.tsx b/web/app/components/base/pagination/pagination.tsx index 5898c4e924..ec8b0355f4 100644 --- a/web/app/components/base/pagination/pagination.tsx +++ b/web/app/components/base/pagination/pagination.tsx @@ -7,10 +7,11 @@ import type { IPaginationProps, PageButtonProps, } from './type' +import { noop } from 'lodash-es' const defaultState: IPagination = { currentPage: 0, - setCurrentPage: () => {}, + setCurrentPage: noop, truncableText: '...', truncableClassName: '', pages: [], diff --git a/web/app/components/datasets/create/step-two/index.tsx b/web/app/components/datasets/create/step-two/index.tsx index 6bef25ee9f..12fd54d0fe 100644 --- a/web/app/components/datasets/create/step-two/index.tsx +++ b/web/app/components/datasets/create/step-two/index.tsx @@ -62,6 +62,7 @@ import Tooltip from '@/app/components/base/tooltip' import CustomDialog from '@/app/components/base/dialog' import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigger } from '@/app/components/base/portal-to-follow-elem' import { AlertTriangle } from '@/app/components/base/icons/src/vender/solid/alertsAndFeedback' +import { noop } from 'lodash-es' const TextLabel: FC = (props) => { return @@ -1010,7 +1011,7 @@ const StepTwo = ({ )} - { }} footer={null}> + { const { t } = useTranslation() @@ -265,7 +266,7 @@ export const OperationAction: FC<{ return
e.stopPropagation()}> {isListScene && !embeddingAvailable && ( - { }} disabled={true} size='md' /> + )} {isListScene && embeddingAvailable && ( <> @@ -276,7 +277,7 @@ export const OperationAction: FC<{ needsDelay >
- { }} disabled={true} size='md' /> +
: handleSwitch(v ? 'enable' : 'disable')} size='md' /> diff --git a/web/app/components/explore/create-app-modal/index.tsx b/web/app/components/explore/create-app-modal/index.tsx index 585c52f828..62116192d7 100644 --- a/web/app/components/explore/create-app-modal/index.tsx +++ b/web/app/components/explore/create-app-modal/index.tsx @@ -13,6 +13,7 @@ import AppIcon from '@/app/components/base/app-icon' import { useProviderContext } from '@/context/provider-context' import AppsFull from '@/app/components/billing/apps-full-in-dialog' import type { AppIconType } from '@/types/app' +import { noop } from 'lodash-es' export type CreateAppModalProps = { show: boolean @@ -85,7 +86,7 @@ const CreateAppModal = ({ <> {}} + onClose={noop} className='relative !max-w-[480px] px-8' >
diff --git a/web/app/components/header/account-setting/data-source-page/panel/config-item.tsx b/web/app/components/header/account-setting/data-source-page/panel/config-item.tsx index 3dad51f566..6faf840529 100644 --- a/web/app/components/header/account-setting/data-source-page/panel/config-item.tsx +++ b/web/app/components/header/account-setting/data-source-page/panel/config-item.tsx @@ -10,6 +10,7 @@ import Operate from '../data-source-notion/operate' import { DataSourceType } from './types' import s from './style.module.css' import cn from '@/utils/classnames' +import { noop } from 'lodash-es' export type ConfigItemType = { id: string @@ -41,7 +42,7 @@ const ConfigItem: FC = ({ const { t } = useTranslation() const isNotion = type === DataSourceType.notion const isWebsite = type === DataSourceType.website - const onChangeAuthorizedPage = notionActions?.onChangeAuthorizedPage || function () { } + const onChangeAuthorizedPage = notionActions?.onChangeAuthorizedPage || noop return (
diff --git a/web/app/components/workflow/block-selector/market-place-plugin/list.tsx b/web/app/components/workflow/block-selector/market-place-plugin/list.tsx index d74f170589..97110093b0 100644 --- a/web/app/components/workflow/block-selector/market-place-plugin/list.tsx +++ b/web/app/components/workflow/block-selector/market-place-plugin/list.tsx @@ -9,6 +9,7 @@ import Link from 'next/link' import { marketplaceUrlPrefix } from '@/config' import { RiArrowRightUpLine, RiSearchLine } from '@remixicon/react' // import { RiArrowRightUpLine } from '@remixicon/react' +import { noop } from 'lodash-es' type Props = { wrapElemRef: React.RefObject @@ -107,7 +108,7 @@ const List = ( { }} + onAction={noop} /> ))}
diff --git a/web/app/components/workflow/hooks/use-workflow-run.ts b/web/app/components/workflow/hooks/use-workflow-run.ts index 87ff2186fc..99d9a45702 100644 --- a/web/app/components/workflow/hooks/use-workflow-run.ts +++ b/web/app/components/workflow/hooks/use-workflow-run.ts @@ -18,6 +18,7 @@ import { stopWorkflowRun } from '@/service/workflow' import { useFeaturesStore } from '@/app/components/base/features/hooks' import { AudioPlayerManager } from '@/app/components/base/audio-btn/audio.player.manager' import type { VersionHistory } from '@/types/workflow' +import { noop } from 'lodash-es' export const useWorkflowRun = () => { const store = useStoreApi() @@ -168,7 +169,7 @@ export const useWorkflowRun = () => { else ttsUrl = `/apps/${params.appId}/text-to-audio` } - const player = AudioPlayerManager.getInstance().getAudioPlayer(ttsUrl, ttsIsPublic, uuidV4(), 'none', 'none', (_: any): any => { }) + const player = AudioPlayerManager.getInstance().getAudioPlayer(ttsUrl, ttsIsPublic, uuidV4(), 'none', 'none', noop) ssePost( url, diff --git a/web/app/components/workflow/nodes/_base/hooks/use-one-step-run.ts b/web/app/components/workflow/nodes/_base/hooks/use-one-step-run.ts index 7d8b7fe086..f23af5812c 100644 --- a/web/app/components/workflow/nodes/_base/hooks/use-one-step-run.ts +++ b/web/app/components/workflow/nodes/_base/hooks/use-one-step-run.ts @@ -30,7 +30,7 @@ import IterationDefault from '@/app/components/workflow/nodes/iteration/default' import DocumentExtractorDefault from '@/app/components/workflow/nodes/document-extractor/default' import LoopDefault from '@/app/components/workflow/nodes/loop/default' import { ssePost } from '@/service/base' - +import { noop } from 'lodash-es' import { getInputVars as doGetInputVars } from '@/app/components/base/prompt-editor/constants' import type { NodeTracing } from '@/types/workflow' const { checkValid: checkLLMValid } = LLMDefault @@ -233,8 +233,7 @@ const useOneStepRun = ({ getIterationSingleNodeRunUrl(isChatMode, 
appId!, id), { body: { inputs: submitData } }, { - onWorkflowStarted: () => { - }, + onWorkflowStarted: noop, onWorkflowFinished: (params) => { handleNodeDataUpdate({ id, @@ -331,8 +330,7 @@ const useOneStepRun = ({ getLoopSingleNodeRunUrl(isChatMode, appId!, id), { body: { inputs: submitData } }, { - onWorkflowStarted: () => { - }, + onWorkflowStarted: noop, onWorkflowFinished: (params) => { handleNodeDataUpdate({ id, diff --git a/web/app/components/workflow/run/utils/format-log/agent/data.ts b/web/app/components/workflow/run/utils/format-log/agent/data.ts index a1e06bf63b..d90933c293 100644 --- a/web/app/components/workflow/run/utils/format-log/agent/data.ts +++ b/web/app/components/workflow/run/utils/format-log/agent/data.ts @@ -177,6 +177,3 @@ export const multiStepsCircle = (() => { }], } })() - -export const CircleNestCircle = (() => { -})() diff --git a/web/context/i18n.ts b/web/context/i18n.ts index be41730b07..6db211dd5d 100644 --- a/web/context/i18n.ts +++ b/web/context/i18n.ts @@ -4,6 +4,7 @@ import { } from 'use-context-selector' import type { Locale } from '@/i18n' import { getLanguage } from '@/i18n/language' +import { noop } from 'lodash-es' type II18NContext = { locale: Locale @@ -14,7 +15,7 @@ type II18NContext = { const I18NContext = createContext({ locale: 'en-US', i18n: {}, - setLocaleOnClient: (_lang: Locale, _reloadPage?: boolean) => { }, + setLocaleOnClient: noop, }) export const useI18N = () => useContext(I18NContext) diff --git a/web/eslint.config.mjs b/web/eslint.config.mjs index 9ce151c751..204efc4715 100644 --- a/web/eslint.config.mjs +++ b/web/eslint.config.mjs @@ -117,6 +117,9 @@ export default combine( // antfu migrate to eslint-plugin-unused-imports 'unused-imports/no-unused-vars': 'warn', 'unused-imports/no-unused-imports': 'warn', + + // We use `import { noop } from 'lodash-es'` across `web` project + 'no-empty-function': 'error', }, languageOptions: { From eb0e51d44d9ef899d4fe8dd825c3bfc8d77ba9d2 Mon Sep 17 00:00:00 2001 From: 
FangHao Date: Wed, 9 Apr 2025 12:16:48 +0800 Subject: [PATCH 10/15] optimize: docker-compose.middleware.yaml update env_file dependence (#17646) Co-authored-by: fanghao --- docker/docker-compose.middleware.yaml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index b4f772cc82..230b8a05be 100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -29,6 +29,8 @@ services: redis: image: redis:6-alpine restart: always + env_file: + - ./middleware.env environment: REDISCLI_AUTH: ${REDIS_PASSWORD:-difyai123456} volumes: @@ -45,6 +47,8 @@ services: sandbox: image: langgenius/dify-sandbox:0.2.11 restart: always + env_file: + - ./middleware.env environment: # The DifySandbox configurations # Make sure you are changing this key for your deployment with a strong key. @@ -68,6 +72,8 @@ services: plugin_daemon: image: langgenius/dify-plugin-daemon:0.0.6-local restart: always + env_file: + - ./middleware.env environment: # Use the shared environment variables. DB_HOST: ${DB_HOST:-db} @@ -107,6 +113,8 @@ services: - ./ssrf_proxy/squid.conf.template:/etc/squid/squid.conf.template - ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint-mount.sh entrypoint: [ "sh", "-c", "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ] + env_file: + - ./middleware.env environment: # pls clearly modify the squid env vars to fit your network environment. 
HTTP_PORT: ${SSRF_HTTP_PORT:-3128} From 0b1259fc4a45294176ac2415ad2df06d899c6d85 Mon Sep 17 00:00:00 2001 From: Bowen Liang Date: Wed, 9 Apr 2025 13:03:53 +0800 Subject: [PATCH 11/15] chore: add script for running mypy type checks and speed up mypy checks in CI jobs (#17489) --- .github/workflows/api-tests.yml | 9 +- api/poetry.lock | 454 ++++++++++++++++++++++++-------- api/pyproject.toml | 34 ++- dev/reformat | 3 + dev/run-mypy | 11 + 5 files changed, 391 insertions(+), 120 deletions(-) create mode 100755 dev/run-mypy diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index b9547b6452..dca8e640c7 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -53,9 +53,14 @@ jobs: - name: Run dify config tests run: poetry run -P api python dev/pytest/pytest_config_tests.py + - name: Cache MyPy + uses: actions/cache@v4 + with: + path: api/.mypy_cache + key: mypy-${{ matrix.python-version }}-${{ runner.os }}-${{ hashFiles('api/poetry.lock') }} + - name: Run mypy - run: | - poetry run -C api python -m mypy --install-types --non-interactive . 
+ run: dev/run-mypy - name: Set up dotenvs run: | diff --git a/api/poetry.lock b/api/poetry.lock index 0cda9d322f..a91023707e 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -845,10 +845,6 @@ files = [ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"}, {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, @@ -861,14 +857,8 @@ files = [ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, {file = 
"Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"}, {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, @@ -879,24 +869,8 @@ files = [ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"}, {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, - {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"}, - {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"}, - {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"}, - {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"}, {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, @@ -906,10 +880,6 @@ files = [ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = 
"sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"}, {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, @@ -921,10 +891,6 @@ files = [ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"}, {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash 
= "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, @@ -937,10 +903,6 @@ files = [ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"}, {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, @@ -953,10 +915,6 @@ files = [ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, {file = 
"Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"}, {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, @@ -4405,6 +4363,21 @@ html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=3.0.11,<3.1.0)"] +[[package]] +name = "lxml-stubs" +version = "0.5.1" +description = "Type annotations for the lxml package" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "lxml-stubs-0.5.1.tar.gz", hash = "sha256:e0ec2aa1ce92d91278b719091ce4515c12adc1d564359dfaf81efa7d4feab79d"}, + {file = "lxml_stubs-0.5.1-py3-none-any.whl", hash = "sha256:1f689e5dbc4b9247cb09ae820c7d34daeb1fdbd1db06123814b856dae7787272"}, +] + +[package.extras] +test = ["coverage[toml] (>=7.2.5)", "mypy (>=1.2.0)", "pytest (>=7.3.0)", "pytest-mypy-plugins (>=1.10.1)"] + [[package]] name = "lz4" version = "4.4.3" @@ -4944,49 +4917,49 @@ files = [ [[package]] name = 
"mypy" -version = "1.13.0" +version = "1.15.0" description = "Optional static typing for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, - {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, - {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, - {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, - {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, - {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, - {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, - {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, - {file = 
"mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, - {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, - {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, - {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, - {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, - {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, - {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, - {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, - {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, - {file = 
"mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, - {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, - {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, - {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, - {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, - {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, + {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, + {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, + {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, + {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, + {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, + {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, + {file = 
"mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, + {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, + {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, + {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, + {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, + {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, + {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, ] [package.dependencies] 
-mypy-extensions = ">=1.0.0" -typing-extensions = ">=4.6.0" +mypy_extensions = ">=1.0.0" +typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] @@ -5135,7 +5108,7 @@ version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" -groups = ["main", "indirect", "vdb"] +groups = ["main", "dev", "indirect", "vdb"] files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -8859,6 +8832,18 @@ rich = ">=10.11.0" shellingham = ">=1.3.0" typing-extensions = ">=3.7.4.3" +[[package]] +name = "types-aiofiles" +version = "24.1.0.20250326" +description = "Typing stubs for aiofiles" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_aiofiles-24.1.0.20250326-py3-none-any.whl", hash = "sha256:dfb58c9aa18bd449e80fb5d7f49dc3dd20d31de920a46223a61798ee4a521a70"}, + {file = "types_aiofiles-24.1.0.20250326.tar.gz", hash = "sha256:c4bbe432fd043911ba83fb635456f5cc54f6d05fda2aadf6bef12a84f07a6efe"}, +] + [[package]] name = "types-beautifulsoup4" version = "4.12.0.20250204" @@ -8874,6 +8859,42 @@ files = [ [package.dependencies] types-html5lib = "*" +[[package]] +name = "types-cachetools" +version = "5.5.0.20240820" +description = "Typing stubs for cachetools" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-cachetools-5.5.0.20240820.tar.gz", hash = "sha256:b888ab5c1a48116f7799cd5004b18474cd82b5463acb5ffb2db2fc9c7b053bc0"}, + {file = "types_cachetools-5.5.0.20240820-py3-none-any.whl", hash = "sha256:efb2ed8bf27a4b9d3ed70d33849f536362603a90b8090a328acf0cd42fda82e2"}, +] + +[[package]] +name = "types-colorama" +version = "0.4.15.20240311" +description = "Typing stubs for colorama" +optional = 
false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-colorama-0.4.15.20240311.tar.gz", hash = "sha256:a28e7f98d17d2b14fb9565d32388e419f4108f557a7d939a66319969b2b99c7a"}, + {file = "types_colorama-0.4.15.20240311-py3-none-any.whl", hash = "sha256:6391de60ddc0db3f147e31ecb230006a6823e81e380862ffca1e4695c13a0b8e"}, +] + +[[package]] +name = "types-defusedxml" +version = "0.7.0.20240218" +description = "Typing stubs for defusedxml" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-defusedxml-0.7.0.20240218.tar.gz", hash = "sha256:05688a7724dc66ea74c4af5ca0efc554a150c329cb28c13a64902cab878d06ed"}, + {file = "types_defusedxml-0.7.0.20240218-py3-none-any.whl", hash = "sha256:2b7f3c5ca14fdbe728fab0b846f5f7eb98c4bd4fd2b83d25f79e923caa790ced"}, +] + [[package]] name = "types-deprecated" version = "1.2.15.20250304" @@ -8886,16 +8907,28 @@ files = [ {file = "types_deprecated-1.2.15.20250304.tar.gz", hash = "sha256:c329030553029de5cc6cb30f269c11f4e00e598c4241290179f63cda7d33f719"}, ] +[[package]] +name = "types-docutils" +version = "0.21.0.20241128" +description = "Typing stubs for docutils" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types_docutils-0.21.0.20241128-py3-none-any.whl", hash = "sha256:e0409204009639e9b0bf4521eeabe58b5e574ce9c0db08421c2ac26c32be0039"}, + {file = "types_docutils-0.21.0.20241128.tar.gz", hash = "sha256:4dd059805b83ac6ec5a223699195c4e9eeb0446a4f7f2aeff1759a4a7cc17473"}, +] + [[package]] name = "types-flask-cors" -version = "4.0.0.20240828" +version = "5.0.0.20240902" description = "Typing stubs for Flask-Cors" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "types-Flask-Cors-4.0.0.20240828.tar.gz", hash = "sha256:f48ecf6366da923331311907cde3500e1435e07df01397ce0ef2306e263a5e85"}, - {file = "types_Flask_Cors-4.0.0.20240828-py3-none-any.whl", hash = 
"sha256:36b752e88d6517fb82973b4240fe9bde44d29485bbd92dfff762a7101bdac3a0"}, + {file = "types-Flask-Cors-5.0.0.20240902.tar.gz", hash = "sha256:8921b273bf7cd9636df136b66408efcfa6338a935e5c8f53f5eff1cee03f3394"}, + {file = "types_Flask_Cors-5.0.0.20240902-py3-none-any.whl", hash = "sha256:595e5f36056cd128ab905832e055f2e5d116fbdc685356eea4490bc77df82137"}, ] [package.dependencies] @@ -8917,6 +8950,34 @@ files = [ Flask = ">=2.0.0" Flask-SQLAlchemy = ">=3.0.1" +[[package]] +name = "types-gevent" +version = "24.11.0.20250401" +description = "Typing stubs for gevent" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_gevent-24.11.0.20250401-py3-none-any.whl", hash = "sha256:6764faf861ea99250c38179c58076392c44019ac3393029f71b06c4a15e8c1d1"}, + {file = "types_gevent-24.11.0.20250401.tar.gz", hash = "sha256:1443f796a442062698e67d818fca50aa88067dee4021d457a7c0c6bedd6f46ca"}, +] + +[package.dependencies] +types-greenlet = "*" +types-psutil = "*" + +[[package]] +name = "types-greenlet" +version = "3.1.0.20250401" +description = "Typing stubs for greenlet" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_greenlet-3.1.0.20250401-py3-none-any.whl", hash = "sha256:77987f3249b0f21415dc0254057e1ae4125a696a9bba28b0bcb67ee9e3dc14f6"}, + {file = "types_greenlet-3.1.0.20250401.tar.gz", hash = "sha256:949389b64c34ca9472f6335189e9fe0b2e9704436d4f0850e39e9b7145909082"}, +] + [[package]] name = "types-html5lib" version = "1.1.11.20241018" @@ -8929,6 +8990,54 @@ files = [ {file = "types_html5lib-1.1.11.20241018-py3-none-any.whl", hash = "sha256:3f1e064d9ed2c289001ae6392c84c93833abb0816165c6ff0abfc304a779f403"}, ] +[[package]] +name = "types-markdown" +version = "3.7.0.20250322" +description = "Typing stubs for Markdown" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_markdown-3.7.0.20250322-py3-none-any.whl", hash = 
"sha256:7e855503027b4290355a310fb834871940d9713da7c111f3e98a5e1cbc77acfb"}, + {file = "types_markdown-3.7.0.20250322.tar.gz", hash = "sha256:a48ed82dfcb6954592a10f104689d2d44df9125ce51b3cee20e0198a5216d55c"}, +] + +[[package]] +name = "types-oauthlib" +version = "3.2.0.20250403" +description = "Typing stubs for oauthlib" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_oauthlib-3.2.0.20250403-py3-none-any.whl", hash = "sha256:02466f91a01522adfa4aaf0d7e76274f00a102eed40034117c5ecae768a2571e"}, + {file = "types_oauthlib-3.2.0.20250403.tar.gz", hash = "sha256:40a4fcfb2e95235e399b5c0dd1cbe9d8c4b19415c09fb54c648d3397e02e0425"}, +] + +[[package]] +name = "types-objgraph" +version = "3.6.0.20240907" +description = "Typing stubs for objgraph" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-objgraph-3.6.0.20240907.tar.gz", hash = "sha256:2e3dee675843ae387889731550b0ddfed06e9420946cf78a4bca565b5fc53634"}, + {file = "types_objgraph-3.6.0.20240907-py3-none-any.whl", hash = "sha256:67207633a9b5789ee1911d740b269c3371081b79c0d8f68b00e7b8539f5c43f5"}, +] + +[[package]] +name = "types-olefile" +version = "0.47.0.20240806" +description = "Typing stubs for olefile" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-olefile-0.47.0.20240806.tar.gz", hash = "sha256:96490f208cbb449a52283855319d73688ba9167ae58858ef8c506bf7ca2c6b67"}, + {file = "types_olefile-0.47.0.20240806-py3-none-any.whl", hash = "sha256:c760a3deab7adb87a80d33b0e4edbbfbab865204a18d5121746022d7f8555118"}, +] + [[package]] name = "types-openpyxl" version = "3.1.5.20250306" @@ -8942,27 +9051,39 @@ files = [ ] [[package]] -name = "types-protobuf" -version = "4.25.0.20240417" -description = "Typing stubs for protobuf" +name = "types-pexpect" +version = "4.9.0.20241208" +description = "Typing stubs for pexpect" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = 
"types-protobuf-4.25.0.20240417.tar.gz", hash = "sha256:c34eff17b9b3a0adb6830622f0f302484e4c089f533a46e3f147568313544352"}, - {file = "types_protobuf-4.25.0.20240417-py3-none-any.whl", hash = "sha256:e9b613227c2127e3d4881d75d93c93b4d6fd97b5f6a099a0b654a05351c8685d"}, + {file = "types_pexpect-4.9.0.20241208-py3-none-any.whl", hash = "sha256:1928f478528454f0fea3495c16cf1ee2e67fca5c9fe97d60b868ac48c1fd5633"}, + {file = "types_pexpect-4.9.0.20241208.tar.gz", hash = "sha256:bbca0d0819947a719989a5cfe83641d9212bef893e2f0a7a01e47926bc82401d"}, +] + +[[package]] +name = "types-protobuf" +version = "5.29.1.20250403" +description = "Typing stubs for protobuf" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_protobuf-5.29.1.20250403-py3-none-any.whl", hash = "sha256:c71de04106a2d54e5b2173d0a422058fae0ef2d058d70cf369fb797bf61ffa59"}, + {file = "types_protobuf-5.29.1.20250403.tar.gz", hash = "sha256:7ff44f15022119c9d7558ce16e78b2d485bf7040b4fadced4dd069bb5faf77a2"}, ] [[package]] name = "types-psutil" -version = "7.0.0.20250218" +version = "7.0.0.20250401" description = "Typing stubs for psutil" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_psutil-7.0.0.20250218-py3-none-any.whl", hash = "sha256:1447a30c282aafefcf8941ece854e1100eee7b0296a9d9be9977292f0269b121"}, - {file = "types_psutil-7.0.0.20250218.tar.gz", hash = "sha256:1e642cdafe837b240295b23b1cbd4691d80b08a07d29932143cbbae30eb0db9c"}, + {file = "types_psutil-7.0.0.20250401-py3-none-any.whl", hash = "sha256:ed23f7140368104afe4e05a6085a5fa56fbe8c880a0f4dfe8d63e041106071ed"}, + {file = "types_psutil-7.0.0.20250401.tar.gz", hash = "sha256:2a7d663c0888a079fc1643ebc109ad12e57a21c9552a9e2035da504191336dbf"}, ] [[package]] @@ -8977,6 +9098,33 @@ files = [ {file = "types_psycopg2-2.9.21.20250318.tar.gz", hash = "sha256:eb6eac5bfb16adfd5f16b818918b9e26a40ede147e0f2bbffdf53a6ef7025a87"}, ] +[[package]] +name = "types-pygments" +version = 
"2.19.0.20250305" +description = "Typing stubs for Pygments" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_pygments-2.19.0.20250305-py3-none-any.whl", hash = "sha256:ca88aae5ec426f9b107c0f7adc36dc096d2882d930a49f679eaf4b8b643db35d"}, + {file = "types_pygments-2.19.0.20250305.tar.gz", hash = "sha256:044c50e80ecd4128c00a7268f20355e16f5c55466d3d49dfda09be920af40b4b"}, +] + +[package.dependencies] +types-docutils = "*" + +[[package]] +name = "types-pymysql" +version = "1.1.0.20241103" +description = "Typing stubs for PyMySQL" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-PyMySQL-1.1.0.20241103.tar.gz", hash = "sha256:a7628542919a0ba87625fb79eefb2a2de45fb4ad32afe6e561e8f2f27fb58b8c"}, + {file = "types_PyMySQL-1.1.0.20241103-py3-none-any.whl", hash = "sha256:1a32efd8a74b5bf74c4de92a86c1cc6edaf3802dcfd5546635ab501eb5e3c096"}, +] + [[package]] name = "types-python-dateutil" version = "2.9.0.20241206" @@ -8995,78 +9143,162 @@ version = "2025.1.0.20250318" description = "Typing stubs for pytz" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "types_pytz-2025.1.0.20250318-py3-none-any.whl", hash = "sha256:04dba4907c5415777083f9548693c6d9f80ec53adcaff55a38526a3f8ddcae04"}, {file = "types_pytz-2025.1.0.20250318.tar.gz", hash = "sha256:97e0e35184c6fe14e3a5014512057f2c57bb0c6582d63c1cfcc4809f82180449"}, ] [[package]] -name = "types-pyyaml" -version = "6.0.12.20241230" -description = "Typing stubs for PyYAML" +name = "types-pywin32" +version = "310.0.0.20250319" +description = "Typing stubs for pywin32" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"}, - {file = "types_pyyaml-6.0.12.20241230.tar.gz", hash = 
"sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"}, + {file = "types_pywin32-310.0.0.20250319-py3-none-any.whl", hash = "sha256:baeb558a82251f7d430d135036b054740893902fdee3f9fe568322730ff49779"}, + {file = "types_pywin32-310.0.0.20250319.tar.gz", hash = "sha256:4d28fb85b3f268a92905a7242df48c530c847cfe4cdb112386101ab6407660d8"}, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20250402" +description = "Typing stubs for PyYAML" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_pyyaml-6.0.12.20250402-py3-none-any.whl", hash = "sha256:652348fa9e7a203d4b0d21066dfb00760d3cbd5a15ebb7cf8d33c88a49546681"}, + {file = "types_pyyaml-6.0.12.20250402.tar.gz", hash = "sha256:d7c13c3e6d335b6af4b0122a01ff1d270aba84ab96d1a1a1063ecba3e13ec075"}, ] [[package]] name = "types-regex" -version = "2024.11.6.20250318" +version = "2024.11.6.20250403" description = "Typing stubs for regex" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_regex-2024.11.6.20250318-py3-none-any.whl", hash = "sha256:9309fe5918ee7ffe859c04c18040697655fade366c4dc844bbebe86976a9980b"}, - {file = "types_regex-2024.11.6.20250318.tar.gz", hash = "sha256:6d472d0acf37b138cb32f67bd5ab1e7a200e94da8c1aa93ca3625a63e2efe1f3"}, + {file = "types_regex-2024.11.6.20250403-py3-none-any.whl", hash = "sha256:e22c0f67d73f4b4af6086a340f387b6f7d03bed8a0bb306224b75c51a29b0001"}, + {file = "types_regex-2024.11.6.20250403.tar.gz", hash = "sha256:3fdf2a70bbf830de4b3a28e9649a52d43dabb57cdb18fbfe2252eefb53666665"}, ] [[package]] name = "types-requests" -version = "2.31.0.20240406" +version = "2.32.0.20250328" description = "Typing stubs for requests" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types-requests-2.31.0.20240406.tar.gz", hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"}, - {file = 
"types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"}, + {file = "types_requests-2.32.0.20250328-py3-none-any.whl", hash = "sha256:72ff80f84b15eb3aa7a8e2625fffb6a93f2ad5a0c20215fc1dcfa61117bcb2a2"}, + {file = "types_requests-2.32.0.20250328.tar.gz", hash = "sha256:c9e67228ea103bd811c96984fac36ed2ae8da87a36a633964a21f199d60baf32"}, ] [package.dependencies] urllib3 = ">=2" +[[package]] +name = "types-requests-oauthlib" +version = "2.0.0.20250306" +description = "Typing stubs for requests-oauthlib" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_requests_oauthlib-2.0.0.20250306-py3-none-any.whl", hash = "sha256:37707de81d9ce54894afcccd70d4a845dbe4c59e747908faaeba59a96453d993"}, + {file = "types_requests_oauthlib-2.0.0.20250306.tar.gz", hash = "sha256:92e5f1ed35689b1804fdcd60b7ac39b0bd440a4b96693685879bc835b334797f"}, +] + +[package.dependencies] +types-oauthlib = "*" +types-requests = "*" + +[[package]] +name = "types-shapely" +version = "2.0.0.20250404" +description = "Typing stubs for shapely" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_shapely-2.0.0.20250404-py3-none-any.whl", hash = "sha256:170fb92f5c168a120db39b3287697fdec5c93ef3e1ad15e52552c36b25318821"}, + {file = "types_shapely-2.0.0.20250404.tar.gz", hash = "sha256:863f540b47fa626c33ae64eae06df171f9ab0347025d4458d2df496537296b4f"}, +] + +[package.dependencies] +numpy = ">=1.20" + +[[package]] +name = "types-simplejson" +version = "3.20.0.20250326" +description = "Typing stubs for simplejson" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_simplejson-3.20.0.20250326-py3-none-any.whl", hash = "sha256:db1ddea7b8f7623b27a137578f22fc6c618db8c83ccfb1828ca0d2f0ec11efa7"}, + {file = "types_simplejson-3.20.0.20250326.tar.gz", hash = "sha256:b2689bc91e0e672d7a5a947b4cb546b76ae7ddc2899c6678e72a10bf96cd97d2"}, +] 
+ [[package]] name = "types-six" -version = "1.17.0.20250304" +version = "1.17.0.20250403" description = "Typing stubs for six" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_six-1.17.0.20250304-py3-none-any.whl", hash = "sha256:e482df1d439375f4b7c1f2540b1b8584aea82850164a296203ead4a7024fe14f"}, - {file = "types_six-1.17.0.20250304.tar.gz", hash = "sha256:eeb240f9faec63ddd0498d6c0b6abd0496b154a66f960c004d4d733cf31bb4bd"}, + {file = "types_six-1.17.0.20250403-py3-none-any.whl", hash = "sha256:0bbb20fc34a18163afe7cac70b85864bd6937e6d73413c5b8f424def28760ae8"}, + {file = "types_six-1.17.0.20250403.tar.gz", hash = "sha256:82076f86e6e672a95adbf8b52625b1b3c72a8b9a893180344c1a02a6daabead6"}, ] +[[package]] +name = "types-tensorflow" +version = "2.18.0.20250404" +description = "Typing stubs for tensorflow" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_tensorflow-2.18.0.20250404-py3-none-any.whl", hash = "sha256:4ad86534e6cfd6b36b2c97239ef9d122c44b167b25630b7c873a1483f9befd15"}, + {file = "types_tensorflow-2.18.0.20250404.tar.gz", hash = "sha256:b38a427bbec805e4879d248f070baea802673c04cc5ccbe5979d742faa160670"}, +] + +[package.dependencies] +numpy = ">=1.20" +types-protobuf = "*" +types-requests = "*" + [[package]] name = "types-tqdm" -version = "4.67.0.20250301" +version = "4.67.0.20250404" description = "Typing stubs for tqdm" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_tqdm-4.67.0.20250301-py3-none-any.whl", hash = "sha256:8af97deb8e6874af833555dc1fe0fcd456b1a789470bf6cd8813d4e7ee4f6c5b"}, - {file = "types_tqdm-4.67.0.20250301.tar.gz", hash = "sha256:5e89a38ad89b867823368eb97d9f90d2fc69806bb055dde62716a05da62b5e0d"}, + {file = "types_tqdm-4.67.0.20250404-py3-none-any.whl", hash = "sha256:4a9b897eb4036f757240f4cb4a794f296265c04de46fdd058e453891f0186eed"}, + {file = "types_tqdm-4.67.0.20250404.tar.gz", hash = 
"sha256:e9997c655ffbba3ab78f4418b5511c05a54e76824d073d212166dc73aa56c768"}, ] [package.dependencies] types-requests = "*" +[[package]] +name = "types-ujson" +version = "5.10.0.20250326" +description = "Typing stubs for ujson" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_ujson-5.10.0.20250326-py3-none-any.whl", hash = "sha256:acc0913f569def62ef6a892c8a47703f65d05669a3252391a97765cf207dca5b"}, + {file = "types_ujson-5.10.0.20250326.tar.gz", hash = "sha256:5469e05f2c31ecb3c4c0267cc8fe41bcd116826fbb4ded69801a645c687dd014"}, +] + [[package]] name = "typing-extensions" version = "4.12.2" @@ -10148,4 +10380,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.13" -content-hash = "a094082d2fd4d8ea480ac800e54029bdb604de70a7b4348778bdcddb39b06c7e" +content-hash = "7bdb4c26ad249bacd8149e8931f4cdc25d9d0cb319329b1e939e1b4f2c7f40b1" diff --git a/api/pyproject.toml b/api/pyproject.toml index 0cea0293a6..3879352293 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,7 +1,7 @@ [project] name = "dify-api" requires-python = ">=3.11,<3.13" -dynamic = [ "dependencies" ] +dynamic = ["dependencies"] [build-system] requires = ["poetry-core>=2.0.0"] @@ -147,27 +147,47 @@ optional = true [tool.poetry.group.dev.dependencies] coverage = "~7.2.4" faker = "~32.1.0" -mypy = "~1.13.0" +lxml-stubs = "~0.5.1" +mypy = "~1.15.0" pytest = "~8.3.2" pytest-benchmark = "~4.0.0" pytest-env = "~1.1.3" pytest-mock = "~3.14.0" +types-aiofiles = "~24.1.0" types-beautifulsoup4 = "~4.12.0" +types-cachetools = "~5.5.0" +types-colorama = "~0.4.15" +types-defusedxml = "~0.7.0" types-deprecated = "~1.2.15" -types-flask-cors = "~4.0.0" +types-docutils = "~0.21.0" +types-flask-cors = "~5.0.0" types-flask-migrate = "~4.1.0" +types-gevent = "~24.11.0" +types-greenlet = "~3.1.0" types-html5lib = "~1.1.11" +types-markdown = "~3.7.0" +types-oauthlib = "~3.2.0" +types-objgraph = "~3.6.0" +types-olefile = "~0.47.0" types-openpyxl 
= "~3.1.5" -types-protobuf = "~4.25.0" +types-pexpect = "~4.9.0" +types-protobuf = "~5.29.1" types-psutil = "~7.0.0" types-psycopg2 = "~2.9.21" +types-pygments = "~2.19.0" +types-pymysql = "~1.1.0" types-python-dateutil = "~2.9.0" -types-pytz = "~2025.1" -types-pyyaml = "~6.0.2" +types-pywin32 = "~310.0.0" +types-pyyaml = "~6.0.12" types-regex = "~2024.11.6" -types-requests = "~2.31.0" +types-requests = "~2.32.0" +types-requests-oauthlib = "~2.0.0" +types-shapely = "~2.0.0" +types-simplejson = "~3.20.0" types-six = "~1.17.0" +types-tensorflow = "~2.18.0" types-tqdm = "~4.67.0" +types-ujson = "~5.10.0" ############################################################ # [ Lint ] dependency group diff --git a/dev/reformat b/dev/reformat index 82f96b8e8f..daab538951 100755 --- a/dev/reformat +++ b/dev/reformat @@ -16,3 +16,6 @@ poetry run -C api ruff format ./ # run dotenv-linter linter poetry run -P api dotenv-linter ./api/.env.example ./web/.env.example + +# run mypy check +dev/run-mypy diff --git a/dev/run-mypy b/dev/run-mypy new file mode 100755 index 0000000000..cdbbef515d --- /dev/null +++ b/dev/run-mypy @@ -0,0 +1,11 @@ +#!/bin/bash + +set -x + +if ! command -v mypy &> /dev/null; then + poetry install -C api --with dev +fi + +# run mypy checks +poetry run -C api \ + python -m mypy --install-types --non-interactive . 
From 9000f4ad050f967c5f4768c9f5cfad17998b498c Mon Sep 17 00:00:00 2001 From: quicksand Date: Wed, 9 Apr 2025 14:02:17 +0800 Subject: [PATCH 12/15] feat: add plugin daemon oss env config (#17663) --- docker/.env.example | 25 +++++++++++++++++++ docker/docker-compose-template.yaml | 17 +++++++++++++ docker/docker-compose.middleware.yaml | 17 +++++++++++++ docker/docker-compose.yaml | 35 +++++++++++++++++++++++++++ docker/middleware.env.example | 27 ++++++++++++++++++++- 5 files changed, 120 insertions(+), 1 deletion(-) diff --git a/docker/.env.example b/docker/.env.example index 4ab55a9623..29d33360ea 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -1005,3 +1005,28 @@ PLUGIN_PYTHON_ENV_INIT_TIMEOUT=120 PLUGIN_MAX_EXECUTION_TIMEOUT=600 # PIP_MIRROR_URL=https://pypi.tuna.tsinghua.edu.cn/simple PIP_MIRROR_URL= + +# https://github.com/langgenius/dify-plugin-daemon/blob/main/.env.example +# Plugin storage type, local aws_s3 tencent_cos azure_blob +PLUGIN_STORAGE_TYPE=local +PLUGIN_STORAGE_LOCAL_ROOT=/app/storage +PLUGIN_WORKING_PATH=/app/storage/cwd +PLUGIN_INSTALLED_PATH=plugin +PLUGIN_PACKAGE_CACHE_PATH=plugin_packages +PLUGIN_MEDIA_CACHE_PATH=assets +# Plugin oss bucket +PLUGIN_STORAGE_OSS_BUCKET= +# Plugin oss s3 credentials +PLUGIN_S3_USE_AWS_MANAGED_IAM= +PLUGIN_S3_ENDPOINT= +PLUGIN_S3_USE_PATH_STYLE= +PLUGIN_AWS_ACCESS_KEY= +PLUGIN_AWS_SECRET_KEY= +PLUGIN_AWS_REGION= +# Plugin oss azure blob +PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME= +PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING= +# Plugin oss tencent cos +PLUGIN_TENCENT_COS_SECRET_KEY= +PLUGIN_TENCENT_COS_SECRET_ID= +PLUGIN_TENCENT_COS_REGION= diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index e8ed382917..76f70c53b8 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -153,6 +153,23 @@ services: PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120} PLUGIN_MAX_EXECUTION_TIMEOUT: 
${PLUGIN_MAX_EXECUTION_TIMEOUT:-600} PIP_MIRROR_URL: ${PIP_MIRROR_URL:-} + PLUGIN_STORAGE_TYPE: ${PLUGIN_STORAGE_TYPE:-local} + PLUGIN_STORAGE_LOCAL_ROOT: ${PLUGIN_STORAGE_LOCAL_ROOT:-/app/storage} + PLUGIN_INSTALLED_PATH: ${PLUGIN_INSTALLED_PATH:-plugin} + PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages} + PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets} + PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-} + S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-} + S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-} + S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-} + AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-} + AWS_SECRET_KEY: ${PLUGIN_AWS_SECRET_KEY:-} + AWS_REGION: ${PLUGIN_AWS_REGION:-} + AZURE_BLOB_STORAGE_CONNECTION_STRING: ${PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING:-} + AZURE_BLOB_STORAGE_CONTAINER_NAME: ${PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME:-} + TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-} + TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-} + TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-} ports: - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" volumes: diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index 230b8a05be..0035183fca 100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -97,6 +97,23 @@ services: PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120} PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600} PIP_MIRROR_URL: ${PIP_MIRROR_URL:-} + PLUGIN_STORAGE_TYPE: ${PLUGIN_STORAGE_TYPE:-local} + PLUGIN_STORAGE_LOCAL_ROOT: ${PLUGIN_STORAGE_LOCAL_ROOT:-/app/storage} + PLUGIN_INSTALLED_PATH: ${PLUGIN_INSTALLED_PATH:-plugin} + PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages} + PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets} + PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-} + S3_USE_AWS_MANAGED_IAM: 
${PLUGIN_S3_USE_AWS_MANAGED_IAM:-} + S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-} + S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-} + AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-} + AWS_SECRET_KEY: ${PLUGIN_AWS_SECRET_KEY:-} + AWS_REGION: ${PLUGIN_AWS_REGION:-} + AZURE_BLOB_STORAGE_CONNECTION_STRING: ${PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING:-} + AZURE_BLOB_STORAGE_CONTAINER_NAME: ${PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME:-} + TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-} + TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-} + TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-} ports: - "${EXPOSE_PLUGIN_DAEMON_PORT:-5002}:${PLUGIN_DAEMON_PORT:-5002}" - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 6a3e744cfd..3d84af07f4 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -436,6 +436,24 @@ x-shared-env: &shared-api-worker-env PLUGIN_PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120} PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600} PIP_MIRROR_URL: ${PIP_MIRROR_URL:-} + PLUGIN_STORAGE_TYPE: ${PLUGIN_STORAGE_TYPE:-local} + PLUGIN_STORAGE_LOCAL_ROOT: ${PLUGIN_STORAGE_LOCAL_ROOT:-/app/storage} + PLUGIN_WORKING_PATH: ${PLUGIN_WORKING_PATH:-/app/storage/cwd} + PLUGIN_INSTALLED_PATH: ${PLUGIN_INSTALLED_PATH:-plugin} + PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages} + PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets} + PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-} + PLUGIN_S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-} + PLUGIN_S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-} + PLUGIN_S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-} + PLUGIN_AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-} + PLUGIN_AWS_SECRET_KEY: ${PLUGIN_AWS_SECRET_KEY:-} + PLUGIN_AWS_REGION: ${PLUGIN_AWS_REGION:-} + PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME: 
${PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME:-} + PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING: ${PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING:-} + PLUGIN_TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-} + PLUGIN_TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-} + PLUGIN_TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-} services: # API service @@ -591,6 +609,23 @@ services: PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120} PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600} PIP_MIRROR_URL: ${PIP_MIRROR_URL:-} + PLUGIN_STORAGE_TYPE: ${PLUGIN_STORAGE_TYPE:-local} + PLUGIN_STORAGE_LOCAL_ROOT: ${PLUGIN_STORAGE_LOCAL_ROOT:-/app/storage} + PLUGIN_INSTALLED_PATH: ${PLUGIN_INSTALLED_PATH:-plugin} + PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages} + PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets} + PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-} + S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-} + S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-} + S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-} + AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-} + AWS_SECRET_KEY: ${PLUGIN_AWS_SECRET_KEY:-} + AWS_REGION: ${PLUGIN_AWS_REGION:-} + AZURE_BLOB_STORAGE_CONNECTION_STRING: ${PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING:-} + AZURE_BLOB_STORAGE_CONTAINER_NAME: ${PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME:-} + TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-} + TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-} + TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-} ports: - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" volumes: diff --git a/docker/middleware.env.example b/docker/middleware.env.example index d01f9abe53..eb38526d57 100644 --- a/docker/middleware.env.example +++ b/docker/middleware.env.example @@ -119,4 +119,29 @@ FORCE_VERIFYING_SIGNATURE=true PLUGIN_PYTHON_ENV_INIT_TIMEOUT=120 PLUGIN_MAX_EXECUTION_TIMEOUT=600 # 
PIP_MIRROR_URL=https://pypi.tuna.tsinghua.edu.cn/simple -PIP_MIRROR_URL= \ No newline at end of file +PIP_MIRROR_URL= + +# https://github.com/langgenius/dify-plugin-daemon/blob/main/.env.example +# Plugin storage type, local aws_s3 tencent_cos azure_blob +PLUGIN_STORAGE_TYPE=local +PLUGIN_STORAGE_LOCAL_ROOT=/app/storage +PLUGIN_WORKING_PATH=/app/storage/cwd +PLUGIN_INSTALLED_PATH=plugin +PLUGIN_PACKAGE_CACHE_PATH=plugin_packages +PLUGIN_MEDIA_CACHE_PATH=assets +# Plugin oss bucket +PLUGIN_STORAGE_OSS_BUCKET= +# Plugin oss s3 credentials +PLUGIN_S3_USE_AWS_MANAGED_IAM= +PLUGIN_S3_ENDPOINT= +PLUGIN_S3_USE_PATH_STYLE= +PLUGIN_AWS_ACCESS_KEY= +PLUGIN_AWS_SECRET_KEY= +PLUGIN_AWS_REGION= +# Plugin oss azure blob +PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME= +PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING= +# Plugin oss tencent cos +PLUGIN_TENCENT_COS_SECRET_KEY= +PLUGIN_TENCENT_COS_SECRET_ID= +PLUGIN_TENCENT_COS_REGION= \ No newline at end of file From eb8584613beda9621051936e391d31b63ff56efd Mon Sep 17 00:00:00 2001 From: crazywoola <100913391+crazywoola@users.noreply.github.com> Date: Wed, 9 Apr 2025 14:07:32 +0800 Subject: [PATCH 13/15] fix: Account.query => db.session.query(Account) (#17667) --- api/controllers/service_api/wraps.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/controllers/service_api/wraps.py b/api/controllers/service_api/wraps.py index ff33b62eda..7facb03358 100644 --- a/api/controllers/service_api/wraps.py +++ b/api/controllers/service_api/wraps.py @@ -69,7 +69,7 @@ def validate_app_token(view: Optional[Callable] = None, *, fetch_user_arg: Optio ) # TODO: only owner information is required, so only one is returned. 
if tenant_account_join: tenant, ta = tenant_account_join - account = Account.query.filter_by(id=ta.account_id).first() + account = db.session.query(Account).filter(Account.id == ta.account_id).first() # Login admin if account: account.current_tenant = tenant From df03c89a48f6da90f28287eabd75276adc9a4596 Mon Sep 17 00:00:00 2001 From: KVOJJJin Date: Wed, 9 Apr 2025 15:10:08 +0800 Subject: [PATCH 14/15] Chore: remove beta tag of app type (#17676) --- web/app/components/app/create-app-modal/index.tsx | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/web/app/components/app/create-app-modal/index.tsx b/web/app/components/app/create-app-modal/index.tsx index c442b6e979..6abc871868 100644 --- a/web/app/components/app/create-app-modal/index.tsx +++ b/web/app/components/app/create-app-modal/index.tsx @@ -148,7 +148,6 @@ function CreateApp({ onClose, onSuccess, onCreateFromTemplate }: CreateAppProps)
void } -function AppTypeCard({ icon, title, beta = false, description, active, onClick }: AppTypeCardProps) { +function AppTypeCard({ icon, title, description, active, onClick }: AppTypeCardProps) { const { t } = useTranslation() return
- {beta &&
{t('common.menus.status')}
} {icon}
{title}
{description}
From c9f18aae0fca88ec7c440e60b6153333a64d14bb Mon Sep 17 00:00:00 2001 From: Joel Date: Wed, 9 Apr 2025 15:39:12 +0800 Subject: [PATCH 15/15] chore: find code with high complexity (#17679) --- web/.vscode/extensions.json | 5 +++-- web/eslint.config.mjs | 3 +-- web/package.json | 1 + 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/web/.vscode/extensions.json b/web/.vscode/extensions.json index d7680d74a5..a9afbcc640 100644 --- a/web/.vscode/extensions.json +++ b/web/.vscode/extensions.json @@ -1,6 +1,7 @@ { "recommendations": [ "bradlc.vscode-tailwindcss", - "firsttris.vscode-jest-runner" + "firsttris.vscode-jest-runner", + "kisstkondoros.vscode-codemetrics" ] -} +} \ No newline at end of file diff --git a/web/eslint.config.mjs b/web/eslint.config.mjs index 204efc4715..750cee5545 100644 --- a/web/eslint.config.mjs +++ b/web/eslint.config.mjs @@ -65,8 +65,6 @@ export default combine( // use `ESLINT_CONFIG_INSPECTOR=true pnpx @eslint/config-inspector` to check the config // ...process.env.ESLINT_CONFIG_INSPECTOR // ? [] - // TODO: remove this when upgrade to nextjs 15 - // : fixupConfigRules(compat.extends('next')), { rules: { // performance issue, and not used. 
@@ -87,6 +85,7 @@ export default combine( { // orignal config rules: { + 'complexity': ['warn', { max: 10 }], // orignal ts/no-var-requires 'ts/no-require-imports': 'off', 'no-console': 'off', diff --git a/web/package.json b/web/package.json index 74eead4eba..f439c03767 100644 --- a/web/package.json +++ b/web/package.json @@ -13,6 +13,7 @@ "fix": "next lint --fix", "eslint-fix": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix", "eslint-fix-only-show-error": "eslint --cache --cache-location node_modules/.cache/eslint/.eslint-cache --fix --quiet", + "eslint-complexity": "eslint --rule 'complexity: [error, {max: 15}]' --quiet", "prepare": "cd ../ && node -e \"if (process.env.NODE_ENV !== 'production'){process.exit(1)} \" || husky ./web/.husky", "gen-icons": "node ./app/components/base/icons/script.mjs", "uglify-embed": "node ./bin/uglify-embed",