diff --git a/.devcontainer/post_create_command.sh b/.devcontainer/post_create_command.sh index 5e76bdc2a3..c53c26bb9a 100755 --- a/.devcontainer/post_create_command.sh +++ b/.devcontainer/post_create_command.sh @@ -1,6 +1,6 @@ #!/bin/bash -npm add -g pnpm@9.12.2 +npm add -g pnpm@10.8.0 cd web && pnpm install pipx install poetry diff --git a/.github/workflows/api-tests.yml b/.github/workflows/api-tests.yml index b9547b6452..dca8e640c7 100644 --- a/.github/workflows/api-tests.yml +++ b/.github/workflows/api-tests.yml @@ -53,9 +53,14 @@ jobs: - name: Run dify config tests run: poetry run -P api python dev/pytest/pytest_config_tests.py + - name: Cache MyPy + uses: actions/cache@v4 + with: + path: api/.mypy_cache + key: mypy-${{ matrix.python-version }}-${{ runner.os }}-${{ hashFiles('api/poetry.lock') }} + - name: Run mypy - run: | - poetry run -C api python -m mypy --install-types --non-interactive . + run: dev/run-mypy - name: Set up dotenvs run: | diff --git a/.github/workflows/build-push.yml b/.github/workflows/build-push.yml index 851621ee49..cc735ae67c 100644 --- a/.github/workflows/build-push.yml +++ b/.github/workflows/build-push.yml @@ -6,7 +6,6 @@ on: - "main" - "deploy/dev" - "deploy/enterprise" - - release/1.1.3-fix1 tags: - "*" diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index d73a782c93..625930b5f5 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -82,7 +82,7 @@ jobs: uses: actions/setup-node@v4 if: steps.changed-files.outputs.any_changed == 'true' with: - node-version: 20 + node-version: 22 cache: pnpm cache-dependency-path: ./web/package.json @@ -153,6 +153,7 @@ jobs: env: BASH_SEVERITY: warning DEFAULT_BRANCH: main + FILTER_REGEX_INCLUDE: pnpm-lock.yaml GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} IGNORE_GENERATED_FILES: true IGNORE_GITIGNORED_FILES: true diff --git a/.github/workflows/tool-test-sdks.yaml b/.github/workflows/tool-test-sdks.yaml index 93edb2737a..a6e48d1359 100644 --- 
a/.github/workflows/tool-test-sdks.yaml +++ b/.github/workflows/tool-test-sdks.yaml @@ -18,7 +18,7 @@ jobs: strategy: matrix: - node-version: [16, 18, 20] + node-version: [16, 18, 20, 22] defaults: run: diff --git a/.github/workflows/translate-i18n-base-on-english.yml b/.github/workflows/translate-i18n-base-on-english.yml index 80b78a1311..3f8082eb69 100644 --- a/.github/workflows/translate-i18n-base-on-english.yml +++ b/.github/workflows/translate-i18n-base-on-english.yml @@ -33,7 +33,7 @@ jobs: - name: Set up Node.js if: env.FILES_CHANGED == 'true' - uses: actions/setup-node@v2 + uses: actions/setup-node@v4 with: node-version: 'lts/*' diff --git a/.github/workflows/web-tests.yml b/.github/workflows/web-tests.yml index acee26af2f..85e8b99473 100644 --- a/.github/workflows/web-tests.yml +++ b/.github/workflows/web-tests.yml @@ -31,7 +31,9 @@ jobs: uses: tj-actions/changed-files@v45 with: files: web/** + - name: Install pnpm + if: steps.changed-files.outputs.any_changed == 'true' uses: pnpm/action-setup@v4 with: version: 10 @@ -41,7 +43,7 @@ jobs: uses: actions/setup-node@v4 if: steps.changed-files.outputs.any_changed == 'true' with: - node-version: 20 + node-version: 22 cache: pnpm cache-dependency-path: ./web/package.json diff --git a/api/.env.example b/api/.env.example index e4961cd040..c969db4bfa 100644 --- a/api/.env.example +++ b/api/.env.example @@ -327,6 +327,7 @@ MULTIMODAL_SEND_FORMAT=base64 PROMPT_GENERATION_MAX_TOKENS=512 CODE_GENERATION_MAX_TOKENS=1024 STRUCTURED_OUTPUT_MAX_TOKENS=1024 +PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false # Mail configuration, support: resend, smtp MAIL_TYPE= diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py index fa8e8c2bf6..d35a74e3ee 100644 --- a/api/configs/feature/__init__.py +++ b/api/configs/feature/__init__.py @@ -442,7 +442,7 @@ class LoggingConfig(BaseSettings): class ModelLoadBalanceConfig(BaseSettings): """ - Configuration for model load balancing + Configuration for model load balancing 
and token counting """ MODEL_LB_ENABLED: bool = Field( @@ -450,6 +450,11 @@ class ModelLoadBalanceConfig(BaseSettings): default=False, ) + PLUGIN_BASED_TOKEN_COUNTING_ENABLED: bool = Field( + description="Enable or disable plugin based token counting. If disabled, token counting will return 0.", + default=False, + ) + class BillingConfig(BaseSettings): """ diff --git a/api/configs/packaging/__init__.py b/api/configs/packaging/__init__.py index 0ef5a724b3..c7aedc5b8a 100644 --- a/api/configs/packaging/__init__.py +++ b/api/configs/packaging/__init__.py @@ -9,7 +9,7 @@ class PackagingInfo(BaseSettings): CURRENT_VERSION: str = Field( description="Dify version", - default="1.1.3", + default="1.2.0", ) COMMIT_SHA: str = Field( diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py index 3700f007f1..302bc30905 100644 --- a/api/controllers/console/workspace/plugin.py +++ b/api/controllers/console/workspace/plugin.py @@ -49,6 +49,23 @@ class PluginListApi(Resource): return jsonable_encoder({"plugins": plugins}) +class PluginListLatestVersionsApi(Resource): + @setup_required + @login_required + @account_initialization_required + def post(self): + req = reqparse.RequestParser() + req.add_argument("plugin_ids", type=list, required=True, location="json") + args = req.parse_args() + + try: + versions = PluginService.list_latest_versions(args["plugin_ids"]) + except PluginDaemonClientSideError as e: + raise ValueError(e) + + return jsonable_encoder({"versions": versions}) + + class PluginListInstallationsFromIdsApi(Resource): @setup_required @login_required @@ -453,6 +470,7 @@ class PluginFetchPermissionApi(Resource): api.add_resource(PluginDebuggingKeyApi, "/workspaces/current/plugin/debugging-key") api.add_resource(PluginListApi, "/workspaces/current/plugin/list") +api.add_resource(PluginListLatestVersionsApi, "/workspaces/current/plugin/list/latest-versions") api.add_resource(PluginListInstallationsFromIdsApi, 
"/workspaces/current/plugin/list/installations/ids") api.add_resource(PluginIconApi, "/workspaces/current/plugin/icon") api.add_resource(PluginUploadFromPkgApi, "/workspaces/current/plugin/upload/pkg") diff --git a/api/controllers/service_api/app/message.py b/api/controllers/service_api/app/message.py index 38917bf345..95e538f4c7 100644 --- a/api/controllers/service_api/app/message.py +++ b/api/controllers/service_api/app/message.py @@ -20,14 +20,6 @@ from services.message_service import MessageService class MessageListApi(Resource): - def get_retriever_resources(self): - try: - if self.message_metadata: - return json.loads(self.message_metadata).get("retriever_resources", []) - return [] - except (json.JSONDecodeError, TypeError): - return [] - message_fields = { "id": fields.String, "conversation_id": fields.String, @@ -37,7 +29,11 @@ class MessageListApi(Resource): "answer": fields.String(attribute="re_sign_file_url_answer"), "message_files": fields.List(fields.Nested(message_file_fields)), "feedback": fields.Nested(feedback_fields, attribute="user_feedback", allow_null=True), - "retriever_resources": get_retriever_resources, + "retriever_resources": fields.Raw( + attribute=lambda obj: json.loads(obj.message_metadata).get("retriever_resources", []) + if obj.message_metadata + else [] + ), "created_at": TimestampField, "agent_thoughts": fields.List(fields.Nested(agent_thought_fields)), "status": fields.String, diff --git a/api/controllers/service_api/wraps.py b/api/controllers/service_api/wraps.py index ff33b62eda..7facb03358 100644 --- a/api/controllers/service_api/wraps.py +++ b/api/controllers/service_api/wraps.py @@ -69,7 +69,7 @@ def validate_app_token(view: Optional[Callable] = None, *, fetch_user_arg: Optio ) # TODO: only owner information is required, so only one is returned. 
if tenant_account_join: tenant, ta = tenant_account_join - account = Account.query.filter_by(id=ta.account_id).first() + account = db.session.query(Account).filter(Account.id == ta.account_id).first() # Login admin if account: account.current_tenant = tenant diff --git a/api/core/app/apps/agent_chat/app_runner.py b/api/core/app/apps/agent_chat/app_runner.py index 72a1717112..71328f6d1b 100644 --- a/api/core/app/apps/agent_chat/app_runner.py +++ b/api/core/app/apps/agent_chat/app_runner.py @@ -53,20 +53,6 @@ class AgentChatAppRunner(AppRunner): query = application_generate_entity.query files = application_generate_entity.files - # Pre-calculate the number of tokens of the prompt messages, - # and return the rest number of tokens by model context token size limit and max token size limit. - # If the rest number of tokens is not enough, raise exception. - # Include: prompt template, inputs, query(optional), files(optional) - # Not Include: memory, external data, dataset context - self.get_pre_calculate_rest_tokens( - app_record=app_record, - model_config=application_generate_entity.model_conf, - prompt_template_entity=app_config.prompt_template, - inputs=dict(inputs), - files=list(files), - query=query, - ) - memory = None if application_generate_entity.conversation_id: # get memory of conversation (read-only) diff --git a/api/core/app/apps/chat/app_runner.py b/api/core/app/apps/chat/app_runner.py index 8641f188f7..39597fc036 100644 --- a/api/core/app/apps/chat/app_runner.py +++ b/api/core/app/apps/chat/app_runner.py @@ -61,20 +61,6 @@ class ChatAppRunner(AppRunner): ) image_detail_config = image_detail_config or ImagePromptMessageContent.DETAIL.LOW - # Pre-calculate the number of tokens of the prompt messages, - # and return the rest number of tokens by model context token size limit and max token size limit. - # If the rest number of tokens is not enough, raise exception. 
- # Include: prompt template, inputs, query(optional), files(optional) - # Not Include: memory, external data, dataset context - self.get_pre_calculate_rest_tokens( - app_record=app_record, - model_config=application_generate_entity.model_conf, - prompt_template_entity=app_config.prompt_template, - inputs=inputs, - files=files, - query=query, - ) - memory = None if application_generate_entity.conversation_id: # get memory of conversation (read-only) diff --git a/api/core/app/apps/completion/app_runner.py b/api/core/app/apps/completion/app_runner.py index 4f16247318..80fdd0b80e 100644 --- a/api/core/app/apps/completion/app_runner.py +++ b/api/core/app/apps/completion/app_runner.py @@ -54,20 +54,6 @@ class CompletionAppRunner(AppRunner): ) image_detail_config = image_detail_config or ImagePromptMessageContent.DETAIL.LOW - # Pre-calculate the number of tokens of the prompt messages, - # and return the rest number of tokens by model context token size limit and max token size limit. - # If the rest number of tokens is not enough, raise exception. 
- # Include: prompt template, inputs, query(optional), files(optional) - # Not Include: memory, external data, dataset context - self.get_pre_calculate_rest_tokens( - app_record=app_record, - model_config=application_generate_entity.model_conf, - prompt_template_entity=app_config.prompt_template, - inputs=inputs, - files=files, - query=query, - ) - # organize all inputs and template to prompt messages # Include: prompt template, inputs, query(optional), files(optional) prompt_messages, stop = self.organize_prompt_messages( diff --git a/api/core/helper/ssrf_proxy.py b/api/core/helper/ssrf_proxy.py index 6367e45638..969cd112ee 100644 --- a/api/core/helper/ssrf_proxy.py +++ b/api/core/helper/ssrf_proxy.py @@ -56,8 +56,12 @@ def make_request(method, url, max_retries=SSRF_DEFAULT_MAX_RETRIES, **kwargs): response = client.request(method=method, url=url, **kwargs) elif dify_config.SSRF_PROXY_HTTP_URL and dify_config.SSRF_PROXY_HTTPS_URL: proxy_mounts = { - "http://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTP_URL), - "https://": httpx.HTTPTransport(proxy=dify_config.SSRF_PROXY_HTTPS_URL), + "http://": httpx.HTTPTransport( + proxy=dify_config.SSRF_PROXY_HTTP_URL, verify=HTTP_REQUEST_NODE_SSL_VERIFY + ), + "https://": httpx.HTTPTransport( + proxy=dify_config.SSRF_PROXY_HTTPS_URL, verify=HTTP_REQUEST_NODE_SSL_VERIFY + ), } with httpx.Client(mounts=proxy_mounts, verify=HTTP_REQUEST_NODE_SSL_VERIFY) as client: response = client.request(method=method, url=url, **kwargs) diff --git a/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md b/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md index f050919d81..b5a714a172 100644 --- a/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md +++ b/api/core/model_runtime/docs/en_US/customizable_model_scale_out.md @@ -192,7 +192,7 @@ def get_num_tokens(self, model: str, credentials: dict, prompt_messages: list[Pr ``` -Sometimes, you might not want to return 0 directly. 
In such cases, you can use `self._get_num_tokens_by_gpt2(text: str)` to get pre-computed tokens. This method is provided by the `AIModel` base class, and it uses GPT2's Tokenizer for calculation. However, it should be noted that this is only a substitute and may not be fully accurate. +Sometimes, you might not want to return 0 directly. In such cases, you can use `self._get_num_tokens_by_gpt2(text: str)` to get pre-computed tokens and ensure environment variable `PLUGIN_BASED_TOKEN_COUNTING_ENABLED` is set to `true`. This method is provided by the `AIModel` base class, and it uses GPT2's Tokenizer for calculation. However, it should be noted that this is only a substitute and may not be fully accurate. - Model Credentials Validation diff --git a/api/core/model_runtime/docs/zh_Hans/customizable_model_scale_out.md b/api/core/model_runtime/docs/zh_Hans/customizable_model_scale_out.md index 240f65802b..c36575b9af 100644 --- a/api/core/model_runtime/docs/zh_Hans/customizable_model_scale_out.md +++ b/api/core/model_runtime/docs/zh_Hans/customizable_model_scale_out.md @@ -179,7 +179,7 @@ provider_credential_schema: """ ``` - 有时候,也许你不需要直接返回0,所以你可以使用`self._get_num_tokens_by_gpt2(text: str)`来获取预计算的tokens,这个方法位于`AIModel`基类中,它会使用GPT2的Tokenizer进行计算,但是只能作为替代方法,并不完全准确。 + 有时候,也许你不需要直接返回0,所以你可以使用`self._get_num_tokens_by_gpt2(text: str)`来获取预计算的tokens,并确保环境变量`PLUGIN_BASED_TOKEN_COUNTING_ENABLED`设置为`true`,这个方法位于`AIModel`基类中,它会使用GPT2的Tokenizer进行计算,但是只能作为替代方法,并不完全准确。 - 模型凭据校验 diff --git a/api/core/model_runtime/model_providers/__base/large_language_model.py b/api/core/model_runtime/model_providers/__base/large_language_model.py index ed67fef768..b81ccafc1e 100644 --- a/api/core/model_runtime/model_providers/__base/large_language_model.py +++ b/api/core/model_runtime/model_providers/__base/large_language_model.py @@ -295,18 +295,20 @@ class LargeLanguageModel(AIModel): :param tools: tools for tool calling :return: """ - plugin_model_manager = PluginModelManager() - return 
plugin_model_manager.get_llm_num_tokens( - tenant_id=self.tenant_id, - user_id="unknown", - plugin_id=self.plugin_id, - provider=self.provider_name, - model_type=self.model_type.value, - model=model, - credentials=credentials, - prompt_messages=prompt_messages, - tools=tools, - ) + if dify_config.PLUGIN_BASED_TOKEN_COUNTING_ENABLED: + plugin_model_manager = PluginModelManager() + return plugin_model_manager.get_llm_num_tokens( + tenant_id=self.tenant_id, + user_id="unknown", + plugin_id=self.plugin_id, + provider=self.provider_name, + model_type=self.model_type.value, + model=model, + credentials=credentials, + prompt_messages=prompt_messages, + tools=tools, + ) + return 0 def _calc_response_usage( self, model: str, credentials: dict, prompt_tokens: int, completion_tokens: int diff --git a/api/core/plugin/entities/plugin.py b/api/core/plugin/entities/plugin.py index 61f8a65918..421c16093d 100644 --- a/api/core/plugin/entities/plugin.py +++ b/api/core/plugin/entities/plugin.py @@ -120,8 +120,6 @@ class PluginEntity(PluginInstallation): name: str installation_id: str version: str - latest_version: Optional[str] = None - latest_unique_identifier: Optional[str] = None @model_validator(mode="after") def set_plugin_id(self): diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py index fea4d0edf7..46a5330bdb 100644 --- a/api/core/rag/datasource/retrieval_service.py +++ b/api/core/rag/datasource/retrieval_service.py @@ -46,7 +46,7 @@ class RetrievalService: if not query: return [] dataset = cls._get_dataset(dataset_id) - if not dataset or dataset.available_document_count == 0 or dataset.available_segment_count == 0: + if not dataset: return [] all_documents: list[Document] = [] diff --git a/api/core/rag/datasource/vdb/tencent/tencent_vector.py b/api/core/rag/datasource/vdb/tencent/tencent_vector.py index 540d71bb88..e266659075 100644 --- a/api/core/rag/datasource/vdb/tencent/tencent_vector.py +++ 
b/api/core/rag/datasource/vdb/tencent/tencent_vector.py @@ -51,6 +51,7 @@ class TencentVector(BaseVector): self._client = RPCVectorDBClient(**self._client_config.to_tencent_params()) self._enable_hybrid_search = False self._dimension = 1024 + self._init_database() self._load_collection() self._bm25 = BM25Encoder.default("zh") @@ -279,7 +280,10 @@ class TencentVector(BaseVector): return docs def delete(self) -> None: - self._client.drop_collection(database_name=self._client_config.database, collection_name=self.collection_name) + if self._has_collection(): + self._client.drop_collection( + database_name=self._client_config.database, collection_name=self.collection_name + ) class TencentVectorFactory(AbstractVectorFactory): diff --git a/api/core/rag/extractor/word_extractor.py b/api/core/rag/extractor/word_extractor.py index 0a6ffaa1dd..70c618a631 100644 --- a/api/core/rag/extractor/word_extractor.py +++ b/api/core/rag/extractor/word_extractor.py @@ -85,7 +85,7 @@ class WordExtractor(BaseExtractor): if "image" in rel.target_ref: image_count += 1 if rel.is_external: - url = rel.reltype + url = rel.target_ref response = ssrf_proxy.get(url) if response.status_code == 200: image_ext = mimetypes.guess_extension(response.headers["Content-Type"]) diff --git a/api/core/tools/tool_file_manager.py b/api/core/tools/tool_file_manager.py index 1573e0c219..7e8d4280d4 100644 --- a/api/core/tools/tool_file_manager.py +++ b/api/core/tools/tool_file_manager.py @@ -108,7 +108,11 @@ class ToolFileManager: except httpx.TimeoutException: raise ValueError(f"timeout when downloading file from {file_url}") - mimetype = guess_type(file_url)[0] or "application/octet-stream" + mimetype = ( + guess_type(file_url)[0] + or response.headers.get("Content-Type", "").split(";")[0].strip() + or "application/octet-stream" + ) extension = guess_extension(mimetype) or ".bin" unique_name = uuid4().hex filename = f"{unique_name}{extension}" diff --git a/api/core/workflow/graph_engine/graph_engine.py 
b/api/core/workflow/graph_engine/graph_engine.py index d0f3041d5d..36273d8ec1 100644 --- a/api/core/workflow/graph_engine/graph_engine.py +++ b/api/core/workflow/graph_engine/graph_engine.py @@ -641,6 +641,8 @@ class GraphEngine: try: # run node retry_start_at = datetime.now(UTC).replace(tzinfo=None) + # yield control to other threads + time.sleep(0.001) generator = node_instance.run() for item in generator: if isinstance(item, GraphEngineEvent): diff --git a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py index 7b1b8cf483..be43639fc0 100644 --- a/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py +++ b/api/core/workflow/nodes/parameter_extractor/parameter_extractor_node.py @@ -1,4 +1,5 @@ import json +import logging import uuid from collections.abc import Mapping, Sequence from typing import Any, Optional, cast @@ -58,6 +59,30 @@ from .prompts import ( FUNCTION_CALLING_EXTRACTOR_USER_TEMPLATE, ) +logger = logging.getLogger(__name__) + + +def extract_json(text): + """ + From a given JSON started from '{' or '[' extract the complete JSON object. + """ + stack = [] + for i, c in enumerate(text): + if c in {"{", "["}: + stack.append(c) + elif c in {"}", "]"}: + # check if stack is empty + if not stack: + return text[:i] + # check if the last element in stack is matching + if (c == "}" and stack[-1] == "{") or (c == "]" and stack[-1] == "["): + stack.pop() + if not stack: + return text[: i + 1] + else: + return text[:i] + return None + class ParameterExtractorNode(LLMNode): """ @@ -594,27 +619,6 @@ class ParameterExtractorNode(LLMNode): Extract complete json response. """ - def extract_json(text): - """ - From a given JSON started from '{' or '[' extract the complete JSON object. 
- """ - stack = [] - for i, c in enumerate(text): - if c in {"{", "["}: - stack.append(c) - elif c in {"}", "]"}: - # check if stack is empty - if not stack: - return text[:i] - # check if the last element in stack is matching - if (c == "}" and stack[-1] == "{") or (c == "]" and stack[-1] == "["): - stack.pop() - if not stack: - return text[: i + 1] - else: - return text[:i] - return None - # extract json from the text for idx in range(len(result)): if result[idx] == "{" or result[idx] == "[": @@ -624,6 +628,7 @@ class ParameterExtractorNode(LLMNode): return cast(dict, json.loads(json_str)) except Exception: pass + logger.info(f"extract error: {result}") return None def _extract_json_from_tool_call(self, tool_call: AssistantPromptMessage.ToolCall) -> Optional[dict]: @@ -633,7 +638,18 @@ class ParameterExtractorNode(LLMNode): if not tool_call or not tool_call.function.arguments: return None - return cast(dict, json.loads(tool_call.function.arguments)) + result = tool_call.function.arguments + # extract json from the arguments + for idx in range(len(result)): + if result[idx] == "{" or result[idx] == "[": + json_str = extract_json(result[idx:]) + if json_str: + try: + return cast(dict, json.loads(json_str)) + except Exception: + pass + logger.info(f"extract error: {result}") + return None def _generate_default_result(self, data: ParameterExtractorNodeData) -> dict: """ diff --git a/api/core/workflow/nodes/question_classifier/template_prompts.py b/api/core/workflow/nodes/question_classifier/template_prompts.py index 53fc136b2c..70178ed934 100644 --- a/api/core/workflow/nodes/question_classifier/template_prompts.py +++ b/api/core/workflow/nodes/question_classifier/template_prompts.py @@ -1,21 +1,21 @@ QUESTION_CLASSIFIER_SYSTEM_PROMPT = """ - ### Job Description', - You are a text classification engine that analyzes text data and assigns categories based on user input or automatically determined categories. 
- ### Task - Your task is to assign one categories ONLY to the input text and only one category may be assigned returned in the output. Additionally, you need to extract the key words from the text that are related to the classification. - ### Format - The input text is in the variable input_text. Categories are specified as a category list with two filed category_id and category_name in the variable categories. Classification instructions may be included to improve the classification accuracy. - ### Constraint - DO NOT include anything other than the JSON array in your response. - ### Memory - Here are the chat histories between human and assistant, inside XML tags. - - {histories} - +### Job Description', +You are a text classification engine that analyzes text data and assigns categories based on user input or automatically determined categories. +### Task +Your task is to assign one categories ONLY to the input text and only one category may be assigned returned in the output. Additionally, you need to extract the key words from the text that are related to the classification. +### Format +The input text is in the variable input_text. Categories are specified as a category list with two filed category_id and category_name in the variable categories. Classification instructions may be included to improve the classification accuracy. +### Constraint +DO NOT include anything other than the JSON array in your response. +### Memory +Here are the chat histories between human and assistant, inside XML tags. + +{histories} + """ # noqa: E501 QUESTION_CLASSIFIER_USER_PROMPT_1 = """ - { "input_text": ["I recently had a great experience with your company. The service was prompt and the staff was very friendly."], + {"input_text": ["I recently had a great experience with your company. 
The service was prompt and the staff was very friendly."], "categories": [{"category_id":"f5660049-284f-41a7-b301-fd24176a711c","category_name":"Customer Service"},{"category_id":"8d007d06-f2c9-4be5-8ff6-cd4381c13c60","category_name":"Satisfaction"},{"category_id":"5fbbbb18-9843-466d-9b8e-b9bfbb9482c8","category_name":"Sales"},{"category_id":"23623c75-7184-4a2e-8226-466c2e4631e4","category_name":"Product"}], "classification_instructions": ["classify the text based on the feedback provided by customer"]} """ # noqa: E501 @@ -43,9 +43,9 @@ QUESTION_CLASSIFIER_ASSISTANT_PROMPT_2 = """ """ QUESTION_CLASSIFIER_USER_PROMPT_3 = """ - '{{"input_text": ["{input_text}"],', - '"categories": {categories}, ', - '"classification_instructions": ["{classification_instructions}"]}}' + {{"input_text": ["{input_text}"], + "categories": {categories}, + "classification_instructions": ["{classification_instructions}"]}} """ QUESTION_CLASSIFIER_COMPLETION_PROMPT = """ diff --git a/api/migrations/versions/2025_03_29_2227-6a9f914f656c_change_documentsegment_and_childchunk_.py b/api/migrations/versions/2025_03_29_2227-6a9f914f656c_change_documentsegment_and_childchunk_.py new file mode 100644 index 0000000000..45904f0c80 --- /dev/null +++ b/api/migrations/versions/2025_03_29_2227-6a9f914f656c_change_documentsegment_and_childchunk_.py @@ -0,0 +1,43 @@ +"""change documentsegment and childchunk indexes + +Revision ID: 6a9f914f656c +Revises: d20049ed0af6 +Create Date: 2025-03-29 22:27:24.789481 + +""" +from alembic import op +import models as models +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '6a9f914f656c' +down_revision = 'd20049ed0af6' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + with op.batch_alter_table('child_chunks', schema=None) as batch_op: + batch_op.create_index('child_chunks_node_idx', ['index_node_id', 'dataset_id'], unique=False) + batch_op.create_index('child_chunks_segment_idx', ['segment_id'], unique=False) + + with op.batch_alter_table('document_segments', schema=None) as batch_op: + batch_op.drop_index('document_segment_dataset_node_idx') + batch_op.create_index('document_segment_node_dataset_idx', ['index_node_id', 'dataset_id'], unique=False) + + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + with op.batch_alter_table('document_segments', schema=None) as batch_op: + batch_op.drop_index('document_segment_node_dataset_idx') + batch_op.create_index('document_segment_dataset_node_idx', ['dataset_id', 'index_node_id'], unique=False) + + with op.batch_alter_table('child_chunks', schema=None) as batch_op: + batch_op.drop_index('child_chunks_segment_idx') + batch_op.drop_index('child_chunks_node_idx') + + # ### end Alembic commands ### diff --git a/api/models/dataset.py b/api/models/dataset.py index 47f96c669e..d6708ac88b 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -643,7 +643,7 @@ class DocumentSegment(db.Model): # type: ignore[name-defined] db.Index("document_segment_document_id_idx", "document_id"), db.Index("document_segment_tenant_dataset_idx", "dataset_id", "tenant_id"), db.Index("document_segment_tenant_document_idx", "document_id", "tenant_id"), - db.Index("document_segment_dataset_node_idx", "dataset_id", "index_node_id"), + db.Index("document_segment_node_dataset_idx", "index_node_id", "dataset_id"), db.Index("document_segment_tenant_idx", "tenant_id"), ) @@ -791,6 +791,8 @@ class ChildChunk(db.Model): # type: ignore[name-defined] __table_args__ = ( db.PrimaryKeyConstraint("id", name="child_chunk_pkey"), db.Index("child_chunk_dataset_id_idx", "tenant_id", "dataset_id", "document_id", "segment_id", "index_node_id"), + 
db.Index("child_chunks_node_idx", "index_node_id", "dataset_id"), + db.Index("child_chunks_segment_idx", "segment_id"), ) # initial fields diff --git a/api/poetry.lock b/api/poetry.lock index f0ebfdfc5a..94c7ce698f 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -845,10 +845,6 @@ files = [ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"}, {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, @@ -861,14 +857,8 @@ files = [ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"}, {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, @@ -879,24 +869,8 @@ files = [ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, {file = 
"Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"}, {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, - {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"}, - {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"}, - {file = 
"Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"}, - {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"}, - {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"}, {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, @@ -906,10 +880,6 @@ files = [ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, - {file = 
"Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"}, {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, @@ -921,10 +891,6 @@ files = [ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = 
"sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"}, {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, @@ -937,10 +903,6 @@ files = [ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"}, {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, @@ -953,10 +915,6 @@ files = [ {file = 
"Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"}, {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, @@ -4417,6 +4375,21 @@ html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] source = ["Cython (>=3.0.11,<3.1.0)"] +[[package]] +name = "lxml-stubs" +version = "0.5.1" +description = "Type annotations for the lxml package" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "lxml-stubs-0.5.1.tar.gz", hash = "sha256:e0ec2aa1ce92d91278b719091ce4515c12adc1d564359dfaf81efa7d4feab79d"}, + {file = "lxml_stubs-0.5.1-py3-none-any.whl", hash = "sha256:1f689e5dbc4b9247cb09ae820c7d34daeb1fdbd1db06123814b856dae7787272"}, +] + +[package.extras] +test = ["coverage[toml] (>=7.2.5)", "mypy (>=1.2.0)", "pytest 
(>=7.3.0)", "pytest-mypy-plugins (>=1.10.1)"] + [[package]] name = "lz4" version = "4.4.3" @@ -4956,49 +4929,49 @@ files = [ [[package]] name = "mypy" -version = "1.13.0" +version = "1.15.0" description = "Optional static typing for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, - {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, - {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, - {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, - {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, - {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, - {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, - {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, - {file = 
"mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, - {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, - {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, - {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, - {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, - {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, - {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, - {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, - {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, - {file = 
"mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, - {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, - {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, - {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, - {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, - {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, + {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, + {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, + {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, + {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, + {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, + {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, + {file = 
"mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, + {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, + {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, + {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, + {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, + {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, + 
{file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, ] [package.dependencies] -mypy-extensions = ">=1.0.0" -typing-extensions = ">=4.6.0" +mypy_extensions = ">=1.0.0" +typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] @@ -5147,7 +5120,7 @@ version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" -groups = ["main", "indirect", "vdb"] +groups = ["main", "dev", "indirect", "vdb"] files = [ {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, @@ -8871,6 +8844,18 @@ rich = ">=10.11.0" shellingham = ">=1.3.0" typing-extensions = ">=3.7.4.3" +[[package]] +name = "types-aiofiles" +version = "24.1.0.20250326" +description = "Typing stubs for aiofiles" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_aiofiles-24.1.0.20250326-py3-none-any.whl", hash = "sha256:dfb58c9aa18bd449e80fb5d7f49dc3dd20d31de920a46223a61798ee4a521a70"}, + {file = "types_aiofiles-24.1.0.20250326.tar.gz", hash = "sha256:c4bbe432fd043911ba83fb635456f5cc54f6d05fda2aadf6bef12a84f07a6efe"}, +] + [[package]] name = "types-beautifulsoup4" version = "4.12.0.20250204" @@ -8886,6 +8871,42 @@ files = [ [package.dependencies] types-html5lib = "*" +[[package]] +name = "types-cachetools" +version = "5.5.0.20240820" +description = "Typing stubs for cachetools" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-cachetools-5.5.0.20240820.tar.gz", hash = "sha256:b888ab5c1a48116f7799cd5004b18474cd82b5463acb5ffb2db2fc9c7b053bc0"}, + {file = "types_cachetools-5.5.0.20240820-py3-none-any.whl", hash = 
"sha256:efb2ed8bf27a4b9d3ed70d33849f536362603a90b8090a328acf0cd42fda82e2"}, +] + +[[package]] +name = "types-colorama" +version = "0.4.15.20240311" +description = "Typing stubs for colorama" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-colorama-0.4.15.20240311.tar.gz", hash = "sha256:a28e7f98d17d2b14fb9565d32388e419f4108f557a7d939a66319969b2b99c7a"}, + {file = "types_colorama-0.4.15.20240311-py3-none-any.whl", hash = "sha256:6391de60ddc0db3f147e31ecb230006a6823e81e380862ffca1e4695c13a0b8e"}, +] + +[[package]] +name = "types-defusedxml" +version = "0.7.0.20240218" +description = "Typing stubs for defusedxml" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-defusedxml-0.7.0.20240218.tar.gz", hash = "sha256:05688a7724dc66ea74c4af5ca0efc554a150c329cb28c13a64902cab878d06ed"}, + {file = "types_defusedxml-0.7.0.20240218-py3-none-any.whl", hash = "sha256:2b7f3c5ca14fdbe728fab0b846f5f7eb98c4bd4fd2b83d25f79e923caa790ced"}, +] + [[package]] name = "types-deprecated" version = "1.2.15.20250304" @@ -8898,16 +8919,28 @@ files = [ {file = "types_deprecated-1.2.15.20250304.tar.gz", hash = "sha256:c329030553029de5cc6cb30f269c11f4e00e598c4241290179f63cda7d33f719"}, ] +[[package]] +name = "types-docutils" +version = "0.21.0.20241128" +description = "Typing stubs for docutils" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types_docutils-0.21.0.20241128-py3-none-any.whl", hash = "sha256:e0409204009639e9b0bf4521eeabe58b5e574ce9c0db08421c2ac26c32be0039"}, + {file = "types_docutils-0.21.0.20241128.tar.gz", hash = "sha256:4dd059805b83ac6ec5a223699195c4e9eeb0446a4f7f2aeff1759a4a7cc17473"}, +] + [[package]] name = "types-flask-cors" -version = "4.0.0.20240828" +version = "5.0.0.20240902" description = "Typing stubs for Flask-Cors" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "types-Flask-Cors-4.0.0.20240828.tar.gz", hash = 
"sha256:f48ecf6366da923331311907cde3500e1435e07df01397ce0ef2306e263a5e85"}, - {file = "types_Flask_Cors-4.0.0.20240828-py3-none-any.whl", hash = "sha256:36b752e88d6517fb82973b4240fe9bde44d29485bbd92dfff762a7101bdac3a0"}, + {file = "types-Flask-Cors-5.0.0.20240902.tar.gz", hash = "sha256:8921b273bf7cd9636df136b66408efcfa6338a935e5c8f53f5eff1cee03f3394"}, + {file = "types_Flask_Cors-5.0.0.20240902-py3-none-any.whl", hash = "sha256:595e5f36056cd128ab905832e055f2e5d116fbdc685356eea4490bc77df82137"}, ] [package.dependencies] @@ -8929,6 +8962,34 @@ files = [ Flask = ">=2.0.0" Flask-SQLAlchemy = ">=3.0.1" +[[package]] +name = "types-gevent" +version = "24.11.0.20250401" +description = "Typing stubs for gevent" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_gevent-24.11.0.20250401-py3-none-any.whl", hash = "sha256:6764faf861ea99250c38179c58076392c44019ac3393029f71b06c4a15e8c1d1"}, + {file = "types_gevent-24.11.0.20250401.tar.gz", hash = "sha256:1443f796a442062698e67d818fca50aa88067dee4021d457a7c0c6bedd6f46ca"}, +] + +[package.dependencies] +types-greenlet = "*" +types-psutil = "*" + +[[package]] +name = "types-greenlet" +version = "3.1.0.20250401" +description = "Typing stubs for greenlet" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_greenlet-3.1.0.20250401-py3-none-any.whl", hash = "sha256:77987f3249b0f21415dc0254057e1ae4125a696a9bba28b0bcb67ee9e3dc14f6"}, + {file = "types_greenlet-3.1.0.20250401.tar.gz", hash = "sha256:949389b64c34ca9472f6335189e9fe0b2e9704436d4f0850e39e9b7145909082"}, +] + [[package]] name = "types-html5lib" version = "1.1.11.20241018" @@ -8941,6 +9002,54 @@ files = [ {file = "types_html5lib-1.1.11.20241018-py3-none-any.whl", hash = "sha256:3f1e064d9ed2c289001ae6392c84c93833abb0816165c6ff0abfc304a779f403"}, ] +[[package]] +name = "types-markdown" +version = "3.7.0.20250322" +description = "Typing stubs for Markdown" +optional = false +python-versions = ">=3.9" 
+groups = ["dev"] +files = [ + {file = "types_markdown-3.7.0.20250322-py3-none-any.whl", hash = "sha256:7e855503027b4290355a310fb834871940d9713da7c111f3e98a5e1cbc77acfb"}, + {file = "types_markdown-3.7.0.20250322.tar.gz", hash = "sha256:a48ed82dfcb6954592a10f104689d2d44df9125ce51b3cee20e0198a5216d55c"}, +] + +[[package]] +name = "types-oauthlib" +version = "3.2.0.20250403" +description = "Typing stubs for oauthlib" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_oauthlib-3.2.0.20250403-py3-none-any.whl", hash = "sha256:02466f91a01522adfa4aaf0d7e76274f00a102eed40034117c5ecae768a2571e"}, + {file = "types_oauthlib-3.2.0.20250403.tar.gz", hash = "sha256:40a4fcfb2e95235e399b5c0dd1cbe9d8c4b19415c09fb54c648d3397e02e0425"}, +] + +[[package]] +name = "types-objgraph" +version = "3.6.0.20240907" +description = "Typing stubs for objgraph" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-objgraph-3.6.0.20240907.tar.gz", hash = "sha256:2e3dee675843ae387889731550b0ddfed06e9420946cf78a4bca565b5fc53634"}, + {file = "types_objgraph-3.6.0.20240907-py3-none-any.whl", hash = "sha256:67207633a9b5789ee1911d740b269c3371081b79c0d8f68b00e7b8539f5c43f5"}, +] + +[[package]] +name = "types-olefile" +version = "0.47.0.20240806" +description = "Typing stubs for olefile" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-olefile-0.47.0.20240806.tar.gz", hash = "sha256:96490f208cbb449a52283855319d73688ba9167ae58858ef8c506bf7ca2c6b67"}, + {file = "types_olefile-0.47.0.20240806-py3-none-any.whl", hash = "sha256:c760a3deab7adb87a80d33b0e4edbbfbab865204a18d5121746022d7f8555118"}, +] + [[package]] name = "types-openpyxl" version = "3.1.5.20250306" @@ -8954,27 +9063,39 @@ files = [ ] [[package]] -name = "types-protobuf" -version = "4.25.0.20240417" -description = "Typing stubs for protobuf" +name = "types-pexpect" +version = "4.9.0.20241208" +description = "Typing stubs for 
pexpect" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "types-protobuf-4.25.0.20240417.tar.gz", hash = "sha256:c34eff17b9b3a0adb6830622f0f302484e4c089f533a46e3f147568313544352"}, - {file = "types_protobuf-4.25.0.20240417-py3-none-any.whl", hash = "sha256:e9b613227c2127e3d4881d75d93c93b4d6fd97b5f6a099a0b654a05351c8685d"}, + {file = "types_pexpect-4.9.0.20241208-py3-none-any.whl", hash = "sha256:1928f478528454f0fea3495c16cf1ee2e67fca5c9fe97d60b868ac48c1fd5633"}, + {file = "types_pexpect-4.9.0.20241208.tar.gz", hash = "sha256:bbca0d0819947a719989a5cfe83641d9212bef893e2f0a7a01e47926bc82401d"}, +] + +[[package]] +name = "types-protobuf" +version = "5.29.1.20250403" +description = "Typing stubs for protobuf" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_protobuf-5.29.1.20250403-py3-none-any.whl", hash = "sha256:c71de04106a2d54e5b2173d0a422058fae0ef2d058d70cf369fb797bf61ffa59"}, + {file = "types_protobuf-5.29.1.20250403.tar.gz", hash = "sha256:7ff44f15022119c9d7558ce16e78b2d485bf7040b4fadced4dd069bb5faf77a2"}, ] [[package]] name = "types-psutil" -version = "7.0.0.20250218" +version = "7.0.0.20250401" description = "Typing stubs for psutil" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_psutil-7.0.0.20250218-py3-none-any.whl", hash = "sha256:1447a30c282aafefcf8941ece854e1100eee7b0296a9d9be9977292f0269b121"}, - {file = "types_psutil-7.0.0.20250218.tar.gz", hash = "sha256:1e642cdafe837b240295b23b1cbd4691d80b08a07d29932143cbbae30eb0db9c"}, + {file = "types_psutil-7.0.0.20250401-py3-none-any.whl", hash = "sha256:ed23f7140368104afe4e05a6085a5fa56fbe8c880a0f4dfe8d63e041106071ed"}, + {file = "types_psutil-7.0.0.20250401.tar.gz", hash = "sha256:2a7d663c0888a079fc1643ebc109ad12e57a21c9552a9e2035da504191336dbf"}, ] [[package]] @@ -8989,6 +9110,33 @@ files = [ {file = "types_psycopg2-2.9.21.20250318.tar.gz", hash = 
"sha256:eb6eac5bfb16adfd5f16b818918b9e26a40ede147e0f2bbffdf53a6ef7025a87"}, ] +[[package]] +name = "types-pygments" +version = "2.19.0.20250305" +description = "Typing stubs for Pygments" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_pygments-2.19.0.20250305-py3-none-any.whl", hash = "sha256:ca88aae5ec426f9b107c0f7adc36dc096d2882d930a49f679eaf4b8b643db35d"}, + {file = "types_pygments-2.19.0.20250305.tar.gz", hash = "sha256:044c50e80ecd4128c00a7268f20355e16f5c55466d3d49dfda09be920af40b4b"}, +] + +[package.dependencies] +types-docutils = "*" + +[[package]] +name = "types-pymysql" +version = "1.1.0.20241103" +description = "Typing stubs for PyMySQL" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-PyMySQL-1.1.0.20241103.tar.gz", hash = "sha256:a7628542919a0ba87625fb79eefb2a2de45fb4ad32afe6e561e8f2f27fb58b8c"}, + {file = "types_PyMySQL-1.1.0.20241103-py3-none-any.whl", hash = "sha256:1a32efd8a74b5bf74c4de92a86c1cc6edaf3802dcfd5546635ab501eb5e3c096"}, +] + [[package]] name = "types-python-dateutil" version = "2.9.0.20241206" @@ -9007,78 +9155,162 @@ version = "2025.1.0.20250318" description = "Typing stubs for pytz" optional = false python-versions = ">=3.9" -groups = ["main", "dev"] +groups = ["main"] files = [ {file = "types_pytz-2025.1.0.20250318-py3-none-any.whl", hash = "sha256:04dba4907c5415777083f9548693c6d9f80ec53adcaff55a38526a3f8ddcae04"}, {file = "types_pytz-2025.1.0.20250318.tar.gz", hash = "sha256:97e0e35184c6fe14e3a5014512057f2c57bb0c6582d63c1cfcc4809f82180449"}, ] [[package]] -name = "types-pyyaml" -version = "6.0.12.20241230" -description = "Typing stubs for PyYAML" +name = "types-pywin32" +version = "310.0.0.20250319" +description = "Typing stubs for pywin32" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = 
"sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"}, - {file = "types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"}, + {file = "types_pywin32-310.0.0.20250319-py3-none-any.whl", hash = "sha256:baeb558a82251f7d430d135036b054740893902fdee3f9fe568322730ff49779"}, + {file = "types_pywin32-310.0.0.20250319.tar.gz", hash = "sha256:4d28fb85b3f268a92905a7242df48c530c847cfe4cdb112386101ab6407660d8"}, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20250402" +description = "Typing stubs for PyYAML" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_pyyaml-6.0.12.20250402-py3-none-any.whl", hash = "sha256:652348fa9e7a203d4b0d21066dfb00760d3cbd5a15ebb7cf8d33c88a49546681"}, + {file = "types_pyyaml-6.0.12.20250402.tar.gz", hash = "sha256:d7c13c3e6d335b6af4b0122a01ff1d270aba84ab96d1a1a1063ecba3e13ec075"}, ] [[package]] name = "types-regex" -version = "2024.11.6.20250318" +version = "2024.11.6.20250403" description = "Typing stubs for regex" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_regex-2024.11.6.20250318-py3-none-any.whl", hash = "sha256:9309fe5918ee7ffe859c04c18040697655fade366c4dc844bbebe86976a9980b"}, - {file = "types_regex-2024.11.6.20250318.tar.gz", hash = "sha256:6d472d0acf37b138cb32f67bd5ab1e7a200e94da8c1aa93ca3625a63e2efe1f3"}, + {file = "types_regex-2024.11.6.20250403-py3-none-any.whl", hash = "sha256:e22c0f67d73f4b4af6086a340f387b6f7d03bed8a0bb306224b75c51a29b0001"}, + {file = "types_regex-2024.11.6.20250403.tar.gz", hash = "sha256:3fdf2a70bbf830de4b3a28e9649a52d43dabb57cdb18fbfe2252eefb53666665"}, ] [[package]] name = "types-requests" -version = "2.31.0.20240406" +version = "2.32.0.20250328" description = "Typing stubs for requests" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types-requests-2.31.0.20240406.tar.gz", 
hash = "sha256:4428df33c5503945c74b3f42e82b181e86ec7b724620419a2966e2de604ce1a1"}, - {file = "types_requests-2.31.0.20240406-py3-none-any.whl", hash = "sha256:6216cdac377c6b9a040ac1c0404f7284bd13199c0e1bb235f4324627e8898cf5"}, + {file = "types_requests-2.32.0.20250328-py3-none-any.whl", hash = "sha256:72ff80f84b15eb3aa7a8e2625fffb6a93f2ad5a0c20215fc1dcfa61117bcb2a2"}, + {file = "types_requests-2.32.0.20250328.tar.gz", hash = "sha256:c9e67228ea103bd811c96984fac36ed2ae8da87a36a633964a21f199d60baf32"}, ] [package.dependencies] urllib3 = ">=2" +[[package]] +name = "types-requests-oauthlib" +version = "2.0.0.20250306" +description = "Typing stubs for requests-oauthlib" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_requests_oauthlib-2.0.0.20250306-py3-none-any.whl", hash = "sha256:37707de81d9ce54894afcccd70d4a845dbe4c59e747908faaeba59a96453d993"}, + {file = "types_requests_oauthlib-2.0.0.20250306.tar.gz", hash = "sha256:92e5f1ed35689b1804fdcd60b7ac39b0bd440a4b96693685879bc835b334797f"}, +] + +[package.dependencies] +types-oauthlib = "*" +types-requests = "*" + +[[package]] +name = "types-shapely" +version = "2.0.0.20250404" +description = "Typing stubs for shapely" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_shapely-2.0.0.20250404-py3-none-any.whl", hash = "sha256:170fb92f5c168a120db39b3287697fdec5c93ef3e1ad15e52552c36b25318821"}, + {file = "types_shapely-2.0.0.20250404.tar.gz", hash = "sha256:863f540b47fa626c33ae64eae06df171f9ab0347025d4458d2df496537296b4f"}, +] + +[package.dependencies] +numpy = ">=1.20" + +[[package]] +name = "types-simplejson" +version = "3.20.0.20250326" +description = "Typing stubs for simplejson" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_simplejson-3.20.0.20250326-py3-none-any.whl", hash = "sha256:db1ddea7b8f7623b27a137578f22fc6c618db8c83ccfb1828ca0d2f0ec11efa7"}, + {file = 
"types_simplejson-3.20.0.20250326.tar.gz", hash = "sha256:b2689bc91e0e672d7a5a947b4cb546b76ae7ddc2899c6678e72a10bf96cd97d2"}, +] + [[package]] name = "types-six" -version = "1.17.0.20250304" +version = "1.17.0.20250403" description = "Typing stubs for six" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_six-1.17.0.20250304-py3-none-any.whl", hash = "sha256:e482df1d439375f4b7c1f2540b1b8584aea82850164a296203ead4a7024fe14f"}, - {file = "types_six-1.17.0.20250304.tar.gz", hash = "sha256:eeb240f9faec63ddd0498d6c0b6abd0496b154a66f960c004d4d733cf31bb4bd"}, + {file = "types_six-1.17.0.20250403-py3-none-any.whl", hash = "sha256:0bbb20fc34a18163afe7cac70b85864bd6937e6d73413c5b8f424def28760ae8"}, + {file = "types_six-1.17.0.20250403.tar.gz", hash = "sha256:82076f86e6e672a95adbf8b52625b1b3c72a8b9a893180344c1a02a6daabead6"}, ] +[[package]] +name = "types-tensorflow" +version = "2.18.0.20250404" +description = "Typing stubs for tensorflow" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_tensorflow-2.18.0.20250404-py3-none-any.whl", hash = "sha256:4ad86534e6cfd6b36b2c97239ef9d122c44b167b25630b7c873a1483f9befd15"}, + {file = "types_tensorflow-2.18.0.20250404.tar.gz", hash = "sha256:b38a427bbec805e4879d248f070baea802673c04cc5ccbe5979d742faa160670"}, +] + +[package.dependencies] +numpy = ">=1.20" +types-protobuf = "*" +types-requests = "*" + [[package]] name = "types-tqdm" -version = "4.67.0.20250301" +version = "4.67.0.20250404" description = "Typing stubs for tqdm" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_tqdm-4.67.0.20250301-py3-none-any.whl", hash = "sha256:8af97deb8e6874af833555dc1fe0fcd456b1a789470bf6cd8813d4e7ee4f6c5b"}, - {file = "types_tqdm-4.67.0.20250301.tar.gz", hash = "sha256:5e89a38ad89b867823368eb97d9f90d2fc69806bb055dde62716a05da62b5e0d"}, + {file = "types_tqdm-4.67.0.20250404-py3-none-any.whl", hash = 
"sha256:4a9b897eb4036f757240f4cb4a794f296265c04de46fdd058e453891f0186eed"}, + {file = "types_tqdm-4.67.0.20250404.tar.gz", hash = "sha256:e9997c655ffbba3ab78f4418b5511c05a54e76824d073d212166dc73aa56c768"}, ] [package.dependencies] types-requests = "*" +[[package]] +name = "types-ujson" +version = "5.10.0.20250326" +description = "Typing stubs for ujson" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_ujson-5.10.0.20250326-py3-none-any.whl", hash = "sha256:acc0913f569def62ef6a892c8a47703f65d05669a3252391a97765cf207dca5b"}, + {file = "types_ujson-5.10.0.20250326.tar.gz", hash = "sha256:5469e05f2c31ecb3c4c0267cc8fe41bcd116826fbb4ded69801a645c687dd014"}, +] + [[package]] name = "typing-extensions" version = "4.12.2" @@ -10160,4 +10392,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.1" python-versions = ">=3.11,<3.13" -content-hash = "5aaeadfb807cfc51a987065213562eba1f0d9552733abbff915062c6a3483a9d" +content-hash = "8ffd55020aab4ae38c8b5a4fffe670ce2a6a78bbbe15f70a5bc6ade0af339f3e" diff --git a/api/pyproject.toml b/api/pyproject.toml index 0783af6fe8..e35717956f 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -1,7 +1,7 @@ [project] name = "dify-api" requires-python = ">=3.11,<3.13" -dynamic = [ "dependencies" ] +dynamic = ["dependencies"] [build-system] requires = ["poetry-core>=2.0.0"] @@ -74,7 +74,7 @@ starlette = "0.41.0" tiktoken = "~0.8.0" tokenizers = "~0.15.0" transformers = "~4.35.0" -unstructured = { version = "~0.16.1", extras = ["docx", "epub", "md", "msg", "ppt", "pptx"] } +unstructured = { version = "~0.16.1", extras = ["docx", "epub", "md", "ppt", "pptx"] } validators = "0.21.0" yarl = "~1.18.3" # Before adding new dependency, consider place it in alphabet order (a-z) and suitable group. 
@@ -148,27 +148,47 @@ optional = true [tool.poetry.group.dev.dependencies] coverage = "~7.2.4" faker = "~32.1.0" -mypy = "~1.13.0" +lxml-stubs = "~0.5.1" +mypy = "~1.15.0" pytest = "~8.3.2" pytest-benchmark = "~4.0.0" pytest-env = "~1.1.3" pytest-mock = "~3.14.0" +types-aiofiles = "~24.1.0" types-beautifulsoup4 = "~4.12.0" +types-cachetools = "~5.5.0" +types-colorama = "~0.4.15" +types-defusedxml = "~0.7.0" types-deprecated = "~1.2.15" -types-flask-cors = "~4.0.0" +types-docutils = "~0.21.0" +types-flask-cors = "~5.0.0" types-flask-migrate = "~4.1.0" +types-gevent = "~24.11.0" +types-greenlet = "~3.1.0" types-html5lib = "~1.1.11" +types-markdown = "~3.7.0" +types-oauthlib = "~3.2.0" +types-objgraph = "~3.6.0" +types-olefile = "~0.47.0" types-openpyxl = "~3.1.5" -types-protobuf = "~4.25.0" +types-pexpect = "~4.9.0" +types-protobuf = "~5.29.1" types-psutil = "~7.0.0" types-psycopg2 = "~2.9.21" +types-pygments = "~2.19.0" +types-pymysql = "~1.1.0" types-python-dateutil = "~2.9.0" -types-pytz = "~2025.1" -types-pyyaml = "~6.0.2" +types-pywin32 = "~310.0.0" +types-pyyaml = "~6.0.12" types-regex = "~2024.11.6" -types-requests = "~2.31.0" +types-requests = "~2.32.0" +types-requests-oauthlib = "~2.0.0" +types-shapely = "~2.0.0" +types-simplejson = "~3.20.0" types-six = "~1.17.0" +types-tensorflow = "~2.18.0" types-tqdm = "~4.67.0" +types-ujson = "~5.10.0" ############################################################ # [ Lint ] dependency group diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index 61dc86a028..b019cf6b63 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -1786,12 +1786,8 @@ class SegmentService: ) elif document.doc_form in (IndexType.PARAGRAPH_INDEX, IndexType.QA_INDEX): if args.enabled or keyword_changed: - VectorService.create_segments_vector( - [args.keywords] if args.keywords else None, - [segment], - dataset, - document.doc_form, - ) + # update segment vector index + 
VectorService.update_segment_vector(args.keywords, segment, dataset) else: segment_hash = helper.generate_text_hash(content) tokens = 0 diff --git a/api/services/file_service.py b/api/services/file_service.py index 284e96c97a..b4442c36c3 100644 --- a/api/services/file_service.py +++ b/api/services/file_service.py @@ -92,6 +92,11 @@ class FileService: db.session.add(upload_file) db.session.commit() + if not upload_file.source_url: + upload_file.source_url = file_helpers.get_signed_file_url(upload_file_id=upload_file.id) + db.session.add(upload_file) + db.session.commit() + return upload_file @staticmethod diff --git a/api/services/hit_testing_service.py b/api/services/hit_testing_service.py index f8c1c1d297..0b98065f5d 100644 --- a/api/services/hit_testing_service.py +++ b/api/services/hit_testing_service.py @@ -29,15 +29,6 @@ class HitTestingService: external_retrieval_model: dict, limit: int = 10, ) -> dict: - if dataset.available_document_count == 0 or dataset.available_segment_count == 0: - return { - "query": { - "content": query, - "tsne_position": {"x": 0, "y": 0}, - }, - "records": [], - } - start = time.perf_counter() # get retrieval model , if the model is not setting , using default diff --git a/api/services/plugin/data_migration.py b/api/services/plugin/data_migration.py index 7228a16632..597585588b 100644 --- a/api/services/plugin/data_migration.py +++ b/api/services/plugin/data_migration.py @@ -127,18 +127,32 @@ limit 1000""" processed_count = 0 failed_ids = [] + last_id = "00000000-0000-0000-0000-000000000000" + while True: - sql = f"""select id, {provider_column_name} as provider_name from {table_name} -where {provider_column_name} not like '%/%' and {provider_column_name} is not null and {provider_column_name} != '' -limit 1000""" + sql = f""" + SELECT id, {provider_column_name} AS provider_name + FROM {table_name} + WHERE {provider_column_name} NOT LIKE '%/%' + AND {provider_column_name} IS NOT NULL + AND {provider_column_name} != '' + AND id > 
:last_id + ORDER BY id ASC + LIMIT 5000 + """ + params = {"last_id": last_id or ""} + with db.engine.begin() as conn: - rs = conn.execute(db.text(sql)) + rs = conn.execute(db.text(sql), params) current_iter_count = 0 + batch_updates = [] + for i in rs: current_iter_count += 1 processed_count += 1 record_id = str(i.id) + last_id = record_id provider_name = str(i.provider_name) if record_id in failed_ids: @@ -152,19 +166,9 @@ limit 1000""" ) try: - # update provider name append with "langgenius/{provider_name}/{provider_name}" - sql = f"""update {table_name} - set {provider_column_name} = - concat('{DEFAULT_PLUGIN_ID}/', {provider_column_name}, '/', {provider_column_name}) - where id = :record_id""" - conn.execute(db.text(sql), {"record_id": record_id}) - click.echo( - click.style( - f"[{processed_count}] Migrated [{table_name}] {record_id} ({provider_name})", - fg="green", - ) - ) - except Exception: + updated_value = f"{DEFAULT_PLUGIN_ID}/{provider_name}/{provider_name}" + batch_updates.append((updated_value, record_id)) + except Exception as e: failed_ids.append(record_id) click.echo( click.style( @@ -177,6 +181,20 @@ limit 1000""" ) continue + if batch_updates: + update_sql = f""" + UPDATE {table_name} + SET {provider_column_name} = :updated_value + WHERE id = :record_id + """ + conn.execute(db.text(update_sql), [{"updated_value": u, "record_id": r} for u, r in batch_updates]) + click.echo( + click.style( + f"[{processed_count}] Batch migrated [{len(batch_updates)}] records from [{table_name}]", + fg="green", + ) + ) + if not current_iter_count: break diff --git a/api/services/plugin/plugin_service.py b/api/services/plugin/plugin_service.py index 749bb1a5b4..25d192410f 100644 --- a/api/services/plugin/plugin_service.py +++ b/api/services/plugin/plugin_service.py @@ -94,6 +94,13 @@ class PluginService: manager = PluginDebuggingManager() return manager.get_debugging_key(tenant_id) + @staticmethod + def list_latest_versions(plugin_ids: Sequence[str]) -> Mapping[str, 
Optional[LatestPluginCache]]: + """ + List the latest versions of the plugins + """ + return PluginService.fetch_latest_plugin_version(plugin_ids) + @staticmethod def list(tenant_id: str) -> list[PluginEntity]: """ @@ -101,22 +108,6 @@ class PluginService: """ manager = PluginInstallationManager() plugins = manager.list_plugins(tenant_id) - plugin_ids = [plugin.plugin_id for plugin in plugins if plugin.source == PluginInstallationSource.Marketplace] - try: - manifests = PluginService.fetch_latest_plugin_version(plugin_ids) - except Exception: - manifests = {} - logger.exception("failed to fetch plugin manifests") - - for plugin in plugins: - if plugin.source == PluginInstallationSource.Marketplace: - if plugin.plugin_id in manifests: - latest_plugin_cache = manifests[plugin.plugin_id] - if latest_plugin_cache: - # set latest_version - plugin.latest_version = latest_plugin_cache.version - plugin.latest_unique_identifier = latest_plugin_cache.unique_identifier - return plugins @staticmethod diff --git a/api/tasks/add_document_to_index_task.py b/api/tasks/add_document_to_index_task.py index 0b7d2ad31f..be88881efc 100644 --- a/api/tasks/add_document_to_index_task.py +++ b/api/tasks/add_document_to_index_task.py @@ -37,6 +37,10 @@ def add_document_to_index_task(dataset_document_id: str): indexing_cache_key = "document_{}_indexing".format(dataset_document.id) try: + dataset = dataset_document.dataset + if not dataset: + raise Exception(f"Document {dataset_document.id} dataset {dataset_document.dataset_id} doesn't exist.") + segments = ( db.session.query(DocumentSegment) .filter( @@ -77,11 +81,6 @@ def add_document_to_index_task(dataset_document_id: str): document.children = child_documents documents.append(document) - dataset = dataset_document.dataset - - if not dataset: - raise Exception("Document has no dataset") - index_type = dataset.doc_form index_processor = IndexProcessorFactory(index_type).init_index_processor() index_processor.load(dataset, documents) diff 
--git a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py index ca055f5cc5..5c6bb82024 100644 --- a/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py +++ b/api/tests/integration_tests/workflow/nodes/test_parameter_extractor.py @@ -5,6 +5,7 @@ from typing import Optional from unittest.mock import MagicMock from core.app.entities.app_invoke_entities import InvokeFrom +from core.model_runtime.entities import AssistantPromptMessage from core.workflow.entities.variable_pool import VariablePool from core.workflow.enums import SystemVariableKey from core.workflow.graph_engine.entities.graph import Graph @@ -311,6 +312,46 @@ def test_extract_json_response(): assert result["location"] == "kawaii" +def test_extract_json_from_tool_call(): + """ + Test extract json response. + """ + + node = init_parameter_extractor_node( + config={ + "id": "llm", + "data": { + "title": "123", + "type": "parameter-extractor", + "model": { + "provider": "langgenius/openai/openai", + "name": "gpt-3.5-turbo-instruct", + "mode": "completion", + "completion_params": {}, + }, + "query": ["sys", "query"], + "parameters": [{"name": "location", "type": "string", "description": "location", "required": True}], + "reasoning_mode": "prompt", + "instruction": "{{#sys.query#}}", + "memory": None, + }, + }, + ) + + result = node._extract_json_from_tool_call( + AssistantPromptMessage.ToolCall( + id="llm", + type="parameter-extractor", + function=AssistantPromptMessage.ToolCall.ToolCallFunction( + name="foo", arguments="""{"location":"kawaii"}{"location": 1}""" + ), + ) + ) + + assert result is not None + assert result["location"] == "kawaii" + + def test_chat_parameter_extractor_with_memory(setup_model_mock): """ Test chat parameter extractor with memory. 
diff --git a/dev/reformat b/dev/reformat index 82f96b8e8f..daab538951 100755 --- a/dev/reformat +++ b/dev/reformat @@ -16,3 +16,6 @@ poetry run -C api ruff format ./ # run dotenv-linter linter poetry run -P api dotenv-linter ./api/.env.example ./web/.env.example + +# run mypy check +dev/run-mypy diff --git a/dev/run-mypy b/dev/run-mypy new file mode 100755 index 0000000000..cdbbef515d --- /dev/null +++ b/dev/run-mypy @@ -0,0 +1,11 @@ +#!/bin/bash + +set -x + +if ! command -v mypy &> /dev/null; then + poetry install -C api --with dev +fi + +# run mypy checks +poetry run -C api \ + python -m mypy --install-types --non-interactive . diff --git a/docker/.env.example b/docker/.env.example index 8791e6cc8e..d65b625681 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -75,7 +75,7 @@ SECRET_KEY=sk-9f73s3ljTXVcMT3Blb3ljTqtsKiGHXVcMT3BlbkFJLK7U # Password for admin user initialization. # If left unset, admin user will not be prompted for a password -# when creating the initial admin account. +# when creating the initial admin account. # The length of the password cannot exceed 30 characters. INIT_PASSWORD= @@ -605,13 +605,13 @@ SCARF_NO_ANALYTICS=true # ------------------------------ # The maximum number of tokens allowed for prompt generation. -# This setting controls the upper limit of tokens that can be used by the LLM +# This setting controls the upper limit of tokens that can be used by the LLM # when generating a prompt in the prompt generation tool. # Default: 512 tokens. PROMPT_GENERATION_MAX_TOKENS=512 # The maximum number of tokens allowed for code generation. -# This setting controls the upper limit of tokens that can be used by the LLM +# This setting controls the upper limit of tokens that can be used by the LLM # when generating code in the code generation tool. # Default: 1024 tokens. CODE_GENERATION_MAX_TOKENS=1024 @@ -621,6 +621,10 @@ CODE_GENERATION_MAX_TOKENS=1024 # when generating structured output in the structured output tool. 
# Default: 1024 tokens. STRUCTURED_OUTPUT_MAX_TOKENS=1024 +# Enable or disable plugin based token counting. If disabled, token counting will return 0. +# This can improve performance by skipping token counting operations. +# Default: false (disabled). +PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false # ------------------------------ # Multi-modal Configuration @@ -1011,3 +1015,28 @@ PLUGIN_PYTHON_ENV_INIT_TIMEOUT=120 PLUGIN_MAX_EXECUTION_TIMEOUT=600 # PIP_MIRROR_URL=https://pypi.tuna.tsinghua.edu.cn/simple PIP_MIRROR_URL= + +# https://github.com/langgenius/dify-plugin-daemon/blob/main/.env.example +# Plugin storage type, local aws_s3 tencent_cos azure_blob +PLUGIN_STORAGE_TYPE=local +PLUGIN_STORAGE_LOCAL_ROOT=/app/storage +PLUGIN_WORKING_PATH=/app/storage/cwd +PLUGIN_INSTALLED_PATH=plugin +PLUGIN_PACKAGE_CACHE_PATH=plugin_packages +PLUGIN_MEDIA_CACHE_PATH=assets +# Plugin oss bucket +PLUGIN_STORAGE_OSS_BUCKET= +# Plugin oss s3 credentials +PLUGIN_S3_USE_AWS_MANAGED_IAM=false +PLUGIN_S3_ENDPOINT= +PLUGIN_S3_USE_PATH_STYLE=false +PLUGIN_AWS_ACCESS_KEY= +PLUGIN_AWS_SECRET_KEY= +PLUGIN_AWS_REGION= +# Plugin oss azure blob +PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME= +PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING= +# Plugin oss tencent cos +PLUGIN_TENCENT_COS_SECRET_KEY= +PLUGIN_TENCENT_COS_SECRET_ID= +PLUGIN_TENCENT_COS_REGION= diff --git a/docker/docker-compose-template.yaml b/docker/docker-compose-template.yaml index e8ed382917..ef58bf99f3 100644 --- a/docker/docker-compose-template.yaml +++ b/docker/docker-compose-template.yaml @@ -2,7 +2,7 @@ x-shared-env: &shared-api-worker-env services: # API service api: - image: langgenius/dify-api:1.1.3 + image: langgenius/dify-api:1.2.0 restart: always environment: # Use the shared environment variables. @@ -29,7 +29,7 @@ services: # worker service # The Celery worker for processing the queue. 
worker: - image: langgenius/dify-api:1.1.3 + image: langgenius/dify-api:1.2.0 restart: always environment: # Use the shared environment variables. @@ -53,7 +53,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.1.3 + image: langgenius/dify-web:1.2.0 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -134,7 +134,7 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.0.6-local + image: langgenius/dify-plugin-daemon:0.0.7-local restart: always environment: # Use the shared environment variables. @@ -153,6 +153,23 @@ services: PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120} PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600} PIP_MIRROR_URL: ${PIP_MIRROR_URL:-} + PLUGIN_STORAGE_TYPE: ${PLUGIN_STORAGE_TYPE:-local} + PLUGIN_STORAGE_LOCAL_ROOT: ${PLUGIN_STORAGE_LOCAL_ROOT:-/app/storage} + PLUGIN_INSTALLED_PATH: ${PLUGIN_INSTALLED_PATH:-plugin} + PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages} + PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets} + PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-} + S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false} + S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-} + S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false} + AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-} + PAWS_SECRET_KEY: ${PLUGIN_AWS_SECRET_KEY:-} + AWS_REGION: ${PLUGIN_AWS_REGION:-} + AZURE_BLOB_STORAGE_CONNECTION_STRING: ${PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING:-} + AZURE_BLOB_STORAGE_CONTAINER_NAME: ${PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME:-} + TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-} + TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-} + TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-} ports: - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" volumes: diff --git a/docker/docker-compose.middleware.yaml b/docker/docker-compose.middleware.yaml index b4f772cc82..bfd2354314 
100644 --- a/docker/docker-compose.middleware.yaml +++ b/docker/docker-compose.middleware.yaml @@ -29,6 +29,8 @@ services: redis: image: redis:6-alpine restart: always + env_file: + - ./middleware.env environment: REDISCLI_AUTH: ${REDIS_PASSWORD:-difyai123456} volumes: @@ -45,6 +47,8 @@ services: sandbox: image: langgenius/dify-sandbox:0.2.11 restart: always + env_file: + - ./middleware.env environment: # The DifySandbox configurations # Make sure you are changing this key for your deployment with a strong key. @@ -66,8 +70,10 @@ services: # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.0.6-local + image: langgenius/dify-plugin-daemon:0.0.7-local restart: always + env_file: + - ./middleware.env environment: # Use the shared environment variables. DB_HOST: ${DB_HOST:-db} @@ -91,6 +97,23 @@ services: PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120} PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600} PIP_MIRROR_URL: ${PIP_MIRROR_URL:-} + PLUGIN_STORAGE_TYPE: ${PLUGIN_STORAGE_TYPE:-local} + PLUGIN_STORAGE_LOCAL_ROOT: ${PLUGIN_STORAGE_LOCAL_ROOT:-/app/storage} + PLUGIN_INSTALLED_PATH: ${PLUGIN_INSTALLED_PATH:-plugin} + PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages} + PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets} + PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-} + S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false} + S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-} + S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false} + AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-} + PAWS_SECRET_KEY: ${PLUGIN_AWS_SECRET_KEY:-} + AWS_REGION: ${PLUGIN_AWS_REGION:-} + AZURE_BLOB_STORAGE_CONNECTION_STRING: ${PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING:-} + AZURE_BLOB_STORAGE_CONTAINER_NAME: ${PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME:-} + TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-} + TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-} + TENCENT_COS_REGION: 
${PLUGIN_TENCENT_COS_REGION:-} ports: - "${EXPOSE_PLUGIN_DAEMON_PORT:-5002}:${PLUGIN_DAEMON_PORT:-5002}" - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" @@ -107,6 +130,8 @@ services: - ./ssrf_proxy/squid.conf.template:/etc/squid/squid.conf.template - ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint-mount.sh entrypoint: [ "sh", "-c", "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ] + env_file: + - ./middleware.env environment: # pls clearly modify the squid env vars to fit your network environment. HTTP_PORT: ${SSRF_HTTP_PORT:-3128} diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index 818c7041e4..c9f3aa2f27 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -277,6 +277,7 @@ x-shared-env: &shared-api-worker-env PROMPT_GENERATION_MAX_TOKENS: ${PROMPT_GENERATION_MAX_TOKENS:-512} CODE_GENERATION_MAX_TOKENS: ${CODE_GENERATION_MAX_TOKENS:-1024} STRUCTURED_OUTPUT_MAX_TOKENS: ${STRUCTURED_OUTPUT_MAX_TOKENS:-1024} + PLUGIN_BASED_TOKEN_COUNTING_ENABLED: ${PLUGIN_BASED_TOKEN_COUNTING_ENABLED:-false} MULTIMODAL_SEND_FORMAT: ${MULTIMODAL_SEND_FORMAT:-base64} UPLOAD_IMAGE_FILE_SIZE_LIMIT: ${UPLOAD_IMAGE_FILE_SIZE_LIMIT:-10} UPLOAD_VIDEO_FILE_SIZE_LIMIT: ${UPLOAD_VIDEO_FILE_SIZE_LIMIT:-100} @@ -437,11 +438,29 @@ x-shared-env: &shared-api-worker-env PLUGIN_PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120} PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600} PIP_MIRROR_URL: ${PIP_MIRROR_URL:-} + PLUGIN_STORAGE_TYPE: ${PLUGIN_STORAGE_TYPE:-local} + PLUGIN_STORAGE_LOCAL_ROOT: ${PLUGIN_STORAGE_LOCAL_ROOT:-/app/storage} + PLUGIN_WORKING_PATH: ${PLUGIN_WORKING_PATH:-/app/storage/cwd} + PLUGIN_INSTALLED_PATH: ${PLUGIN_INSTALLED_PATH:-plugin} + PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages} + PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets} + 
PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-} + PLUGIN_S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false} + PLUGIN_S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-} + PLUGIN_S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false} + PLUGIN_AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-} + PLUGIN_AWS_SECRET_KEY: ${PLUGIN_AWS_SECRET_KEY:-} + PLUGIN_AWS_REGION: ${PLUGIN_AWS_REGION:-} + PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME: ${PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME:-} + PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING: ${PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING:-} + PLUGIN_TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-} + PLUGIN_TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-} + PLUGIN_TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-} services: # API service api: - image: langgenius/dify-api:1.1.3 + image: langgenius/dify-api:1.2.0 restart: always environment: # Use the shared environment variables. @@ -468,7 +487,7 @@ services: # worker service # The Celery worker for processing the queue. worker: - image: langgenius/dify-api:1.1.3 + image: langgenius/dify-api:1.2.0 restart: always environment: # Use the shared environment variables. @@ -492,7 +511,7 @@ services: # Frontend web application. web: - image: langgenius/dify-web:1.1.3 + image: langgenius/dify-web:1.2.0 restart: always environment: CONSOLE_API_URL: ${CONSOLE_API_URL:-} @@ -529,7 +548,7 @@ services: volumes: - ./volumes/db/data:/var/lib/postgresql/data healthcheck: - test: [ 'CMD', 'pg_isready' ] + test: ["CMD", "pg_isready"] interval: 1s timeout: 3s retries: 30 @@ -546,7 +565,7 @@ services: # Set the redis password when startup redis server. 
command: redis-server --requirepass ${REDIS_PASSWORD:-difyai123456} healthcheck: - test: [ 'CMD', 'redis-cli', 'ping' ] + test: ["CMD", "redis-cli", "ping"] # The DifySandbox sandbox: @@ -567,13 +586,13 @@ services: - ./volumes/sandbox/dependencies:/dependencies - ./volumes/sandbox/conf:/conf healthcheck: - test: [ 'CMD', 'curl', '-f', 'http://localhost:8194/health' ] + test: ["CMD", "curl", "-f", "http://localhost:8194/health"] networks: - ssrf_proxy_network # plugin daemon plugin_daemon: - image: langgenius/dify-plugin-daemon:0.0.6-local + image: langgenius/dify-plugin-daemon:0.0.7-local restart: always environment: # Use the shared environment variables. @@ -592,6 +611,23 @@ services: PYTHON_ENV_INIT_TIMEOUT: ${PLUGIN_PYTHON_ENV_INIT_TIMEOUT:-120} PLUGIN_MAX_EXECUTION_TIMEOUT: ${PLUGIN_MAX_EXECUTION_TIMEOUT:-600} PIP_MIRROR_URL: ${PIP_MIRROR_URL:-} + PLUGIN_STORAGE_TYPE: ${PLUGIN_STORAGE_TYPE:-local} + PLUGIN_STORAGE_LOCAL_ROOT: ${PLUGIN_STORAGE_LOCAL_ROOT:-/app/storage} + PLUGIN_INSTALLED_PATH: ${PLUGIN_INSTALLED_PATH:-plugin} + PLUGIN_PACKAGE_CACHE_PATH: ${PLUGIN_PACKAGE_CACHE_PATH:-plugin_packages} + PLUGIN_MEDIA_CACHE_PATH: ${PLUGIN_MEDIA_CACHE_PATH:-assets} + PLUGIN_STORAGE_OSS_BUCKET: ${PLUGIN_STORAGE_OSS_BUCKET:-} + S3_USE_AWS_MANAGED_IAM: ${PLUGIN_S3_USE_AWS_MANAGED_IAM:-false} + S3_ENDPOINT: ${PLUGIN_S3_ENDPOINT:-} + S3_USE_PATH_STYLE: ${PLUGIN_S3_USE_PATH_STYLE:-false} + AWS_ACCESS_KEY: ${PLUGIN_AWS_ACCESS_KEY:-} + PAWS_SECRET_KEY: ${PLUGIN_AWS_SECRET_KEY:-} + AWS_REGION: ${PLUGIN_AWS_REGION:-} + AZURE_BLOB_STORAGE_CONNECTION_STRING: ${PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING:-} + AZURE_BLOB_STORAGE_CONTAINER_NAME: ${PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME:-} + TENCENT_COS_SECRET_KEY: ${PLUGIN_TENCENT_COS_SECRET_KEY:-} + TENCENT_COS_SECRET_ID: ${PLUGIN_TENCENT_COS_SECRET_ID:-} + TENCENT_COS_REGION: ${PLUGIN_TENCENT_COS_REGION:-} ports: - "${EXPOSE_PLUGIN_DEBUGGING_PORT:-5003}:${PLUGIN_DEBUGGING_PORT:-5003}" volumes: @@ -608,7 +644,12 @@ services: 
volumes: - ./ssrf_proxy/squid.conf.template:/etc/squid/squid.conf.template - ./ssrf_proxy/docker-entrypoint.sh:/docker-entrypoint-mount.sh - entrypoint: [ 'sh', '-c', "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ] + entrypoint: + [ + "sh", + "-c", + "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh", + ] environment: # pls clearly modify the squid env vars to fit your network environment. HTTP_PORT: ${SSRF_HTTP_PORT:-3128} @@ -637,8 +678,8 @@ services: - CERTBOT_EMAIL=${CERTBOT_EMAIL} - CERTBOT_DOMAIN=${CERTBOT_DOMAIN} - CERTBOT_OPTIONS=${CERTBOT_OPTIONS:-} - entrypoint: [ '/docker-entrypoint.sh' ] - command: [ 'tail', '-f', '/dev/null' ] + entrypoint: ["/docker-entrypoint.sh"] + command: ["tail", "-f", "/dev/null"] # The nginx reverse proxy. # used for reverse proxying the API service and Web service. 
@@ -655,7 +696,12 @@ services: - ./volumes/certbot/conf/live:/etc/letsencrypt/live # cert dir (with certbot container) - ./volumes/certbot/conf:/etc/letsencrypt - ./volumes/certbot/www:/var/www/html - entrypoint: [ 'sh', '-c', "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh" ] + entrypoint: + [ + "sh", + "-c", + "cp /docker-entrypoint-mount.sh /docker-entrypoint.sh && sed -i 's/\r$$//' /docker-entrypoint.sh && chmod +x /docker-entrypoint.sh && /docker-entrypoint.sh", + ] environment: NGINX_SERVER_NAME: ${NGINX_SERVER_NAME:-_} NGINX_HTTPS_ENABLED: ${NGINX_HTTPS_ENABLED:-false} @@ -677,14 +723,14 @@ services: - api - web ports: - - '${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}' - - '${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}' + - "${EXPOSE_NGINX_PORT:-80}:${NGINX_PORT:-80}" + - "${EXPOSE_NGINX_SSL_PORT:-443}:${NGINX_SSL_PORT:-443}" # The Weaviate vector store. weaviate: image: semitechnologies/weaviate:1.19.0 profiles: - - '' + - "" - weaviate restart: always volumes: @@ -737,13 +783,17 @@ services: working_dir: /opt/couchbase stdin_open: true tty: true - entrypoint: [ "" ] + entrypoint: [""] command: sh -c "/opt/couchbase/init/init-cbserver.sh" volumes: - ./volumes/couchbase/data:/opt/couchbase/var/lib/couchbase/data healthcheck: # ensure bucket was created before proceeding - test: [ "CMD-SHELL", "curl -s -f -u Administrator:password http://localhost:8091/pools/default/buckets | grep -q '\\[{' || exit 1" ] + test: + [ + "CMD-SHELL", + "curl -s -f -u Administrator:password http://localhost:8091/pools/default/buckets | grep -q '\\[{' || exit 1", + ] interval: 10s retries: 10 start_period: 30s @@ -769,9 +819,9 @@ services: volumes: - ./volumes/pgvector/data:/var/lib/postgresql/data - ./pgvector/docker-entrypoint.sh:/docker-entrypoint.sh - entrypoint: [ '/docker-entrypoint.sh' ] + entrypoint: ["/docker-entrypoint.sh"] healthcheck: - test: [ 'CMD', 
'pg_isready' ] + test: ["CMD", "pg_isready"] interval: 1s timeout: 3s retries: 30 @@ -793,7 +843,7 @@ services: volumes: - ./volumes/pgvecto_rs/data:/var/lib/postgresql/data healthcheck: - test: [ 'CMD', 'pg_isready' ] + test: ["CMD", "pg_isready"] interval: 1s timeout: 3s retries: 30 @@ -861,7 +911,7 @@ services: - ./volumes/milvus/etcd:/etcd command: etcd -advertise-client-urls=http://127.0.0.1:2379 -listen-client-urls http://0.0.0.0:2379 --data-dir /etcd healthcheck: - test: [ 'CMD', 'etcdctl', 'endpoint', 'health' ] + test: ["CMD", "etcdctl", "endpoint", "health"] interval: 30s timeout: 20s retries: 3 @@ -880,7 +930,7 @@ services: - ./volumes/milvus/minio:/minio_data command: minio server /minio_data --console-address ":9001" healthcheck: - test: [ 'CMD', 'curl', '-f', 'http://localhost:9000/minio/health/live' ] + test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"] interval: 30s timeout: 20s retries: 3 @@ -892,7 +942,7 @@ services: image: milvusdb/milvus:v2.5.0-beta profiles: - milvus - command: [ 'milvus', 'run', 'standalone' ] + command: ["milvus", "run", "standalone"] environment: ETCD_ENDPOINTS: ${ETCD_ENDPOINTS:-etcd:2379} MINIO_ADDRESS: ${MINIO_ADDRESS:-minio:9000} @@ -900,7 +950,7 @@ services: volumes: - ./volumes/milvus/milvus:/var/lib/milvus healthcheck: - test: [ 'CMD', 'curl', '-f', 'http://localhost:9091/healthz' ] + test: ["CMD", "curl", "-f", "http://localhost:9091/healthz"] interval: 30s start_period: 90s timeout: 20s @@ -1007,18 +1057,19 @@ services: node.name: dify-es0 discovery.type: single-node xpack.license.self_generated.type: basic - xpack.security.enabled: 'true' - xpack.security.enrollment.enabled: 'false' - xpack.security.http.ssl.enabled: 'false' + xpack.security.enabled: "true" + xpack.security.enrollment.enabled: "false" + xpack.security.http.ssl.enabled: "false" ports: - ${ELASTICSEARCH_PORT:-9200}:9200 deploy: resources: limits: memory: 2g - entrypoint: [ 'sh', '-c', "sh /docker-entrypoint-mount.sh" ] + 
entrypoint: ["sh", "-c", "sh /docker-entrypoint-mount.sh"] healthcheck: - test: [ 'CMD', 'curl', '-s', 'http://localhost:9200/_cluster/health?pretty' ] + test: + ["CMD", "curl", "-s", "http://localhost:9200/_cluster/health?pretty"] interval: 30s timeout: 10s retries: 50 @@ -1036,17 +1087,17 @@ services: environment: XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY: d1a66dfd-c4d3-4a0a-8290-2abcb83ab3aa NO_PROXY: localhost,127.0.0.1,elasticsearch,kibana - XPACK_SECURITY_ENABLED: 'true' - XPACK_SECURITY_ENROLLMENT_ENABLED: 'false' - XPACK_SECURITY_HTTP_SSL_ENABLED: 'false' - XPACK_FLEET_ISAIRGAPPED: 'true' + XPACK_SECURITY_ENABLED: "true" + XPACK_SECURITY_ENROLLMENT_ENABLED: "false" + XPACK_SECURITY_HTTP_SSL_ENABLED: "false" + XPACK_FLEET_ISAIRGAPPED: "true" I18N_LOCALE: zh-CN - SERVER_PORT: '5601' + SERVER_PORT: "5601" ELASTICSEARCH_HOSTS: http://elasticsearch:9200 ports: - ${KIBANA_PORT:-5601}:5601 healthcheck: - test: [ 'CMD-SHELL', 'curl -s http://localhost:5601 >/dev/null || exit 1' ] + test: ["CMD-SHELL", "curl -s http://localhost:5601 >/dev/null || exit 1"] interval: 30s timeout: 10s retries: 3 diff --git a/docker/middleware.env.example b/docker/middleware.env.example index d01f9abe53..1a4484a9b5 100644 --- a/docker/middleware.env.example +++ b/docker/middleware.env.example @@ -119,4 +119,29 @@ FORCE_VERIFYING_SIGNATURE=true PLUGIN_PYTHON_ENV_INIT_TIMEOUT=120 PLUGIN_MAX_EXECUTION_TIMEOUT=600 # PIP_MIRROR_URL=https://pypi.tuna.tsinghua.edu.cn/simple -PIP_MIRROR_URL= \ No newline at end of file +PIP_MIRROR_URL= + +# https://github.com/langgenius/dify-plugin-daemon/blob/main/.env.example +# Plugin storage type, local aws_s3 tencent_cos azure_blob +PLUGIN_STORAGE_TYPE=local +PLUGIN_STORAGE_LOCAL_ROOT=/app/storage +PLUGIN_WORKING_PATH=/app/storage/cwd +PLUGIN_INSTALLED_PATH=plugin +PLUGIN_PACKAGE_CACHE_PATH=plugin_packages +PLUGIN_MEDIA_CACHE_PATH=assets +# Plugin oss bucket +PLUGIN_STORAGE_OSS_BUCKET= +# Plugin oss s3 credentials +PLUGIN_S3_USE_AWS_MANAGED_IAM=false 
+PLUGIN_S3_ENDPOINT= +PLUGIN_S3_USE_PATH_STYLE=false +PLUGIN_AWS_ACCESS_KEY= +PLUGIN_AWS_SECRET_KEY= +PLUGIN_AWS_REGION= +# Plugin oss azure blob +PLUGIN_AZURE_BLOB_STORAGE_CONTAINER_NAME= +PLUGIN_AZURE_BLOB_STORAGE_CONNECTION_STRING= +# Plugin oss tencent cos +PLUGIN_TENCENT_COS_SECRET_KEY= +PLUGIN_TENCENT_COS_SECRET_ID= +PLUGIN_TENCENT_COS_REGION= \ No newline at end of file diff --git a/web/.vscode/extensions.json b/web/.vscode/extensions.json index d7680d74a5..a9afbcc640 100644 --- a/web/.vscode/extensions.json +++ b/web/.vscode/extensions.json @@ -1,6 +1,7 @@ { "recommendations": [ "bradlc.vscode-tailwindcss", - "firsttris.vscode-jest-runner" + "firsttris.vscode-jest-runner", + "kisstkondoros.vscode-codemetrics" ] -} +} \ No newline at end of file diff --git a/web/Dockerfile b/web/Dockerfile index 8d50154873..dfc5ba8b46 100644 --- a/web/Dockerfile +++ b/web/Dockerfile @@ -1,12 +1,12 @@ # base image -FROM node:20-alpine3.20 AS base +FROM node:22-alpine3.21 AS base LABEL maintainer="takatost@gmail.com" # if you located in China, you can use aliyun mirror to speed up # RUN sed -i 's/dl-cdn.alpinelinux.org/mirrors.aliyun.com/g' /etc/apk/repositories RUN apk add --no-cache tzdata -RUN npm install -g pnpm@9.12.2 +RUN npm install -g pnpm@10.8.0 ENV PNPM_HOME="/pnpm" ENV PATH="$PNPM_HOME:$PATH" diff --git a/web/README.md b/web/README.md index 900924f348..3236347e80 100644 --- a/web/README.md +++ b/web/README.md @@ -6,7 +6,9 @@ This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next ### Run by source code -To start the web frontend service, you will need [Node.js v18.x (LTS)](https://nodejs.org/en) and [pnpm version 9.12.2](https://pnpm.io). +Before starting the web frontend service, please make sure the following environment is ready. 
+- [Node.js](https://nodejs.org) >= v18.x +- [pnpm](https://pnpm.io) v10.x First, install the dependencies: diff --git a/web/app/(commonLayout)/apps/Apps.tsx b/web/app/(commonLayout)/apps/Apps.tsx index d98851c4e9..1375f4dfd6 100644 --- a/web/app/(commonLayout)/apps/Apps.tsx +++ b/web/app/(commonLayout)/apps/Apps.tsx @@ -66,6 +66,7 @@ const Apps = () => { const [isCreatedByMe, setIsCreatedByMe] = useState(queryIsCreatedByMe) const [tagFilterValue, setTagFilterValue] = useState(tagIDs) const [searchKeywords, setSearchKeywords] = useState(keywords) + const newAppCardRef = useRef(null) const setKeywords = useCallback((keywords: string) => { setQuery(prev => ({ ...prev, keywords })) }, [setQuery]) @@ -73,10 +74,15 @@ const Apps = () => { setQuery(prev => ({ ...prev, tagIDs })) }, [setQuery]) - const { data, isLoading, setSize, mutate } = useSWRInfinite( + const { data, isLoading, error, setSize, mutate } = useSWRInfinite( (pageIndex: number, previousPageData: AppListResponse) => getKey(pageIndex, previousPageData, activeTab, isCreatedByMe, tagIDs, searchKeywords), fetchAppList, - { revalidateFirstPage: true }, + { + revalidateFirstPage: true, + shouldRetryOnError: false, + dedupingInterval: 500, + errorRetryCount: 3, + }, ) const anchorRef = useRef(null) @@ -105,15 +111,22 @@ const Apps = () => { useEffect(() => { const hasMore = data?.at(-1)?.has_more ?? 
true let observer: IntersectionObserver | undefined + + if (error) { + if (observer) + observer.disconnect() + return + } + if (anchorRef.current) { observer = new IntersectionObserver((entries) => { - if (entries[0].isIntersecting && !isLoading && hasMore) + if (entries[0].isIntersecting && !isLoading && !error && hasMore) setSize((size: number) => size + 1) }, { rootMargin: '100px' }) observer.observe(anchorRef.current) } return () => observer?.disconnect() - }, [isLoading, setSize, anchorRef, mutate, data]) + }, [isLoading, setSize, anchorRef, mutate, data, error]) const { run: handleSearch } = useDebounceFn(() => { setSearchKeywords(keywords) @@ -166,14 +179,14 @@ const Apps = () => { {(data && data[0].total > 0) ?
{isCurrentWorkspaceEditor - && } + && } {data.map(({ data: apps }) => apps.map(app => ( )))}
:
{isCurrentWorkspaceEditor - && } + && }
} diff --git a/web/app/(commonLayout)/datasets/template/template.en.mdx b/web/app/(commonLayout)/datasets/template/template.en.mdx index 862344659d..ca86c7e6d6 100644 --- a/web/app/(commonLayout)/datasets/template/template.en.mdx +++ b/web/app/(commonLayout)/datasets/template/template.en.mdx @@ -386,11 +386,20 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi ### Query + + Search keyword, optional + + + Tag ID list, optional + - Page number + Page number, optional, default 1 - Number of items returned, default 20, range 1-100 + Number of items returned, optional, default 20, range 1-100 + + + Whether to include all datasets (only effective for owners), optional, defaults to false diff --git a/web/app/(commonLayout)/datasets/template/template.ja.mdx b/web/app/(commonLayout)/datasets/template/template.ja.mdx index e2bdd27d80..45f3adfc4d 100644 --- a/web/app/(commonLayout)/datasets/template/template.ja.mdx +++ b/web/app/(commonLayout)/datasets/template/template.ja.mdx @@ -386,11 +386,20 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi ### クエリ + + 検索キーワード、オプション + + + タグIDリスト、オプション + - ページ番号 + ページ番号、オプション、デフォルト1 - 返されるアイテム数、デフォルトは 20、範囲は 1-100 + 返されるアイテム数、オプション、デフォルト20、範囲1-100 + + + すべてのデータセットを含めるかどうか(所有者のみ有効)、オプション、デフォルトはfalse diff --git a/web/app/(commonLayout)/datasets/template/template.zh.mdx b/web/app/(commonLayout)/datasets/template/template.zh.mdx index 1d4e5ead6d..20511b8cf7 100644 --- a/web/app/(commonLayout)/datasets/template/template.zh.mdx +++ b/web/app/(commonLayout)/datasets/template/template.zh.mdx @@ -387,11 +387,20 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi ### Query - - 页码 + + 搜索关键词,可选 + + + 标签 ID 列表,可选 + + + 页码,可选,默认为 1 - 返回条数,默认 20,范围 1-100 + 返回条数,可选,默认 20,范围 1-100 + + + 是否包含所有数据集(仅对所有者生效),可选,默认为 false diff --git a/web/app/(commonLayout)/plugins/page.tsx b/web/app/(commonLayout)/plugins/page.tsx index cc525992fa..47f2791075 100644 --- 
a/web/app/(commonLayout)/plugins/page.tsx +++ b/web/app/(commonLayout)/plugins/page.tsx @@ -8,7 +8,7 @@ const PluginList = async () => { return ( } - marketplace={} + marketplace={} /> ) } diff --git a/web/app/components/app/configuration/config-var/config-select/index.tsx b/web/app/components/app/configuration/config-var/config-select/index.tsx index 0e4256c691..d2dc1662c1 100644 --- a/web/app/components/app/configuration/config-var/config-select/index.tsx +++ b/web/app/components/app/configuration/config-var/config-select/index.tsx @@ -1,12 +1,10 @@ 'use client' import type { FC } from 'react' -import React from 'react' +import React, { useState } from 'react' +import { RiAddLine, RiDeleteBinLine, RiDraggable } from '@remixicon/react' import { useTranslation } from 'react-i18next' -import { PlusIcon } from '@heroicons/react/24/outline' import { ReactSortable } from 'react-sortablejs' -import RemoveIcon from '../../base/icons/remove-icon' - -import s from './style.module.css' +import cn from '@/utils/classnames' export type Options = string[] export type IConfigSelectProps = { @@ -19,6 +17,8 @@ const ConfigSelect: FC = ({ onChange, }) => { const { t } = useTranslation() + const [focusID, setFocusID] = useState(null) + const [deletingID, setDeletingID] = useState(null) const optionList = options.map((content, index) => { return ({ @@ -40,12 +40,15 @@ const ConfigSelect: FC = ({ animation={150} > {options.map((o, index) => ( -
-
- - - -
+
+ = ({ return item })) }} - className={'h-9 w-full grow cursor-pointer border-0 bg-transparent pl-1.5 pr-8 text-sm leading-9 text-gray-900 focus:outline-none'} + className={'h-9 w-full grow cursor-pointer overflow-x-auto rounded-lg border-0 bg-transparent pl-1.5 pr-8 text-sm leading-9 text-text-secondary focus:outline-none'} + onFocus={() => setFocusID(index)} + onBlur={() => setFocusID(null)} /> - { onChange(options.filter((_, i) => index !== i)) }} - /> + onMouseEnter={() => setDeletingID(index)} + onMouseLeave={() => setDeletingID(null)} + > + +
))} @@ -75,9 +84,9 @@ const ConfigSelect: FC = ({
{ onChange([...options, '']) }} - className='flex h-9 cursor-pointer items-center gap-2 rounded-lg bg-gray-100 px-3 text-gray-400'> - -
{t('appDebug.variableConfig.addOption')}
+ className='mt-1 flex h-9 cursor-pointer items-center gap-2 rounded-lg bg-components-button-tertiary-bg px-3 text-components-button-tertiary-text hover:bg-components-button-tertiary-bg-hover'> + +
{t('appDebug.variableConfig.addOption')}
) diff --git a/web/app/components/app/configuration/config-var/config-select/style.module.css b/web/app/components/app/configuration/config-var/config-select/style.module.css deleted file mode 100644 index a09d19537d..0000000000 --- a/web/app/components/app/configuration/config-var/config-select/style.module.css +++ /dev/null @@ -1,21 +0,0 @@ -.inputWrap { - display: flex; - align-items: center; - border-radius: 8px; - border: 1px solid #EAECF0; - padding-left: 10px; - cursor: pointer; -} - -.deleteBtn { - display: none; - display: flex; -} - -.inputWrap:hover { - box-shadow: 0px 1px 2px rgba(16, 24, 40, 0.05); -} - -.inputWrap:hover .deleteBtn { - display: flex; -} \ No newline at end of file diff --git a/web/app/components/app/configuration/dataset-config/index.tsx b/web/app/components/app/configuration/dataset-config/index.tsx index 01ba8c606d..6165cfdeec 100644 --- a/web/app/components/app/configuration/dataset-config/index.tsx +++ b/web/app/components/app/configuration/dataset-config/index.tsx @@ -270,7 +270,7 @@ const DatasetConfig: FC = () => { handleMetadataModelChange={handleMetadataModelChange} handleMetadataCompletionParamsChange={handleMetadataCompletionParamsChange} isCommonVariable - availableCommonStringVars={promptVariablesToSelect.filter(item => item.type === MetadataFilteringVariableType.string)} + availableCommonStringVars={promptVariablesToSelect.filter(item => item.type === MetadataFilteringVariableType.string || item.type === MetadataFilteringVariableType.select)} availableCommonNumberVars={promptVariablesToSelect.filter(item => item.type === MetadataFilteringVariableType.number)} /> diff --git a/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx b/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx index 14f0c3d865..3b9078f1be 100644 --- a/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx +++ 
b/web/app/components/app/configuration/dataset-config/params-config/config-content.tsx @@ -25,6 +25,7 @@ import { useSelectedDatasetsMode } from '@/app/components/workflow/nodes/knowled import Switch from '@/app/components/base/switch' import Toast from '@/app/components/base/toast' import Divider from '@/app/components/base/divider' +import { noop } from 'lodash-es' type Props = { datasetConfigs: DatasetConfigs @@ -41,8 +42,8 @@ const ConfigContent: FC = ({ onChange, isInWorkflow, singleRetrievalModelConfig: singleRetrievalConfig = {} as ModelConfig, - onSingleRetrievalModelChange = () => { }, - onSingleRetrievalModelParamsChange = () => { }, + onSingleRetrievalModelChange = noop, + onSingleRetrievalModelParamsChange = noop, selectedDatasets = [], }) => { const { t } = useTranslation() diff --git a/web/app/components/app/configuration/index.tsx b/web/app/components/app/configuration/index.tsx index cc6909d151..249624a294 100644 --- a/web/app/components/app/configuration/index.tsx +++ b/web/app/components/app/configuration/index.tsx @@ -197,9 +197,6 @@ const Configuration: FC = () => { const isOpenAI = modelConfig.provider === 'langgenius/openai/openai' const [collectionList, setCollectionList] = useState([]) - useEffect(() => { - - }, []) const [datasetConfigs, doSetDatasetConfigs] = useState({ retrieval_model: RETRIEVE_TYPE.multiWay, reranking_model: { diff --git a/web/app/components/app/create-app-dialog/index.tsx b/web/app/components/app/create-app-dialog/index.tsx index acc3650211..794bbbf9e8 100644 --- a/web/app/components/app/create-app-dialog/index.tsx +++ b/web/app/components/app/create-app-dialog/index.tsx @@ -1,4 +1,6 @@ 'use client' +import { useCallback } from 'react' +import { useKeyPress } from 'ahooks' import AppList from './app-list' import FullScreenModal from '@/app/components/base/fullscreen-modal' @@ -10,6 +12,13 @@ type CreateAppDialogProps = { } const CreateAppTemplateDialog = ({ show, onSuccess, onClose, onCreateFromBlank }: 
CreateAppDialogProps) => { + const handleEscKeyPress = useCallback(() => { + if (show) + onClose() + }, [show, onClose]) + + useKeyPress('esc', handleEscKeyPress) + return ( { + const category = searchParams.get('category') + if (category && AppModes.includes(category as AppMode)) + setAppMode(category as AppMode) + }, [searchParams]) + const onCreate = useCallback(async () => { if (!appMode) { notify({ type: 'error', message: t('app.newApp.appTypeRequired') }) @@ -148,7 +157,6 @@ function CreateApp({ onClose, onSuccess, onCreateFromTemplate }: CreateAppProps)
void } -function AppTypeCard({ icon, title, beta = false, description, active, onClick }: AppTypeCardProps) { +function AppTypeCard({ icon, title, description, active, onClick }: AppTypeCardProps) { const { t } = useTranslation() return
- {beta &&
{t('common.menus.status')}
} {icon}
{title}
{description}
diff --git a/web/app/components/app/create-from-dsl-modal/index.tsx b/web/app/components/app/create-from-dsl-modal/index.tsx index e9124bd13b..c1df10ed64 100644 --- a/web/app/components/app/create-from-dsl-modal/index.tsx +++ b/web/app/components/app/create-from-dsl-modal/index.tsx @@ -5,7 +5,8 @@ import { useMemo, useRef, useState } from 'react' import { useRouter } from 'next/navigation' import { useContext } from 'use-context-selector' import { useTranslation } from 'react-i18next' -import { RiCloseLine } from '@remixicon/react' +import { RiCloseLine, RiCommandLine, RiCornerDownLeftLine } from '@remixicon/react' +import { useDebounceFn, useKeyPress } from 'ahooks' import Uploader from './uploader' import Button from '@/app/components/base/button' import Input from '@/app/components/base/input' @@ -143,6 +144,18 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS isCreatingRef.current = false } + const { run: handleCreateApp } = useDebounceFn(onCreate, { wait: 300 }) + + useKeyPress(['meta.enter', 'ctrl.enter'], () => { + if (show && !isAppsFull && ((currentTab === CreateFromDSLModalTab.FROM_FILE && currentFile) || (currentTab === CreateFromDSLModalTab.FROM_URL && dslUrlValue))) + handleCreateApp() + }) + + useKeyPress('esc', () => { + if (show && !showErrorModal) + onClose() + }) + const onDSLConfirm: MouseEventHandler = async () => { try { if (!importId) @@ -266,7 +279,18 @@ const CreateFromDSLModal = ({ show, onSuccess, onClose, activeTab = CreateFromDS )}
- +
{}) | null): AudioPlayer { + public getAudioPlayer(url: string, isPublic: boolean, id: string | undefined, msgContent: string | null | undefined, voice: string | undefined, callback: ((event: string) => void) | null): AudioPlayer { if (this.msgId && this.msgId === id && this.audioPlayers) { this.audioPlayers.setCallback(callback) return this.audioPlayers diff --git a/web/app/components/base/audio-btn/audio.ts b/web/app/components/base/audio-btn/audio.ts index d7fae02f82..cd40930f43 100644 --- a/web/app/components/base/audio-btn/audio.ts +++ b/web/app/components/base/audio-btn/audio.ts @@ -21,9 +21,9 @@ export default class AudioPlayer { isLoadData = false url: string isPublic: boolean - callback: ((event: string) => {}) | null + callback: ((event: string) => void) | null - constructor(streamUrl: string, isPublic: boolean, msgId: string | undefined, msgContent: string | null | undefined, voice: string | undefined, callback: ((event: string) => {}) | null) { + constructor(streamUrl: string, isPublic: boolean, msgId: string | undefined, msgContent: string | null | undefined, voice: string | undefined, callback: ((event: string) => void) | null) { this.audioContext = new AudioContext() this.msgId = msgId this.msgContent = msgContent @@ -68,7 +68,7 @@ export default class AudioPlayer { }) } - public setCallback(callback: ((event: string) => {}) | null) { + public setCallback(callback: ((event: string) => void) | null) { this.callback = callback if (callback) { this.audio.addEventListener('ended', () => { @@ -211,10 +211,6 @@ export default class AudioPlayer { this.audioContext.suspend() } - private cancer() { - - } - private receiveAudioData(unit8Array: Uint8Array) { if (!unit8Array) { this.finishStream() diff --git a/web/app/components/base/chat/chat-with-history/hooks.tsx b/web/app/components/base/chat/chat-with-history/hooks.tsx index 88f6c8f616..0a4cbae964 100644 --- a/web/app/components/base/chat/chat-with-history/hooks.tsx +++ 
b/web/app/components/base/chat/chat-with-history/hooks.tsx @@ -266,7 +266,7 @@ export const useChatWithHistory = (installedAppInfo?: InstalledApp) => { const currentConversationLatestInputs = useMemo(() => { if (!currentConversationId || !appChatListData?.data.length) - return {} + return newConversationInputsRef.current || {} return appChatListData.data.slice().pop().inputs || {} }, [appChatListData, currentConversationId]) const [currentConversationInputs, setCurrentConversationInputs] = useState>(currentConversationLatestInputs || {}) diff --git a/web/app/components/base/chat/chat/hooks.ts b/web/app/components/base/chat/chat/hooks.ts index eb48f9515b..aad17ccc52 100644 --- a/web/app/components/base/chat/chat/hooks.ts +++ b/web/app/components/base/chat/chat/hooks.ts @@ -34,6 +34,7 @@ import { getProcessedFiles, getProcessedFilesFromResponse, } from '@/app/components/base/file-uploader/utils' +import { noop } from 'lodash-es' type GetAbortController = (abortController: AbortController) => void type SendCallback = { @@ -308,7 +309,7 @@ export const useChat = ( else ttsUrl = `/apps/${params.appId}/text-to-audio` } - const player = AudioPlayerManager.getInstance().getAudioPlayer(ttsUrl, ttsIsPublic, uuidV4(), 'none', 'none', (_: any): any => { }) + const player = AudioPlayerManager.getInstance().getAudioPlayer(ttsUrl, ttsIsPublic, uuidV4(), 'none', 'none', noop) ssePost( url, { diff --git a/web/app/components/base/chat/embedded-chatbot/hooks.tsx b/web/app/components/base/chat/embedded-chatbot/hooks.tsx index 197aa7649c..a5665ab346 100644 --- a/web/app/components/base/chat/embedded-chatbot/hooks.tsx +++ b/web/app/components/base/chat/embedded-chatbot/hooks.tsx @@ -242,7 +242,7 @@ export const useEmbeddedChatbot = () => { const currentConversationLatestInputs = useMemo(() => { if (!currentConversationId || !appChatListData?.data.length) - return {} + return newConversationInputsRef.current || {} return appChatListData.data.slice().pop().inputs || {} }, 
[appChatListData, currentConversationId]) const [currentConversationInputs, setCurrentConversationInputs] = useState>(currentConversationLatestInputs || {}) diff --git a/web/app/components/base/markdown.tsx b/web/app/components/base/markdown.tsx index 0e583fd48f..987a4d0c14 100644 --- a/web/app/components/base/markdown.tsx +++ b/web/app/components/base/markdown.tsx @@ -222,19 +222,21 @@ const Paragraph = (paragraph: any) => { const children_node = node.children if (children_node && children_node[0] && 'tagName' in children_node[0] && children_node[0].tagName === 'img') { return ( - <> +
{ - Array.isArray(paragraph.children) ?

{paragraph.children.slice(1)}

: null + Array.isArray(paragraph.children) && paragraph.children.length > 1 && ( +
{paragraph.children.slice(1)}
+ ) } - +
) } return

{paragraph.children}

} const Img = ({ src }: any) => { - return () + return
} const Link = ({ node, ...props }: any) => { diff --git a/web/app/components/base/pagination/pagination.tsx b/web/app/components/base/pagination/pagination.tsx index 5898c4e924..ec8b0355f4 100644 --- a/web/app/components/base/pagination/pagination.tsx +++ b/web/app/components/base/pagination/pagination.tsx @@ -7,10 +7,11 @@ import type { IPaginationProps, PageButtonProps, } from './type' +import { noop } from 'lodash-es' const defaultState: IPagination = { currentPage: 0, - setCurrentPage: () => {}, + setCurrentPage: noop, truncableText: '...', truncableClassName: '', pages: [], diff --git a/web/app/components/base/prompt-editor/utils.ts b/web/app/components/base/prompt-editor/utils.ts index 6d7d636e55..4b2570e697 100644 --- a/web/app/components/base/prompt-editor/utils.ts +++ b/web/app/components/base/prompt-editor/utils.ts @@ -296,7 +296,7 @@ export function $splitNodeContainingQuery(match: MenuTextMatch): TextNode | null } export function textToEditorState(text: string) { - const paragraph = text ? text.split('\n') : [''] + const paragraph = text && (typeof text === 'string') ? text.split('\n') : [''] return JSON.stringify({ root: { diff --git a/web/app/components/base/tag-input/index.tsx b/web/app/components/base/tag-input/index.tsx index f3df585d48..2be9c5ffc7 100644 --- a/web/app/components/base/tag-input/index.tsx +++ b/web/app/components/base/tag-input/index.tsx @@ -70,7 +70,7 @@ const TagInput: FC = ({ } return ( -
+
{ (items || []).map((item, index) => (
= (props) => { return @@ -169,12 +170,11 @@ const StepTwo = ({ const [rules, setRules] = useState([]) const [defaultConfig, setDefaultConfig] = useState() const hasSetIndexType = !!indexingType - const [indexType, setIndexType] = useState( - (indexingType - || isAPIKeySet) - ? IndexingType.QUALIFIED - : IndexingType.ECONOMICAL, - ) + const [indexType, setIndexType] = useState(() => { + if (hasSetIndexType) + return indexingType + return isAPIKeySet ? IndexingType.QUALIFIED : IndexingType.ECONOMICAL + }) const [previewFile, setPreviewFile] = useState( (datasetId && documentDetail) @@ -421,6 +421,13 @@ const StepTwo = ({ } else { // create const indexMethod = getIndexing_technique() + if (indexMethod === IndexingType.QUALIFIED && (!embeddingModel.model || !embeddingModel.provider)) { + Toast.notify({ + type: 'error', + message: t('appDebug.datasetConfig.embeddingModelRequired'), + }) + return + } if ( !isReRankModelSelected({ rerankModelList, @@ -568,7 +575,6 @@ const StepTwo = ({ // get indexing type by props if (indexingType) setIndexType(indexingType as IndexingType) - else setIndexType(isAPIKeySet ? 
IndexingType.QUALIFIED : IndexingType.ECONOMICAL) }, [isAPIKeySet, indexingType, datasetId]) @@ -848,10 +854,9 @@ const StepTwo = ({ description={t('datasetCreation.stepTwo.qualifiedTip')} icon={} isActive={!hasSetIndexType && indexType === IndexingType.QUALIFIED} - disabled={!isAPIKeySet || hasSetIndexType} + disabled={hasSetIndexType} onSwitched={() => { - if (isAPIKeySet) - setIndexType(IndexingType.QUALIFIED) + setIndexType(IndexingType.QUALIFIED) }} /> )} @@ -894,11 +899,10 @@ const StepTwo = ({ description={t('datasetCreation.stepTwo.economicalTip')} icon={} isActive={!hasSetIndexType && indexType === IndexingType.ECONOMICAL} - disabled={!isAPIKeySet || hasSetIndexType || docForm !== ChunkingMode.text} + disabled={hasSetIndexType || docForm !== ChunkingMode.text} ref={economyDomRef} onSwitched={() => { - if (isAPIKeySet && docForm === ChunkingMode.text) - setIndexType(IndexingType.ECONOMICAL) + setIndexType(IndexingType.ECONOMICAL) }} /> @@ -1007,7 +1011,7 @@ const StepTwo = ({
)}
- { }} footer={null}> + { const { t } = useTranslation() @@ -265,7 +266,7 @@ export const OperationAction: FC<{ return
e.stopPropagation()}> {isListScene && !embeddingAvailable && ( - { }} disabled={true} size='md' /> + )} {isListScene && embeddingAvailable && ( <> @@ -276,7 +277,7 @@ export const OperationAction: FC<{ needsDelay >
- { }} disabled={true} size='md' /> +
: handleSwitch(v ? 'enable' : 'disable')} size='md' /> diff --git a/web/app/components/explore/create-app-modal/index.tsx b/web/app/components/explore/create-app-modal/index.tsx index 585c52f828..d6d521833a 100644 --- a/web/app/components/explore/create-app-modal/index.tsx +++ b/web/app/components/explore/create-app-modal/index.tsx @@ -1,7 +1,8 @@ 'use client' -import React, { useState } from 'react' +import React, { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' -import { RiCloseLine } from '@remixicon/react' +import { RiCloseLine, RiCommandLine, RiCornerDownLeftLine } from '@remixicon/react' +import { useDebounceFn, useKeyPress } from 'ahooks' import AppIconPicker from '../../base/app-icon-picker' import Modal from '@/app/components/base/modal' import Button from '@/app/components/base/button' @@ -13,6 +14,7 @@ import AppIcon from '@/app/components/base/app-icon' import { useProviderContext } from '@/context/provider-context' import AppsFull from '@/app/components/billing/apps-full-in-dialog' import type { AppIconType } from '@/types/app' +import { noop } from 'lodash-es' export type CreateAppModalProps = { show: boolean @@ -65,7 +67,7 @@ const CreateAppModal = ({ const { plan, enableBilling } = useProviderContext() const isAppsFull = (enableBilling && plan.usage.buildApps >= plan.total.buildApps) - const submit = () => { + const submit = useCallback(() => { if (!name.trim()) { Toast.notify({ type: 'error', message: t('explore.appCustomize.nameRequired') }) return @@ -79,13 +81,25 @@ const CreateAppModal = ({ use_icon_as_answer_icon: useIconAsAnswerIcon, }) onHide() - } + }, [name, appIcon, description, useIconAsAnswerIcon, onConfirm, onHide, t]) + + const { run: handleSubmit } = useDebounceFn(submit, { wait: 300 }) + + useKeyPress(['meta.enter', 'ctrl.enter'], () => { + if (show && !(!isEditModal && isAppsFull) && name.trim()) + handleSubmit() + }) + + useKeyPress('esc', () => { + if (show) + onHide() + }) return ( <> {}} + 
onClose={noop} className='relative !max-w-[480px] px-8' >
@@ -145,7 +159,18 @@ const CreateAppModal = ({ {!isEditModal && isAppsFull && }
- +
diff --git a/web/app/components/header/account-setting/data-source-page/panel/config-item.tsx b/web/app/components/header/account-setting/data-source-page/panel/config-item.tsx index 3dad51f566..6faf840529 100644 --- a/web/app/components/header/account-setting/data-source-page/panel/config-item.tsx +++ b/web/app/components/header/account-setting/data-source-page/panel/config-item.tsx @@ -10,6 +10,7 @@ import Operate from '../data-source-notion/operate' import { DataSourceType } from './types' import s from './style.module.css' import cn from '@/utils/classnames' +import { noop } from 'lodash-es' export type ConfigItemType = { id: string @@ -41,7 +42,7 @@ const ConfigItem: FC = ({ const { t } = useTranslation() const isNotion = type === DataSourceType.notion const isWebsite = type === DataSourceType.website - const onChangeAuthorizedPage = notionActions?.onChangeAuthorizedPage || function () { } + const onChangeAuthorizedPage = notionActions?.onChangeAuthorizedPage || noop return (
diff --git a/web/app/components/header/account-setting/model-provider-page/hooks.spec.ts b/web/app/components/header/account-setting/model-provider-page/hooks.spec.ts new file mode 100644 index 0000000000..4d6941ddc6 --- /dev/null +++ b/web/app/components/header/account-setting/model-provider-page/hooks.spec.ts @@ -0,0 +1,90 @@ +import { renderHook } from '@testing-library/react' +import { useLanguage } from './hooks' +import { useContext } from 'use-context-selector' +import { after } from 'node:test' + +jest.mock('swr', () => ({ + __esModule: true, + default: jest.fn(), // mock useSWR + useSWRConfig: jest.fn(), +})) + +// mock use-context-selector +jest.mock('use-context-selector', () => ({ + useContext: jest.fn(), +})) + +// mock service/common functions +jest.mock('@/service/common', () => ({ + fetchDefaultModal: jest.fn(), + fetchModelList: jest.fn(), + fetchModelProviderCredentials: jest.fn(), + fetchModelProviders: jest.fn(), + getPayUrl: jest.fn(), +})) + +// mock context hooks +jest.mock('@/context/i18n', () => ({ + __esModule: true, + default: jest.fn(), +})) + +jest.mock('@/context/provider-context', () => ({ + useProviderContext: jest.fn(), +})) + +jest.mock('@/context/modal-context', () => ({ + useModalContextSelector: jest.fn(), +})) + +jest.mock('@/context/event-emitter', () => ({ + useEventEmitterContextContext: jest.fn(), +})) + +// mock plugins +jest.mock('@/app/components/plugins/marketplace/hooks', () => ({ + useMarketplacePlugins: jest.fn(), +})) + +jest.mock('@/app/components/plugins/marketplace/utils', () => ({ + getMarketplacePluginsByCollectionId: jest.fn(), +})) + +jest.mock('./provider-added-card', () => { + // eslint-disable-next-line no-labels, ts/no-unused-expressions + UPDATE_MODEL_PROVIDER_CUSTOM_MODEL_LIST: [] +}) + +after(() => { + jest.resetModules() + jest.clearAllMocks() +}) + +describe('useLanguage', () => { + it('should replace hyphen with underscore in locale', () => { + (useContext as jest.Mock).mockReturnValue({ + locale: 
'en-US', + }) + const { result } = renderHook(() => useLanguage()) + expect(result.current).toBe('en_US') + }) + + it('should return locale as is if no hyphen exists', () => { + (useContext as jest.Mock).mockReturnValue({ + locale: 'enUS', + }) + + const { result } = renderHook(() => useLanguage()) + expect(result.current).toBe('enUS') + }) + + it('should handle multiple hyphens', () => { + // Mock the I18n context return value + (useContext as jest.Mock).mockReturnValue({ + locale: 'zh-Hans-CN', + }) + + const { result } = renderHook(() => useLanguage()) + expect(result.current).toBe('zh_Hans-CN') + }) +}) diff --git a/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx b/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx index c2fbe7930e..9d1846cdf0 100644 --- a/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx +++ b/web/app/components/header/account-setting/model-provider-page/model-icon/index.tsx @@ -7,7 +7,7 @@ import { useLanguage } from '../hooks' import { Group } from '@/app/components/base/icons/src/vender/other' import { OpenaiBlue, OpenaiViolet } from '@/app/components/base/icons/src/public/llm' import cn from '@/utils/classnames' -import { renderI18nObject } from '@/hooks/use-i18n' +import { renderI18nObject } from '@/i18n' type ModelIconProps = { provider?: Model | ModelProvider diff --git a/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx b/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx index 4adab6d2e0..bd1bb6ced9 100644 --- a/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx +++ b/web/app/components/header/account-setting/model-provider-page/model-modal/index.tsx @@ -270,8 +270,7 @@ const ModelModal: FC = ({ } const renderTitlePrefix = () => { - const prefix = configurateMethod === ConfigurationMethodEnum.customizableModel ? 
t('common.operation.add') : t('common.operation.setup') - + const prefix = isEditMode ? t('common.operation.setup') : t('common.operation.add') return `${prefix} ${provider.label[language] || provider.label.en_US}` } diff --git a/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/trigger.tsx b/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/trigger.tsx index 8a9c6bbf88..7c96c9a0af 100644 --- a/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/trigger.tsx +++ b/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/trigger.tsx @@ -47,7 +47,7 @@ const Trigger: FC = ({ 'relative flex h-8 cursor-pointer items-center rounded-lg px-2', !isInWorkflow && 'border ring-inset hover:ring-[0.5px]', !isInWorkflow && (disabled ? 'border-text-warning bg-state-warning-hover ring-text-warning' : 'border-util-colors-indigo-indigo-600 bg-state-accent-hover ring-util-colors-indigo-indigo-600'), - isInWorkflow && 'border border-workflow-block-parma-bg bg-workflow-block-parma-bg pr-[30px] hover:border-gray-200', + isInWorkflow && 'border border-workflow-block-parma-bg bg-workflow-block-parma-bg pr-[30px] hover:border-components-input-border-active', )} > { diff --git a/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx b/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx index 1eb579a7a0..253269d920 100644 --- a/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx +++ b/web/app/components/header/account-setting/model-provider-page/provider-icon/index.tsx @@ -3,7 +3,7 @@ import type { ModelProvider } from '../declarations' import { useLanguage } from '../hooks' import { Openai } from '@/app/components/base/icons/src/vender/other' import { AnthropicDark, AnthropicLight } from '@/app/components/base/icons/src/public/llm' -import { renderI18nObject } from 
'@/hooks/use-i18n' +import { renderI18nObject } from '@/i18n' import { Theme } from '@/types/app' import cn from '@/utils/classnames' import useTheme from '@/hooks/use-theme' diff --git a/web/app/components/plugins/card/index.tsx b/web/app/components/plugins/card/index.tsx index f4878a433c..1cc18ac24f 100644 --- a/web/app/components/plugins/card/index.tsx +++ b/web/app/components/plugins/card/index.tsx @@ -11,7 +11,7 @@ import cn from '@/utils/classnames' import { useGetLanguage } from '@/context/i18n' import { getLanguage } from '@/i18n/language' import { useSingleCategories } from '../hooks' -import { renderI18nObject } from '@/hooks/use-i18n' +import { renderI18nObject } from '@/i18n' import { useMixedTranslation } from '@/app/components/plugins/marketplace/hooks' import Partner from '../base/badges/partner' import Verified from '../base/badges/verified' diff --git a/web/app/components/plugins/hooks.ts b/web/app/components/plugins/hooks.ts index f4b81d98c1..0349c46f9e 100644 --- a/web/app/components/plugins/hooks.ts +++ b/web/app/components/plugins/hooks.ts @@ -92,3 +92,17 @@ export const useSingleCategories = (translateFromOut?: TFunction) => { categoriesMap, } } + +export const PLUGIN_PAGE_TABS_MAP = { + plugins: 'plugins', + marketplace: 'discover', +} + +export const usePluginPageTabs = () => { + const { t } = useTranslation() + const tabs = [ + { value: PLUGIN_PAGE_TABS_MAP.plugins, text: t('common.menus.plugins') }, + { value: PLUGIN_PAGE_TABS_MAP.marketplace, text: t('common.menus.exploreMarketplace') }, + ] + return tabs +} diff --git a/web/app/components/plugins/marketplace/context.tsx b/web/app/components/plugins/marketplace/context.tsx index 53f57c0252..91621afaf8 100644 --- a/web/app/components/plugins/marketplace/context.tsx +++ b/web/app/components/plugins/marketplace/context.tsx @@ -35,9 +35,10 @@ import { import { getMarketplaceListCondition, getMarketplaceListFilterType, + updateSearchParams, } from './utils' import { useInstalledPluginList } 
from '@/service/use-plugins' -import { noop } from 'lodash-es' +import { debounce, noop } from 'lodash-es' export type MarketplaceContextValue = { intersected: boolean @@ -96,6 +97,7 @@ type MarketplaceContextProviderProps = { searchParams?: SearchParams shouldExclude?: boolean scrollContainerId?: string + showSearchParams?: boolean } export function useMarketplaceContext(selector: (value: MarketplaceContextValue) => any) { @@ -107,6 +109,7 @@ export const MarketplaceContextProvider = ({ searchParams, shouldExclude, scrollContainerId, + showSearchParams, }: MarketplaceContextProviderProps) => { const { data, isSuccess } = useInstalledPluginList(!shouldExclude) const exclude = useMemo(() => { @@ -159,7 +162,10 @@ export const MarketplaceContextProvider = ({ type: getMarketplaceListFilterType(activePluginTypeRef.current), page: pageRef.current, }) - history.pushState({}, '', `/${searchParams?.language ? `?language=${searchParams?.language}` : ''}`) + const url = new URL(window.location.href) + if (searchParams?.language) + url.searchParams.set('language', searchParams?.language) + history.replaceState({}, '', url) } else { if (shouldExclude && isSuccess) { @@ -182,7 +188,31 @@ export const MarketplaceContextProvider = ({ resetPlugins() }, [exclude, queryMarketplaceCollectionsAndPlugins, resetPlugins]) + const debouncedUpdateSearchParams = useMemo(() => debounce(() => { + updateSearchParams({ + query: searchPluginTextRef.current, + category: activePluginTypeRef.current, + tags: filterPluginTagsRef.current, + }) + }, 500), []) + + const handleUpdateSearchParams = useCallback((debounced?: boolean) => { + if (!showSearchParams) + return + if (debounced) { + debouncedUpdateSearchParams() + } + else { + updateSearchParams({ + query: searchPluginTextRef.current, + category: activePluginTypeRef.current, + tags: filterPluginTagsRef.current, + }) + } + }, [debouncedUpdateSearchParams, showSearchParams]) + const handleQueryPlugins = useCallback((debounced?: boolean) => { + 
handleUpdateSearchParams(debounced) if (debounced) { queryPluginsWithDebounced({ query: searchPluginTextRef.current, @@ -207,17 +237,18 @@ export const MarketplaceContextProvider = ({ page: pageRef.current, }) } - }, [exclude, queryPluginsWithDebounced, queryPlugins]) + }, [exclude, queryPluginsWithDebounced, queryPlugins, handleUpdateSearchParams]) const handleQuery = useCallback((debounced?: boolean) => { if (!searchPluginTextRef.current && !filterPluginTagsRef.current.length) { + handleUpdateSearchParams(debounced) cancelQueryPluginsWithDebounced() handleQueryMarketplaceCollectionsAndPlugins() return } handleQueryPlugins(debounced) - }, [handleQueryMarketplaceCollectionsAndPlugins, handleQueryPlugins, cancelQueryPluginsWithDebounced]) + }, [handleQueryMarketplaceCollectionsAndPlugins, handleQueryPlugins, cancelQueryPluginsWithDebounced, handleUpdateSearchParams]) const handleSearchPluginTextChange = useCallback((text: string) => { setSearchPluginText(text) @@ -242,11 +273,9 @@ export const MarketplaceContextProvider = ({ activePluginTypeRef.current = type setPage(1) pageRef.current = 1 - }, []) - useEffect(() => { handleQuery() - }, [activePluginType, handleQuery]) + }, [handleQuery]) const handleSortChange = useCallback((sort: PluginsSort) => { setSort(sort) diff --git a/web/app/components/plugins/marketplace/index.tsx b/web/app/components/plugins/marketplace/index.tsx index 5e6fbeec97..7a29556bda 100644 --- a/web/app/components/plugins/marketplace/index.tsx +++ b/web/app/components/plugins/marketplace/index.tsx @@ -17,6 +17,7 @@ type MarketplaceProps = { pluginTypeSwitchClassName?: string intersectionContainerId?: string scrollContainerId?: string + showSearchParams?: boolean } const Marketplace = async ({ locale, @@ -27,6 +28,7 @@ const Marketplace = async ({ pluginTypeSwitchClassName, intersectionContainerId, scrollContainerId, + showSearchParams = true, }: MarketplaceProps) => { let marketplaceCollections: any = [] let marketplaceCollectionPluginsMap = {} 
@@ -42,6 +44,7 @@ const Marketplace = async ({ searchParams={searchParams} shouldExclude={shouldExclude} scrollContainerId={scrollContainerId} + showSearchParams={showSearchParams} > @@ -53,6 +56,7 @@ const Marketplace = async ({ locale={locale} className={pluginTypeSwitchClassName} searchBoxAutoAnimate={searchBoxAutoAnimate} + showSearchParams={showSearchParams} /> { const { t } = useMixedTranslation(locale) const activePluginType = useMarketplaceContext(s => s.activePluginType) @@ -70,6 +73,23 @@ const PluginTypeSwitch = ({ }, ] + const handlePopState = useCallback(() => { + if (!showSearchParams) + return + const url = new URL(window.location.href) + const category = url.searchParams.get('category') || PLUGIN_TYPE_SEARCH_MAP.all + handleActivePluginTypeChange(category) + }, [showSearchParams, handleActivePluginTypeChange]) + + useEffect(() => { + window.addEventListener('popstate', () => { + handlePopState() + }) + return () => { + window.removeEventListener('popstate', handlePopState) + } + }, [handlePopState]) + return (
{ return 'plugin' } + +export const updateSearchParams = (pluginsSearchParams: PluginsSearchParams) => { + const { query, category, tags } = pluginsSearchParams + const url = new URL(window.location.href) + const categoryChanged = url.searchParams.get('category') !== category + if (query) + url.searchParams.set('q', query) + else + url.searchParams.delete('q') + if (category) + url.searchParams.set('category', category) + else + url.searchParams.delete('category') + if (tags && tags.length) + url.searchParams.set('tags', tags.join(',')) + else + url.searchParams.delete('tags') + history[`${categoryChanged ? 'pushState' : 'replaceState'}`]({}, '', url) +} diff --git a/web/app/components/plugins/plugin-page/context.tsx b/web/app/components/plugins/plugin-page/context.tsx index cf26cd4e08..ae1ad7d053 100644 --- a/web/app/components/plugins/plugin-page/context.tsx +++ b/web/app/components/plugins/plugin-page/context.tsx @@ -12,9 +12,9 @@ import { } from 'use-context-selector' import { useSelector as useAppContextSelector } from '@/context/app-context' import type { FilterState } from './filter-management' -import { useTranslation } from 'react-i18next' import { useTabSearchParams } from '@/hooks/use-tab-searchparams' import { noop } from 'lodash-es' +import { PLUGIN_PAGE_TABS_MAP, usePluginPageTabs } from '../hooks' export type PluginPageContextValue = { containerRef: React.RefObject @@ -53,7 +53,6 @@ export function usePluginPageContext(selector: (value: PluginPageContextValue) = export const PluginPageContextProvider = ({ children, }: PluginPageContextProviderProps) => { - const { t } = useTranslation() const containerRef = useRef(null) const [filters, setFilters] = useState({ categories: [], @@ -63,16 +62,10 @@ export const PluginPageContextProvider = ({ const [currentPluginID, setCurrentPluginID] = useState() const { enable_marketplace } = useAppContextSelector(s => s.systemFeatures) + const tabs = usePluginPageTabs() const options = useMemo(() => { - return [ - { 
value: 'plugins', text: t('common.menus.plugins') }, - ...( - enable_marketplace - ? [{ value: 'discover', text: t('common.menus.exploreMarketplace') }] - : [] - ), - ] - }, [t, enable_marketplace]) + return enable_marketplace ? tabs : tabs.filter(tab => tab.value !== PLUGIN_PAGE_TABS_MAP.marketplace) + }, [tabs, enable_marketplace]) const [activeTab, setActiveTab] = useTabSearchParams({ defaultTab: options[0].value, }) diff --git a/web/app/components/plugins/plugin-page/index.tsx b/web/app/components/plugins/plugin-page/index.tsx index 801eaf6607..072b8ee22f 100644 --- a/web/app/components/plugins/plugin-page/index.tsx +++ b/web/app/components/plugins/plugin-page/index.tsx @@ -40,6 +40,8 @@ import { SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS } from '@/config' import { LanguagesSupported } from '@/i18n/language' import I18n from '@/context/i18n' import { noop } from 'lodash-es' +import { PLUGIN_TYPE_SEARCH_MAP } from '../marketplace/plugin-type-switch' +import { PLUGIN_PAGE_TABS_MAP } from '../hooks' const PACKAGE_IDS_KEY = 'package-ids' const BUNDLE_INFO_KEY = 'bundle-info' @@ -136,40 +138,45 @@ const PluginPage = ({ const setActiveTab = usePluginPageContext(v => v.setActiveTab) const { enable_marketplace } = useAppContextSelector(s => s.systemFeatures) + const isPluginsTab = useMemo(() => activeTab === PLUGIN_PAGE_TABS_MAP.plugins, [activeTab]) + const isExploringMarketplace = useMemo(() => { + const values = Object.values(PLUGIN_TYPE_SEARCH_MAP) + return activeTab === PLUGIN_PAGE_TABS_MAP.marketplace || values.includes(activeTab) + }, [activeTab]) + const uploaderProps = useUploader({ onFileChange: setCurrentFile, containerRef, - enabled: activeTab === 'plugins', + enabled: isPluginsTab, }) const { dragging, fileUploader, fileChangeHandle, removeFile } = uploaderProps - return (
{ - activeTab === 'discover' && ( + isExploringMarketplace && ( <>
- {activeTab === 'plugins' && ( + {isPluginsTab && ( <> {plugins} {dragging && ( @@ -246,7 +253,7 @@ const PluginPage = ({ )} { - activeTab === 'discover' && enable_marketplace && marketplace + isExploringMarketplace && enable_marketplace && marketplace } {showPluginSettingModal && ( diff --git a/web/app/components/plugins/plugin-page/plugins-panel.tsx b/web/app/components/plugins/plugin-page/plugins-panel.tsx index 063cec8721..125e6f0a70 100644 --- a/web/app/components/plugins/plugin-page/plugins-panel.tsx +++ b/web/app/components/plugins/plugin-page/plugins-panel.tsx @@ -3,17 +3,23 @@ import { useMemo } from 'react' import type { FilterState } from './filter-management' import FilterManagement from './filter-management' import List from './list' -import { useInstalledPluginList, useInvalidateInstalledPluginList } from '@/service/use-plugins' +import { useInstalledLatestVersion, useInstalledPluginList, useInvalidateInstalledPluginList } from '@/service/use-plugins' import PluginDetailPanel from '@/app/components/plugins/plugin-detail-panel' import { usePluginPageContext } from './context' import { useDebounceFn } from 'ahooks' import Empty from './empty' import Loading from '../../base/loading' +import { PluginSource } from '../types' const PluginsPanel = () => { const filters = usePluginPageContext(v => v.filters) as FilterState const setFilters = usePluginPageContext(v => v.setFilters) const { data: pluginList, isLoading: isPluginListLoading } = useInstalledPluginList() + const { data: installedLatestVersion } = useInstalledLatestVersion( + pluginList?.plugins + .filter(plugin => plugin.source === PluginSource.marketplace) + .map(plugin => plugin.plugin_id) ?? 
[], + ) const invalidateInstalledPluginList = useInvalidateInstalledPluginList() const currentPluginID = usePluginPageContext(v => v.currentPluginID) const setCurrentPluginID = usePluginPageContext(v => v.setCurrentPluginID) @@ -22,9 +28,17 @@ const PluginsPanel = () => { setFilters(filters) }, { wait: 500 }) + const pluginListWithLatestVersion = useMemo(() => { + return pluginList?.plugins.map(plugin => ({ + ...plugin, + latest_version: installedLatestVersion?.versions[plugin.plugin_id]?.version ?? '', + latest_unique_identifier: installedLatestVersion?.versions[plugin.plugin_id]?.unique_identifier ?? '', + })) || [] + }, [pluginList, installedLatestVersion]) + const filteredList = useMemo(() => { const { categories, searchQuery, tags } = filters - const filteredList = pluginList?.plugins.filter((plugin) => { + const filteredList = pluginListWithLatestVersion.filter((plugin) => { return ( (categories.length === 0 || categories.includes(plugin.declaration.category)) && (tags.length === 0 || tags.some(tag => plugin.declaration.tags.includes(tag))) @@ -32,12 +46,12 @@ const PluginsPanel = () => { ) }) return filteredList - }, [pluginList, filters]) + }, [pluginListWithLatestVersion, filters]) const currentPluginDetail = useMemo(() => { - const detail = pluginList?.plugins.find(plugin => plugin.plugin_id === currentPluginID) + const detail = pluginListWithLatestVersion.find(plugin => plugin.plugin_id === currentPluginID) return detail - }, [currentPluginID, pluginList?.plugins]) + }, [currentPluginID, pluginListWithLatestVersion]) const handleHide = () => setCurrentPluginID(undefined) diff --git a/web/app/components/plugins/types.ts b/web/app/components/plugins/types.ts index 1ed379511b..64f15a08a9 100644 --- a/web/app/components/plugins/types.ts +++ b/web/app/components/plugins/types.ts @@ -318,6 +318,15 @@ export type InstalledPluginListResponse = { plugins: PluginDetail[] } +export type InstalledLatestVersionResponse = { + versions: { + [plugin_id: string]: { + 
unique_identifier: string + version: string + } | null + } +} + export type UninstallPluginResponse = { success: boolean } diff --git a/web/app/components/share/text-generation/run-once/index.tsx b/web/app/components/share/text-generation/run-once/index.tsx index f31c5d5e85..e413bd53ac 100644 --- a/web/app/components/share/text-generation/run-once/index.tsx +++ b/web/app/components/share/text-generation/run-once/index.tsx @@ -1,4 +1,4 @@ -import type { FC, FormEvent } from 'react' +import type { ChangeEvent, FC, FormEvent } from 'react' import { useEffect } from 'react' import React, { useCallback } from 'react' import { useTranslation } from 'react-i18next' @@ -66,75 +66,73 @@ const RunOnce: FC = ({ newInputs[item.key] = '' }) onInputsChange(newInputs) - }, [promptConfig.prompt_variables]) - - if (inputs === null || inputs === undefined || Object.keys(inputs).length === 0) - return null + }, [promptConfig.prompt_variables, onInputsChange]) return (
{/* input form */}
- {promptConfig.prompt_variables.map(item => ( -
- -
- {item.type === 'select' && ( - { handleInputsChange({ ...inputsRef.current, [item.key]: e.target.value }) }} - maxLength={item.max_length || DEFAULT_VALUE_MAX_LEN} - /> - )} - {item.type === 'paragraph' && ( -