diff --git a/api/controllers/console/__init__.py b/api/controllers/console/__init__.py index e7b9d4ef72..a974c63e35 100644 --- a/api/controllers/console/__init__.py +++ b/api/controllers/console/__init__.py @@ -81,6 +81,7 @@ from .datasets import ( datasets_segments, external, hit_testing, + metadata, website, ) diff --git a/api/controllers/console/datasets/datasets_document.py b/api/controllers/console/datasets/datasets_document.py index 38a80b4129..0b40312368 100644 --- a/api/controllers/console/datasets/datasets_document.py +++ b/api/controllers/console/datasets/datasets_document.py @@ -621,7 +621,7 @@ class DocumentDetailApi(DocumentResource): raise InvalidMetadataError(f"Invalid metadata value: {metadata}") if metadata == "only": - response = {"id": document.id, "doc_type": document.doc_type, "doc_metadata": document.doc_metadata} + response = {"id": document.id, "doc_type": document.doc_type, "doc_metadata": document.doc_metadata_details} elif metadata == "without": dataset_process_rules = DatasetService.get_process_rules(dataset_id) document_process_rules = document.dataset_process_rule.to_dict() @@ -682,7 +682,7 @@ class DocumentDetailApi(DocumentResource): "disabled_by": document.disabled_by, "archived": document.archived, "doc_type": document.doc_type, - "doc_metadata": document.doc_metadata, + "doc_metadata": document.doc_metadata_details, "segment_count": document.segment_count, "average_segment_length": document.average_segment_length, "hit_count": document.hit_count, diff --git a/api/controllers/console/datasets/metadata.py b/api/controllers/console/datasets/metadata.py new file mode 100644 index 0000000000..14183a1e67 --- /dev/null +++ b/api/controllers/console/datasets/metadata.py @@ -0,0 +1,155 @@ +from flask_login import current_user # type: ignore # type: ignore +from flask_restful import Resource, marshal_with, reqparse # type: ignore +from werkzeug.exceptions import NotFound + +from controllers.console import api +from controllers.console.wraps import account_initialization_required, enterprise_license_required, setup_required +from fields.dataset_fields import dataset_metadata_fields +from libs.login import login_required +from services.dataset_service import DatasetService +from services.entities.knowledge_entities.knowledge_entities import ( + MetadataArgs, + MetadataOperationData, +) +from services.metadata_service import MetadataService + + +def _validate_name(name): + if not name or len(name) < 1 or len(name) > 40: + raise ValueError("Name must be between 1 to 40 characters.") + return name + + +def _validate_description_length(description): + if len(description) > 400: + raise ValueError("Description cannot exceed 400 characters.") + return description + + +class DatasetMetadataCreateApi(Resource): + @setup_required + @login_required + @account_initialization_required + @enterprise_license_required + @marshal_with(dataset_metadata_fields) + def post(self, dataset_id): + parser = reqparse.RequestParser() + parser.add_argument("type", type=str, required=True, nullable=True, location="json") + parser.add_argument("name", type=str, required=True, nullable=True, location="json") + args = parser.parse_args() + metadata_args = MetadataArgs(**args) + + dataset_id_str = str(dataset_id) + dataset = DatasetService.get_dataset(dataset_id_str) + if dataset is None: + raise NotFound("Dataset not found.") + DatasetService.check_dataset_permission(dataset, current_user) + + metadata = MetadataService.create_metadata(dataset_id_str, metadata_args) + return metadata, 201 + + 
@setup_required + @login_required + @account_initialization_required + @enterprise_license_required + def get(self, dataset_id): + dataset_id_str = str(dataset_id) + dataset = DatasetService.get_dataset(dataset_id_str) + if dataset is None: + raise NotFound("Dataset not found.") + return MetadataService.get_dataset_metadatas(dataset), 200 + + +class DatasetMetadataApi(Resource): + @setup_required + @login_required + @account_initialization_required + @enterprise_license_required + @marshal_with(dataset_metadata_fields) + def patch(self, dataset_id, metadata_id): + parser = reqparse.RequestParser() + parser.add_argument("name", type=str, required=True, nullable=True, location="json") + args = parser.parse_args() + + dataset_id_str = str(dataset_id) + metadata_id_str = str(metadata_id) + dataset = DatasetService.get_dataset(dataset_id_str) + if dataset is None: + raise NotFound("Dataset not found.") + DatasetService.check_dataset_permission(dataset, current_user) + + metadata = MetadataService.update_metadata_name(dataset_id_str, metadata_id_str, args.get("name")) + return metadata, 200 + + @setup_required + @login_required + @account_initialization_required + @enterprise_license_required + def delete(self, dataset_id, metadata_id): + dataset_id_str = str(dataset_id) + metadata_id_str = str(metadata_id) + dataset = DatasetService.get_dataset(dataset_id_str) + if dataset is None: + raise NotFound("Dataset not found.") + DatasetService.check_dataset_permission(dataset, current_user) + + MetadataService.delete_metadata(dataset_id_str, metadata_id_str) + return 200 + + +class DatasetMetadataBuiltInFieldApi(Resource): + @setup_required + @login_required + @account_initialization_required + @enterprise_license_required + def get(self): + built_in_fields = MetadataService.get_built_in_fields() + return {"fields": built_in_fields}, 200 + + +class DatasetMetadataBuiltInFieldActionApi(Resource): + @setup_required + @login_required + @account_initialization_required + @enterprise_license_required + def post(self, dataset_id, action): + dataset_id_str = str(dataset_id) + dataset = DatasetService.get_dataset(dataset_id_str) + if dataset is None: + raise NotFound("Dataset not found.") + DatasetService.check_dataset_permission(dataset, current_user) + + if action == "enable": + MetadataService.enable_built_in_field(dataset) + elif action == "disable": + MetadataService.disable_built_in_field(dataset) + return 200 + + +class DocumentMetadataEditApi(Resource): + @setup_required + @login_required + @account_initialization_required + @enterprise_license_required + def post(self, dataset_id): + dataset_id_str = str(dataset_id) + dataset = DatasetService.get_dataset(dataset_id_str) + if dataset is None: + raise NotFound("Dataset not found.") + DatasetService.check_dataset_permission(dataset, current_user) + + parser = reqparse.RequestParser() + parser.add_argument("operation_data", type=list, required=True, nullable=True, location="json") + args = parser.parse_args() + metadata_args = MetadataOperationData(**args) + + MetadataService.update_documents_metadata(dataset, metadata_args) + + return 200 + + +api.add_resource(DatasetMetadataCreateApi, "/datasets/<uuid:dataset_id>/metadata") +api.add_resource(DatasetMetadataApi, "/datasets/<uuid:dataset_id>/metadata/<uuid:metadata_id>") +api.add_resource(DatasetMetadataBuiltInFieldApi, "/datasets/metadata/built-in") +api.add_resource(DatasetMetadataBuiltInFieldActionApi, "/datasets/<uuid:dataset_id>/metadata/built-in/<string:action>") +api.add_resource(DocumentMetadataEditApi, "/datasets/<uuid:dataset_id>/documents/metadata") diff --git 
a/api/core/app/app_config/easy_ui_based_app/dataset/manager.py b/api/core/app/app_config/easy_ui_based_app/dataset/manager.py index a1a65e2287..20189053f4 100644 --- a/api/core/app/app_config/easy_ui_based_app/dataset/manager.py +++ b/api/core/app/app_config/easy_ui_based_app/dataset/manager.py @@ -1,7 +1,12 @@ import uuid from typing import Optional -from core.app.app_config.entities import DatasetEntity, DatasetRetrieveConfigEntity +from core.app.app_config.entities import ( + DatasetEntity, + DatasetRetrieveConfigEntity, + MetadataFilteringCondition, + ModelConfig, +) from core.entities.agent_entities import PlanningStrategy from models.model import AppMode from services.dataset_service import DatasetService @@ -78,6 +83,15 @@ class DatasetConfigManager: retrieve_strategy=DatasetRetrieveConfigEntity.RetrieveStrategy.value_of( dataset_configs["retrieval_model"] ), + metadata_filtering_mode=dataset_configs.get("metadata_filtering_mode", "disabled"), + metadata_model_config=ModelConfig(**dataset_configs.get("metadata_model_config")) + if dataset_configs.get("metadata_model_config") + else None, + metadata_filtering_conditions=MetadataFilteringCondition( + **dataset_configs.get("metadata_filtering_conditions", {}) + ) + if dataset_configs.get("metadata_filtering_conditions") + else None, ), ) else: @@ -96,6 +110,15 @@ class DatasetConfigManager: weights=dataset_configs.get("weights"), reranking_enabled=dataset_configs.get("reranking_enabled", True), rerank_mode=dataset_configs.get("reranking_mode", "reranking_model"), + metadata_filtering_mode=dataset_configs.get("metadata_filtering_mode", "disabled"), + metadata_model_config=ModelConfig(**dataset_configs.get("metadata_model_config")) + if dataset_configs.get("metadata_model_config") + else None, + metadata_filtering_conditions=MetadataFilteringCondition( + **dataset_configs.get("metadata_filtering_conditions", {}) + ) + if dataset_configs.get("metadata_filtering_conditions") + else None, ), ) diff --git a/api/core/app/app_config/entities.py b/api/core/app/app_config/entities.py index 16b69a4468..8ae52131f2 100644 --- a/api/core/app/app_config/entities.py +++ b/api/core/app/app_config/entities.py @@ -1,10 +1,11 @@ from collections.abc import Sequence from enum import Enum, StrEnum -from typing import Any, Optional +from typing import Any, Literal, Optional from pydantic import BaseModel, Field, field_validator from core.file import FileTransferMethod, FileType, FileUploadConfig +from core.model_runtime.entities.llm_entities import LLMMode from core.model_runtime.entities.message_entities import PromptMessageRole from models.model import AppMode @@ -135,6 +136,55 @@ class ExternalDataVariableEntity(BaseModel): config: dict[str, Any] = Field(default_factory=dict) +SupportedComparisonOperator = Literal[ + # for string or array + "contains", + "not contains", + "start with", + "end with", + "is", + "is not", + "empty", + "not empty", + # for number + "=", + "≠", + ">", + "<", + "≥", + "≤", + # for time + "before", + "after", +] + + +class ModelConfig(BaseModel): + provider: str + name: str + mode: LLMMode + completion_params: dict[str, Any] = {} + + +class Condition(BaseModel): + """ + Conditon detail + """ + + name: str + comparison_operator: SupportedComparisonOperator + value: str | Sequence[str] | None | int | float = None + + +class MetadataFilteringCondition(BaseModel): + """ + Metadata Filtering Condition. 
+ """ + + logical_operator: Optional[Literal["and", "or"]] = "and" + conditions: Optional[list[Condition]] = Field(default=None, deprecated=True) + + class DatasetRetrieveConfigEntity(BaseModel): """ Dataset Retrieve Config Entity. @@ -171,6 +221,9 @@ class DatasetRetrieveConfigEntity(BaseModel): reranking_model: Optional[dict] = None weights: Optional[dict] = None reranking_enabled: Optional[bool] = True + metadata_filtering_mode: Optional[Literal["disabled", "automatic", "manual"]] = "disabled" + metadata_model_config: Optional[ModelConfig] = None + metadata_filtering_conditions: Optional[MetadataFilteringCondition] = None class DatasetEntity(BaseModel): diff --git a/api/core/app/apps/chat/app_runner.py b/api/core/app/apps/chat/app_runner.py index 46c8031633..8641f188f7 100644 --- a/api/core/app/apps/chat/app_runner.py +++ b/api/core/app/apps/chat/app_runner.py @@ -180,6 +180,7 @@ class ChatAppRunner(AppRunner): hit_callback=hit_callback, memory=memory, message_id=message.id, + inputs=inputs, ) # reorganize all inputs and template to prompt messages diff --git a/api/core/app/apps/completion/app_runner.py b/api/core/app/apps/completion/app_runner.py index 0ed06c9c98..4f16247318 100644 --- a/api/core/app/apps/completion/app_runner.py +++ b/api/core/app/apps/completion/app_runner.py @@ -139,6 +139,7 @@ class CompletionAppRunner(AppRunner): show_retrieve_source=app_config.additional_features.show_retrieve_source, hit_callback=hit_callback, message_id=message.id, + inputs=inputs, ) # reorganize all inputs and template to prompt messages diff --git a/api/core/rag/datasource/keyword/jieba/jieba.py b/api/core/rag/datasource/keyword/jieba/jieba.py index 95a2316f1d..d6d0bd88b2 100644 --- a/api/core/rag/datasource/keyword/jieba/jieba.py +++ b/api/core/rag/datasource/keyword/jieba/jieba.py @@ -88,16 +88,17 @@ class Jieba(BaseKeyword): keyword_table = self._get_dataset_keyword_table() k = kwargs.get("top_k", 4) - + document_ids_filter = kwargs.get("document_ids_filter") sorted_chunk_indices = self._retrieve_ids_by_query(keyword_table or {}, query, k) documents = [] for chunk_index in sorted_chunk_indices: - segment = ( - db.session.query(DocumentSegment) - .filter(DocumentSegment.dataset_id == self.dataset.id, DocumentSegment.index_node_id == chunk_index) - .first() + segment_query = db.session.query(DocumentSegment).filter( + DocumentSegment.dataset_id == self.dataset.id, DocumentSegment.index_node_id == chunk_index ) + if document_ids_filter: + segment_query = segment_query.filter(DocumentSegment.document_id.in_(document_ids_filter)) + segment = segment_query.first() if segment: documents.append( diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py index 3904bf6231..c0c0d21a2b 100644 --- a/api/core/rag/datasource/retrieval_service.py +++ b/api/core/rag/datasource/retrieval_service.py @@ -41,6 +41,7 @@ class RetrievalService: reranking_model: Optional[dict] = None, reranking_mode: str = "reranking_model", weights: Optional[dict] = None, + document_ids_filter: Optional[list[str]] = None, ): if not query: return [] @@ -64,6 +65,7 @@ class RetrievalService: top_k=top_k, all_documents=all_documents, exceptions=exceptions, + document_ids_filter=document_ids_filter, ) ) if RetrievalMethod.is_support_semantic_search(retrieval_method): @@ -79,6 +81,7 @@ class RetrievalService: all_documents=all_documents, retrieval_method=retrieval_method, exceptions=exceptions, + document_ids_filter=document_ids_filter, ) ) if 
RetrievalMethod.is_support_fulltext_search(retrieval_method): @@ -130,7 +133,14 @@ class RetrievalService: @classmethod def keyword_search( - cls, flask_app: Flask, dataset_id: str, query: str, top_k: int, all_documents: list, exceptions: list + cls, + flask_app: Flask, + dataset_id: str, + query: str, + top_k: int, + all_documents: list, + exceptions: list, + document_ids_filter: Optional[list[str]] = None, ): with flask_app.app_context(): try: @@ -139,7 +149,10 @@ class RetrievalService: raise ValueError("dataset not found") keyword = Keyword(dataset=dataset) - documents = keyword.search(cls.escape_query_for_search(query), top_k=top_k) + + documents = keyword.search( + cls.escape_query_for_search(query), top_k=top_k, document_ids_filter=document_ids_filter + ) all_documents.extend(documents) except Exception as e: exceptions.append(str(e)) @@ -156,6 +169,7 @@ class RetrievalService: all_documents: list, retrieval_method: str, exceptions: list, + document_ids_filter: Optional[list[str]] = None, ): with flask_app.app_context(): try: @@ -170,6 +184,7 @@ class RetrievalService: top_k=top_k, score_threshold=score_threshold, filter={"group_id": [dataset.id]}, + document_ids_filter=document_ids_filter, ) if documents: diff --git a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector.py b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector.py index 603d3fdbcd..b9e488362e 100644 --- a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector.py +++ b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector.py @@ -53,7 +53,7 @@ class AnalyticdbVector(BaseVector): self.analyticdb_vector.delete_by_metadata_field(key, value) def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: - return self.analyticdb_vector.search_by_vector(query_vector) + return self.analyticdb_vector.search_by_vector(query_vector, **kwargs) def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: return self.analyticdb_vector.search_by_full_text(query, **kwargs) diff --git a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py index 0b2f4cf6e2..ec9c9d94cc 100644 --- a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py +++ b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_sql.py @@ -196,6 +196,11 @@ class AnalyticdbVectorBySql: top_k = kwargs.get("top_k", 4) if not isinstance(top_k, int) or top_k <= 0: raise ValueError("top_k must be a positive integer") + document_ids_filter = kwargs.get("document_ids_filter") + where_clause = "WHERE 1=1" + if document_ids_filter: + document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) + where_clause += f"AND metadata_->>'document_id' IN ({document_ids})" score_threshold = float(kwargs.get("score_threshold") or 0.0) with self._get_cursor() as cur: query_vector_str = json.dumps(query_vector) @@ -204,7 +209,7 @@ class AnalyticdbVectorBySql: f"SELECT t.id AS id, t.vector AS vector, (1.0 - t.score) AS score, " f"t.page_content as page_content, t.metadata_ AS metadata_ " f"FROM (SELECT id, vector, page_content, metadata_, vector <=> %s AS score " - f"FROM {self.table_name} ORDER BY score LIMIT {top_k} ) t", + f"FROM {self.table_name} {where_clause} ORDER BY score LIMIT {top_k} ) t", (query_vector_str,), ) documents = [] @@ -224,12 +229,17 @@ class AnalyticdbVectorBySql: top_k = kwargs.get("top_k", 4) if not isinstance(top_k, int) or top_k <= 0: raise ValueError("top_k must be a positive integer") + document_ids_filter = 
kwargs.get("document_ids_filter") + where_clause = "" + if document_ids_filter: + document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) + where_clause += f"AND metadata_->>'document_id' IN ({document_ids})" with self._get_cursor() as cur: cur.execute( f"""SELECT id, vector, page_content, metadata_, ts_rank(to_tsvector, to_tsquery_from_text(%s, 'zh_cn'), 32) AS score FROM {self.table_name} - WHERE to_tsvector@@to_tsquery_from_text(%s, 'zh_cn') + WHERE to_tsvector@@to_tsquery_from_text(%s, 'zh_cn') {where_clause} ORDER BY score DESC LIMIT {top_k}""", (f"'{query}'", f"'{query}'"), diff --git a/api/core/rag/datasource/vdb/baidu/baidu_vector.py b/api/core/rag/datasource/vdb/baidu/baidu_vector.py index a658495af7..86f1f5bfe4 100644 --- a/api/core/rag/datasource/vdb/baidu/baidu_vector.py +++ b/api/core/rag/datasource/vdb/baidu/baidu_vector.py @@ -123,11 +123,21 @@ class BaiduVector(BaseVector): def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: query_vector = [float(val) if isinstance(val, np.float64) else val for val in query_vector] - anns = AnnSearch( - vector_field=self.field_vector, - vector_floats=query_vector, - params=HNSWSearchParams(ef=kwargs.get("ef", 10), limit=kwargs.get("top_k", 4)), - ) + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) + anns = AnnSearch( + vector_field=self.field_vector, + vector_floats=query_vector, + params=HNSWSearchParams(ef=kwargs.get("ef", 10), limit=kwargs.get("top_k", 4)), + filter=f"document_id IN ({document_ids})", + ) + else: + anns = AnnSearch( + vector_field=self.field_vector, + vector_floats=query_vector, + params=HNSWSearchParams(ef=kwargs.get("ef", 10), limit=kwargs.get("top_k", 4)), + ) res = self._db.table(self._collection_name).search( anns=anns, projections=[self.field_id, self.field_text, self.field_metadata], diff --git a/api/core/rag/datasource/vdb/chroma/chroma_vector.py b/api/core/rag/datasource/vdb/chroma/chroma_vector.py index 81a1207d1d..b8b265d5e6 100644 --- a/api/core/rag/datasource/vdb/chroma/chroma_vector.py +++ b/api/core/rag/datasource/vdb/chroma/chroma_vector.py @@ -95,7 +95,15 @@ class ChromaVector(BaseVector): def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: collection = self._client.get_or_create_collection(self._collection_name) - results: QueryResult = collection.query(query_embeddings=query_vector, n_results=kwargs.get("top_k", 4)) + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + results: QueryResult = collection.query( + query_embeddings=query_vector, + n_results=kwargs.get("top_k", 4), + where={"document_id": {"$in": document_ids_filter}}, # type: ignore + ) + else: + results: QueryResult = collection.query(query_embeddings=query_vector, n_results=kwargs.get("top_k", 4)) # type: ignore score_threshold = float(kwargs.get("score_threshold") or 0.0) # Check if results contain data diff --git a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py index cca696baee..093368b0cc 100644 --- a/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py +++ b/api/core/rag/datasource/vdb/elasticsearch/elasticsearch_vector.py @@ -117,6 +117,9 @@ class ElasticSearchVector(BaseVector): top_k = kwargs.get("top_k", 4) num_candidates = math.ceil(top_k * 1.5) knn = {"field": Field.VECTOR.value, "query_vector": query_vector, "k": 
top_k, "num_candidates": num_candidates} + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + knn["filter"] = {"terms": {"metadata.document_id": document_ids_filter}} results = self._client.search(index=self._collection_name, knn=knn, size=top_k) @@ -145,6 +148,9 @@ class ElasticSearchVector(BaseVector): def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: query_str = {"match": {Field.CONTENT_KEY.value: query}} + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + query_str["filter"] = {"terms": {"metadata.document_id": document_ids_filter}} # type: ignore results = self._client.search(index=self._collection_name, query=query_str, size=kwargs.get("top_k", 4)) docs = [] for hit in results["hits"]["hits"]: diff --git a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py index f403337623..d3f5283034 100644 --- a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py +++ b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py @@ -168,7 +168,12 @@ class LindormVectorStore(BaseVector): raise ValueError("All elements in query_vector should be floats") top_k = kwargs.get("top_k", 10) - query = default_vector_search_query(query_vector=query_vector, k=top_k, **kwargs) + document_ids_filter = kwargs.get("document_ids_filter") + filters = [] + if document_ids_filter: + filters.append({"terms": {"metadata.document_id": document_ids_filter}}) + query = default_vector_search_query(query_vector=query_vector, k=top_k, filters=filters, **kwargs) + try: params = {} if self._using_ugc: @@ -206,7 +211,10 @@ class LindormVectorStore(BaseVector): should = kwargs.get("should") minimum_should_match = kwargs.get("minimum_should_match", 0) top_k = kwargs.get("top_k", 10) - filters = kwargs.get("filter") + filters = kwargs.get("filter", []) + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + filters.append({"terms": {"metadata.document_id": document_ids_filter}}) routing = self._routing full_text_query = default_text_search_query( query_text=query, diff --git a/api/core/rag/datasource/vdb/milvus/milvus_vector.py b/api/core/rag/datasource/vdb/milvus/milvus_vector.py index cc29b92825..a1180a6505 100644 --- a/api/core/rag/datasource/vdb/milvus/milvus_vector.py +++ b/api/core/rag/datasource/vdb/milvus/milvus_vector.py @@ -228,12 +228,18 @@ class MilvusVector(BaseVector): """ Search for documents by vector similarity. 
""" + document_ids_filter = kwargs.get("document_ids_filter") + filter = "" + if document_ids_filter: + document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) + filter = f'metadata["document_id"] in ({document_ids})' results = self._client.search( collection_name=self._collection_name, data=[query_vector], anns_field=Field.VECTOR.value, limit=kwargs.get("top_k", 4), output_fields=[Field.CONTENT_KEY.value, Field.METADATA_KEY.value], + filter=filter, ) return self._process_search_results( @@ -249,6 +255,11 @@ class MilvusVector(BaseVector): if not self._hybrid_search_enabled or not self.field_exists(Field.SPARSE_VECTOR.value): logger.warning("Full-text search is not supported in current Milvus version (requires >= 2.5.0)") return [] + document_ids_filter = kwargs.get("document_ids_filter") + filter = "" + if document_ids_filter: + document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) + filter = f'metadata["document_id"] in ({document_ids})' results = self._client.search( collection_name=self._collection_name, @@ -256,6 +267,7 @@ class MilvusVector(BaseVector): anns_field=Field.SPARSE_VECTOR.value, limit=kwargs.get("top_k", 4), output_fields=[Field.CONTENT_KEY.value, Field.METADATA_KEY.value], + filter=filter, ) return self._process_search_results( diff --git a/api/core/rag/datasource/vdb/myscale/myscale_vector.py b/api/core/rag/datasource/vdb/myscale/myscale_vector.py index 3223010966..dbb1a7fe19 100644 --- a/api/core/rag/datasource/vdb/myscale/myscale_vector.py +++ b/api/core/rag/datasource/vdb/myscale/myscale_vector.py @@ -133,6 +133,10 @@ class MyScaleVector(BaseVector): if self._metric.upper() == "COSINE" and order == SortOrder.ASC and score_threshold > 0.0 else "" ) + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) + where_str = f"{where_str} AND metadata['document_id'] in ({document_ids})" sql = f""" SELECT text, vector, metadata, {dist} as dist FROM {self._config.database}.{self._collection_name} {where_str} ORDER BY dist {order.value} LIMIT {top_k} diff --git a/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py index 3c2d53ce78..8ff97f2f26 100644 --- a/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py +++ b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py @@ -154,6 +154,11 @@ class OceanBaseVector(BaseVector): return [] def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + document_ids_filter = kwargs.get("document_ids_filter") + where_clause = None + if document_ids_filter: + document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) + where_clause = f"metadata->>'$.document_id' in ({document_ids})" ef_search = kwargs.get("ef_search", self._hnsw_ef_search) if ef_search != self._hnsw_ef_search: self._client.set_ob_hnsw_ef_search(ef_search) @@ -167,6 +172,7 @@ class OceanBaseVector(BaseVector): distance_func=func.l2_distance, output_column_names=["text", "metadata"], with_dist=True, + where_clause=where_clause, ) docs = [] for text, metadata, distance in cur: diff --git a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py index 72a1502205..6636646cff 100644 --- a/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py +++ b/api/core/rag/datasource/vdb/opensearch/opensearch_vector.py @@ -154,6 +154,9 @@ class OpenSearchVector(BaseVector): "size": 
kwargs.get("top_k", 4), "query": {"knn": {Field.VECTOR.value: {Field.VECTOR.value: query_vector, "k": kwargs.get("top_k", 4)}}}, } + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + query["query"] = {"terms": {"metadata.document_id": document_ids_filter}} try: response = self._client.search(index=self._collection_name.lower(), body=query) @@ -179,6 +182,9 @@ class OpenSearchVector(BaseVector): def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: full_text_query = {"query": {"match": {Field.CONTENT_KEY.value: query}}} + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + full_text_query["query"]["terms"] = {"metadata.document_id": document_ids_filter} response = self._client.search(index=self._collection_name.lower(), body=full_text_query) diff --git a/api/core/rag/datasource/vdb/oracle/oraclevector.py b/api/core/rag/datasource/vdb/oracle/oraclevector.py index e5ca4492c6..5888e04c71 100644 --- a/api/core/rag/datasource/vdb/oracle/oraclevector.py +++ b/api/core/rag/datasource/vdb/oracle/oraclevector.py @@ -201,10 +201,15 @@ class OracleVector(BaseVector): :return: List of Documents that are nearest to the query vector. """ top_k = kwargs.get("top_k", 4) + document_ids_filter = kwargs.get("document_ids_filter") + where_clause = "" + if document_ids_filter: + document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) + where_clause = f"WHERE metadata->>'document_id' in ({document_ids})" with self._get_cursor() as cur: cur.execute( f"SELECT meta, text, vector_distance(embedding,:1) AS distance FROM {self.table_name}" - f" ORDER BY distance fetch first {top_k} rows only", + f" {where_clause} ORDER BY distance fetch first {top_k} rows only", [numpy.array(query_vector)], ) docs = [] @@ -257,9 +262,15 @@ class OracleVector(BaseVector): if token not in stop_words: entities.append(token) with self._get_cursor() as cur: + document_ids_filter = kwargs.get("document_ids_filter") + where_clause = "" + if document_ids_filter: + document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) + where_clause = f" AND metadata->>'document_id' in ({document_ids}) " cur.execute( f"select meta, text, embedding FROM {self.table_name}" - f" WHERE CONTAINS(text, :1, 1) > 0 order by score(1) desc fetch first {top_k} rows only", + f"WHERE CONTAINS(text, :1, 1) > 0 {where_clause} " + f"order by score(1) desc fetch first {top_k} rows only", [" ACCUM ".join(entities)], ) docs = [] diff --git a/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py b/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py index 221bc68d68..46aefef11d 100644 --- a/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py +++ b/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py @@ -189,6 +189,9 @@ class PGVectoRS(BaseVector): .limit(kwargs.get("top_k", 4)) .order_by("distance") ) + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + stmt = stmt.where(self._table.meta["document_id"].in_(document_ids_filter)) res = session.execute(stmt) results = [(row[0], row[1]) for row in res] diff --git a/api/core/rag/datasource/vdb/pgvector/pgvector.py b/api/core/rag/datasource/vdb/pgvector/pgvector.py index b6153a5b09..783ad93b0c 100644 --- a/api/core/rag/datasource/vdb/pgvector/pgvector.py +++ b/api/core/rag/datasource/vdb/pgvector/pgvector.py @@ -173,10 +173,16 @@ class PGVector(BaseVector): top_k = kwargs.get("top_k", 4) if not isinstance(top_k, int) or top_k <= 0: raise ValueError("top_k must be a positive integer") + 
document_ids_filter = kwargs.get("document_ids_filter") + where_clause = "" + if document_ids_filter: + document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) + where_clause = f" WHERE metadata->>'document_id' in ({document_ids}) " with self._get_cursor() as cur: cur.execute( f"SELECT meta, text, embedding <=> %s AS distance FROM {self.table_name}" + f" {where_clause}" f" ORDER BY distance LIMIT {top_k}", (json.dumps(query_vector),), ) @@ -195,12 +201,18 @@ class PGVector(BaseVector): if not isinstance(top_k, int) or top_k <= 0: raise ValueError("top_k must be a positive integer") with self._get_cursor() as cur: + document_ids_filter = kwargs.get("document_ids_filter") + where_clause = "" + if document_ids_filter: + document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) + where_clause = f" AND metadata->>'document_id' in ({document_ids}) " if self.pg_bigm: cur.execute("SET pg_bigm.similarity_limit TO 0.000001") cur.execute( f"""SELECT meta, text, bigm_similarity(unistr(%s), coalesce(text, '')) AS score FROM {self.table_name} WHERE text =%% unistr(%s) + {where_clause} ORDER BY score DESC LIMIT {top_k}""", # f"'{query}'" is required in order to account for whitespace in query @@ -211,6 +223,7 @@ class PGVector(BaseVector): f"""SELECT meta, text, ts_rank(to_tsvector(coalesce(text, '')), plainto_tsquery(%s)) AS score FROM {self.table_name} WHERE to_tsvector(text) @@ plainto_tsquery(%s) + {where_clause} ORDER BY score DESC LIMIT {top_k}""", # f"'{query}'" is required in order to account for whitespace in query diff --git a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py index 6e94cb69db..5597871cc1 100644 --- a/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py +++ b/api/core/rag/datasource/vdb/qdrant/qdrant_vector.py @@ -286,27 +286,26 @@ class QdrantVector(BaseVector): from qdrant_client.http import models from qdrant_client.http.exceptions import UnexpectedResponse - for node_id in ids: - try: - filter = models.Filter( - must=[ - models.FieldCondition( - key="metadata.doc_id", - match=models.MatchValue(value=node_id), - ), - ], - ) - self._client.delete( - collection_name=self._collection_name, - points_selector=FilterSelector(filter=filter), - ) - except UnexpectedResponse as e: - # Collection does not exist, so return - if e.status_code == 404: - return - # Some other error occurred, so re-raise the exception - else: - raise e + try: + filter = models.Filter( + must=[ + models.FieldCondition( + key="metadata.doc_id", + match=models.MatchAny(any=ids), + ), + ], + ) + self._client.delete( + collection_name=self._collection_name, + points_selector=FilterSelector(filter=filter), + ) + except UnexpectedResponse as e: + # Collection does not exist, so return + if e.status_code == 404: + return + # Some other error occurred, so re-raise the exception + else: + raise e def text_exists(self, id: str) -> bool: all_collection_name = [] @@ -331,6 +330,15 @@ class QdrantVector(BaseVector): ), ], ) + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + if filter.must: + filter.must.append( + models.FieldCondition( + key="metadata.document_id", + match=models.MatchAny(any=document_ids_filter), + ) + ) results = self._client.search( collection_name=self._collection_name, query_vector=query_vector, @@ -377,6 +385,15 @@ class QdrantVector(BaseVector): ), ] ) + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + if scroll_filter.must: + scroll_filter.must.append( + 
models.FieldCondition( + key="metadata.document_id", + match=models.MatchAny(any=document_ids_filter), + ) + ) response = self._client.scroll( collection_name=self._collection_name, scroll_filter=scroll_filter, diff --git a/api/core/rag/datasource/vdb/relyt/relyt_vector.py b/api/core/rag/datasource/vdb/relyt/relyt_vector.py index a3a20448ff..a6c4baf4f8 100644 --- a/api/core/rag/datasource/vdb/relyt/relyt_vector.py +++ b/api/core/rag/datasource/vdb/relyt/relyt_vector.py @@ -223,8 +223,12 @@ class RelytVector(BaseVector): return len(result) > 0 def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + document_ids_filter = kwargs.get("document_ids_filter") + filter = kwargs.get("filter", {}) + if document_ids_filter: + filter["document_id"] = document_ids_filter results = self.similarity_search_with_score_by_vector( - k=int(kwargs.get("top_k", 4)), embedding=query_vector, filter=kwargs.get("filter") + k=int(kwargs.get("top_k", 4)), embedding=query_vector, filter=filter ) # Organize results. @@ -246,9 +250,9 @@ class RelytVector(BaseVector): filter_condition = "" if filter is not None: conditions = [ - f"metadata->>{key!r} in ({', '.join(map(repr, value))})" + f"metadata->>'{key}' in ({', '.join(map(repr, value))})" if len(value) > 1 - else f"metadata->>{key!r} = {value[0]!r}" + else f"metadata->>'{key}' = {value[0]!r}" for key, value in filter.items() ] filter_condition = f"WHERE {' AND '.join(conditions)}" diff --git a/api/core/rag/datasource/vdb/tencent/tencent_vector.py b/api/core/rag/datasource/vdb/tencent/tencent_vector.py index 1a4fa7b87e..304d9538a7 100644 --- a/api/core/rag/datasource/vdb/tencent/tencent_vector.py +++ b/api/core/rag/datasource/vdb/tencent/tencent_vector.py @@ -145,11 +145,16 @@ class TencentVector(BaseVector): self._db.collection(self._collection_name).delete(document_ids=ids) def delete_by_metadata_field(self, key: str, value: str) -> None: - self._db.collection(self._collection_name).delete(filter=Filter(Filter.In(key, [value]))) + self._db.collection(self._collection_name).delete(filter=Filter(Filter.In(f"metadata.{key}", [value]))) def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: + document_ids_filter = kwargs.get("document_ids_filter") + filter = None + if document_ids_filter: + filter = Filter(Filter.In("metadata.document_id", document_ids_filter)) res = self._db.collection(self._collection_name).search( vectors=[query_vector], + filter=filter, params=document.HNSWSearchParams(ef=kwargs.get("ef", 10)), retrieve_vector=False, limit=kwargs.get("top_k", 4), diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py index 549f0175eb..6d50c1dd9d 100644 --- a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py @@ -326,6 +326,18 @@ class TidbOnQdrantVector(BaseVector): ), ], ) + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + should_conditions = [] + for document_id_filter in document_ids_filter: + should_conditions.append( + models.FieldCondition( + key="metadata.document_id", + match=models.MatchValue(value=document_id_filter), + ) + ) + if should_conditions: + filter.should = should_conditions # type: ignore results = self._client.search( collection_name=self._collection_name, query_vector=query_vector, @@ -368,6 +380,18 @@ class TidbOnQdrantVector(BaseVector): ) ] ) + 
document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + should_conditions = [] + for document_id_filter in document_ids_filter: + should_conditions.append( + models.FieldCondition( + key="metadata.document_id", + match=models.MatchValue(value=document_id_filter), + ) + ) + if should_conditions: + scroll_filter.should = should_conditions # type: ignore response = self._client.scroll( collection_name=self._collection_name, scroll_filter=scroll_filter, diff --git a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py index 6dd4be65c8..77c5786042 100644 --- a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py +++ b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py @@ -196,6 +196,11 @@ class TiDBVector(BaseVector): docs = [] tidb_dist_func = self._get_distance_func() + document_ids_filter = kwargs.get("document_ids_filter") + where_clause = "" + if document_ids_filter: + document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) + where_clause = f" WHERE meta->>'$.document_id' in ({document_ids}) " with Session(self._engine) as session: select_statement = sql_text(f""" @@ -206,6 +211,7 @@ class TiDBVector(BaseVector): text, {tidb_dist_func}(vector, :query_vector_str) AS distance FROM {self._collection_name} + {where_clause} ORDER BY distance ASC LIMIT :top_k ) t diff --git a/api/core/rag/datasource/vdb/upstash/upstash_vector.py b/api/core/rag/datasource/vdb/upstash/upstash_vector.py index 5c3fee98a9..e4f15be2b0 100644 --- a/api/core/rag/datasource/vdb/upstash/upstash_vector.py +++ b/api/core/rag/datasource/vdb/upstash/upstash_vector.py @@ -88,7 +88,20 @@ class UpstashVector(BaseVector): def search_by_vector(self, query_vector: list[float], **kwargs: Any) -> list[Document]: top_k = kwargs.get("top_k", 4) - result = self.index.query(vector=query_vector, top_k=top_k, include_metadata=True, include_data=True) + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + document_ids = ", ".join(f"'{id}'" for id in document_ids_filter) + filter = f"document_id in ({document_ids})" + else: + filter = "" + result = self.index.query( + vector=query_vector, + top_k=top_k, + include_metadata=True, + include_data=True, + include_vectors=False, + filter=filter, + ) docs = [] score_threshold = float(kwargs.get("score_threshold") or 0.0) for record in result: diff --git a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py index 9de8761a91..9166d35bc8 100644 --- a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py +++ b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py @@ -177,7 +177,11 @@ class VikingDBVector(BaseVector): query_vector, limit=kwargs.get("top_k", 4) ) score_threshold = float(kwargs.get("score_threshold") or 0.0) - return self._get_search_res(results, score_threshold) + docs = self._get_search_res(results, score_threshold) + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + docs = [doc for doc in docs if doc.metadata.get("document_id") in document_ids_filter] + return docs def _get_search_res(self, results, score_threshold) -> list[Document]: if len(results) == 0: diff --git a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py index 68d043a19f..15bed54783 100644 --- a/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py +++ b/api/core/rag/datasource/vdb/weaviate/weaviate_vector.py @@ -187,8 +187,10 
@@ class WeaviateVector(BaseVector): query_obj = self._client.query.get(collection_name, properties) vector = {"vector": query_vector} - if kwargs.get("where_filter"): - query_obj = query_obj.with_where(kwargs.get("where_filter")) + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + where_filter = {"operator": "ContainsAny", "path": ["document_id"], "valueTextArray": document_ids_filter} + query_obj = query_obj.with_where(where_filter) result = ( query_obj.with_near_vector(vector) .with_limit(kwargs.get("top_k", 4)) @@ -233,8 +235,10 @@ class WeaviateVector(BaseVector): if kwargs.get("search_distance"): content["certainty"] = kwargs.get("search_distance") query_obj = self._client.query.get(collection_name, properties) - if kwargs.get("where_filter"): - query_obj = query_obj.with_where(kwargs.get("where_filter")) + document_ids_filter = kwargs.get("document_ids_filter") + if document_ids_filter: + where_filter = {"operator": "ContainsAny", "path": ["document_id"], "valueTextArray": document_ids_filter} + query_obj = query_obj.with_where(where_filter) query_obj = query_obj.with_additional(["vector"]) properties = ["text"] result = query_obj.with_bm25(query=query, properties=properties).with_limit(kwargs.get("top_k", 4)).do() diff --git a/api/core/rag/entities/metadata_entities.py b/api/core/rag/entities/metadata_entities.py new file mode 100644 index 0000000000..6ef932ad22 --- /dev/null +++ b/api/core/rag/entities/metadata_entities.py @@ -0,0 +1,45 @@ +from collections.abc import Sequence +from typing import Literal, Optional + +from pydantic import BaseModel, Field + +SupportedComparisonOperator = Literal[ + # for string or array + "contains", + "not contains", + "start with", + "end with", + "is", + "is not", + "empty", + "not empty", + # for number + "=", + "≠", + ">", + "<", + "≥", + "≤", + # for time + "before", + "after", +] + + +class Condition(BaseModel): + """ + Conditon detail + """ + + name: str + comparison_operator: SupportedComparisonOperator + value: str | Sequence[str] | None | int | float = None + + +class MetadataCondition(BaseModel): + """ + Metadata Condition. 
+ """ + + logical_operator: Optional[Literal["and", "or"]] = "and" + conditions: Optional[list[Condition]] = Field(default=None, deprecated=True) diff --git a/api/core/rag/index_processor/constant/built_in_field.py b/api/core/rag/index_processor/constant/built_in_field.py new file mode 100644 index 0000000000..09c5e949eb --- /dev/null +++ b/api/core/rag/index_processor/constant/built_in_field.py @@ -0,0 +1,15 @@ +from enum import Enum + + +class BuiltInField(str, Enum): + document_name = "document_name" + uploader = "uploader" + upload_date = "upload_date" + last_update_date = "last_update_date" + source = "source" + + +class MetadataDataSource(Enum): + upload_file = "file_upload" + website_crawl = "website" + notion_import = "notion" diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index 48454bd66e..62d5e4b8fe 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -1,35 +1,61 @@ +import json import math +import re import threading -from collections import Counter -from typing import Any, Optional, cast +from collections import Counter, defaultdict +from collections.abc import Generator, Mapping +from typing import Any, Optional, Union, cast from flask import Flask, current_app +from sqlalchemy import Integer, and_, or_, text +from sqlalchemy import cast as sqlalchemy_cast -from core.app.app_config.entities import DatasetEntity, DatasetRetrieveConfigEntity +from core.app.app_config.entities import ( + DatasetEntity, + DatasetRetrieveConfigEntity, + MetadataFilteringCondition, + ModelConfig, +) from core.app.entities.app_invoke_entities import InvokeFrom, ModelConfigWithCredentialsEntity from core.callback_handler.index_tool_callback_handler import DatasetIndexToolCallbackHandler from core.entities.agent_entities import PlanningStrategy +from core.entities.model_entities import ModelStatus from core.memory.token_buffer_memory import TokenBufferMemory from core.model_manager import ModelInstance, ModelManager -from core.model_runtime.entities.message_entities import PromptMessageTool +from core.model_runtime.entities.llm_entities import LLMResult, LLMUsage +from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageRole, PromptMessageTool from core.model_runtime.entities.model_entities import ModelFeature, ModelType from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel from core.ops.entities.trace_entity import TraceTaskName from core.ops.ops_trace_manager import TraceQueueManager, TraceTask from core.ops.utils import measure_time +from core.prompt.advanced_prompt_transform import AdvancedPromptTransform +from core.prompt.entities.advanced_prompt_entities import ChatModelMessage, CompletionModelPromptTemplate +from core.prompt.simple_prompt_transform import ModelMode from core.rag.data_post_processor.data_post_processor import DataPostProcessor from core.rag.datasource.keyword.jieba.jieba_keyword_table_handler import JiebaKeywordTableHandler from core.rag.datasource.retrieval_service import RetrievalService from core.rag.entities.context_entities import DocumentContext +from core.rag.entities.metadata_entities import Condition, MetadataCondition from core.rag.index_processor.constant.index_type import IndexType from core.rag.models.document import Document from core.rag.rerank.rerank_type import RerankMode from core.rag.retrieval.retrieval_methods import RetrievalMethod from 
core.rag.retrieval.router.multi_dataset_function_call_router import FunctionCallMultiDatasetRouter from core.rag.retrieval.router.multi_dataset_react_route import ReactMultiDatasetRouter +from core.rag.retrieval.template_prompts import ( + METADATA_FILTER_ASSISTANT_PROMPT_1, + METADATA_FILTER_ASSISTANT_PROMPT_2, + METADATA_FILTER_COMPLETION_PROMPT, + METADATA_FILTER_SYSTEM_PROMPT, + METADATA_FILTER_USER_PROMPT_1, + METADATA_FILTER_USER_PROMPT_2, + METADATA_FILTER_USER_PROMPT_3, +) from core.tools.utils.dataset_retriever.dataset_retriever_base_tool import DatasetRetrieverBaseTool from extensions.ext_database import db -from models.dataset import ChildChunk, Dataset, DatasetQuery, DocumentSegment +from libs.json_in_md_parser import parse_and_check_json_markdown +from models.dataset import ChildChunk, Dataset, DatasetMetadata, DatasetQuery, DocumentSegment from models.dataset import Document as DatasetDocument from services.external_knowledge_service import ExternalDatasetService @@ -59,6 +85,7 @@ class DatasetRetrieval: hit_callback: DatasetIndexToolCallbackHandler, message_id: str, memory: Optional[TokenBufferMemory] = None, + inputs: Optional[Mapping[str, Any]] = None, ) -> Optional[str]: """ Retrieve dataset. @@ -116,6 +143,22 @@ class DatasetRetrieval: continue available_datasets.append(dataset) + if inputs: + inputs = {key: str(value) for key, value in inputs.items()} + else: + inputs = {} + available_datasets_ids = [dataset.id for dataset in available_datasets] + metadata_filter_document_ids, metadata_condition = self._get_metadata_filter_condition( + available_datasets_ids, + query, + tenant_id, + user_id, + retrieve_config.metadata_filtering_mode, # type: ignore + retrieve_config.metadata_model_config, # type: ignore + retrieve_config.metadata_filtering_conditions, + inputs, + ) + all_documents = [] user_from = "account" if invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} else "end_user" if retrieve_config.retrieve_strategy == DatasetRetrieveConfigEntity.RetrieveStrategy.SINGLE: @@ -130,6 +173,8 @@ class DatasetRetrieval: model_config, planning_strategy, message_id, + metadata_filter_document_ids, + metadata_condition, ) elif retrieve_config.retrieve_strategy == DatasetRetrieveConfigEntity.RetrieveStrategy.MULTIPLE: all_documents = self.multiple_retrieve( @@ -146,6 +191,8 @@ class DatasetRetrieval: retrieve_config.weights, retrieve_config.reranking_enabled or True, message_id, + metadata_filter_document_ids, + metadata_condition, ) dify_documents = [item for item in all_documents if item.provider == "dify"] @@ -239,6 +286,8 @@ class DatasetRetrieval: model_config: ModelConfigWithCredentialsEntity, planning_strategy: PlanningStrategy, message_id: Optional[str] = None, + metadata_filter_document_ids: Optional[dict[str, list[str]]] = None, + metadata_condition: Optional[MetadataCondition] = None, ): tools = [] for dataset in available_datasets: @@ -279,6 +328,7 @@ class DatasetRetrieval: dataset_id=dataset_id, query=query, external_retrieval_parameters=dataset.retrieval_model, + metadata_condition=metadata_condition, ) for external_document in external_documents: document = Document( @@ -293,6 +343,15 @@ class DatasetRetrieval: document.metadata["dataset_name"] = dataset.name results.append(document) else: + if metadata_condition and not metadata_filter_document_ids: + return [] + document_ids_filter = None + if metadata_filter_document_ids: + document_ids = metadata_filter_document_ids.get(dataset.id, []) + if document_ids: + document_ids_filter = document_ids + else: + return 
[] retrieval_model_config = dataset.retrieval_model or default_retrieval_model # get top k @@ -324,6 +383,7 @@ class DatasetRetrieval: reranking_model=reranking_model, reranking_mode=retrieval_model_config.get("reranking_mode", "reranking_model"), weights=retrieval_model_config.get("weights", None), + document_ids_filter=document_ids_filter, ) self._on_query(query, [dataset_id], app_id, user_from, user_id) @@ -348,6 +408,8 @@ class DatasetRetrieval: weights: Optional[dict[str, Any]] = None, reranking_enable: bool = True, message_id: Optional[str] = None, + metadata_filter_document_ids: Optional[dict[str, list[str]]] = None, + metadata_condition: Optional[MetadataCondition] = None, ): if not available_datasets: return [] @@ -387,6 +449,16 @@ class DatasetRetrieval: for dataset in available_datasets: index_type = dataset.indexing_technique + document_ids_filter = None + if dataset.provider != "external": + if metadata_condition and not metadata_filter_document_ids: + continue + if metadata_filter_document_ids: + document_ids = metadata_filter_document_ids.get(dataset.id, []) + if document_ids: + document_ids_filter = document_ids + else: + continue retrieval_thread = threading.Thread( target=self._retriever, kwargs={ @@ -395,6 +467,8 @@ class DatasetRetrieval: "query": query, "top_k": top_k, "all_documents": all_documents, + "document_ids_filter": document_ids_filter, + "metadata_condition": metadata_condition, }, ) threads.append(retrieval_thread) @@ -493,7 +567,16 @@ class DatasetRetrieval: db.session.add_all(dataset_queries) db.session.commit() - def _retriever(self, flask_app: Flask, dataset_id: str, query: str, top_k: int, all_documents: list): + def _retriever( + self, + flask_app: Flask, + dataset_id: str, + query: str, + top_k: int, + all_documents: list, + document_ids_filter: Optional[list[str]] = None, + metadata_condition: Optional[MetadataCondition] = None, + ): with flask_app.app_context(): dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first() @@ -506,6 +589,7 @@ class DatasetRetrieval: dataset_id=dataset_id, query=query, external_retrieval_parameters=dataset.retrieval_model, + metadata_condition=metadata_condition, ) for external_document in external_documents: document = Document( @@ -546,6 +630,7 @@ class DatasetRetrieval: else None, reranking_mode=retrieval_model.get("reranking_mode") or "reranking_model", weights=retrieval_model.get("weights", None), + document_ids_filter=document_ids_filter, ) all_documents.extend(documents) @@ -733,3 +818,340 @@ class DatasetRetrieval: filter_documents, key=lambda x: x.metadata.get("score", 0) if x.metadata else 0, reverse=True ) return filter_documents[:top_k] if top_k else filter_documents + + def _get_metadata_filter_condition( + self, + dataset_ids: list, + query: str, + tenant_id: str, + user_id: str, + metadata_filtering_mode: str, + metadata_model_config: ModelConfig, + metadata_filtering_conditions: Optional[MetadataFilteringCondition], + inputs: dict, + ) -> tuple[Optional[dict[str, list[str]]], Optional[MetadataCondition]]: + document_query = db.session.query(DatasetDocument).filter( + DatasetDocument.dataset_id.in_(dataset_ids), + DatasetDocument.indexing_status == "completed", + DatasetDocument.enabled == True, + DatasetDocument.archived == False, + ) + filters = [] # type: ignore + metadata_condition = None + if metadata_filtering_mode == "disabled": + return None, None + elif metadata_filtering_mode == "automatic": + automatic_metadata_filters = self._automatic_metadata_filter_func( + dataset_ids, 
query, tenant_id, user_id, metadata_model_config + ) + if automatic_metadata_filters: + conditions = [] + for filter in automatic_metadata_filters: + self._process_metadata_filter_func( + filter.get("condition"), # type: ignore + filter.get("metadata_name"), # type: ignore + filter.get("value"), + filters, # type: ignore + ) + conditions.append( + Condition( + name=filter.get("metadata_name"), # type: ignore + comparison_operator=filter.get("condition"), # type: ignore + value=filter.get("value"), + ) + ) + metadata_condition = MetadataCondition( + logical_operator=metadata_filtering_conditions.logical_operator, # type: ignore + conditions=conditions, + ) + elif metadata_filtering_mode == "manual": + if metadata_filtering_conditions: + metadata_condition = MetadataCondition(**metadata_filtering_conditions.model_dump()) + for condition in metadata_filtering_conditions.conditions: # type: ignore + metadata_name = condition.name + expected_value = condition.value + if expected_value or condition.comparison_operator in ("empty", "not empty"): + if isinstance(expected_value, str): + expected_value = self._replace_metadata_filter_value(expected_value, inputs) + filters = self._process_metadata_filter_func( + condition.comparison_operator, metadata_name, expected_value, filters + ) + else: + raise ValueError("Invalid metadata filtering mode") + if filters: + if metadata_filtering_conditions.logical_operator == "or": # type: ignore + document_query = document_query.filter(or_(*filters)) + else: + document_query = document_query.filter(and_(*filters)) + documents = document_query.all() + # group by dataset_id + metadata_filter_document_ids = defaultdict(list) if documents else None # type: ignore + for document in documents: + metadata_filter_document_ids[document.dataset_id].append(document.id) # type: ignore + return metadata_filter_document_ids, metadata_condition + + def _replace_metadata_filter_value(self, text: str, inputs: dict) -> str: + def replacer(match): + key = match.group(1) + return str(inputs.get(key, f"{{{{{key}}}}}")) + + pattern = re.compile(r"\{\{(\w+)\}\}") + return pattern.sub(replacer, text) + + def _automatic_metadata_filter_func( + self, dataset_ids: list, query: str, tenant_id: str, user_id: str, metadata_model_config: ModelConfig + ) -> Optional[list[dict[str, Any]]]: + # get all metadata field + metadata_fields = db.session.query(DatasetMetadata).filter(DatasetMetadata.dataset_id.in_(dataset_ids)).all() + all_metadata_fields = [metadata_field.name for metadata_field in metadata_fields] + # get metadata model config + if metadata_model_config is None: + raise ValueError("metadata_model_config is required") + # get metadata model instance + # fetch model config + model_instance, model_config = self._fetch_model_config(tenant_id, metadata_model_config) + + # fetch prompt messages + prompt_messages, stop = self._get_prompt_template( + model_config=model_config, + mode=metadata_model_config.mode, + metadata_fields=all_metadata_fields, + query=query or "", + ) + + result_text = "" + try: + # handle invoke result + invoke_result = cast( + Generator[LLMResult, None, None], + model_instance.invoke_llm( + prompt_messages=prompt_messages, + model_parameters=model_config.parameters, + stop=stop, + stream=True, + user=user_id, + ), + ) + + # handle invoke result + result_text, usage = self._handle_invoke_result(invoke_result=invoke_result) + + result_text_json = parse_and_check_json_markdown(result_text, []) + automatic_metadata_filters = [] + if "metadata_map" in result_text_json: + 
metadata_map = result_text_json["metadata_map"] + for item in metadata_map: + if item.get("metadata_field_name") in all_metadata_fields: + automatic_metadata_filters.append( + { + "metadata_name": item.get("metadata_field_name"), + "value": item.get("metadata_field_value"), + "condition": item.get("comparison_operator"), + } + ) + except Exception: + return None + return automatic_metadata_filters + + def _process_metadata_filter_func(self, condition: str, metadata_name: str, value: Optional[Any], filters: list): + match condition: + case "contains": + filters.append( + (text("documents.doc_metadata ->> :key LIKE :value")).params(key=metadata_name, value=f"%{value}%") + ) + case "not contains": + filters.append( + (text("documents.doc_metadata ->> :key NOT LIKE :value")).params( + key=metadata_name, value=f"%{value}%" + ) + ) + case "start with": + filters.append( + (text("documents.doc_metadata ->> :key LIKE :value")).params(key=metadata_name, value=f"{value}%") + ) + + case "end with": + filters.append( + (text("documents.doc_metadata ->> :key LIKE :value")).params(key=metadata_name, value=f"%{value}") + ) + case "is" | "=": + if isinstance(value, str): + filters.append(DatasetDocument.doc_metadata[metadata_name] == f'"{value}"') + else: + filters.append( + sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Integer) == value + ) + case "is not" | "≠": + if isinstance(value, str): + filters.append(DatasetDocument.doc_metadata[metadata_name] != f'"{value}"') + else: + filters.append( + sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Integer) != value + ) + case "empty": + filters.append(DatasetDocument.doc_metadata[metadata_name].is_(None)) + case "not empty": + filters.append(DatasetDocument.doc_metadata[metadata_name].isnot(None)) + case "before" | "<": + filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Integer) < value) + case "after" | ">": + filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Integer) > value) + case "≤" | "<=": + filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Integer) <= value) + case "≥" | ">=": + filters.append(sqlalchemy_cast(DatasetDocument.doc_metadata[metadata_name].astext, Integer) >= value) + case _: + pass + return filters + + def _fetch_model_config( + self, tenant_id: str, model: ModelConfig + ) -> tuple[ModelInstance, ModelConfigWithCredentialsEntity]: + """ + Fetch model config + :param tenant_id: tenant id + :param model: model config + :return: + """ + if model is None: + raise ValueError("model is required") + model_name = model.name + provider_name = model.provider + + model_manager = ModelManager() + model_instance = model_manager.get_model_instance( + tenant_id=tenant_id, model_type=ModelType.LLM, provider=provider_name, model=model_name + ) + + provider_model_bundle = model_instance.provider_model_bundle + model_type_instance = model_instance.model_type_instance + model_type_instance = cast(LargeLanguageModel, model_type_instance) + + model_credentials = model_instance.credentials + + # check model + provider_model = provider_model_bundle.configuration.get_provider_model( + model=model_name, model_type=ModelType.LLM + ) + + if provider_model is None: + raise ValueError(f"Model {model_name} not exist.") + + if provider_model.status == ModelStatus.NO_CONFIGURE: + raise ValueError(f"Model {model_name} credentials is not initialized.") + elif provider_model.status == ModelStatus.NO_PERMISSION: + raise ValueError(f"Dify
Hosted OpenAI {model_name} currently not support.") + elif provider_model.status == ModelStatus.QUOTA_EXCEEDED: + raise ValueError(f"Model provider {provider_name} quota exceeded.") + + # model config + completion_params = model.completion_params + stop = [] + if "stop" in completion_params: + stop = completion_params["stop"] + del completion_params["stop"] + + # get model mode + model_mode = model.mode + if not model_mode: + raise ValueError("LLM mode is required.") + + model_schema = model_type_instance.get_model_schema(model_name, model_credentials) + + if not model_schema: + raise ValueError(f"Model {model_name} not exist.") + + return model_instance, ModelConfigWithCredentialsEntity( + provider=provider_name, + model=model_name, + model_schema=model_schema, + mode=model_mode, + provider_model_bundle=provider_model_bundle, + credentials=model_credentials, + parameters=completion_params, + stop=stop, + ) + + def _get_prompt_template( + self, model_config: ModelConfigWithCredentialsEntity, mode: str, metadata_fields: list, query: str + ): + model_mode = ModelMode.value_of(mode) + input_text = query + + prompt_template: Union[CompletionModelPromptTemplate, list[ChatModelMessage]] + if model_mode == ModelMode.CHAT: + prompt_template = [] + system_prompt_messages = ChatModelMessage(role=PromptMessageRole.SYSTEM, text=METADATA_FILTER_SYSTEM_PROMPT) + prompt_template.append(system_prompt_messages) + user_prompt_message_1 = ChatModelMessage(role=PromptMessageRole.USER, text=METADATA_FILTER_USER_PROMPT_1) + prompt_template.append(user_prompt_message_1) + assistant_prompt_message_1 = ChatModelMessage( + role=PromptMessageRole.ASSISTANT, text=METADATA_FILTER_ASSISTANT_PROMPT_1 + ) + prompt_template.append(assistant_prompt_message_1) + user_prompt_message_2 = ChatModelMessage(role=PromptMessageRole.USER, text=METADATA_FILTER_USER_PROMPT_2) + prompt_template.append(user_prompt_message_2) + assistant_prompt_message_2 = ChatModelMessage( + role=PromptMessageRole.ASSISTANT, text=METADATA_FILTER_ASSISTANT_PROMPT_2 + ) + prompt_template.append(assistant_prompt_message_2) + user_prompt_message_3 = ChatModelMessage( + role=PromptMessageRole.USER, + text=METADATA_FILTER_USER_PROMPT_3.format( + input_text=input_text, + metadata_fields=json.dumps(metadata_fields, ensure_ascii=False), + ), + ) + prompt_template.append(user_prompt_message_3) + elif model_mode == ModelMode.COMPLETION: + prompt_template = CompletionModelPromptTemplate( + text=METADATA_FILTER_COMPLETION_PROMPT.format( + input_text=input_text, + metadata_fields=json.dumps(metadata_fields, ensure_ascii=False), + ) + ) + + else: + raise ValueError(f"Model mode {model_mode} not support.") + + prompt_transform = AdvancedPromptTransform() + prompt_messages = prompt_transform.get_prompt( + prompt_template=prompt_template, + inputs={}, + query=query or "", + files=[], + context=None, + memory_config=None, + memory=None, + model_config=model_config, + ) + stop = model_config.stop + + return prompt_messages, stop + + def _handle_invoke_result(self, invoke_result: Generator) -> tuple[str, LLMUsage]: + """ + Handle invoke result + :param invoke_result: invoke result + :return: + """ + model = None + prompt_messages: list[PromptMessage] = [] + full_text = "" + usage = None + for result in invoke_result: + text = result.delta.message.content + full_text += text + + if not model: + model = result.model + + if not prompt_messages: + prompt_messages = result.prompt_messages + + if not usage and result.delta.usage: + usage = result.delta.usage + + if not usage: + 
usage = LLMUsage.empty_usage() + + return full_text, usage diff --git a/api/core/rag/retrieval/template_prompts.py b/api/core/rag/retrieval/template_prompts.py new file mode 100644 index 0000000000..7abd55d798 --- /dev/null +++ b/api/core/rag/retrieval/template_prompts.py @@ -0,0 +1,66 @@ +METADATA_FILTER_SYSTEM_PROMPT = """ + ### Job Description', + You are a text metadata extract engine that extract text's metadata based on user input and set the metadata value + ### Task + Your task is to ONLY extract the metadatas that exist in the input text from the provided metadata list and Use the following operators ["=", "!=", ">", "<", ">=", "<="] to express logical relationships, then return result in JSON format with the key "metadata_fields" and value "metadata_field_value" and comparison operator "comparison_operator". + ### Format + The input text is in the variable input_text. Metadata are specified as a list in the variable metadata_fields. + ### Constraint + DO NOT include anything other than the JSON array in your response. +""" # noqa: E501 + +METADATA_FILTER_USER_PROMPT_1 = """ + { "input_text": "I want to know which company’s email address test@example.com is?", + "metadata_fields": ["filename", "email", "phone", "address"] + } +""" + +METADATA_FILTER_ASSISTANT_PROMPT_1 = """ +```json + {"metadata_map": [ + {"metadata_field_name": "email", "metadata_field_value": "test@example.com", "comparison_operator": "="} + ] + } +``` +""" + +METADATA_FILTER_USER_PROMPT_2 = """ + {"input_text": "What are the movies with a score of more than 9 in 2024?", + "metadata_fields": ["name", "year", "rating", "country"]} +""" + +METADATA_FILTER_ASSISTANT_PROMPT_2 = """ +```json + {"metadata_map": [ + {"metadata_field_name": "year", "metadata_field_value": "2024", "comparison_operator": "="}, + {"metadata_field_name": "rating", "metadata_field_value": "9", "comparison_operator": ">"}, + ]} +``` +""" + +METADATA_FILTER_USER_PROMPT_3 = """ + '{{"input_text": "{input_text}",', + '"metadata_fields": {metadata_fields}}}' +""" + +METADATA_FILTER_COMPLETION_PROMPT = """ +### Job Description +You are a text metadata extract engine that extract text's metadata based on user input and set the metadata value +### Task +# Your task is to ONLY extract the metadatas that exist in the input text from the provided metadata list and Use the following operators ["=", "!=", ">", "<", ">=", "<="] to express logical relationships, then return result in JSON format with the key "metadata_fields" and value "metadata_field_value" and comparison operator "comparison_operator". +### Format +The input text is in the variable input_text. Metadata are specified as a list in the variable metadata_fields. +### Constraint +DO NOT include anything other than the JSON array in your response. +### Example +Here is the chat example between human and assistant, inside XML tags. 
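These templates are rendered with `str.format`, so the doubled braces escape to literal JSON braces while `{input_text}` and `{metadata_fields}` are substituted, with `metadata_fields` serialized via `json.dumps`. A small illustration of that rendering; the query and field list are invented for the example:

```python
import json

# Hypothetical template fragment in the same doubled-brace style as above.
TEMPLATE = '{{"input_text": "{input_text}", "metadata_fields": {metadata_fields}}}'

rendered = TEMPLATE.format(
    input_text="What are the movies with a score of more than 9 in 2024?",
    metadata_fields=json.dumps(["name", "year", "rating", "country"]),
)
print(rendered)
# {"input_text": "What are the movies with a score of more than 9 in 2024?", "metadata_fields": ["name", "year", "rating", "country"]}
```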
+ +User:{{"input_text": ["I want to know which company’s email address test@example.com is?"], "metadata_fields": ["filename", "email", "phone", "address"]}} +Assistant:{{"metadata_map": [{{"metadata_field_name": "email", "metadata_field_value": "test@example.com", "comparison_operator": "="}}]}} +User:{{"input_text": "What are the movies with a score of more than 9 in 2024?", "metadata_fields": ["name", "year", "rating", "country"]}} +Assistant:{{"metadata_map": [{{"metadata_field_name": "year", "metadata_field_value": "2024", "comparison_operator": "="}, {{"metadata_field_name": "rating", "metadata_field_value": "9", "comparison_operator": ">"}}]}} + +### User Input +{{"input_text" : "{input_text}", "metadata_fields" : {metadata_fields}}} +### Assistant Output +""" # noqa: E501 diff --git a/api/core/workflow/nodes/knowledge_retrieval/entities.py b/api/core/workflow/nodes/knowledge_retrieval/entities.py index e8972d1381..d2e5a15545 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/entities.py +++ b/api/core/workflow/nodes/knowledge_retrieval/entities.py @@ -1,8 +1,10 @@ +from collections.abc import Sequence from typing import Any, Literal, Optional -from pydantic import BaseModel +from pydantic import BaseModel, Field from core.workflow.nodes.base import BaseNodeData +from core.workflow.nodes.llm.entities import VisionConfig class RerankingModelConfig(BaseModel): @@ -73,6 +75,48 @@ class SingleRetrievalConfig(BaseModel): model: ModelConfig +SupportedComparisonOperator = Literal[ + # for string or array + "contains", + "not contains", + "start with", + "end with", + "is", + "is not", + "empty", + "not empty", + # for number + "=", + "≠", + ">", + "<", + "≥", + "≤", + # for time + "before", + "after", +] + + +class Condition(BaseModel): + """ + Conditon detail + """ + + name: str + comparison_operator: SupportedComparisonOperator + value: str | Sequence[str] | None | int | float = None + + +class MetadataFilteringCondition(BaseModel): + """ + Metadata Filtering Condition. + """ + + logical_operator: Optional[Literal["and", "or"]] = "and" + conditions: Optional[list[Condition]] = Field(default=None, deprecated=True) + + class KnowledgeRetrievalNodeData(BaseNodeData): """ Knowledge retrieval Node Data. 
@@ -84,3 +128,7 @@ class KnowledgeRetrievalNodeData(BaseNodeData): retrieval_mode: Literal["single", "multiple"] multiple_retrieval_config: Optional[MultipleRetrievalConfig] = None single_retrieval_config: Optional[SingleRetrievalConfig] = None + metadata_filtering_mode: Optional[Literal["disabled", "automatic", "manual"]] = "disabled" + metadata_model_config: Optional[ModelConfig] = None + metadata_filtering_conditions: Optional[MetadataFilteringCondition] = None + vision: VisionConfig = Field(default_factory=VisionConfig) diff --git a/api/core/workflow/nodes/knowledge_retrieval/exc.py b/api/core/workflow/nodes/knowledge_retrieval/exc.py index 0c3b6e86fa..6bcdc32790 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/exc.py +++ b/api/core/workflow/nodes/knowledge_retrieval/exc.py @@ -16,3 +16,7 @@ class ModelNotSupportedError(KnowledgeRetrievalNodeError): class ModelQuotaExceededError(KnowledgeRetrievalNodeError): """Raised when the model provider quota is exceeded.""" + + +class InvalidModelTypeError(KnowledgeRetrievalNodeError): + """Raised when the model is not a Large Language Model.""" diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index 11c56e3172..e6bc1e44df 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -1,32 +1,51 @@ +import json import logging import time +from collections import defaultdict from collections.abc import Mapping, Sequence -from typing import Any, cast +from typing import Any, Optional, cast -from sqlalchemy import func +from sqlalchemy import Integer, and_, func, or_, text +from sqlalchemy import cast as sqlalchemy_cast from core.app.app_config.entities import DatasetRetrieveConfigEntity from core.app.entities.app_invoke_entities import ModelConfigWithCredentialsEntity from core.entities.agent_entities import PlanningStrategy from core.entities.model_entities import ModelStatus from core.model_manager import ModelInstance, ModelManager +from core.model_runtime.entities.message_entities import PromptMessageRole from core.model_runtime.entities.model_entities import ModelFeature, ModelType from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel +from core.prompt.simple_prompt_transform import ModelMode from core.rag.datasource.retrieval_service import RetrievalService +from core.rag.entities.metadata_entities import Condition, MetadataCondition from core.rag.retrieval.dataset_retrieval import DatasetRetrieval from core.rag.retrieval.retrieval_methods import RetrievalMethod from core.variables import StringSegment from core.workflow.entities.node_entities import NodeRunResult -from core.workflow.nodes.base import BaseNode from core.workflow.nodes.enums import NodeType +from core.workflow.nodes.event.event import ModelInvokeCompletedEvent +from core.workflow.nodes.knowledge_retrieval.template_prompts import ( + METADATA_FILTER_ASSISTANT_PROMPT_1, + METADATA_FILTER_ASSISTANT_PROMPT_2, + METADATA_FILTER_COMPLETION_PROMPT, + METADATA_FILTER_SYSTEM_PROMPT, + METADATA_FILTER_USER_PROMPT_1, + METADATA_FILTER_USER_PROMPT_3, +) +from core.workflow.nodes.llm.entities import LLMNodeChatModelMessage, LLMNodeCompletionModelPromptTemplate +from core.workflow.nodes.llm.node import LLMNode +from core.workflow.nodes.question_classifier.template_prompts import QUESTION_CLASSIFIER_USER_PROMPT_2 from 
extensions.ext_database import db from extensions.ext_redis import redis_client -from models.dataset import Dataset, Document, RateLimitLog +from libs.json_in_md_parser import parse_and_check_json_markdown +from models.dataset import Dataset, DatasetMetadata, Document, RateLimitLog from models.workflow import WorkflowNodeExecutionStatus from services.feature_service import FeatureService -from .entities import KnowledgeRetrievalNodeData +from .entities import KnowledgeRetrievalNodeData, ModelConfig from .exc import ( + InvalidModelTypeError, KnowledgeRetrievalNodeError, ModelCredentialsNotInitializedError, ModelNotExistError, @@ -45,13 +64,14 @@ default_retrieval_model = { } -class KnowledgeRetrievalNode(BaseNode[KnowledgeRetrievalNodeData]): - _node_data_cls = KnowledgeRetrievalNodeData +class KnowledgeRetrievalNode(LLMNode): + _node_data_cls = KnowledgeRetrievalNodeData # type: ignore _node_type = NodeType.KNOWLEDGE_RETRIEVAL - def _run(self) -> NodeRunResult: + def _run(self) -> NodeRunResult: # type: ignore + node_data = cast(KnowledgeRetrievalNodeData, self.node_data) # extract variables - variable = self.graph_runtime_state.variable_pool.get(self.node_data.query_variable_selector) + variable = self.graph_runtime_state.variable_pool.get(node_data.query_variable_selector) if not isinstance(variable, StringSegment): return NodeRunResult( status=WorkflowNodeExecutionStatus.FAILED, @@ -91,7 +111,7 @@ class KnowledgeRetrievalNode(BaseNode[KnowledgeRetrievalNodeData]): # retrieve knowledge try: - results = self._fetch_dataset_retriever(node_data=self.node_data, query=query) + results = self._fetch_dataset_retriever(node_data=node_data, query=query) outputs = {"result": results} return NodeRunResult( status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=variables, process_data=None, outputs=outputs @@ -145,11 +165,14 @@ class KnowledgeRetrievalNode(BaseNode[KnowledgeRetrievalNodeData]): if not dataset: continue available_datasets.append(dataset) + metadata_filter_document_ids, metadata_condition = self._get_metadata_filter_condition( + [dataset.id for dataset in available_datasets], query, node_data + ) all_documents = [] dataset_retrieval = DatasetRetrieval() if node_data.retrieval_mode == DatasetRetrieveConfigEntity.RetrieveStrategy.SINGLE.value: # fetch model config - model_instance, model_config = self._fetch_model_config(node_data) + model_instance, model_config = self._fetch_model_config(node_data.single_retrieval_config.model) # type: ignore # check model is support tool calling model_type_instance = model_config.provider_model_bundle.model_type_instance model_type_instance = cast(LargeLanguageModel, model_type_instance) @@ -174,6 +197,8 @@ class KnowledgeRetrievalNode(BaseNode[KnowledgeRetrievalNodeData]): model_config=model_config, model_instance=model_instance, planning_strategy=planning_strategy, + metadata_filter_document_ids=metadata_filter_document_ids, + metadata_condition=metadata_condition, ) elif node_data.retrieval_mode == DatasetRetrieveConfigEntity.RetrieveStrategy.MULTIPLE.value: if node_data.multiple_retrieval_config is None: @@ -220,6 +245,8 @@ class KnowledgeRetrievalNode(BaseNode[KnowledgeRetrievalNodeData]): reranking_model=reranking_model, weights=weights, reranking_enable=node_data.multiple_retrieval_config.reranking_enable, + metadata_filter_document_ids=metadata_filter_document_ids, + metadata_condition=metadata_condition, ) dify_documents = [item for item in all_documents if item.provider == "dify"] external_documents = [item for item in all_documents if 
item.provider == "external"] @@ -287,13 +314,187 @@ class KnowledgeRetrievalNode(BaseNode[KnowledgeRetrievalNodeData]): item["metadata"]["position"] = position return retrieval_resource_list + def _get_metadata_filter_condition( + self, dataset_ids: list, query: str, node_data: KnowledgeRetrievalNodeData + ) -> tuple[Optional[dict[str, list[str]]], Optional[MetadataCondition]]: + document_query = db.session.query(Document).filter( + Document.dataset_id.in_(dataset_ids), + Document.indexing_status == "completed", + Document.enabled == True, + Document.archived == False, + ) + filters = [] # type: ignore + metadata_condition = None + if node_data.metadata_filtering_mode == "disabled": + return None, None + elif node_data.metadata_filtering_mode == "automatic": + automatic_metadata_filters = self._automatic_metadata_filter_func(dataset_ids, query, node_data) + if automatic_metadata_filters: + conditions = [] + for filter in automatic_metadata_filters: + self._process_metadata_filter_func( + filter.get("condition", ""), + filter.get("metadata_name", ""), + filter.get("value"), + filters, # type: ignore + ) + conditions.append( + Condition( + name=filter.get("metadata_name"), # type: ignore + comparison_operator=filter.get("condition"), # type: ignore + value=filter.get("value"), + ) + ) + metadata_condition = MetadataCondition( + logical_operator=node_data.metadata_filtering_conditions.logical_operator, # type: ignore + conditions=conditions, + ) + elif node_data.metadata_filtering_mode == "manual": + if node_data.metadata_filtering_conditions: + metadata_condition = MetadataCondition(**node_data.metadata_filtering_conditions.model_dump()) + if node_data.metadata_filtering_conditions: + for condition in node_data.metadata_filtering_conditions.conditions: # type: ignore + metadata_name = condition.name + expected_value = condition.value + if expected_value or condition.comparison_operator in ("empty", "not empty"): + if isinstance(expected_value, str): + expected_value = self.graph_runtime_state.variable_pool.convert_template( + expected_value + ).text + + filters = self._process_metadata_filter_func( + condition.comparison_operator, metadata_name, expected_value, filters + ) + else: + raise ValueError("Invalid metadata filtering mode") + if filters: + if node_data.metadata_filtering_conditions.logical_operator == "and": # type: ignore + document_query = document_query.filter(and_(*filters)) + else: + document_query = document_query.filter(or_(*filters)) + documents = document_query.all() + # group by dataset_id + metadata_filter_document_ids = defaultdict(list) if documents else None # type: ignore + for document in documents: + metadata_filter_document_ids[document.dataset_id].append(document.id) # type: ignore + return metadata_filter_document_ids, metadata_condition + + def _automatic_metadata_filter_func( + self, dataset_ids: list, query: str, node_data: KnowledgeRetrievalNodeData + ) -> list[dict[str, Any]]: + # get all metadata field + metadata_fields = db.session.query(DatasetMetadata).filter(DatasetMetadata.dataset_id.in_(dataset_ids)).all() + all_metadata_fields = [metadata_field.field_name for metadata_field in metadata_fields] + # get metadata model config + metadata_model_config = node_data.metadata_model_config + if metadata_model_config is None: + raise ValueError("metadata_model_config is required") + # get metadata model instance + # fetch model config + model_instance, model_config = self._fetch_model_config(node_data.metadata_model_config) # type: ignore + # fetch prompt 
messages + prompt_template = self._get_prompt_template( + node_data=node_data, + metadata_fields=all_metadata_fields, + query=query or "", + ) + prompt_messages, stop = self._fetch_prompt_messages( + prompt_template=prompt_template, + sys_query=query, + memory=None, + model_config=model_config, + sys_files=[], + vision_enabled=node_data.vision.enabled, + vision_detail=node_data.vision.configs.detail, + variable_pool=self.graph_runtime_state.variable_pool, + jinja2_variables=[], + ) + + result_text = "" + try: + # handle invoke result + generator = self._invoke_llm( + node_data_model=node_data.metadata_model_config, # type: ignore + model_instance=model_instance, + prompt_messages=prompt_messages, + stop=stop, + ) + + for event in generator: + if isinstance(event, ModelInvokeCompletedEvent): + result_text = event.text + break + + result_text_json = parse_and_check_json_markdown(result_text, []) + automatic_metadata_filters = [] + if "metadata_map" in result_text_json: + metadata_map = result_text_json["metadata_map"] + for item in metadata_map: + if item.get("metadata_field_name") in all_metadata_fields: + automatic_metadata_filters.append( + { + "metadata_name": item.get("metadata_field_name"), + "value": item.get("metadata_field_value"), + "condition": item.get("comparison_operator"), + } + ) + except Exception: + return [] + return automatic_metadata_filters + + def _process_metadata_filter_func(self, condition: str, metadata_name: str, value: Optional[str], filters: list): + match condition: + case "contains": + filters.append( + (text("documents.doc_metadata ->> :key LIKE :value")).params(key=metadata_name, value=f"%{value}%") + ) + case "not contains": + filters.append( + (text("documents.doc_metadata ->> :key NOT LIKE :value")).params( + key=metadata_name, value=f"%{value}%" + ) + ) + case "start with": + filters.append( + (text("documents.doc_metadata ->> :key LIKE :value")).params(key=metadata_name, value=f"{value}%") + ) + case "end with": + filters.append( + (text("documents.doc_metadata ->> :key LIKE :value")).params(key=metadata_name, value=f"%{value}") + ) + case "=" | "is": + if isinstance(value, str): + filters.append(Document.doc_metadata[metadata_name] == f'"{value}"') + else: + filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Integer) == value) + case "is not" | "≠": + if isinstance(value, str): + filters.append(Document.doc_metadata[metadata_name] != f'"{value}"') + else: + filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Integer) != value) + case "empty": + filters.append(Document.doc_metadata[metadata_name].is_(None)) + case "not empty": + filters.append(Document.doc_metadata[metadata_name].isnot(None)) + case "before" | "<": + filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Integer) < value) + case "after" | ">": + filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Integer) > value) + case "≤" | "<=": + filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Integer) <= value) + case "≥" | ">=": + filters.append(sqlalchemy_cast(Document.doc_metadata[metadata_name].astext, Integer) >= value) + case _: + pass + return filters + + @classmethod + def _extract_variable_selector_to_variable_mapping( + cls, *, graph_config: Mapping[str, Any], node_id: str, - node_data: KnowledgeRetrievalNodeData, + node_data: KnowledgeRetrievalNodeData, # type: ignore + ) -> Mapping[str, Sequence[str]]: """ Extract variable selector to variable mapping @@ -306,18
+507,16 @@ class KnowledgeRetrievalNode(BaseNode[KnowledgeRetrievalNodeData]): variable_mapping[node_id + ".query"] = node_data.query_variable_selector return variable_mapping - def _fetch_model_config( - self, node_data: KnowledgeRetrievalNodeData - ) -> tuple[ModelInstance, ModelConfigWithCredentialsEntity]: + def _fetch_model_config(self, model: ModelConfig) -> tuple[ModelInstance, ModelConfigWithCredentialsEntity]: # type: ignore """ Fetch model config - :param node_data: node data + :param model: model :return: """ - if node_data.single_retrieval_config is None: - raise ValueError("single_retrieval_config is required") - model_name = node_data.single_retrieval_config.model.name - provider_name = node_data.single_retrieval_config.model.provider + if model is None: + raise ValueError("model is required") + model_name = model.name + provider_name = model.provider model_manager = ModelManager() model_instance = model_manager.get_model_instance( @@ -346,14 +545,14 @@ class KnowledgeRetrievalNode(BaseNode[KnowledgeRetrievalNodeData]): raise ModelQuotaExceededError(f"Model provider {provider_name} quota exceeded.") # model config - completion_params = node_data.single_retrieval_config.model.completion_params + completion_params = model.completion_params stop = [] if "stop" in completion_params: stop = completion_params["stop"] del completion_params["stop"] # get model mode - model_mode = node_data.single_retrieval_config.model.mode + model_mode = model.mode if not model_mode: raise ModelNotExistError("LLM mode is required.") @@ -372,3 +571,50 @@ class KnowledgeRetrievalNode(BaseNode[KnowledgeRetrievalNodeData]): parameters=completion_params, stop=stop, ) + + def _get_prompt_template(self, node_data: KnowledgeRetrievalNodeData, metadata_fields: list, query: str): + model_mode = ModelMode.value_of(node_data.metadata_model_config.mode) # type: ignore + input_text = query + memory_str = "" + + prompt_messages: list[LLMNodeChatModelMessage] = [] + if model_mode == ModelMode.CHAT: + system_prompt_messages = LLMNodeChatModelMessage( + role=PromptMessageRole.SYSTEM, text=METADATA_FILTER_SYSTEM_PROMPT + ) + prompt_messages.append(system_prompt_messages) + user_prompt_message_1 = LLMNodeChatModelMessage( + role=PromptMessageRole.USER, text=METADATA_FILTER_USER_PROMPT_1 + ) + prompt_messages.append(user_prompt_message_1) + assistant_prompt_message_1 = LLMNodeChatModelMessage( + role=PromptMessageRole.ASSISTANT, text=METADATA_FILTER_ASSISTANT_PROMPT_1 + ) + prompt_messages.append(assistant_prompt_message_1) + user_prompt_message_2 = LLMNodeChatModelMessage( + role=PromptMessageRole.USER, text=QUESTION_CLASSIFIER_USER_PROMPT_2 + ) + prompt_messages.append(user_prompt_message_2) + assistant_prompt_message_2 = LLMNodeChatModelMessage( + role=PromptMessageRole.ASSISTANT, text=METADATA_FILTER_ASSISTANT_PROMPT_2 + ) + prompt_messages.append(assistant_prompt_message_2) + user_prompt_message_3 = LLMNodeChatModelMessage( + role=PromptMessageRole.USER, + text=METADATA_FILTER_USER_PROMPT_3.format( + input_text=input_text, + metadata_fields=json.dumps(metadata_fields, ensure_ascii=False), + ), + ) + prompt_messages.append(user_prompt_message_3) + return prompt_messages + elif model_mode == ModelMode.COMPLETION: + return LLMNodeCompletionModelPromptTemplate( + text=METADATA_FILTER_COMPLETION_PROMPT.format( + input_text=input_text, + metadata_fields=json.dumps(metadata_fields, ensure_ascii=False), + ) + ) + + else: + raise InvalidModelTypeError(f"Model mode {model_mode} not support.") diff --git 
a/api/core/workflow/nodes/knowledge_retrieval/template_prompts.py b/api/core/workflow/nodes/knowledge_retrieval/template_prompts.py new file mode 100644 index 0000000000..7abd55d798 --- /dev/null +++ b/api/core/workflow/nodes/knowledge_retrieval/template_prompts.py @@ -0,0 +1,66 @@ +METADATA_FILTER_SYSTEM_PROMPT = """ + ### Job Description', + You are a text metadata extract engine that extract text's metadata based on user input and set the metadata value + ### Task + Your task is to ONLY extract the metadatas that exist in the input text from the provided metadata list and Use the following operators ["=", "!=", ">", "<", ">=", "<="] to express logical relationships, then return result in JSON format with the key "metadata_fields" and value "metadata_field_value" and comparison operator "comparison_operator". + ### Format + The input text is in the variable input_text. Metadata are specified as a list in the variable metadata_fields. + ### Constraint + DO NOT include anything other than the JSON array in your response. +""" # noqa: E501 + +METADATA_FILTER_USER_PROMPT_1 = """ + { "input_text": "I want to know which company’s email address test@example.com is?", + "metadata_fields": ["filename", "email", "phone", "address"] + } +""" + +METADATA_FILTER_ASSISTANT_PROMPT_1 = """ +```json + {"metadata_map": [ + {"metadata_field_name": "email", "metadata_field_value": "test@example.com", "comparison_operator": "="} + ] + } +``` +""" + +METADATA_FILTER_USER_PROMPT_2 = """ + {"input_text": "What are the movies with a score of more than 9 in 2024?", + "metadata_fields": ["name", "year", "rating", "country"]} +""" + +METADATA_FILTER_ASSISTANT_PROMPT_2 = """ +```json + {"metadata_map": [ + {"metadata_field_name": "year", "metadata_field_value": "2024", "comparison_operator": "="}, + {"metadata_field_name": "rating", "metadata_field_value": "9", "comparison_operator": ">"}, + ]} +``` +""" + +METADATA_FILTER_USER_PROMPT_3 = """ + '{{"input_text": "{input_text}",', + '"metadata_fields": {metadata_fields}}}' +""" + +METADATA_FILTER_COMPLETION_PROMPT = """ +### Job Description +You are a text metadata extract engine that extract text's metadata based on user input and set the metadata value +### Task +# Your task is to ONLY extract the metadatas that exist in the input text from the provided metadata list and Use the following operators ["=", "!=", ">", "<", ">=", "<="] to express logical relationships, then return result in JSON format with the key "metadata_fields" and value "metadata_field_value" and comparison operator "comparison_operator". +### Format +The input text is in the variable input_text. Metadata are specified as a list in the variable metadata_fields. +### Constraint +DO NOT include anything other than the JSON array in your response. +### Example +Here is the chat example between human and assistant, inside XML tags. 
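With these few-shot prompts, a cooperative model answers with a fenced JSON block whose `metadata_map` array names a field, a value, and a comparison operator. A rough, self-contained sketch of reducing such a reply to the filter dicts used by the node, using plain `json` here instead of the project's markdown-aware parser, purely for illustration:

```python
import json

def extract_filters(reply: str, known_fields: list[str]) -> list[dict]:
    # Strip an optional json code fence, then parse the payload.
    body = reply.strip().removeprefix("```json").removesuffix("```").strip()
    payload = json.loads(body)
    filters = []
    for item in payload.get("metadata_map", []):
        # Drop fields the dataset does not actually define.
        if item.get("metadata_field_name") in known_fields:
            filters.append(
                {
                    "metadata_name": item["metadata_field_name"],
                    "value": item.get("metadata_field_value"),
                    "condition": item.get("comparison_operator"),
                }
            )
    return filters

reply = '```json\n{"metadata_map": [{"metadata_field_name": "year", "metadata_field_value": "2024", "comparison_operator": "="}]}\n```'
print(extract_filters(reply, ["name", "year", "rating"]))
# [{'metadata_name': 'year', 'value': '2024', 'condition': '='}]
```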
+ +User:{{"input_text": ["I want to know which company’s email address test@example.com is?"], "metadata_fields": ["filename", "email", "phone", "address"]}} +Assistant:{{"metadata_map": [{{"metadata_field_name": "email", "metadata_field_value": "test@example.com", "comparison_operator": "="}}]}} +User:{{"input_text": "What are the movies with a score of more than 9 in 2024?", "metadata_fields": ["name", "year", "rating", "country"]}} +Assistant:{{"metadata_map": [{{"metadata_field_name": "year", "metadata_field_value": "2024", "comparison_operator": "="}, {{"metadata_field_name": "rating", "metadata_field_value": "9", "comparison_operator": ">"}}]}} + +### User Input +{{"input_text" : "{input_text}", "metadata_fields" : {metadata_fields}}} +### Assistant Output +""" # noqa: E501 diff --git a/api/fields/dataset_fields.py b/api/fields/dataset_fields.py index bedab5750f..397532af93 100644 --- a/api/fields/dataset_fields.py +++ b/api/fields/dataset_fields.py @@ -53,6 +53,8 @@ external_knowledge_info_fields = { "external_knowledge_api_endpoint": fields.String, } +doc_metadata_fields = {"id": fields.String, "name": fields.String, "type": fields.String} + dataset_detail_fields = { "id": fields.String, "name": fields.String, @@ -76,6 +78,8 @@ dataset_detail_fields = { "doc_form": fields.String, "external_knowledge_info": fields.Nested(external_knowledge_info_fields), "external_retrieval_model": fields.Nested(external_retrieval_model_fields, allow_null=True), + "doc_metadata": fields.List(fields.Nested(doc_metadata_fields)), + "built_in_field_enabled": fields.Boolean, } dataset_query_detail_fields = { @@ -87,3 +91,9 @@ dataset_query_detail_fields = { "created_by": fields.String, "created_at": TimestampField, } + +dataset_metadata_fields = { + "id": fields.String, + "type": fields.String, + "name": fields.String, +} diff --git a/api/fields/document_fields.py b/api/fields/document_fields.py index f2250d964a..6d59ee9baa 100644 --- a/api/fields/document_fields.py +++ b/api/fields/document_fields.py @@ -3,6 +3,13 @@ from flask_restful import fields # type: ignore from fields.dataset_fields import dataset_fields from libs.helper import TimestampField +document_metadata_fields = { + "id": fields.String, + "name": fields.String, + "type": fields.String, + "value": fields.String, +} + document_fields = { "id": fields.String, "position": fields.Integer, @@ -25,6 +32,7 @@ document_fields = { "word_count": fields.Integer, "hit_count": fields.Integer, "doc_form": fields.String, + "doc_metadata": fields.List(fields.Nested(document_metadata_fields), attribute="doc_metadata_details"), } document_with_segments_fields = { @@ -51,6 +59,7 @@ document_with_segments_fields = { "hit_count": fields.Integer, "completed_segments": fields.Integer, "total_segments": fields.Integer, + "doc_metadata": fields.List(fields.Nested(document_metadata_fields), attribute="doc_metadata_details"), } dataset_and_document_fields = { diff --git a/api/migrations/versions/2025_02_27_0917-d20049ed0af6_add_metadata_function.py b/api/migrations/versions/2025_02_27_0917-d20049ed0af6_add_metadata_function.py new file mode 100644 index 0000000000..877e3a5eed --- /dev/null +++ b/api/migrations/versions/2025_02_27_0917-d20049ed0af6_add_metadata_function.py @@ -0,0 +1,90 @@ +"""add_metadata_function + +Revision ID: d20049ed0af6 +Revises: 08ec4f75af5e +Create Date: 2025-02-27 09:17:48.903213 + +""" +from alembic import op +import models as models +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by 
Alembic. +revision = 'd20049ed0af6' +down_revision = 'f051706725cc' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('dataset_metadata_bindings', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('dataset_id', models.types.StringUUID(), nullable=False), + sa.Column('metadata_id', models.types.StringUUID(), nullable=False), + sa.Column('document_id', models.types.StringUUID(), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.PrimaryKeyConstraint('id', name='dataset_metadata_binding_pkey') + ) + with op.batch_alter_table('dataset_metadata_bindings', schema=None) as batch_op: + batch_op.create_index('dataset_metadata_binding_dataset_idx', ['dataset_id'], unique=False) + batch_op.create_index('dataset_metadata_binding_document_idx', ['document_id'], unique=False) + batch_op.create_index('dataset_metadata_binding_metadata_idx', ['metadata_id'], unique=False) + batch_op.create_index('dataset_metadata_binding_tenant_idx', ['tenant_id'], unique=False) + + op.create_table('dataset_metadatas', + sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False), + sa.Column('tenant_id', models.types.StringUUID(), nullable=False), + sa.Column('dataset_id', models.types.StringUUID(), nullable=False), + sa.Column('type', sa.String(length=255), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP(0)'), nullable=False), + sa.Column('created_by', models.types.StringUUID(), nullable=False), + sa.Column('updated_by', models.types.StringUUID(), nullable=True), + sa.PrimaryKeyConstraint('id', name='dataset_metadata_pkey') + ) + with op.batch_alter_table('dataset_metadatas', schema=None) as batch_op: + batch_op.create_index('dataset_metadata_dataset_idx', ['dataset_id'], unique=False) + batch_op.create_index('dataset_metadata_tenant_idx', ['tenant_id'], unique=False) + + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.add_column(sa.Column('built_in_field_enabled', sa.Boolean(), server_default=sa.text('false'), nullable=False)) + + with op.batch_alter_table('documents', schema=None) as batch_op: + batch_op.alter_column('doc_metadata', + existing_type=postgresql.JSON(astext_type=sa.Text()), + type_=postgresql.JSONB(astext_type=sa.Text()), + existing_nullable=True) + batch_op.create_index('document_metadata_idx', ['doc_metadata'], unique=False, postgresql_using='gin') + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
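The upgrade above converts `documents.doc_metadata` to JSONB and adds a GIN index over it. With the default `jsonb_ops` operator class, that index can serve containment (`@>`) and key-existence (`?`) predicates, while the `->>` text extractions built by `_process_metadata_filter_func` run as ordinary per-row filters. A hedged sketch of both query shapes; the connection string, key names, and values are placeholders:

```python
from sqlalchemy import create_engine, text

engine = create_engine("postgresql+psycopg2://user:pass@localhost/dify")  # placeholder DSN

with engine.connect() as conn:
    # Containment predicate: eligible for the new GIN index (jsonb_ops supports @>).
    by_containment = conn.execute(
        text("SELECT id FROM documents WHERE doc_metadata @> CAST(:frag AS jsonb)"),
        {"frag": '{"year": "2024"}'},
    ).fetchall()

    # Text extraction, the shape used by the filter builders: a plain per-row predicate.
    by_like = conn.execute(
        text("SELECT id FROM documents WHERE doc_metadata ->> :key LIKE :pattern"),
        {"key": "author", "pattern": "%Alice%"},
    ).fetchall()
```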
### + with op.batch_alter_table('documents', schema=None) as batch_op: + batch_op.drop_index('document_metadata_idx', postgresql_using='gin') + batch_op.alter_column('doc_metadata', + existing_type=postgresql.JSONB(astext_type=sa.Text()), + type_=postgresql.JSON(astext_type=sa.Text()), + existing_nullable=True) + + with op.batch_alter_table('datasets', schema=None) as batch_op: + batch_op.drop_column('built_in_field_enabled') + + with op.batch_alter_table('dataset_metadatas', schema=None) as batch_op: + batch_op.drop_index('dataset_metadata_tenant_idx') + batch_op.drop_index('dataset_metadata_dataset_idx') + + op.drop_table('dataset_metadatas') + with op.batch_alter_table('dataset_metadata_bindings', schema=None) as batch_op: + batch_op.drop_index('dataset_metadata_binding_tenant_idx') + batch_op.drop_index('dataset_metadata_binding_metadata_idx') + batch_op.drop_index('dataset_metadata_binding_document_idx') + batch_op.drop_index('dataset_metadata_binding_dataset_idx') + + op.drop_table('dataset_metadata_bindings') + # ### end Alembic commands ### diff --git a/api/models/dataset.py b/api/models/dataset.py index 4275f2728c..28589eb8c1 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -16,6 +16,7 @@ from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import Mapped from configs import dify_config +from core.rag.index_processor.constant.built_in_field import BuiltInField, MetadataDataSource from core.rag.retrieval.retrieval_methods import RetrievalMethod from extensions.ext_storage import storage from services.entities.knowledge_entities.knowledge_entities import ParentMode, Rule @@ -60,6 +61,7 @@ class Dataset(db.Model): # type: ignore[name-defined] embedding_model_provider = db.Column(db.String(255), nullable=True) collection_binding_id = db.Column(StringUUID, nullable=True) retrieval_model = db.Column(JSONB, nullable=True) + built_in_field_enabled = db.Column(db.Boolean, nullable=False, server_default=db.text("false")) @property def dataset_keyword_table(self): @@ -197,6 +199,56 @@ class Dataset(db.Model): # type: ignore[name-defined] "external_knowledge_api_endpoint": json.loads(external_knowledge_api.settings).get("endpoint", ""), } + @property + def doc_metadata(self): + dataset_metadatas = db.session.query(DatasetMetadata).filter(DatasetMetadata.dataset_id == self.id).all() + + doc_metadata = [ + { + "id": dataset_metadata.id, + "name": dataset_metadata.name, + "type": dataset_metadata.type, + } + for dataset_metadata in dataset_metadatas + ] + if self.built_in_field_enabled: + doc_metadata.append( + { + "id": "built-in", + "name": BuiltInField.document_name.value, + "type": "string", + } + ) + doc_metadata.append( + { + "id": "built-in", + "name": BuiltInField.uploader.value, + "type": "string", + } + ) + doc_metadata.append( + { + "id": "built-in", + "name": BuiltInField.upload_date.value, + "type": "time", + } + ) + doc_metadata.append( + { + "id": "built-in", + "name": BuiltInField.last_update_date.value, + "type": "time", + } + ) + doc_metadata.append( + { + "id": "built-in", + "name": BuiltInField.source.value, + "type": "string", + } + ) + return doc_metadata + @staticmethod def gen_collection_name_by_id(dataset_id: str) -> str: normalized_dataset_id = dataset_id.replace("-", "_") @@ -250,6 +302,7 @@ class Document(db.Model): # type: ignore[name-defined] db.Index("document_dataset_id_idx", "dataset_id"), db.Index("document_is_paused_idx", "is_paused"), db.Index("document_tenant_idx", "tenant_id"), + db.Index("document_metadata_idx", 
"doc_metadata", postgresql_using="gin"), ) # initial fields @@ -306,7 +359,7 @@ class Document(db.Model): # type: ignore[name-defined] archived_at = db.Column(db.DateTime, nullable=True) updated_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) doc_type = db.Column(db.String(40), nullable=True) - doc_metadata = db.Column(db.JSON, nullable=True) + doc_metadata = db.Column(JSONB, nullable=True) doc_form = db.Column(db.String(255), nullable=False, server_default=db.text("'text_model'::character varying")) doc_language = db.Column(db.String(255), nullable=True) @@ -396,12 +449,95 @@ class Document(db.Model): # type: ignore[name-defined] .scalar() ) + @property + def uploader(self): + user = db.session.query(Account).filter(Account.id == self.created_by).first() + return user.name if user else None + + @property + def upload_date(self): + return self.created_at + + @property + def last_update_date(self): + return self.updated_at + + @property + def doc_metadata_details(self): + if self.doc_metadata: + document_metadatas = ( + db.session.query(DatasetMetadata) + .join(DatasetMetadataBinding, DatasetMetadataBinding.metadata_id == DatasetMetadata.id) + .filter( + DatasetMetadataBinding.dataset_id == self.dataset_id, DatasetMetadataBinding.document_id == self.id + ) + .all() + ) + metadata_list = [] + for metadata in document_metadatas: + metadata_dict = { + "id": metadata.id, + "name": metadata.name, + "type": metadata.type, + "value": self.doc_metadata.get(metadata.name), + } + metadata_list.append(metadata_dict) + # deal built-in fields + metadata_list.extend(self.get_built_in_fields()) + + return metadata_list + return None + @property def process_rule_dict(self): if self.dataset_process_rule_id: return self.dataset_process_rule.to_dict() return None + def get_built_in_fields(self): + built_in_fields = [] + built_in_fields.append( + { + "id": "built-in", + "name": BuiltInField.document_name, + "type": "string", + "value": self.name, + } + ) + built_in_fields.append( + { + "id": "built-in", + "name": BuiltInField.uploader, + "type": "string", + "value": self.uploader, + } + ) + built_in_fields.append( + { + "id": "built-in", + "name": BuiltInField.upload_date, + "type": "time", + "value": self.created_at.timestamp(), + } + ) + built_in_fields.append( + { + "id": "built-in", + "name": BuiltInField.last_update_date, + "type": "time", + "value": self.updated_at.timestamp(), + } + ) + built_in_fields.append( + { + "id": "built-in", + "name": BuiltInField.source, + "type": "string", + "value": MetadataDataSource[self.data_source_type].value, + } + ) + return built_in_fields + def to_dict(self): return { "id": self.id, @@ -945,3 +1081,41 @@ class RateLimitLog(db.Model): # type: ignore[name-defined] subscription_plan = db.Column(db.String(255), nullable=False) operation = db.Column(db.String(255), nullable=False) created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + + +class DatasetMetadata(db.Model): # type: ignore[name-defined] + __tablename__ = "dataset_metadatas" + __table_args__ = ( + db.PrimaryKeyConstraint("id", name="dataset_metadata_pkey"), + db.Index("dataset_metadata_tenant_idx", "tenant_id"), + db.Index("dataset_metadata_dataset_idx", "dataset_id"), + ) + + id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id = db.Column(StringUUID, nullable=False) + dataset_id = db.Column(StringUUID, nullable=False) + type = db.Column(db.String(255), nullable=False) + name = 
db.Column(db.String(255), nullable=False) + created_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + updated_at = db.Column(db.DateTime, nullable=False, server_default=db.text("CURRENT_TIMESTAMP(0)")) + created_by = db.Column(StringUUID, nullable=False) + updated_by = db.Column(StringUUID, nullable=True) + + +class DatasetMetadataBinding(db.Model): # type: ignore[name-defined] + __tablename__ = "dataset_metadata_bindings" + __table_args__ = ( + db.PrimaryKeyConstraint("id", name="dataset_metadata_binding_pkey"), + db.Index("dataset_metadata_binding_tenant_idx", "tenant_id"), + db.Index("dataset_metadata_binding_dataset_idx", "dataset_id"), + db.Index("dataset_metadata_binding_metadata_idx", "metadata_id"), + db.Index("dataset_metadata_binding_document_idx", "document_id"), + ) + + id = db.Column(StringUUID, server_default=db.text("uuid_generate_v4()")) + tenant_id = db.Column(StringUUID, nullable=False) + dataset_id = db.Column(StringUUID, nullable=False) + metadata_id = db.Column(StringUUID, nullable=False) + document_id = db.Column(StringUUID, nullable=False) + created_at = db.Column(db.DateTime, nullable=False, server_default=func.current_timestamp()) + created_by = db.Column(StringUUID, nullable=False) diff --git a/api/poetry.lock b/api/poetry.lock index 5eb956e81a..68516ca1f3 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -14,105 +14,105 @@ files = [ [[package]] name = "aiohappyeyeballs" -version = "2.5.0" +version = "2.6.1" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.9" groups = ["main", "storage", "vdb"] files = [ - {file = "aiohappyeyeballs-2.5.0-py3-none-any.whl", hash = "sha256:0850b580748c7071db98bffff6d4c94028d0d3035acc20fd721a0ce7e8cac35d"}, - {file = "aiohappyeyeballs-2.5.0.tar.gz", hash = "sha256:18fde6204a76deeabc97c48bdd01d5801cfda5d6b9c8bbeb1aaaee9d648ca191"}, + {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, + {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, ] [[package]] name = "aiohttp" -version = "3.11.13" +version = "3.11.14" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.9" groups = ["main", "storage", "vdb"] files = [ - {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a4fe27dbbeec445e6e1291e61d61eb212ee9fed6e47998b27de71d70d3e8777d"}, - {file = "aiohttp-3.11.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e64ca2dbea28807f8484c13f684a2f761e69ba2640ec49dacd342763cc265ef"}, - {file = "aiohttp-3.11.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9840be675de208d1f68f84d578eaa4d1a36eee70b16ae31ab933520c49ba1325"}, - {file = "aiohttp-3.11.13-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28a772757c9067e2aee8a6b2b425d0efaa628c264d6416d283694c3d86da7689"}, - {file = "aiohttp-3.11.13-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b88aca5adbf4625e11118df45acac29616b425833c3be7a05ef63a6a4017bfdb"}, - {file = "aiohttp-3.11.13-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce10ddfbe26ed5856d6902162f71b8fe08545380570a885b4ab56aecfdcb07f4"}, - {file = "aiohttp-3.11.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa48dac27f41b36735c807d1ab093a8386701bbf00eb6b89a0f69d9fa26b3671"}, - {file = 
"aiohttp-3.11.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89ce611b1eac93ce2ade68f1470889e0173d606de20c85a012bfa24be96cf867"}, - {file = "aiohttp-3.11.13-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:78e4dd9c34ec7b8b121854eb5342bac8b02aa03075ae8618b6210a06bbb8a115"}, - {file = "aiohttp-3.11.13-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:66047eacbc73e6fe2462b77ce39fc170ab51235caf331e735eae91c95e6a11e4"}, - {file = "aiohttp-3.11.13-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5ad8f1c19fe277eeb8bc45741c6d60ddd11d705c12a4d8ee17546acff98e0802"}, - {file = "aiohttp-3.11.13-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64815c6f02e8506b10113ddbc6b196f58dbef135751cc7c32136df27b736db09"}, - {file = "aiohttp-3.11.13-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:967b93f21b426f23ca37329230d5bd122f25516ae2f24a9cea95a30023ff8283"}, - {file = "aiohttp-3.11.13-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cf1f31f83d16ec344136359001c5e871915c6ab685a3d8dee38e2961b4c81730"}, - {file = "aiohttp-3.11.13-cp310-cp310-win32.whl", hash = "sha256:00c8ac69e259c60976aa2edae3f13d9991cf079aaa4d3cd5a49168ae3748dee3"}, - {file = "aiohttp-3.11.13-cp310-cp310-win_amd64.whl", hash = "sha256:90d571c98d19a8b6e793b34aa4df4cee1e8fe2862d65cc49185a3a3d0a1a3996"}, - {file = "aiohttp-3.11.13-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b35aab22419ba45f8fc290d0010898de7a6ad131e468ffa3922b1b0b24e9d2e"}, - {file = "aiohttp-3.11.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f81cba651db8795f688c589dd11a4fbb834f2e59bbf9bb50908be36e416dc760"}, - {file = "aiohttp-3.11.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f55d0f242c2d1fcdf802c8fabcff25a9d85550a4cf3a9cf5f2a6b5742c992839"}, - {file = "aiohttp-3.11.13-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4bea08a6aad9195ac9b1be6b0c7e8a702a9cec57ce6b713698b4a5afa9c2e33"}, - {file = "aiohttp-3.11.13-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6070bcf2173a7146bb9e4735b3c62b2accba459a6eae44deea0eb23e0035a23"}, - {file = "aiohttp-3.11.13-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:718d5deb678bc4b9d575bfe83a59270861417da071ab44542d0fcb6faa686636"}, - {file = "aiohttp-3.11.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f6b2c5b4a4d22b8fb2c92ac98e0747f5f195e8e9448bfb7404cd77e7bfa243f"}, - {file = "aiohttp-3.11.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:747ec46290107a490d21fe1ff4183bef8022b848cf9516970cb31de6d9460088"}, - {file = "aiohttp-3.11.13-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:01816f07c9cc9d80f858615b1365f8319d6a5fd079cd668cc58e15aafbc76a54"}, - {file = "aiohttp-3.11.13-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:a08ad95fcbd595803e0c4280671d808eb170a64ca3f2980dd38e7a72ed8d1fea"}, - {file = "aiohttp-3.11.13-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c97be90d70f7db3aa041d720bfb95f4869d6063fcdf2bb8333764d97e319b7d0"}, - {file = "aiohttp-3.11.13-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ab915a57c65f7a29353c8014ac4be685c8e4a19e792a79fe133a8e101111438e"}, - {file = "aiohttp-3.11.13-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:35cda4e07f5e058a723436c4d2b7ba2124ab4e0aa49e6325aed5896507a8a42e"}, - {file = "aiohttp-3.11.13-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:af55314407714fe77a68a9ccaab90fdb5deb57342585fd4a3a8102b6d4370080"}, - {file = "aiohttp-3.11.13-cp311-cp311-win32.whl", hash = "sha256:42d689a5c0a0c357018993e471893e939f555e302313d5c61dfc566c2cad6185"}, - {file = "aiohttp-3.11.13-cp311-cp311-win_amd64.whl", hash = "sha256:b73a2b139782a07658fbf170fe4bcdf70fc597fae5ffe75e5b67674c27434a9f"}, - {file = "aiohttp-3.11.13-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2eabb269dc3852537d57589b36d7f7362e57d1ece308842ef44d9830d2dc3c90"}, - {file = "aiohttp-3.11.13-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b77ee42addbb1c36d35aca55e8cc6d0958f8419e458bb70888d8c69a4ca833d"}, - {file = "aiohttp-3.11.13-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55789e93c5ed71832e7fac868167276beadf9877b85697020c46e9a75471f55f"}, - {file = "aiohttp-3.11.13-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c929f9a7249a11e4aa5c157091cfad7f49cc6b13f4eecf9b747104befd9f56f2"}, - {file = "aiohttp-3.11.13-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d33851d85537bbf0f6291ddc97926a754c8f041af759e0aa0230fe939168852b"}, - {file = "aiohttp-3.11.13-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9229d8613bd8401182868fe95688f7581673e1c18ff78855671a4b8284f47bcb"}, - {file = "aiohttp-3.11.13-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669dd33f028e54fe4c96576f406ebb242ba534dd3a981ce009961bf49960f117"}, - {file = "aiohttp-3.11.13-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c1b20a1ace54af7db1f95af85da530fe97407d9063b7aaf9ce6a32f44730778"}, - {file = "aiohttp-3.11.13-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5724cc77f4e648362ebbb49bdecb9e2b86d9b172c68a295263fa072e679ee69d"}, - {file = "aiohttp-3.11.13-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:aa36c35e94ecdb478246dd60db12aba57cfcd0abcad43c927a8876f25734d496"}, - {file = "aiohttp-3.11.13-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9b5b37c863ad5b0892cc7a4ceb1e435e5e6acd3f2f8d3e11fa56f08d3c67b820"}, - {file = "aiohttp-3.11.13-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e06cf4852ce8c4442a59bae5a3ea01162b8fcb49ab438d8548b8dc79375dad8a"}, - {file = "aiohttp-3.11.13-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5194143927e494616e335d074e77a5dac7cd353a04755330c9adc984ac5a628e"}, - {file = "aiohttp-3.11.13-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afcb6b275c2d2ba5d8418bf30a9654fa978b4f819c2e8db6311b3525c86fe637"}, - {file = "aiohttp-3.11.13-cp312-cp312-win32.whl", hash = "sha256:7104d5b3943c6351d1ad7027d90bdd0ea002903e9f610735ac99df3b81f102ee"}, - {file = "aiohttp-3.11.13-cp312-cp312-win_amd64.whl", hash = "sha256:47dc018b1b220c48089b5b9382fbab94db35bef2fa192995be22cbad3c5730c8"}, - {file = "aiohttp-3.11.13-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9862d077b9ffa015dbe3ce6c081bdf35135948cb89116e26667dd183550833d1"}, - {file = "aiohttp-3.11.13-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fbfef0666ae9e07abfa2c54c212ac18a1f63e13e0760a769f70b5717742f3ece"}, - {file = "aiohttp-3.11.13-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:93a1f7d857c4fcf7cabb1178058182c789b30d85de379e04f64c15b7e88d66fb"}, - {file = "aiohttp-3.11.13-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba40b7ae0f81c7029583a338853f6607b6d83a341a3dcde8bed1ea58a3af1df9"}, - {file = 
"aiohttp-3.11.13-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5b95787335c483cd5f29577f42bbe027a412c5431f2f80a749c80d040f7ca9f"}, - {file = "aiohttp-3.11.13-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7d474c5c1f0b9405c1565fafdc4429fa7d986ccbec7ce55bc6a330f36409cad"}, - {file = "aiohttp-3.11.13-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e83fb1991e9d8982b3b36aea1e7ad27ea0ce18c14d054c7a404d68b0319eebb"}, - {file = "aiohttp-3.11.13-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4586a68730bd2f2b04a83e83f79d271d8ed13763f64b75920f18a3a677b9a7f0"}, - {file = "aiohttp-3.11.13-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fe4eb0e7f50cdb99b26250d9328faef30b1175a5dbcfd6d0578d18456bac567"}, - {file = "aiohttp-3.11.13-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2a8a6bc19818ac3e5596310ace5aa50d918e1ebdcc204dc96e2f4d505d51740c"}, - {file = "aiohttp-3.11.13-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7f27eec42f6c3c1df09cfc1f6786308f8b525b8efaaf6d6bd76c1f52c6511f6a"}, - {file = "aiohttp-3.11.13-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:2a4a13dfbb23977a51853b419141cd0a9b9573ab8d3a1455c6e63561387b52ff"}, - {file = "aiohttp-3.11.13-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:02876bf2f69b062584965507b07bc06903c2dc93c57a554b64e012d636952654"}, - {file = "aiohttp-3.11.13-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b992778d95b60a21c4d8d4a5f15aaab2bd3c3e16466a72d7f9bfd86e8cea0d4b"}, - {file = "aiohttp-3.11.13-cp313-cp313-win32.whl", hash = "sha256:507ab05d90586dacb4f26a001c3abf912eb719d05635cbfad930bdbeb469b36c"}, - {file = "aiohttp-3.11.13-cp313-cp313-win_amd64.whl", hash = "sha256:5ceb81a4db2decdfa087381b5fc5847aa448244f973e5da232610304e199e7b2"}, - {file = "aiohttp-3.11.13-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:51c3ff9c7a25f3cad5c09d9aacbc5aefb9267167c4652c1eb737989b554fe278"}, - {file = "aiohttp-3.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e271beb2b1dabec5cd84eb488bdabf9758d22ad13471e9c356be07ad139b3012"}, - {file = "aiohttp-3.11.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e9eb7e5764abcb49f0e2bd8f5731849b8728efbf26d0cac8e81384c95acec3f"}, - {file = "aiohttp-3.11.13-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baae005092e3f200de02699314ac8933ec20abf998ec0be39448f6605bce93df"}, - {file = "aiohttp-3.11.13-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1982c98ac62c132d2b773d50e2fcc941eb0b8bad3ec078ce7e7877c4d5a2dce7"}, - {file = "aiohttp-3.11.13-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2b25b2eeb35707113b2d570cadc7c612a57f1c5d3e7bb2b13870fe284e08fc0"}, - {file = "aiohttp-3.11.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b27961d65639128336b7a7c3f0046dcc62a9443d5ef962e3c84170ac620cec47"}, - {file = "aiohttp-3.11.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a01fe9f1e05025eacdd97590895e2737b9f851d0eb2e017ae9574d9a4f0b6252"}, - {file = "aiohttp-3.11.13-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa1fb1b61881c8405829c50e9cc5c875bfdbf685edf57a76817dfb50643e4a1a"}, - {file = "aiohttp-3.11.13-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:25de43bb3cf83ad83efc8295af7310219af6dbe4c543c2e74988d8e9c8a2a917"}, - {file = "aiohttp-3.11.13-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:fe7065e2215e4bba63dc00db9ae654c1ba3950a5fff691475a32f511142fcddb"}, - {file = "aiohttp-3.11.13-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7836587eef675a17d835ec3d98a8c9acdbeb2c1d72b0556f0edf4e855a25e9c1"}, - {file = "aiohttp-3.11.13-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:85fa0b18558eb1427090912bd456a01f71edab0872f4e0f9e4285571941e4090"}, - {file = "aiohttp-3.11.13-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a86dc177eb4c286c19d1823ac296299f59ed8106c9536d2b559f65836e0fb2c6"}, - {file = "aiohttp-3.11.13-cp39-cp39-win32.whl", hash = "sha256:684eea71ab6e8ade86b9021bb62af4bf0881f6be4e926b6b5455de74e420783a"}, - {file = "aiohttp-3.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:82c249f2bfa5ecbe4a1a7902c81c0fba52ed9ebd0176ab3047395d02ad96cfcb"}, - {file = "aiohttp-3.11.13.tar.gz", hash = "sha256:8ce789231404ca8fff7f693cdce398abf6d90fd5dae2b1847477196c243b1fbb"}, + {file = "aiohttp-3.11.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e2bc827c01f75803de77b134afdbf74fa74b62970eafdf190f3244931d7a5c0d"}, + {file = "aiohttp-3.11.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e365034c5cf6cf74f57420b57682ea79e19eb29033399dd3f40de4d0171998fa"}, + {file = "aiohttp-3.11.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c32593ead1a8c6aabd58f9d7ee706e48beac796bb0cb71d6b60f2c1056f0a65f"}, + {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4e7c7ec4146a94a307ca4f112802a8e26d969018fabed526efc340d21d3e7d0"}, + {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8b2df9feac55043759aa89f722a967d977d80f8b5865a4153fc41c93b957efc"}, + {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7571f99525c76a6280f5fe8e194eeb8cb4da55586c3c61c59c33a33f10cfce7"}, + {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b59d096b5537ec7c85954cb97d821aae35cfccce3357a2cafe85660cc6295628"}, + {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b42dbd097abb44b3f1156b4bf978ec5853840802d6eee2784857be11ee82c6a0"}, + {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b05774864c87210c531b48dfeb2f7659407c2dda8643104fb4ae5e2c311d12d9"}, + {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4e2e8ef37d4bc110917d038807ee3af82700a93ab2ba5687afae5271b8bc50ff"}, + {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e9faafa74dbb906b2b6f3eb9942352e9e9db8d583ffed4be618a89bd71a4e914"}, + {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:7e7abe865504f41b10777ac162c727af14e9f4db9262e3ed8254179053f63e6d"}, + {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:4848ae31ad44330b30f16c71e4f586cd5402a846b11264c412de99fa768f00f3"}, + {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2d0b46abee5b5737cb479cc9139b29f010a37b1875ee56d142aefc10686a390b"}, + {file = "aiohttp-3.11.14-cp310-cp310-win32.whl", hash = "sha256:a0d2c04a623ab83963576548ce098baf711a18e2c32c542b62322a0b4584b990"}, + {file = "aiohttp-3.11.14-cp310-cp310-win_amd64.whl", hash = "sha256:5409a59d5057f2386bb8b8f8bbcfb6e15505cedd8b2445db510563b5d7ea1186"}, + {file = "aiohttp-3.11.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f296d637a50bb15fb6a229fbb0eb053080e703b53dbfe55b1e4bb1c5ed25d325"}, + 
{file = "aiohttp-3.11.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec6cd1954ca2bbf0970f531a628da1b1338f594bf5da7e361e19ba163ecc4f3b"}, + {file = "aiohttp-3.11.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:572def4aad0a4775af66d5a2b5923c7de0820ecaeeb7987dcbccda2a735a993f"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c68e41c4d576cd6aa6c6d2eddfb32b2acfb07ebfbb4f9da991da26633a3db1a"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b8bbfc8111826aa8363442c0fc1f5751456b008737ff053570f06a151650b3"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b0a200e85da5c966277a402736a96457b882360aa15416bf104ca81e6f5807b"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d173c0ac508a2175f7c9a115a50db5fd3e35190d96fdd1a17f9cb10a6ab09aa1"}, + {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:413fe39fd929329f697f41ad67936f379cba06fcd4c462b62e5b0f8061ee4a77"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65c75b14ee74e8eeff2886321e76188cbe938d18c85cff349d948430179ad02c"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:321238a42ed463848f06e291c4bbfb3d15ba5a79221a82c502da3e23d7525d06"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:59a05cdc636431f7ce843c7c2f04772437dd816a5289f16440b19441be6511f1"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:daf20d9c3b12ae0fdf15ed92235e190f8284945563c4b8ad95b2d7a31f331cd3"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:05582cb2d156ac7506e68b5eac83179faedad74522ed88f88e5861b78740dc0e"}, + {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:12c5869e7ddf6b4b1f2109702b3cd7515667b437da90a5a4a50ba1354fe41881"}, + {file = "aiohttp-3.11.14-cp311-cp311-win32.whl", hash = "sha256:92868f6512714efd4a6d6cb2bfc4903b997b36b97baea85f744229f18d12755e"}, + {file = "aiohttp-3.11.14-cp311-cp311-win_amd64.whl", hash = "sha256:bccd2cb7aa5a3bfada72681bdb91637094d81639e116eac368f8b3874620a654"}, + {file = "aiohttp-3.11.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:70ab0f61c1a73d3e0342cedd9a7321425c27a7067bebeeacd509f96695b875fc"}, + {file = "aiohttp-3.11.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:602d4db80daf4497de93cb1ce00b8fc79969c0a7cf5b67bec96fa939268d806a"}, + {file = "aiohttp-3.11.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a8a0d127c10b8d89e69bbd3430da0f73946d839e65fec00ae48ca7916a31948"}, + {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9f835cdfedcb3f5947304e85b8ca3ace31eef6346d8027a97f4de5fb687534"}, + {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8aa5c68e1e68fff7cd3142288101deb4316b51f03d50c92de6ea5ce646e6c71f"}, + {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b512f1de1c688f88dbe1b8bb1283f7fbeb7a2b2b26e743bb2193cbadfa6f307"}, + {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc9253069158d57e27d47a8453d8a2c5a370dc461374111b5184cf2f147a3cc3"}, + {file = 
"aiohttp-3.11.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b2501f1b981e70932b4a552fc9b3c942991c7ae429ea117e8fba57718cdeed0"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:28a3d083819741592685762d51d789e6155411277050d08066537c5edc4066e6"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0df3788187559c262922846087e36228b75987f3ae31dd0a1e5ee1034090d42f"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e73fa341d8b308bb799cf0ab6f55fc0461d27a9fa3e4582755a3d81a6af8c09"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:51ba80d473eb780a329d73ac8afa44aa71dfb521693ccea1dea8b9b5c4df45ce"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8d1dd75aa4d855c7debaf1ef830ff2dfcc33f893c7db0af2423ee761ebffd22b"}, + {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41cf0cefd9e7b5c646c2ef529c8335e7eafd326f444cc1cdb0c47b6bc836f9be"}, + {file = "aiohttp-3.11.14-cp312-cp312-win32.whl", hash = "sha256:948abc8952aff63de7b2c83bfe3f211c727da3a33c3a5866a0e2cf1ee1aa950f"}, + {file = "aiohttp-3.11.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b420d076a46f41ea48e5fcccb996f517af0d406267e31e6716f480a3d50d65c"}, + {file = "aiohttp-3.11.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d14e274828561db91e4178f0057a915f3af1757b94c2ca283cb34cbb6e00b50"}, + {file = "aiohttp-3.11.14-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f30fc72daf85486cdcdfc3f5e0aea9255493ef499e31582b34abadbfaafb0965"}, + {file = "aiohttp-3.11.14-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4edcbe34e6dba0136e4cabf7568f5a434d89cc9de5d5155371acda275353d228"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a7169ded15505f55a87f8f0812c94c9412623c744227b9e51083a72a48b68a5"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad1f2fb9fe9b585ea4b436d6e998e71b50d2b087b694ab277b30e060c434e5db"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20412c7cc3720e47a47e63c0005f78c0c2370020f9f4770d7fc0075f397a9fb0"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dd9766da617855f7e85f27d2bf9a565ace04ba7c387323cd3e651ac4329db91"}, + {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:599b66582f7276ebefbaa38adf37585e636b6a7a73382eb412f7bc0fc55fb73d"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b41693b7388324b80f9acfabd479bd1c84f0bc7e8f17bab4ecd9675e9ff9c734"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:86135c32d06927339c8c5e64f96e4eee8825d928374b9b71a3c42379d7437058"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:04eb541ce1e03edc1e3be1917a0f45ac703e913c21a940111df73a2c2db11d73"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dc311634f6f28661a76cbc1c28ecf3b3a70a8edd67b69288ab7ca91058eb5a33"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:69bb252bfdca385ccabfd55f4cd740d421dd8c8ad438ded9637d81c228d0da49"}, + {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:2b86efe23684b58a88e530c4ab5b20145f102916bbb2d82942cafec7bd36a647"}, + {file = "aiohttp-3.11.14-cp313-cp313-win32.whl", hash = "sha256:b9c60d1de973ca94af02053d9b5111c4fbf97158e139b14f1be68337be267be6"}, + {file = "aiohttp-3.11.14-cp313-cp313-win_amd64.whl", hash = "sha256:0a29be28e60e5610d2437b5b2fed61d6f3dcde898b57fb048aa5079271e7f6f3"}, + {file = "aiohttp-3.11.14-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:14fc03508359334edc76d35b2821832f092c8f092e4b356e74e38419dfe7b6de"}, + {file = "aiohttp-3.11.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:92007c89a8cb7be35befa2732b0b32bf3a394c1b22ef2dff0ef12537d98a7bda"}, + {file = "aiohttp-3.11.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6d3986112e34eaa36e280dc8286b9dd4cc1a5bcf328a7f147453e188f6fe148f"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:749f1eb10e51dbbcdba9df2ef457ec060554842eea4d23874a3e26495f9e87b1"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:781c8bd423dcc4641298c8c5a2a125c8b1c31e11f828e8d35c1d3a722af4c15a"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:997b57e38aa7dc6caab843c5e042ab557bc83a2f91b7bd302e3c3aebbb9042a1"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a8b0321e40a833e381d127be993b7349d1564b756910b28b5f6588a159afef3"}, + {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8778620396e554b758b59773ab29c03b55047841d8894c5e335f12bfc45ebd28"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e906da0f2bcbf9b26cc2b144929e88cb3bf943dd1942b4e5af066056875c7618"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:87f0e003fb4dd5810c7fbf47a1239eaa34cd929ef160e0a54c570883125c4831"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7f2dadece8b85596ac3ab1ec04b00694bdd62abc31e5618f524648d18d9dd7fa"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:fe846f0a98aa9913c2852b630cd39b4098f296e0907dd05f6c7b30d911afa4c3"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ced66c5c6ad5bcaf9be54560398654779ec1c3695f1a9cf0ae5e3606694a000a"}, + {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a40087b82f83bd671cbeb5f582c233d196e9653220404a798798bfc0ee189fff"}, + {file = "aiohttp-3.11.14-cp39-cp39-win32.whl", hash = "sha256:95d7787f2bcbf7cb46823036a8d64ccfbc2ffc7d52016b4044d901abceeba3db"}, + {file = "aiohttp-3.11.14-cp39-cp39-win_amd64.whl", hash = "sha256:22a8107896877212130c58f74e64b77f7007cb03cea8698be317272643602d45"}, + {file = "aiohttp-3.11.14.tar.gz", hash = "sha256:d6edc538c7480fa0a3b2bdd705f8010062d74700198da55d16498e1b49549b9c"}, ] [package.dependencies] @@ -459,14 +459,14 @@ files = [ [[package]] name = "anyio" -version = "4.8.0" +version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" groups = ["main", "storage", "vdb"] files = [ - {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, - {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, + {file = "anyio-4.9.0-py3-none-any.whl", hash = 
"sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, + {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, ] [package.dependencies] @@ -475,8 +475,8 @@ sniffio = ">=1.1" typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] [[package]] @@ -509,21 +509,21 @@ files = [ [[package]] name = "attrs" -version = "25.1.0" +version = "25.3.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" groups = ["main", "lint", "storage", "vdb"] files = [ - {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, - {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, ] [package.extras] benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest 
(>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] @@ -845,10 +845,6 @@ files = [ {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"}, - {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"}, {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, @@ -861,14 +857,8 @@ files = [ {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"}, - {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"}, {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"}, - {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"}, {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, 
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, @@ -879,24 +869,8 @@ files = [ {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"}, - {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"}, {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, - {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"}, - {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"}, - {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"}, - {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"}, - {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"}, - {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"}, {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, @@ -906,10 +880,6 @@ files = [ {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"}, - {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"}, {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, @@ -921,10 +891,6 @@ files = [ {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"}, - {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"}, {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, @@ -937,10 +903,6 @@ files = [ {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"}, - {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"}, {file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, @@ -953,10 +915,6 @@ files = [ {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"}, - {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"}, {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, @@ -1404,14 +1362,14 @@ uvicorn = {version = ">=0.18.3", extras = ["standard"]} [[package]] name = "circuitbreaker" -version = "2.0.0" +version = "2.1.0" description = "Python Circuit Breaker pattern implementation" optional = false python-versions = "*" groups = ["main"] files = [ - {file = "circuitbreaker-2.0.0-py2.py3-none-any.whl", hash = "sha256:c8c6f044b616cd5066368734ce4488020392c962b4bd2869d406d883c36d9859"}, - {file = "circuitbreaker-2.0.0.tar.gz", hash = "sha256:28110761ca81a2accbd6b33186bc8c433e69b0933d85e89f280028dbb8c1dd14"}, + {file = "circuitbreaker-2.1.0-py2.py3-none-any.whl", hash = "sha256:6a07e9a33bb23a37228155108b50108b4e52611e4c26183c4aa54c255da233cb"}, + {file = "circuitbreaker-2.1.0.tar.gz", hash = "sha256:b09c3f7a8614df6d43e780cdd80931fad5ccb1d23418dbe4c15bcfe6db12528c"}, ] [[package]] @@ -1969,14 +1927,14 @@ files = [ [[package]] name = "elastic-transport" -version = "8.17.0" +version = "8.17.1" description = 
"Transport classes and utilities shared among Python Elastic client libraries" optional = false python-versions = ">=3.8" groups = ["vdb"] files = [ - {file = "elastic_transport-8.17.0-py3-none-any.whl", hash = "sha256:59f553300866750e67a38828fede000576562a0e66930c641adb75249e0c95af"}, - {file = "elastic_transport-8.17.0.tar.gz", hash = "sha256:e755f38f99fa6ec5456e236b8e58f0eb18873ac8fe710f74b91a16dd562de2a5"}, + {file = "elastic_transport-8.17.1-py3-none-any.whl", hash = "sha256:192718f498f1d10c5e9aa8b9cf32aed405e469a7f0e9d6a8923431dbb2c59fb8"}, + {file = "elastic_transport-8.17.1.tar.gz", hash = "sha256:5edef32ac864dca8e2f0a613ef63491ee8d6b8cfb52881fa7313ba9290cac6d2"}, ] [package.dependencies] @@ -1984,7 +1942,7 @@ certifi = "*" urllib3 = ">=1.26.2,<3" [package.extras] -develop = ["aiohttp", "furo", "httpcore (<1.0.6)", "httpx", "opentelemetry-api", "opentelemetry-sdk", "orjson", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "respx", "sphinx (>2)", "sphinx-autodoc-typehints", "trustme"] +develop = ["aiohttp", "furo", "httpx", "opentelemetry-api", "opentelemetry-sdk", "orjson", "pytest", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "pytest-mock", "requests", "respx", "sphinx (>2)", "sphinx-autodoc-typehints", "trustme"] [[package]] name = "elasticsearch" @@ -2102,14 +2060,14 @@ standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "htt [[package]] name = "filelock" -version = "3.17.0" +version = "3.18.0" description = "A platform independent file lock." optional = false python-versions = ">=3.9" groups = ["main", "vdb"] files = [ - {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"}, - {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"}, + {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, + {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, ] [package.extras] @@ -2730,14 +2688,14 @@ tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] [[package]] name = "google-cloud-core" -version = "2.4.2" +version = "2.4.3" description = "Google Cloud API client core library" optional = false python-versions = ">=3.7" groups = ["main", "storage"] files = [ - {file = "google_cloud_core-2.4.2-py2.py3-none-any.whl", hash = "sha256:7459c3e83de7cb8b9ecfec9babc910efb4314030c56dd798eaad12c426f7d180"}, - {file = "google_cloud_core-2.4.2.tar.gz", hash = "sha256:a4fcb0e2fcfd4bfe963837fad6d10943754fd79c1a50097d68540b6eb3d67f35"}, + {file = "google_cloud_core-2.4.3-py2.py3-none-any.whl", hash = "sha256:5130f9f4c14b4fafdff75c79448f9495cfade0d8775facf1b09c3bf67e027f6e"}, + {file = "google_cloud_core-2.4.3.tar.gz", hash = "sha256:1fab62d7102844b278fe6dead3af32408b1df3eb06f5c7e8634cbd40edc4da53"}, ] [package.dependencies] @@ -2749,22 +2707,22 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] [[package]] name = "google-cloud-resource-manager" -version = "1.14.1" +version = "1.14.2" description = "Google Cloud Resource Manager API client library" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "google_cloud_resource_manager-1.14.1-py2.py3-none-any.whl", hash = "sha256:68340599f85ebf07a6e18487e460ea07cc15e132068f6b188786d01c2cf25518"}, - {file = "google_cloud_resource_manager-1.14.1.tar.gz", hash = 
"sha256:41e9e546aaa03d5160cdfa2341dbe81ef7596706c300a89b94c429f1f3411f87"}, + {file = "google_cloud_resource_manager-1.14.2-py3-none-any.whl", hash = "sha256:d0fa954dedd1d2b8e13feae9099c01b8aac515b648e612834f9942d2795a9900"}, + {file = "google_cloud_resource_manager-1.14.2.tar.gz", hash = "sha256:962e2d904c550d7bac48372607904ff7bb3277e3bb4a36d80cc9a37e28e6eb74"}, ] [package.dependencies] -google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} -google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" -grpc-google-iam-v1 = ">=0.14.0,<1.0.0dev" -proto-plus = ">=1.22.3,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0" +grpc-google-iam-v1 = ">=0.14.0,<1.0.0" +proto-plus = ">=1.22.3,<2.0.0" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" [[package]] name = "google-cloud-storage" @@ -2791,39 +2749,45 @@ protobuf = ["protobuf (<5.0.0dev)"] [[package]] name = "google-crc32c" -version = "1.6.0" +version = "1.7.0" description = "A python wrapper of the C library 'Google CRC32C'" optional = false python-versions = ">=3.9" groups = ["main", "storage"] files = [ - {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa"}, - {file = "google_crc32c-1.6.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9"}, - {file = "google_crc32c-1.6.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7"}, - {file = "google_crc32c-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e"}, - {file = "google_crc32c-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc"}, - {file = "google_crc32c-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42"}, - {file = "google_crc32c-1.6.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4"}, - {file = "google_crc32c-1.6.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8"}, - {file = "google_crc32c-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d"}, - {file = "google_crc32c-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f"}, - {file = "google_crc32c-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3"}, - {file = "google_crc32c-1.6.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d"}, - {file = "google_crc32c-1.6.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b"}, 
- {file = "google_crc32c-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00"}, - {file = "google_crc32c-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3"}, - {file = "google_crc32c-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760"}, - {file = "google_crc32c-1.6.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205"}, - {file = "google_crc32c-1.6.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0"}, - {file = "google_crc32c-1.6.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2"}, - {file = "google_crc32c-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871"}, - {file = "google_crc32c-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57"}, - {file = "google_crc32c-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c"}, - {file = "google_crc32c-1.6.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc"}, - {file = "google_crc32c-1.6.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d"}, - {file = "google_crc32c-1.6.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24"}, - {file = "google_crc32c-1.6.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d"}, - {file = "google_crc32c-1.6.0.tar.gz", hash = "sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc"}, + {file = "google_crc32c-1.7.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:18f1dfc6baeb3b28b1537d54b3622363352f75fcb2d4b6ffcc37584fe431f122"}, + {file = "google_crc32c-1.7.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:732378dc4ca08953eac0d13d1c312d99a54d5b483c90b4a5a536132669ed1c24"}, + {file = "google_crc32c-1.7.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:14fdac94aa60d5794652f8ea6c2fcc532032e31f9050698b7ecdc6d4c3a61784"}, + {file = "google_crc32c-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bc14187a7fe5c61024c0dd1b578d7f9391df55459bf373c07f66426e09353b6"}, + {file = "google_crc32c-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54af59a98a427d0f98b6b0446df52ad286948ab7745da80a1edeb32ad633b3ae"}, + {file = "google_crc32c-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:2515aa89e46c6fa99190ec29bf27f33457ff98e5ca5c6c05602f74e0fb005752"}, + {file = "google_crc32c-1.7.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:96e33b249776f5aa7017a494b78994cf3cc8461291d460b46e75f6bc6cc40dc8"}, + {file = "google_crc32c-1.7.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = 
"sha256:c2dc799827990dd06b777067e27f57c2a552ddde4c4cd2d883b1b615ee92f9cf"}, + {file = "google_crc32c-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4c29f7718f48e32810a41b17126e0ca588a0ae6158b4da2926d8074241a155d"}, + {file = "google_crc32c-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f30548e65291a4658c9e56f6f516159663f2b4a2c991b9af5846f0084ea25d4"}, + {file = "google_crc32c-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:9754f9eaa5ff82166512908f02745d5e883650d7b04d1732b5df3335986ad359"}, + {file = "google_crc32c-1.7.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:11b3b2f16a534c76ce3c9503800c7c2578c13a56e8e409eac273330e25b5c521"}, + {file = "google_crc32c-1.7.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:fd3afea81a7c7b95f98c065edc5a0fdb58f1fea5960e166962b142ec037fe5e0"}, + {file = "google_crc32c-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d07ad3ff51f26cef5bbad66622004eca3639271230cfc2443845ec4650e1c57"}, + {file = "google_crc32c-1.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af6e80f83d882b247eef2c0366ec72b1ffb89433b9f35dc621d79190840f1ea6"}, + {file = "google_crc32c-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:10721764a9434546b7961194fbb1f80efbcaf45b8498ed379d64f8891d4c155b"}, + {file = "google_crc32c-1.7.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:76bb19d182b999f9c9d580b1d7ab6e9334ab23dd669bf91f501812103408c85b"}, + {file = "google_crc32c-1.7.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:6a40522958040051c755a173eb98c05ad4d64a6dd898888c3e5ccca2d1cbdcdc"}, + {file = "google_crc32c-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f714fe5cdf5007d7064c57cf7471a99e0cbafda24ddfa829117fc3baafa424f7"}, + {file = "google_crc32c-1.7.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f04e58dbe1bf0c9398e603a9be5aaa09e0ba7eb022a3293195d8749459a01069"}, + {file = "google_crc32c-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:364067b063664dd8d1fec75a3fe85edf05c46f688365269beccaf42ef5dfe889"}, + {file = "google_crc32c-1.7.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1b0d6044799f6ac51d1cc2decb997280a83c448b3bef517a54b57a3b71921c0"}, + {file = "google_crc32c-1.7.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:02bc3295d26cd7666521fd6d5b7b93923ae1eb4417ddd3bc57185a5881ad7b96"}, + {file = "google_crc32c-1.7.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:807503eedd7bf9bfded2776e0bcd0b017998f45b8b1e813cde3b9bf8f7593313"}, + {file = "google_crc32c-1.7.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ff62f4959fabc8e5e90af920cc2fb0a9ccf1e74fd6da8a56d8ef04a600a68f97"}, + {file = "google_crc32c-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2daec556d537b07af25fa3f91841f022dd6869a648ca4aea1add56f87b80647"}, + {file = "google_crc32c-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dc17abb62bd7c7a33da307f24ae05cd6e36c24e847a67e3e2165cba23a06dd4"}, + {file = "google_crc32c-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:a79f7e486756385c4309d7815ede53a72f6cbab12ddf140c5f5b335caf08edfb"}, + {file = "google_crc32c-1.7.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b8f48dddd1451026a517d7eb1f8c4ee2491998bfa383abb5fdebf32b0aa333e"}, + {file = 
"google_crc32c-1.7.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60f89e06ce462a65dd4d14a97bd29d92730713316b8b89720f9b2bb1aef270f7"}, + {file = "google_crc32c-1.7.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1277c27428a6cc89a51f5afbc04b81fae0288fb631117383f0de4f2bf78ffad6"}, + {file = "google_crc32c-1.7.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:731921158ef113bf157b8e65f13303d627fb540f173a410260f2fb7570af644c"}, + {file = "google_crc32c-1.7.0.tar.gz", hash = "sha256:c8c15a04b290c7556f277acc55ad98503a8bc0893ea6860fd5b5d210f3f558ce"}, ] [package.extras] @@ -2973,20 +2937,20 @@ test = ["objgraph", "psutil"] [[package]] name = "grpc-google-iam-v1" -version = "0.14.1" +version = "0.14.2" description = "IAM API client library" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "grpc_google_iam_v1-0.14.1-py2.py3-none-any.whl", hash = "sha256:b4eca35b2231dd76066ebf1728f3cd30d51034db946827ef63ef138da14eea16"}, - {file = "grpc_google_iam_v1-0.14.1.tar.gz", hash = "sha256:14149f37af0e5779fa8a22a8ae588663269e8a479d9c2e69a5056e589bf8a891"}, + {file = "grpc_google_iam_v1-0.14.2-py3-none-any.whl", hash = "sha256:a3171468459770907926d56a440b2bb643eec1d7ba215f48f3ecece42b4d8351"}, + {file = "grpc_google_iam_v1-0.14.2.tar.gz", hash = "sha256:b3e1fc387a1a329e41672197d0ace9de22c78dd7d215048c4c78712073f7bd20"}, ] [package.dependencies] -googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} -grpcio = ">=1.44.0,<2.0.0dev" -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" +googleapis-common-protos = {version = ">=1.56.0,<2.0.0", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" [[package]] name = "grpcio" @@ -3462,14 +3426,14 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "huggingface-hub" -version = "0.29.2" +version = "0.29.3" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" groups = ["main", "vdb"] files = [ - {file = "huggingface_hub-0.29.2-py3-none-any.whl", hash = "sha256:c56f20fca09ef19da84dcde2b76379ecdaddf390b083f59f166715584953307d"}, - {file = "huggingface_hub-0.29.2.tar.gz", hash = "sha256:590b29c0dcbd0ee4b7b023714dc1ad8563fe4a68a91463438b74e980d28afaf3"}, + {file = "huggingface_hub-0.29.3-py3-none-any.whl", hash = "sha256:0b25710932ac649c08cdbefa6c6ccb8e88eef82927cacdb048efb726429453aa"}, + {file = "huggingface_hub-0.29.3.tar.gz", hash = "sha256:64519a25716e0ba382ba2d3fb3ca082e7c7eb4a2fc634d200e8380006e0760e5"}, ] [package.dependencies] @@ -3539,27 +3503,23 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 [[package]] name = "importlib-metadata" -version = "8.5.0" +version = "8.4.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" groups = ["main", "vdb"] files = [ - {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, - {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, + {file = "importlib_metadata-8.4.0-py3-none-any.whl", hash = 
"sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1"}, + {file = "importlib_metadata-8.4.0.tar.gz", hash = "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5"}, ] [package.dependencies] -zipp = ">=3.20" +zipp = ">=0.5" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] -cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] +test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] [[package]] name = "importlib-resources" @@ -3648,88 +3608,88 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jiter" -version = "0.8.2" +version = "0.9.0" description = "Fast iterable JSON parser." optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, - {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, - {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c826a221851a8dc028eb6d7d6429ba03184fa3c7e83ae01cd6d3bd1d4bd17d"}, - {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d35c864c2dff13dfd79fb070fc4fc6235d7b9b359efe340e1261deb21b9fcb66"}, - {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f557c55bc2b7676e74d39d19bcb8775ca295c7a028246175d6a8b431e70835e5"}, - {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:580ccf358539153db147e40751a0b41688a5ceb275e6f3e93d91c9467f42b2e3"}, - {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af102d3372e917cffce49b521e4c32c497515119dc7bd8a75665e90a718bbf08"}, - {file = "jiter-0.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cadcc978f82397d515bb2683fc0d50103acff2a180552654bb92d6045dec2c49"}, - {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba5bdf56969cad2019d4e8ffd3f879b5fdc792624129741d3d83fc832fef8c7d"}, - {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3b94a33a241bee9e34b8481cdcaa3d5c2116f575e0226e421bed3f7a6ea71cff"}, - {file = "jiter-0.8.2-cp310-cp310-win32.whl", hash = "sha256:6e5337bf454abddd91bd048ce0dca5134056fc99ca0205258766db35d0a2ea43"}, - {file = "jiter-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:4a9220497ca0cb1fe94e3f334f65b9b5102a0b8147646118f020d8ce1de70105"}, - {file = "jiter-0.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2dd61c5afc88a4fda7d8b2cf03ae5947c6ac7516d32b7a15bf4b49569a5c076b"}, - {file = "jiter-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a6c710d657c8d1d2adbbb5c0b0c6bfcec28fd35bd6b5f016395f9ac43e878a15"}, - {file = 
"jiter-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9584de0cd306072635fe4b89742bf26feae858a0683b399ad0c2509011b9dc0"}, - {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a90a923338531b7970abb063cfc087eebae6ef8ec8139762007188f6bc69a9f"}, - {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21974d246ed0181558087cd9f76e84e8321091ebfb3a93d4c341479a736f099"}, - {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32475a42b2ea7b344069dc1e81445cfc00b9d0e3ca837f0523072432332e9f74"}, - {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9931fd36ee513c26b5bf08c940b0ac875de175341cbdd4fa3be109f0492586"}, - {file = "jiter-0.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0820f4a3a59ddced7fce696d86a096d5cc48d32a4183483a17671a61edfddc"}, - {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ffc86ae5e3e6a93765d49d1ab47b6075a9c978a2b3b80f0f32628f39caa0c88"}, - {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5127dc1abd809431172bc3fbe8168d6b90556a30bb10acd5ded41c3cfd6f43b6"}, - {file = "jiter-0.8.2-cp311-cp311-win32.whl", hash = "sha256:66227a2c7b575720c1871c8800d3a0122bb8ee94edb43a5685aa9aceb2782d44"}, - {file = "jiter-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:cde031d8413842a1e7501e9129b8e676e62a657f8ec8166e18a70d94d4682855"}, - {file = "jiter-0.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e6ec2be506e7d6f9527dae9ff4b7f54e68ea44a0ef6b098256ddf895218a2f8f"}, - {file = "jiter-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76e324da7b5da060287c54f2fabd3db5f76468006c811831f051942bf68c9d44"}, - {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:180a8aea058f7535d1c84183c0362c710f4750bef66630c05f40c93c2b152a0f"}, - {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025337859077b41548bdcbabe38698bcd93cfe10b06ff66617a48ff92c9aec60"}, - {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecff0dc14f409599bbcafa7e470c00b80f17abc14d1405d38ab02e4b42e55b57"}, - {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffd9fee7d0775ebaba131f7ca2e2d83839a62ad65e8e02fe2bd8fc975cedeb9e"}, - {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14601dcac4889e0a1c75ccf6a0e4baf70dbc75041e51bcf8d0e9274519df6887"}, - {file = "jiter-0.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92249669925bc1c54fcd2ec73f70f2c1d6a817928480ee1c65af5f6b81cdf12d"}, - {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e725edd0929fa79f8349ab4ec7f81c714df51dc4e991539a578e5018fa4a7152"}, - {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bf55846c7b7a680eebaf9c3c48d630e1bf51bdf76c68a5f654b8524335b0ad29"}, - {file = "jiter-0.8.2-cp312-cp312-win32.whl", hash = "sha256:7efe4853ecd3d6110301665a5178b9856be7e2a9485f49d91aa4d737ad2ae49e"}, - {file = "jiter-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:83c0efd80b29695058d0fd2fa8a556490dbce9804eac3e281f373bbc99045f6c"}, - {file = "jiter-0.8.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ca1f08b8e43dc3bd0594c992fb1fd2f7ce87f7bf0d44358198d6da8034afdf84"}, - {file = 
"jiter-0.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5672a86d55416ccd214c778efccf3266b84f87b89063b582167d803246354be4"}, - {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58dc9bc9767a1101f4e5e22db1b652161a225874d66f0e5cb8e2c7d1c438b587"}, - {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b2998606d6dadbb5ccda959a33d6a5e853252d921fec1792fc902351bb4e2c"}, - {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ab9a87f3784eb0e098f84a32670cfe4a79cb6512fd8f42ae3d0709f06405d18"}, - {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79aec8172b9e3c6d05fd4b219d5de1ac616bd8da934107325a6c0d0e866a21b6"}, - {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:711e408732d4e9a0208008e5892c2966b485c783cd2d9a681f3eb147cf36c7ef"}, - {file = "jiter-0.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:653cf462db4e8c41995e33d865965e79641ef45369d8a11f54cd30888b7e6ff1"}, - {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:9c63eaef32b7bebac8ebebf4dabebdbc6769a09c127294db6babee38e9f405b9"}, - {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:eb21aaa9a200d0a80dacc7a81038d2e476ffe473ffdd9c91eb745d623561de05"}, - {file = "jiter-0.8.2-cp313-cp313-win32.whl", hash = "sha256:789361ed945d8d42850f919342a8665d2dc79e7e44ca1c97cc786966a21f627a"}, - {file = "jiter-0.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:ab7f43235d71e03b941c1630f4b6e3055d46b6cb8728a17663eaac9d8e83a865"}, - {file = "jiter-0.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b426f72cd77da3fec300ed3bc990895e2dd6b49e3bfe6c438592a3ba660e41ca"}, - {file = "jiter-0.8.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2dd880785088ff2ad21ffee205e58a8c1ddabc63612444ae41e5e4b321b39c0"}, - {file = "jiter-0.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:3ac9f578c46f22405ff7f8b1f5848fb753cc4b8377fbec8470a7dc3997ca7566"}, - {file = "jiter-0.8.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9e1fa156ee9454642adb7e7234a383884452532bc9d53d5af2d18d98ada1d79c"}, - {file = "jiter-0.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cf5dfa9956d96ff2efb0f8e9c7d055904012c952539a774305aaaf3abdf3d6c"}, - {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e52bf98c7e727dd44f7c4acb980cb988448faeafed8433c867888268899b298b"}, - {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a2ecaa3c23e7a7cf86d00eda3390c232f4d533cd9ddea4b04f5d0644faf642c5"}, - {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08d4c92bf480e19fc3f2717c9ce2aa31dceaa9163839a311424b6862252c943e"}, - {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d9a1eded738299ba8e106c6779ce5c3893cffa0e32e4485d680588adae6db8"}, - {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20be8b7f606df096e08b0b1b4a3c6f0515e8dac296881fe7461dfa0fb5ec817"}, - {file = "jiter-0.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d33f94615fcaf872f7fd8cd98ac3b429e435c77619777e8a449d9d27e01134d1"}, - {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:317b25e98a35ffec5c67efe56a4e9970852632c810d35b34ecdd70cc0e47b3b6"}, - {file = 
"jiter-0.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fc9043259ee430ecd71d178fccabd8c332a3bf1e81e50cae43cc2b28d19e4cb7"}, - {file = "jiter-0.8.2-cp38-cp38-win32.whl", hash = "sha256:fc5adda618205bd4678b146612ce44c3cbfdee9697951f2c0ffdef1f26d72b63"}, - {file = "jiter-0.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cd646c827b4f85ef4a78e4e58f4f5854fae0caf3db91b59f0d73731448a970c6"}, - {file = "jiter-0.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e41e75344acef3fc59ba4765df29f107f309ca9e8eace5baacabd9217e52a5ee"}, - {file = "jiter-0.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f22b16b35d5c1df9dfd58843ab2cd25e6bf15191f5a236bed177afade507bfc"}, - {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7200b8f7619d36aa51c803fd52020a2dfbea36ffec1b5e22cab11fd34d95a6d"}, - {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70bf4c43652cc294040dbb62256c83c8718370c8b93dd93d934b9a7bf6c4f53c"}, - {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9d471356dc16f84ed48768b8ee79f29514295c7295cb41e1133ec0b2b8d637d"}, - {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:859e8eb3507894093d01929e12e267f83b1d5f6221099d3ec976f0c995cb6bd9"}, - {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa58399c01db555346647a907b4ef6d4f584b123943be6ed5588c3f2359c9f4"}, - {file = "jiter-0.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8f2d5ed877f089862f4c7aacf3a542627c1496f972a34d0474ce85ee7d939c27"}, - {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:03c9df035d4f8d647f8c210ddc2ae0728387275340668fb30d2421e17d9a0841"}, - {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8bd2a824d08d8977bb2794ea2682f898ad3d8837932e3a74937e93d62ecbb637"}, - {file = "jiter-0.8.2-cp39-cp39-win32.whl", hash = "sha256:ca29b6371ebc40e496995c94b988a101b9fbbed48a51190a4461fcb0a68b4a36"}, - {file = "jiter-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1c0dfbd1be3cbefc7510102370d86e35d1d53e5a93d48519688b1bf0f761160a"}, - {file = "jiter-0.8.2.tar.gz", hash = "sha256:cd73d3e740666d0e639f678adb176fad25c1bcbdae88d8d7b857e1783bb4212d"}, + {file = "jiter-0.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:816ec9b60fdfd1fec87da1d7ed46c66c44ffec37ab2ef7de5b147b2fce3fd5ad"}, + {file = "jiter-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b1d3086f8a3ee0194ecf2008cf81286a5c3e540d977fa038ff23576c023c0ea"}, + {file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1339f839b91ae30b37c409bf16ccd3dc453e8b8c3ed4bd1d6a567193651a4a51"}, + {file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ffba79584b3b670fefae66ceb3a28822365d25b7bf811e030609a3d5b876f538"}, + {file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cfc7d0a8e899089d11f065e289cb5b2daf3d82fbe028f49b20d7b809193958d"}, + {file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e00a1a2bbfaaf237e13c3d1592356eab3e9015d7efd59359ac8b51eb56390a12"}, + {file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1d9870561eb26b11448854dce0ff27a9a27cb616b632468cafc938de25e9e51"}, + {file = "jiter-0.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:9872aeff3f21e437651df378cb75aeb7043e5297261222b6441a620218b58708"}, + {file = "jiter-0.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1fd19112d1049bdd47f17bfbb44a2c0001061312dcf0e72765bfa8abd4aa30e5"}, + {file = "jiter-0.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6ef5da104664e526836070e4a23b5f68dec1cc673b60bf1edb1bfbe8a55d0678"}, + {file = "jiter-0.9.0-cp310-cp310-win32.whl", hash = "sha256:cb12e6d65ebbefe5518de819f3eda53b73187b7089040b2d17f5b39001ff31c4"}, + {file = "jiter-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:c43ca669493626d8672be3b645dbb406ef25af3f4b6384cfd306da7eb2e70322"}, + {file = "jiter-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6c4d99c71508912a7e556d631768dcdef43648a93660670986916b297f1c54af"}, + {file = "jiter-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f60fb8ce7df529812bf6c625635a19d27f30806885139e367af93f6e734ef58"}, + {file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51c4e1a4f8ea84d98b7b98912aa4290ac3d1eabfde8e3c34541fae30e9d1f08b"}, + {file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f4c677c424dc76684fea3e7285a7a2a7493424bea89ac441045e6a1fb1d7b3b"}, + {file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2221176dfec87f3470b21e6abca056e6b04ce9bff72315cb0b243ca9e835a4b5"}, + {file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c7adb66f899ffa25e3c92bfcb593391ee1947dbdd6a9a970e0d7e713237d572"}, + {file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98d27330fdfb77913c1097a7aab07f38ff2259048949f499c9901700789ac15"}, + {file = "jiter-0.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eda3f8cc74df66892b1d06b5d41a71670c22d95a1ca2cbab73654745ce9d0419"}, + {file = "jiter-0.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dd5ab5ddc11418dce28343123644a100f487eaccf1de27a459ab36d6cca31043"}, + {file = "jiter-0.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42f8a68a69f047b310319ef8e2f52fdb2e7976fb3313ef27df495cf77bcad965"}, + {file = "jiter-0.9.0-cp311-cp311-win32.whl", hash = "sha256:a25519efb78a42254d59326ee417d6f5161b06f5da827d94cf521fed961b1ff2"}, + {file = "jiter-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:923b54afdd697dfd00d368b7ccad008cccfeb1efb4e621f32860c75e9f25edbd"}, + {file = "jiter-0.9.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7b46249cfd6c48da28f89eb0be3f52d6fdb40ab88e2c66804f546674e539ec11"}, + {file = "jiter-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:609cf3c78852f1189894383cf0b0b977665f54cb38788e3e6b941fa6d982c00e"}, + {file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d726a3890a54561e55a9c5faea1f7655eda7f105bd165067575ace6e65f80bb2"}, + {file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2e89dc075c1fef8fa9be219e249f14040270dbc507df4215c324a1839522ea75"}, + {file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04e8ffa3c353b1bc4134f96f167a2082494351e42888dfcf06e944f2729cbe1d"}, + {file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:203f28a72a05ae0e129b3ed1f75f56bc419d5f91dfacd057519a8bd137b00c42"}, + {file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:fca1a02ad60ec30bb230f65bc01f611c8608b02d269f998bc29cca8619a919dc"}, + {file = "jiter-0.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:237e5cee4d5d2659aaf91bbf8ec45052cc217d9446070699441a91b386ae27dc"}, + {file = "jiter-0.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:528b6b71745e7326eed73c53d4aa57e2a522242320b6f7d65b9c5af83cf49b6e"}, + {file = "jiter-0.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9f48e86b57bc711eb5acdfd12b6cb580a59cc9a993f6e7dcb6d8b50522dcd50d"}, + {file = "jiter-0.9.0-cp312-cp312-win32.whl", hash = "sha256:699edfde481e191d81f9cf6d2211debbfe4bd92f06410e7637dffb8dd5dfde06"}, + {file = "jiter-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:099500d07b43f61d8bd780466d429c45a7b25411b334c60ca875fa775f68ccb0"}, + {file = "jiter-0.9.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2764891d3f3e8b18dce2cff24949153ee30c9239da7c00f032511091ba688ff7"}, + {file = "jiter-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:387b22fbfd7a62418d5212b4638026d01723761c75c1c8232a8b8c37c2f1003b"}, + {file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d8da8629ccae3606c61d9184970423655fb4e33d03330bcdfe52d234d32f69"}, + {file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1be73d8982bdc278b7b9377426a4b44ceb5c7952073dd7488e4ae96b88e1103"}, + {file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2228eaaaa111ec54b9e89f7481bffb3972e9059301a878d085b2b449fbbde635"}, + {file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11509bfecbc319459647d4ac3fd391d26fdf530dad00c13c4dadabf5b81f01a4"}, + {file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f22238da568be8bbd8e0650e12feeb2cfea15eda4f9fc271d3b362a4fa0604d"}, + {file = "jiter-0.9.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17f5d55eb856597607562257c8e36c42bc87f16bef52ef7129b7da11afc779f3"}, + {file = "jiter-0.9.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:6a99bed9fbb02f5bed416d137944419a69aa4c423e44189bc49718859ea83bc5"}, + {file = "jiter-0.9.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e057adb0cd1bd39606100be0eafe742de2de88c79df632955b9ab53a086b3c8d"}, + {file = "jiter-0.9.0-cp313-cp313-win32.whl", hash = "sha256:f7e6850991f3940f62d387ccfa54d1a92bd4bb9f89690b53aea36b4364bcab53"}, + {file = "jiter-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:c8ae3bf27cd1ac5e6e8b7a27487bf3ab5f82318211ec2e1346a5b058756361f7"}, + {file = "jiter-0.9.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0b2827fb88dda2cbecbbc3e596ef08d69bda06c6f57930aec8e79505dc17001"}, + {file = "jiter-0.9.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062b756ceb1d40b0b28f326cba26cfd575a4918415b036464a52f08632731e5a"}, + {file = "jiter-0.9.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6f7838bc467ab7e8ef9f387bd6de195c43bad82a569c1699cb822f6609dd4cdf"}, + {file = "jiter-0.9.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4a2d16360d0642cd68236f931b85fe50288834c383492e4279d9f1792e309571"}, + {file = "jiter-0.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e84ed1c9c9ec10bbb8c37f450077cbe3c0d4e8c2b19f0a49a60ac7ace73c7452"}, + {file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f3c848209ccd1bfa344a1240763975ca917de753c7875c77ec3034f4151d06c"}, + {file = 
"jiter-0.9.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7825f46e50646bee937e0f849d14ef3a417910966136f59cd1eb848b8b5bb3e4"}, + {file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d82a811928b26d1a6311a886b2566f68ccf2b23cf3bfed042e18686f1f22c2d7"}, + {file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c058ecb51763a67f019ae423b1cbe3fa90f7ee6280c31a1baa6ccc0c0e2d06e"}, + {file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9897115ad716c48f0120c1f0c4efae348ec47037319a6c63b2d7838bb53aaef4"}, + {file = "jiter-0.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:351f4c90a24c4fb8c87c6a73af2944c440494ed2bea2094feecacb75c50398ae"}, + {file = "jiter-0.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d45807b0f236c485e1e525e2ce3a854807dfe28ccf0d013dd4a563395e28008a"}, + {file = "jiter-0.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1537a890724ba00fdba21787010ac6f24dad47f763410e9e1093277913592784"}, + {file = "jiter-0.9.0-cp38-cp38-win32.whl", hash = "sha256:e3630ec20cbeaddd4b65513fa3857e1b7c4190d4481ef07fb63d0fad59033321"}, + {file = "jiter-0.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:2685f44bf80e95f8910553bf2d33b9c87bf25fceae6e9f0c1355f75d2922b0ee"}, + {file = "jiter-0.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:9ef340fae98065071ccd5805fe81c99c8f80484e820e40043689cf97fb66b3e2"}, + {file = "jiter-0.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:efb767d92c63b2cd9ec9f24feeb48f49574a713870ec87e9ba0c2c6e9329c3e2"}, + {file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:113f30f87fb1f412510c6d7ed13e91422cfd329436364a690c34c8b8bd880c42"}, + {file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8793b6df019b988526f5a633fdc7456ea75e4a79bd8396a3373c371fc59f5c9b"}, + {file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a9aaa5102dba4e079bb728076fadd5a2dca94c05c04ce68004cfd96f128ea34"}, + {file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d838650f6ebaf4ccadfb04522463e74a4c378d7e667e0eb1865cfe3990bfac49"}, + {file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0194f813efdf4b8865ad5f5c5f50f8566df7d770a82c51ef593d09e0b347020"}, + {file = "jiter-0.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7954a401d0a8a0b8bc669199db78af435aae1e3569187c2939c477c53cb6a0a"}, + {file = "jiter-0.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4feafe787eb8a8d98168ab15637ca2577f6ddf77ac6c8c66242c2d028aa5420e"}, + {file = "jiter-0.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:27cd1f2e8bb377f31d3190b34e4328d280325ad7ef55c6ac9abde72f79e84d2e"}, + {file = "jiter-0.9.0-cp39-cp39-win32.whl", hash = "sha256:161d461dcbe658cf0bd0aa375b30a968b087cdddc624fc585f3867c63c6eca95"}, + {file = "jiter-0.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:e8b36d8a16a61993be33e75126ad3d8aa29cf450b09576f3c427d27647fcb4aa"}, + {file = "jiter-0.9.0.tar.gz", hash = "sha256:aadba0964deb424daa24492abc3d229c60c4a31bfee205aedbf1acc7639d7893"}, ] [[package]] @@ -3823,19 +3783,19 @@ files = [ [[package]] name = "kombu" -version = "5.4.2" +version = "5.5.0" description = "Messaging library for Python." 
optional = false python-versions = ">=3.8" groups = ["main"] files = [ - {file = "kombu-5.4.2-py3-none-any.whl", hash = "sha256:14212f5ccf022fc0a70453bb025a1dcc32782a588c49ea866884047d66e14763"}, - {file = "kombu-5.4.2.tar.gz", hash = "sha256:eef572dd2fd9fc614b37580e3caeafdd5af46c1eff31e7fba89138cdb406f2cf"}, + {file = "kombu-5.5.0-py3-none-any.whl", hash = "sha256:526c6cf038c986b998639109a1eb762502f831e8da148cc928f1f95cd91eb874"}, + {file = "kombu-5.5.0.tar.gz", hash = "sha256:72e65c062e903ee1b4e8b68d348f63c02afc172eda409e3aca85867752e79c0b"}, ] [package.dependencies] amqp = ">=5.1.1,<6.0.0" -tzdata = {version = "*", markers = "python_version >= \"3.9\""} +tzdata = {version = "2025.1", markers = "python_version >= \"3.9\""} vine = "5.1.0" [package.extras] @@ -3843,15 +3803,16 @@ azureservicebus = ["azure-servicebus (>=7.10.0)"] azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"] confluentkafka = ["confluent-kafka (>=2.2.0)"] consul = ["python-consul2 (==0.1.5)"] +gcpubsub = ["google-cloud-monitoring (>=2.16.0)", "google-cloud-pubsub (>=2.18.4)", "grpcio (==1.67.0)", "protobuf (==4.25.5)"] librabbitmq = ["librabbitmq (>=2.0.0) ; python_version < \"3.11\""] mongodb = ["pymongo (>=4.1.1)"] msgpack = ["msgpack (==1.1.0)"] pyro = ["pyro4 (==4.82)"] qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] -redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2)"] +redis = ["redis (>=4.5.2,!=4.5.5,!=5.0.2,<=5.2.1)"] slmq = ["softlayer-messaging (>=1.0.3)"] sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] -sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5) ; sys_platform != \"win32\" and platform_python_implementation == \"CPython\"", "urllib3 (>=1.26.16)"] +sqs = ["boto3 (>=1.26.143)", "urllib3 (>=1.26.16)"] yaml = ["PyYAML (>=3.10)"] zookeeper = ["kazoo (>=2.8.0)"] @@ -4058,14 +4019,14 @@ rapidfuzz = ">=3.9.0,<4.0.0" [[package]] name = "litellm" -version = "1.63.3" +version = "1.63.7" description = "Library to easily interface with LLM API providers" optional = false python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" groups = ["main"] files = [ - {file = "litellm-1.63.3-py3-none-any.whl", hash = "sha256:bbe56d3b4afa5dda1e9730071780601e03278bba003da1634c4a2dc93c83ae05"}, - {file = "litellm-1.63.3.tar.gz", hash = "sha256:329fa60e2c93e95d28798c2fbc63aaca67d8da38b2256d015cb169e97e5382f3"}, + {file = "litellm-1.63.7-py3-none-any.whl", hash = "sha256:fbdee39a894506c68f158c6b4e0079f9e9c023441fff7215e7b8e42162dba0a7"}, + {file = "litellm-1.63.7.tar.gz", hash = "sha256:2fbd7236d5e5379eee18556857ed62a5ed49f4f09e03ff33cf15932306b984f1"}, ] [package.dependencies] @@ -4648,18 +4609,18 @@ tests = ["pytest (>=4.6)"] [[package]] name = "msal" -version = "1.31.1" +version = "1.32.0" description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." 
optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "msal-1.31.1-py3-none-any.whl", hash = "sha256:29d9882de247e96db01386496d59f29035e5e841bcac892e6d7bf4390bf6bd17"}, - {file = "msal-1.31.1.tar.gz", hash = "sha256:11b5e6a3f802ffd3a72107203e20c4eac6ef53401961b880af2835b723d80578"}, + {file = "msal-1.32.0-py3-none-any.whl", hash = "sha256:9dbac5384a10bbbf4dae5c7ea0d707d14e087b92c5aa4954b3feaa2d1aa0bcb7"}, + {file = "msal-1.32.0.tar.gz", hash = "sha256:5445fe3af1da6be484991a7ab32eaa82461dc2347de105b76af92c610c3335c2"}, ] [package.dependencies] -cryptography = ">=2.5,<46" +cryptography = ">=2.5,<47" PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} requests = ">=2.0.0,<3" @@ -4668,19 +4629,21 @@ broker = ["pymsalruntime (>=0.14,<0.18) ; python_version >= \"3.6\" and platform [[package]] name = "msal-extensions" -version = "1.2.0" +version = "1.3.1" description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "msal_extensions-1.2.0-py3-none-any.whl", hash = "sha256:cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d"}, - {file = "msal_extensions-1.2.0.tar.gz", hash = "sha256:6f41b320bfd2933d631a215c91ca0dd3e67d84bd1a2f50ce917d5874ec646bef"}, + {file = "msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca"}, + {file = "msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4"}, ] [package.dependencies] msal = ">=1.29,<2" -portalocker = ">=1.4,<3" + +[package.extras] +portalocker = ["portalocker (>=1.4,<4)"] [[package]] name = "msrest" @@ -4706,104 +4669,104 @@ async = ["aiodns ; python_version >= \"3.5\"", "aiohttp (>=3.0) ; python_version [[package]] name = "multidict" -version = "6.1.0" +version = "6.2.0" description = "multidict implementation" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["main", "storage", "vdb"] files = [ - {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, - {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, - {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, - {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, - {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, - {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, - {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, - {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, - {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, - {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, - {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, - {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, - {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, - {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, - {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, - {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, - {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, - {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, - {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, - {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, - {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, - {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, - {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, - {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, - {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, - {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, - {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, - {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, - {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, - {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, - {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, - {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, + {file = "multidict-6.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b9f6392d98c0bd70676ae41474e2eecf4c7150cb419237a41f8f96043fcb81d1"}, + {file = "multidict-6.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3501621d5e86f1a88521ea65d5cad0a0834c77b26f193747615b7c911e5422d2"}, + {file = "multidict-6.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32ed748ff9ac682eae7859790d3044b50e3076c7d80e17a44239683769ff485e"}, + {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc826b9a8176e686b67aa60fd6c6a7047b0461cae5591ea1dc73d28f72332a8a"}, + {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:214207dcc7a6221d9942f23797fe89144128a71c03632bf713d918db99bd36de"}, + {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05fefbc3cddc4e36da209a5e49f1094bbece9a581faa7f3589201fd95df40e5d"}, + {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e851e6363d0dbe515d8de81fd544a2c956fdec6f8a049739562286727d4a00c3"}, + {file = "multidict-6.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32c9b4878f48be3e75808ea7e499d6223b1eea6d54c487a66bc10a1871e3dc6a"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7243c5a6523c5cfeca76e063efa5f6a656d1d74c8b1fc64b2cd1e84e507f7e2a"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0e5a644e50ef9fb87878d4d57907f03a12410d2aa3b93b3acdf90a741df52c49"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0dc25a3293c50744796e87048de5e68996104d86d940bb24bc3ec31df281b191"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a49994481b99cd7dedde07f2e7e93b1d86c01c0fca1c32aded18f10695ae17eb"}, + {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:641cf2e3447c9ecff2f7aa6e9eee9eaa286ea65d57b014543a4911ff2799d08a"}, + {file = "multidict-6.2.0-cp310-cp310-win32.whl", hash = "sha256:0c383d28857f66f5aebe3e91d6cf498da73af75fbd51cedbe1adfb85e90c0460"}, + {file = "multidict-6.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:a33273a541f1e1a8219b2a4ed2de355848ecc0254264915b9290c8d2de1c74e1"}, + {file = "multidict-6.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:84e87a7d75fa36839a3a432286d719975362d230c70ebfa0948549cc38bd5b46"}, + {file = "multidict-6.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8de4d42dffd5ced9117af2ce66ba8722402541a3aa98ffdf78dde92badb68932"}, + {file = "multidict-6.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7d91a230c7f8af86c904a5a992b8c064b66330544693fd6759c3d6162382ecf"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f6cad071960ba1914fa231677d21b1b4a3acdcce463cee41ea30bc82e6040cf"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f74f2fc51555f4b037ef278efc29a870d327053aba5cb7d86ae572426c7cccc"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14ed9ed1bfedd72a877807c71113deac292bf485159a29025dfdc524c326f3e1"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac3fcf9a2d369bd075b2c2965544036a27ccd277fc3c04f708338cc57533081"}, + {file = "multidict-6.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fc6af8e39f7496047c7876314f4317736eac82bf85b54c7c76cf1a6f8e35d98"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f8cb1329f42fadfb40d6211e5ff568d71ab49be36e759345f91c69d1033d633"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5389445f0173c197f4a3613713b5fb3f3879df1ded2a1a2e4bc4b5b9c5441b7e"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94a7bb972178a8bfc4055db80c51efd24baefaced5e51c59b0d598a004e8305d"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da51d8928ad8b4244926fe862ba1795f0b6e68ed8c42cd2f822d435db9c2a8f4"}, + {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:063be88bd684782a0715641de853e1e58a2f25b76388538bd62d974777ce9bc2"}, + {file = "multidict-6.2.0-cp311-cp311-win32.whl", hash = "sha256:52b05e21ff05729fbea9bc20b3a791c3c11da61649ff64cce8257c82a020466d"}, + {file = 
"multidict-6.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1e2a2193d3aa5cbf5758f6d5680a52aa848e0cf611da324f71e5e48a9695cc86"}, + {file = "multidict-6.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:437c33561edb6eb504b5a30203daf81d4a9b727e167e78b0854d9a4e18e8950b"}, + {file = "multidict-6.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9f49585f4abadd2283034fc605961f40c638635bc60f5162276fec075f2e37a4"}, + {file = "multidict-6.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5dd7106d064d05896ce28c97da3f46caa442fe5a43bc26dfb258e90853b39b44"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e25b11a0417475f093d0f0809a149aff3943c2c56da50fdf2c3c88d57fe3dfbd"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac380cacdd3b183338ba63a144a34e9044520a6fb30c58aa14077157a033c13e"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61d5541f27533f803a941d3a3f8a3d10ed48c12cf918f557efcbf3cd04ef265c"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:facaf11f21f3a4c51b62931feb13310e6fe3475f85e20d9c9fdce0d2ea561b87"}, + {file = "multidict-6.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:095a2eabe8c43041d3e6c2cb8287a257b5f1801c2d6ebd1dd877424f1e89cf29"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0cc398350ef31167e03f3ca7c19313d4e40a662adcb98a88755e4e861170bdd"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7c611345bbe7cb44aabb877cb94b63e86f2d0db03e382667dbd037866d44b4f8"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8cd1a0644ccaf27e9d2f6d9c9474faabee21f0578fe85225cc5af9a61e1653df"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:89b3857652183b8206a891168af47bac10b970d275bba1f6ee46565a758c078d"}, + {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:125dd82b40f8c06d08d87b3510beaccb88afac94e9ed4a6f6c71362dc7dbb04b"}, + {file = "multidict-6.2.0-cp312-cp312-win32.whl", hash = "sha256:76b34c12b013d813e6cb325e6bd4f9c984db27758b16085926bbe7ceeaace626"}, + {file = "multidict-6.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:0b183a959fb88ad1be201de2c4bdf52fa8e46e6c185d76201286a97b6f5ee65c"}, + {file = "multidict-6.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5c5e7d2e300d5cb3b2693b6d60d3e8c8e7dd4ebe27cd17c9cb57020cac0acb80"}, + {file = "multidict-6.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:256d431fe4583c5f1e0f2e9c4d9c22f3a04ae96009b8cfa096da3a8723db0a16"}, + {file = "multidict-6.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a3c0ff89fe40a152e77b191b83282c9664357dce3004032d42e68c514ceff27e"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7d48207926edbf8b16b336f779c557dd8f5a33035a85db9c4b0febb0706817"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c099d3899b14e1ce52262eb82a5f5cb92157bb5106bf627b618c090a0eadc"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e16e7297f29a544f49340012d6fc08cf14de0ab361c9eb7529f6a57a30cbfda1"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:042028348dc5a1f2be6c666437042a98a5d24cee50380f4c0902215e5ec41844"}, + {file = "multidict-6.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08549895e6a799bd551cf276f6e59820aa084f0f90665c0f03dd3a50db5d3c48"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ccfd74957ef53fa7380aaa1c961f523d582cd5e85a620880ffabd407f8202c0"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:83b78c680d4b15d33042d330c2fa31813ca3974197bddb3836a5c635a5fd013f"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b4c153863dd6569f6511845922c53e39c8d61f6e81f228ad5443e690fca403de"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98aa8325c7f47183b45588af9c434533196e241be0a4e4ae2190b06d17675c02"}, + {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e658d1373c424457ddf6d55ec1db93c280b8579276bebd1f72f113072df8a5d"}, + {file = "multidict-6.2.0-cp313-cp313-win32.whl", hash = "sha256:3157126b028c074951839233647bd0e30df77ef1fedd801b48bdcad242a60f4e"}, + {file = "multidict-6.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:2e87f1926e91855ae61769ba3e3f7315120788c099677e0842e697b0bfb659f2"}, + {file = "multidict-6.2.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:2529ddbdaa424b2c6c2eb668ea684dd6b75b839d0ad4b21aad60c168269478d7"}, + {file = "multidict-6.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:13551d0e2d7201f0959725a6a769b6f7b9019a168ed96006479c9ac33fe4096b"}, + {file = "multidict-6.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d1996ee1330e245cd3aeda0887b4409e3930524c27642b046e4fae88ffa66c5e"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c537da54ce4ff7c15e78ab1292e5799d0d43a2108e006578a57f531866f64025"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f249badb360b0b4d694307ad40f811f83df4da8cef7b68e429e4eea939e49dd"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48d39b1824b8d6ea7de878ef6226efbe0773f9c64333e1125e0efcfdd18a24c7"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b99aac6bb2c37db336fa03a39b40ed4ef2818bf2dfb9441458165ebe88b793af"}, + {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bfa8bc649783e703263f783f73e27fef8cd37baaad4389816cf6a133141331"}, + {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b2c00ad31fbc2cbac85d7d0fcf90853b2ca2e69d825a2d3f3edb842ef1544a2c"}, + {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d57a01a2a9fa00234aace434d8c131f0ac6e0ac6ef131eda5962d7e79edfb5b"}, + {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:abf5b17bc0cf626a8a497d89ac691308dbd825d2ac372aa990b1ca114e470151"}, + {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:f7716f7e7138252d88607228ce40be22660d6608d20fd365d596e7ca0738e019"}, + {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d5a36953389f35f0a4e88dc796048829a2f467c9197265504593f0e420571547"}, + {file = "multidict-6.2.0-cp313-cp313t-win32.whl", hash = "sha256:e653d36b1bf48fa78c7fcebb5fa679342e025121ace8c87ab05c1cefd33b34fc"}, + {file = 
"multidict-6.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ca23db5fb195b5ef4fd1f77ce26cadefdf13dba71dab14dadd29b34d457d7c44"}, + {file = "multidict-6.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b4f3d66dd0354b79761481fc15bdafaba0b9d9076f1f42cc9ce10d7fcbda205a"}, + {file = "multidict-6.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e2a2d6749e1ff2c9c76a72c6530d5baa601205b14e441e6d98011000f47a7ac"}, + {file = "multidict-6.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cca83a629f77402cfadd58352e394d79a61c8015f1694b83ab72237ec3941f88"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:781b5dd1db18c9e9eacc419027b0acb5073bdec9de1675c0be25ceb10e2ad133"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf8d370b2fea27fb300825ec3984334f7dd54a581bde6456799ba3776915a656"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25bb96338512e2f46f615a2bb7c6012fe92a4a5ebd353e5020836a7e33120349"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e2819b0b468174de25c0ceed766606a07cedeab132383f1e83b9a4e96ccb4f"}, + {file = "multidict-6.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6aed763b6a1b28c46c055692836879328f0b334a6d61572ee4113a5d0c859872"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a1133414b771619aa3c3000701c11b2e4624a7f492f12f256aedde97c28331a2"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:639556758c36093b35e2e368ca485dada6afc2bd6a1b1207d85ea6dfc3deab27"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:163f4604e76639f728d127293d24c3e208b445b463168af3d031b92b0998bb90"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2325105e16d434749e1be8022f942876a936f9bece4ec41ae244e3d7fae42aaf"}, + {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e4371591e621579cb6da8401e4ea405b33ff25a755874a3567c4075ca63d56e2"}, + {file = "multidict-6.2.0-cp39-cp39-win32.whl", hash = "sha256:d1175b0e0d6037fab207f05774a176d71210ebd40b1c51f480a04b65ec5c786d"}, + {file = "multidict-6.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad81012b24b88aad4c70b2cbc2dad84018783221b7f923e926f4690ff8569da3"}, + {file = "multidict-6.2.0-py3-none-any.whl", hash = "sha256:5d26547423e5e71dcc562c4acdc134b900640a39abd9066d7326a7cc2324c530"}, + {file = "multidict-6.2.0.tar.gz", hash = "sha256:0085b0afb2446e57050140240a8595846ed64d1cbd26cef936bfab3192c673b8"}, ] [[package]] @@ -5240,115 +5203,127 @@ kerberos = ["requests-kerberos"] [[package]] name = "opentelemetry-api" -version = "1.30.0" +version = "1.27.0" description = "OpenTelemetry Python API" optional = false python-versions = ">=3.8" groups = ["vdb"] files = [ - {file = "opentelemetry_api-1.30.0-py3-none-any.whl", hash = "sha256:d5f5284890d73fdf47f843dda3210edf37a38d66f44f2b5aedc1e89ed455dc09"}, - {file = "opentelemetry_api-1.30.0.tar.gz", hash = "sha256:375893400c1435bf623f7dfb3bcd44825fe6b56c34d0667c542ea8257b1a1240"}, + {file = "opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7"}, + {file = "opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342"}, ] [package.dependencies] deprecated = ">=1.2.6" 
-importlib-metadata = ">=6.0,<=8.5.0"
+importlib-metadata = ">=6.0,<=8.4.0"

 [[package]]
-name = "opentelemetry-exporter-otlp-proto-grpc"
-version = "1.15.0"
-description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
+name = "opentelemetry-exporter-otlp-proto-common"
+version = "1.27.0"
+description = "OpenTelemetry Protobuf encoding"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 groups = ["vdb"]
 files = [
-    {file = "opentelemetry_exporter_otlp_proto_grpc-1.15.0-py3-none-any.whl", hash = "sha256:c2a5492ba7d140109968135d641d06ce3c5bd73c50665f787526065d57d7fd1d"},
-    {file = "opentelemetry_exporter_otlp_proto_grpc-1.15.0.tar.gz", hash = "sha256:844f2a4bb9bcda34e4eb6fe36765e5031aacb36dc60ed88c90fc246942ea26e7"},
+    {file = "opentelemetry_exporter_otlp_proto_common-1.27.0-py3-none-any.whl", hash = "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a"},
+    {file = "opentelemetry_exporter_otlp_proto_common-1.27.0.tar.gz", hash = "sha256:159d27cf49f359e3798c4c3eb8da6ef4020e292571bd8c5604a2a573231dd5c8"},
 ]

 [package.dependencies]
-backoff = {version = ">=1.10.0,<3.0.0", markers = "python_version >= \"3.7\""}
+opentelemetry-proto = "1.27.0"
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-grpc"
+version = "1.27.0"
+description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
+optional = false
+python-versions = ">=3.8"
+groups = ["vdb"]
+files = [
+    {file = "opentelemetry_exporter_otlp_proto_grpc-1.27.0-py3-none-any.whl", hash = "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e"},
+    {file = "opentelemetry_exporter_otlp_proto_grpc-1.27.0.tar.gz", hash = "sha256:af6f72f76bcf425dfb5ad11c1a6d6eca2863b91e63575f89bb7b4b55099d968f"},
+]
+
+[package.dependencies]
+deprecated = ">=1.2.6"
 googleapis-common-protos = ">=1.52,<2.0"
 grpcio = ">=1.0.0,<2.0.0"
-opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-proto = "1.15.0"
-opentelemetry-sdk = ">=1.12,<2.0"
-
-[package.extras]
-test = ["pytest-grpc"]
+opentelemetry-api = ">=1.15,<2.0"
+opentelemetry-exporter-otlp-proto-common = "1.27.0"
+opentelemetry-proto = "1.27.0"
+opentelemetry-sdk = ">=1.27.0,<1.28.0"

 [[package]]
 name = "opentelemetry-instrumentation"
-version = "0.51b0"
+version = "0.48b0"
 description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python"
 optional = false
 python-versions = ">=3.8"
 groups = ["vdb"]
 files = [
-    {file = "opentelemetry_instrumentation-0.51b0-py3-none-any.whl", hash = "sha256:c6de8bd26b75ec8b0e54dff59e198946e29de6a10ec65488c357d4b34aa5bdcf"},
-    {file = "opentelemetry_instrumentation-0.51b0.tar.gz", hash = "sha256:4ca266875e02f3988536982467f7ef8c32a38b8895490ddce9ad9604649424fa"},
+    {file = "opentelemetry_instrumentation-0.48b0-py3-none-any.whl", hash = "sha256:a69750dc4ba6a5c3eb67986a337185a25b739966d80479befe37b546fc870b44"},
+    {file = "opentelemetry_instrumentation-0.48b0.tar.gz", hash = "sha256:94929685d906380743a71c3970f76b5f07476eea1834abd5dd9d17abfe23cc35"},
 ]

 [package.dependencies]
 opentelemetry-api = ">=1.4,<2.0"
-opentelemetry-semantic-conventions = "0.51b0"
-packaging = ">=18.0"
+setuptools = ">=16.0"
 wrapt = ">=1.0.0,<2.0.0"

 [[package]]
 name = "opentelemetry-instrumentation-asgi"
-version = "0.51b0"
+version = "0.48b0"
 description = "ASGI instrumentation for OpenTelemetry"
 optional = false
 python-versions = ">=3.8"
 groups = ["vdb"]
 files = [
-    {file = "opentelemetry_instrumentation_asgi-0.51b0-py3-none-any.whl", hash = "sha256:e8072993db47303b633c6ec1bc74726ba4d32bd0c46c28dfadf99f79521a324c"},
-    {file = "opentelemetry_instrumentation_asgi-0.51b0.tar.gz", hash = "sha256:b3fe97c00f0bfa934371a69674981d76591c68d937b6422a5716ca21081b4148"},
+    {file = "opentelemetry_instrumentation_asgi-0.48b0-py3-none-any.whl", hash = "sha256:ddb1b5fc800ae66e85a4e2eca4d9ecd66367a8c7b556169d9e7b57e10676e44d"},
+    {file = "opentelemetry_instrumentation_asgi-0.48b0.tar.gz", hash = "sha256:04c32174b23c7fa72ddfe192dad874954968a6a924608079af9952964ecdf785"},
 ]

 [package.dependencies]
 asgiref = ">=3.0,<4.0"
 opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.51b0"
-opentelemetry-semantic-conventions = "0.51b0"
-opentelemetry-util-http = "0.51b0"
+opentelemetry-instrumentation = "0.48b0"
+opentelemetry-semantic-conventions = "0.48b0"
+opentelemetry-util-http = "0.48b0"

 [package.extras]
 instruments = ["asgiref (>=3.0,<4.0)"]

 [[package]]
 name = "opentelemetry-instrumentation-fastapi"
-version = "0.51b0"
+version = "0.48b0"
 description = "OpenTelemetry FastAPI Instrumentation"
 optional = false
 python-versions = ">=3.8"
 groups = ["vdb"]
 files = [
-    {file = "opentelemetry_instrumentation_fastapi-0.51b0-py3-none-any.whl", hash = "sha256:10513bbc11a1188adb9c1d2c520695f7a8f2b5f4de14e8162098035901cd6493"},
-    {file = "opentelemetry_instrumentation_fastapi-0.51b0.tar.gz", hash = "sha256:1624e70f2f4d12ceb792d8a0c331244cd6723190ccee01336273b4559bc13abc"},
+    {file = "opentelemetry_instrumentation_fastapi-0.48b0-py3-none-any.whl", hash = "sha256:afeb820a59e139d3e5d96619600f11ce0187658b8ae9e3480857dd790bc024f2"},
+    {file = "opentelemetry_instrumentation_fastapi-0.48b0.tar.gz", hash = "sha256:21a72563ea412c0b535815aeed75fc580240f1f02ebc72381cfab672648637a2"},
 ]

 [package.dependencies]
 opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.51b0"
-opentelemetry-instrumentation-asgi = "0.51b0"
-opentelemetry-semantic-conventions = "0.51b0"
-opentelemetry-util-http = "0.51b0"
+opentelemetry-instrumentation = "0.48b0"
+opentelemetry-instrumentation-asgi = "0.48b0"
+opentelemetry-semantic-conventions = "0.48b0"
+opentelemetry-util-http = "0.48b0"

 [package.extras]
 instruments = ["fastapi (>=0.58,<1.0)"]

 [[package]]
 name = "opentelemetry-proto"
-version = "1.15.0"
+version = "1.27.0"
 description = "OpenTelemetry Python Proto"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 groups = ["vdb"]
 files = [
-    {file = "opentelemetry_proto-1.15.0-py3-none-any.whl", hash = "sha256:044b6d044b4d10530f250856f933442b8753a17f94ae37c207607f733fb9a844"},
-    {file = "opentelemetry_proto-1.15.0.tar.gz", hash = "sha256:9c4008e40ac8cab359daac283fbe7002c5c29c77ea2674ad5626a249e64e0101"},
+    {file = "opentelemetry_proto-1.27.0-py3-none-any.whl", hash = "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace"},
+    {file = "opentelemetry_proto-1.27.0.tar.gz", hash = "sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6"},
 ]

 [package.dependencies]
@@ -5356,47 +5331,47 @@ protobuf = ">=3.19,<5.0"

 [[package]]
 name = "opentelemetry-sdk"
-version = "1.30.0"
+version = "1.27.0"
 description = "OpenTelemetry Python SDK"
 optional = false
 python-versions = ">=3.8"
 groups = ["vdb"]
 files = [
-    {file = "opentelemetry_sdk-1.30.0-py3-none-any.whl", hash = "sha256:14fe7afc090caad881addb6926cec967129bd9260c4d33ae6a217359f6b61091"},
-    {file = "opentelemetry_sdk-1.30.0.tar.gz", hash = "sha256:c9287a9e4a7614b9946e933a67168450b9ab35f08797eb9bc77d998fa480fa18"},
+    {file = "opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d"},
+    {file = "opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f"},
 ]

 [package.dependencies]
-opentelemetry-api = "1.30.0"
-opentelemetry-semantic-conventions = "0.51b0"
+opentelemetry-api = "1.27.0"
+opentelemetry-semantic-conventions = "0.48b0"
 typing-extensions = ">=3.7.4"

 [[package]]
 name = "opentelemetry-semantic-conventions"
-version = "0.51b0"
+version = "0.48b0"
 description = "OpenTelemetry Semantic Conventions"
 optional = false
 python-versions = ">=3.8"
 groups = ["vdb"]
 files = [
-    {file = "opentelemetry_semantic_conventions-0.51b0-py3-none-any.whl", hash = "sha256:fdc777359418e8d06c86012c3dc92c88a6453ba662e941593adb062e48c2eeae"},
-    {file = "opentelemetry_semantic_conventions-0.51b0.tar.gz", hash = "sha256:3fabf47f35d1fd9aebcdca7e6802d86bd5ebc3bc3408b7e3248dde6e87a18c47"},
+    {file = "opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f"},
+    {file = "opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a"},
 ]

 [package.dependencies]
 deprecated = ">=1.2.6"
-opentelemetry-api = "1.30.0"
+opentelemetry-api = "1.27.0"

 [[package]]
 name = "opentelemetry-util-http"
-version = "0.51b0"
+version = "0.48b0"
 description = "Web util for OpenTelemetry"
 optional = false
 python-versions = ">=3.8"
 groups = ["vdb"]
 files = [
-    {file = "opentelemetry_util_http-0.51b0-py3-none-any.whl", hash = "sha256:0561d7a6e9c422b9ef9ae6e77eafcfcd32a2ab689f5e801475cbb67f189efa20"},
-    {file = "opentelemetry_util_http-0.51b0.tar.gz", hash = "sha256:05edd19ca1cc3be3968b1e502fd94816901a365adbeaab6b6ddb974384d3a0b9"},
+    {file = "opentelemetry_util_http-0.48b0-py3-none-any.whl", hash = "sha256:76f598af93aab50328d2a69c786beaedc8b6a7770f7a818cc307eb353debfffb"},
+    {file = "opentelemetry_util_http-0.48b0.tar.gz", hash = "sha256:60312015153580cc20f322e5cdc3d3ecad80a71743235bdb77716e742814623c"},
 ]

 [[package]]
@@ -5875,7 +5850,7 @@ version = "2.10.1"
 description = "Wraps the portalocker recipe for easy usage"
 optional = false
 python-versions = ">=3.8"
-groups = ["main", "vdb"]
+groups = ["vdb"]
 files = [
     {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"},
     {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"},
@@ -5908,14 +5883,14 @@ pydantic = ">=1.9,<3.0"

 [[package]]
 name = "posthog"
-version = "3.19.0"
+version = "3.21.0"
 description = "Integrate PostHog into any python application."
 optional = false
 python-versions = "*"
 groups = ["vdb"]
 files = [
-    {file = "posthog-3.19.0-py2.py3-none-any.whl", hash = "sha256:c294bc0a939e21ecf88d625496f8073cc566c28ec2a917a47d5d32ba33e90a7f"},
-    {file = "posthog-3.19.0.tar.gz", hash = "sha256:7fe5c9e494fc2cca9baa2bd8074c0844d572df46a54378101bc20eec2776027e"},
+    {file = "posthog-3.21.0-py2.py3-none-any.whl", hash = "sha256:1e07626bb5219369dd36826881fa61711713e8175d3557db4657e64ecb351467"},
+    {file = "posthog-3.21.0.tar.gz", hash = "sha256:62e339789f6f018b6a892357f5703d1f1e63c97aee75061b3dc97c5e5c6a5304"},
 ]

 [package.dependencies]
@@ -6057,18 +6032,18 @@ files = [

 [[package]]
 name = "proto-plus"
-version = "1.26.0"
+version = "1.26.1"
 description = "Beautiful, Pythonic protocol buffers"
 optional = false
 python-versions = ">=3.7"
 groups = ["main", "storage"]
 files = [
-    {file = "proto_plus-1.26.0-py3-none-any.whl", hash = "sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7"},
-    {file = "proto_plus-1.26.0.tar.gz", hash = "sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22"},
+    {file = "proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66"},
+    {file = "proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012"},
 ]

 [package.dependencies]
-protobuf = ">=3.19.0,<6.0.0dev"
+protobuf = ">=3.19.0,<7.0.0"

 [package.extras]
 testing = ["google-api-core (>=1.31.5)"]
@@ -6640,14 +6615,14 @@ diagrams = ["jinja2", "railroad-diagrams"]

 [[package]]
 name = "pypdf"
-version = "5.3.1"
+version = "5.4.0"
 description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
 optional = false
 python-versions = ">=3.8"
 groups = ["main"]
 files = [
-    {file = "pypdf-5.3.1-py3-none-any.whl", hash = "sha256:20ea5b8686faad1b695fda054462b667d5e5f51e25fbbc092f12c5e0bb20d738"},
-    {file = "pypdf-5.3.1.tar.gz", hash = "sha256:0b9b715252b3c60bacc052e6a780e8b742cee9b9a2135f6007bb018e22a5adad"},
+    {file = "pypdf-5.4.0-py3-none-any.whl", hash = "sha256:db994ab47cadc81057ea1591b90e5b543e2b7ef2d0e31ef41a9bfe763c119dab"},
+    {file = "pypdf-5.4.0.tar.gz", hash = "sha256:9af476a9dc30fcb137659b0dec747ea94aa954933c52cf02ee33e39a16fe9175"},
 ]

 [package.extras]
@@ -7033,29 +7008,29 @@ files = [

 [[package]]
 name = "pywin32"
-version = "309"
+version = "310"
 description = "Python for Window Extensions"
 optional = false
 python-versions = "*"
-groups = ["main", "vdb"]
+groups = ["vdb"]
 markers = "platform_system == \"Windows\""
 files = [
-    {file = "pywin32-309-cp310-cp310-win32.whl", hash = "sha256:5b78d98550ca093a6fe7ab6d71733fbc886e2af9d4876d935e7f6e1cd6577ac9"},
-    {file = "pywin32-309-cp310-cp310-win_amd64.whl", hash = "sha256:728d08046f3d65b90d4c77f71b6fbb551699e2005cc31bbffd1febd6a08aa698"},
-    {file = "pywin32-309-cp310-cp310-win_arm64.whl", hash = "sha256:c667bcc0a1e6acaca8984eb3e2b6e42696fc035015f99ff8bc6c3db4c09a466a"},
-    {file = "pywin32-309-cp311-cp311-win32.whl", hash = "sha256:d5df6faa32b868baf9ade7c9b25337fa5eced28eb1ab89082c8dae9c48e4cd51"},
-    {file = "pywin32-309-cp311-cp311-win_amd64.whl", hash = "sha256:e7ec2cef6df0926f8a89fd64959eba591a1eeaf0258082065f7bdbe2121228db"},
-    {file = "pywin32-309-cp311-cp311-win_arm64.whl", hash = "sha256:54ee296f6d11db1627216e9b4d4c3231856ed2d9f194c82f26c6cb5650163f4c"},
-    {file = "pywin32-309-cp312-cp312-win32.whl", hash = "sha256:de9acacced5fa82f557298b1fed5fef7bd49beee04190f68e1e4783fbdc19926"},
-    {file = "pywin32-309-cp312-cp312-win_amd64.whl", hash = "sha256:6ff9eebb77ffc3d59812c68db33c0a7817e1337e3537859499bd27586330fc9e"},
-    {file = "pywin32-309-cp312-cp312-win_arm64.whl", hash = "sha256:619f3e0a327b5418d833f44dc87859523635cf339f86071cc65a13c07be3110f"},
-    {file = "pywin32-309-cp313-cp313-win32.whl", hash = "sha256:008bffd4afd6de8ca46c6486085414cc898263a21a63c7f860d54c9d02b45c8d"},
-    {file = "pywin32-309-cp313-cp313-win_amd64.whl", hash = "sha256:bd0724f58492db4cbfbeb1fcd606495205aa119370c0ddc4f70e5771a3ab768d"},
-    {file = "pywin32-309-cp313-cp313-win_arm64.whl", hash = "sha256:8fd9669cfd41863b688a1bc9b1d4d2d76fd4ba2128be50a70b0ea66b8d37953b"},
-    {file = "pywin32-309-cp38-cp38-win32.whl", hash = "sha256:617b837dc5d9dfa7e156dbfa7d3906c009a2881849a80a9ae7519f3dd8c6cb86"},
-    {file = "pywin32-309-cp38-cp38-win_amd64.whl", hash = "sha256:0be3071f555480fbfd86a816a1a773880ee655bf186aa2931860dbb44e8424f8"},
-    {file = "pywin32-309-cp39-cp39-win32.whl", hash = "sha256:72ae9ae3a7a6473223589a1621f9001fe802d59ed227fd6a8503c9af67c1d5f4"},
-    {file = "pywin32-309-cp39-cp39-win_amd64.whl", hash = "sha256:88bc06d6a9feac70783de64089324568ecbc65866e2ab318eab35da3811fd7ef"},
+    {file = "pywin32-310-cp310-cp310-win32.whl", hash = "sha256:6dd97011efc8bf51d6793a82292419eba2c71cf8e7250cfac03bba284454abc1"},
+    {file = "pywin32-310-cp310-cp310-win_amd64.whl", hash = "sha256:c3e78706e4229b915a0821941a84e7ef420bf2b77e08c9dae3c76fd03fd2ae3d"},
+    {file = "pywin32-310-cp310-cp310-win_arm64.whl", hash = "sha256:33babed0cf0c92a6f94cc6cc13546ab24ee13e3e800e61ed87609ab91e4c8213"},
+    {file = "pywin32-310-cp311-cp311-win32.whl", hash = "sha256:1e765f9564e83011a63321bb9d27ec456a0ed90d3732c4b2e312b855365ed8bd"},
+    {file = "pywin32-310-cp311-cp311-win_amd64.whl", hash = "sha256:126298077a9d7c95c53823934f000599f66ec9296b09167810eb24875f32689c"},
+    {file = "pywin32-310-cp311-cp311-win_arm64.whl", hash = "sha256:19ec5fc9b1d51c4350be7bb00760ffce46e6c95eaf2f0b2f1150657b1a43c582"},
+    {file = "pywin32-310-cp312-cp312-win32.whl", hash = "sha256:8a75a5cc3893e83a108c05d82198880704c44bbaee4d06e442e471d3c9ea4f3d"},
+    {file = "pywin32-310-cp312-cp312-win_amd64.whl", hash = "sha256:bf5c397c9a9a19a6f62f3fb821fbf36cac08f03770056711f765ec1503972060"},
+    {file = "pywin32-310-cp312-cp312-win_arm64.whl", hash = "sha256:2349cc906eae872d0663d4d6290d13b90621eaf78964bb1578632ff20e152966"},
+    {file = "pywin32-310-cp313-cp313-win32.whl", hash = "sha256:5d241a659c496ada3253cd01cfaa779b048e90ce4b2b38cd44168ad555ce74ab"},
+    {file = "pywin32-310-cp313-cp313-win_amd64.whl", hash = "sha256:667827eb3a90208ddbdcc9e860c81bde63a135710e21e4cb3348968e4bd5249e"},
+    {file = "pywin32-310-cp313-cp313-win_arm64.whl", hash = "sha256:e308f831de771482b7cf692a1f308f8fca701b2d8f9dde6cc440c7da17e47b33"},
+    {file = "pywin32-310-cp38-cp38-win32.whl", hash = "sha256:0867beb8addefa2e3979d4084352e4ac6e991ca45373390775f7084cc0209b9c"},
+    {file = "pywin32-310-cp38-cp38-win_amd64.whl", hash = "sha256:30f0a9b3138fb5e07eb4973b7077e1883f558e40c578c6925acc7a94c34eaa36"},
+    {file = "pywin32-310-cp39-cp39-win32.whl", hash = "sha256:851c8d927af0d879221e616ae1f66145253537bbdd321a77e8ef701b443a9a1a"},
+    {file = "pywin32-310-cp39-cp39-win_amd64.whl", hash = "sha256:96867217335559ac619f00ad70e513c0fcf84b8a3af9fc2bba3b59b97da70475"},
 ]

 [[package]]
@@ -7933,14 +7908,14 @@ tornado = ["tornado (>=5)"]

 [[package]]
 name = "setuptools"
-version = "76.0.0"
+version = "76.1.0"
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
 optional = false
 python-versions = ">=3.9"
 groups = ["main", "vdb"]
 files = [
= "setuptools-76.0.0-py3-none-any.whl", hash = "sha256:199466a166ff664970d0ee145839f5582cb9bca7a0a3a2e795b6a9cb2308e9c6"}, - {file = "setuptools-76.0.0.tar.gz", hash = "sha256:43b4ee60e10b0d0ee98ad11918e114c70701bc6051662a9a675a0496c1a158f4"}, + {file = "setuptools-76.1.0-py3-none-any.whl", hash = "sha256:34750dcb17d046929f545dec9b8349fe42bf4ba13ddffee78428aec422dbfb73"}, + {file = "setuptools-76.1.0.tar.gz", hash = "sha256:4959b9ad482ada2ba2320c8f1a8d8481d4d8d668908a7a1b84d987375cd7f5bd"}, ] [package.extras] @@ -8712,14 +8687,14 @@ files = [ [[package]] name = "types-protobuf" -version = "5.29.1.20250208" +version = "5.29.1.20250315" description = "Typing stubs for protobuf" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_protobuf-5.29.1.20250208-py3-none-any.whl", hash = "sha256:c5f8bfb4afdc1b5cbca1848f2c8b361a2090add7401f410b22b599ef647bf483"}, - {file = "types_protobuf-5.29.1.20250208.tar.gz", hash = "sha256:c1acd6a59ab554dbe09b5d1fa7dd701e2fcfb2212937a3af1c03b736060b792a"}, + {file = "types_protobuf-5.29.1.20250315-py3-none-any.whl", hash = "sha256:57efd51fd0979d1f5e1d94053d1e7cfff9c028e8d05b17e341b91a1c7fce37c4"}, + {file = "types_protobuf-5.29.1.20250315.tar.gz", hash = "sha256:0b05bc34621d046de54b94fddd5f4eb3bf849fe2e13a50f8fb8e89f35045ff49"}, ] [[package]] @@ -8736,14 +8711,14 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.20250121" +version = "2.9.21.20250318" description = "Typing stubs for psycopg2" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_psycopg2-2.9.21.20250121-py3-none-any.whl", hash = "sha256:b890dc6f5a08b6433f0ff73a4ec9a834deedad3e914f2a4a6fd43df021f745f1"}, - {file = "types_psycopg2-2.9.21.20250121.tar.gz", hash = "sha256:2b0e2cd0f3747af1ae25a7027898716d80209604770ef3cbf350fe055b9c349b"}, + {file = "types_psycopg2-2.9.21.20250318-py3-none-any.whl", hash = "sha256:7296d111ad950bbd2fc979a1ab0572acae69047f922280e77db657c00d2c79c0"}, + {file = "types_psycopg2-2.9.21.20250318.tar.gz", hash = "sha256:eb6eac5bfb16adfd5f16b818918b9e26a40ede147e0f2bbffdf53a6ef7025a87"}, ] [[package]] @@ -8784,14 +8759,14 @@ files = [ [[package]] name = "types-regex" -version = "2024.11.6.20250305" +version = "2024.11.6.20250318" description = "Typing stubs for regex" optional = false python-versions = ">=3.9" groups = ["dev"] files = [ - {file = "types_regex-2024.11.6.20250305-py3-none-any.whl", hash = "sha256:bd6b97d2169563c190b9b8c56e0e03caad1e24e0bea2f1c1fdfe5d354772aa42"}, - {file = "types_regex-2024.11.6.20250305.tar.gz", hash = "sha256:28b886bf4eb23400030aa681a2d76b3ee869440c3d458b7017ec168000c1b62f"}, + {file = "types_regex-2024.11.6.20250318-py3-none-any.whl", hash = "sha256:9309fe5918ee7ffe859c04c18040697655fade366c4dc844bbebe86976a9980b"}, + {file = "types_regex-2024.11.6.20250318.tar.gz", hash = "sha256:6d472d0acf37b138cb32f67bd5ab1e7a200e94da8c1aa93ca3625a63e2efe1f3"}, ] [[package]] diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index dde434c3ad..7ce4e4af22 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -1,3 +1,4 @@ +import copy import datetime import json import logging @@ -17,6 +18,7 @@ from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError from core.model_manager import ModelManager from core.model_runtime.entities.model_entities import ModelType from core.plugin.entities.plugin import ModelProviderID +from core.rag.index_processor.constant.built_in_field import BuiltInField from 
 from core.rag.index_processor.constant.index_type import IndexType
 from core.rag.retrieval.retrieval_methods import RetrievalMethod
 from events.dataset_event import dataset_was_deleted
@@ -643,9 +645,45 @@ class DocumentService:
 
         return document
 
+    @staticmethod
+    def get_document_by_ids(document_ids: list[str]) -> list[Document]:
+        documents = (
+            db.session.query(Document)
+            .filter(
+                Document.id.in_(document_ids),
+                Document.enabled == True,
+                Document.indexing_status == "completed",
+                Document.archived == False,
+            )
+            .all()
+        )
+        return documents
+
     @staticmethod
     def get_document_by_dataset_id(dataset_id: str) -> list[Document]:
-        documents = db.session.query(Document).filter(Document.dataset_id == dataset_id, Document.enabled == True).all()
+        documents = (
+            db.session.query(Document)
+            .filter(
+                Document.dataset_id == dataset_id,
+                Document.enabled == True,
+            )
+            .all()
+        )
+
+        return documents
+
+    @staticmethod
+    def get_working_documents_by_dataset_id(dataset_id: str) -> list[Document]:
+        documents = (
+            db.session.query(Document)
+            .filter(
+                Document.dataset_id == dataset_id,
+                Document.enabled == True,
+                Document.indexing_status == "completed",
+                Document.archived == False,
+            )
+            .all()
+        )
 
         return documents
 
@@ -728,8 +766,13 @@ class DocumentService:
         if document.tenant_id != current_user.current_tenant_id:
             raise ValueError("No permission.")
 
-        document.name = name
+        if dataset.built_in_field_enabled:
+            if document.doc_metadata:
+                doc_metadata = copy.deepcopy(document.doc_metadata)
+                doc_metadata[BuiltInField.document_name.value] = name
+                document.doc_metadata = doc_metadata
+        document.name = name
 
         db.session.add(document)
         db.session.commit()
 
@@ -1128,9 +1171,20 @@ class DocumentService:
             doc_form=document_form,
             doc_language=document_language,
         )
+        doc_metadata = {}
+        if dataset.built_in_field_enabled:
+            doc_metadata = {
+                BuiltInField.document_name: name,
+                BuiltInField.uploader: account.name,
+                BuiltInField.upload_date: datetime.datetime.now(datetime.UTC).strftime("%Y-%m-%d %H:%M:%S"),
+                BuiltInField.last_update_date: datetime.datetime.now(datetime.UTC).strftime("%Y-%m-%d %H:%M:%S"),
+                BuiltInField.source: data_source_type,
+            }
         if metadata is not None:
-            document.doc_metadata = metadata.doc_metadata
+            doc_metadata.update(metadata.doc_metadata)
             document.doc_type = metadata.doc_type
+        if doc_metadata:
+            document.doc_metadata = doc_metadata
         return document
 
     @staticmethod
diff --git a/api/services/entities/knowledge_entities/knowledge_entities.py b/api/services/entities/knowledge_entities/knowledge_entities.py
index b84339fd5f..6b0ecd7e33 100644
--- a/api/services/entities/knowledge_entities/knowledge_entities.py
+++ b/api/services/entities/knowledge_entities/knowledge_entities.py
@@ -125,3 +125,36 @@ class SegmentUpdateArgs(BaseModel):
 class ChildChunkUpdateArgs(BaseModel):
     id: Optional[str] = None
     content: str
+
+
+class MetadataArgs(BaseModel):
+    type: Literal["string", "number", "time"]
+    name: str
+
+
+class MetadataUpdateArgs(BaseModel):
+    name: str
+    value: Optional[str | int | float] = None
+
+
+class MetadataValueUpdateArgs(BaseModel):
+    fields: list[MetadataUpdateArgs]
+
+
+class MetadataDetail(BaseModel):
+    id: str
+    name: str
+    value: Optional[str | int | float] = None
+
+
+class DocumentMetadataOperation(BaseModel):
+    document_id: str
+    metadata_list: list[MetadataDetail]
+
+
+class MetadataOperationData(BaseModel):
+    """
+    Metadata operation data
+    """
+
+    operation_data: list[DocumentMetadataOperation]
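The Pydantic models above define the request shape consumed later by MetadataService.update_documents_metadata. A minimal sketch (not part of the patch) of validating such a payload is shown below; the UUIDs and field names are placeholders, not values from this change set.

# Sketch only: validate a documents-metadata payload with the models added above.
from services.entities.knowledge_entities.knowledge_entities import MetadataOperationData

payload = {
    "operation_data": [
        {
            "document_id": "11111111-1111-1111-1111-111111111111",  # placeholder
            "metadata_list": [
                {"id": "22222222-2222-2222-2222-222222222222", "name": "author", "value": "Alice"},
                {"id": "33333333-3333-3333-3333-333333333333", "name": "page_count", "value": 42},
            ],
        }
    ]
}

args = MetadataOperationData(**payload)  # raises pydantic.ValidationError on malformed input
assert args.operation_data[0].metadata_list[0].name == "author"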
diff --git a/api/services/external_knowledge_service.py b/api/services/external_knowledge_service.py
index 8916a951c7..d9ee221a3c 100644
--- a/api/services/external_knowledge_service.py
+++ b/api/services/external_knowledge_service.py
@@ -8,6 +8,7 @@ import validators
 
 from constants import HIDDEN_VALUE
 from core.helper import ssrf_proxy
+from core.rag.entities.metadata_entities import MetadataCondition
 from extensions.ext_database import db
 from models.dataset import (
     Dataset,
@@ -245,7 +246,11 @@ class ExternalDatasetService:
 
     @staticmethod
     def fetch_external_knowledge_retrieval(
-        tenant_id: str, dataset_id: str, query: str, external_retrieval_parameters: dict
+        tenant_id: str,
+        dataset_id: str,
+        query: str,
+        external_retrieval_parameters: dict,
+        metadata_condition: Optional[MetadataCondition] = None,
     ) -> list:
         external_knowledge_binding = ExternalKnowledgeBindings.query.filter_by(
             dataset_id=dataset_id, tenant_id=tenant_id
@@ -272,6 +277,7 @@ class ExternalDatasetService:
             },
             "query": query,
             "knowledge_id": external_knowledge_binding.external_knowledge_id,
+            "metadata_condition": metadata_condition.model_dump() if metadata_condition else None,
         }
 
         response = ExternalDatasetService.process_external_api(
diff --git a/api/services/metadata_service.py b/api/services/metadata_service.py
new file mode 100644
index 0000000000..a43b970a39
--- /dev/null
+++ b/api/services/metadata_service.py
@@ -0,0 +1,241 @@
+import copy
+import datetime
+import logging
+from typing import Optional
+
+from flask_login import current_user  # type: ignore
+
+from core.rag.index_processor.constant.built_in_field import BuiltInField, MetadataDataSource
+from extensions.ext_database import db
+from extensions.ext_redis import redis_client
+from models.dataset import Dataset, DatasetMetadata, DatasetMetadataBinding
+from services.dataset_service import DocumentService
+from services.entities.knowledge_entities.knowledge_entities import (
+    MetadataArgs,
+    MetadataOperationData,
+)
+
+
+class MetadataService:
+    @staticmethod
+    def create_metadata(dataset_id: str, metadata_args: MetadataArgs) -> DatasetMetadata:
+        # check if metadata name already exists
+        if DatasetMetadata.query.filter_by(
+            tenant_id=current_user.current_tenant_id, dataset_id=dataset_id, name=metadata_args.name
+        ).first():
+            raise ValueError("Metadata name already exists.")
+        for field in BuiltInField:
+            if field.value == metadata_args.name:
+                raise ValueError("Metadata name already exists in Built-in fields.")
+        metadata = DatasetMetadata(
+            tenant_id=current_user.current_tenant_id,
+            dataset_id=dataset_id,
+            type=metadata_args.type,
+            name=metadata_args.name,
+            created_by=current_user.id,
+        )
+        db.session.add(metadata)
+        db.session.commit()
+        return metadata
+
+    @staticmethod
+    def update_metadata_name(dataset_id: str, metadata_id: str, name: str) -> DatasetMetadata:  # type: ignore
+        lock_key = f"dataset_metadata_lock_{dataset_id}"
+        # check if metadata name already exists
+        if DatasetMetadata.query.filter_by(
+            tenant_id=current_user.current_tenant_id, dataset_id=dataset_id, name=name
+        ).first():
+            raise ValueError("Metadata name already exists.")
+        for field in BuiltInField:
+            if field.value == name:
+                raise ValueError("Metadata name already exists in Built-in fields.")
+        try:
+            MetadataService.knowledge_base_metadata_lock_check(dataset_id, None)
+            metadata = DatasetMetadata.query.filter_by(id=metadata_id).first()
+            if metadata is None:
+                raise ValueError("Metadata not found.")
+            old_name = metadata.name
+            metadata.name = name
+            metadata.updated_by = current_user.id
+            metadata.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
+
+            # update related documents
+            dataset_metadata_bindings = DatasetMetadataBinding.query.filter_by(metadata_id=metadata_id).all()
+            if dataset_metadata_bindings:
+                document_ids = [binding.document_id for binding in dataset_metadata_bindings]
+                documents = DocumentService.get_document_by_ids(document_ids)
+                for document in documents:
+                    doc_metadata = copy.deepcopy(document.doc_metadata)
+                    value = doc_metadata.pop(old_name, None)
+                    doc_metadata[name] = value
+                    document.doc_metadata = doc_metadata
+                    db.session.add(document)
+            db.session.commit()
+            return metadata  # type: ignore
+        except Exception:
+            logging.exception("Update metadata name failed")
+        finally:
+            redis_client.delete(lock_key)
+
+    @staticmethod
+    def delete_metadata(dataset_id: str, metadata_id: str):
+        lock_key = f"dataset_metadata_lock_{dataset_id}"
+        try:
+            MetadataService.knowledge_base_metadata_lock_check(dataset_id, None)
+            metadata = DatasetMetadata.query.filter_by(id=metadata_id).first()
+            if metadata is None:
+                raise ValueError("Metadata not found.")
+            db.session.delete(metadata)
+
+            # deal related documents
+            dataset_metadata_bindings = DatasetMetadataBinding.query.filter_by(metadata_id=metadata_id).all()
+            if dataset_metadata_bindings:
+                document_ids = [binding.document_id for binding in dataset_metadata_bindings]
+                documents = DocumentService.get_document_by_ids(document_ids)
+                for document in documents:
+                    doc_metadata = copy.deepcopy(document.doc_metadata)
+                    doc_metadata.pop(metadata.name, None)
+                    document.doc_metadata = doc_metadata
+                    db.session.add(document)
+            db.session.commit()
+            return metadata
+        except Exception:
+            logging.exception("Delete metadata failed")
+        finally:
+            redis_client.delete(lock_key)
+
+    @staticmethod
+    def get_built_in_fields():
+        return [
+            {"name": BuiltInField.document_name.value, "type": "string"},
+            {"name": BuiltInField.uploader.value, "type": "string"},
+            {"name": BuiltInField.upload_date.value, "type": "time"},
+            {"name": BuiltInField.last_update_date.value, "type": "time"},
+            {"name": BuiltInField.source.value, "type": "string"},
+        ]
+
+    @staticmethod
+    def enable_built_in_field(dataset: Dataset):
+        if dataset.built_in_field_enabled:
+            return
+        lock_key = f"dataset_metadata_lock_{dataset.id}"
+        try:
+            MetadataService.knowledge_base_metadata_lock_check(dataset.id, None)
+            dataset.built_in_field_enabled = True
+            db.session.add(dataset)
+            documents = DocumentService.get_working_documents_by_dataset_id(dataset.id)
+            if documents:
+                for document in documents:
+                    if not document.doc_metadata:
+                        doc_metadata = {}
+                    else:
+                        doc_metadata = copy.deepcopy(document.doc_metadata)
+                    doc_metadata[BuiltInField.document_name.value] = document.name
+                    doc_metadata[BuiltInField.uploader.value] = document.uploader
+                    doc_metadata[BuiltInField.upload_date.value] = document.upload_date.timestamp()
+                    doc_metadata[BuiltInField.last_update_date.value] = document.last_update_date.timestamp()
+                    doc_metadata[BuiltInField.source.value] = MetadataDataSource[document.data_source_type].value
+                    document.doc_metadata = doc_metadata
+                    db.session.add(document)
+            db.session.commit()
+        except Exception:
+            logging.exception("Enable built-in field failed")
+        finally:
+            redis_client.delete(lock_key)
+
+    @staticmethod
+    def disable_built_in_field(dataset: Dataset):
+        if not dataset.built_in_field_enabled:
+            return
+        lock_key = f"dataset_metadata_lock_{dataset.id}"
+        try:
+            MetadataService.knowledge_base_metadata_lock_check(dataset.id, None)
+            dataset.built_in_field_enabled = False
+            db.session.add(dataset)
+            documents = DocumentService.get_working_documents_by_dataset_id(dataset.id)
+            document_ids = []
+            if documents:
+                for document in documents:
+                    doc_metadata = copy.deepcopy(document.doc_metadata)
+                    doc_metadata.pop(BuiltInField.document_name.value, None)
+                    doc_metadata.pop(BuiltInField.uploader.value, None)
+                    doc_metadata.pop(BuiltInField.upload_date.value, None)
+                    doc_metadata.pop(BuiltInField.last_update_date.value, None)
+                    doc_metadata.pop(BuiltInField.source.value, None)
+                    document.doc_metadata = doc_metadata
+                    db.session.add(document)
+                    document_ids.append(document.id)
+            db.session.commit()
+        except Exception:
+            logging.exception("Disable built-in field failed")
+        finally:
+            redis_client.delete(lock_key)
+
+    @staticmethod
+    def update_documents_metadata(dataset: Dataset, metadata_args: MetadataOperationData):
+        for operation in metadata_args.operation_data:
+            lock_key = f"document_metadata_lock_{operation.document_id}"
+            try:
+                MetadataService.knowledge_base_metadata_lock_check(None, operation.document_id)
+                document = DocumentService.get_document(dataset.id, operation.document_id)
+                if document is None:
+                    raise ValueError("Document not found.")
+                doc_metadata = {}
+                for metadata_value in operation.metadata_list:
+                    doc_metadata[metadata_value.name] = metadata_value.value
+                if dataset.built_in_field_enabled:
+                    doc_metadata[BuiltInField.document_name.value] = document.name
+                    doc_metadata[BuiltInField.uploader.value] = document.uploader
+                    doc_metadata[BuiltInField.upload_date.value] = document.upload_date.timestamp()
+                    doc_metadata[BuiltInField.last_update_date.value] = document.last_update_date.timestamp()
+                    doc_metadata[BuiltInField.source.value] = MetadataDataSource[document.data_source_type].value
+                document.doc_metadata = doc_metadata
+                db.session.add(document)
+                db.session.commit()
+                # deal metadata binding
+                DatasetMetadataBinding.query.filter_by(document_id=operation.document_id).delete()
+                for metadata_value in operation.metadata_list:
+                    dataset_metadata_binding = DatasetMetadataBinding(
+                        tenant_id=current_user.current_tenant_id,
+                        dataset_id=dataset.id,
+                        document_id=operation.document_id,
+                        metadata_id=metadata_value.id,
+                        created_by=current_user.id,
+                    )
+                    db.session.add(dataset_metadata_binding)
+                db.session.commit()
+            except Exception:
+                logging.exception("Update documents metadata failed")
+            finally:
+                redis_client.delete(lock_key)
+
+    @staticmethod
+    def knowledge_base_metadata_lock_check(dataset_id: Optional[str], document_id: Optional[str]):
+        if dataset_id:
+            lock_key = f"dataset_metadata_lock_{dataset_id}"
+            if redis_client.get(lock_key):
+                raise ValueError("Another knowledge base metadata operation is running, please wait a moment.")
+            redis_client.set(lock_key, 1, ex=3600)
+        if document_id:
+            lock_key = f"document_metadata_lock_{document_id}"
+            if redis_client.get(lock_key):
+                raise ValueError("Another document metadata operation is running, please wait a moment.")
+            redis_client.set(lock_key, 1, ex=3600)
+
+    @staticmethod
+    def get_dataset_metadatas(dataset: Dataset):
+        return {
+            "doc_metadata": [
+                {
+                    "id": item.get("id"),
+                    "name": item.get("name"),
+                    "type": item.get("type"),
+                    "count": DatasetMetadataBinding.query.filter_by(
+                        metadata_id=item.get("id"), dataset_id=dataset.id
+                    ).count(),
+                }
+                for item in dataset.doc_metadata or []
+                if item.get("id") != "built-in"
+            ],
+            "built_in_field_enabled": dataset.built_in_field_enabled,
+        }
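Since MetadataService is a new module, a short usage sketch may help reviewers. It is illustrative only and assumes a Flask application context with a logged-in account, because the service depends on flask_login.current_user, the SQLAlchemy session, and the Redis locks shown above; the dataset and document ids are placeholders, not values from this patch.

# Sketch only: typical call sequence for the new MetadataService.
from services.dataset_service import DatasetService
from services.entities.knowledge_entities.knowledge_entities import MetadataArgs, MetadataOperationData
from services.metadata_service import MetadataService

dataset = DatasetService.get_dataset("<dataset-id>")  # placeholder id

# 1. Create a custom metadata field on the dataset.
field = MetadataService.create_metadata(dataset.id, MetadataArgs(type="string", name="author"))

# 2. Enable the built-in fields (document_name, uploader, upload_date, ...).
MetadataService.enable_built_in_field(dataset)

# 3. Assign values to documents; bindings are rewritten per document.
MetadataService.update_documents_metadata(
    dataset,
    MetadataOperationData(
        operation_data=[
            {
                "document_id": "<document-id>",  # placeholder id
                "metadata_list": [{"id": field.id, "name": field.name, "value": "Alice"}],
            }
        ]
    ),
)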
diff --git a/api/services/tag_service.py b/api/services/tag_service.py
index 1fbaee96e8..8cc903bde5 100644
--- a/api/services/tag_service.py
+++ b/api/services/tag_service.py
@@ -20,7 +20,7 @@ class TagService:
         )
         if keyword:
             query = query.filter(db.and_(Tag.name.ilike(f"%{keyword}%")))
-        query = query.group_by(Tag.id, Tag.type, Tag.name, Tag.created_at)
+        query = query.group_by(Tag.id, Tag.type, Tag.name)
         results: list = query.order_by(Tag.created_at.desc()).all()
         return results
 
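The GROUP BY simplification above is safe on PostgreSQL as long as Tag.id is the table's primary key: the other Tag columns are then functionally dependent on the grouped key, so they can still be selected and used in ORDER BY without being listed in GROUP BY. The self-contained sketch below uses toy models (not the project's) purely to illustrate that pattern.

# Illustrative sketch with toy models; compiles to SQL without a database connection.
import datetime

from sqlalchemy import DateTime, ForeignKey, String, func, select
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class Tag(Base):
    __tablename__ = "tags"
    id: Mapped[str] = mapped_column(String, primary_key=True)
    type: Mapped[str] = mapped_column(String)
    name: Mapped[str] = mapped_column(String)
    created_at: Mapped[datetime.datetime] = mapped_column(DateTime)


class TagBinding(Base):
    __tablename__ = "tag_bindings"
    id: Mapped[str] = mapped_column(String, primary_key=True)
    tag_id: Mapped[str] = mapped_column(String, ForeignKey("tags.id"))


stmt = (
    select(Tag, func.count(TagBinding.id))
    .outerjoin(TagBinding, TagBinding.tag_id == Tag.id)
    .group_by(Tag.id, Tag.type, Tag.name)  # created_at no longer listed
    .order_by(Tag.created_at.desc())       # still legal: functionally dependent on the grouped PK
)
print(stmt)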